From c566d5a106a363186060d115a49867ace796ac53 Mon Sep 17 00:00:00 2001 From: Patrik Nordwall Date: Mon, 8 Dec 2014 11:02:14 +0100 Subject: [PATCH] per #15423 Remove deprecated features from akka-persistence * remove channels * remove View * remove Processor * collapse the complicated internal state management that was spread out between Processor, Eventsourced and Recovery * remove Recovery trait, this caused some duplication between Eventsourced and PersistentView, but but the enhanced PersistentView will not be based on recovery infrastructure, and therefore PersistentView code will be replaced anyway * remove PersistentBatch * remove LoopMessage * remove deleteMessages of individual messages * remove Persistent, PersistentRepr and PersistentImpl are kept * remove processorId * update doc sample code * note in migration guide about persistenceId * rename Resequencable to PersistentEnvelope --- .../PersistenceActorDeferBenchmark.scala | 38 - .../PersistentActorBenchmark.scala | 27 +- .../docs/persistence/PersistenceDocTest.java | 351 +- .../persistence/PersistencePluginDocTest.java | 46 +- akka-docs/rst/java/lambda-persistence.rst | 13 +- akka-docs/rst/java/persistence.rst | 9 +- .../project/migration-guide-2.3.x-2.4.x.rst | 13 + ...e-persistence-experimental-2.3.x-2.4.x.rst | 14 + .../docs/persistence/PersistenceDocSpec.scala | 309 +- .../PersistencePluginDocSpec.scala | 41 +- .../PersistenceSerializerDocSpec.scala | 2 +- akka-docs/rst/scala/persistence.rst | 19 +- .../persistence/journal/JournalPerfSpec.scala | 2 +- .../persistence/journal/JournalSpec.scala | 53 +- .../snapshot/SnapshotStoreSpec.scala | 2 +- .../local/LocalSnapshotStoreSpec.scala | 8 +- .../journal/japi/AsyncRecoveryPlugin.java | 69 +- .../journal/japi/AsyncWritePlugin.java | 49 +- .../journal/japi/SyncWritePlugin.java | 49 +- .../serialization/MessageFormats.java | 3096 +---------------- .../snapshot/japi/SnapshotStorePlugin.java | 80 +- .../src/main/protobuf/MessageFormats.proto | 27 +- 
.../src/main/resources/reference.conf | 3 - .../persistence/AtLeastOnceDelivery.scala | 8 +- .../main/scala/akka/persistence/Channel.scala | 384 -- .../scala/akka/persistence/Eventsourced.scala | 1010 +++--- .../akka/persistence/JournalProtocol.scala | 41 +- .../scala/akka/persistence/Persistence.scala | 47 +- .../scala/akka/persistence/Persistent.scala | 302 +- .../akka/persistence/PersistentActor.scala | 376 ++ .../akka/persistence/PersistentChannel.scala | 416 --- .../akka/persistence/PersistentView.scala | 312 +- .../scala/akka/persistence/Processor.scala | 484 --- .../scala/akka/persistence/Recovery.scala | 328 -- .../scala/akka/persistence/Snapshot.scala | 15 +- .../main/scala/akka/persistence/View.scala | 178 - .../persistence/journal/AsyncRecovery.scala | 3 - .../journal/AsyncWriteJournal.scala | 61 +- .../persistence/journal/AsyncWriteProxy.scala | 14 +- .../journal/SyncWriteJournal.scala | 59 +- .../journal/WriteJournalBase.scala | 6 +- .../journal/inmem/InmemJournal.scala | 6 - .../journal/japi/AsyncWriteJournal.scala | 6 - .../journal/japi/SyncWriteJournal.scala | 6 - .../journal/leveldb/LeveldbIdMapping.scala | 12 +- .../journal/leveldb/LeveldbKey.scala | 15 +- .../journal/leveldb/LeveldbRecovery.scala | 22 +- .../journal/leveldb/LeveldbStore.scala | 20 +- .../serialization/MessageSerializer.scala | 108 +- .../serialization/SnapshotSerializer.scala | 2 +- .../persistence/snapshot/SnapshotStore.scala | 4 +- .../AtLeastOnceDeliveryCrashSpec.scala | 10 +- .../AtLeastOnceDeliveryFailureSpec.scala | 4 +- .../scala/akka/persistence/ChannelSpec.scala | 195 -- .../scala/akka/persistence/FailureSpec.scala | 157 - .../persistence/NumberProcessorSpec.scala | 90 - .../akka/persistence/PerformanceSpec.scala | 218 +- .../akka/persistence/PersistenceSpec.scala | 12 +- .../PersistentActorFailureSpec.scala | 12 +- .../persistence/PersistentActorSpec.scala | 348 +- .../persistence/PersistentChannelSpec.scala | 151 - .../akka/persistence/PersistentViewSpec.scala | 10 +- 
.../persistence/ProcessorChannelSpec.scala | 210 -- .../akka/persistence/ProcessorSpec.scala | 473 --- .../akka/persistence/ProcessorStashSpec.scala | 132 - .../SnapshotDirectoryFailureSpec.scala | 16 +- .../SnapshotFailureRobustnessSpec.scala | 53 +- .../SnapshotSerializationSpec.scala | 41 +- .../scala/akka/persistence/SnapshotSpec.scala | 123 +- .../scala/akka/persistence/ViewSpec.scala | 321 -- .../journal/chaos/ChaosJournal.scala | 24 +- .../leveldb/SharedLeveldbJournalSpec.scala | 56 +- .../serialization/SerializerSpec.scala | 71 +- .../java/doc/LambdaPersistenceDocTest.java | 361 +- .../doc/LambdaPersistencePluginDocTest.java | 20 +- .../persistence/PersistentActorExample.java | 17 +- .../PersistentActorFailureExample.java | 2 +- .../sample/persistence/SnapshotExample.java | 1 + .../persistence/PersistentActorExample.java | 3 + .../PersistentActorFailureExample.java | 2 +- .../sample/persistence/SnapshotExample.java | 1 + .../PersistentActorFailureExample.scala | 6 +- .../sample/persistence/SnapshotExample.scala | 4 +- .../sample/persistence/ViewExample.scala | 13 +- 84 files changed, 2162 insertions(+), 9560 deletions(-) delete mode 100644 akka-persistence/src/main/scala/akka/persistence/Channel.scala create mode 100644 akka-persistence/src/main/scala/akka/persistence/PersistentActor.scala delete mode 100644 akka-persistence/src/main/scala/akka/persistence/PersistentChannel.scala delete mode 100644 akka-persistence/src/main/scala/akka/persistence/Processor.scala delete mode 100644 akka-persistence/src/main/scala/akka/persistence/Recovery.scala delete mode 100644 akka-persistence/src/main/scala/akka/persistence/View.scala delete mode 100644 akka-persistence/src/test/scala/akka/persistence/ChannelSpec.scala delete mode 100644 akka-persistence/src/test/scala/akka/persistence/FailureSpec.scala delete mode 100644 akka-persistence/src/test/scala/akka/persistence/NumberProcessorSpec.scala delete mode 100644 
akka-persistence/src/test/scala/akka/persistence/PersistentChannelSpec.scala delete mode 100644 akka-persistence/src/test/scala/akka/persistence/ProcessorChannelSpec.scala delete mode 100644 akka-persistence/src/test/scala/akka/persistence/ProcessorSpec.scala delete mode 100644 akka-persistence/src/test/scala/akka/persistence/ProcessorStashSpec.scala delete mode 100644 akka-persistence/src/test/scala/akka/persistence/ViewSpec.scala diff --git a/akka-bench-jmh/src/main/scala/akka/persistence/PersistenceActorDeferBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/persistence/PersistenceActorDeferBenchmark.scala index 9aa28b1ab0..0809dd5bf4 100644 --- a/akka-bench-jmh/src/main/scala/akka/persistence/PersistenceActorDeferBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/persistence/PersistenceActorDeferBenchmark.scala @@ -37,8 +37,6 @@ class PersistentActorDeferBenchmark { var system: ActorSystem = _ var probe: TestProbe = _ - var processor: ActorRef = _ - var processor_replyASAP: ActorRef = _ var persistAsync_defer: ActorRef = _ var persistAsync_defer_replyASAP: ActorRef = _ @@ -51,8 +49,6 @@ class PersistentActorDeferBenchmark { probe = TestProbe()(system) storageLocations.foreach(FileUtils.deleteDirectory) - processor = system.actorOf(Props(classOf[`processor, forward Persistent, like defer`], data10k.last), "p-1") - processor_replyASAP = system.actorOf(Props(classOf[`processor, forward Persistent, reply ASAP`], data10k.last), "p-2") persistAsync_defer = system.actorOf(Props(classOf[`persistAsync, defer`], data10k.last), "a-1") persistAsync_defer_replyASAP = system.actorOf(Props(classOf[`persistAsync, defer, respond ASAP`], data10k.last), "a-2") } @@ -65,22 +61,6 @@ class PersistentActorDeferBenchmark { storageLocations.foreach(FileUtils.deleteDirectory) } - @Benchmark - @OperationsPerInvocation(10000) - def tell_processor_Persistent_reply() { - for (i <- data10k) processor.tell(i, probe.ref) - - probe.expectMsg(data10k.last) - } - - @Benchmark - 
@OperationsPerInvocation(10000) - def tell_processor_Persistent_replyASAP() { - for (i <- data10k) processor_replyASAP.tell(i, probe.ref) - - probe.expectMsg(data10k.last) - } - @Benchmark @OperationsPerInvocation(10000) def tell_persistAsync_defer_persistAsync_reply() { @@ -99,24 +79,6 @@ class PersistentActorDeferBenchmark { } -class `processor, forward Persistent, like defer`(respondAfter: Int) extends Processor { - def receive = { - case n: Int => - self forward Persistent(Evt(n)) - self forward Evt(n) - case Persistent(p) => // ignore - case Evt(n) if n == respondAfter => sender() ! respondAfter - } -} -class `processor, forward Persistent, reply ASAP`(respondAfter: Int) extends Processor { - def receive = { - case n: Int => - self forward Persistent(Evt(n)) - if (n == respondAfter) sender() ! respondAfter - case _ => // ignore - } -} - class `persistAsync, defer`(respondAfter: Int) extends PersistentActor { override def persistenceId: String = self.path.name diff --git a/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorBenchmark.scala index 1e777c7393..07ff841b5b 100644 --- a/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/persistence/PersistentActorBenchmark.scala @@ -28,7 +28,6 @@ class PersistentActorThroughputBenchmark { var probe: TestProbe = _ var actor: ActorRef = _ var persistPersistentActor: ActorRef = _ - var persistProcessor: ActorRef = _ var persistAsync1PersistentActor: ActorRef = _ var noPersistPersistentActor: ActorRef = _ var persistAsyncQuickReplyPersistentActor: ActorRef = _ @@ -44,8 +43,6 @@ class PersistentActorThroughputBenchmark { storageLocations.foreach(FileUtils.deleteDirectory) actor = system.actorOf(Props(classOf[BaselineActor], data10k.last), "a-1") - - persistProcessor = system.actorOf(Props(classOf[PersistProcessor], data10k.last), "p-1") noPersistPersistentActor = 
system.actorOf(Props(classOf[NoPersistPersistentActor], data10k.last), "nop-1") persistPersistentActor = system.actorOf(Props(classOf[PersistPersistentActor], data10k.last), "ep-1") @@ -78,22 +75,6 @@ class PersistentActorThroughputBenchmark { probe.expectMsg(Evt(data10k.last)) } - @Benchmark - @OperationsPerInvocation(10000) - def processor_persist_reply() { - for (i <- data10k) persistProcessor.tell(Persistent(i), probe.ref) - - probe.expectMsg(Evt(data10k.last)) - } - - @Benchmark - @OperationsPerInvocation(10000) - def processor_noPersist_reply() { - for (i <- data10k) persistProcessor.tell(i, probe.ref) - - probe.expectMsg(Evt(data10k.last)) - } - @Benchmark @OperationsPerInvocation(10000) def persistentActor_persistAsync_reply() { @@ -101,7 +82,7 @@ class PersistentActorThroughputBenchmark { probe.expectMsg(Evt(data10k.last)) } - + @Benchmark @OperationsPerInvocation(10000) def persistentActor_noPersist_reply() { @@ -144,12 +125,6 @@ class PersistPersistentActor(respondAfter: Int) extends PersistentActor { } } -class PersistProcessor(respondAfter: Int) extends Processor { - override def receive = { - case Persistent(n: Int, _) => if (n == respondAfter) sender() ! Evt(n) - case n: Int => if (n == respondAfter) sender() ! 
Evt(n) - } -} class PersistAsyncPersistentActor(respondAfter: Int) extends PersistentActor { override def persistenceId: String = self.path.name diff --git a/akka-docs/rst/java/code/docs/persistence/PersistenceDocTest.java b/akka-docs/rst/java/code/docs/persistence/PersistenceDocTest.java index 971601dcf5..a5d92a2130 100644 --- a/akka-docs/rst/java/code/docs/persistence/PersistenceDocTest.java +++ b/akka-docs/rst/java/code/docs/persistence/PersistenceDocTest.java @@ -24,7 +24,7 @@ public class PersistenceDocTest { public interface SomeOtherMessage {} - public interface ProcessorMethods { + public interface PersistentActorMethods { //#persistence-id public String persistenceId(); //#persistence-id @@ -32,64 +32,25 @@ public class PersistenceDocTest { public boolean recoveryRunning(); public boolean recoveryFinished(); //#recovery-status - //#current-message - public Persistent getCurrentPersistentMessage(); - //#current-message } static Object o1 = new Object() { - //#definition - class MyProcessor extends UntypedProcessor { - public void onReceive(Object message) throws Exception { - if (message instanceof Persistent) { - // message successfully written to journal - Persistent persistent = (Persistent)message; - Object payload = persistent.payload(); - Long sequenceNr = persistent.sequenceNr(); - // ... - } else if (message instanceof PersistenceFailure) { - // message failed to be written to journal - PersistenceFailure failure = (PersistenceFailure)message; - Object payload = failure.payload(); - Long sequenceNr = failure.sequenceNr(); - Throwable cause = failure.cause(); - // ... 
- } else if (message instanceof SomeOtherMessage) { - // message not written to journal - } - else { - unhandled(message); - } - } - } - //#definition - class MyActor extends UntypedActor { - ActorRef processor; - - public MyActor() { - //#usage - processor = getContext().actorOf(Props.create(MyProcessor.class), "myProcessor"); - - processor.tell(Persistent.create("foo"), getSelf()); - processor.tell("bar", getSelf()); - //#usage - } + ActorRef persistentActor; public void onReceive(Object message) throws Exception { - // ... } private void recover() { //#recover-explicit - processor.tell(Recover.create(), getSelf()); + persistentActor.tell(Recover.create(), getSelf()); //#recover-explicit } } }; static Object o2 = new Object() { - abstract class MyProcessor1 extends UntypedPersistentActor { + abstract class MyPersistentActor1 extends UntypedPersistentActor { //#recover-on-start-disabled @Override public void preStart() {} @@ -101,7 +62,7 @@ public class PersistenceDocTest { //#recover-on-restart-disabled } - abstract class MyProcessor2 extends UntypedPersistentActor { + abstract class MyPersistentActor2 extends UntypedPersistentActor { //#recover-on-start-custom @Override public void preStart() { @@ -110,19 +71,7 @@ public class PersistenceDocTest { //#recover-on-start-custom } - abstract class MyProcessor3 extends UntypedPersistentActor { - //#deletion - @Override - public void preRestart(Throwable reason, Option message) { - if (message.isDefined() && message.get() instanceof Persistent) { - deleteMessage(((Persistent) message.get()).sequenceNr()); - } - super.preRestart(reason, message); - } - //#deletion - } - - class MyProcessor4 extends UntypedPersistentActor implements ProcessorMethods { + class MyPersistentActor4 extends UntypedPersistentActor implements PersistentActorMethods { //#persistence-id-override @Override public String persistenceId() { @@ -135,40 +84,34 @@ public class PersistenceDocTest { public void onReceiveCommand(Object message) throws 
Exception {} } - class MyProcessor5 extends UntypedPersistentActor { - @Override - public String persistenceId() { return "persistence-id"; } - - //#recovery-completed - - @Override - public void onReceiveRecover(Object message) { - if (message instanceof RecoveryCompleted) { - recoveryCompleted(); - } + class MyPersistentActor5 extends UntypedPersistentActor { + @Override + public String persistenceId() { + return "persistence-id"; + } + + //#recovery-completed + @Override + public void onReceiveRecover(Object message) { + if (message instanceof RecoveryCompleted) { + // perform init after recovery, before any other messages + } + } + + @Override + public void onReceiveCommand(Object message) throws Exception { + if (message instanceof String) { // ... + } else { + unhandled(message); } - - @Override - public void onReceiveCommand(Object message) throws Exception { - if (message instanceof String) { - // ... - } else { - unhandled(message); - } - } - - private void recoveryCompleted() { - // perform init after recovery, before any other messages - // ... 
- } - - //#recovery-completed + } + //#recovery-completed } }; - static Object fullyDisabledRecoveyExample = new Object() { - abstract class MyProcessor1 extends UntypedPersistentActor { + static Object fullyDisabledRecoveryExample = new Object() { + abstract class MyPersistentActor1 extends UntypedPersistentActor { //#recover-fully-disabled @Override public void preStart() { getSelf().tell(Recover.create(0L), getSelf()); } @@ -215,11 +158,15 @@ public class PersistenceDocTest { class MyPersistentActor extends UntypedPersistentActorWithAtLeastOnceDelivery { private final ActorPath destination; + + @Override + public String persistenceId() { return "persistence-id"; } public MyPersistentActor(ActorPath destination) { this.destination = destination; } + @Override public void onReceiveCommand(Object message) { if (message instanceof String) { String s = (String) message; @@ -240,6 +187,7 @@ public class PersistenceDocTest { } } + @Override public void onReceiveRecover(Object event) { updateState(event); } @@ -273,97 +221,16 @@ public class PersistenceDocTest { //#at-least-once-example }; - static Object o3 = new Object() { - //#channel-example - class MyProcessor extends UntypedProcessor { - private final ActorRef destination; - private final ActorRef channel; - - public MyProcessor() { - this.destination = getContext().actorOf(Props.create(MyDestination.class)); - this.channel = getContext().actorOf(Channel.props(), "myChannel"); - } - - public void onReceive(Object message) throws Exception { - if (message instanceof Persistent) { - Persistent p = (Persistent)message; - Persistent out = p.withPayload("done " + p.payload()); - channel.tell(Deliver.create(out, destination.path()), getSelf()); - } - } - } - - class MyDestination extends UntypedActor { - public void onReceive(Object message) throws Exception { - if (message instanceof ConfirmablePersistent) { - ConfirmablePersistent p = (ConfirmablePersistent)message; - Object payload = p.payload(); - Long sequenceNr = 
p.sequenceNr(); - int redeliveries = p.redeliveries(); - // ... - p.confirm(); - } - } - } - //#channel-example - - class MyProcessor2 extends UntypedProcessor { - private final ActorRef destination; - private final ActorRef channel; - - public MyProcessor2(ActorRef destination) { - this.destination = getContext().actorOf(Props.create(MyDestination.class)); - //#channel-id-override - this.channel = getContext().actorOf(Channel.props("my-stable-channel-id")); - //#channel-id-override - - //#channel-custom-settings - getContext().actorOf(Channel.props( - ChannelSettings.create() - .withRedeliverInterval(Duration.create(30, TimeUnit.SECONDS)) - .withRedeliverMax(15))); - //#channel-custom-settings - - //#channel-custom-listener - class MyListener extends UntypedActor { - @Override - public void onReceive(Object message) throws Exception { - if (message instanceof RedeliverFailure) { - Iterable messages = - ((RedeliverFailure)message).getMessages(); - // ... - } - } - } - - final ActorRef myListener = getContext().actorOf(Props.create(MyListener.class)); - getContext().actorOf(Channel.props( - ChannelSettings.create().withRedeliverFailureListener(null))); - //#channel-custom-listener - - } - - public void onReceive(Object message) throws Exception { - if (message instanceof Persistent) { - Persistent p = (Persistent)message; - Persistent out = p.withPayload("done " + p.payload()); - channel.tell(Deliver.create(out, destination.path()), getSelf()); - - //#channel-example-reply - channel.tell(Deliver.create(out, getSender().path()), getSelf()); - //#channel-example-reply - } - } - } - }; - static Object o4 = new Object() { - //#save-snapshot - class MyProcessor extends UntypedProcessor { + class MyPersistentActor extends UntypedPersistentActor { + @Override + public String persistenceId() { return "persistence-id"; } + + //#save-snapshot private Object state; @Override - public void onReceive(Object message) throws Exception { + public void onReceiveCommand(Object 
message) { if (message.equals("snap")) { saveSnapshot(state); } else if (message instanceof SaveSnapshotSuccess) { @@ -374,32 +241,46 @@ public class PersistenceDocTest { // ... } } + //#save-snapshot + + @Override + public void onReceiveRecover(Object event) { + } } - //#save-snapshot + }; static Object o5 = new Object() { - //#snapshot-offer - class MyProcessor extends UntypedProcessor { + class MyPersistentActor extends UntypedPersistentActor { + @Override + public String persistenceId() { return "persistence-id"; } + + //#snapshot-offer private Object state; @Override - public void onReceive(Object message) throws Exception { + public void onReceiveRecover(Object message) { if (message instanceof SnapshotOffer) { state = ((SnapshotOffer)message).snapshot(); // ... - } else if (message instanceof Persistent) { + } else if (message instanceof RecoveryCompleted) { + // ... + } else { // ... } } + //#snapshot-offer + + @Override + public void onReceiveCommand(Object message) { + } } - //#snapshot-offer class MyActor extends UntypedActor { - ActorRef processor; + ActorRef persistentActor; public MyActor() { - processor = getContext().actorOf(Props.create(MyProcessor.class)); + persistentActor = getContext().actorOf(Props.create(MyPersistentActor.class)); } public void onReceive(Object message) throws Exception { @@ -408,105 +289,13 @@ public class PersistenceDocTest { private void recover() { //#snapshot-criteria - processor.tell(Recover.create(SnapshotSelectionCriteria.create(457L, System.currentTimeMillis())), null); + persistentActor.tell(Recover.create(SnapshotSelectionCriteria.create(457L, + System.currentTimeMillis())), null); //#snapshot-criteria } } }; - static Object o6 = new Object() { - //#batch-write - class MyProcessor extends UntypedProcessor { - public void onReceive(Object message) throws Exception { - if (message instanceof Persistent) { - Persistent p = (Persistent)message; - if (p.payload().equals("a")) { /* ... 
*/ } - if (p.payload().equals("b")) { /* ... */ } - } - } - } - - class Example { - final ActorSystem system = ActorSystem.create("example"); - final ActorRef processor = system.actorOf(Props.create(MyProcessor.class)); - - public void batchWrite() { - processor.tell(PersistentBatch.create(asList( - Persistent.create("a"), - Persistent.create("b"))), null); - } - - // ... - } - //#batch-write - }; - - static Object o7 = new Object() { - abstract class MyProcessor extends UntypedProcessor { - ActorRef destination; - - public void foo() { - //#persistent-channel-example - final ActorRef channel = getContext().actorOf(PersistentChannel.props( - PersistentChannelSettings.create() - .withRedeliverInterval(Duration.create(30, TimeUnit.SECONDS)) - .withRedeliverMax(15)), "myPersistentChannel"); - - channel.tell(Deliver.create(Persistent.create("example"), destination.path()), getSelf()); - //#persistent-channel-example - //#persistent-channel-watermarks - PersistentChannelSettings.create() - .withPendingConfirmationsMax(10000) - .withPendingConfirmationsMin(2000); - //#persistent-channel-watermarks - //#persistent-channel-reply - PersistentChannelSettings.create().withReplyPersistent(true); - //#persistent-channel-reply - } - } - }; - - static Object o8 = new Object() { - //#reliable-event-delivery - class MyPersistentActor extends UntypedPersistentActor { - @Override - public String persistenceId() { return "some-persistence-id"; } - - private ActorRef destination; - private ActorRef channel; - - public MyPersistentActor(ActorRef destination) { - this.destination = destination; - this.channel = getContext().actorOf(Channel.props(), "channel"); - } - - private void handleEvent(String event) { - // update state - // ... 
- // reliably deliver events - channel.tell(Deliver.create(Persistent.create( - event, getCurrentPersistentMessage()), destination.path()), getSelf()); - } - - public void onReceiveRecover(Object msg) { - if (msg instanceof String) { - handleEvent((String)msg); - } - } - - public void onReceiveCommand(Object msg) { - if (msg.equals("cmd")) { - persist("evt", new Procedure() { - public void apply(String event) { - handleEvent(event); - } - }); - } - } - } - //#reliable-event-delivery - }; - static Object o9 = new Object() { //#persist-async class MyPersistentActor extends UntypedPersistentActor { @@ -541,9 +330,9 @@ public class PersistenceDocTest { public void usage() { final ActorSystem system = ActorSystem.create("example"); //#persist-async-usage - final ActorRef processor = system.actorOf(Props.create(MyPersistentActor.class)); - processor.tell("a", null); - processor.tell("b", null); + final ActorRef persistentActor = system.actorOf(Props.create(MyPersistentActor.class)); + persistentActor.tell("a", null); + persistentActor.tell("b", null); // possible order of received messages: // a @@ -586,9 +375,9 @@ public class PersistenceDocTest { public void usage() { final ActorSystem system = ActorSystem.create("example"); //#defer-caller - final ActorRef processor = system.actorOf(Props.create(MyPersistentActor.class)); - processor.tell("a", null); - processor.tell("b", null); + final ActorRef persistentActor = system.actorOf(Props.create(MyPersistentActor.class)); + persistentActor.tell("a", null); + persistentActor.tell("b", null); // order of received messages: // a diff --git a/akka-docs/rst/java/code/docs/persistence/PersistencePluginDocTest.java b/akka-docs/rst/java/code/docs/persistence/PersistencePluginDocTest.java index 5e74073018..74d90f3da2 100644 --- a/akka-docs/rst/java/code/docs/persistence/PersistencePluginDocTest.java +++ b/akka-docs/rst/java/code/docs/persistence/PersistencePluginDocTest.java @@ -4,26 +4,25 @@ package docs.persistence; 
-//#plugin-imports - -import akka.actor.*; -import akka.japi.Option; -import akka.japi.Procedure; -import akka.persistence.*; -import akka.persistence.japi.journal.JavaJournalSpec; -import akka.persistence.japi.snapshot.JavaSnapshotStoreSpec; -import akka.persistence.journal.japi.AsyncWriteJournal; -import akka.persistence.journal.leveldb.SharedLeveldbJournal; -import akka.persistence.journal.leveldb.SharedLeveldbStore; -import akka.persistence.snapshot.japi.SnapshotStore; -import com.typesafe.config.Config; -import com.typesafe.config.ConfigFactory; -import org.iq80.leveldb.util.FileUtils; -import scala.concurrent.Future; - import java.io.File; import java.util.ArrayList; import java.util.List; +import akka.actor.*; +import com.typesafe.config.Config; +import com.typesafe.config.ConfigFactory; +import org.iq80.leveldb.util.FileUtils; +import akka.persistence.japi.journal.JavaJournalSpec; +import akka.persistence.japi.snapshot.JavaSnapshotStoreSpec; +import akka.persistence.journal.leveldb.SharedLeveldbJournal; +import akka.persistence.journal.leveldb.SharedLeveldbStore; +import scala.concurrent.Future; +import akka.japi.Option; +import akka.japi.Procedure; + +//#plugin-imports +import akka.persistence.*; +import akka.persistence.journal.japi.AsyncWriteJournal; +import akka.persistence.snapshot.japi.SnapshotStore; //#plugin-imports @@ -91,23 +90,14 @@ public class PersistencePluginDocTest { return null; } - @Override - public Future doAsyncWriteConfirmations(Iterable confirmations) { - return null; - } - - @Override - public Future doAsyncDeleteMessages(Iterable messageIds, boolean permanent) { - return null; - } - @Override public Future doAsyncDeleteMessagesTo(String persistenceId, long toSequenceNr, boolean permanent) { return null; } @Override - public Future doAsyncReplayMessages(String persistenceId, long fromSequenceNr, long toSequenceNr, long max, Procedure replayCallback) { + public Future doAsyncReplayMessages(String persistenceId, long fromSequenceNr, + 
long toSequenceNr, long max, Procedure replayCallback) { return null; } diff --git a/akka-docs/rst/java/lambda-persistence.rst b/akka-docs/rst/java/lambda-persistence.rst index 669c4588be..77a09bbaca 100644 --- a/akka-docs/rst/java/lambda-persistence.rst +++ b/akka-docs/rst/java/lambda-persistence.rst @@ -130,6 +130,8 @@ It contains instructions on how to run the ``PersistentActorExample``. with ``context().become()`` and ``context().unbecome()``. To get the actor into the same state after recovery you need to take special care to perform the same state transitions with ``become`` and ``unbecome`` in the ``receiveRecover`` method as you would have done in the command handler. + Note that when using ``become`` from ``receiveRecover`` it will still only use the ``receiveRecover`` + behavior when replaying the events. When replay is completed it will use the new behavior. Identifiers ----------- @@ -159,6 +161,15 @@ In this case, a persistent actor must be recovered explicitly by sending it a `` .. includecode:: ../../../akka-samples/akka-sample-persistence-java-lambda/src/main/java/doc/LambdaPersistenceDocTest.java#recover-explicit +.. warning:: + + If ``preStart`` is overriden by an empty implementation, incoming commands will not be processed by the + ``PersistentActor`` until it receives a ``Recover`` and finishes recovery. + +In order to completely skip recovery, you can signal it with ``Recover.create(0L)`` + +.. includecode:: ../../../akka-samples/akka-sample-persistence-java-lambda/src/main/java/doc/LambdaPersistenceDocTest.java#recover-fully-disabled + If not overridden, ``preStart`` sends a ``Recover`` message to ``self()``. Applications may also override ``preStart`` to define further ``Recover`` parameters such as an upper sequence number bound, for example. @@ -206,7 +217,7 @@ The ordering between events is still guaranteed ("evt-b-1" will be sent after "e .. 
includecode:: ../../../akka-samples/akka-sample-persistence-java-lambda/src/main/java/doc/LambdaPersistenceDocTest.java#persist-async .. note:: - In order to implement the pattern known as "*command sourcing*" simply ``persistAsync`` all incoming events right away, + In order to implement the pattern known as "*command sourcing*" simply call ``persistAsync`` on all incoming messages right away, and handle them in the callback. .. warning:: diff --git a/akka-docs/rst/java/persistence.rst b/akka-docs/rst/java/persistence.rst index 43b946d13e..8d2e408498 100644 --- a/akka-docs/rst/java/persistence.rst +++ b/akka-docs/rst/java/persistence.rst @@ -132,6 +132,8 @@ It contains instructions on how to run the ``PersistentActorExample``. with ``getContext().become()`` and ``getContext().unbecome()``. To get the actor into the same state after recovery you need to take special care to perform the same state transitions with ``become`` and ``unbecome`` in the ``receiveRecover`` method as you would have done in the command handler. + Note that when using ``become`` from ``receiveRecover`` it will still only use the ``receiveRecover`` + behavior when replaying the events. When replay is completed it will use the new behavior. Identifiers ----------- @@ -163,8 +165,9 @@ In this case, a persistent actor must be recovered explicitly by sending it a `` .. includecode:: code/docs/persistence/PersistenceDocTest.java#recover-explicit .. warning:: -If ``preStart`` is overriden by an empty implementation, incoming commands will not be processed by the -``PersistentActor`` until it receives a ``Recover`` and finishes recovery. + + If ``preStart`` is overriden by an empty implementation, incoming commands will not be processed by the + ``PersistentActor`` until it receives a ``Recover`` and finishes recovery. 
In order to completely skip recovery, you can signal it with ``Recover.create(0L)`` @@ -219,7 +222,7 @@ The ordering between events is still guaranteed ("evt-b-1" will be sent after "e .. includecode:: code/docs/persistence/PersistenceDocTest.java#persist-async .. note:: - In order to implement the pattern known as "*command sourcing*" simply ``persistAsync`` all incoming events right away, + In order to implement the pattern known as "*command sourcing*" simply ``persistAsync`` all incoming messages right away, and handle them in the callback. .. warning:: diff --git a/akka-docs/rst/project/migration-guide-2.3.x-2.4.x.rst b/akka-docs/rst/project/migration-guide-2.3.x-2.4.x.rst index f88000d4bd..5a8bf34e7f 100644 --- a/akka-docs/rst/project/migration-guide-2.3.x-2.4.x.rst +++ b/akka-docs/rst/project/migration-guide-2.3.x-2.4.x.rst @@ -147,4 +147,17 @@ Default interval for TestKit.awaitAssert changed to 100 ms Default check interval changed from 800 ms to 100 ms. You can define the interval explicitly if you need a longer interval. +persistenceId +============= + +It is now mandatory to define the ``persistenceId`` in subclasses of ``PersistentActor``, ``UntypedPersistentActor`` +and ``AbstractPersistentId``. + +The rationale behind this change being stricter de-coupling of your Actor hierarchy and the logical +"which persistent entity this actor represents". 
+ +In case you want to preserve the old behavior of providing the actor's path as the default ``persistenceId``, you can easily +implement it yourself either as a helper trait or simply by overriding ``persistenceId`` as follows:: + + override def persistenceId = self.path.toStringWithoutAddress diff --git a/akka-docs/rst/project/migration-guide-persistence-experimental-2.3.x-2.4.x.rst b/akka-docs/rst/project/migration-guide-persistence-experimental-2.3.x-2.4.x.rst index e584415d18..0f089fd248 100644 --- a/akka-docs/rst/project/migration-guide-persistence-experimental-2.3.x-2.4.x.rst +++ b/akka-docs/rst/project/migration-guide-persistence-experimental-2.3.x-2.4.x.rst @@ -98,6 +98,20 @@ Using the ``PersistentActor`` instead of ``Processor`` also shifts the responsibility to the receiver instead of the sender of the message. Previously, using ``Processor``, clients would have to wrap messages as ``Persistent(cmd)`` manually, as well as have to be aware of the receiver being a ``Processor``, which didn't play well with transparency of the ActorRefs in general. +How to migrate data from Processor to PersistentActor +----------------------------------------------------- + +The recommended approach for migrating persisted messages from a ``Processor`` to events that can be replayed by +a ``PersistentActor`` is to write a custom migration tool with a ``PersistentView`` and a ``PersistentActor``. +Connect the ``PersistentView`` to the ``persistenceId`` of the old ``Processor`` to replay the stored persistent +messages. Send the messages from the view to a ``PersistentActor`` with another ``persistenceId``. There you can +transform the old messages to domain events that the real ``PersistentActor`` will be able to understand. Store +the events with ``persistAsync``.
+ +Note that you can implement back-pressure between the writing ``PersistentActor`` and the reading ``PersistentView`` +by turning off auto-update in the view and sending custom ``Update`` messages to the view with a limited ``replayMax`` +value. + Removed deleteMessage ===================== diff --git a/akka-docs/rst/scala/code/docs/persistence/PersistenceDocSpec.scala b/akka-docs/rst/scala/code/docs/persistence/PersistenceDocSpec.scala index 083991524e..954ddf2f9c 100644 --- a/akka-docs/rst/scala/code/docs/persistence/PersistenceDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/persistence/PersistenceDocSpec.scala @@ -28,37 +28,7 @@ trait PersistenceDocSpec { import system._ new AnyRef { - //#definition - import akka.persistence.{ PersistenceFailure, Persistent, Processor } - - class MyProcessor extends Processor { - def receive = { - case Persistent(payload, sequenceNr) => - // message successfully written to journal - case PersistenceFailure(payload, sequenceNr, cause) => - // message failed to be written to journal - case m: SomeOtherMessage => - // message not written to journal - } - } - //#definition - - //#usage - import akka.actor.Props - - val processor = actorOf(Props[MyProcessor], name = "myProcessor") - - processor ! Persistent("foo") // will be journaled - processor ! "bar" // will not be journaled - //#usage - - //#recover-explicit - processor ! Recover() - //#recover-explicit - } - - new AnyRef { - trait MyProcessor1 extends PersistentActor { + trait MyPersistentActor1 extends PersistentActor { //#recover-on-start-disabled override def preStart() = () //#recover-on-start-disabled @@ -67,7 +37,7 @@ trait PersistenceDocSpec { //#recover-on-restart-disabled } - trait MyProcessor2 extends PersistentActor { + trait MyPersistentActor2 extends PersistentActor { //#recover-on-start-custom override def preStart() { self !
Recover(toSequenceNr = 457L) @@ -75,51 +45,40 @@ trait PersistenceDocSpec { //#recover-on-start-custom } - trait MyProcessor3 extends PersistentActor { - //#deletion - override def preRestart(reason: Throwable, message: Option[Any]) { - message match { - case Some(p: Persistent) => deleteMessage(p.sequenceNr) - case _ => - } - super.preRestart(reason, message) - } - //#deletion - } + val persistentActor = system.deadLetters + //#recover-explicit + persistentActor ! Recover() + //#recover-explicit - class MyProcessor4 extends PersistentActor { + class MyPersistentActor4 extends PersistentActor { override def persistenceId = "my-stable-persistence-id" //#recovery-completed - def receiveRecover: Receive = { - case RecoveryCompleted => recoveryCompleted() + override def receiveRecover: Receive = { + case RecoveryCompleted => + // perform init after recovery, before any other messages + //... case evt => //... } - def receiveCommand: Receive = { + override def receiveCommand: Receive = { case msg => //... } - - def recoveryCompleted(): Unit = { - // perform init after recovery, before any other messages - // ... - } - //#recovery-completed } } new AnyRef { - trait MyProcessor1 extends PersistentActor { - //#recover-fully-disabled - override def preStart() = self ! Recover(toSequenceNr = 0L) - //#recover-fully-disabled - } + trait MyPersistentActor1 extends PersistentActor { + //#recover-fully-disabled + override def preStart() = self ! 
Recover(toSequenceNr = 0L) + //#recover-fully-disabled + } } new AnyRef { - trait ProcessorMethods { + trait PersistentActorMethods { //#persistence-id def persistenceId: String //#persistence-id @@ -127,15 +86,16 @@ trait PersistenceDocSpec { def recoveryRunning: Boolean def recoveryFinished: Boolean //#recovery-status - //#current-message - implicit def currentPersistentMessage: Option[Persistent] - //#current-message } - class MyProcessor1 extends Processor with ProcessorMethods { + class MyPersistentActor1 extends PersistentActor with PersistentActorMethods { //#persistence-id-override override def persistenceId = "my-stable-persistence-id" //#persistence-id-override - def receive = { + + override def receiveRecover: Receive = { + case _ => + } + override def receiveCommand: Receive = { case _ => } } @@ -156,12 +116,14 @@ trait PersistenceDocSpec { class MyPersistentActor(destination: ActorPath) extends PersistentActor with AtLeastOnceDelivery { - def receiveCommand: Receive = { + override def persistenceId: String = "persistence-id" + + override def receiveCommand: Receive = { case s: String => persist(MsgSent(s))(updateState) case Confirm(deliveryId) => persist(MsgConfirmed(deliveryId))(updateState) } - def receiveRecover: Receive = { + override def receiveRecover: Receive = { case evt: Evt => updateState(evt) } @@ -184,221 +146,66 @@ trait PersistenceDocSpec { } new AnyRef { - //#channel-example - import akka.actor.{ Actor, Props } - import akka.persistence.{ Channel, Deliver, Persistent, Processor } - class MyProcessor extends Processor { - val destination = context.actorOf(Props[MyDestination]) - val channel = context.actorOf(Channel.props(), name = "myChannel") + class MyPersistentActor extends PersistentActor { + override def persistenceId = "my-stable-persistence-id" - def receive = { - case p @ Persistent(payload, _) => - channel ! 
Deliver(p.withPayload(s"processed ${payload}"), destination.path) - } - } - - class MyDestination extends Actor { - def receive = { - case p @ ConfirmablePersistent(payload, sequenceNr, redeliveries) => - // ... - p.confirm() - } - } - //#channel-example - - class MyProcessor2 extends Processor { - val destination = context.actorOf(Props[MyDestination]) - val channel = - //#channel-id-override - context.actorOf(Channel.props("my-stable-channel-id")) - //#channel-id-override - - //#channel-custom-settings - context.actorOf(Channel.props( - ChannelSettings(redeliverInterval = 30 seconds, redeliverMax = 15)), - name = "myChannel") - //#channel-custom-settings - - def receive = { - case p @ Persistent(payload, _) => - //#channel-example-reply - channel ! Deliver(p.withPayload(s"processed ${payload}"), sender().path) - //#channel-example-reply - } - - //#channel-custom-listener - class MyListener extends Actor { - def receive = { - case RedeliverFailure(messages) => // ... - } - } - - val myListener = context.actorOf(Props[MyListener]) - val myChannel = context.actorOf(Channel.props( - ChannelSettings(redeliverFailureListener = Some(myListener)))) - //#channel-custom-listener - } - - class MyProcessor3 extends Processor { - def receive = { - //#payload-pattern-matching - case Persistent(payload, _) => - //#payload-pattern-matching - } - } - - class MyProcessor4 extends Processor { - def receive = { - //#sequence-nr-pattern-matching - case Persistent(_, sequenceNr) => - //#sequence-nr-pattern-matching - } - } - } - - new AnyRef { - //#fsm-example - import akka.actor.FSM - import akka.persistence.{ Persistent, Processor } - - class PersistentDoor extends Processor with FSM[String, Int] { - startWith("closed", 0) - - when("closed") { - case Event(Persistent("open", _), counter) => - goto("open") using (counter + 1) replying (counter) - } - - when("open") { - case Event(Persistent("close", _), counter) => - goto("closed") using (counter + 1) replying (counter) - } - } - 
//#fsm-example - } - - new AnyRef { - //#save-snapshot - class MyProcessor extends Processor { + //#save-snapshot var state: Any = _ - def receive = { + override def receiveCommand: Receive = { case "snap" => saveSnapshot(state) case SaveSnapshotSuccess(metadata) => // ... case SaveSnapshotFailure(metadata, reason) => // ... } + //#save-snapshot + + override def receiveRecover: Receive = ??? } - //#save-snapshot } new AnyRef { - //#snapshot-offer - class MyProcessor extends Processor { + class MyPersistentActor extends PersistentActor { + override def persistenceId = "my-stable-persistence-id" + + //#snapshot-offer var state: Any = _ - def receive = { + override def receiveRecover: Receive = { case SnapshotOffer(metadata, offeredSnapshot) => state = offeredSnapshot - case Persistent(payload, sequenceNr) => // ... + case RecoveryCompleted => + case event => // ... } + //#snapshot-offer + + override def receiveCommand: Receive = ??? } - //#snapshot-offer import akka.actor.Props - val processor = system.actorOf(Props[MyProcessor]) + val persistentActor = system.actorOf(Props[MyPersistentActor]) //#snapshot-criteria - processor ! Recover(fromSnapshot = SnapshotSelectionCriteria( + persistentActor ! Recover(fromSnapshot = SnapshotSelectionCriteria( maxSequenceNr = 457L, maxTimestamp = System.currentTimeMillis)) //#snapshot-criteria } new AnyRef { - import akka.actor.Props - //#batch-write - class MyProcessor extends Processor { - def receive = { - case Persistent("a", _) => // ... - case Persistent("b", _) => // ... - } - } - val system = ActorSystem("example") - val processor = system.actorOf(Props[MyProcessor]) - - processor ! 
PersistentBatch(List(Persistent("a"), Persistent("b"))) - //#batch-write - system.terminate() - } - - new AnyRef { - import akka.actor._ - trait MyActor extends Actor { - val destination: ActorRef = null - //#persistent-channel-example - val channel = context.actorOf(PersistentChannel.props( - PersistentChannelSettings(redeliverInterval = 30 seconds, redeliverMax = 15)), - name = "myPersistentChannel") - - channel ! Deliver(Persistent("example"), destination.path) - //#persistent-channel-example - //#persistent-channel-watermarks - PersistentChannelSettings( - pendingConfirmationsMax = 10000, - pendingConfirmationsMin = 2000) - //#persistent-channel-watermarks - //#persistent-channel-reply - PersistentChannelSettings(replyPersistent = true) - //#persistent-channel-reply - } - } - - new AnyRef { - import akka.actor.ActorRef - - //#reliable-event-delivery - class MyPersistentActor(destination: ActorRef) extends PersistentActor { - val channel = context.actorOf(Channel.props("channel")) - - override def persistenceId = "my-stable-persistence-id" - - def handleEvent(event: String) = { - // update state - // ... - // reliably deliver events - channel ! Deliver(Persistent(event), destination.path) - } - - def receiveRecover: Receive = { - case event: String => handleEvent(event) - } - - def receiveCommand: Receive = { - case "cmd" => { - // ... - persist("evt")(handleEvent) - } - } - } - //#reliable-event-delivery - } - - new AnyRef { - - val processor = system.actorOf(Props[MyPersistentActor]()) + val persistentActor = system.actorOf(Props[MyPersistentActor]()) //#persist-async class MyPersistentActor extends PersistentActor { override def persistenceId = "my-stable-persistence-id" - def receiveRecover: Receive = { + override def receiveRecover: Receive = { case _ => // handle recovery here } - def receiveCommand: Receive = { + override def receiveCommand: Receive = { case c: String => { sender() ! c persistAsync(s"evt-$c-1") { e => sender() ! 
e } @@ -408,8 +215,8 @@ trait PersistenceDocSpec { } // usage - processor ! "a" - processor ! "b" + persistentActor ! "a" + persistentActor ! "b" // possible order of received messages: // a @@ -423,18 +230,18 @@ trait PersistenceDocSpec { } new AnyRef { - val processor = system.actorOf(Props[MyPersistentActor]()) + val persistentActor = system.actorOf(Props[MyPersistentActor]()) //#defer class MyPersistentActor extends PersistentActor { override def persistenceId = "my-stable-persistence-id" - def receiveRecover: Receive = { + override def receiveRecover: Receive = { case _ => // handle recovery here } - def receiveCommand: Receive = { + override def receiveCommand: Receive = { case c: String => { sender() ! c persistAsync(s"evt-$c-1") { e => sender() ! e } @@ -446,8 +253,8 @@ trait PersistenceDocSpec { //#defer //#defer-caller - processor ! "a" - processor ! "b" + persistentActor ! "a" + persistentActor ! "b" // order of received messages: // a @@ -469,7 +276,7 @@ trait PersistenceDocSpec { override def persistenceId: String = "some-persistence-id" override def viewId: String = "some-persistence-id-view" - def receive: Actor.Receive = { + def receive: Receive = { case payload if isPersistent => // handle message from journal... 
case payload => diff --git a/akka-docs/rst/scala/code/docs/persistence/PersistencePluginDocSpec.scala b/akka-docs/rst/scala/code/docs/persistence/PersistencePluginDocSpec.scala index 1887bf6f27..6ebc9bcf6b 100644 --- a/akka-docs/rst/scala/code/docs/persistence/PersistencePluginDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/persistence/PersistencePluginDocSpec.scala @@ -4,19 +4,19 @@ package docs.persistence -//#plugin-imports - import akka.actor.ActorSystem -import akka.persistence._ -import akka.persistence.journal._ -import akka.persistence.snapshot._ import akka.testkit.TestKit import com.typesafe.config._ import org.scalatest.WordSpec - import scala.collection.immutable.Seq import scala.concurrent.Future import scala.concurrent.duration._ + +//#plugin-imports +import akka.persistence._ +import akka.persistence.journal._ +import akka.persistence.snapshot._ + //#plugin-imports object PersistencePluginDocSpec { @@ -122,15 +122,18 @@ trait SharedLeveldbPluginDocSpec { class MyJournal extends AsyncWriteJournal { def asyncWriteMessages(messages: Seq[PersistentRepr]): Future[Unit] = ??? - def asyncWriteConfirmations(confirmations: Seq[PersistentConfirmation]): Future[Unit] = ??? - def asyncDeleteMessages(messageIds: Seq[PersistentId], permanent: Boolean): Future[Unit] = ??? - def asyncDeleteMessagesTo(persistenceId: String, toSequenceNr: Long, permanent: Boolean): Future[Unit] = ??? - def asyncReplayMessages(persistenceId: String, fromSequenceNr: Long, toSequenceNr: Long, max: Long)(replayCallback: (PersistentRepr) => Unit): Future[Unit] = ??? - def asyncReadHighestSequenceNr(persistenceId: String, fromSequenceNr: Long): Future[Long] = ??? + def asyncDeleteMessagesTo(persistenceId: String, toSequenceNr: Long, + permanent: Boolean): Future[Unit] = ??? + def asyncReplayMessages(persistenceId: String, fromSequenceNr: Long, + toSequenceNr: Long, max: Long)( + replayCallback: (PersistentRepr) => Unit): Future[Unit] = ??? 
+ def asyncReadHighestSequenceNr(persistenceId: String, + fromSequenceNr: Long): Future[Long] = ??? } class MySnapshotStore extends SnapshotStore { - def loadAsync(persistenceId: String, criteria: SnapshotSelectionCriteria): Future[Option[SelectedSnapshot]] = ??? + def loadAsync(persistenceId: String, + criteria: SnapshotSelectionCriteria): Future[Option[SelectedSnapshot]] = ??? def saveAsync(metadata: SnapshotMetadata, snapshot: Any): Future[Unit] = ??? def saved(metadata: SnapshotMetadata): Unit = ??? def delete(metadata: SnapshotMetadata): Unit = ??? @@ -145,8 +148,8 @@ object PersistenceTCKDoc { class MyJournalSpec extends JournalSpec { override val config = ConfigFactory.parseString( """ - |akka.persistence.journal.plugin = "my.journal.plugin" - """.stripMargin) + akka.persistence.journal.plugin = "my.journal.plugin" + """) } //#journal-tck-scala } @@ -157,8 +160,8 @@ object PersistenceTCKDoc { class MySnapshotStoreSpec extends SnapshotStoreSpec { override val config = ConfigFactory.parseString( """ - |akka.persistence.snapshot-store.plugin = "my.snapshot-store.plugin" - """.stripMargin) + akka.persistence.snapshot-store.plugin = "my.snapshot-store.plugin" + """) } //#snapshot-store-tck-scala } @@ -172,8 +175,8 @@ object PersistenceTCKDoc { class MyJournalSpec extends JournalSpec { override val config = ConfigFactory.parseString( """ - |akka.persistence.journal.plugin = "my.journal.plugin" - """.stripMargin) + akka.persistence.journal.plugin = "my.journal.plugin" + """) val storageLocations = List( new File(system.settings.config.getString("akka.persistence.journal.leveldb.dir")), @@ -192,4 +195,4 @@ object PersistenceTCKDoc { } //#journal-tck-before-after-scala } -} \ No newline at end of file +} diff --git a/akka-docs/rst/scala/code/docs/persistence/PersistenceSerializerDocSpec.scala b/akka-docs/rst/scala/code/docs/persistence/PersistenceSerializerDocSpec.scala index c46e28cc6b..f5ce3176f4 100644 --- 
a/akka-docs/rst/scala/code/docs/persistence/PersistenceSerializerDocSpec.scala +++ b/akka-docs/rst/scala/code/docs/persistence/PersistenceSerializerDocSpec.scala @@ -27,7 +27,7 @@ class PersistenceSerializerDocSpec extends WordSpec { } } //#custom-serializer-config - """.stripMargin + """ val system = ActorSystem("PersistenceSerializerDocSpec", ConfigFactory.parseString(customSerializerConfig)) try { diff --git a/akka-docs/rst/scala/persistence.rst b/akka-docs/rst/scala/persistence.rst index cd72eb5d6e..db46d2c915 100644 --- a/akka-docs/rst/scala/persistence.rst +++ b/akka-docs/rst/scala/persistence.rst @@ -124,6 +124,8 @@ It contains instructions on how to run the ``PersistentActorExample``. with ``context.become()`` and ``context.unbecome()``. To get the actor into the same state after recovery you need to take special care to perform the same state transitions with ``become`` and ``unbecome`` in the ``receiveRecover`` method as you would have done in the command handler. + Note that when using ``become`` from ``receiveRecover`` it will still only use the ``receiveRecover`` + behavior when replaying the events. When replay is completed it will use the new behavior. Identifiers ----------- @@ -154,8 +156,9 @@ In this case, a persistent actor must be recovered explicitly by sending it a `` .. includecode:: code/docs/persistence/PersistenceDocSpec.scala#recover-explicit .. warning:: -If ``preStart`` is overriden by an empty implementation, incoming commands will not be processed by the -``PersistentActor`` until it receives a ``Recover`` and finishes recovery. + + If ``preStart`` is overridden by an empty implementation, incoming commands will not be processed by the + ``PersistentActor`` until it receives a ``Recover`` and finishes recovery. In order to completely skip recovery, you can signal it with ``Recover(toSequenceNr = OL)`` @@ -211,7 +214,7 @@ The ordering between events is still guaranteed ("evt-b-1" will be sent after "e ..
note:: In order to implement the pattern known as "*command sourcing*" simply call ``persistAsync(cmd)(...)`` right away on all incomming - messages right away, and handle them in the callback. + messages, and handle them in the callback. .. warning:: The callback will not be invoked if the actor is restarted (or stopped) in between the call to @@ -656,16 +659,6 @@ or in your Akka configuration. The LevelDB Java port is for testing purposes only. -Miscellaneous -============= - -State machines --------------- - -State machines can be persisted by mixing in the ``FSM`` trait into persistent actors. - -.. includecode:: code/docs/persistence/PersistenceDocSpec.scala#fsm-example - Configuration ============= diff --git a/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalPerfSpec.scala b/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalPerfSpec.scala index 18f6228df7..eb88b828d0 100644 --- a/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalPerfSpec.scala +++ b/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalPerfSpec.scala @@ -12,7 +12,7 @@ import scala.collection.immutable import scala.concurrent.duration._ object JournalPerfSpec { - class BenchActor(val persistenceId: String, replyTo: ActorRef, replyAfter: Int) extends PersistentActor + class BenchActor(override val persistenceId: String, replyTo: ActorRef, replyAfter: Int) extends PersistentActor with ActorLogging { var counter = 0 diff --git a/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalSpec.scala b/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalSpec.scala index 8d51027ca1..3f407c344a 100644 --- a/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalSpec.scala +++ b/akka-persistence-tck/src/main/scala/akka/persistence/journal/JournalSpec.scala @@ -12,11 +12,8 @@ import com.typesafe.config._ object JournalSpec { val config = ConfigFactory.parseString( """ - 
|akka.persistence.publish-confirmations = on - |akka.persistence.publish-plugin-commands = on - """.stripMargin) - - case class Confirmation(persistenceId: String, channelId: String, sequenceNr: Long) extends PersistentConfirmation + akka.persistence.publish-plugin-commands = on + """) } /** @@ -50,7 +47,7 @@ trait JournalSpec extends PluginSpec { extension.journalFor(null) def replayedMessage(snr: Long, deleted: Boolean = false, confirms: Seq[String] = Nil): ReplayedMessage = - ReplayedMessage(PersistentImpl(s"a-${snr}", snr, pid, deleted, confirms, senderProbe.ref)) + ReplayedMessage(PersistentImpl(s"a-${snr}", snr, pid, deleted, senderProbe.ref)) def writeMessages(from: Int, to: Int, pid: String, sender: ActorRef): Unit = { val msgs = from to to map { i ⇒ PersistentRepr(payload = s"a-${i}", sequenceNr = i, persistenceId = pid, sender = sender) } @@ -60,7 +57,7 @@ trait JournalSpec extends PluginSpec { probe.expectMsg(WriteMessagesSuccessful) from to to foreach { i ⇒ - probe.expectMsgPF() { case WriteMessageSuccess(PersistentImpl(payload, `i`, `pid`, _, _, `sender`), _) ⇒ payload should be(s"a-${i}") } + probe.expectMsgPF() { case WriteMessageSuccess(PersistentImpl(payload, `i`, `pid`, _, `sender`), _) ⇒ payload should be(s"a-${i}") } } } @@ -140,49 +137,7 @@ trait JournalSpec extends PluginSpec { } } } - "replay confirmed messages with corresponding channel ids contained in the confirmed field" in { - val confs = List(Confirmation(pid, "c1", 3), Confirmation(pid, "c2", 3)) - val lpid = pid - journal ! WriteConfirmations(confs, receiverProbe.ref) - receiverProbe.expectMsg(WriteConfirmationsSuccess(confs)) - - journal ! 
ReplayMessages(1, Long.MaxValue, Long.MaxValue, pid, receiverProbe.ref, replayDeleted = true) - 1 to 5 foreach { i ⇒ - i match { - case 1 | 2 | 4 | 5 ⇒ receiverProbe.expectMsg(replayedMessage(i)) - case 3 ⇒ receiverProbe.expectMsgPF() { - case ReplayedMessage(PersistentImpl(payload, `i`, `lpid`, false, confirms, _)) ⇒ - confirms should have length (2) - confirms should contain("c1") - confirms should contain("c2") - } - } - } - } - "ignore orphan deletion markers" in { - val msgIds = List(PersistentIdImpl(pid, 3), PersistentIdImpl(pid, 4)) - journal ! DeleteMessages(msgIds, true, Some(receiverProbe.ref)) // delete message - receiverProbe.expectMsg(DeleteMessagesSuccess(msgIds)) - - journal ! DeleteMessages(msgIds, false, Some(receiverProbe.ref)) // write orphan marker - receiverProbe.expectMsg(DeleteMessagesSuccess(msgIds)) - - journal ! ReplayMessages(1, Long.MaxValue, Long.MaxValue, pid, receiverProbe.ref) - List(1, 2, 5) foreach { i ⇒ receiverProbe.expectMsg(replayedMessage(i)) } - } - "ignore orphan confirmation markers" in { - val msgIds = List(PersistentIdImpl(pid, 3)) - journal ! DeleteMessages(msgIds, true, Some(receiverProbe.ref)) // delete message - receiverProbe.expectMsg(DeleteMessagesSuccess(msgIds)) - - val confs = List(Confirmation(pid, "c1", 3), Confirmation(pid, "c2", 3)) - journal ! WriteConfirmations(confs, receiverProbe.ref) - receiverProbe.expectMsg(WriteConfirmationsSuccess(confs)) - - journal ! ReplayMessages(1, Long.MaxValue, Long.MaxValue, pid, receiverProbe.ref) - List(1, 2, 4, 5) foreach { i ⇒ receiverProbe.expectMsg(replayedMessage(i)) } - } "return a highest stored sequence number > 0 if the persistent actor has already written messages and the message log is non-empty" in { journal ! 
ReadHighestSequenceNr(3L, pid, receiverProbe.ref) receiverProbe.expectMsg(ReadHighestSequenceNrSuccess(5)) diff --git a/akka-persistence-tck/src/main/scala/akka/persistence/snapshot/SnapshotStoreSpec.scala b/akka-persistence-tck/src/main/scala/akka/persistence/snapshot/SnapshotStoreSpec.scala index 2bbcf86e10..d7f2d43ec8 100644 --- a/akka-persistence-tck/src/main/scala/akka/persistence/snapshot/SnapshotStoreSpec.scala +++ b/akka-persistence-tck/src/main/scala/akka/persistence/snapshot/SnapshotStoreSpec.scala @@ -48,7 +48,7 @@ trait SnapshotStoreSpec extends PluginSpec { } "A snapshot store" must { - "not load a snapshot given an invalid processor id" in { + "not load a snapshot given an invalid persistenceId" in { snapshotStore.tell(LoadSnapshot("invalid", SnapshotSelectionCriteria.Latest, Long.MaxValue), senderProbe.ref) senderProbe.expectMsg(LoadSnapshotResult(None, Long.MaxValue)) } diff --git a/akka-persistence-tck/src/test/scala/akka/persistence/snapshot/local/LocalSnapshotStoreSpec.scala b/akka-persistence-tck/src/test/scala/akka/persistence/snapshot/local/LocalSnapshotStoreSpec.scala index a6c0009165..6aadd28ab1 100644 --- a/akka-persistence-tck/src/test/scala/akka/persistence/snapshot/local/LocalSnapshotStoreSpec.scala +++ b/akka-persistence-tck/src/test/scala/akka/persistence/snapshot/local/LocalSnapshotStoreSpec.scala @@ -8,9 +8,9 @@ import akka.persistence.snapshot.SnapshotStoreSpec class LocalSnapshotStoreSpec extends SnapshotStoreSpec with PluginCleanup { lazy val config = ConfigFactory.parseString( """ - |akka.test.timefactor = 3 - |akka.persistence.snapshot-store.plugin = "akka.persistence.snapshot-store.local" - |akka.persistence.snapshot-store.local.dir = "target/snapshots" - """.stripMargin) + akka.test.timefactor = 3 + akka.persistence.snapshot-store.plugin = "akka.persistence.snapshot-store.local" + akka.persistence.snapshot-store.local.dir = "target/snapshots" + """) } diff --git 
a/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncRecoveryPlugin.java b/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncRecoveryPlugin.java index d193f64c27..e19d272547 100644 --- a/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncRecoveryPlugin.java +++ b/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncRecoveryPlugin.java @@ -10,38 +10,41 @@ import akka.japi.Procedure; import akka.persistence.PersistentRepr; interface AsyncRecoveryPlugin { - //#async-replay-plugin-api - /** - * Java API, Plugin API: asynchronously replays persistent messages. - * Implementations replay a message by calling `replayCallback`. The returned - * future must be completed when all messages (matching the sequence number - * bounds) have been replayed. The future must be completed with a failure if - * any of the persistent messages could not be replayed. - * - * The `replayCallback` must also be called with messages that have been marked - * as deleted. In this case a replayed message's `deleted` method must return - * `true`. - * - * The channel ids of delivery confirmations that are available for a replayed - * message must be contained in that message's `confirms` sequence. - * - * @param persistenceId processor id. - * @param fromSequenceNr sequence number where replay should start (inclusive). - * @param toSequenceNr sequence number where replay should end (inclusive). - * @param max maximum number of messages to be replayed. - * @param replayCallback called to replay a single message. Can be called from any - * thread. - */ - Future doAsyncReplayMessages(String persistenceId, long fromSequenceNr, long toSequenceNr, long max, Procedure replayCallback); + //#async-replay-plugin-api + /** + * Java API, Plugin API: asynchronously replays persistent messages. + * Implementations replay a message by calling `replayCallback`. 
The returned + * future must be completed when all messages (matching the sequence number + * bounds) have been replayed. The future must be completed with a failure if + * any of the persistent messages could not be replayed. + * + * The `replayCallback` must also be called with messages that have been + * marked as deleted. In this case a replayed message's `deleted` method must + * return `true`. + * + * @param persistenceId + * id of the persistent actor. + * @param fromSequenceNr + * sequence number where replay should start (inclusive). + * @param toSequenceNr + * sequence number where replay should end (inclusive). + * @param max + * maximum number of messages to be replayed. + * @param replayCallback + * called to replay a single message. Can be called from any thread. + */ + Future doAsyncReplayMessages(String persistenceId, long fromSequenceNr, long toSequenceNr, long max, + Procedure replayCallback); - /** - * Java API, Plugin API: asynchronously reads the highest stored sequence number - * for the given `persistenceId`. - * - * @param persistenceId processor id. - * @param fromSequenceNr hint where to start searching for the highest sequence - * number. - */ - Future doAsyncReadHighestSequenceNr(String persistenceId, long fromSequenceNr); - //#async-replay-plugin-api + /** + * Java API, Plugin API: asynchronously reads the highest stored sequence + * number for the given `persistenceId`. + * + * @param persistenceId + * id of the persistent actor. + * @param fromSequenceNr + * hint where to start searching for the highest sequence number. 
+ */ + Future doAsyncReadHighestSequenceNr(String persistenceId, long fromSequenceNr); + //#async-replay-plugin-api } diff --git a/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncWritePlugin.java b/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncWritePlugin.java index 00aa6376b4..d07433fc28 100644 --- a/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncWritePlugin.java +++ b/akka-persistence/src/main/java/akka/persistence/journal/japi/AsyncWritePlugin.java @@ -9,38 +9,21 @@ import scala.concurrent.Future; import akka.persistence.*; interface AsyncWritePlugin { - //#async-write-plugin-api - /** - * Java API, Plugin API: synchronously writes a batch of persistent messages to the - * journal. The batch write must be atomic i.e. either all persistent messages in the - * batch are written or none. - */ - Future doAsyncWriteMessages(Iterable messages); + //#async-write-plugin-api + /** + * Java API, Plugin API: synchronously writes a batch of persistent messages + * to the journal. The batch write must be atomic i.e. either all persistent + * messages in the batch are written or none. + */ + Future doAsyncWriteMessages(Iterable messages); - /** - * Java API, Plugin API: synchronously writes a batch of delivery confirmations to - * the journal. - * - * @deprecated doAsyncWriteConfirmations will be removed, since Channels will be removed (since 2.3.4) - */ - @Deprecated Future doAsyncWriteConfirmations(Iterable confirmations); - - /** - * Java API, Plugin API: synchronously deletes messages identified by `messageIds` - * from the journal. If `permanent` is set to `false`, the persistent messages are - * marked as deleted, otherwise they are permanently deleted. 
- * - * @deprecated doAsyncDeleteMessages will be removed (since 2.3.4) - */ - @Deprecated Future doAsyncDeleteMessages(Iterable messageIds, boolean permanent); - - /** - * Java API, Plugin API: synchronously deletes all persistent messages up to - * `toSequenceNr`. If `permanent` is set to `false`, the persistent messages are - * marked as deleted, otherwise they are permanently deleted. - * - * @see AsyncRecoveryPlugin - */ - Future doAsyncDeleteMessagesTo(String persistenceId, long toSequenceNr, boolean permanent); - //#async-write-plugin-api + /** + * Java API, Plugin API: asynchronously deletes all persistent messages up to + * `toSequenceNr`. If `permanent` is set to `false`, the persistent messages + * are marked as deleted, otherwise they are permanently deleted. + * + * @see AsyncRecoveryPlugin + */ + Future doAsyncDeleteMessagesTo(String persistenceId, long toSequenceNr, boolean permanent); + //#async-write-plugin-api } diff --git a/akka-persistence/src/main/java/akka/persistence/journal/japi/SyncWritePlugin.java b/akka-persistence/src/main/java/akka/persistence/journal/japi/SyncWritePlugin.java index fcaad3fd25..bffc0c7939 100644 --- a/akka-persistence/src/main/java/akka/persistence/journal/japi/SyncWritePlugin.java +++ b/akka-persistence/src/main/java/akka/persistence/journal/japi/SyncWritePlugin.java @@ -7,38 +7,21 @@ package akka.persistence.journal.japi; import akka.persistence.*; interface SyncWritePlugin { - //#sync-write-plugin-api - /** - * Java API, Plugin API: synchronously writes a batch of persistent messages to the - * journal. The batch write must be atomic i.e. either all persistent messages in the - * batch are written or none. - */ - void doWriteMessages(Iterable messages); + //#sync-write-plugin-api + /** + * Java API, Plugin API: synchronously writes a batch of persistent messages + * to the journal. The batch write must be atomic i.e. either all persistent + * messages in the batch are written or none. 
+ */ + void doWriteMessages(Iterable messages); - /** - * Java API, Plugin API: synchronously writes a batch of delivery confirmations to - * the journal. - * - * @deprecated doWriteConfirmations will be removed, since Channels will be removed (since 2.3.4) - */ - @Deprecated void doWriteConfirmations(Iterable confirmations); - - /** - * Java API, Plugin API: synchronously deletes messages identified by `messageIds` - * from the journal. If `permanent` is set to `false`, the persistent messages are - * marked as deleted, otherwise they are permanently deleted. - * - * @deprecated doDeleteMessages will be removed (since 2.3.4) - */ - @Deprecated void doDeleteMessages(Iterable messageIds, boolean permanent); - - /** - * Java API, Plugin API: synchronously deletes all persistent messages up to - * `toSequenceNr`. If `permanent` is set to `false`, the persistent messages are - * marked as deleted, otherwise they are permanently deleted. - * - * @see AsyncRecoveryPlugin - */ - void doDeleteMessagesTo(String persistenceId, long toSequenceNr, boolean permanent); - //#sync-write-plugin-api + /** + * Java API, Plugin API: synchronously deletes all persistent messages up to + * `toSequenceNr`. If `permanent` is set to `false`, the persistent messages + * are marked as deleted, otherwise they are permanently deleted. 
+ * + * @see AsyncRecoveryPlugin + */ + void doDeleteMessagesTo(String persistenceId, long toSequenceNr, boolean permanent); + //#sync-write-plugin-api } diff --git a/akka-persistence/src/main/java/akka/persistence/serialization/MessageFormats.java b/akka-persistence/src/main/java/akka/persistence/serialization/MessageFormats.java index 3171786369..1d2c08f378 100644 --- a/akka-persistence/src/main/java/akka/persistence/serialization/MessageFormats.java +++ b/akka-persistence/src/main/java/akka/persistence/serialization/MessageFormats.java @@ -8,692 +8,6 @@ public final class MessageFormats { public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } - public interface PersistentMessageBatchOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated .PersistentMessage batch = 1; - /** - * repeated .PersistentMessage batch = 1; - */ - java.util.List - getBatchList(); - /** - * repeated .PersistentMessage batch = 1; - */ - akka.persistence.serialization.MessageFormats.PersistentMessage getBatch(int index); - /** - * repeated .PersistentMessage batch = 1; - */ - int getBatchCount(); - /** - * repeated .PersistentMessage batch = 1; - */ - java.util.List - getBatchOrBuilderList(); - /** - * repeated .PersistentMessage batch = 1; - */ - akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder getBatchOrBuilder( - int index); - } - /** - * Protobuf type {@code PersistentMessageBatch} - */ - public static final class PersistentMessageBatch extends - com.google.protobuf.GeneratedMessage - implements PersistentMessageBatchOrBuilder { - // Use PersistentMessageBatch.newBuilder() to construct. 
- private PersistentMessageBatch(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private PersistentMessageBatch(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final PersistentMessageBatch defaultInstance; - public static PersistentMessageBatch getDefaultInstance() { - return defaultInstance; - } - - public PersistentMessageBatch getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private PersistentMessageBatch( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - batch_ = new java.util.ArrayList(); - mutable_bitField0_ |= 0x00000001; - } - batch_.add(input.readMessage(akka.persistence.serialization.MessageFormats.PersistentMessage.PARSER, extensionRegistry)); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { - batch_ = 
java.util.Collections.unmodifiableList(batch_); - } - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return akka.persistence.serialization.MessageFormats.internal_static_PersistentMessageBatch_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return akka.persistence.serialization.MessageFormats.internal_static_PersistentMessageBatch_fieldAccessorTable - .ensureFieldAccessorsInitialized( - akka.persistence.serialization.MessageFormats.PersistentMessageBatch.class, akka.persistence.serialization.MessageFormats.PersistentMessageBatch.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public PersistentMessageBatch parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new PersistentMessageBatch(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - // repeated .PersistentMessage batch = 1; - public static final int BATCH_FIELD_NUMBER = 1; - private java.util.List batch_; - /** - * repeated .PersistentMessage batch = 1; - */ - public java.util.List getBatchList() { - return batch_; - } - /** - * repeated .PersistentMessage batch = 1; - */ - public java.util.List - getBatchOrBuilderList() { - return batch_; - } - /** - * repeated .PersistentMessage batch = 1; - */ - public int getBatchCount() { - return batch_.size(); - } - /** - * repeated .PersistentMessage batch = 1; - */ - public akka.persistence.serialization.MessageFormats.PersistentMessage getBatch(int index) { - return batch_.get(index); - } - /** - * repeated .PersistentMessage batch = 1; - */ - public 
akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder getBatchOrBuilder( - int index) { - return batch_.get(index); - } - - private void initFields() { - batch_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getBatchCount(); i++) { - if (!getBatch(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < batch_.size(); i++) { - output.writeMessage(1, batch_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - for (int i = 0; i < batch_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, batch_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - public static akka.persistence.serialization.MessageFormats.PersistentMessageBatch parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static akka.persistence.serialization.MessageFormats.PersistentMessageBatch parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - 
public static akka.persistence.serialization.MessageFormats.PersistentMessageBatch parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static akka.persistence.serialization.MessageFormats.PersistentMessageBatch parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static akka.persistence.serialization.MessageFormats.PersistentMessageBatch parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static akka.persistence.serialization.MessageFormats.PersistentMessageBatch parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static akka.persistence.serialization.MessageFormats.PersistentMessageBatch parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static akka.persistence.serialization.MessageFormats.PersistentMessageBatch parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static akka.persistence.serialization.MessageFormats.PersistentMessageBatch parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static akka.persistence.serialization.MessageFormats.PersistentMessageBatch parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder 
newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(akka.persistence.serialization.MessageFormats.PersistentMessageBatch prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code PersistentMessageBatch} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements akka.persistence.serialization.MessageFormats.PersistentMessageBatchOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return akka.persistence.serialization.MessageFormats.internal_static_PersistentMessageBatch_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return akka.persistence.serialization.MessageFormats.internal_static_PersistentMessageBatch_fieldAccessorTable - .ensureFieldAccessorsInitialized( - akka.persistence.serialization.MessageFormats.PersistentMessageBatch.class, akka.persistence.serialization.MessageFormats.PersistentMessageBatch.Builder.class); - } - - // Construct using akka.persistence.serialization.MessageFormats.PersistentMessageBatch.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getBatchFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (batchBuilder_ == null) { - batch_ = 
java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - } else { - batchBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return akka.persistence.serialization.MessageFormats.internal_static_PersistentMessageBatch_descriptor; - } - - public akka.persistence.serialization.MessageFormats.PersistentMessageBatch getDefaultInstanceForType() { - return akka.persistence.serialization.MessageFormats.PersistentMessageBatch.getDefaultInstance(); - } - - public akka.persistence.serialization.MessageFormats.PersistentMessageBatch build() { - akka.persistence.serialization.MessageFormats.PersistentMessageBatch result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public akka.persistence.serialization.MessageFormats.PersistentMessageBatch buildPartial() { - akka.persistence.serialization.MessageFormats.PersistentMessageBatch result = new akka.persistence.serialization.MessageFormats.PersistentMessageBatch(this); - int from_bitField0_ = bitField0_; - if (batchBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001)) { - batch_ = java.util.Collections.unmodifiableList(batch_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.batch_ = batch_; - } else { - result.batch_ = batchBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof akka.persistence.serialization.MessageFormats.PersistentMessageBatch) { - return mergeFrom((akka.persistence.serialization.MessageFormats.PersistentMessageBatch)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(akka.persistence.serialization.MessageFormats.PersistentMessageBatch other) { - if (other == 
akka.persistence.serialization.MessageFormats.PersistentMessageBatch.getDefaultInstance()) return this; - if (batchBuilder_ == null) { - if (!other.batch_.isEmpty()) { - if (batch_.isEmpty()) { - batch_ = other.batch_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureBatchIsMutable(); - batch_.addAll(other.batch_); - } - onChanged(); - } - } else { - if (!other.batch_.isEmpty()) { - if (batchBuilder_.isEmpty()) { - batchBuilder_.dispose(); - batchBuilder_ = null; - batch_ = other.batch_; - bitField0_ = (bitField0_ & ~0x00000001); - batchBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? - getBatchFieldBuilder() : null; - } else { - batchBuilder_.addAllMessages(other.batch_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getBatchCount(); i++) { - if (!getBatch(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - akka.persistence.serialization.MessageFormats.PersistentMessageBatch parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (akka.persistence.serialization.MessageFormats.PersistentMessageBatch) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // repeated .PersistentMessage batch = 1; - private java.util.List batch_ = - java.util.Collections.emptyList(); - private void ensureBatchIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - batch_ = new java.util.ArrayList(batch_); - bitField0_ |= 0x00000001; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - 
akka.persistence.serialization.MessageFormats.PersistentMessage, akka.persistence.serialization.MessageFormats.PersistentMessage.Builder, akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder> batchBuilder_; - - /** - * repeated .PersistentMessage batch = 1; - */ - public java.util.List getBatchList() { - if (batchBuilder_ == null) { - return java.util.Collections.unmodifiableList(batch_); - } else { - return batchBuilder_.getMessageList(); - } - } - /** - * repeated .PersistentMessage batch = 1; - */ - public int getBatchCount() { - if (batchBuilder_ == null) { - return batch_.size(); - } else { - return batchBuilder_.getCount(); - } - } - /** - * repeated .PersistentMessage batch = 1; - */ - public akka.persistence.serialization.MessageFormats.PersistentMessage getBatch(int index) { - if (batchBuilder_ == null) { - return batch_.get(index); - } else { - return batchBuilder_.getMessage(index); - } - } - /** - * repeated .PersistentMessage batch = 1; - */ - public Builder setBatch( - int index, akka.persistence.serialization.MessageFormats.PersistentMessage value) { - if (batchBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureBatchIsMutable(); - batch_.set(index, value); - onChanged(); - } else { - batchBuilder_.setMessage(index, value); - } - return this; - } - /** - * repeated .PersistentMessage batch = 1; - */ - public Builder setBatch( - int index, akka.persistence.serialization.MessageFormats.PersistentMessage.Builder builderForValue) { - if (batchBuilder_ == null) { - ensureBatchIsMutable(); - batch_.set(index, builderForValue.build()); - onChanged(); - } else { - batchBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .PersistentMessage batch = 1; - */ - public Builder addBatch(akka.persistence.serialization.MessageFormats.PersistentMessage value) { - if (batchBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - 
ensureBatchIsMutable(); - batch_.add(value); - onChanged(); - } else { - batchBuilder_.addMessage(value); - } - return this; - } - /** - * repeated .PersistentMessage batch = 1; - */ - public Builder addBatch( - int index, akka.persistence.serialization.MessageFormats.PersistentMessage value) { - if (batchBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureBatchIsMutable(); - batch_.add(index, value); - onChanged(); - } else { - batchBuilder_.addMessage(index, value); - } - return this; - } - /** - * repeated .PersistentMessage batch = 1; - */ - public Builder addBatch( - akka.persistence.serialization.MessageFormats.PersistentMessage.Builder builderForValue) { - if (batchBuilder_ == null) { - ensureBatchIsMutable(); - batch_.add(builderForValue.build()); - onChanged(); - } else { - batchBuilder_.addMessage(builderForValue.build()); - } - return this; - } - /** - * repeated .PersistentMessage batch = 1; - */ - public Builder addBatch( - int index, akka.persistence.serialization.MessageFormats.PersistentMessage.Builder builderForValue) { - if (batchBuilder_ == null) { - ensureBatchIsMutable(); - batch_.add(index, builderForValue.build()); - onChanged(); - } else { - batchBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - /** - * repeated .PersistentMessage batch = 1; - */ - public Builder addAllBatch( - java.lang.Iterable values) { - if (batchBuilder_ == null) { - ensureBatchIsMutable(); - super.addAll(values, batch_); - onChanged(); - } else { - batchBuilder_.addAllMessages(values); - } - return this; - } - /** - * repeated .PersistentMessage batch = 1; - */ - public Builder clearBatch() { - if (batchBuilder_ == null) { - batch_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - } else { - batchBuilder_.clear(); - } - return this; - } - /** - * repeated .PersistentMessage batch = 1; - */ - public Builder removeBatch(int index) { - if (batchBuilder_ == 
null) { - ensureBatchIsMutable(); - batch_.remove(index); - onChanged(); - } else { - batchBuilder_.remove(index); - } - return this; - } - /** - * repeated .PersistentMessage batch = 1; - */ - public akka.persistence.serialization.MessageFormats.PersistentMessage.Builder getBatchBuilder( - int index) { - return getBatchFieldBuilder().getBuilder(index); - } - /** - * repeated .PersistentMessage batch = 1; - */ - public akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder getBatchOrBuilder( - int index) { - if (batchBuilder_ == null) { - return batch_.get(index); } else { - return batchBuilder_.getMessageOrBuilder(index); - } - } - /** - * repeated .PersistentMessage batch = 1; - */ - public java.util.List - getBatchOrBuilderList() { - if (batchBuilder_ != null) { - return batchBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(batch_); - } - } - /** - * repeated .PersistentMessage batch = 1; - */ - public akka.persistence.serialization.MessageFormats.PersistentMessage.Builder addBatchBuilder() { - return getBatchFieldBuilder().addBuilder( - akka.persistence.serialization.MessageFormats.PersistentMessage.getDefaultInstance()); - } - /** - * repeated .PersistentMessage batch = 1; - */ - public akka.persistence.serialization.MessageFormats.PersistentMessage.Builder addBatchBuilder( - int index) { - return getBatchFieldBuilder().addBuilder( - index, akka.persistence.serialization.MessageFormats.PersistentMessage.getDefaultInstance()); - } - /** - * repeated .PersistentMessage batch = 1; - */ - public java.util.List - getBatchBuilderList() { - return getBatchFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - akka.persistence.serialization.MessageFormats.PersistentMessage, akka.persistence.serialization.MessageFormats.PersistentMessage.Builder, akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder> - getBatchFieldBuilder() { - if (batchBuilder_ == 
null) { - batchBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - akka.persistence.serialization.MessageFormats.PersistentMessage, akka.persistence.serialization.MessageFormats.PersistentMessage.Builder, akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder>( - batch_, - ((bitField0_ & 0x00000001) == 0x00000001), - getParentForChildren(), - isClean()); - batch_ = null; - } - return batchBuilder_; - } - - // @@protoc_insertion_point(builder_scope:PersistentMessageBatch) - } - - static { - defaultInstance = new PersistentMessageBatch(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:PersistentMessageBatch) - } - public interface PersistentMessageOrBuilder extends com.google.protobuf.MessageOrBuilder { @@ -746,75 +60,6 @@ public final class MessageFormats { */ boolean getDeleted(); - // optional int32 redeliveries = 6; - /** - * optional int32 redeliveries = 6; - */ - boolean hasRedeliveries(); - /** - * optional int32 redeliveries = 6; - */ - int getRedeliveries(); - - // repeated string confirms = 7; - /** - * repeated string confirms = 7; - */ - java.util.List - getConfirmsList(); - /** - * repeated string confirms = 7; - */ - int getConfirmsCount(); - /** - * repeated string confirms = 7; - */ - java.lang.String getConfirms(int index); - /** - * repeated string confirms = 7; - */ - com.google.protobuf.ByteString - getConfirmsBytes(int index); - - // optional bool confirmable = 8; - /** - * optional bool confirmable = 8; - */ - boolean hasConfirmable(); - /** - * optional bool confirmable = 8; - */ - boolean getConfirmable(); - - // optional .DeliveredMessage confirmMessage = 9; - /** - * optional .DeliveredMessage confirmMessage = 9; - */ - boolean hasConfirmMessage(); - /** - * optional .DeliveredMessage confirmMessage = 9; - */ - akka.persistence.serialization.MessageFormats.DeliveredMessage getConfirmMessage(); - /** - * optional .DeliveredMessage confirmMessage = 9; - */ - 
akka.persistence.serialization.MessageFormats.DeliveredMessageOrBuilder getConfirmMessageOrBuilder(); - - // optional string confirmTarget = 10; - /** - * optional string confirmTarget = 10; - */ - boolean hasConfirmTarget(); - /** - * optional string confirmTarget = 10; - */ - java.lang.String getConfirmTarget(); - /** - * optional string confirmTarget = 10; - */ - com.google.protobuf.ByteString - getConfirmTargetBytes(); - // optional string sender = 11; /** * optional string sender = 11; @@ -909,44 +154,8 @@ public final class MessageFormats { deleted_ = input.readBool(); break; } - case 48: { - bitField0_ |= 0x00000010; - redeliveries_ = input.readInt32(); - break; - } - case 58: { - if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { - confirms_ = new com.google.protobuf.LazyStringArrayList(); - mutable_bitField0_ |= 0x00000020; - } - confirms_.add(input.readBytes()); - break; - } - case 64: { - bitField0_ |= 0x00000020; - confirmable_ = input.readBool(); - break; - } - case 74: { - akka.persistence.serialization.MessageFormats.DeliveredMessage.Builder subBuilder = null; - if (((bitField0_ & 0x00000040) == 0x00000040)) { - subBuilder = confirmMessage_.toBuilder(); - } - confirmMessage_ = input.readMessage(akka.persistence.serialization.MessageFormats.DeliveredMessage.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(confirmMessage_); - confirmMessage_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000040; - break; - } - case 82: { - bitField0_ |= 0x00000080; - confirmTarget_ = input.readBytes(); - break; - } case 90: { - bitField0_ |= 0x00000100; + bitField0_ |= 0x00000010; sender_ = input.readBytes(); break; } @@ -958,9 +167,6 @@ public final class MessageFormats { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { - if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { - confirms_ = new com.google.protobuf.UnmodifiableLazyStringList(confirms_); - } 
this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } @@ -1032,7 +238,7 @@ public final class MessageFormats { } // optional string persistenceId = 3; - public static final int PersistenceId_FIELD_NUMBER = 3; + public static final int PERSISTENCEID_FIELD_NUMBER = 3; private java.lang.Object persistenceId_; /** * optional string persistenceId = 3; @@ -1090,133 +296,6 @@ public final class MessageFormats { return deleted_; } - // optional int32 redeliveries = 6; - public static final int REDELIVERIES_FIELD_NUMBER = 6; - private int redeliveries_; - /** - * optional int32 redeliveries = 6; - */ - public boolean hasRedeliveries() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - /** - * optional int32 redeliveries = 6; - */ - public int getRedeliveries() { - return redeliveries_; - } - - // repeated string confirms = 7; - public static final int CONFIRMS_FIELD_NUMBER = 7; - private com.google.protobuf.LazyStringList confirms_; - /** - * repeated string confirms = 7; - */ - public java.util.List - getConfirmsList() { - return confirms_; - } - /** - * repeated string confirms = 7; - */ - public int getConfirmsCount() { - return confirms_.size(); - } - /** - * repeated string confirms = 7; - */ - public java.lang.String getConfirms(int index) { - return confirms_.get(index); - } - /** - * repeated string confirms = 7; - */ - public com.google.protobuf.ByteString - getConfirmsBytes(int index) { - return confirms_.getByteString(index); - } - - // optional bool confirmable = 8; - public static final int CONFIRMABLE_FIELD_NUMBER = 8; - private boolean confirmable_; - /** - * optional bool confirmable = 8; - */ - public boolean hasConfirmable() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - /** - * optional bool confirmable = 8; - */ - public boolean getConfirmable() { - return confirmable_; - } - - // optional .DeliveredMessage confirmMessage = 9; - public static final int CONFIRMMESSAGE_FIELD_NUMBER = 9; - private 
akka.persistence.serialization.MessageFormats.DeliveredMessage confirmMessage_; - /** - * optional .DeliveredMessage confirmMessage = 9; - */ - public boolean hasConfirmMessage() { - return ((bitField0_ & 0x00000040) == 0x00000040); - } - /** - * optional .DeliveredMessage confirmMessage = 9; - */ - public akka.persistence.serialization.MessageFormats.DeliveredMessage getConfirmMessage() { - return confirmMessage_; - } - /** - * optional .DeliveredMessage confirmMessage = 9; - */ - public akka.persistence.serialization.MessageFormats.DeliveredMessageOrBuilder getConfirmMessageOrBuilder() { - return confirmMessage_; - } - - // optional string confirmTarget = 10; - public static final int CONFIRMTARGET_FIELD_NUMBER = 10; - private java.lang.Object confirmTarget_; - /** - * optional string confirmTarget = 10; - */ - public boolean hasConfirmTarget() { - return ((bitField0_ & 0x00000080) == 0x00000080); - } - /** - * optional string confirmTarget = 10; - */ - public java.lang.String getConfirmTarget() { - java.lang.Object ref = confirmTarget_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - confirmTarget_ = s; - } - return s; - } - } - /** - * optional string confirmTarget = 10; - */ - public com.google.protobuf.ByteString - getConfirmTargetBytes() { - java.lang.Object ref = confirmTarget_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - confirmTarget_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - // optional string sender = 11; public static final int SENDER_FIELD_NUMBER = 11; private java.lang.Object sender_; @@ -1224,7 +303,7 @@ public final class MessageFormats { * optional string sender = 11; */ public boolean hasSender() { - return 
((bitField0_ & 0x00000100) == 0x00000100); + return ((bitField0_ & 0x00000010) == 0x00000010); } /** * optional string sender = 11; @@ -1265,11 +344,6 @@ public final class MessageFormats { sequenceNr_ = 0L; persistenceId_ = ""; deleted_ = false; - redeliveries_ = 0; - confirms_ = com.google.protobuf.LazyStringArrayList.EMPTY; - confirmable_ = false; - confirmMessage_ = akka.persistence.serialization.MessageFormats.DeliveredMessage.getDefaultInstance(); - confirmTarget_ = ""; sender_ = ""; } private byte memoizedIsInitialized = -1; @@ -1303,21 +377,6 @@ public final class MessageFormats { output.writeBool(4, deleted_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeInt32(6, redeliveries_); - } - for (int i = 0; i < confirms_.size(); i++) { - output.writeBytes(7, confirms_.getByteString(i)); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - output.writeBool(8, confirmable_); - } - if (((bitField0_ & 0x00000040) == 0x00000040)) { - output.writeMessage(9, confirmMessage_); - } - if (((bitField0_ & 0x00000080) == 0x00000080)) { - output.writeBytes(10, getConfirmTargetBytes()); - } - if (((bitField0_ & 0x00000100) == 0x00000100)) { output.writeBytes(11, getSenderBytes()); } getUnknownFields().writeTo(output); @@ -1346,31 +405,6 @@ public final class MessageFormats { .computeBoolSize(4, deleted_); } if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(6, redeliveries_); - } - { - int dataSize = 0; - for (int i = 0; i < confirms_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(confirms_.getByteString(i)); - } - size += dataSize; - size += 1 * getConfirmsList().size(); - } - if (((bitField0_ & 0x00000020) == 0x00000020)) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(8, confirmable_); - } - if (((bitField0_ & 0x00000040) == 0x00000040)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(9, 
confirmMessage_); - } - if (((bitField0_ & 0x00000080) == 0x00000080)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(10, getConfirmTargetBytes()); - } - if (((bitField0_ & 0x00000100) == 0x00000100)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(11, getSenderBytes()); } @@ -1483,7 +517,6 @@ public final class MessageFormats { private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getPayloadFieldBuilder(); - getConfirmMessageFieldBuilder(); } } private static Builder create() { @@ -1504,22 +537,8 @@ public final class MessageFormats { bitField0_ = (bitField0_ & ~0x00000004); deleted_ = false; bitField0_ = (bitField0_ & ~0x00000008); - redeliveries_ = 0; - bitField0_ = (bitField0_ & ~0x00000010); - confirms_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000020); - confirmable_ = false; - bitField0_ = (bitField0_ & ~0x00000040); - if (confirmMessageBuilder_ == null) { - confirmMessage_ = akka.persistence.serialization.MessageFormats.DeliveredMessage.getDefaultInstance(); - } else { - confirmMessageBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000080); - confirmTarget_ = ""; - bitField0_ = (bitField0_ & ~0x00000100); sender_ = ""; - bitField0_ = (bitField0_ & ~0x00000200); + bitField0_ = (bitField0_ & ~0x00000010); return this; } @@ -1571,32 +590,6 @@ public final class MessageFormats { if (((from_bitField0_ & 0x00000010) == 0x00000010)) { to_bitField0_ |= 0x00000010; } - result.redeliveries_ = redeliveries_; - if (((bitField0_ & 0x00000020) == 0x00000020)) { - confirms_ = new com.google.protobuf.UnmodifiableLazyStringList( - confirms_); - bitField0_ = (bitField0_ & ~0x00000020); - } - result.confirms_ = confirms_; - if (((from_bitField0_ & 0x00000040) == 0x00000040)) { - to_bitField0_ |= 0x00000020; - } - result.confirmable_ = confirmable_; - if (((from_bitField0_ & 0x00000080) == 0x00000080)) { - to_bitField0_ |= 
0x00000040; - } - if (confirmMessageBuilder_ == null) { - result.confirmMessage_ = confirmMessage_; - } else { - result.confirmMessage_ = confirmMessageBuilder_.build(); - } - if (((from_bitField0_ & 0x00000100) == 0x00000100)) { - to_bitField0_ |= 0x00000080; - } - result.confirmTarget_ = confirmTarget_; - if (((from_bitField0_ & 0x00000200) == 0x00000200)) { - to_bitField0_ |= 0x00000100; - } result.sender_ = sender_; result.bitField0_ = to_bitField0_; onBuilt(); @@ -1628,32 +621,8 @@ public final class MessageFormats { if (other.hasDeleted()) { setDeleted(other.getDeleted()); } - if (other.hasRedeliveries()) { - setRedeliveries(other.getRedeliveries()); - } - if (!other.confirms_.isEmpty()) { - if (confirms_.isEmpty()) { - confirms_ = other.confirms_; - bitField0_ = (bitField0_ & ~0x00000020); - } else { - ensureConfirmsIsMutable(); - confirms_.addAll(other.confirms_); - } - onChanged(); - } - if (other.hasConfirmable()) { - setConfirmable(other.getConfirmable()); - } - if (other.hasConfirmMessage()) { - mergeConfirmMessage(other.getConfirmMessage()); - } - if (other.hasConfirmTarget()) { - bitField0_ |= 0x00000100; - confirmTarget_ = other.confirmTarget_; - onChanged(); - } if (other.hasSender()) { - bitField0_ |= 0x00000200; + bitField0_ |= 0x00000010; sender_ = other.sender_; onChanged(); } @@ -1947,363 +916,13 @@ public final class MessageFormats { return this; } - // optional int32 redeliveries = 6; - private int redeliveries_ ; - /** - * optional int32 redeliveries = 6; - */ - public boolean hasRedeliveries() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - /** - * optional int32 redeliveries = 6; - */ - public int getRedeliveries() { - return redeliveries_; - } - /** - * optional int32 redeliveries = 6; - */ - public Builder setRedeliveries(int value) { - bitField0_ |= 0x00000010; - redeliveries_ = value; - onChanged(); - return this; - } - /** - * optional int32 redeliveries = 6; - */ - public Builder clearRedeliveries() { - bitField0_ = 
(bitField0_ & ~0x00000010); - redeliveries_ = 0; - onChanged(); - return this; - } - - // repeated string confirms = 7; - private com.google.protobuf.LazyStringList confirms_ = com.google.protobuf.LazyStringArrayList.EMPTY; - private void ensureConfirmsIsMutable() { - if (!((bitField0_ & 0x00000020) == 0x00000020)) { - confirms_ = new com.google.protobuf.LazyStringArrayList(confirms_); - bitField0_ |= 0x00000020; - } - } - /** - * repeated string confirms = 7; - */ - public java.util.List - getConfirmsList() { - return java.util.Collections.unmodifiableList(confirms_); - } - /** - * repeated string confirms = 7; - */ - public int getConfirmsCount() { - return confirms_.size(); - } - /** - * repeated string confirms = 7; - */ - public java.lang.String getConfirms(int index) { - return confirms_.get(index); - } - /** - * repeated string confirms = 7; - */ - public com.google.protobuf.ByteString - getConfirmsBytes(int index) { - return confirms_.getByteString(index); - } - /** - * repeated string confirms = 7; - */ - public Builder setConfirms( - int index, java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureConfirmsIsMutable(); - confirms_.set(index, value); - onChanged(); - return this; - } - /** - * repeated string confirms = 7; - */ - public Builder addConfirms( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureConfirmsIsMutable(); - confirms_.add(value); - onChanged(); - return this; - } - /** - * repeated string confirms = 7; - */ - public Builder addAllConfirms( - java.lang.Iterable values) { - ensureConfirmsIsMutable(); - super.addAll(values, confirms_); - onChanged(); - return this; - } - /** - * repeated string confirms = 7; - */ - public Builder clearConfirms() { - confirms_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000020); - onChanged(); - return this; - } - /** - * repeated string confirms = 7; - */ - public Builder 
addConfirmsBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureConfirmsIsMutable(); - confirms_.add(value); - onChanged(); - return this; - } - - // optional bool confirmable = 8; - private boolean confirmable_ ; - /** - * optional bool confirmable = 8; - */ - public boolean hasConfirmable() { - return ((bitField0_ & 0x00000040) == 0x00000040); - } - /** - * optional bool confirmable = 8; - */ - public boolean getConfirmable() { - return confirmable_; - } - /** - * optional bool confirmable = 8; - */ - public Builder setConfirmable(boolean value) { - bitField0_ |= 0x00000040; - confirmable_ = value; - onChanged(); - return this; - } - /** - * optional bool confirmable = 8; - */ - public Builder clearConfirmable() { - bitField0_ = (bitField0_ & ~0x00000040); - confirmable_ = false; - onChanged(); - return this; - } - - // optional .DeliveredMessage confirmMessage = 9; - private akka.persistence.serialization.MessageFormats.DeliveredMessage confirmMessage_ = akka.persistence.serialization.MessageFormats.DeliveredMessage.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - akka.persistence.serialization.MessageFormats.DeliveredMessage, akka.persistence.serialization.MessageFormats.DeliveredMessage.Builder, akka.persistence.serialization.MessageFormats.DeliveredMessageOrBuilder> confirmMessageBuilder_; - /** - * optional .DeliveredMessage confirmMessage = 9; - */ - public boolean hasConfirmMessage() { - return ((bitField0_ & 0x00000080) == 0x00000080); - } - /** - * optional .DeliveredMessage confirmMessage = 9; - */ - public akka.persistence.serialization.MessageFormats.DeliveredMessage getConfirmMessage() { - if (confirmMessageBuilder_ == null) { - return confirmMessage_; - } else { - return confirmMessageBuilder_.getMessage(); - } - } - /** - * optional .DeliveredMessage confirmMessage = 9; - */ - public Builder 
setConfirmMessage(akka.persistence.serialization.MessageFormats.DeliveredMessage value) { - if (confirmMessageBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - confirmMessage_ = value; - onChanged(); - } else { - confirmMessageBuilder_.setMessage(value); - } - bitField0_ |= 0x00000080; - return this; - } - /** - * optional .DeliveredMessage confirmMessage = 9; - */ - public Builder setConfirmMessage( - akka.persistence.serialization.MessageFormats.DeliveredMessage.Builder builderForValue) { - if (confirmMessageBuilder_ == null) { - confirmMessage_ = builderForValue.build(); - onChanged(); - } else { - confirmMessageBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000080; - return this; - } - /** - * optional .DeliveredMessage confirmMessage = 9; - */ - public Builder mergeConfirmMessage(akka.persistence.serialization.MessageFormats.DeliveredMessage value) { - if (confirmMessageBuilder_ == null) { - if (((bitField0_ & 0x00000080) == 0x00000080) && - confirmMessage_ != akka.persistence.serialization.MessageFormats.DeliveredMessage.getDefaultInstance()) { - confirmMessage_ = - akka.persistence.serialization.MessageFormats.DeliveredMessage.newBuilder(confirmMessage_).mergeFrom(value).buildPartial(); - } else { - confirmMessage_ = value; - } - onChanged(); - } else { - confirmMessageBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000080; - return this; - } - /** - * optional .DeliveredMessage confirmMessage = 9; - */ - public Builder clearConfirmMessage() { - if (confirmMessageBuilder_ == null) { - confirmMessage_ = akka.persistence.serialization.MessageFormats.DeliveredMessage.getDefaultInstance(); - onChanged(); - } else { - confirmMessageBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000080); - return this; - } - /** - * optional .DeliveredMessage confirmMessage = 9; - */ - public akka.persistence.serialization.MessageFormats.DeliveredMessage.Builder getConfirmMessageBuilder() { - bitField0_ |= 
0x00000080; - onChanged(); - return getConfirmMessageFieldBuilder().getBuilder(); - } - /** - * optional .DeliveredMessage confirmMessage = 9; - */ - public akka.persistence.serialization.MessageFormats.DeliveredMessageOrBuilder getConfirmMessageOrBuilder() { - if (confirmMessageBuilder_ != null) { - return confirmMessageBuilder_.getMessageOrBuilder(); - } else { - return confirmMessage_; - } - } - /** - * optional .DeliveredMessage confirmMessage = 9; - */ - private com.google.protobuf.SingleFieldBuilder< - akka.persistence.serialization.MessageFormats.DeliveredMessage, akka.persistence.serialization.MessageFormats.DeliveredMessage.Builder, akka.persistence.serialization.MessageFormats.DeliveredMessageOrBuilder> - getConfirmMessageFieldBuilder() { - if (confirmMessageBuilder_ == null) { - confirmMessageBuilder_ = new com.google.protobuf.SingleFieldBuilder< - akka.persistence.serialization.MessageFormats.DeliveredMessage, akka.persistence.serialization.MessageFormats.DeliveredMessage.Builder, akka.persistence.serialization.MessageFormats.DeliveredMessageOrBuilder>( - confirmMessage_, - getParentForChildren(), - isClean()); - confirmMessage_ = null; - } - return confirmMessageBuilder_; - } - - // optional string confirmTarget = 10; - private java.lang.Object confirmTarget_ = ""; - /** - * optional string confirmTarget = 10; - */ - public boolean hasConfirmTarget() { - return ((bitField0_ & 0x00000100) == 0x00000100); - } - /** - * optional string confirmTarget = 10; - */ - public java.lang.String getConfirmTarget() { - java.lang.Object ref = confirmTarget_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - confirmTarget_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string confirmTarget = 10; - */ - public com.google.protobuf.ByteString - getConfirmTargetBytes() { - java.lang.Object ref = confirmTarget_; - if (ref instanceof String) { - 
com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - confirmTarget_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string confirmTarget = 10; - */ - public Builder setConfirmTarget( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000100; - confirmTarget_ = value; - onChanged(); - return this; - } - /** - * optional string confirmTarget = 10; - */ - public Builder clearConfirmTarget() { - bitField0_ = (bitField0_ & ~0x00000100); - confirmTarget_ = getDefaultInstance().getConfirmTarget(); - onChanged(); - return this; - } - /** - * optional string confirmTarget = 10; - */ - public Builder setConfirmTargetBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000100; - confirmTarget_ = value; - onChanged(); - return this; - } - // optional string sender = 11; private java.lang.Object sender_ = ""; /** * optional string sender = 11; */ public boolean hasSender() { - return ((bitField0_ & 0x00000200) == 0x00000200); + return ((bitField0_ & 0x00000010) == 0x00000010); } /** * optional string sender = 11; @@ -2343,7 +962,7 @@ public final class MessageFormats { if (value == null) { throw new NullPointerException(); } - bitField0_ |= 0x00000200; + bitField0_ |= 0x00000010; sender_ = value; onChanged(); return this; @@ -2352,7 +971,7 @@ public final class MessageFormats { * optional string sender = 11; */ public Builder clearSender() { - bitField0_ = (bitField0_ & ~0x00000200); + bitField0_ = (bitField0_ & ~0x00000010); sender_ = getDefaultInstance().getSender(); onChanged(); return this; @@ -2365,7 +984,7 @@ public final class MessageFormats { if (value == null) { throw new NullPointerException(); } - bitField0_ |= 0x00000200; + bitField0_ |= 0x00000010; sender_ = value; onChanged(); return this; @@ -2962,1626 +1581,6 @@ 
public final class MessageFormats { // @@protoc_insertion_point(class_scope:PersistentPayload) } - public interface DeliveredMessageOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional string persistenceId = 1; - /** - * optional string persistenceId = 1; - */ - boolean hasPersistenceId(); - /** - * optional string persistenceId = 1; - */ - java.lang.String getPersistenceId(); - /** - * optional string persistenceId = 1; - */ - com.google.protobuf.ByteString - getPersistenceIdBytes(); - - // optional string channelId = 2; - /** - * optional string channelId = 2; - */ - boolean hasChannelId(); - /** - * optional string channelId = 2; - */ - java.lang.String getChannelId(); - /** - * optional string channelId = 2; - */ - com.google.protobuf.ByteString - getChannelIdBytes(); - - // optional int64 persistentSequenceNr = 3; - /** - * optional int64 persistentSequenceNr = 3; - */ - boolean hasPersistentSequenceNr(); - /** - * optional int64 persistentSequenceNr = 3; - */ - long getPersistentSequenceNr(); - - // optional int64 deliverySequenceNr = 4; - /** - * optional int64 deliverySequenceNr = 4; - */ - boolean hasDeliverySequenceNr(); - /** - * optional int64 deliverySequenceNr = 4; - */ - long getDeliverySequenceNr(); - - // optional string channel = 5; - /** - * optional string channel = 5; - */ - boolean hasChannel(); - /** - * optional string channel = 5; - */ - java.lang.String getChannel(); - /** - * optional string channel = 5; - */ - com.google.protobuf.ByteString - getChannelBytes(); - } - /** - * Protobuf type {@code DeliveredMessage} - */ - public static final class DeliveredMessage extends - com.google.protobuf.GeneratedMessage - implements DeliveredMessageOrBuilder { - // Use DeliveredMessage.newBuilder() to construct. 
- private DeliveredMessage(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DeliveredMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DeliveredMessage defaultInstance; - public static DeliveredMessage getDefaultInstance() { - return defaultInstance; - } - - public DeliveredMessage getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private DeliveredMessage( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - persistenceId_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - channelId_ = input.readBytes(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - persistentSequenceNr_ = input.readInt64(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - deliverySequenceNr_ = input.readInt64(); - break; - } - case 42: { - bitField0_ |= 0x00000010; - channel_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - 
e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return akka.persistence.serialization.MessageFormats.internal_static_DeliveredMessage_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return akka.persistence.serialization.MessageFormats.internal_static_DeliveredMessage_fieldAccessorTable - .ensureFieldAccessorsInitialized( - akka.persistence.serialization.MessageFormats.DeliveredMessage.class, akka.persistence.serialization.MessageFormats.DeliveredMessage.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DeliveredMessage parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DeliveredMessage(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // optional string persistenceId = 1; - public static final int PersistenceId_FIELD_NUMBER = 1; - private java.lang.Object persistenceId_; - /** - * optional string persistenceId = 1; - */ - public boolean hasPersistenceId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional string persistenceId = 1; - */ - public java.lang.String getPersistenceId() { - java.lang.Object ref = persistenceId_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - persistenceId_ = s; - } - return s; - } - } - /** - * optional string persistenceId = 1; - */ - 
public com.google.protobuf.ByteString - getPersistenceIdBytes() { - java.lang.Object ref = persistenceId_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - persistenceId_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional string channelId = 2; - public static final int CHANNELID_FIELD_NUMBER = 2; - private java.lang.Object channelId_; - /** - * optional string channelId = 2; - */ - public boolean hasChannelId() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional string channelId = 2; - */ - public java.lang.String getChannelId() { - java.lang.Object ref = channelId_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - channelId_ = s; - } - return s; - } - } - /** - * optional string channelId = 2; - */ - public com.google.protobuf.ByteString - getChannelIdBytes() { - java.lang.Object ref = channelId_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - channelId_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional int64 persistentSequenceNr = 3; - public static final int PERSISTENTSEQUENCENR_FIELD_NUMBER = 3; - private long persistentSequenceNr_; - /** - * optional int64 persistentSequenceNr = 3; - */ - public boolean hasPersistentSequenceNr() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional int64 persistentSequenceNr = 3; - */ - public long getPersistentSequenceNr() { - return persistentSequenceNr_; - } - - // optional int64 deliverySequenceNr = 4; - public static final int DELIVERYSEQUENCENR_FIELD_NUMBER = 4; - private long 
deliverySequenceNr_; - /** - * optional int64 deliverySequenceNr = 4; - */ - public boolean hasDeliverySequenceNr() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * optional int64 deliverySequenceNr = 4; - */ - public long getDeliverySequenceNr() { - return deliverySequenceNr_; - } - - // optional string channel = 5; - public static final int CHANNEL_FIELD_NUMBER = 5; - private java.lang.Object channel_; - /** - * optional string channel = 5; - */ - public boolean hasChannel() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - /** - * optional string channel = 5; - */ - public java.lang.String getChannel() { - java.lang.Object ref = channel_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - channel_ = s; - } - return s; - } - } - /** - * optional string channel = 5; - */ - public com.google.protobuf.ByteString - getChannelBytes() { - java.lang.Object ref = channel_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - channel_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - persistenceId_ = ""; - channelId_ = ""; - persistentSequenceNr_ = 0L; - deliverySequenceNr_ = 0L; - channel_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getPersistenceIdBytes()); - } - if (((bitField0_ & 0x00000002) == 
0x00000002)) { - output.writeBytes(2, getChannelIdBytes()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeInt64(3, persistentSequenceNr_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeInt64(4, deliverySequenceNr_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeBytes(5, getChannelBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getPersistenceIdBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getChannelIdBytes()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeInt64Size(3, persistentSequenceNr_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeInt64Size(4, deliverySequenceNr_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(5, getChannelBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - public static akka.persistence.serialization.MessageFormats.DeliveredMessage parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static akka.persistence.serialization.MessageFormats.DeliveredMessage parseFrom( - com.google.protobuf.ByteString data, - 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static akka.persistence.serialization.MessageFormats.DeliveredMessage parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static akka.persistence.serialization.MessageFormats.DeliveredMessage parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static akka.persistence.serialization.MessageFormats.DeliveredMessage parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static akka.persistence.serialization.MessageFormats.DeliveredMessage parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static akka.persistence.serialization.MessageFormats.DeliveredMessage parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static akka.persistence.serialization.MessageFormats.DeliveredMessage parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static akka.persistence.serialization.MessageFormats.DeliveredMessage parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static akka.persistence.serialization.MessageFormats.DeliveredMessage parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(akka.persistence.serialization.MessageFormats.DeliveredMessage prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code DeliveredMessage} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements akka.persistence.serialization.MessageFormats.DeliveredMessageOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return akka.persistence.serialization.MessageFormats.internal_static_DeliveredMessage_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return akka.persistence.serialization.MessageFormats.internal_static_DeliveredMessage_fieldAccessorTable - .ensureFieldAccessorsInitialized( - akka.persistence.serialization.MessageFormats.DeliveredMessage.class, akka.persistence.serialization.MessageFormats.DeliveredMessage.Builder.class); - } - - // Construct using akka.persistence.serialization.MessageFormats.DeliveredMessage.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - 
super.clear(); - persistenceId_ = ""; - bitField0_ = (bitField0_ & ~0x00000001); - channelId_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - persistentSequenceNr_ = 0L; - bitField0_ = (bitField0_ & ~0x00000004); - deliverySequenceNr_ = 0L; - bitField0_ = (bitField0_ & ~0x00000008); - channel_ = ""; - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return akka.persistence.serialization.MessageFormats.internal_static_DeliveredMessage_descriptor; - } - - public akka.persistence.serialization.MessageFormats.DeliveredMessage getDefaultInstanceForType() { - return akka.persistence.serialization.MessageFormats.DeliveredMessage.getDefaultInstance(); - } - - public akka.persistence.serialization.MessageFormats.DeliveredMessage build() { - akka.persistence.serialization.MessageFormats.DeliveredMessage result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public akka.persistence.serialization.MessageFormats.DeliveredMessage buildPartial() { - akka.persistence.serialization.MessageFormats.DeliveredMessage result = new akka.persistence.serialization.MessageFormats.DeliveredMessage(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.persistenceId_ = persistenceId_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.channelId_ = channelId_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.persistentSequenceNr_ = persistentSequenceNr_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.deliverySequenceNr_ = deliverySequenceNr_; - if (((from_bitField0_ & 0x00000010) 
== 0x00000010)) { - to_bitField0_ |= 0x00000010; - } - result.channel_ = channel_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof akka.persistence.serialization.MessageFormats.DeliveredMessage) { - return mergeFrom((akka.persistence.serialization.MessageFormats.DeliveredMessage)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(akka.persistence.serialization.MessageFormats.DeliveredMessage other) { - if (other == akka.persistence.serialization.MessageFormats.DeliveredMessage.getDefaultInstance()) return this; - if (other.hasPersistenceId()) { - bitField0_ |= 0x00000001; - persistenceId_ = other.persistenceId_; - onChanged(); - } - if (other.hasChannelId()) { - bitField0_ |= 0x00000002; - channelId_ = other.channelId_; - onChanged(); - } - if (other.hasPersistentSequenceNr()) { - setPersistentSequenceNr(other.getPersistentSequenceNr()); - } - if (other.hasDeliverySequenceNr()) { - setDeliverySequenceNr(other.getDeliverySequenceNr()); - } - if (other.hasChannel()) { - bitField0_ |= 0x00000010; - channel_ = other.channel_; - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - akka.persistence.serialization.MessageFormats.DeliveredMessage parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (akka.persistence.serialization.MessageFormats.DeliveredMessage) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - 
// optional string persistenceId = 1; - private java.lang.Object persistenceId_ = ""; - /** - * optional string persistenceId = 1; - */ - public boolean hasPersistenceId() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional string persistenceId = 1; - */ - public java.lang.String getPersistenceId() { - java.lang.Object ref = persistenceId_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - persistenceId_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string persistenceId = 1; - */ - public com.google.protobuf.ByteString - getPersistenceIdBytes() { - java.lang.Object ref = persistenceId_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - persistenceId_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string persistenceId = 1; - */ - public Builder setPersistenceId( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - persistenceId_ = value; - onChanged(); - return this; - } - /** - * optional string persistenceId = 1; - */ - public Builder clearPersistenceId() { - bitField0_ = (bitField0_ & ~0x00000001); - persistenceId_ = getDefaultInstance().getPersistenceId(); - onChanged(); - return this; - } - /** - * optional string persistenceId = 1; - */ - public Builder setPersistenceIdBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - persistenceId_ = value; - onChanged(); - return this; - } - - // optional string channelId = 2; - private java.lang.Object channelId_ = ""; - /** - * optional string channelId = 2; - */ - public boolean hasChannelId() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional string 
channelId = 2; - */ - public java.lang.String getChannelId() { - java.lang.Object ref = channelId_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - channelId_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string channelId = 2; - */ - public com.google.protobuf.ByteString - getChannelIdBytes() { - java.lang.Object ref = channelId_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - channelId_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string channelId = 2; - */ - public Builder setChannelId( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - channelId_ = value; - onChanged(); - return this; - } - /** - * optional string channelId = 2; - */ - public Builder clearChannelId() { - bitField0_ = (bitField0_ & ~0x00000002); - channelId_ = getDefaultInstance().getChannelId(); - onChanged(); - return this; - } - /** - * optional string channelId = 2; - */ - public Builder setChannelIdBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - channelId_ = value; - onChanged(); - return this; - } - - // optional int64 persistentSequenceNr = 3; - private long persistentSequenceNr_ ; - /** - * optional int64 persistentSequenceNr = 3; - */ - public boolean hasPersistentSequenceNr() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional int64 persistentSequenceNr = 3; - */ - public long getPersistentSequenceNr() { - return persistentSequenceNr_; - } - /** - * optional int64 persistentSequenceNr = 3; - */ - public Builder setPersistentSequenceNr(long value) { - bitField0_ |= 0x00000004; - persistentSequenceNr_ = value; - onChanged(); - return 
this; - } - /** - * optional int64 persistentSequenceNr = 3; - */ - public Builder clearPersistentSequenceNr() { - bitField0_ = (bitField0_ & ~0x00000004); - persistentSequenceNr_ = 0L; - onChanged(); - return this; - } - - // optional int64 deliverySequenceNr = 4; - private long deliverySequenceNr_ ; - /** - * optional int64 deliverySequenceNr = 4; - */ - public boolean hasDeliverySequenceNr() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * optional int64 deliverySequenceNr = 4; - */ - public long getDeliverySequenceNr() { - return deliverySequenceNr_; - } - /** - * optional int64 deliverySequenceNr = 4; - */ - public Builder setDeliverySequenceNr(long value) { - bitField0_ |= 0x00000008; - deliverySequenceNr_ = value; - onChanged(); - return this; - } - /** - * optional int64 deliverySequenceNr = 4; - */ - public Builder clearDeliverySequenceNr() { - bitField0_ = (bitField0_ & ~0x00000008); - deliverySequenceNr_ = 0L; - onChanged(); - return this; - } - - // optional string channel = 5; - private java.lang.Object channel_ = ""; - /** - * optional string channel = 5; - */ - public boolean hasChannel() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - /** - * optional string channel = 5; - */ - public java.lang.String getChannel() { - java.lang.Object ref = channel_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - channel_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string channel = 5; - */ - public com.google.protobuf.ByteString - getChannelBytes() { - java.lang.Object ref = channel_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - channel_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string channel = 5; - */ - public Builder setChannel( - java.lang.String value) 
{ - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000010; - channel_ = value; - onChanged(); - return this; - } - /** - * optional string channel = 5; - */ - public Builder clearChannel() { - bitField0_ = (bitField0_ & ~0x00000010); - channel_ = getDefaultInstance().getChannel(); - onChanged(); - return this; - } - /** - * optional string channel = 5; - */ - public Builder setChannelBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000010; - channel_ = value; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:DeliveredMessage) - } - - static { - defaultInstance = new DeliveredMessage(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:DeliveredMessage) - } - - public interface DeliverMessageOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // optional .PersistentMessage persistent = 1; - /** - * optional .PersistentMessage persistent = 1; - */ - boolean hasPersistent(); - /** - * optional .PersistentMessage persistent = 1; - */ - akka.persistence.serialization.MessageFormats.PersistentMessage getPersistent(); - /** - * optional .PersistentMessage persistent = 1; - */ - akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder getPersistentOrBuilder(); - - // optional string destination = 2; - /** - * optional string destination = 2; - */ - boolean hasDestination(); - /** - * optional string destination = 2; - */ - java.lang.String getDestination(); - /** - * optional string destination = 2; - */ - com.google.protobuf.ByteString - getDestinationBytes(); - } - /** - * Protobuf type {@code DeliverMessage} - */ - public static final class DeliverMessage extends - com.google.protobuf.GeneratedMessage - implements DeliverMessageOrBuilder { - // Use DeliverMessage.newBuilder() to construct. 
- private DeliverMessage(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private DeliverMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final DeliverMessage defaultInstance; - public static DeliverMessage getDefaultInstance() { - return defaultInstance; - } - - public DeliverMessage getDefaultInstanceForType() { - return defaultInstance; - } - - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private DeliverMessage( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - akka.persistence.serialization.MessageFormats.PersistentMessage.Builder subBuilder = null; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - subBuilder = persistent_.toBuilder(); - } - persistent_ = input.readMessage(akka.persistence.serialization.MessageFormats.PersistentMessage.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(persistent_); - persistent_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000001; - break; - } - case 18: { - bitField0_ |= 0x00000002; - destination_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } 
catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return akka.persistence.serialization.MessageFormats.internal_static_DeliverMessage_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return akka.persistence.serialization.MessageFormats.internal_static_DeliverMessage_fieldAccessorTable - .ensureFieldAccessorsInitialized( - akka.persistence.serialization.MessageFormats.DeliverMessage.class, akka.persistence.serialization.MessageFormats.DeliverMessage.Builder.class); - } - - public static com.google.protobuf.Parser PARSER = - new com.google.protobuf.AbstractParser() { - public DeliverMessage parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new DeliverMessage(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // optional .PersistentMessage persistent = 1; - public static final int PERSISTENT_FIELD_NUMBER = 1; - private akka.persistence.serialization.MessageFormats.PersistentMessage persistent_; - /** - * optional .PersistentMessage persistent = 1; - */ - public boolean hasPersistent() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional .PersistentMessage persistent = 1; - */ - public akka.persistence.serialization.MessageFormats.PersistentMessage getPersistent() { - return persistent_; - } - /** - * optional .PersistentMessage persistent = 1; - */ - public akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder 
getPersistentOrBuilder() { - return persistent_; - } - - // optional string destination = 2; - public static final int DESTINATION_FIELD_NUMBER = 2; - private java.lang.Object destination_; - /** - * optional string destination = 2; - */ - public boolean hasDestination() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional string destination = 2; - */ - public java.lang.String getDestination() { - java.lang.Object ref = destination_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - destination_ = s; - } - return s; - } - } - /** - * optional string destination = 2; - */ - public com.google.protobuf.ByteString - getDestinationBytes() { - java.lang.Object ref = destination_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - destination_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - persistent_ = akka.persistence.serialization.MessageFormats.PersistentMessage.getDefaultInstance(); - destination_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (hasPersistent()) { - if (!getPersistent().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, persistent_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getDestinationBytes()); - } - 
getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, persistent_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getDestinationBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - public static akka.persistence.serialization.MessageFormats.DeliverMessage parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static akka.persistence.serialization.MessageFormats.DeliverMessage parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static akka.persistence.serialization.MessageFormats.DeliverMessage parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static akka.persistence.serialization.MessageFormats.DeliverMessage parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static akka.persistence.serialization.MessageFormats.DeliverMessage parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - 
public static akka.persistence.serialization.MessageFormats.DeliverMessage parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static akka.persistence.serialization.MessageFormats.DeliverMessage parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static akka.persistence.serialization.MessageFormats.DeliverMessage parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static akka.persistence.serialization.MessageFormats.DeliverMessage parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static akka.persistence.serialization.MessageFormats.DeliverMessage parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(akka.persistence.serialization.MessageFormats.DeliverMessage prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code DeliverMessage} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements 
akka.persistence.serialization.MessageFormats.DeliverMessageOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return akka.persistence.serialization.MessageFormats.internal_static_DeliverMessage_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return akka.persistence.serialization.MessageFormats.internal_static_DeliverMessage_fieldAccessorTable - .ensureFieldAccessorsInitialized( - akka.persistence.serialization.MessageFormats.DeliverMessage.class, akka.persistence.serialization.MessageFormats.DeliverMessage.Builder.class); - } - - // Construct using akka.persistence.serialization.MessageFormats.DeliverMessage.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getPersistentFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (persistentBuilder_ == null) { - persistent_ = akka.persistence.serialization.MessageFormats.PersistentMessage.getDefaultInstance(); - } else { - persistentBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - destination_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return akka.persistence.serialization.MessageFormats.internal_static_DeliverMessage_descriptor; - } - - public akka.persistence.serialization.MessageFormats.DeliverMessage getDefaultInstanceForType() { - return akka.persistence.serialization.MessageFormats.DeliverMessage.getDefaultInstance(); - } - - 
public akka.persistence.serialization.MessageFormats.DeliverMessage build() { - akka.persistence.serialization.MessageFormats.DeliverMessage result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public akka.persistence.serialization.MessageFormats.DeliverMessage buildPartial() { - akka.persistence.serialization.MessageFormats.DeliverMessage result = new akka.persistence.serialization.MessageFormats.DeliverMessage(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (persistentBuilder_ == null) { - result.persistent_ = persistent_; - } else { - result.persistent_ = persistentBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.destination_ = destination_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof akka.persistence.serialization.MessageFormats.DeliverMessage) { - return mergeFrom((akka.persistence.serialization.MessageFormats.DeliverMessage)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(akka.persistence.serialization.MessageFormats.DeliverMessage other) { - if (other == akka.persistence.serialization.MessageFormats.DeliverMessage.getDefaultInstance()) return this; - if (other.hasPersistent()) { - mergePersistent(other.getPersistent()); - } - if (other.hasDestination()) { - bitField0_ |= 0x00000002; - destination_ = other.destination_; - onChanged(); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (hasPersistent()) { - if (!getPersistent().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream 
input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - akka.persistence.serialization.MessageFormats.DeliverMessage parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (akka.persistence.serialization.MessageFormats.DeliverMessage) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // optional .PersistentMessage persistent = 1; - private akka.persistence.serialization.MessageFormats.PersistentMessage persistent_ = akka.persistence.serialization.MessageFormats.PersistentMessage.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - akka.persistence.serialization.MessageFormats.PersistentMessage, akka.persistence.serialization.MessageFormats.PersistentMessage.Builder, akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder> persistentBuilder_; - /** - * optional .PersistentMessage persistent = 1; - */ - public boolean hasPersistent() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional .PersistentMessage persistent = 1; - */ - public akka.persistence.serialization.MessageFormats.PersistentMessage getPersistent() { - if (persistentBuilder_ == null) { - return persistent_; - } else { - return persistentBuilder_.getMessage(); - } - } - /** - * optional .PersistentMessage persistent = 1; - */ - public Builder setPersistent(akka.persistence.serialization.MessageFormats.PersistentMessage value) { - if (persistentBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - persistent_ = value; - onChanged(); - } else { - persistentBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * optional .PersistentMessage persistent = 1; - */ - public Builder setPersistent( - 
akka.persistence.serialization.MessageFormats.PersistentMessage.Builder builderForValue) { - if (persistentBuilder_ == null) { - persistent_ = builderForValue.build(); - onChanged(); - } else { - persistentBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * optional .PersistentMessage persistent = 1; - */ - public Builder mergePersistent(akka.persistence.serialization.MessageFormats.PersistentMessage value) { - if (persistentBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - persistent_ != akka.persistence.serialization.MessageFormats.PersistentMessage.getDefaultInstance()) { - persistent_ = - akka.persistence.serialization.MessageFormats.PersistentMessage.newBuilder(persistent_).mergeFrom(value).buildPartial(); - } else { - persistent_ = value; - } - onChanged(); - } else { - persistentBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * optional .PersistentMessage persistent = 1; - */ - public Builder clearPersistent() { - if (persistentBuilder_ == null) { - persistent_ = akka.persistence.serialization.MessageFormats.PersistentMessage.getDefaultInstance(); - onChanged(); - } else { - persistentBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - /** - * optional .PersistentMessage persistent = 1; - */ - public akka.persistence.serialization.MessageFormats.PersistentMessage.Builder getPersistentBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getPersistentFieldBuilder().getBuilder(); - } - /** - * optional .PersistentMessage persistent = 1; - */ - public akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder getPersistentOrBuilder() { - if (persistentBuilder_ != null) { - return persistentBuilder_.getMessageOrBuilder(); - } else { - return persistent_; - } - } - /** - * optional .PersistentMessage persistent = 1; - */ - private com.google.protobuf.SingleFieldBuilder< - 
akka.persistence.serialization.MessageFormats.PersistentMessage, akka.persistence.serialization.MessageFormats.PersistentMessage.Builder, akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder> - getPersistentFieldBuilder() { - if (persistentBuilder_ == null) { - persistentBuilder_ = new com.google.protobuf.SingleFieldBuilder< - akka.persistence.serialization.MessageFormats.PersistentMessage, akka.persistence.serialization.MessageFormats.PersistentMessage.Builder, akka.persistence.serialization.MessageFormats.PersistentMessageOrBuilder>( - persistent_, - getParentForChildren(), - isClean()); - persistent_ = null; - } - return persistentBuilder_; - } - - // optional string destination = 2; - private java.lang.Object destination_ = ""; - /** - * optional string destination = 2; - */ - public boolean hasDestination() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional string destination = 2; - */ - public java.lang.String getDestination() { - java.lang.Object ref = destination_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - destination_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * optional string destination = 2; - */ - public com.google.protobuf.ByteString - getDestinationBytes() { - java.lang.Object ref = destination_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - destination_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * optional string destination = 2; - */ - public Builder setDestination( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - destination_ = value; - onChanged(); - return this; - } - /** - * optional string destination = 2; - */ - public Builder clearDestination() { - bitField0_ = 
(bitField0_ & ~0x00000002); - destination_ = getDefaultInstance().getDestination(); - onChanged(); - return this; - } - /** - * optional string destination = 2; - */ - public Builder setDestinationBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - destination_ = value; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:DeliverMessage) - } - - static { - defaultInstance = new DeliverMessage(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:DeliverMessage) - } - public interface AtLeastOnceDeliverySnapshotOrBuilder extends com.google.protobuf.MessageOrBuilder { @@ -6136,11 +3135,6 @@ public final class MessageFormats { // @@protoc_insertion_point(class_scope:AtLeastOnceDeliverySnapshot) } - private static com.google.protobuf.Descriptors.Descriptor - internal_static_PersistentMessageBatch_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_PersistentMessageBatch_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_PersistentMessage_descriptor; private static @@ -6151,16 +3145,6 @@ public final class MessageFormats { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_PersistentPayload_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_DeliveredMessage_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_DeliveredMessage_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_DeliverMessage_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_DeliverMessage_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_AtLeastOnceDeliverySnapshot_descriptor; private 
static @@ -6180,67 +3164,39 @@ public final class MessageFormats { descriptor; static { java.lang.String[] descriptorData = { - "\n\024MessageFormats.proto\";\n\026PersistentMess" + - "ageBatch\022!\n\005batch\030\001 \003(\0132\022.PersistentMess" + - "age\"\201\002\n\021PersistentMessage\022#\n\007payload\030\001 \001" + - "(\0132\022.PersistentPayload\022\022\n\nsequenceNr\030\002 \001" + - "(\003\022\023\n\013persistenceId\030\003 \001(\t\022\017\n\007deleted\030\004 \001(\010" + - "\022\024\n\014redeliveries\030\006 \001(\005\022\020\n\010confirms\030\007 \003(\t" + - "\022\023\n\013confirmable\030\010 \001(\010\022)\n\016confirmMessage\030" + - "\t \001(\0132\021.DeliveredMessage\022\025\n\rconfirmTarge" + - "t\030\n \001(\t\022\016\n\006sender\030\013 \001(\t\"S\n\021PersistentPay" + - "load\022\024\n\014serializerId\030\001 \002(\005\022\017\n\007payload\030\002 ", - "\002(\014\022\027\n\017payloadManifest\030\003 \001(\014\"\205\001\n\020Deliver" + - "edMessage\022\023\n\013persistenceId\030\001 \001(\t\022\021\n\tchanne" + - "lId\030\002 \001(\t\022\034\n\024persistentSequenceNr\030\003 \001(\003\022" + - "\032\n\022deliverySequenceNr\030\004 \001(\003\022\017\n\007channel\030\005" + - " \001(\t\"M\n\016DeliverMessage\022&\n\npersistent\030\001 \001" + - "(\0132\022.PersistentMessage\022\023\n\013destination\030\002 " + - "\001(\t\"\356\001\n\033AtLeastOnceDeliverySnapshot\022\031\n\021c" + - "urrentDeliveryId\030\001 \002(\003\022O\n\025unconfirmedDel" + - "iveries\030\002 \003(\01320.AtLeastOnceDeliverySnaps" + - "hot.UnconfirmedDelivery\032c\n\023UnconfirmedDe", - "livery\022\022\n\ndeliveryId\030\001 \002(\003\022\023\n\013destinatio" + - "n\030\002 \002(\t\022#\n\007payload\030\003 \002(\0132\022.PersistentPay" + - "loadB\"\n\036akka.persistence.serializationH\001" + "\n\024MessageFormats.proto\"\204\001\n\021PersistentMes" + + "sage\022#\n\007payload\030\001 \001(\0132\022.PersistentPayloa" + + "d\022\022\n\nsequenceNr\030\002 
\001(\003\022\025\n\rpersistenceId\030\003" + + " \001(\t\022\017\n\007deleted\030\004 \001(\010\022\016\n\006sender\030\013 \001(\t\"S\n" + + "\021PersistentPayload\022\024\n\014serializerId\030\001 \002(\005" + + "\022\017\n\007payload\030\002 \002(\014\022\027\n\017payloadManifest\030\003 \001" + + "(\014\"\356\001\n\033AtLeastOnceDeliverySnapshot\022\031\n\021cu" + + "rrentDeliveryId\030\001 \002(\003\022O\n\025unconfirmedDeli" + + "veries\030\002 \003(\01320.AtLeastOnceDeliverySnapsh" + + "ot.UnconfirmedDelivery\032c\n\023UnconfirmedDel", + "ivery\022\022\n\ndeliveryId\030\001 \002(\003\022\023\n\013destination" + + "\030\002 \002(\t\022#\n\007payload\030\003 \002(\0132\022.PersistentPayl" + + "oadB\"\n\036akka.persistence.serializationH\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; - internal_static_PersistentMessageBatch_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_PersistentMessageBatch_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_PersistentMessageBatch_descriptor, - new java.lang.String[] { "Batch", }); internal_static_PersistentMessage_descriptor = - getDescriptor().getMessageTypes().get(1); + getDescriptor().getMessageTypes().get(0); internal_static_PersistentMessage_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_PersistentMessage_descriptor, - new java.lang.String[] { "Payload", "SequenceNr", "PersistenceId", "Deleted", "Redeliveries", "Confirms", "Confirmable", "ConfirmMessage", "ConfirmTarget", "Sender", }); + new java.lang.String[] { "Payload", "SequenceNr", "PersistenceId", "Deleted", "Sender", }); internal_static_PersistentPayload_descriptor = - 
getDescriptor().getMessageTypes().get(2); + getDescriptor().getMessageTypes().get(1); internal_static_PersistentPayload_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_PersistentPayload_descriptor, new java.lang.String[] { "SerializerId", "Payload", "PayloadManifest", }); - internal_static_DeliveredMessage_descriptor = - getDescriptor().getMessageTypes().get(3); - internal_static_DeliveredMessage_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_DeliveredMessage_descriptor, - new java.lang.String[] { "PersistenceId", "ChannelId", "PersistentSequenceNr", "DeliverySequenceNr", "Channel", }); - internal_static_DeliverMessage_descriptor = - getDescriptor().getMessageTypes().get(4); - internal_static_DeliverMessage_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_DeliverMessage_descriptor, - new java.lang.String[] { "Persistent", "Destination", }); internal_static_AtLeastOnceDeliverySnapshot_descriptor = - getDescriptor().getMessageTypes().get(5); + getDescriptor().getMessageTypes().get(2); internal_static_AtLeastOnceDeliverySnapshot_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AtLeastOnceDeliverySnapshot_descriptor, diff --git a/akka-persistence/src/main/java/akka/persistence/snapshot/japi/SnapshotStorePlugin.java b/akka-persistence/src/main/java/akka/persistence/snapshot/japi/SnapshotStorePlugin.java index 3b506ce87f..3a022630ad 100644 --- a/akka-persistence/src/main/java/akka/persistence/snapshot/japi/SnapshotStorePlugin.java +++ b/akka-persistence/src/main/java/akka/persistence/snapshot/japi/SnapshotStorePlugin.java @@ -6,47 +6,55 @@ package akka.persistence.snapshot.japi; import scala.concurrent.Future; -import akka.japi.Option; +import akka.japi.Option; import akka.persistence.*; interface SnapshotStorePlugin { - //#snapshot-store-plugin-api - /** - * Java 
API, Plugin API: asynchronously loads a snapshot. - * - * @param persistenceId processor id. - * @param criteria selection criteria for loading. - */ - Future> doLoadAsync(String persistenceId, SnapshotSelectionCriteria criteria); + //#snapshot-store-plugin-api + /** + * Java API, Plugin API: asynchronously loads a snapshot. + * + * @param persistenceId + * id of the persistent actor. + * @param criteria + * selection criteria for loading. + */ + Future> doLoadAsync(String persistenceId, SnapshotSelectionCriteria criteria); - /** - * Java API, Plugin API: asynchronously saves a snapshot. - * - * @param metadata snapshot metadata. - * @param snapshot snapshot. - */ - Future doSaveAsync(SnapshotMetadata metadata, Object snapshot); + /** + * Java API, Plugin API: asynchronously saves a snapshot. + * + * @param metadata + * snapshot metadata. + * @param snapshot + * snapshot. + */ + Future doSaveAsync(SnapshotMetadata metadata, Object snapshot); - /** - * Java API, Plugin API: called after successful saving of a snapshot. - * - * @param metadata snapshot metadata. - */ - void onSaved(SnapshotMetadata metadata) throws Exception; + /** + * Java API, Plugin API: called after successful saving of a snapshot. + * + * @param metadata + * snapshot metadata. + */ + void onSaved(SnapshotMetadata metadata) throws Exception; - /** - * Java API, Plugin API: deletes the snapshot identified by `metadata`. - * - * @param metadata snapshot metadata. - */ - void doDelete(SnapshotMetadata metadata) throws Exception; + /** + * Java API, Plugin API: deletes the snapshot identified by `metadata`. + * + * @param metadata + * snapshot metadata. + */ + void doDelete(SnapshotMetadata metadata) throws Exception; - /** - * Java API, Plugin API: deletes all snapshots matching `criteria`. - * - * @param persistenceId processor id. - * @param criteria selection criteria for deleting. 
- */ - void doDelete(String persistenceId, SnapshotSelectionCriteria criteria) throws Exception; - //#snapshot-store-plugin-api + /** + * Java API, Plugin API: deletes all snapshots matching `criteria`. + * + * @param persistenceId + * id of the persistent actor. + * @param criteria + * selection criteria for deleting. + */ + void doDelete(String persistenceId, SnapshotSelectionCriteria criteria) throws Exception; + //#snapshot-store-plugin-api } diff --git a/akka-persistence/src/main/protobuf/MessageFormats.proto b/akka-persistence/src/main/protobuf/MessageFormats.proto index 3b28ef6a7f..51399f76ee 100644 --- a/akka-persistence/src/main/protobuf/MessageFormats.proto +++ b/akka-persistence/src/main/protobuf/MessageFormats.proto @@ -5,20 +5,16 @@ option java_package = "akka.persistence.serialization"; option optimize_for = SPEED; -message PersistentMessageBatch { - repeated PersistentMessage batch = 1; -} - message PersistentMessage { optional PersistentPayload payload = 1; optional int64 sequenceNr = 2; optional string persistenceId = 3; optional bool deleted = 4; - optional int32 redeliveries = 6; - repeated string confirms = 7; - optional bool confirmable = 8; - optional DeliveredMessage confirmMessage = 9; - optional string confirmTarget = 10; + // optional int32 redeliveries = 6; // Removed in 2.4 + // repeated string confirms = 7; // Removed in 2.4 + // optional bool confirmable = 8; // Removed in 2.4 + // optional DeliveredMessage confirmMessage = 9; // Removed in 2.4 + // optional string confirmTarget = 10; optional string sender = 11; } @@ -28,19 +24,6 @@ message PersistentPayload { optional bytes payloadManifest = 3; } -message DeliveredMessage { - optional string persistenceId = 1; - optional string channelId = 2; - optional int64 persistentSequenceNr = 3; - optional int64 deliverySequenceNr = 4; - optional string channel = 5; -} - -message DeliverMessage { - optional PersistentMessage persistent = 1; - optional string destination = 2; -} - message 
AtLeastOnceDeliverySnapshot { message UnconfirmedDelivery { required int64 deliveryId = 1; diff --git a/akka-persistence/src/main/resources/reference.conf b/akka-persistence/src/main/resources/reference.conf index 9de5404a3e..5a287874dc 100644 --- a/akka-persistence/src/main/resources/reference.conf +++ b/akka-persistence/src/main/resources/reference.conf @@ -29,9 +29,6 @@ akka { # Maximum size of a persistent message batch written to the journal. max-message-batch-size = 200 - # Maximum size of a confirmation batch written to the journal. - max-confirmation-batch-size = 10000 - # Maximum size of a deletion batch written to the journal. max-deletion-batch-size = 10000 diff --git a/akka-persistence/src/main/scala/akka/persistence/AtLeastOnceDelivery.scala b/akka-persistence/src/main/scala/akka/persistence/AtLeastOnceDelivery.scala index 95ec317e8a..8e4878d2c1 100644 --- a/akka-persistence/src/main/scala/akka/persistence/AtLeastOnceDelivery.scala +++ b/akka-persistence/src/main/scala/akka/persistence/AtLeastOnceDelivery.scala @@ -103,9 +103,7 @@ object AtLeastOnceDelivery { * serialization mechanism. It is easiest to include the bytes of the `AtLeastOnceDeliverySnapshot` * as a blob in your custom snapshot. 
*/ -trait AtLeastOnceDelivery extends Processor { - // FIXME The reason for extending Processor instead of PersistentActor is - // the class hierarchy for UntypedPersistentActorWithAtLeastOnceDelivery +trait AtLeastOnceDelivery extends Eventsourced { import AtLeastOnceDelivery._ import AtLeastOnceDelivery.Internal._ @@ -302,9 +300,9 @@ trait AtLeastOnceDelivery extends Processor { super.aroundPostStop() } - override private[persistence] def onReplaySuccess(receive: Receive, awaitReplay: Boolean): Unit = { - super.onReplaySuccess(receive, awaitReplay) + override private[persistence] def onReplaySuccess(): Unit = { redeliverOverdue() + super.onReplaySuccess() } /** diff --git a/akka-persistence/src/main/scala/akka/persistence/Channel.scala b/akka-persistence/src/main/scala/akka/persistence/Channel.scala deleted file mode 100644 index 33580a920d..0000000000 --- a/akka-persistence/src/main/scala/akka/persistence/Channel.scala +++ /dev/null @@ -1,384 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package akka.persistence - -import java.lang.{ Iterable ⇒ JIterable } - -import scala.collection.immutable -import scala.collection.JavaConverters._ -import scala.concurrent.duration._ -import scala.language.postfixOps - -import akka.actor._ - -import akka.persistence.serialization.Message -import akka.persistence.JournalProtocol._ - -// TODO: remove Channel - -/** - * A [[Channel]] configuration object. - * - * @param redeliverMax Maximum number of redelivery attempts. - * @param redeliverInterval Interval between redelivery attempts. - * @param redeliverFailureListener Receiver of [[RedeliverFailure]] notifications which are sent when the number - * of redeliveries reaches `redeliverMax` for a sequence of messages. To enforce - * a redelivery of these messages, the listener has to restart the sending processor. - * Alternatively, it can also confirm these messages, preventing further redeliveries. 
- */ -@SerialVersionUID(1L) -@deprecated("Channel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -final case class ChannelSettings( - val redeliverMax: Int = 5, - val redeliverInterval: FiniteDuration = 5.seconds, - val redeliverFailureListener: Option[ActorRef] = None) { - - /** - * Java API. - */ - def withRedeliverMax(redeliverMax: Int): ChannelSettings = - copy(redeliverMax = redeliverMax) - - /** - * Java API. - */ - def withRedeliverInterval(redeliverInterval: FiniteDuration): ChannelSettings = - copy(redeliverInterval = redeliverInterval) - - /** - * Java API. - */ - def withRedeliverFailureListener(redeliverFailureListener: ActorRef): ChannelSettings = - copy(redeliverFailureListener = Option(redeliverFailureListener)) -} - -@deprecated("Channel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -object ChannelSettings { - /** - * Java API. - */ - def create() = ChannelSettings.apply() -} - -/** - * A channel is used by [[Processor]]s (and [[View]]s) for sending [[Persistent]] messages to destinations. - * The main responsibility of a channel is to prevent redundant delivery of replayed messages to destinations - * when a processor is recovered. - * - * A channel is instructed to deliver a persistent message to a destination with the [[Deliver]] command. A - * destination is provided as `ActorPath` and messages are sent via that path's `ActorSelection`. - * - * {{{ - * class ForwardExample extends Processor { - * val destination = context.actorOf(Props[MyDestination]) - * val channel = context.actorOf(Channel.props(), "myChannel") - * - * def receive = { - * case m @ Persistent(payload, _) => - * // forward modified message to destination - * channel forward Deliver(m.withPayload(s"fw: ${payload}"), destination.path) - * } - * } - * }}} - * - * To reply to the sender of a persistent message, the `sender` reference should be used as channel - * destination. 
- * - * {{{ - * class ReplyExample extends Processor { - * val channel = context.actorOf(Channel.props(), "myChannel") - * - * def receive = { - * case m @ Persistent(payload, _) => - * // reply modified message to sender - * channel ! Deliver(m.withPayload(s"re: ${payload}"), sender.path) - * } - * } - * }}} - * - * Redundant delivery of messages to destinations is only prevented if the receipt of these messages - * is explicitly confirmed. Therefore, persistent messages that are delivered via a channel are of type - * [[ConfirmablePersistent]]. Their receipt can be confirmed by a destination by calling the `confirm()` - * method on these messages. - * - * {{{ - * class MyDestination extends Actor { - * def receive = { - * case cp @ ConfirmablePersistent(payload, sequenceNr, redeliveries) => cp.confirm() - * } - * } - * }}} - * - * If a destination does not confirm the receipt of a `ConfirmablePersistent` message, it will be redelivered - * by the channel according to the parameters in [[ChannelSettings]]. Redelivered messages have a `redeliveries` - * value greater than zero. - * - * If the maximum number of redeliveries is reached for certain messages, they are removed from the channel and - * a `redeliverFailureListener` (if specified, see [[ChannelSettings]]) is notified about these messages with a - * [[RedeliverFailure]] message. Besides other application-specific tasks, this listener can restart the sending - * processor to enforce a redelivery of these messages or confirm these messages to prevent further redeliveries. 
- * - * @see [[Deliver]] - */ -@deprecated("Channel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -final class Channel private[akka] (_channelId: Option[String], channelSettings: ChannelSettings) extends Actor { - import channelSettings._ - - private val id = _channelId match { - case Some(cid) ⇒ cid - case None ⇒ Persistence(context.system).channelId(self) - } - - private val journal = Persistence(context.system).confirmationBatchingJournalForChannel(id) - private val delivery = context.actorOf(Props(classOf[ReliableDelivery], channelSettings)) - - def receive = { - case d @ Deliver(persistent: PersistentRepr, _) ⇒ - if (!persistent.confirms.contains(id)) delivery forward d.copy(prepareDelivery(persistent)) - case d: RedeliverFailure ⇒ redeliverFailureListener.foreach(_ ! d) - case d: Delivered ⇒ delivery forward d - } - - private def prepareDelivery(persistent: PersistentRepr): PersistentRepr = - ConfirmablePersistentImpl(persistent, - confirmTarget = journal, - confirmMessage = DeliveredByChannel(persistent.persistenceId, id, persistent.sequenceNr, channel = self)) -} - -@deprecated("Channel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -object Channel { - /** - * Returns a channel actor configuration object for creating a [[Channel]] with a - * generated id and default [[ChannelSettings]]. - */ - def props(): Props = - props(ChannelSettings()) - - /** - * Returns a channel actor configuration object for creating a [[Channel]] with a - * generated id and specified `channelSettings`. - * - * @param channelSettings channel configuration object. - */ - def props(channelSettings: ChannelSettings): Props = - Props(classOf[Channel], None, channelSettings) - - /** - * Returns a channel actor configuration object for creating a [[Channel]] with the - * specified id and default [[ChannelSettings]]. - * - * @param channelId channel id. 
- */ - def props(channelId: String): Props = - props(channelId, ChannelSettings()) - - /** - * Returns a channel actor configuration object for creating a [[Channel]] with the - * specified id and specified `channelSettings`. - * - * @param channelId channel id. - * @param channelSettings channel configuration object. - */ - def props(channelId: String, channelSettings: ChannelSettings): Props = - Props(classOf[Channel], Some(channelId), channelSettings) -} - -/** - * Instructs a [[Channel]] or [[PersistentChannel]] to deliver a `persistent` message to - * a `destination`. - * - * @param persistent persistent message. - * @param destination persistent message destination. - */ -@SerialVersionUID(1L) -@deprecated("Channel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -final case class Deliver(persistent: Persistent, destination: ActorPath) extends Message - -@deprecated("Channel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -object Deliver { - /** - * Java API. - */ - def create(persistent: Persistent, destination: ActorPath) = Deliver(persistent, destination) -} - -/** - * Plugin API: confirmation message generated by receivers of [[ConfirmablePersistent]] messages - * by calling `ConfirmablePersistent.confirm()`. - */ -@deprecated("Channel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -trait Delivered extends Message { - def channelId: String - def persistentSequenceNr: Long - def deliverySequenceNr: Long - def channel: ActorRef - - /** - * INTERNAL API. - */ - private[persistence] def update(deliverySequenceNr: Long = deliverySequenceNr, channel: ActorRef = channel): Delivered -} - -/** - * Plugin API. 
- */ -@deprecated("Channel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -final case class DeliveredByChannel( - @deprecatedName('processorId) persistenceId: String, - channelId: String, - persistentSequenceNr: Long, - deliverySequenceNr: Long = 0L, - channel: ActorRef = null) extends Delivered with PersistentConfirmation { - - def sequenceNr: Long = persistentSequenceNr - def update(deliverySequenceNr: Long, channel: ActorRef): DeliveredByChannel = - copy(deliverySequenceNr = deliverySequenceNr, channel = channel) -} - -/** - * INTERNAL API. - */ -@deprecated("Channel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -private[persistence] class DeliveredByChannelBatching(journal: ActorRef, settings: PersistenceSettings) extends Actor { - private val publish = settings.internal.publishConfirmations - private val batchMax = settings.journal.maxConfirmationBatchSize - - private var batching = false - private var batch = Vector.empty[DeliveredByChannel] - - def receive = { - case WriteConfirmationsSuccess(confirmations) ⇒ - if (batch.isEmpty) batching = false else journalBatch() - confirmations.foreach { c ⇒ - val dbc = c.asInstanceOf[DeliveredByChannel] - if (dbc.channel != null) dbc.channel ! c - if (publish) context.system.eventStream.publish(c) - } - case WriteConfirmationsFailure(_) ⇒ - if (batch.isEmpty) batching = false else journalBatch() - case d: DeliveredByChannel ⇒ - addToBatch(d) - if (!batching || maxBatchSizeReached) journalBatch() - case m ⇒ journal forward m - } - - def addToBatch(pc: DeliveredByChannel): Unit = - batch = batch :+ pc - - def maxBatchSizeReached: Boolean = - batch.length >= batchMax - - def journalBatch(): Unit = { - journal ! WriteConfirmations(batch, self) - batch = Vector.empty - batching = true - } -} - -/** - * Notification message to inform channel listeners about messages that have reached the maximum - * number of redeliveries. 
- */ -@deprecated("Channel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -final case class RedeliverFailure(messages: immutable.Seq[ConfirmablePersistent]) { - /** - * Java API. - */ - def getMessages: JIterable[ConfirmablePersistent] = messages.asJava -} - -/** - * Reliably deliver messages contained in [[Deliver]] requests to their destinations. Unconfirmed - * messages are redelivered according to the parameters in [[ChannelSettings]]. - */ -@deprecated("Channel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -private class ReliableDelivery(redeliverSettings: ChannelSettings) extends Actor { - import redeliverSettings._ - import ReliableDelivery._ - - private val redelivery = context.actorOf(Props(classOf[Redelivery], redeliverSettings)) - private var deliveryAttempts: DeliveryAttempts = immutable.SortedMap.empty - private var deliverySequenceNr: Long = 0L - - def receive = { - case d @ Deliver(persistent: ConfirmablePersistentImpl, destination) ⇒ - val dsnr = nextDeliverySequenceNr() - val psnr = persistent.sequenceNr - val confirm = persistent.confirmMessage.update(deliverySequenceNr = dsnr) - val updated = persistent.update(confirmMessage = confirm, sequenceNr = if (psnr == 0) dsnr else psnr) - context.actorSelection(destination).tell(updated, sender()) - deliveryAttempts += (dsnr -> DeliveryAttempt(updated, destination, sender())) - case d: Delivered ⇒ - deliveryAttempts -= d.deliverySequenceNr - redelivery forward d - case Redeliver ⇒ - val limit = System.nanoTime - redeliverInterval.toNanos - val (older, younger) = deliveryAttempts.span { case (_, a) ⇒ a.timestamp < limit } - redelivery ! 
Redeliver(older, redeliverMax) - deliveryAttempts = younger - } - - private def nextDeliverySequenceNr(): Long = { - deliverySequenceNr += 1 - deliverySequenceNr - } -} - -@deprecated("Channel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -private object ReliableDelivery { - type DeliveryAttempts = immutable.SortedMap[Long, DeliveryAttempt] - type FailedAttempts = Vector[ConfirmablePersistentImpl] - - final case class DeliveryAttempt(persistent: ConfirmablePersistentImpl, destination: ActorPath, sender: ActorRef, timestamp: Long = System.nanoTime) { - def incrementRedeliveryCount = - copy(persistent.update(redeliveries = persistent.redeliveries + 1)) - } - - final case class Redeliver(attempts: DeliveryAttempts, redeliveryMax: Int) -} - -/** - * Redelivery process used by [[ReliableDelivery]]. - */ -@deprecated("Channel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -private class Redelivery(redeliverSettings: ChannelSettings) extends Actor { - import context.dispatcher - import redeliverSettings._ - import ReliableDelivery._ - - private var redeliveryAttempts: DeliveryAttempts = immutable.SortedMap.empty - private var redeliverySchedule: Cancellable = _ - - def receive = { - case Redeliver(as, max) ⇒ - val (attempts, failed) = (redeliveryAttempts ++ as).foldLeft[(DeliveryAttempts, FailedAttempts)]((immutable.SortedMap.empty, Vector.empty)) { - case ((attempts, failed), (k, attempt)) ⇒ - val persistent = attempt.persistent - if (persistent.redeliveries >= redeliverMax) { - (attempts, failed :+ persistent) - } else { - val updated = attempt.incrementRedeliveryCount - context.actorSelection(updated.destination).tell(updated.persistent, updated.sender) - (attempts.updated(k, updated), failed) - - } - } - redeliveryAttempts = attempts - scheduleRedelivery() - failed.headOption.foreach(_.confirmMessage.channel ! 
RedeliverFailure(failed)) - case c: Delivered ⇒ - redeliveryAttempts -= c.deliverySequenceNr - } - - override def preStart(): Unit = - scheduleRedelivery() - - override def postStop(): Unit = - redeliverySchedule.cancel() - - private def scheduleRedelivery(): Unit = - redeliverySchedule = context.system.scheduler.scheduleOnce(redeliverInterval, context.parent, Redeliver) -} - diff --git a/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala b/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala index 9eeaf02a3b..aa2ac03956 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Eventsourced.scala @@ -4,209 +4,239 @@ package akka.persistence -import java.lang.{ Iterable ⇒ JIterable } - -import akka.actor.{ AbstractActor, UntypedActor } -import akka.japi.{ Procedure, Util } -import akka.persistence.JournalProtocol._ - +import java.util.concurrent.atomic.AtomicInteger import scala.collection.immutable +import scala.util.control.NonFatal +import akka.actor.ActorCell +import akka.actor.ActorKilledException +import akka.actor.ActorLogging +import akka.actor.Stash +import akka.actor.StashFactory +import akka.dispatch.Envelope /** - * INTERNAL API. - * - * Event sourcing mixin for a [[Processor]]. + * INTERNAL API */ -private[persistence] trait Eventsourced extends ProcessorImpl { - // TODO consolidate these traits as PersistentActor #15230 - - /** - * Processor recovery state. 
Waits for recovery completion and then changes to - * `processingCommands` - */ - private val recovering: State = new State { - // cache the recoveryBehavior since it's a def for binary compatibility in 2.3.x - private val _recoveryBehavior: Receive = recoveryBehavior - - override def toString: String = "recovering" - - def aroundReceive(receive: Receive, message: Any) { - // Since we are recovering we can ignore the receive behavior from the stack - Eventsourced.super.aroundReceive(_recoveryBehavior, message) - message match { - case _: ReadHighestSequenceNrSuccess | _: ReadHighestSequenceNrFailure ⇒ - currentState = processingCommands - case _ ⇒ - } - } - } - - /** - * Command processing state. If event persistence is pending after processing a - * command, event persistence is triggered and state changes to `persistingEvents`. - * - * There's no need to loop commands though the journal any more i.e. they can now be - * directly offered as `LoopSuccess` to the state machine implemented by `Processor`. 
- */ - private val processingCommands: State = new State { - override def toString: String = "processing commands" - - def aroundReceive(receive: Receive, message: Any) = message match { - case _: ConfirmablePersistent ⇒ - doAroundReceive(receive, message) - case PersistentBatch(b) ⇒ - throw new UnsupportedOperationException("Persistent command batches not supported") - case _: PersistentRepr ⇒ - throw new UnsupportedOperationException("Persistent commands not supported") - case WriteMessageSuccess(p, id) ⇒ - // instanceId mismatch can happen for persistAsync and defer in case of actor restart - // while message is in flight, in that case we ignore the call to the handler - if (id == instanceId) { - withCurrentPersistent(p)(p ⇒ pendingInvocations.peek().handler(p.payload)) - onWriteComplete() - } - case LoopMessageSuccess(l, id) ⇒ - // instanceId mismatch can happen for persistAsync and defer in case of actor restart - // while message is in flight, in that case we ignore the call to the handler - if (id == instanceId) { - pendingInvocations.peek().handler(l) - onWriteComplete() - } - case s @ WriteMessagesSuccessful ⇒ Eventsourced.super.aroundReceive(receive, s) - case f: WriteMessagesFailed ⇒ Eventsourced.super.aroundReceive(receive, f) - case _ ⇒ - doAroundReceive(receive, message) - } - - private def doAroundReceive(receive: Receive, message: Any): Unit = { - Eventsourced.super.aroundReceive(receive, LoopMessageSuccess(message, instanceId)) - - if (pendingStashingPersistInvocations > 0) { - currentState = persistingEvents - } - - if (resequenceableEventBatch.nonEmpty) flushBatch() - else processorStash.unstash() - } - - private def onWriteComplete(): Unit = { - pendingInvocations.pop() - } - } - - /** - * Event persisting state. Remains until pending events are persisted and then changes - * state to `processingCommands`. Only events to be persisted are processed. All other - * messages are stashed internally. 
- */ - private val persistingEvents: State = new State { - override def toString: String = "persisting events" - - def aroundReceive(receive: Receive, message: Any): Unit = message match { - case _: ConfirmablePersistent ⇒ - processorStash.stash() - case PersistentBatch(b) ⇒ - b foreach { - case p: PersistentRepr ⇒ deleteMessage(p.sequenceNr, permanent = true) - case r ⇒ // ignore, nothing to delete (was not a persistent message) - } - throw new UnsupportedOperationException("Persistent command batches not supported") - case p: PersistentRepr ⇒ - deleteMessage(p.sequenceNr, permanent = true) - throw new UnsupportedOperationException("Persistent commands not supported") - - case WriteMessageSuccess(p, id) ⇒ - // instanceId mismatch can happen for persistAsync and defer in case of actor restart - // while message is in flight, in that case we ignore the call to the handler - if (id == instanceId) { - withCurrentPersistent(p)(p ⇒ pendingInvocations.peek().handler(p.payload)) - onWriteComplete() - } - - case e @ WriteMessageFailure(p, _, id) ⇒ - Eventsourced.super.aroundReceive(receive, message) // stops actor by default - // instanceId mismatch can happen for persistAsync and defer in case of actor restart - // while message is in flight, in that case the handler has already been discarded - if (id == instanceId) - onWriteComplete() - case LoopMessageSuccess(l, id) ⇒ - if (id == instanceId) { - pendingInvocations.peek().handler(l) - onWriteComplete() - } - case s @ WriteMessagesSuccessful ⇒ Eventsourced.super.aroundReceive(receive, s) - case f: WriteMessagesFailed ⇒ Eventsourced.super.aroundReceive(receive, f) - case other ⇒ processorStash.stash() - } - - private def onWriteComplete(): Unit = { - pendingInvocations.pop() match { - case _: StashingHandlerInvocation ⇒ - // enables an early return to `processingCommands`, because if this counter hits `0`, - // we know the remaining pendingInvocations are all `persistAsync` created, which - // means we can go back to 
 processing commands also - and these callbacks will be called as soon as possible - pendingStashingPersistInvocations -= 1 - case _ ⇒ // do nothing - } - - if (pendingStashingPersistInvocations == 0) { - currentState = processingCommands - processorStash.unstash() - } - } - - } - - /** - * INTERNAL API. - * - * This is a def and not a val because of binary compatibility in 2.3.x. - * It is cached where it is used. - */ - private def recoveryBehavior: Receive = { - case Persistent(payload, _) if recoveryRunning && receiveRecover.isDefinedAt(payload) ⇒ - receiveRecover(payload) - case s: SnapshotOffer if receiveRecover.isDefinedAt(s) ⇒ - receiveRecover(s) - case f: RecoveryFailure if receiveRecover.isDefinedAt(f) ⇒ - receiveRecover(f) - case RecoveryCompleted if receiveRecover.isDefinedAt(RecoveryCompleted) ⇒ - receiveRecover(RecoveryCompleted) - } +private[persistence] object Eventsourced { + // ok to wrap around (2*Int.MaxValue restarts will not happen within a journal roundtrip) + private val instanceIdCounter = new AtomicInteger(1) private sealed trait PendingHandlerInvocation { def evt: Any def handler: Any ⇒ Unit } - /** forces processor to stash incoming commands untill all these invocations are handled */ + /** forces actor to stash incoming commands until all these invocations are handled */ private final case class StashingHandlerInvocation(evt: Any, handler: Any ⇒ Unit) extends PendingHandlerInvocation - /** does not force the processor to stash commands; Originates from either `persistAsync` or `defer` calls */ + /** does not force the actor to stash commands; Originates from either `persistAsync` or `defer` calls */ private final case class AsyncHandlerInvocation(evt: Any, handler: Any ⇒ Unit) extends PendingHandlerInvocation +} - /** Used instead of iterating `pendingInvocations` in order to check if safe to revert to processing commands */ +/** + * INTERNAL API. 
+ * + * Scala API and implementation details of [[PersistentActor]], [[AbstractPersistentActor]] and + * [[UntypedPersistentActor]]. + */ +private[persistence] trait Eventsourced extends Snapshotter with Stash with StashFactory { + import JournalProtocol._ + import SnapshotProtocol.LoadSnapshotResult + import Eventsourced._ + + private val extension = Persistence(context.system) + private lazy val journal = extension.journalFor(persistenceId) + + private val instanceId: Int = Eventsourced.instanceIdCounter.getAndIncrement() + + // FIXME useJournalBatching + // I have a feeling that this var can be eliminated, either by just removing the functionality or by + // checking pendingStashingPersistInvocations > 0 in doAroundReceive. + // + //On the first suggestion: when a write is currently pending, how much do we gain in latency + // by submitting the persist writes immediately instead of waiting until the acknowledgement + // comes in? The other thought is that sync and async persistence will rarely be mixed within + //the same Actor, in which case this flag actually does nothing (unless I am missing something). 
+ + private var journalBatch = Vector.empty[PersistentEnvelope] + private val maxMessageBatchSize = extension.settings.journal.maxMessageBatchSize + private var writeInProgress = false + private var sequenceNr: Long = 0L + + private var _lastSequenceNr: Long = 0L + + private var currentState: State = recoveryPending + + // Used instead of iterating `pendingInvocations` in order to check if safe to revert to processing commands private var pendingStashingPersistInvocations: Long = 0 - /** Holds user-supplied callbacks for persist/persistAsync calls */ + // Holds user-supplied callbacks for persist/persistAsync calls private val pendingInvocations = new java.util.LinkedList[PendingHandlerInvocation]() // we only append / isEmpty / get(0) on it - private var resequenceableEventBatch: List[Resequenceable] = Nil - // When using only `persistAsync` and `defer` max throughput is increased by using the - // batching implemented in `Processor`, but when using `persist` we want to use the atomic - // PeristentBatch for the emitted events. This implementation can be improved when - // Processor and Eventsourced are consolidated into one class - private var useProcessorBatching: Boolean = true + private var eventBatch: List[PersistentEnvelope] = Nil - private var currentState: State = recovering - private val processorStash = createStash() + private val internalStash = createStash() - private def flushBatch() { - if (useProcessorBatching) - resequenceableEventBatch.reverse foreach { Eventsourced.super.aroundReceive(receive, _) } - else - Eventsourced.super.aroundReceive(receive, PersistentBatch(resequenceableEventBatch.reverse)) - - resequenceableEventBatch = Nil - useProcessorBatching = true + private val unstashFilterPredicate: Any ⇒ Boolean = { + case _: WriteMessageSuccess ⇒ false + case _: ReplayedMessage ⇒ false + case _ ⇒ true } + /** + * Id of the persistent entity for which messages should be replayed. 
+ */ + def persistenceId: String + + /** + * Returns `persistenceId`. + */ + override def snapshotterId: String = persistenceId + + /** + * Highest received sequence number so far or `0L` if this actor hasn't replayed + * or stored any persistent events yet. + */ + def lastSequenceNr: Long = _lastSequenceNr + + /** + * Returns `lastSequenceNr`. + */ + def snapshotSequenceNr: Long = lastSequenceNr + + /** + * INTERNAL API. + * Called whenever a message replay succeeds. + * May be implemented by subclass. + */ + private[persistence] def onReplaySuccess(): Unit = () + + /** + * INTERNAL API. + * Called whenever a message replay fails. + * May be implemented by subclass. + * @param cause failure cause. + */ + private[persistence] def onReplayFailure(cause: Throwable): Unit = () + + /** + * User-overridable callback. Called when a persistent actor is started. Default implementation sends + * a `Recover()` to `self`. + */ + @throws(classOf[Exception]) + override def preStart(): Unit = + self ! Recover() + + /** + * INTERNAL API. + */ + override protected[akka] def aroundReceive(receive: Receive, message: Any): Unit = + currentState.stateReceive(receive, message) + + /** + * INTERNAL API. + */ + override protected[akka] def aroundPreRestart(reason: Throwable, message: Option[Any]): Unit = { + try { + internalStash.unstashAll() + unstashAll(unstashFilterPredicate) + } finally { + message match { + case Some(WriteMessageSuccess(m, _)) ⇒ + flushJournalBatch() + super.aroundPreRestart(reason, Some(m)) + case Some(LoopMessageSuccess(m, _)) ⇒ + flushJournalBatch() + super.aroundPreRestart(reason, Some(m)) + case Some(ReplayedMessage(m)) ⇒ + flushJournalBatch() + super.aroundPreRestart(reason, Some(m)) + case mo ⇒ + flushJournalBatch() + super.aroundPreRestart(reason, None) + } + } + } + + /** + * User-overridable callback. Called before a persistent actor is restarted. 
 Default implementation sends + a `Recover(lastSequenceNr)` message to `self` if `message` is defined, `Recover()` otherwise. + */ + override def preRestart(reason: Throwable, message: Option[Any]): Unit = { + super.preRestart(reason, message) + message match { + case Some(_) ⇒ self ! Recover(toSequenceNr = lastSequenceNr) + case None ⇒ self ! Recover() + } + } + + /** + * INTERNAL API. + */ + override protected[akka] def aroundPostStop(): Unit = + try { + internalStash.unstashAll() + unstashAll(unstashFilterPredicate) + } finally super.aroundPostStop() + + override def unhandled(message: Any): Unit = { + message match { + case RecoveryCompleted ⇒ // mute + case RecoveryFailure(cause) ⇒ + val errorMsg = s"PersistentActor killed after recovery failure (persisten id = [${persistenceId}]). " + + "To avoid killing persistent actors on recovery failure, a PersistentActor must handle RecoveryFailure messages. " + + "RecoveryFailure was caused by: " + cause + throw new ActorKilledException(errorMsg) + case PersistenceFailure(payload, sequenceNumber, cause) ⇒ + val errorMsg = "PersistentActor killed after persistence failure " + + s"(persistent id = [${persistenceId}], sequence nr = [${sequenceNumber}], payload class = [${payload.getClass.getName}]). " + + "To avoid killing persistent actors on persistence failure, a PersistentActor must handle PersistenceFailure messages. " + + "PersistenceFailure was caused by: " + cause + throw new ActorKilledException(errorMsg) + case m ⇒ super.unhandled(m) + } + } + + private def changeState(state: State): Unit = { + currentState = state + } + + private def updateLastSequenceNr(persistent: PersistentRepr): Unit = + if (persistent.sequenceNr > _lastSequenceNr) _lastSequenceNr = persistent.sequenceNr + + private def setLastSequenceNr(value: Long): Unit = + _lastSequenceNr = value + + private def nextSequenceNr(): Long = { + sequenceNr += 1L + sequenceNr + } + + private def flushJournalBatch(): Unit = { + journal ! 
WriteMessages(journalBatch, self, instanceId) + journalBatch = Vector.empty + writeInProgress = true + } + + /** + * Recovery handler that receives persisted events during recovery. If a state snapshot + * has been captured and saved, this handler will receive a [[SnapshotOffer]] message + * followed by events that are younger than the offered snapshot. + * + * This handler must not have side-effects other than changing persistent actor state i.e. it + * should not perform actions that may fail, such as interacting with external services, + * for example. + * + * If recovery fails, the actor will be stopped. This can be customized by + * handling [[RecoveryFailure]]. + * + * @see [[Recover]] + */ + def receiveRecover: Receive + + /** + * Command handler. Typically validates commands against current state (and/or by + * communication with other actors). On successful validation, one or more events are + * derived from a command and these events are then persisted by calling `persist`. + */ + def receiveCommand: Receive + /** * Asynchronously persists `event`. On successful persistence, `handler` is called with the * persisted event. 
It is guaranteed that no new commands will be received by a persistent actor @@ -231,8 +261,7 @@ private[persistence] trait Eventsourced extends ProcessorImpl { final def persist[A](event: A)(handler: A ⇒ Unit): Unit = { pendingStashingPersistInvocations += 1 pendingInvocations addLast StashingHandlerInvocation(event, handler.asInstanceOf[Any ⇒ Unit]) - resequenceableEventBatch = PersistentRepr(event) :: resequenceableEventBatch - useProcessorBatching = false + eventBatch = PersistentRepr(event) :: eventBatch } /** @@ -267,7 +296,7 @@ private[persistence] trait Eventsourced extends ProcessorImpl { */ final def persistAsync[A](event: A)(handler: A ⇒ Unit): Unit = { pendingInvocations addLast AsyncHandlerInvocation(event, handler.asInstanceOf[Any ⇒ Unit]) - resequenceableEventBatch = PersistentRepr(event) :: resequenceableEventBatch + eventBatch = PersistentRepr(event) :: eventBatch } /** @@ -305,7 +334,7 @@ private[persistence] trait Eventsourced extends ProcessorImpl { handler(event) } else { pendingInvocations addLast AsyncHandlerInvocation(event, handler.asInstanceOf[Any ⇒ Unit]) - resequenceableEventBatch = NonPersistentRepr(event, sender()) :: resequenceableEventBatch + eventBatch = NonPersistentRepr(event, sender()) :: eventBatch } } @@ -332,386 +361,323 @@ private[persistence] trait Eventsourced extends ProcessorImpl { events.foreach(defer(_)(handler)) /** - * Recovery handler that receives persisted events during recovery. If a state snapshot - * has been captured and saved, this handler will receive a [[SnapshotOffer]] message - * followed by events that are younger than the offered snapshot. + * Permanently deletes all persistent messages with sequence numbers less than or equal `toSequenceNr`. * - * This handler must not have side-effects other than changing persistent actor state i.e. it - * should not perform actions that may fail, such as interacting with external services, - * for example. - * - * If recovery fails, the actor will be stopped. 
This can be customized by - * handling [[RecoveryFailure]]. - * - * @see [[Recover]] + * @param toSequenceNr upper sequence number bound of persistent messages to be deleted. */ - def receiveRecover: Receive + def deleteMessages(toSequenceNr: Long): Unit = { + deleteMessages(toSequenceNr, permanent = true) + } /** - * Command handler. Typically validates commands against current state (and/or by - * communication with other actors). On successful validation, one or more events are - * derived from a command and these events are then persisted by calling `persist`. - * Commands sent to event sourced persistent actors should not be [[Persistent]] messages. + * Deletes all persistent messages with sequence numbers less than or equal `toSequenceNr`. If `permanent` + * is set to `false`, the persistent messages are marked as deleted in the journal, otherwise + * they permanently deleted from the journal. + * + * @param toSequenceNr upper sequence number bound of persistent messages to be deleted. + * @param permanent if `false`, the message is marked as deleted, otherwise it is permanently deleted. */ - def receiveCommand: Receive + def deleteMessages(toSequenceNr: Long, permanent: Boolean): Unit = { + journal ! DeleteMessagesTo(persistenceId, toSequenceNr, permanent) + } + + /** + * Returns `true` if this persistent actor is currently recovering. + */ + def recoveryRunning: Boolean = currentState.recoveryRunning + + /** + * Returns `true` if this persistent actor has successfully finished recovery. + */ + def recoveryFinished: Boolean = !recoveryRunning override def unstashAll() { // Internally, all messages are processed by unstashing them from // the internal stash one-by-one. Hence, an unstashAll() from the // user stash must be prepended to the internal stash. 
- processorStash.prepend(clearStash()) + internalStash.prepend(clearStash()) + } + + private trait State { + def stateReceive(receive: Receive, message: Any): Unit + def recoveryRunning: Boolean } /** - * INTERNAL API. + * Initial state, waits for `Recover` request, and then submits a `LoadSnapshot` request to the snapshot + * store and changes to `recoveryStarted` state. All incoming messages except `Recover` are stashed. */ - override protected[akka] def aroundReceive(receive: Receive, message: Any) { - currentState.aroundReceive(receive, message) + private def recoveryPending = new State { + override def toString: String = "recovery pending" + override def recoveryRunning: Boolean = true + + override def stateReceive(receive: Receive, message: Any): Unit = message match { + case Recover(fromSnap, toSnr, replayMax) ⇒ + changeState(recoveryStarted(replayMax)) + loadSnapshot(snapshotterId, fromSnap, toSnr) + case _ ⇒ internalStash.stash() + } } /** - * INTERNAL API. + * Processes a loaded snapshot, if any. A loaded snapshot is offered with a `SnapshotOffer` + * message to the actor's `receiveRecover`. Then initiates a message replay, either starting + * from the loaded snapshot or from scratch, and switches to `replayStarted` state. + * All incoming messages are stashed. + * + * @param replayMax maximum number of messages to replay. 
*/ - override protected[akka] def aroundPreRestart(reason: Throwable, message: Option[Any]): Unit = { - // flushJournalBatch will send outstanding persistAsync and defer events to the journal - // and also prevent those to be unstashed in Processor.aroundPreRestart - flushJournalBatch() - super.aroundPreRestart(reason, message) + private def recoveryStarted(replayMax: Long) = new State { + + private val recoveryBehavior: Receive = { + val _receiveRecover = receiveRecover + + { + case PersistentRepr(payload, _) if recoveryRunning && _receiveRecover.isDefinedAt(payload) ⇒ + _receiveRecover(payload) + case s: SnapshotOffer if _receiveRecover.isDefinedAt(s) ⇒ + _receiveRecover(s) + case f: RecoveryFailure if _receiveRecover.isDefinedAt(f) ⇒ + _receiveRecover(f) + case RecoveryCompleted if _receiveRecover.isDefinedAt(RecoveryCompleted) ⇒ + _receiveRecover(RecoveryCompleted) + } + } + + override def toString: String = s"recovery started (replayMax = [${replayMax}])" + override def recoveryRunning: Boolean = true + + override def stateReceive(receive: Receive, message: Any) = message match { + case r: Recover ⇒ // ignore + case LoadSnapshotResult(sso, toSnr) ⇒ + sso.foreach { + case SelectedSnapshot(metadata, snapshot) ⇒ + setLastSequenceNr(metadata.sequenceNr) + // Since we are recovering we can ignore the receive behavior from the stack + Eventsourced.super.aroundReceive(recoveryBehavior, SnapshotOffer(metadata, snapshot)) + } + changeState(replayStarted(recoveryBehavior)) + journal ! ReplayMessages(lastSequenceNr + 1L, toSnr, replayMax, persistenceId, self) + case other ⇒ internalStash.stash() + } } /** - * Calls `super.preRestart` then unstashes all messages from the internal stash. + * Processes replayed messages, if any. The actor's `receiveRecover` is invoked with the replayed + * events. + * + * If replay succeeds it switches to `initializing` state and requests the highest stored sequence + * number from the journal. Otherwise RecoveryFailure is emitted. 
+ * If replay succeeds the `onReplaySuccess` callback method is called, otherwise `onReplayFailure`. + * + * If processing of a replayed event fails, the exception is caught and + * stored for being thrown later and state is changed to `recoveryFailed`. + * + * All incoming messages are stashed. */ - override def preRestart(reason: Throwable, message: Option[Any]) { - processorStash.unstashAll() - super.preRestart(reason, message) + private def replayStarted(recoveryBehavior: Receive) = new State { + override def toString: String = s"replay started" + override def recoveryRunning: Boolean = true + + override def stateReceive(receive: Receive, message: Any) = message match { + case r: Recover ⇒ // ignore + case ReplayedMessage(p) ⇒ + try { + updateLastSequenceNr(p) + Eventsourced.super.aroundReceive(recoveryBehavior, p) + } catch { + case NonFatal(t) ⇒ + val currentMsg = context.asInstanceOf[ActorCell].currentMessage + changeState(replayFailed(t, currentMsg)) // delay throwing exception to prepareRestart + } + case ReplayMessagesSuccess ⇒ + onReplaySuccess() // callback for subclass implementation + changeState(initializing(recoveryBehavior)) + journal ! ReadHighestSequenceNr(lastSequenceNr, persistenceId, self) + case ReplayMessagesFailure(cause) ⇒ + onReplayFailure(cause) // callback for subclass implementation + // FIXME what happens if RecoveryFailure is handled, i.e. actor is not stopped? + Eventsourced.super.aroundReceive(recoveryBehavior, RecoveryFailure(cause)) + case other ⇒ + internalStash.stash() + } } /** - * Calls `super.postStop` then unstashes all messages from the internal stash. + * Consumes remaining replayed messages and then changes to `prepareRestart`. The + * message that caused the exception during replay, is re-added to the mailbox and + * re-received in `prepareRestart` state. 
*/ - override def postStop() { - processorStash.unstashAll() - super.postStop() + private def replayFailed(cause: Throwable, failureMessage: Envelope) = new State { + + override def toString: String = "replay failed" + override def recoveryRunning: Boolean = true + + override def stateReceive(receive: Receive, message: Any) = message match { + case ReplayMessagesFailure(_) ⇒ + replayCompleted() + // journal couldn't tell the maximum stored sequence number, hence the next + // replay must be a full replay (up to the highest stored sequence number) + // Recover(lastSequenceNr) is sent by preRestart + setLastSequenceNr(Long.MaxValue) + case ReplayMessagesSuccess ⇒ replayCompleted() + case ReplayedMessage(p) ⇒ updateLastSequenceNr(p) + case r: Recover ⇒ // ignore + case _ ⇒ internalStash.stash() + } + + def replayCompleted(): Unit = { + changeState(prepareRestart(cause)) + mailbox.enqueueFirst(self, failureMessage) + } } /** - * INTERNAL API. - * - * Only here for binary compatibility in 2.3.x. + * Re-receives the replayed message that caused an exception and re-throws that exception. */ - protected[persistence] val initialBehavior: Receive = recoveryBehavior orElse { - case msg if receiveCommand.isDefinedAt(msg) ⇒ - receiveCommand(msg) - } -} + private def prepareRestart(cause: Throwable) = new State { + override def toString: String = "prepare restart" + override def recoveryRunning: Boolean = true -/** - * An event sourced processor. - */ -@deprecated("EventsourcedProcessor will be removed in 2.4.x, instead extend the API equivalent `akka.persistence.PersistentProcessor`", since = "2.3.4") -trait EventsourcedProcessor extends Processor with Eventsourced { - // TODO remove Processor #15230 - def receive = receiveCommand -} - -/** - * An persistent Actor - can be used to implement command or event sourcing. 
- */ -trait PersistentActor extends ProcessorImpl with Eventsourced { - def receive = receiveCommand -} - -/** - * Java API: an persistent actor - can be used to implement command or event sourcing. - */ -abstract class UntypedPersistentActor extends UntypedActor with ProcessorImpl with Eventsourced { - - final def onReceive(message: Any) = onReceiveCommand(message) - - final def receiveRecover: Receive = { - case msg ⇒ onReceiveRecover(msg) - } - - final def receiveCommand: Receive = { - case msg ⇒ onReceiveCommand(msg) + override def stateReceive(receive: Receive, message: Any) = message match { + case ReplayedMessage(_) ⇒ throw cause + case _ ⇒ // ignore + } } /** - * Java API: asynchronously persists `event`. On successful persistence, `handler` is called with the - * persisted event. It is guaranteed that no new commands will be received by a persistent actor - * between a call to `persist` and the execution of its `handler`. This also holds for - * multiple `persist` calls per received command. Internally, this is achieved by stashing new - * commands and unstashing them when the `event` has been persisted and handled. The stash used - * for that is an internal stash which doesn't interfere with the inherited user stash. - * - * An event `handler` may close over persistent actor state and modify it. The `getSender()` of a persisted - * event is the sender of the corresponding command. This means that one can reply to a command - * sender within an event `handler`. - * - * Within an event handler, applications usually update persistent actor state using persisted event - * data, notify listeners and reply to command senders. - * - * If persistence of an event fails, the persistent actor will be stopped. This can be customized by - * handling [[PersistenceFailure]] in [[onReceiveCommand]]. - * - * @param event event to be persisted. 
- * @param handler handler for each persisted `event` + * Processes the highest stored sequence number response from the journal and then switches + * to `processingCommands` state. + * All incoming messages are stashed. */ - final def persist[A](event: A, handler: Procedure[A]): Unit = - persist(event)(event ⇒ handler(event)) + private def initializing(recoveryBehavior: Receive) = new State { + override def toString: String = "initializing" + override def recoveryRunning: Boolean = true + + override def stateReceive(receive: Receive, message: Any) = message match { + case ReadHighestSequenceNrSuccess(highest) ⇒ + changeState(processingCommands) + sequenceNr = highest + internalStash.unstashAll() + Eventsourced.super.aroundReceive(recoveryBehavior, RecoveryCompleted) + case ReadHighestSequenceNrFailure(cause) ⇒ + Eventsourced.super.aroundReceive(recoveryBehavior, RecoveryFailure(cause)) + case other ⇒ + internalStash.stash() + } + } /** - * Java API: asynchronously persists `events` in specified order. This is equivalent to calling - * `persist[A](event: A, handler: Procedure[A])` multiple times with the same `handler`, - * except that `events` are persisted atomically with this method. - * - * @param events events to be persisted. 
- * @param handler handler for each persisted `events` + * Common receive handler for processingCommands and persistingEvents */ - final def persist[A](events: JIterable[A], handler: Procedure[A]): Unit = - persist(Util.immutableSeq(events))(event ⇒ handler(event)) + private abstract class ProcessingState extends State { + val common: Receive = { + case WriteMessageSuccess(p, id) ⇒ + // instanceId mismatch can happen for persistAsync and defer in case of actor restart + // while message is in flight, in that case we ignore the call to the handler + if (id == instanceId) { + updateLastSequenceNr(p) + try pendingInvocations.peek().handler(p.payload) finally onWriteMessageComplete() + } + case WriteMessageFailure(p, cause, id) ⇒ + // instanceId mismatch can happen for persistAsync and defer in case of actor restart + // while message is in flight, in that case the handler has already been discarded + if (id == instanceId) { + Eventsourced.super.aroundReceive(receive, PersistenceFailure(p.payload, p.sequenceNr, cause)) // stops actor by default + onWriteMessageComplete() + } + case LoopMessageSuccess(l, id) ⇒ + // instanceId mismatch can happen for persistAsync and defer in case of actor restart + // while message is in flight, in that case we ignore the call to the handler + if (id == instanceId) { + try pendingInvocations.peek().handler(l) finally onWriteMessageComplete() + } + case WriteMessagesSuccessful | WriteMessagesFailed(_) ⇒ + if (journalBatch.isEmpty) writeInProgress = false else flushJournalBatch() + } + + def onWriteMessageComplete(): Unit = + pendingInvocations.pop() + } /** - * JAVA API: asynchronously persists `event`. On successful persistence, `handler` is called with the - * persisted event. - * - * Unlike `persist` the persistent actor will continue to receive incomming commands between the - * call to `persist` and executing it's `handler`. 
This asynchronous, non-stashing, version of - * of persist should be used when you favor throughput over the "command-2 only processed after - * command-1 effects' have been applied" guarantee, which is provided by the plain [[persist]] method. - * - * An event `handler` may close over persistent actor state and modify it. The `sender` of a persisted - * event is the sender of the corresponding command. This means that one can reply to a command - * sender within an event `handler`. - * - * If persistence of an event fails, the persistent actor will be stopped. This can be customized by - * handling [[PersistenceFailure]] in [[receiveCommand]]. - * - * @param event event to be persisted - * @param handler handler for each persisted `event` + * Command processing state. If event persistence is pending after processing a + * command, event persistence is triggered and state changes to `persistingEvents`. */ - final def persistAsync[A](event: A)(handler: Procedure[A]): Unit = - super[Eventsourced].persistAsync(event)(event ⇒ handler(event)) + private val processingCommands: State = new ProcessingState { + override def toString: String = "processing commands" + override def recoveryRunning: Boolean = false + + override def stateReceive(receive: Receive, message: Any) = + if (common.isDefinedAt(message)) common(message) + else doAroundReceive(receive, message) + + private def doAroundReceive(receive: Receive, message: Any): Unit = { + Eventsourced.super.aroundReceive(receive, message) + + if (eventBatch.nonEmpty) flushBatch() + + if (pendingStashingPersistInvocations > 0) changeState(persistingEvents) + else internalStash.unstash() + } + + private def flushBatch() { + // When using only `persistAsync` and `defer` max throughput is increased by using + // batching, but when using `persist` we want to use one atomic WriteMessages + // for the emitted events. 
+ // Flush previously collected events, if any, separately from the `persist` batch + if (pendingStashingPersistInvocations > 0 && journalBatch.nonEmpty) + flushJournalBatch() + + eventBatch.reverse.foreach { p ⇒ + addToBatch(p) + if (!writeInProgress || maxBatchSizeReached) flushJournalBatch() + } + + eventBatch = Nil + } + + private def addToBatch(p: PersistentEnvelope): Unit = p match { + case p: PersistentRepr ⇒ + journalBatch :+= p.update(persistenceId = persistenceId, sequenceNr = nextSequenceNr(), sender = sender()) + case r: PersistentEnvelope ⇒ + journalBatch :+= r + } + + private def maxBatchSizeReached: Boolean = + journalBatch.size >= maxMessageBatchSize + + } /** - * JAVA API: asynchronously persists `events` in specified order. This is equivalent to calling - * `persistAsync[A](event: A)(handler: A => Unit)` multiple times with the same `handler`, - * except that `events` are persisted atomically with this method. - * - * @param events events to be persisted - * @param handler handler for each persisted `events` + * Event persisting state. Remains until pending events are persisted and then changes + * state to `processingCommands`. Only events to be persisted are processed. All other + * messages are stashed internally. */ - final def persistAsync[A](events: JIterable[A])(handler: A ⇒ Unit): Unit = - super[Eventsourced].persistAsync(Util.immutableSeq(events))(event ⇒ handler(event)) + private val persistingEvents: State = new ProcessingState { + override def toString: String = "persisting events" + override def recoveryRunning: Boolean = false - /** - * Defer the handler execution until all pending handlers have been executed. - * Allows to define logic within the actor, which will respect the invocation-order-guarantee - * in respect to `persistAsync` calls. That is, if `persistAsync` was invoked before defer, - * the corresponding handlers will be invoked in the same order as they were registered in. 
- * - * This call will NOT result in `event` being persisted, please use `persist` or `persistAsync`, - * if the given event should possible to replay. - * - * If there are no pending persist handler calls, the handler will be called immediatly. - * - * In the event of persistence failures (indicated by [[PersistenceFailure]] messages being sent to the - * [[PersistentActor]], you can handle these messages, which in turn will enable the deferred handlers to run afterwards. - * If persistence failure messages are left `unhandled`, the default behavior is to stop the Actor, thus the handlers - * will not be run. - * - * @param event event to be handled in the future, when preceeding persist operations have been processes - * @param handler handler for the given `event` - */ - final def defer[A](event: A)(handler: Procedure[A]): Unit = - super[Eventsourced].defer(event)(event ⇒ handler(event)) + override def stateReceive(receive: Receive, message: Any) = + if (common.isDefinedAt(message)) common(message) + else internalStash.stash() - /** - * Defer the handler execution until all pending handlers have been executed. - * Allows to define logic within the actor, which will respect the invocation-order-guarantee - * in respect to `persistAsync` calls. That is, if `persistAsync` was invoked before defer, - * the corresponding handlers will be invoked in the same order as they were registered in. - * - * This call will NOT result in `event` being persisted, please use `persist` or `persistAsync`, - * if the given event should possible to replay. - * - * If there are no pending persist handler calls, the handler will be called immediatly. - * - * In the event of persistence failures (indicated by [[PersistenceFailure]] messages being sent to the - * [[PersistentActor]], you can handle these messages, which in turn will enable the deferred handlers to run afterwards. 
- * If persistence failure messages are left `unhandled`, the default behavior is to stop the Actor, thus the handlers - * will not be run. - * - * @param events event to be handled in the future, when preceeding persist operations have been processes - * @param handler handler for each `event` - */ - final def defer[A](events: JIterable[A])(handler: Procedure[A]): Unit = - super[Eventsourced].defer(Util.immutableSeq(events))(event ⇒ handler(event)) + override def onWriteMessageComplete(): Unit = { + pendingInvocations.pop() match { + case _: StashingHandlerInvocation ⇒ + // enables an early return to `processingCommands`, because if this counter hits `0`, + // we know the remaining pendingInvocations are all `persistAsync` created, which + // means we can go back to processing commands also - and these callbacks will be called as soon as possible + pendingStashingPersistInvocations -= 1 + case _ ⇒ // do nothing + } - /** - * Java API: recovery handler that receives persisted events during recovery. If a state snapshot - * has been captured and saved, this handler will receive a [[SnapshotOffer]] message - * followed by events that are younger than the offered snapshot. - * - * This handler must not have side-effects other than changing persistent actor state i.e. it - * should not perform actions that may fail, such as interacting with external services, - * for example. - * - * If recovery fails, the actor will be stopped. This can be customized by - * handling [[RecoveryFailure]]. - * - * @see [[Recover]] - */ - @throws(classOf[Exception]) - def onReceiveRecover(msg: Any): Unit + if (pendingStashingPersistInvocations == 0) { + changeState(processingCommands) + internalStash.unstash() + } + } - /** - * Java API: command handler. Typically validates commands against current state (and/or by - * communication with other actors). On successful validation, one or more events are - * derived from a command and these events are then persisted by calling `persist`. 
- * Commands sent to event sourced persistent actors must not be [[Persistent]] or - * [[PersistentBatch]] messages. In this case an `UnsupportedOperationException` is - * thrown by the persistent actor. - */ - @throws(classOf[Exception]) - def onReceiveCommand(msg: Any): Unit -} - -/** - * Java API: an persistent actor - can be used to implement command or event sourcing. - */ -abstract class AbstractPersistentActor extends AbstractActor with PersistentActor with Eventsourced { - - /** - * Java API: asynchronously persists `event`. On successful persistence, `handler` is called with the - * persisted event. It is guaranteed that no new commands will be received by a persistent actor - * between a call to `persist` and the execution of its `handler`. This also holds for - * multiple `persist` calls per received command. Internally, this is achieved by stashing new - * commands and unstashing them when the `event` has been persisted and handled. The stash used - * for that is an internal stash which doesn't interfere with the inherited user stash. - * - * An event `handler` may close over persistent actor state and modify it. The `getSender()` of a persisted - * event is the sender of the corresponding command. This means that one can reply to a command - * sender within an event `handler`. - * - * Within an event handler, applications usually update persistent actor state using persisted event - * data, notify listeners and reply to command senders. - * - * If persistence of an event fails, the persistent actor will be stopped. This can be customized by - * handling [[PersistenceFailure]] in [[receiveCommand]]. - * - * @param event event to be persisted. - * @param handler handler for each persisted `event` - */ - final def persist[A](event: A, handler: Procedure[A]): Unit = - persist(event)(event ⇒ handler(event)) - - /** - * Java API: asynchronously persists `events` in specified order. 
This is equivalent to calling - * `persist[A](event: A, handler: Procedure[A])` multiple times with the same `handler`, - * except that `events` are persisted atomically with this method. - * - * @param events events to be persisted. - * @param handler handler for each persisted `events` - */ - final def persist[A](events: JIterable[A], handler: Procedure[A]): Unit = - persist(Util.immutableSeq(events))(event ⇒ handler(event)) - - /** - * Java API: asynchronously persists `event`. On successful persistence, `handler` is called with the - * persisted event. - * - * Unlike `persist` the persistent actor will continue to receive incomming commands between the - * call to `persistAsync` and executing it's `handler`. This asynchronous, non-stashing, version of - * of persist should be used when you favor throughput over the strict ordering guarantees that `persist` guarantees. - * - * If persistence of an event fails, the persistent actor will be stopped. This can be customized by - * handling [[PersistenceFailure]] in [[receiveCommand]]. - * - * @param event event to be persisted - * @param handler handler for each persisted `event` - */ - final def persistAsync[A](event: A, handler: Procedure[A]): Unit = - persistAsync(event)(event ⇒ handler(event)) - - /** - * Defer the handler execution until all pending handlers have been executed. - * Allows to define logic within the actor, which will respect the invocation-order-guarantee - * in respect to `persistAsync` calls. That is, if `persistAsync` was invoked before defer, - * the corresponding handlers will be invoked in the same order as they were registered in. - * - * This call will NOT result in `event` being persisted, please use `persist` or `persistAsync`, - * if the given event should possible to replay. - * - * If there are no pending persist handler calls, the handler will be called immediatly. 
- * - * In the event of persistence failures (indicated by [[PersistenceFailure]] messages being sent to the - * [[PersistentActor]], you can handle these messages, which in turn will enable the deferred handlers to run afterwards. - * If persistence failure messages are left `unhandled`, the default behavior is to stop the Actor, thus the handlers - * will not be run. - * - * @param event event to be handled in the future, when preceeding persist operations have been processes - * @param handler handler for the given `event` - */ - final def defer[A](event: A)(handler: Procedure[A]): Unit = - super.defer(event)(event ⇒ handler(event)) - - /** - * Defer the handler execution until all pending handlers have been executed. - * Allows to define logic within the actor, which will respect the invocation-order-guarantee - * in respect to `persistAsync` calls. That is, if `persistAsync` was invoked before defer, - * the corresponding handlers will be invoked in the same order as they were registered in. - * - * This call will NOT result in `event` being persisted, please use `persist` or `persistAsync`, - * if the given event should possible to replay. - * - * If there are no pending persist handler calls, the handler will be called immediatly. - * - * In the event of persistence failures (indicated by [[PersistenceFailure]] messages being sent to the - * [[PersistentActor]], you can handle these messages, which in turn will enable the deferred handlers to run afterwards. - * If persistence failure messages are left `unhandled`, the default behavior is to stop the Actor, thus the handlers - * will not be run. 
- * - * @param events event to be handled in the future, when preceeding persist operations have been processes - * @param handler handler for each `event` - */ - final def defer[A](events: JIterable[A])(handler: Procedure[A]): Unit = - super.defer(Util.immutableSeq(events))(event ⇒ handler(event)) - - /** - * Java API: asynchronously persists `events` in specified order. This is equivalent to calling - * `persistAsync[A](event: A)(handler: A => Unit)` multiple times with the same `handler`, - * except that `events` are persisted atomically with this method. - * - * @param events events to be persisted - * @param handler handler for each persisted `events` - */ - final def persistAsync[A](events: JIterable[A], handler: Procedure[A]): Unit = - persistAsync(Util.immutableSeq(events))(event ⇒ handler(event)) - - override def receive = super[PersistentActor].receive + } } - -/** - * Java API: an event sourced processor. - */ -@deprecated("UntypedEventsourcedProcessor will be removed in 2.4.x, instead extend the API equivalent `akka.persistence.PersistentProcessor`", since = "2.3.4") -abstract class UntypedEventsourcedProcessor extends UntypedPersistentActor { - override def persistenceId: String = processorId -} - -/** - * Java API: compatible with lambda expressions (to be used with [[akka.japi.pf.ReceiveBuilder]]): - * command handler. Typically validates commands against current state (and/or by - * communication with other actors). On successful validation, one or more events are - * derived from a command and these events are then persisted by calling `persist`. - * Commands sent to event sourced processors must not be [[Persistent]] or - * [[PersistentBatch]] messages. In this case an `UnsupportedOperationException` is - * thrown by the processor. 
- */ -@deprecated("AbstractEventsourcedProcessor will be removed in 2.4.x, instead extend the API equivalent `akka.persistence.PersistentProcessor`", since = "2.3.4") -abstract class AbstractEventsourcedProcessor extends AbstractPersistentActor { - override def persistenceId: String = processorId -} diff --git a/akka-persistence/src/main/scala/akka/persistence/JournalProtocol.scala b/akka-persistence/src/main/scala/akka/persistence/JournalProtocol.scala index 567e9faf31..880e9fbc4b 100644 --- a/akka-persistence/src/main/scala/akka/persistence/JournalProtocol.scala +++ b/akka-persistence/src/main/scala/akka/persistence/JournalProtocol.scala @@ -11,20 +11,9 @@ import akka.actor._ /** * INTERNAL API. * - * Messages exchanged between persistent actors, views, channels and a journal. + * Messages exchanged between persistent actors, views and a journal. */ private[persistence] object JournalProtocol { - /** - * Request to delete messages identified by `messageIds`. If `permanent` is set to `false`, - * the persistent messages are marked as deleted, otherwise they are permanently deleted. - */ - final case class DeleteMessages(messageIds: immutable.Seq[PersistentId], permanent: Boolean, requestor: Option[ActorRef] = None) - - /** - * Reply message to a successful [[DeleteMessages]] request. - */ - final case class DeleteMessagesSuccess(messageIds: immutable.Seq[PersistentId]) - /** * Reply message to a failed [[DeleteMessages]] request. */ @@ -37,28 +26,13 @@ private[persistence] object JournalProtocol { */ final case class DeleteMessagesTo(persistenceId: String, toSequenceNr: Long, permanent: Boolean) - /** - * Request to write delivery confirmations. - */ - final case class WriteConfirmations(confirmations: immutable.Seq[PersistentConfirmation], requestor: ActorRef) - - /** - * Reply message to a successful [[WriteConfirmations]] request. 
- */ - final case class WriteConfirmationsSuccess(confirmations: immutable.Seq[PersistentConfirmation]) - - /** - * Reply message to a failed [[WriteConfirmations]] request. - */ - final case class WriteConfirmationsFailure(cause: Throwable) - /** * Request to write messages. * * @param messages messages to be written. * @param persistentActor write requestor. */ - final case class WriteMessages(messages: immutable.Seq[Resequenceable], persistentActor: ActorRef, actorInstanceId: Int) + final case class WriteMessages(messages: immutable.Seq[PersistentEnvelope], persistentActor: ActorRef, actorInstanceId: Int) /** * Reply message to a successful [[WriteMessages]] request. This reply is sent to the requestor @@ -92,16 +66,7 @@ private[persistence] object JournalProtocol { final case class WriteMessageFailure(message: PersistentRepr, cause: Throwable, actorInstanceId: Int) /** - * Request to loop a `message` back to `persistent actor`, without persisting the message. Looping of messages - * through a journal is required to preserve message order with persistent messages. - * - * @param message message to be looped through the journal. - * @param persistentActor loop requestor. - */ - final case class LoopMessage(message: Any, persistentActor: ActorRef, actorInstanceId: Int) - - /** - * Reply message to a [[LoopMessage]] request. + * Reply message to a [[WriteMessages]] with a non-persistent message. * * @param message looped message. 
*/ diff --git a/akka-persistence/src/main/scala/akka/persistence/Persistence.scala b/akka-persistence/src/main/scala/akka/persistence/Persistence.scala index b341d605f0..cca664d91f 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Persistence.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Persistence.scala @@ -21,9 +21,6 @@ final class PersistenceSettings(config: Config) { val maxMessageBatchSize: Int = config.getInt("journal.max-message-batch-size") - val maxConfirmationBatchSize: Int = - config.getInt("journal.max-confirmation-batch-size") - val maxDeletionBatchSize: Int = config.getInt("journal.max-deletion-batch-size") } @@ -69,10 +66,6 @@ final class PersistenceSettings(config: Config) { config.hasPath(path) && config.getBoolean(path) } - val publishConfirmations: Boolean = { - val path = "publish-confirmations" - config.hasPath(path) && config.getBoolean(path) - } } } @@ -108,59 +101,29 @@ class Persistence(val system: ExtendedActorSystem) extends Extension { else DefaultPluginDispatcherId } - private val confirmationBatchLayer = system.systemActorOf( - Props(classOf[DeliveredByChannelBatching], journal, settings), "confirmation-batch-layer") - - private val deletionBatchLayer = system.systemActorOf( - Props(classOf[DeliveredByPersistentChannelBatching], journal, settings), "deletion-batch-layer") - /** - * Creates a canonical processor id from a processor actor ref. - */ - @deprecated("Use `persistenceId` instead. Processor will be removed.", since = "2.3.4") - def processorId(processor: ActorRef): String = id(processor) - - /** - * Creates a canonical persistent actor id from a processor actor ref. + * Creates a canonical persistent actor id from a persistent actor ref. */ def persistenceId(persistentActor: ActorRef): String = id(persistentActor) /** - * Creates a canonical channel id from a channel actor ref. - */ - @deprecated("Channels will be removed. 
You may want to use `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") - def channelId(channel: ActorRef): String = id(channel) - - /** - * Returns a snapshot store for a processor identified by `persistenceId`. + * Returns a snapshot store for a persistent actor identified by `persistenceId`. */ def snapshotStoreFor(persistenceId: String): ActorRef = { // Currently returns a snapshot store singleton but this methods allows for later - // optimizations where each processor can have its own snapshot store actor. + // optimizations where each persistent actor can have its own snapshot store actor. snapshotStore } /** - * Returns a journal for a processor identified by `persistenceId`. + * Returns a journal for a persistent actor identified by `persistenceId`. */ def journalFor(persistenceId: String): ActorRef = { // Currently returns a journal singleton but this methods allows for later - // optimizations where each processor can have its own journal actor. + // optimizations where each persistent actor can have its own journal actor. journal } - /** - * INTERNAL API. - */ - private[persistence] def confirmationBatchingJournalForChannel(channelId: String): ActorRef = - confirmationBatchLayer - - /** - * INTERNAL API. 
- */ - private[persistence] def deletionBatchingJournalForChannel(channelId: String): ActorRef = - deletionBatchLayer - private def createPlugin(pluginType: String)(dispatcherSelector: Class[_] ⇒ String) = { val pluginConfigPath = config.getString(s"${pluginType}.plugin") val pluginConfig = system.settings.config.getConfig(pluginConfigPath) diff --git a/akka-persistence/src/main/scala/akka/persistence/Persistent.scala b/akka-persistence/src/main/scala/akka/persistence/Persistent.scala index 5b47692d79..cc58cd0d79 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Persistent.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Persistent.scala @@ -19,9 +19,9 @@ import akka.persistence.serialization.Message * * Marks messages which can be resequenced by the [[akka.persistence.journal.AsyncWriteJournal]]. * - * In essence it is either an [[NonPersistentRepr]] or [[Persistent]]. + * In essence it is either an [[NonPersistentRepr]] or [[PersistentRepr]]. */ -private[persistence] sealed trait Resequenceable { +private[persistence] sealed trait PersistentEnvelope { def payload: Any def sender: ActorRef } @@ -30,156 +30,7 @@ private[persistence] sealed trait Resequenceable { * INTERNAL API * Message which can be resequenced by the Journal, but will not be persisted. */ -private[persistence] final case class NonPersistentRepr(payload: Any, sender: ActorRef) extends Resequenceable - -/** Persistent message. */ -@deprecated("Use akka.persistence.PersistentActor instead.", since = "2.3.4") -sealed abstract class Persistent extends Resequenceable { - /** - * This persistent message's payload. - */ - //#payload - def payload: Any - //#payload - - /** - * This persistent message's sequence number. - */ - //#sequence-nr - def sequenceNr: Long - //#sequence-nr - - /** - * Creates a new persistent message with the specified `payload`. 
- */ - def withPayload(payload: Any): Persistent -} - -@deprecated("Use akka.persistence.PersistentActor instead", since = "2.3.4") -object Persistent { - /** - * Java API: creates a new persistent message. Must only be used outside processors. - * - * @param payload payload of new persistent message. - */ - def create(payload: Any): Persistent = - create(payload, null) - - /** - * Java API: creates a new persistent message, derived from the specified current message. The current - * message can be obtained inside a [[Processor]] by calling `getCurrentPersistentMessage()`. - * - * @param payload payload of new persistent message. - * @param currentPersistentMessage current persistent message. - */ - def create(payload: Any, currentPersistentMessage: Persistent): Persistent = - apply(payload)(Option(currentPersistentMessage)) - - /** - * Creates a new persistent message, derived from an implicit current message. - * When used inside a [[Processor]], this is the optional current [[Persistent]] - * message of that processor. - * - * @param payload payload of the new persistent message. - * @param currentPersistentMessage optional current persistent message, defaults to `None`. - */ - @deprecated("Use akka.persistence.PersistentActor instead", since = "2.3.4") - def apply(payload: Any)(implicit currentPersistentMessage: Option[Persistent] = None): Persistent = - currentPersistentMessage.map(_.withPayload(payload)).getOrElse(PersistentRepr(payload)) - - /** - * [[Persistent]] extractor. - */ - @deprecated("Use akka.persistence.PersistentActor instead", since = "2.3.4") - def unapply(persistent: Persistent): Option[(Any, Long)] = - Some((persistent.payload, persistent.sequenceNr)) -} - -/** - * Persistent message that has been delivered by a [[Channel]] or [[PersistentChannel]]. Channel - * destinations that receive messages of this type can confirm their receipt by calling [[confirm]]. 
- */ -@deprecated("Use akka.persistence.PersistentActor instead", since = "2.3.4") -sealed abstract class ConfirmablePersistent extends Persistent { - /** - * Called by [[Channel]] and [[PersistentChannel]] destinations to confirm the receipt of a - * persistent message. - */ - def confirm(): Unit - - /** - * Number of redeliveries. Only greater than zero if message has been redelivered by a [[Channel]] - * or [[PersistentChannel]]. - */ - def redeliveries: Int -} - -@deprecated("Use akka.persistence.PersistentActor instead", since = "2.3.4") -object ConfirmablePersistent { - /** - * [[ConfirmablePersistent]] extractor. - */ - @deprecated("Use akka.persistence.PersistentActor instead", since = "2.3.4") - def unapply(persistent: ConfirmablePersistent): Option[(Any, Long, Int)] = - Some((persistent.payload, persistent.sequenceNr, persistent.redeliveries)) -} - -/** - * Instructs a [[Processor]] to atomically write the contained [[Persistent]] messages to the - * journal. The processor receives the written messages individually as [[Persistent]] messages. - * During recovery, they are also replayed individually. - */ -@deprecated("Use akka.persistence.PersistentActor instead", since = "2.3.4") -final case class PersistentBatch(batch: immutable.Seq[Resequenceable]) extends Message - -@deprecated("Use akka.persistence.PersistentActor instead", since = "2.3.4") -object PersistentBatch { - /** - * Java API. - */ - def create(persistentBatch: JIterable[Persistent]) = - PersistentBatch(immutableSeq(persistentBatch)) -} - -/** - * Plugin API: confirmation entry written by journal plugins. - */ -@deprecated("Channel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -trait PersistentConfirmation { - @deprecated("Use `persistenceId` instead. 
Processor will be removed.", since = "2.3.4") - final def processorId: String = persistenceId - def persistenceId: String - def channelId: String - def sequenceNr: Long -} - -/** - * Plugin API: persistent message identifier. - */ -@deprecated("deleteMessages will be removed.", since = "2.3.4") -trait PersistentId { - - /** - * Persistent id that journals a persistent message - */ - def processorId: String - - /** - * Persistent id that journals a persistent message - */ - def persistenceId: String = processorId - - /** - * A persistent message's sequence number. - */ - def sequenceNr: Long -} - -/** - * INTERNAL API. - */ -@deprecated("deleteMessages will be removed.", since = "2.3.4") -private[persistence] final case class PersistentIdImpl(processorId: String, sequenceNr: Long) extends PersistentId +private[persistence] final case class NonPersistentRepr(payload: Any, sender: ActorRef) extends PersistentEnvelope /** * Plugin API: representation of a persistent message in the journal plugin API. @@ -188,9 +39,7 @@ private[persistence] final case class PersistentIdImpl(processorId: String, sequ * @see [[journal.AsyncWriteJournal]] * @see [[journal.AsyncRecovery]] */ -trait PersistentRepr extends Persistent with Resequenceable with PersistentId with Message { - // todo we want to get rid of the Persistent() wrapper from user land; PersistentRepr is here to stay. #15230 - +trait PersistentRepr extends PersistentEnvelope with Message { import scala.collection.JavaConverters._ /** @@ -198,50 +47,26 @@ trait PersistentRepr extends Persistent with Resequenceable with PersistentId wi */ def payload: Any + /** + * Persistent id that journals a persistent message + */ + def persistenceId: String + + /** + * This persistent message's sequence number. + */ + def sequenceNr: Long + + /** + * Creates a new persistent message with the specified `payload`. + */ + def withPayload(payload: Any): PersistentRepr + /** * `true` if this message is marked as deleted. 
*/ def deleted: Boolean - /** - * Number of redeliveries. Only greater than zero if message has been redelivered by a [[Channel]] - * or [[PersistentChannel]]. - */ - @deprecated("Channel will be removed.", since = "2.3.4") - def redeliveries: Int - - /** - * Channel ids of delivery confirmations that are available for this message. Only non-empty - * for replayed messages. - */ - @deprecated("Channel will be removed.", since = "2.3.4") - def confirms: immutable.Seq[String] - - /** - * Java API, Plugin API: channel ids of delivery confirmations that are available for this - * message. Only non-empty for replayed messages. - */ - @deprecated("Channel will be removed.", since = "2.3.4") - def getConfirms: JList[String] = confirms.asJava - - /** - * `true` only if this message has been delivered by a channel. - */ - @deprecated("Channel will be removed.", since = "2.3.4") - def confirmable: Boolean - - /** - * Delivery confirmation message. - */ - @deprecated("Channel will be removed.", since = "2.3.4") - def confirmMessage: Delivered - - /** - * Delivery confirmation message. - */ - @deprecated("Channel will be removed.", since = "2.3.4") - def confirmTarget: ActorRef - /** * Sender of this message. 
*/ @@ -263,18 +88,14 @@ trait PersistentRepr extends Persistent with Resequenceable with PersistentId wi */ def update( sequenceNr: Long = sequenceNr, - @deprecatedName('processorId) persistenceId: String = persistenceId, + persistenceId: String = persistenceId, deleted: Boolean = deleted, - @deprecated("Channel will be removed.", since = "2.3.4") redeliveries: Int = redeliveries, - @deprecated("Channel will be removed.", since = "2.3.4") confirms: immutable.Seq[String] = confirms, - @deprecated("Channel will be removed.", since = "2.3.4") confirmMessage: Delivered = confirmMessage, - @deprecated("Channel will be removed.", since = "2.3.4") confirmTarget: ActorRef = confirmTarget, sender: ActorRef = sender): PersistentRepr } object PersistentRepr { /** - * Plugin API: value of an undefined processor or channel id. + * Plugin API: value of an undefined processor id. */ val Undefined = "" @@ -284,21 +105,21 @@ object PersistentRepr { def apply( payload: Any, sequenceNr: Long = 0L, - @deprecatedName('processorId) persistenceId: String = PersistentRepr.Undefined, + persistenceId: String = PersistentRepr.Undefined, deleted: Boolean = false, - @deprecated("Channel will be removed.", since = "2.3.4") redeliveries: Int = 0, - @deprecated("Channel will be removed.", since = "2.3.4") confirms: immutable.Seq[String] = Nil, - @deprecated("Channel will be removed.", since = "2.3.4") confirmable: Boolean = false, - @deprecated("Channel will be removed.", since = "2.3.4") confirmMessage: Delivered = null, - @deprecated("Channel will be removed.", since = "2.3.4") confirmTarget: ActorRef = null, - sender: ActorRef = null) = - if (confirmable) ConfirmablePersistentImpl(payload, sequenceNr, persistenceId, deleted, redeliveries, confirms, confirmMessage, confirmTarget, sender) - else PersistentImpl(payload, sequenceNr, persistenceId, deleted, confirms, sender) + sender: ActorRef = null): PersistentRepr = + PersistentImpl(payload, sequenceNr, persistenceId, deleted, sender) /** * Java 
API, Plugin API. */ def create = apply _ + + /** + * extractor of payload and sequenceNr. + */ + def unapply(persistent: PersistentRepr): Option[(Any, Long)] = + Some((persistent.payload, persistent.sequenceNr)) } /** @@ -307,12 +128,11 @@ object PersistentRepr { private[persistence] final case class PersistentImpl( payload: Any, sequenceNr: Long, - @deprecatedName('processorId) override val persistenceId: String, + override val persistenceId: String, deleted: Boolean, - confirms: immutable.Seq[String], - sender: ActorRef) extends Persistent with PersistentRepr { + sender: ActorRef) extends PersistentRepr { - def withPayload(payload: Any): Persistent = + def withPayload(payload: Any): PersistentRepr = copy(payload = payload) def prepareWrite(sender: ActorRef) = @@ -320,62 +140,10 @@ private[persistence] final case class PersistentImpl( def update( sequenceNr: Long, - @deprecatedName('processorId) persistenceId: String, + persistenceId: String, deleted: Boolean, - redeliveries: Int, - confirms: immutable.Seq[String], - confirmMessage: Delivered, - confirmTarget: ActorRef, sender: ActorRef) = - copy(sequenceNr = sequenceNr, persistenceId = persistenceId, deleted = deleted, confirms = confirms, sender = sender) + copy(sequenceNr = sequenceNr, persistenceId = persistenceId, deleted = deleted, sender = sender) - val redeliveries: Int = 0 - val confirmable: Boolean = false - val confirmMessage: Delivered = null - val confirmTarget: ActorRef = null - - @deprecated("Use persistenceId.", since = "2.3.4") - override def processorId = persistenceId } -/** - * INTERNAL API. 
- */ -@deprecated("ConfirmablePersistent will be removed, see `AtLeastOnceDelivery` instead.", since = "2.3.4") -private[persistence] final case class ConfirmablePersistentImpl( - payload: Any, - sequenceNr: Long, - @deprecatedName('processorId) override val persistenceId: String, - deleted: Boolean, - redeliveries: Int, - confirms: immutable.Seq[String], - confirmMessage: Delivered, - confirmTarget: ActorRef, - sender: ActorRef) extends ConfirmablePersistent with PersistentRepr { - - def withPayload(payload: Any): ConfirmablePersistent = - copy(payload = payload) - - def confirm(): Unit = - if (confirmTarget != null) confirmTarget ! confirmMessage - - def confirmable = true - - def prepareWrite(sender: ActorRef) = - copy(sender = sender, confirmMessage = null, confirmTarget = null) - - def update(sequenceNr: Long, @deprecatedName('processorId) persistenceId: String, deleted: Boolean, redeliveries: Int, confirms: immutable.Seq[String], confirmMessage: Delivered, confirmTarget: ActorRef, sender: ActorRef) = - copy(sequenceNr = sequenceNr, persistenceId = persistenceId, deleted = deleted, redeliveries = redeliveries, confirms = confirms, confirmMessage = confirmMessage, confirmTarget = confirmTarget, sender = sender) - - @deprecated("Use persistenceId.", since = "2.3.4") - override def processorId = persistenceId -} - -/** - * INTERNAL API. 
- */ -@deprecated("ConfirmablePersistent will be removed, see `AtLeastOnceDelivery` instead.", since = "2.3.4") -private[persistence] object ConfirmablePersistentImpl { - def apply(persistent: PersistentRepr, confirmMessage: Delivered, confirmTarget: ActorRef = null): ConfirmablePersistentImpl = - ConfirmablePersistentImpl(persistent.payload, persistent.sequenceNr, persistent.persistenceId, persistent.deleted, persistent.redeliveries, persistent.confirms, confirmMessage, confirmTarget, persistent.sender) -} diff --git a/akka-persistence/src/main/scala/akka/persistence/PersistentActor.scala b/akka-persistence/src/main/scala/akka/persistence/PersistentActor.scala new file mode 100644 index 0000000000..b5ad2771e7 --- /dev/null +++ b/akka-persistence/src/main/scala/akka/persistence/PersistentActor.scala @@ -0,0 +1,376 @@ +/** + * Copyright (C) 2009-2014 Typesafe Inc. + */ +package akka.persistence + +import java.lang.{ Iterable ⇒ JIterable } +import akka.actor.UntypedActor +import akka.japi.Procedure +import akka.actor.AbstractActor +import akka.japi.Util + +/** + * Sent to a [[PersistentActor]] if a journal fails to write a persistent message. If + * not handled, an `akka.actor.ActorKilledException` is thrown by that persistent actor. + * + * @param payload payload of the persistent message. + * @param sequenceNr sequence number of the persistent message. + * @param cause failure cause. + */ +@SerialVersionUID(1L) +case class PersistenceFailure(payload: Any, sequenceNr: Long, cause: Throwable) + +/** + * Sent to a [[PersistentActor]] if a journal fails to replay messages or fetch that persistent actor's + * highest sequence number. If not handled, the actor will be stopped. + */ +@SerialVersionUID(1L) +case class RecoveryFailure(cause: Throwable) + +abstract class RecoveryCompleted +/** + * Sent to a [[PersistentActor]] when the journal replay has been finished. 
+ */ +@SerialVersionUID(1L) +case object RecoveryCompleted extends RecoveryCompleted { + /** + * Java API: get the singleton instance + */ + def getInstance = this +} + +/** + * Instructs a persistent actor to recover itself. Recovery will start from a snapshot if the persistent actor has + * previously saved one or more snapshots and at least one of these snapshots matches the specified + * `fromSnapshot` criteria. Otherwise, recovery will start from scratch by replaying all journaled + * messages. + * + * If recovery starts from a snapshot, the persistent actor is offered that snapshot with a [[SnapshotOffer]] + * message, followed by replayed messages, if any, that are younger than the snapshot, up to the + * specified upper sequence number bound (`toSequenceNr`). + * + * @param fromSnapshot criteria for selecting a saved snapshot from which recovery should start. Default + * is latest (= youngest) snapshot. + * @param toSequenceNr upper sequence number bound (inclusive) for recovery. Default is no upper bound. + * @param replayMax maximum number of messages to replay. Default is no limit. + */ +@SerialVersionUID(1L) +final case class Recover(fromSnapshot: SnapshotSelectionCriteria = SnapshotSelectionCriteria.Latest, toSequenceNr: Long = Long.MaxValue, replayMax: Long = Long.MaxValue) + +object Recover { + /** + * Java API. + * + * @see [[Recover]] + */ + def create() = Recover() + + /** + * Java API. + * + * @see [[Recover]] + */ + def create(toSequenceNr: Long) = + Recover(toSequenceNr = toSequenceNr) + + /** + * Java API. + * + * @see [[Recover]] + */ + def create(fromSnapshot: SnapshotSelectionCriteria) = + Recover(fromSnapshot = fromSnapshot) + + /** + * Java API. + * + * @see [[Recover]] + */ + def create(fromSnapshot: SnapshotSelectionCriteria, toSequenceNr: Long) = + Recover(fromSnapshot, toSequenceNr) + + /** + * Java API. 
+ * + * @see [[Recover]] + */ + def create(fromSnapshot: SnapshotSelectionCriteria, toSequenceNr: Long, replayMax: Long) = + Recover(fromSnapshot, toSequenceNr, replayMax) +} + +/** + * A persistent Actor - can be used to implement command or event sourcing. + */ +trait PersistentActor extends Eventsourced { + def receive = receiveCommand +} + +/** + * Java API: a persistent actor - can be used to implement command or event sourcing. + */ +abstract class UntypedPersistentActor extends UntypedActor with Eventsourced { + + final def onReceive(message: Any) = onReceiveCommand(message) + + final def receiveRecover: Receive = { + case msg ⇒ onReceiveRecover(msg) + } + + final def receiveCommand: Receive = { + case msg ⇒ onReceiveCommand(msg) + } + + /** + * Java API: asynchronously persists `event`. On successful persistence, `handler` is called with the + * persisted event. It is guaranteed that no new commands will be received by a persistent actor + * between a call to `persist` and the execution of its `handler`. This also holds for + * multiple `persist` calls per received command. Internally, this is achieved by stashing new + * commands and unstashing them when the `event` has been persisted and handled. The stash used + * for that is an internal stash which doesn't interfere with the inherited user stash. + * + * An event `handler` may close over persistent actor state and modify it. The `getSender()` of a persisted + * event is the sender of the corresponding command. This means that one can reply to a command + * sender within an event `handler`. + * + * Within an event handler, applications usually update persistent actor state using persisted event + * data, notify listeners and reply to command senders. + * + * If persistence of an event fails, the persistent actor will be stopped. This can be customized by + * handling [[PersistenceFailure]] in [[onReceiveCommand]]. + * + * @param event event to be persisted. 
+ * @param handler handler for each persisted `event`
+ */
+ final def persist[A](event: A, handler: Procedure[A]): Unit =
+ persist(event)(event ⇒ handler(event))
+
+ /**
+ * Java API: asynchronously persists `events` in specified order. This is equivalent to calling
+ * `persist[A](event: A, handler: Procedure[A])` multiple times with the same `handler`,
+ * except that `events` are persisted atomically with this method.
+ *
+ * @param events events to be persisted.
+ * @param handler handler for each persisted `events`
+ */
+ final def persist[A](events: JIterable[A], handler: Procedure[A]): Unit =
+ persist(Util.immutableSeq(events))(event ⇒ handler(event))
+
+ /**
+ * JAVA API: asynchronously persists `event`. On successful persistence, `handler` is called with the
+ * persisted event.
+ *
+ * Unlike `persist` the persistent actor will continue to receive incoming commands between the
+ * call to `persist` and executing its `handler`. This asynchronous, non-stashing, version
+ * of persist should be used when you favor throughput over the "command-2 only processed after
+ * command-1 effects' have been applied" guarantee, which is provided by the plain [[persist]] method.
+ *
+ * An event `handler` may close over persistent actor state and modify it. The `sender` of a persisted
+ * event is the sender of the corresponding command. This means that one can reply to a command
+ * sender within an event `handler`.
+ *
+ * If persistence of an event fails, the persistent actor will be stopped. This can be customized by
+ * handling [[PersistenceFailure]] in [[receiveCommand]].
+ *
+ * @param event event to be persisted
+ * @param handler handler for each persisted `event`
+ */
+ final def persistAsync[A](event: A)(handler: Procedure[A]): Unit =
+ super[Eventsourced].persistAsync(event)(event ⇒ handler(event))
+
+ /**
+ * JAVA API: asynchronously persists `events` in specified order.
This is equivalent to calling
+ * `persistAsync[A](event: A)(handler: A => Unit)` multiple times with the same `handler`,
+ * except that `events` are persisted atomically with this method.
+ *
+ * @param events events to be persisted
+ * @param handler handler for each persisted `events`
+ */
+ final def persistAsync[A](events: JIterable[A])(handler: A ⇒ Unit): Unit =
+ super[Eventsourced].persistAsync(Util.immutableSeq(events))(event ⇒ handler(event))
+
+ /**
+ * Defer the handler execution until all pending handlers have been executed.
+ * Allows to define logic within the actor, which will respect the invocation-order-guarantee
+ * in respect to `persistAsync` calls. That is, if `persistAsync` was invoked before defer,
+ * the corresponding handlers will be invoked in the same order as they were registered in.
+ *
+ * This call will NOT result in `event` being persisted, please use `persist` or `persistAsync`,
+ * if the given event should be possible to replay.
+ *
+ * If there are no pending persist handler calls, the handler will be called immediately.
+ *
+ * In the event of persistence failures (indicated by [[PersistenceFailure]] messages being sent to the
+ * [[PersistentActor]]), you can handle these messages, which in turn will enable the deferred handlers to run afterwards.
+ * If persistence failure messages are left `unhandled`, the default behavior is to stop the Actor, thus the handlers
+ * will not be run.
+ *
+ * @param event event to be handled in the future, when preceding persist operations have been processed
+ * @param handler handler for the given `event`
+ */
+ final def defer[A](event: A)(handler: Procedure[A]): Unit =
+ super[Eventsourced].defer(event)(event ⇒ handler(event))
+
+ /**
+ * Defer the handler execution until all pending handlers have been executed.
+ * Allows to define logic within the actor, which will respect the invocation-order-guarantee
+ * in respect to `persistAsync` calls.
That is, if `persistAsync` was invoked before defer,
+ * the corresponding handlers will be invoked in the same order as they were registered in.
+ *
+ * This call will NOT result in `event` being persisted, please use `persist` or `persistAsync`,
+ * if the given event should be possible to replay.
+ *
+ * If there are no pending persist handler calls, the handler will be called immediately.
+ *
+ * In the event of persistence failures (indicated by [[PersistenceFailure]] messages being sent to the
+ * [[PersistentActor]]), you can handle these messages, which in turn will enable the deferred handlers to run afterwards.
+ * If persistence failure messages are left `unhandled`, the default behavior is to stop the Actor, thus the handlers
+ * will not be run.
+ *
+ * @param events event to be handled in the future, when preceding persist operations have been processed
+ * @param handler handler for each `event`
+ */
+ final def defer[A](events: JIterable[A])(handler: Procedure[A]): Unit =
+ super[Eventsourced].defer(Util.immutableSeq(events))(event ⇒ handler(event))
+
+ /**
+ * Java API: recovery handler that receives persisted events during recovery. If a state snapshot
+ * has been captured and saved, this handler will receive a [[SnapshotOffer]] message
+ * followed by events that are younger than the offered snapshot.
+ *
+ * This handler must not have side-effects other than changing persistent actor state i.e. it
+ * should not perform actions that may fail, such as interacting with external services,
+ * for example.
+ *
+ * If recovery fails, the actor will be stopped. This can be customized by
+ * handling [[RecoveryFailure]].
+ *
+ * @see [[Recover]]
+ */
+ @throws(classOf[Exception])
+ def onReceiveRecover(msg: Any): Unit
+
+ /**
+ * Java API: command handler. Typically validates commands against current state (and/or by
+ * communication with other actors).
On successful validation, one or more events are
+ * derived from a command and these events are then persisted by calling `persist`.
+ */
+ @throws(classOf[Exception])
+ def onReceiveCommand(msg: Any): Unit
+}
+
+/**
+ * Java API: a persistent actor - can be used to implement command or event sourcing.
+ */
+abstract class AbstractPersistentActor extends AbstractActor with PersistentActor with Eventsourced {
+
+ /**
+ * Java API: asynchronously persists `event`. On successful persistence, `handler` is called with the
+ * persisted event. It is guaranteed that no new commands will be received by a persistent actor
+ * between a call to `persist` and the execution of its `handler`. This also holds for
+ * multiple `persist` calls per received command. Internally, this is achieved by stashing new
+ * commands and unstashing them when the `event` has been persisted and handled. The stash used
+ * for that is an internal stash which doesn't interfere with the inherited user stash.
+ *
+ * An event `handler` may close over persistent actor state and modify it. The `getSender()` of a persisted
+ * event is the sender of the corresponding command. This means that one can reply to a command
+ * sender within an event `handler`.
+ *
+ * Within an event handler, applications usually update persistent actor state using persisted event
+ * data, notify listeners and reply to command senders.
+ *
+ * If persistence of an event fails, the persistent actor will be stopped. This can be customized by
+ * handling [[PersistenceFailure]] in [[receiveCommand]].
+ *
+ * @param event event to be persisted.
+ * @param handler handler for each persisted `event`
+ */
+ final def persist[A](event: A, handler: Procedure[A]): Unit =
+ persist(event)(event ⇒ handler(event))
+
+ /**
+ * Java API: asynchronously persists `events` in specified order.
This is equivalent to calling
+ * `persist[A](event: A, handler: Procedure[A])` multiple times with the same `handler`,
+ * except that `events` are persisted atomically with this method.
+ *
+ * @param events events to be persisted.
+ * @param handler handler for each persisted `events`
+ */
+ final def persist[A](events: JIterable[A], handler: Procedure[A]): Unit =
+ persist(Util.immutableSeq(events))(event ⇒ handler(event))
+
+ /**
+ * Java API: asynchronously persists `event`. On successful persistence, `handler` is called with the
+ * persisted event.
+ *
+ * Unlike `persist` the persistent actor will continue to receive incoming commands between the
+ * call to `persistAsync` and executing its `handler`. This asynchronous, non-stashing, version
+ * of persist should be used when you favor throughput over the strict ordering guarantees that `persist` guarantees.
+ *
+ * If persistence of an event fails, the persistent actor will be stopped. This can be customized by
+ * handling [[PersistenceFailure]] in [[receiveCommand]].
+ *
+ * @param event event to be persisted
+ * @param handler handler for each persisted `event`
+ */
+ final def persistAsync[A](event: A, handler: Procedure[A]): Unit =
+ persistAsync(event)(event ⇒ handler(event))
+
+ /**
+ * Defer the handler execution until all pending handlers have been executed.
+ * Allows to define logic within the actor, which will respect the invocation-order-guarantee
+ * in respect to `persistAsync` calls. That is, if `persistAsync` was invoked before defer,
+ * the corresponding handlers will be invoked in the same order as they were registered in.
+ *
+ * This call will NOT result in `event` being persisted, please use `persist` or `persistAsync`,
+ * if the given event should be possible to replay.
+ *
+ * If there are no pending persist handler calls, the handler will be called immediately.
+ *
+ * In the event of persistence failures (indicated by [[PersistenceFailure]] messages being sent to the
+ * [[PersistentActor]]), you can handle these messages, which in turn will enable the deferred handlers to run afterwards.
+ * If persistence failure messages are left `unhandled`, the default behavior is to stop the Actor, thus the handlers
+ * will not be run.
+ *
+ * @param event event to be handled in the future, when preceding persist operations have been processed
+ * @param handler handler for the given `event`
+ */
+ final def defer[A](event: A)(handler: Procedure[A]): Unit =
+ super.defer(event)(event ⇒ handler(event))
+
+ /**
+ * Defer the handler execution until all pending handlers have been executed.
+ * Allows to define logic within the actor, which will respect the invocation-order-guarantee
+ * in respect to `persistAsync` calls. That is, if `persistAsync` was invoked before defer,
+ * the corresponding handlers will be invoked in the same order as they were registered in.
+ *
+ * This call will NOT result in `event` being persisted, please use `persist` or `persistAsync`,
+ * if the given event should be possible to replay.
+ *
+ * If there are no pending persist handler calls, the handler will be called immediately.
+ *
+ * In the event of persistence failures (indicated by [[PersistenceFailure]] messages being sent to the
+ * [[PersistentActor]]), you can handle these messages, which in turn will enable the deferred handlers to run afterwards.
+ * If persistence failure messages are left `unhandled`, the default behavior is to stop the Actor, thus the handlers
+ * will not be run.
+ * + * @param events event to be handled in the future, when preceeding persist operations have been processes + * @param handler handler for each `event` + */ + final def defer[A](events: JIterable[A])(handler: Procedure[A]): Unit = + super.defer(Util.immutableSeq(events))(event ⇒ handler(event)) + + /** + * Java API: asynchronously persists `events` in specified order. This is equivalent to calling + * `persistAsync[A](event: A)(handler: A => Unit)` multiple times with the same `handler`, + * except that `events` are persisted atomically with this method. + * + * @param events events to be persisted + * @param handler handler for each persisted `events` + */ + final def persistAsync[A](events: JIterable[A], handler: Procedure[A]): Unit = + persistAsync(Util.immutableSeq(events))(event ⇒ handler(event)) + + override def receive = super[PersistentActor].receive + +} + diff --git a/akka-persistence/src/main/scala/akka/persistence/PersistentChannel.scala b/akka-persistence/src/main/scala/akka/persistence/PersistentChannel.scala deleted file mode 100644 index 01c730183a..0000000000 --- a/akka-persistence/src/main/scala/akka/persistence/PersistentChannel.scala +++ /dev/null @@ -1,416 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package akka.persistence - -import scala.concurrent.duration._ -import scala.language.postfixOps - -import akka.AkkaException -import akka.actor._ -import akka.persistence.JournalProtocol._ - -/** - * A [[PersistentChannel]] configuration object. - * - * @param redeliverMax Maximum number of redelivery attempts. - * @param redeliverInterval Interval between redelivery attempts. - * @param redeliverFailureListener Receiver of [[RedeliverFailure]] notifications which are sent when the number - * of redeliveries reaches `redeliverMax` for a sequence of messages. To enforce - * a redelivery of these messages, the listener has to [[Reset]] the persistent - * channel. 
Alternatively, it can also confirm these messages, preventing further - * redeliveries. - * @param replyPersistent If `true` the sender will receive the successfully stored [[Persistent]] message that has - * been submitted with a [[Deliver]] request, or a [[PersistenceFailure]] message in case of - * a persistence failure. - * @param pendingConfirmationsMax Message delivery is suspended by a channel if the number of pending reaches the - * specified value and is resumed again if the number of pending confirmations falls - * below `pendingConfirmationsMin`. - * @param pendingConfirmationsMin Message delivery is resumed if the number of pending confirmations falls below - * this limit. It is suspended again if it reaches `pendingConfirmationsMax`. - * Message delivery is enabled for a channel if the number of pending confirmations - * is below this limit, or, is resumed again if it falls below this limit. - * @param idleTimeout Maximum interval between read attempts made by a persistent channel. This settings applies, - * for example, after a journal failed to serve a read request. The next read request is then - * made after the configured timeout. - */ -@SerialVersionUID(1L) -@deprecated("PersistentChannel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -final case class PersistentChannelSettings( - val redeliverMax: Int = 5, - val redeliverInterval: FiniteDuration = 5.seconds, - val redeliverFailureListener: Option[ActorRef] = None, - val replyPersistent: Boolean = false, - val pendingConfirmationsMax: Long = Long.MaxValue, - val pendingConfirmationsMin: Long = Long.MaxValue, - val idleTimeout: FiniteDuration = 1.minute) { - - /** - * Java API. - */ - def withRedeliverMax(redeliverMax: Int): PersistentChannelSettings = - copy(redeliverMax = redeliverMax) - - /** - * Java API. 
- */ - def withRedeliverInterval(redeliverInterval: FiniteDuration): PersistentChannelSettings = - copy(redeliverInterval = redeliverInterval) - - /** - * Java API. - */ - def withRedeliverFailureListener(redeliverFailureListener: ActorRef): PersistentChannelSettings = - copy(redeliverFailureListener = Option(redeliverFailureListener)) - - /** - * Java API. - */ - def withReplyPersistent(replyPersistent: Boolean): PersistentChannelSettings = - copy(replyPersistent = replyPersistent) - - /** - * Java API. - */ - def withPendingConfirmationsMax(pendingConfirmationsMax: Long): PersistentChannelSettings = - copy(pendingConfirmationsMax = pendingConfirmationsMax) - - /** - * Java API. - */ - def withPendingConfirmationsMin(pendingConfirmationsMin: Long): PersistentChannelSettings = - copy(pendingConfirmationsMin = pendingConfirmationsMin) - - /** - * Java API. - */ - def withIdleTimeout(idleTimeout: FiniteDuration): PersistentChannelSettings = - copy(idleTimeout = idleTimeout) - - /** - * Converts this configuration object to [[ChannelSettings]]. - */ - def toChannelSettings: ChannelSettings = - ChannelSettings(redeliverMax, redeliverInterval, redeliverFailureListener) -} - -@deprecated("PersistentChannel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -object PersistentChannelSettings { - /** - * Java API. - */ - def create() = PersistentChannelSettings.apply() -} - -/** - * Resets a [[PersistentChannel]], forcing it to redeliver all unconfirmed persistent - * messages. This does not affect writing [[Deliver]] requests. - */ -@deprecated("PersistentChannel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -case object Reset { - /** - * Java API. - */ - def getInstance() = this -} - -/** - * Exception thrown by a [[PersistentChannel]] child actor to re-initiate delivery. 
- */ -@deprecated("PersistentChannel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -class ResetException extends AkkaException("Channel reset on application request") - -/** - * A [[PersistentChannel]] implements the same functionality as a [[Channel]] but additionally persists - * [[Deliver]] requests before they are served. Persistent channels are useful in combination with slow - * destinations or destinations that are unavailable for a long time. `Deliver` requests that have been - * persisted by a persistent channel are deleted when destinations confirm the receipt of the corresponding - * messages. - * - * The number of pending confirmations can be limited by a persistent channel based on the parameters of - * [[PersistentChannelSettings]]. It can suspend delivery when the number of pending confirmations reaches - * `pendingConfirmationsMax` and resume delivery again when this number falls below `pendingConfirmationsMin`. - * This prevents both flooding destinations with more messages than they can process and unlimited memory - * consumption by the channel. A persistent channel continues to persist [[Deliver]] request even when - * message delivery is temporarily suspended. - * - * A persistent channel can also reply to [[Deliver]] senders if the request has been successfully persisted - * or not (see `replyPersistent` parameter in [[PersistentChannelSettings]]). In case of success, the channel - * replies with the contained [[Persistent]] message, otherwise with a [[PersistenceFailure]] message. 
- */ -@deprecated("PersistentChannel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -final class PersistentChannel private[akka] (_channelId: Option[String], channelSettings: PersistentChannelSettings) extends Actor { - private val id = _channelId match { - case Some(cid) ⇒ cid - case None ⇒ Persistence(context.system).channelId(self) - } - - private val requestReader = context.actorOf(Props(classOf[RequestReader], id, channelSettings)) - private val requestWriter = context.actorOf(Props(classOf[RequestWriter], id, channelSettings, requestReader)) - - def receive = { - case d @ Deliver(persistent: PersistentRepr, destination) ⇒ - // Persist the Deliver request by sending reliableStorage a Persistent message - // with the Deliver request as payload. This persistent message is referred to - // as the wrapper message, whereas the persistent message contained in the Deliver - // request is referred to as wrapped message. - if (!persistent.confirms.contains(id)) requestWriter forward Persistent(d) - case Reset ⇒ requestReader ! Reset - } -} - -@deprecated("Channel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -object PersistentChannel { - /** - * Returns a channel actor configuration object for creating a [[PersistentChannel]] with a - * generated id and default [[PersistentChannelSettings]]. - */ - def props(): Props = props(PersistentChannelSettings()) - - /** - * Returns a channel actor configuration object for creating a [[PersistentChannel]] with a - * generated id and specified `channelSettings`. - * - * @param channelSettings channel configuration object. - */ - def props(channelSettings: PersistentChannelSettings): Props = - Props(classOf[PersistentChannel], None, channelSettings) - - /** - * Returns a channel actor configuration object for creating a [[PersistentChannel]] with the - * specified id and default [[PersistentChannelSettings]]. - * - * @param channelId channel id. 
- */ - def props(channelId: String): Props = - props(channelId, PersistentChannelSettings()) - - /** - * Returns a channel actor configuration object for creating a [[PersistentChannel]] with the - * specified id and specified `channelSettings`. - * - * @param channelId channel id. - * @param channelSettings channel configuration object. - */ - def props(channelId: String, channelSettings: PersistentChannelSettings): Props = - Props(classOf[PersistentChannel], Some(channelId), channelSettings) -} - -/** - * Plugin API. - */ -@deprecated("PersistentChannel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -final case class DeliveredByPersistentChannel( - channelId: String, - persistentSequenceNr: Long, - deliverySequenceNr: Long = 0L, - channel: ActorRef = null) extends Delivered with PersistentId { - - override def persistenceId: String = channelId - @deprecated("Use persistenceId.", since = "2.3.4") - override def processorId = persistenceId - def sequenceNr: Long = persistentSequenceNr - def update(deliverySequenceNr: Long, channel: ActorRef): DeliveredByPersistentChannel = - copy(deliverySequenceNr = deliverySequenceNr, channel = channel) -} - -/** - * INTERNAL API. - */ -@deprecated("PersistentChannel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -private[persistence] class DeliveredByPersistentChannelBatching(journal: ActorRef, settings: PersistenceSettings) extends Actor { - private val publish = settings.internal.publishConfirmations - private val batchMax = settings.journal.maxConfirmationBatchSize - - private var batching = false - private var batch = Vector.empty[DeliveredByPersistentChannel] - - def receive = { - case DeleteMessagesSuccess(messageIds) ⇒ - if (batch.isEmpty) batching = false else journalBatch() - messageIds.foreach { - case c: DeliveredByPersistentChannel ⇒ - c.channel ! 
c - if (publish) context.system.eventStream.publish(c) - } - case DeleteMessagesFailure(_) ⇒ - if (batch.isEmpty) batching = false else journalBatch() - case d: DeliveredByPersistentChannel ⇒ - addToBatch(d) - if (!batching || maxBatchSizeReached) journalBatch() - case m ⇒ journal forward m - } - - def addToBatch(pc: DeliveredByPersistentChannel): Unit = - batch = batch :+ pc - - def maxBatchSizeReached: Boolean = - batch.length >= batchMax - - def journalBatch(): Unit = { - journal ! DeleteMessages(batch, true, Some(self)) - batch = Vector.empty - batching = true - } -} - -/** - * Writes [[Deliver]] requests to the journal. - */ -@deprecated("PersistentChannel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -private class RequestWriter(channelId: String, channelSettings: PersistentChannelSettings, reader: ActorRef) extends Processor { - import RequestWriter._ - import channelSettings._ - - private val cbJournal = extension.confirmationBatchingJournalForChannel(channelId) - - override val persistenceId = channelId - - def receive = { - case p @ Persistent(Deliver(wrapped: PersistentRepr, _), _) ⇒ - if (!recoveryRunning && wrapped.persistenceId != PersistentRepr.Undefined) { - // Write a delivery confirmation to the journal so that replayed Deliver - // requests from a sending processor are not persisted again. Replaying - // Deliver requests is now the responsibility of this processor - // and confirmation by destination is done to the wrapper p.sequenceNr. - cbJournal ! DeliveredByChannel(wrapped.persistenceId, channelId, wrapped.sequenceNr) - } - - if (!recoveryRunning && replyPersistent) - sender() ! wrapped - - case p: PersistenceFailure ⇒ - if (replyPersistent) sender() ! 
p - } - - override protected[akka] def aroundReceive(receive: Receive, message: Any): Unit = { - super.aroundReceive(receive, message) - message match { - case WriteMessagesSuccessful | WriteMessagesFailed(_) ⇒ - // activate reader after to reduce delivery latency - reader ! RequestsWritten - case _ ⇒ - } - } - - override def preRestart(reason: Throwable, message: Option[Any]): Unit = { - self ! Recover(replayMax = 0L) - } - - override def preStart(): Unit = { - self ! Recover(replayMax = 0L) - } -} - -@deprecated("Channel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -private object RequestWriter { - case object RequestsWritten -} - -/** - * Reads [[Deliver]] requests from the journal and processes them. The number of `Deliver` requests - * processed per iteration depends on - * - * - `pendingConfirmationsMax` parameter in [[PersistentChannelSettings]] - * - `pendingConfirmationsMin` parameter in [[PersistentChannelSettings]] and the - * - current number of pending confirmations. 
- * - * @see [[PersistentChannel]] - */ -@deprecated("PersistentChannel will be removed, see `akka.persistence.AtLeastOnceDelivery` instead.", since = "2.3.4") -private class RequestReader(channelId: String, channelSettings: PersistentChannelSettings) extends Actor with Recovery { - import RequestWriter._ - import channelSettings._ - - private val delivery = context.actorOf(Props(classOf[ReliableDelivery], channelSettings.toChannelSettings)) - - private val idle: State = new State { - override def toString: String = "idle" - - def aroundReceive(receive: Receive, message: Any): Unit = message match { - case r: Recover ⇒ // ignore - case other ⇒ process(receive, other) - } - } - - def receive = { - case p @ Persistent(d @ Deliver(wrapped: PersistentRepr, destination), snr) ⇒ - val wrapper = p.asInstanceOf[PersistentRepr] - val prepared = prepareDelivery(wrapped, wrapper) - numReplayed += 1 - numPending += 1 - delivery forward d.copy(prepared) - case d: Delivered ⇒ - delivery forward d - numPending = math.max(numPending - 1L, 0L) - if (numPending == pendingConfirmationsMin) onReadRequest() - case d @ RedeliverFailure(ms) ⇒ - val numPendingPrev = numPending - numPending = math.max(numPending - ms.length, 0L) - if (numPendingPrev > pendingConfirmationsMin && numPending <= pendingConfirmationsMin) onReadRequest() - redeliverFailureListener.foreach(_.tell(d, context.parent)) - case RequestsWritten | ReceiveTimeout ⇒ - if (numPending <= pendingConfirmationsMin) onReadRequest() - case Reset ⇒ throw new ResetException - } - - def onReplaySuccess(receive: Receive, await: Boolean): Unit = { - onReplayComplete() - if (numReplayed > 0 && numPending <= pendingConfirmationsMin) onReadRequest() - numReplayed = 0L - } - - def onReplayFailure(receive: Receive, await: Boolean, cause: Throwable): Unit = { - onReplayComplete() - } - - override def persistenceId: String = channelId - - def snapshotterId: String = - s"${channelId}-reader" - - private val dbJournal = 
extension.deletionBatchingJournalForChannel(channelId) - - /** - * Number of delivery requests replayed (read) per iteration. - */ - private var numReplayed = 0L - - /** - * Number of pending confirmations. - */ - private var numPending = 0L - - context.setReceiveTimeout(channelSettings.idleTimeout) - - private def onReplayComplete(): Unit = { - _currentState = idle - receiverStash.unstashAll() - } - - private def onReadRequest(): Unit = if (_currentState == idle) { - _currentState = replayStarted(await = false) - dbJournal ! ReplayMessages(lastSequenceNr + 1L, Long.MaxValue, pendingConfirmationsMax - numPending, persistenceId, self) - } - - /** - * @param wrapped persistent message contained in a deliver request - * @param wrapper persistent message that contains a deliver request - */ - private def prepareDelivery(wrapped: PersistentRepr, wrapper: PersistentRepr): PersistentRepr = { - ConfirmablePersistentImpl(wrapped, - confirmTarget = dbJournal, - confirmMessage = DeliveredByPersistentChannel(channelId, wrapper.sequenceNr, channel = self)) - } - - override def preRestart(reason: Throwable, message: Option[Any]): Unit = { - try receiverStash.unstashAll() finally super.preRestart(reason, message) - } - - override def preStart(): Unit = { - super.preStart() - self ! Recover(replayMax = 0L) - self ! 
RequestsWritten // considers savepoint loaded from snapshot (TODO) - } -} diff --git a/akka-persistence/src/main/scala/akka/persistence/PersistentView.scala b/akka-persistence/src/main/scala/akka/persistence/PersistentView.scala index 92d83ee62c..cc9c656de8 100644 --- a/akka-persistence/src/main/scala/akka/persistence/PersistentView.scala +++ b/akka-persistence/src/main/scala/akka/persistence/PersistentView.scala @@ -5,9 +5,16 @@ package akka.persistence import scala.concurrent.duration._ - -import akka.actor._ -import akka.persistence.JournalProtocol._ +import scala.util.control.NonFatal +import akka.actor.AbstractActor +import akka.actor.Actor +import akka.actor.ActorCell +import akka.actor.ActorKilledException +import akka.actor.Cancellable +import akka.actor.Stash +import akka.actor.StashFactory +import akka.actor.UntypedActor +import akka.dispatch.Envelope /** * Instructs a [[PersistentView]] to update itself. This will run a single incremental message replay with @@ -17,8 +24,7 @@ import akka.persistence.JournalProtocol._ * * @param await if `true`, processing of further messages sent to the view will be delayed until the * incremental message replay, triggered by this update request, completes. If `false`, - * any message sent to the view may interleave with replayed [[Persistent]] message - * stream. + * any message sent to the view may interleave with replayed persistent event stream. * @param replayMax maximum number of messages to replay when handling this update request. Defaults * to `Long.MaxValue` (i.e. no limit). 
*/ @@ -68,81 +74,21 @@ object Update { * - [[autoUpdate]] for turning automated updates on or off * - [[autoUpdateReplayMax]] for limiting the number of replayed messages per view update cycle */ -trait PersistentView extends Actor with Recovery { +trait PersistentView extends Actor with Snapshotter with Stash with StashFactory { + import JournalProtocol._ + import SnapshotProtocol.LoadSnapshotResult import context.dispatcher - /** - * INTERNAL API. - * - * Extends the `replayStarted` state of [[Recovery]] with logic to handle [[Update]] requests - * sent by users. - */ - private[persistence] override def replayStarted(await: Boolean) = new State { - private var delegateAwaiting = await - private var delegate = PersistentView.super.replayStarted(await) - - override def toString: String = delegate.toString - - override def aroundReceive(receive: Receive, message: Any) = message match { - case Update(false, _) ⇒ // ignore - case u @ Update(true, _) if !delegateAwaiting ⇒ - delegateAwaiting = true - delegate = PersistentView.super.replayStarted(await = true) - delegate.aroundReceive(receive, u) - case other ⇒ - delegate.aroundReceive(receive, other) - } - } - - /** - * When receiving an [[Update]] request, switches to `replayStarted` state and triggers - * an incremental message replay. Invokes the actor's current behavior for any other - * received message. - */ - private val idle: State = new State { - override def toString: String = "idle" - - def aroundReceive(receive: Receive, message: Any): Unit = message match { - case r: Recover ⇒ // ignore - case Update(awaitUpdate, replayMax) ⇒ - _currentState = replayStarted(await = awaitUpdate) - journal ! ReplayMessages(lastSequenceNr + 1L, Long.MaxValue, replayMax, persistenceId, self) - case other ⇒ process(receive, other) - } - } - - /** - * INTERNAL API. - */ - private[persistence] def onReplaySuccess(receive: Receive, await: Boolean): Unit = - onReplayComplete(await) - - /** - * INTERNAL API. 
- */ - private[persistence] def onReplayFailure(receive: Receive, await: Boolean, cause: Throwable): Unit = - onReplayComplete(await) - - /** - * Switches to `idle` state and schedules the next update if `autoUpdate` returns `true`. - */ - private def onReplayComplete(await: Boolean): Unit = { - _currentState = idle - if (autoUpdate) schedule = Some(context.system.scheduler.scheduleOnce(autoUpdateInterval, self, Update(await = false, autoUpdateReplayMax))) - if (await) receiverStash.unstashAll() - } - - /** - * INTERNAL API - * WARNING: This implementation UNWRAPS PERSISTENT() before delivering to the receive block. - */ - override private[persistence] def runReceive(receive: Receive)(msg: Persistent): Unit = - receive.applyOrElse(msg.payload, unhandled) - + private val extension = Persistence(context.system) private val viewSettings = extension.settings.view + private lazy val journal = extension.journalFor(persistenceId) private var schedule: Option[Cancellable] = None + private var _lastSequenceNr: Long = 0L + private val internalStash = createStash() + private var currentState: State = recoveryPending + /** * View id is used as identifier for snapshots performed by this [[PersistentView]]. * This allows the View to keep separate snapshots of data than the [[PersistentActor]] originating the message stream. @@ -165,6 +111,11 @@ trait PersistentView extends Actor with Recovery { */ def viewId: String + /** + * Id of the persistent entity for which messages should be replayed. + */ + def persistenceId: String + /** * Returns `viewId`. */ @@ -174,8 +125,7 @@ trait PersistentView extends Actor with Recovery { * If `true`, the currently processed message was persisted (is sent from the Journal). * If `false`, the currently processed message comes from another actor (from "user-land"). 
*/ - def isPersistent: Boolean = - currentPersistentMessage.isDefined + def isPersistent: Boolean = currentState.recoveryRunning /** * If `true`, this view automatically updates itself with an interval specified by `autoUpdateInterval`. @@ -202,6 +152,23 @@ trait PersistentView extends Actor with Recovery { def autoUpdateReplayMax: Long = viewSettings.autoUpdateReplayMax + /** + * Highest received sequence number so far or `0L` if this actor hasn't replayed + * any persistent events yet. + */ + def lastSequenceNr: Long = _lastSequenceNr + + /** + * Returns `lastSequenceNr`. + */ + def snapshotSequenceNr: Long = lastSequenceNr + + private def setLastSequenceNr(value: Long): Unit = + _lastSequenceNr = value + + private def updateLastSequenceNr(persistent: PersistentRepr): Unit = + if (persistent.sequenceNr > _lastSequenceNr) _lastSequenceNr = persistent.sequenceNr + /** * Triggers an initial recovery, starting form a snapshot, if any, and replaying at most `autoUpdateReplayMax` * messages (following that snapshot). @@ -211,14 +178,205 @@ trait PersistentView extends Actor with Recovery { self ! Recover(replayMax = autoUpdateReplayMax) } + /** + * INTERNAL API. + */ + override protected[akka] def aroundReceive(receive: Receive, message: Any): Unit = { + currentState.stateReceive(receive, message) + } + override def preRestart(reason: Throwable, message: Option[Any]): Unit = { - try receiverStash.unstashAll() finally super.preRestart(reason, message) + try internalStash.unstashAll() finally super.preRestart(reason, message) } override def postStop(): Unit = { schedule.foreach(_.cancel()) super.postStop() } + + override def unhandled(message: Any): Unit = { + message match { + case RecoveryCompleted ⇒ // mute + case RecoveryFailure(cause) ⇒ + val errorMsg = s"PersistentView killed after recovery failure (persistence id = [${persistenceId}]). " + + "To avoid killing persistent actors on recovery failure, a PersistentView must handle RecoveryFailure messages. 
" + + "RecoveryFailure was caused by: " + cause + throw new ActorKilledException(errorMsg) + case m ⇒ super.unhandled(m) + } + } + + private def changeState(state: State): Unit = { + currentState = state + } + + // TODO There are some duplication of the recovery state management here and in Eventsourced.scala, + // but the enhanced PersistentView will not be based on recovery infrastructure, and + // therefore this code will be replaced anyway + + private trait State { + def stateReceive(receive: Receive, message: Any): Unit + def recoveryRunning: Boolean + } + + /** + * Initial state, waits for `Recover` request, and then submits a `LoadSnapshot` request to the snapshot + * store and changes to `recoveryStarted` state. All incoming messages except `Recover` are stashed. + */ + private def recoveryPending = new State { + override def toString: String = "recovery pending" + override def recoveryRunning: Boolean = true + + override def stateReceive(receive: Receive, message: Any): Unit = message match { + case Recover(fromSnap, toSnr, replayMax) ⇒ + changeState(recoveryStarted(replayMax)) + loadSnapshot(snapshotterId, fromSnap, toSnr) + case _ ⇒ internalStash.stash() + } + } + + /** + * Processes a loaded snapshot, if any. A loaded snapshot is offered with a `SnapshotOffer` + * message to the actor's `receiveRecover`. Then initiates a message replay, either starting + * from the loaded snapshot or from scratch, and switches to `replayStarted` state. + * All incoming messages are stashed. + * + * @param replayMax maximum number of messages to replay. 
+ */ + private def recoveryStarted(replayMax: Long) = new State { + + override def toString: String = s"recovery started (replayMax = [${replayMax}])" + override def recoveryRunning: Boolean = true + + override def stateReceive(receive: Receive, message: Any) = message match { + case r: Recover ⇒ // ignore + case LoadSnapshotResult(sso, toSnr) ⇒ + sso.foreach { + case SelectedSnapshot(metadata, snapshot) ⇒ + setLastSequenceNr(metadata.sequenceNr) + // Since we are recovering we can ignore the receive behavior from the stack + PersistentView.super.aroundReceive(receive, SnapshotOffer(metadata, snapshot)) + } + changeState(replayStarted(await = true)) + journal ! ReplayMessages(lastSequenceNr + 1L, toSnr, replayMax, persistenceId, self) + case other ⇒ internalStash.stash() + } + } + + /** + * Processes replayed messages, if any. The actor's `receiveRecover` is invoked with the replayed + * events. + * + * If replay succeeds it switches to `initializing` state and requests the highest stored sequence + * number from the journal. Otherwise RecoveryFailure is emitted. + * If replay succeeds the `onReplaySuccess` callback method is called, otherwise `onReplayFailure`. + * + * If processing of a replayed event fails, the exception is caught and + * stored for being thrown later and state is changed to `recoveryFailed`. + * + * All incoming messages are stashed. 
+ */ + private def replayStarted(await: Boolean) = new State { + override def toString: String = s"replay started" + override def recoveryRunning: Boolean = true + + private var stashUpdate = await + + override def stateReceive(receive: Receive, message: Any) = message match { + case Update(false, _) ⇒ // ignore + case u @ Update(true, _) if !stashUpdate ⇒ + stashUpdate = true + internalStash.stash() + case r: Recover ⇒ // ignore + case ReplayedMessage(p) ⇒ + try { + updateLastSequenceNr(p) + PersistentView.super.aroundReceive(receive, p.payload) + } catch { + case NonFatal(t) ⇒ + val currentMsg = context.asInstanceOf[ActorCell].currentMessage + changeState(replayFailed(t, currentMsg)) // delay throwing exception to prepareRestart + } + case ReplayMessagesSuccess ⇒ + onReplayComplete(await) + case ReplayMessagesFailure(cause) ⇒ + onReplayComplete(await) + // FIXME what happens if RecoveryFailure is handled, i.e. actor is not stopped? + PersistentView.super.aroundReceive(receive, RecoveryFailure(cause)) + case other ⇒ + internalStash.stash() + } + + /** + * Switches to `idle` state and schedules the next update if `autoUpdate` returns `true`. + */ + private def onReplayComplete(await: Boolean): Unit = { + changeState(idle) + if (autoUpdate) schedule = Some(context.system.scheduler.scheduleOnce(autoUpdateInterval, self, Update(await = false, autoUpdateReplayMax))) + if (await) internalStash.unstashAll() + } + } + + /** + * Consumes remaining replayed messages and then changes to `prepareRestart`. The + * message that caused the exception during replay, is re-added to the mailbox and + * re-received in `prepareRestart` state. 
+ */ + private def replayFailed(cause: Throwable, failureMessage: Envelope) = new State { + + override def toString: String = "replay failed" + override def recoveryRunning: Boolean = true + + override def stateReceive(receive: Receive, message: Any) = message match { + case ReplayMessagesFailure(_) ⇒ + replayCompleted() + // journal couldn't tell the maximum stored sequence number, hence the next + // replay must be a full replay (up to the highest stored sequence number) + // Recover(lastSequenceNr) is sent by preRestart + setLastSequenceNr(Long.MaxValue) + case ReplayMessagesSuccess ⇒ replayCompleted() + case ReplayedMessage(p) ⇒ updateLastSequenceNr(p) + case r: Recover ⇒ // ignore + case _ ⇒ internalStash.stash() + } + + def replayCompleted(): Unit = { + changeState(prepareRestart(cause)) + mailbox.enqueueFirst(self, failureMessage) + } + } + + /** + * Re-receives the replayed message that caused an exception and re-throws that exception. + */ + private def prepareRestart(cause: Throwable) = new State { + override def toString: String = "prepare restart" + override def recoveryRunning: Boolean = true + + override def stateReceive(receive: Receive, message: Any) = message match { + case ReplayedMessage(_) ⇒ throw cause + case _ ⇒ // ignore + } + } + + /** + * When receiving an [[Update]] request, switches to `replayStarted` state and triggers + * an incremental message replay. Invokes the actor's current behavior for any other + * received message. + */ + private val idle: State = new State { + override def toString: String = "idle" + override def recoveryRunning: Boolean = false + + override def stateReceive(receive: Receive, message: Any): Unit = message match { + case r: Recover ⇒ // ignore + case Update(awaitUpdate, replayMax) ⇒ + changeState(replayStarted(await = awaitUpdate)) + journal ! 
ReplayMessages(lastSequenceNr + 1L, Long.MaxValue, replayMax, persistenceId, self) + case other ⇒ PersistentView.super.aroundReceive(receive, other) + } + } + } /** diff --git a/akka-persistence/src/main/scala/akka/persistence/Processor.scala b/akka-persistence/src/main/scala/akka/persistence/Processor.scala deleted file mode 100644 index c0e39a55d3..0000000000 --- a/akka-persistence/src/main/scala/akka/persistence/Processor.scala +++ /dev/null @@ -1,484 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package akka.persistence - -import akka.AkkaException -import akka.actor._ -import akka.dispatch._ -import java.util.concurrent.atomic.AtomicInteger - -/** - * An actor that persists (journals) messages of type [[Persistent]]. Messages of other types are not persisted. - * - * {{{ - * import akka.persistence.{ Persistent, Processor } - * - * class MyProcessor extends Processor { - * def receive = { - * case Persistent(payload, sequenceNr) => // message has been written to journal - * case other => // message has not been written to journal - * } - * } - * - * val processor = actorOf(Props[MyProcessor], name = "myProcessor") - * - * processor ! Persistent("foo") - * processor ! "bar" - * }}} - * - * During start and restart, persistent messages are replayed to a processor so that it can recover internal - * state from these messages. New messages sent to a processor during recovery do not interfere with replayed - * messages, hence applications don't need to wait for a processor to complete its recovery. - * - * Automated recovery can be turned off or customized by overriding the [[preStart]] and [[preRestart]] life - * cycle hooks. If automated recovery is turned off, an application can explicitly recover a processor by - * sending it a [[Recover]] message. - * - * [[Persistent]] messages are assigned sequence numbers that are generated on a per-processor basis. 
A sequence - * starts at `1L` and doesn't contain gaps unless a processor (logically) deletes a message - * - * During recovery, a processor internally buffers new messages until recovery completes, so that new messages - * do not interfere with replayed messages. This internal buffer (the ''processor stash'') is isolated from the - * ''user stash'' inherited by `akka.actor.Stash`. `Processor` implementation classes can therefore use the - * ''user stash'' for stashing/unstashing both persistent and transient messages. - * - * Processors can also store snapshots of internal state by calling [[saveSnapshot]]. During recovery, a saved - * snapshot is offered to the processor with a [[SnapshotOffer]] message, followed by replayed messages, if any, - * that are younger than the snapshot. Default is to offer the latest saved snapshot. - * - * @see [[UntypedProcessor]] - * @see [[Recover]] - * @see [[PersistentBatch]] - */ -@deprecated("Processor will be removed. Instead extend `akka.persistence.PersistentActor` and use it's `persistAsync(command)(callback)` method to get equivalent semantics.", since = "2.3.4") -trait Processor extends ProcessorImpl { - /** - * Persistence id. Defaults to this persistent-actors's path and can be overridden. - */ - override def persistenceId: String = processorId -} - -/** - * INTERNAL API - */ -private[akka] object ProcessorImpl { - // ok to wrap around (2*Int.MaxValue restarts will not happen within a journal roundtrip) - private val instanceIdCounter = new AtomicInteger -} - -/** INTERNAL API */ -@deprecated("Processor will be removed. 
Instead extend `akka.persistence.PersistentActor` and use it's `persistAsync(command)(callback)` method to get equivalent semantics.", since = "2.3.4") -private[akka] trait ProcessorImpl extends Actor with Recovery { - // TODO: remove Processor in favor of PersistentActor #15230 - - import JournalProtocol._ - - private[persistence] val instanceId: Int = ProcessorImpl.instanceIdCounter.incrementAndGet() - - /** - * Processes the highest stored sequence number response from the journal and then switches - * to `processing` state. - */ - private val initializing = new State { - override def toString: String = "initializing" - - def aroundReceive(receive: Receive, message: Any) = message match { - case ReadHighestSequenceNrSuccess(highest) ⇒ - _currentState = processing - sequenceNr = highest - receiverStash.unstashAll() - onRecoveryCompleted(receive) - case ReadHighestSequenceNrFailure(cause) ⇒ - onRecoveryFailure(receive, cause) - case other ⇒ - receiverStash.stash() - } - } - - /** - * Journals and processes new messages, both persistent and transient. 
- */ - private val processing = new State { - override def toString: String = "processing" - - private var batching = false - - def aroundReceive(receive: Receive, message: Any) = message match { - case r: Recover ⇒ // ignore - case ReplayedMessage(p) ⇒ processPersistent(receive, p) // can occur after unstash from user stash - case WriteMessageSuccess(p: PersistentRepr, _) ⇒ processPersistent(receive, p) - case WriteMessageSuccess(r: Resequenceable, _) ⇒ process(receive, r) - case WriteMessageFailure(p, cause, _) ⇒ process(receive, PersistenceFailure(p.payload, p.sequenceNr, cause)) - case LoopMessageSuccess(m, _) ⇒ process(receive, m) - case WriteMessagesSuccessful | WriteMessagesFailed(_) ⇒ - if (processorBatch.isEmpty) batching = false else journalBatch() - case p: PersistentRepr ⇒ - addToBatch(p) - if (!batching || maxBatchSizeReached) journalBatch() - case n: NonPersistentRepr ⇒ - addToBatch(n) - if (!batching || maxBatchSizeReached) journalBatch() - case pb: PersistentBatch ⇒ - // submit all batched messages before submitting this user batch (isolated) - if (!processorBatch.isEmpty) journalBatch() - addToBatch(pb) - journalBatch() - case m ⇒ - // submit all batched messages before looping this message - if (processorBatch.isEmpty) batching = false else journalBatch() - journal forward LoopMessage(m, self, instanceId) - } - - def addToBatch(p: Resequenceable): Unit = p match { - case p: PersistentRepr ⇒ - processorBatch = processorBatch :+ p.update(persistenceId = persistenceId, sequenceNr = nextSequenceNr(), sender = sender()) - case r ⇒ - processorBatch = processorBatch :+ r - } - - def addToBatch(pb: PersistentBatch): Unit = - pb.batch.foreach(addToBatch) - - def maxBatchSizeReached: Boolean = - processorBatch.length >= extension.settings.journal.maxMessageBatchSize - - def journalBatch(): Unit = { - flushJournalBatch() - batching = true - } - } - - /** - * INTERNAL API. 
- * - * Switches to `initializing` state and requests the highest stored sequence number from the journal. - */ - private[persistence] def onReplaySuccess(receive: Receive, awaitReplay: Boolean): Unit = { - _currentState = initializing - journal ! ReadHighestSequenceNr(lastSequenceNr, persistenceId, self) - } - - /** - * INTERNAL API. - */ - private[persistence] def onReplayFailure(receive: Receive, awaitReplay: Boolean, cause: Throwable): Unit = - onRecoveryFailure(receive, cause) - - /** - * Invokes this processor's behavior with a `RecoveryFailure` message. - */ - private def onRecoveryFailure(receive: Receive, cause: Throwable): Unit = - receive.applyOrElse(RecoveryFailure(cause), unhandled) - - /** - * Invokes this processor's behavior with a `RecoveryFinished` message. - */ - private def onRecoveryCompleted(receive: Receive): Unit = - receive.applyOrElse(RecoveryCompleted, unhandled) - - private val _persistenceId = extension.persistenceId(self) - - private var processorBatch = Vector.empty[Resequenceable] - private var sequenceNr: Long = 0L - - /** - * Processor id. Defaults to this processor's path and can be overridden. - */ - @deprecated("Override `persistenceId: String` instead. Processor will be removed.", since = "2.3.4") - override def processorId: String = _persistenceId // TODO: remove processorId - - /** - * Returns `persistenceId`. - */ - def snapshotterId: String = persistenceId - - /** - * Returns `true` if this processor is currently recovering. - */ - def recoveryRunning: Boolean = - _currentState != processing - - /** - * Returns `true` if this processor has successfully finished recovery. - */ - def recoveryFinished: Boolean = - _currentState == processing - - /** - * Marks a persistent message, identified by `sequenceNr`, as deleted. A message marked as deleted is - * not replayed during recovery. This method is usually called inside `preRestartProcessor` when a - * persistent message caused an exception. 
Processors that want to re-receive that persistent message - * during recovery should not call this method. - * - * @param sequenceNr sequence number of the persistent message to be deleted. - */ - @deprecated("deleteMessage(sequenceNr) will be removed. Instead, validate before persist, and use deleteMessages for pruning.", since = "2.3.4") - def deleteMessage(sequenceNr: Long): Unit = { - deleteMessage(sequenceNr, permanent = false) - } - - /** - * Deletes a persistent message identified by `sequenceNr`. If `permanent` is set to `false`, - * the persistent message is marked as deleted in the journal, otherwise it is permanently - * deleted from the journal. A deleted message is not replayed during recovery. This method - * is usually called inside `preRestartProcessor` when a persistent message caused an exception. - * Processors that want to re-receive that persistent message during recovery should not call - * this method. - * - * @param sequenceNr sequence number of the persistent message to be deleted. - * @param permanent if `false`, the message is marked as deleted, otherwise it is permanently deleted. - */ - @deprecated("deleteMessage(sequenceNr) will be removed. Instead, validate before persist, and use deleteMessages for pruning.", since = "2.3.4") - def deleteMessage(sequenceNr: Long, permanent: Boolean): Unit = { - journal ! DeleteMessages(List(PersistentIdImpl(persistenceId, sequenceNr)), permanent) - } - - /** - * Permanently deletes all persistent messages with sequence numbers less than or equal `toSequenceNr`. - * - * @param toSequenceNr upper sequence number bound of persistent messages to be deleted. - */ - def deleteMessages(toSequenceNr: Long): Unit = { - deleteMessages(toSequenceNr, permanent = true) - } - - /** - * Deletes all persistent messages with sequence numbers less than or equal `toSequenceNr`. 
If `permanent` - * is set to `false`, the persistent messages are marked as deleted in the journal, otherwise - * they permanently deleted from the journal. - * - * @param toSequenceNr upper sequence number bound of persistent messages to be deleted. - * @param permanent if `false`, the message is marked as deleted, otherwise it is permanently deleted. - */ - def deleteMessages(toSequenceNr: Long, permanent: Boolean): Unit = { - journal ! DeleteMessagesTo(persistenceId, toSequenceNr, permanent) - } - - /** - * INTERNAL API - */ - private[akka] def flushJournalBatch(): Unit = { - journal ! WriteMessages(processorBatch, self, instanceId) - processorBatch = Vector.empty - } - - /** - * INTERNAL API. - */ - override protected[akka] def aroundPostStop(): Unit = { - // calls `super.aroundPostStop` to allow Processor to be used as a stackable modification - try unstashAll(unstashFilterPredicate) finally super.aroundPostStop() - } - - /** - * INTERNAL API. - */ - override protected[akka] def aroundPreRestart(reason: Throwable, message: Option[Any]): Unit = { - try { - receiverStash.prepend(processorBatch.map(p ⇒ Envelope(p, p.sender, context.system))) - receiverStash.unstashAll() - unstashAll(unstashFilterPredicate) - } finally { - message match { - case Some(WriteMessageSuccess(m, _)) ⇒ super.aroundPreRestart(reason, Some(m)) - case Some(LoopMessageSuccess(m, _)) ⇒ super.aroundPreRestart(reason, Some(m)) - case Some(ReplayedMessage(m)) ⇒ super.aroundPreRestart(reason, Some(m)) - case mo ⇒ super.aroundPreRestart(reason, None) - } - } - } - - /** - * User-overridable callback. Called when a processor is started. Default implementation sends - * a `Recover()` to `self`. - */ - @throws(classOf[Exception]) - override def preStart(): Unit = { - self ! Recover() - } - - /** - * User-overridable callback. Called before a processor is restarted. Default implementation sends - * a `Recover(lastSequenceNr)` message to `self` if `message` is defined, `Recover() otherwise`. 
- */ - override def preRestart(reason: Throwable, message: Option[Any]): Unit = { - super.preRestart(reason, message) - message match { - case Some(_) ⇒ self ! Recover(toSequenceNr = lastSequenceNr) - case None ⇒ self ! Recover() - } - } - - override def unhandled(message: Any): Unit = { - message match { - case RecoveryCompleted ⇒ // mute - case RecoveryFailure(cause) ⇒ - val errorMsg = s"Processor killed after recovery failure (persisten id = [${persistenceId}]). " + - "To avoid killing processors on recovery failure, a processor must handle RecoveryFailure messages. " + - "RecoveryFailure was caused by: " + cause - throw new ActorKilledException(errorMsg) - case PersistenceFailure(payload, sequenceNumber, cause) ⇒ - val errorMsg = "Processor killed after persistence failure " + - s"(persistent id = [${persistenceId}], sequence nr = [${sequenceNumber}], payload class = [${payload.getClass.getName}]). " + - "To avoid killing processors on persistence failure, a processor must handle PersistenceFailure messages. " + - "PersistenceFailure was caused by: " + cause - throw new ActorKilledException(errorMsg) - case m ⇒ super.unhandled(m) - } - } - - private def nextSequenceNr(): Long = { - sequenceNr += 1L - sequenceNr - } - - private val unstashFilterPredicate: Any ⇒ Boolean = { - case _: WriteMessageSuccess ⇒ false - case _: ReplayedMessage ⇒ false - case _ ⇒ true - } -} - -/** - * Sent to a [[Processor]] if a journal fails to write a [[Persistent]] message. If - * not handled, an `akka.actor.ActorKilledException` is thrown by that processor. - * - * @param payload payload of the persistent message. - * @param sequenceNr sequence number of the persistent message. - * @param cause failure cause. - */ -@SerialVersionUID(1L) -case class PersistenceFailure(payload: Any, sequenceNr: Long, cause: Throwable) - -/** - * Sent to a [[Processor]] if a journal fails to replay messages or fetch that processor's - * highest sequence number. 
If not handled, the prossor will be stopped. - */ -@SerialVersionUID(1L) -case class RecoveryFailure(cause: Throwable) - -abstract class RecoveryCompleted -/** - * Sent to a [[Processor]] when the journal replay has been finished. - */ -@SerialVersionUID(1L) -case object RecoveryCompleted extends RecoveryCompleted { - /** - * Java API: get the singleton instance - */ - def getInstance = this -} - -/** - * Java API: an actor that persists (journals) messages of type [[Persistent]]. Messages of other types - * are not persisted. - * - * {{{ - * import akka.persistence.Persistent; - * import akka.persistence.Processor; - * - * class MyProcessor extends UntypedProcessor { - * public void onReceive(Object message) throws Exception { - * if (message instanceof Persistent) { - * // message has been written to journal - * Persistent persistent = (Persistent)message; - * Object payload = persistent.payload(); - * Long sequenceNr = persistent.sequenceNr(); - * // ... - * } else { - * // message has not been written to journal - * } - * } - * } - * - * // ... - * - * ActorRef processor = getContext().actorOf(Props.create(MyProcessor.class), "myProcessor"); - * - * processor.tell(Persistent.create("foo"), null); - * processor.tell("bar", null); - * }}} - * - * During start and restart, persistent messages are replayed to a processor so that it can recover internal - * state from these messages. New messages sent to a processor during recovery do not interfere with replayed - * messages, hence applications don't need to wait for a processor to complete its recovery. - * - * Automated recovery can be turned off or customized by overriding the [[preStart]] and [[preRestart]] life - * cycle hooks. If automated recovery is turned off, an application can explicitly recover a processor by - * sending it a [[Recover]] message. - * - * [[Persistent]] messages are assigned sequence numbers that are generated on a per-processor basis. 
A sequence - * starts at `1L` and doesn't contain gaps unless a processor (logically) deletes a message. - * - * During recovery, a processor internally buffers new messages until recovery completes, so that new messages - * do not interfere with replayed messages. This internal buffer (the ''processor stash'') is isolated from the - * ''user stash'' inherited by `akka.actor.Stash`. `Processor` implementation classes can therefore use the - * ''user stash'' for stashing/unstashing both persistent and transient messages. - * - * Processors can also store snapshots of internal state by calling [[saveSnapshot]]. During recovery, a saved - * snapshot is offered to the processor with a [[SnapshotOffer]] message, followed by replayed messages, if any, - * that are younger than the snapshot. Default is to offer the latest saved snapshot. - * - * @see [[Processor]] - * @see [[Recover]] - * @see [[PersistentBatch]] - */ -@deprecated("UntypedProcessor will be removed. Instead extend `akka.persistence.UntypedPersistentActor` and use it's `persistAsync(command)(callback)` method to get equivalent semantics.", since = "2.3.4") -abstract class UntypedProcessor extends UntypedActor with Processor - -/** - * Java API: compatible with lambda expressions - * - * An actor that persists (journals) messages of type [[Persistent]]. Messages of other types - * are not persisted. - *

- * Example: - *

- * class MyProcessor extends AbstractProcessor {
- *   public MyProcessor() {
- *     receive(ReceiveBuilder.
- *       match(Persistent.class, p -> {
- *         Object payload = p.payload();
- *         Long sequenceNr = p.sequenceNr();
- *                 // ...
- *       }).build()
- *     );
- *   }
- * }
- *
- * // ...
- *
- * ActorRef processor = context().actorOf(Props.create(MyProcessor.class), "myProcessor");
- *
- * processor.tell(Persistent.create("foo"), null);
- * processor.tell("bar", null);
- * 
- * - * During start and restart, persistent messages are replayed to a processor so that it can recover internal - * state from these messages. New messages sent to a processor during recovery do not interfere with replayed - * messages, hence applications don't need to wait for a processor to complete its recovery. - * - * Automated recovery can be turned off or customized by overriding the [[preStart]] and [[preRestart]] life - * cycle hooks. If automated recovery is turned off, an application can explicitly recover a processor by - * sending it a [[Recover]] message. - * - * [[Persistent]] messages are assigned sequence numbers that are generated on a per-processor basis. A sequence - * starts at `1L` and doesn't contain gaps unless a processor (logically) deletes a message. - * - * During recovery, a processor internally buffers new messages until recovery completes, so that new messages - * do not interfere with replayed messages. This internal buffer (the ''processor stash'') is isolated from the - * ''user stash'' inherited by `akka.actor.Stash`. `Processor` implementation classes can therefore use the - * ''user stash'' for stashing/unstashing both persistent and transient messages. - * - * Processors can also store snapshots of internal state by calling [[saveSnapshot]]. During recovery, a saved - * snapshot is offered to the processor with a [[SnapshotOffer]] message, followed by replayed messages, if any, - * that are younger than the snapshot. Default is to offer the latest saved snapshot. - * - * @see [[Processor]] - * @see [[Recover]] - * @see [[PersistentBatch]] - */ -@deprecated("AbstractProcessor will be removed. 
Instead extend `akka.persistence.AbstractPersistentActor` and use it's `persistAsync(command)(callback)` method to get equivalent semantics.", since = "2.3.4") -abstract class AbstractProcessor extends AbstractActor with Processor diff --git a/akka-persistence/src/main/scala/akka/persistence/Recovery.scala b/akka-persistence/src/main/scala/akka/persistence/Recovery.scala deleted file mode 100644 index 93ea3053b0..0000000000 --- a/akka-persistence/src/main/scala/akka/persistence/Recovery.scala +++ /dev/null @@ -1,328 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package akka.persistence - -import akka.actor._ -import akka.dispatch.Envelope -import akka.persistence.JournalProtocol._ -import akka.persistence.SnapshotProtocol.LoadSnapshotResult - -import scala.util.control.NonFatal - -/** - * Recovery state machine that loads snapshots and replays messages. - * - * @see [[PersistentActor]] - * @see [[PersistentView]] - */ -trait Recovery extends Actor with Snapshotter with Stash with StashFactory { - /** - * INTERNAL API. - * - * Recovery state. - */ - private[persistence] trait State { - def aroundReceive(receive: Receive, message: Any): Unit - - protected def process(receive: Receive, message: Any) = - // calls `Recovery.super.aroundReceive` to allow Processor to be used as a stackable modification - Recovery.super.aroundReceive(receive, message) - - protected def processPersistent(receive: Receive, persistent: Persistent) = - withCurrentPersistent(persistent)(runReceive(receive)) - - protected def recordFailure(cause: Throwable): Unit = { - _recoveryFailureCause = cause - _recoveryFailureMessage = context.asInstanceOf[ActorCell].currentMessage - } - } - - /** - * INTERNAL API. - * - * This is used to deliver a persistent message to the actor’s behavior - * through withCurrentPersistent(). 
- */ - private[persistence] def runReceive(receive: Receive)(msg: Persistent): Unit = - // calls `Recovery.super.aroundReceive` to allow Processor to be used as a stackable modification - Recovery.super.aroundReceive(receive, msg) - - /** - * INTERNAL API. - * - * Initial state, waits for `Recover` request, submit a `LoadSnapshot` request to the snapshot - * store and changes to `recoveryStarted` state. - */ - private[persistence] val recoveryPending = new State { - override def toString: String = "recovery pending" - - def aroundReceive(receive: Receive, message: Any): Unit = message match { - case Recover(fromSnap, toSnr, replayMax) ⇒ - _currentState = recoveryStarted(replayMax) - loadSnapshot(snapshotterId, fromSnap, toSnr) - case _ ⇒ receiverStash.stash() - } - } - - /** - * INTERNAL API. - * - * Processes a loaded snapshot, if any. A loaded snapshot is offered with a `SnapshotOffer` - * message to the actor's current behavior. Then initiates a message replay, either starting - * from the loaded snapshot or from scratch, and switches to `replayStarted` state. - * - * @param replayMax maximum number of messages to replay. - */ - private[persistence] def recoveryStarted(replayMax: Long) = new State { - override def toString: String = s"recovery started (replayMax = [${replayMax}])" - - def aroundReceive(receive: Receive, message: Any) = message match { - case r: Recover ⇒ // ignore - case LoadSnapshotResult(sso, toSnr) ⇒ - sso.foreach { - case SelectedSnapshot(metadata, snapshot) ⇒ - updateLastSequenceNr(metadata.sequenceNr) - process(receive, SnapshotOffer(metadata, snapshot)) - } - _currentState = replayStarted(await = true) - journal ! ReplayMessages(lastSequenceNr + 1L, toSnr, replayMax, persistenceId, self) - case other ⇒ receiverStash.stash() - } - } - - /** - * INTERNAL API. - * - * Processes replayed messages, if any. The actor's current behavior is invoked with the replayed - * [[Persistent]] messages. 
If processing of a replayed message fails, the exception is caught and - * stored for being thrown later and state is changed to `recoveryFailed`. If replay succeeds the - * `onReplaySuccess` method is called, otherwise `onReplayFailure`. - * - * @param await if `true` processing of further messages will be delayed until replay completes, - * otherwise, the actor's behavior is invoked immediately with these messages. - */ - private[persistence] def replayStarted(await: Boolean) = new State { - override def toString: String = s"replay started (await = [${await}])" - - def aroundReceive(receive: Receive, message: Any) = message match { - case r: Recover ⇒ // ignore - case ReplayedMessage(p) ⇒ - try processPersistent(receive, p) catch { - case NonFatal(t) ⇒ - _currentState = replayFailed // delay throwing exception to prepareRestart - recordFailure(t) - } - case ReplayMessagesSuccess ⇒ onReplaySuccess(receive, await) - case ReplayMessagesFailure(cause) ⇒ onReplayFailure(receive, await, cause) - case other ⇒ - if (await) receiverStash.stash() else process(receive, other) - } - } - - /** - * INTERNAL API. - * - * Consumes remaining replayed messages and then changes to `prepareRestart`. The - * message that caused the exception during replay, is re-added to the mailbox and - * re-received in `prepareRestart`. 
- */ - private[persistence] val replayFailed = new State { - override def toString: String = "replay failed" - - def aroundReceive(receive: Receive, message: Any) = message match { - case ReplayMessagesFailure(_) ⇒ - replayCompleted() - // journal couldn't tell the maximum stored sequence number, hence the next - // replay must be a full replay (up to the highest stored sequence number) - updateLastSequenceNr(Long.MaxValue) - case ReplayMessagesSuccess ⇒ replayCompleted() - case ReplayedMessage(p) ⇒ updateLastSequenceNr(p) - case r: Recover ⇒ // ignore - case _ ⇒ receiverStash.stash() - } - - def replayCompleted(): Unit = { - _currentState = prepareRestart - mailbox.enqueueFirst(self, _recoveryFailureMessage) - } - } - - /** - * INTERNAL API. - * - * Re-receives the replayed message that caused an exception and re-throws that exception. - */ - private[persistence] val prepareRestart = new State { - override def toString: String = "prepare restart" - - def aroundReceive(receive: Receive, message: Any) = message match { - case ReplayedMessage(_) ⇒ throw _recoveryFailureCause - case _ ⇒ // ignore - } - } - - private var _recoveryFailureCause: Throwable = _ - private var _recoveryFailureMessage: Envelope = _ - - private var _lastSequenceNr: Long = 0L - private var _currentPersistent: Persistent = _ - - /** - * Id of the processor for which messages should be replayed. - */ - @deprecated("Override `persistenceId` instead. Processor will be removed.", since = "2.3.4") - def processorId: String = extension.persistenceId(self) // TODO: remove processorId - - /** - * Id of the persistent entity for which messages should be replayed. - */ - def persistenceId: String - - /** INTERNAL API */ - private[persistence] def withCurrentPersistent(persistent: Persistent)(body: Persistent ⇒ Unit): Unit = try { - _currentPersistent = persistent - updateLastSequenceNr(persistent) - body(persistent) - } finally _currentPersistent = null - - /** INTERNAL API. 
*/ - private[persistence] def updateLastSequenceNr(persistent: Persistent): Unit = - if (persistent.sequenceNr > _lastSequenceNr) _lastSequenceNr = persistent.sequenceNr - - /** INTERNAL API. */ - private[persistence] def updateLastSequenceNr(value: Long): Unit = - _lastSequenceNr = value - - /** - * Returns the current persistent message if there is any. - */ - @deprecated("currentPersistentMessage will be removed, sequence number can be retrieved with `lastSequenceNr`.", since = "2.3.4") - implicit def currentPersistentMessage: Option[Persistent] = Option(_currentPersistent) - - /** - * Java API: returns the current persistent message or `null` if there is none. - */ - @deprecated("getCurrentPersistentMessage will be removed, sequence number can be retrieved with `lastSequenceNr`.", since = "2.3.4") - def getCurrentPersistentMessage = currentPersistentMessage.getOrElse(null) - - /** - * Highest received sequence number so far or `0L` if this actor hasn't received a persistent - * message yet. Usually equal to the sequence number of `currentPersistentMessage` (unless a - * receiver implementation is about to re-order persistent messages using `stash()` and `unstash()`). - */ - def lastSequenceNr: Long = _lastSequenceNr - - /** - * Returns `lastSequenceNr`. - */ - def snapshotSequenceNr: Long = lastSequenceNr - - /** - * INTERNAL API. - */ - private[persistence] var _currentState: State = recoveryPending - - /** - * INTERNAL API. - * - * Called whenever a message replay succeeds. - * - * @param receive the actor's current behavior. - * @param awaitReplay `awaitReplay` value of the calling `replayStarted` state. - */ - private[persistence] def onReplaySuccess(receive: Receive, awaitReplay: Boolean): Unit - - /** - * INTERNAL API. - * - * Called whenever a message replay fails. - * - * @param receive the actor's current behavior. - * @param awaitReplay `awaitReplay` value of the calling `replayStarted` state. - * @param cause failure cause. 
- */ - private[persistence] def onReplayFailure(receive: Receive, awaitReplay: Boolean, cause: Throwable): Unit - - /** - * INTERNAL API. - */ - private[persistence] val extension = Persistence(context.system) - - /** - * INTERNAL API. - */ - private[persistence] lazy val journal = extension.journalFor(persistenceId) - - /** - * INTERNAL API. - */ - private[persistence] val receiverStash = createStash() - - /** - * INTERNAL API. - */ - override protected[akka] def aroundReceive(receive: Receive, message: Any): Unit = { - _currentState.aroundReceive(receive, message) - } -} - -/** - * Instructs a persistent actor to recover itself. Recovery will start from a snapshot if the persistent actor has - * previously saved one or more snapshots and at least one of these snapshots matches the specified - * `fromSnapshot` criteria. Otherwise, recovery will start from scratch by replaying all journaled - * messages. - * - * If recovery starts from a snapshot, the persistent actor is offered that snapshot with a [[SnapshotOffer]] - * message, followed by replayed messages, if any, that are younger than the snapshot, up to the - * specified upper sequence number bound (`toSequenceNr`). - * - * @param fromSnapshot criteria for selecting a saved snapshot from which recovery should start. Default - * is latest (= youngest) snapshot. - * @param toSequenceNr upper sequence number bound (inclusive) for recovery. Default is no upper bound. - * @param replayMax maximum number of messages to replay. Default is no limit. - */ -@SerialVersionUID(1L) -final case class Recover(fromSnapshot: SnapshotSelectionCriteria = SnapshotSelectionCriteria.Latest, toSequenceNr: Long = Long.MaxValue, replayMax: Long = Long.MaxValue) - -object Recover { - /** - * Java API. - * - * @see [[Recover]] - */ - def create() = Recover() - - /** - * Java API. - * - * @see [[Recover]] - */ - def create(toSequenceNr: Long) = - Recover(toSequenceNr = toSequenceNr) - - /** - * Java API. 
- * - * @see [[Recover]] - */ - def create(fromSnapshot: SnapshotSelectionCriteria) = - Recover(fromSnapshot = fromSnapshot) - - /** - * Java API. - * - * @see [[Recover]] - */ - def create(fromSnapshot: SnapshotSelectionCriteria, toSequenceNr: Long) = - Recover(fromSnapshot, toSequenceNr) - - /** - * Java API. - * - * @see [[Recover]] - */ - def create(fromSnapshot: SnapshotSelectionCriteria, toSequenceNr: Long, replayMax: Long) = - Recover(fromSnapshot, toSequenceNr, replayMax) -} diff --git a/akka-persistence/src/main/scala/akka/persistence/Snapshot.scala b/akka-persistence/src/main/scala/akka/persistence/Snapshot.scala index f9ce6dbf3a..72c3dfc864 100644 --- a/akka-persistence/src/main/scala/akka/persistence/Snapshot.scala +++ b/akka-persistence/src/main/scala/akka/persistence/Snapshot.scala @@ -13,10 +13,7 @@ package akka.persistence * @param timestamp time at which the snapshot was saved. */ @SerialVersionUID(1L) //#snapshot-metadata -final case class SnapshotMetadata(@deprecatedName('processorId) persistenceId: String, sequenceNr: Long, timestamp: Long = 0L) { - @deprecated("Use persistenceId instead.", since = "2.3.4") - def processorId: String = persistenceId -} +final case class SnapshotMetadata(persistenceId: String, sequenceNr: Long, timestamp: Long = 0L) //#snapshot-metadata /** @@ -123,10 +120,7 @@ private[persistence] object SnapshotProtocol { * @param criteria criteria for selecting a snapshot from which recovery should start. * @param toSequenceNr upper sequence number bound (inclusive) for recovery. */ - final case class LoadSnapshot(@deprecatedName('processorId) persistenceId: String, criteria: SnapshotSelectionCriteria, toSequenceNr: Long) { - @deprecated("Use persistenceId instead.", since = "2.3.4") - def processorId: String = persistenceId - } + final case class LoadSnapshot(persistenceId: String, criteria: SnapshotSelectionCriteria, toSequenceNr: Long) /** * Response message to a [[LoadSnapshot]] message. 
@@ -156,8 +150,5 @@ private[persistence] object SnapshotProtocol { * @param persistenceId persistent actor id. * @param criteria criteria for selecting snapshots to be deleted. */ - final case class DeleteSnapshots(@deprecatedName('processorId) persistenceId: String, criteria: SnapshotSelectionCriteria) { - @deprecated("Use persistenceId instead.", since = "2.3.4") - def processorId: String = persistenceId - } + final case class DeleteSnapshots(persistenceId: String, criteria: SnapshotSelectionCriteria) } diff --git a/akka-persistence/src/main/scala/akka/persistence/View.scala b/akka-persistence/src/main/scala/akka/persistence/View.scala deleted file mode 100644 index 28048c73b3..0000000000 --- a/akka-persistence/src/main/scala/akka/persistence/View.scala +++ /dev/null @@ -1,178 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package akka.persistence - -import scala.concurrent.duration._ - -import akka.actor._ -import akka.persistence.JournalProtocol._ - -/** - * A view replicates the persistent message stream of a processor. Implementation classes receive the - * message stream as [[Persistent]] messages. These messages can be processed to update internal state - * in order to maintain an (eventual consistent) view of the state of the corresponding processor. A - * view can also run on a different node, provided that a replicated journal is used. Implementation - * classes reference a processor by implementing `persistenceId`. - * - * Views can also store snapshots of internal state by calling [[#saveSnapshot]]. The snapshots of a view - * are independent of those of the referenced processor. During recovery, a saved snapshot is offered - * to the view with a [[SnapshotOffer]] message, followed by replayed messages, if any, that are younger - * than the snapshot. Default is to offer the latest saved snapshot. - * - * By default, a view automatically updates itself with an interval returned by `autoUpdateInterval`. 
- * This method can be overridden by implementation classes to define a view instance-specific update - * interval. The default update interval for all views of an actor system can be configured with the - * `akka.persistence.view.auto-update-interval` configuration key. Applications may trigger additional - * view updates by sending the view [[Update]] requests. See also methods - * - * - [[#autoUpdate]] for turning automated updates on or off - * - [[#autoUpdateReplayMax]] for limiting the number of replayed messages per view update cycle - * - * Views can also use channels to communicate with destinations in the same way as processors can do. - */ -@deprecated("Use `akka.persistence.PersistentView` instead.", since = "2.3.4") -trait View extends Actor with Recovery { - import context.dispatcher - - /** - * INTERNAL API. - * - * Extends the `replayStarted` state of [[Recovery]] with logic to handle [[Update]] requests - * sent by users. - */ - private[persistence] override def replayStarted(await: Boolean) = new State { - private var delegateAwaiting = await - private var delegate = View.super.replayStarted(await) - - override def toString: String = delegate.toString - - override def aroundReceive(receive: Receive, message: Any) = message match { - case Update(false, _) ⇒ // ignore - case u @ Update(true, _) if !delegateAwaiting ⇒ - delegateAwaiting = true - delegate = View.super.replayStarted(await = true) - delegate.aroundReceive(receive, u) - case other ⇒ - delegate.aroundReceive(receive, other) - } - } - - /** - * When receiving an [[Update]] request, switches to `replayStarted` state and triggers - * an incremental message replay. Invokes the actor's current behavior for any other - * received message. 
- */ - private val idle: State = new State { - override def toString: String = "idle" - - def aroundReceive(receive: Receive, message: Any): Unit = message match { - case r: Recover ⇒ // ignore - case Update(awaitUpdate, replayMax) ⇒ - _currentState = replayStarted(await = awaitUpdate) - journal ! ReplayMessages(lastSequenceNr + 1L, Long.MaxValue, replayMax, persistenceId, self) - case other ⇒ process(receive, other) - } - } - - /** - * INTERNAL API. - */ - private[persistence] def onReplaySuccess(receive: Receive, await: Boolean): Unit = - onReplayComplete(await) - - /** - * INTERNAL API. - */ - private[persistence] def onReplayFailure(receive: Receive, await: Boolean, cause: Throwable): Unit = - onReplayComplete(await) - - /** - * Switches to `idle` state and schedules the next update if `autoUpdate` returns `true`. - */ - private def onReplayComplete(await: Boolean): Unit = { - _currentState = idle - if (autoUpdate) schedule = Some(context.system.scheduler.scheduleOnce(autoUpdateInterval, self, Update(await = false, autoUpdateReplayMax))) - if (await) receiverStash.unstashAll() - } - - private val _viewId = extension.persistenceId(self) - private val viewSettings = extension.settings.view - - private var schedule: Option[Cancellable] = None - - /** - * View id. Defaults to this view's path and can be overridden. - */ - def viewId: String = _viewId - - /** - * Returns `viewId`. - */ - def snapshotterId: String = viewId - - /** - * Persistence id. Defaults to this persistent-actors's path and can be overridden. - */ - override def persistenceId: String = processorId - - /** - * If `true`, this view automatically updates itself with an interval specified by `autoUpdateInterval`. - * If `false`, applications must explicitly update this view by sending [[Update]] requests. The default - * value can be configured with the `akka.persistence.view.auto-update` configuration key. This method - * can be overridden by implementation classes to return non-default values. 
- */ - def autoUpdate: Boolean = - viewSettings.autoUpdate - - /** - * The interval for automated updates. The default value can be configured with the - * `akka.persistence.view.auto-update-interval` configuration key. This method can be - * overridden by implementation classes to return non-default values. - */ - def autoUpdateInterval: FiniteDuration = - viewSettings.autoUpdateInterval - - /** - * The maximum number of messages to replay per update. The default value can be configured with the - * `akka.persistence.view.auto-update-replay-max` configuration key. This method can be overridden by - * implementation classes to return non-default values. - */ - def autoUpdateReplayMax: Long = - viewSettings.autoUpdateReplayMax - - /** - * Triggers an initial recovery, starting form a snapshot, if any, and replaying at most `autoUpdateReplayMax` - * messages (following that snapshot). - */ - override def preStart(): Unit = { - super.preStart() - self ! Recover(replayMax = autoUpdateReplayMax) - } - - override def preRestart(reason: Throwable, message: Option[Any]): Unit = { - try receiverStash.unstashAll() finally super.preRestart(reason, message) - } - - override def postStop(): Unit = { - schedule.foreach(_.cancel()) - super.postStop() - } -} - -/** - * Java API. 
- * - * @see [[View]] - */ -@deprecated("Use `akka.persistence.UntypedPersistentView instead.", since = "2.3.4") -abstract class UntypedView extends UntypedActor with View - -/** - * Java API: compatible with lambda expressions (to be used with [[akka.japi.pf.ReceiveBuilder]]) - * - * @see [[View]] - */ -@deprecated("Use `akka.persistence.AbstractPersistentView` instead.", since = "2.3.4") -abstract class AbstractView extends AbstractActor with View diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncRecovery.scala b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncRecovery.scala index e16c497ff0..880eec7053 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncRecovery.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncRecovery.scala @@ -24,9 +24,6 @@ trait AsyncRecovery { * as deleted. In this case a replayed message's `deleted` method must return * `true`. * - * The channel ids of delivery confirmations that are available for a replayed - * message must be contained in that message's `confirms` sequence. - * * @param persistenceId persistent actor id. * @param fromSequenceNr sequence number where replay should start (inclusive). * @param toSequenceNr sequence number where replay should end (inclusive). 
diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteJournal.scala index 7b8d677f60..1de02f47dd 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteJournal.scala @@ -28,61 +28,46 @@ trait AsyncWriteJournal extends Actor with WriteJournalBase with AsyncRecovery { private var resequencerCounter = 1L def receive = { - case WriteMessages(resequenceables, processor, actorInstanceId) ⇒ + case WriteMessages(messages, persistentActor, actorInstanceId) ⇒ val cctr = resequencerCounter - def resequence(f: PersistentRepr ⇒ Any) = resequenceables.zipWithIndex.foreach { - case (p: PersistentRepr, i) ⇒ resequencer ! Desequenced(f(p), cctr + i + 1, processor, p.sender) - case (r, i) ⇒ resequencer ! Desequenced(LoopMessageSuccess(r.payload, actorInstanceId), cctr + i + 1, processor, r.sender) + def resequence(f: PersistentRepr ⇒ Any) = messages.zipWithIndex.foreach { + case (p: PersistentRepr, i) ⇒ resequencer ! Desequenced(f(p), cctr + i + 1, persistentActor, p.sender) + case (r, i) ⇒ resequencer ! Desequenced(LoopMessageSuccess(r.payload, actorInstanceId), cctr + i + 1, persistentActor, r.sender) } - asyncWriteMessages(preparePersistentBatch(resequenceables)) onComplete { + asyncWriteMessages(preparePersistentBatch(messages)) onComplete { case Success(_) ⇒ - resequencer ! Desequenced(WriteMessagesSuccessful, cctr, processor, self) + resequencer ! Desequenced(WriteMessagesSuccessful, cctr, persistentActor, self) resequence(WriteMessageSuccess(_, actorInstanceId)) case Failure(e) ⇒ - resequencer ! Desequenced(WriteMessagesFailed(e), cctr, processor, self) + resequencer ! 
Desequenced(WriteMessagesFailed(e), cctr, persistentActor, self) resequence(WriteMessageFailure(_, e, actorInstanceId)) } - resequencerCounter += resequenceables.length + 1 - case r @ ReplayMessages(fromSequenceNr, toSequenceNr, max, persistenceId, processor, replayDeleted) ⇒ - // Send replayed messages and replay result to processor directly. No need + resequencerCounter += messages.length + 1 + case r @ ReplayMessages(fromSequenceNr, toSequenceNr, max, persistenceId, persistentActor, replayDeleted) ⇒ + // Send replayed messages and replay result to persistentActor directly. No need // to resequence replayed messages relative to written and looped messages. asyncReplayMessages(persistenceId, fromSequenceNr, toSequenceNr, max) { p ⇒ - if (!p.deleted || replayDeleted) processor.tell(ReplayedMessage(p), p.sender) + if (!p.deleted || replayDeleted) persistentActor.tell(ReplayedMessage(p), p.sender) } map { case _ ⇒ ReplayMessagesSuccess } recover { case e ⇒ ReplayMessagesFailure(e) - } pipeTo (processor) onSuccess { + } pipeTo (persistentActor) onSuccess { case _ if publish ⇒ context.system.eventStream.publish(r) } - case ReadHighestSequenceNr(fromSequenceNr, persistenceId, processor) ⇒ - // Send read highest sequence number to processor directly. No need + case ReadHighestSequenceNr(fromSequenceNr, persistenceId, persistentActor) ⇒ + // Send read highest sequence number to persistentActor directly. No need // to resequence the result relative to written and looped messages. asyncReadHighestSequenceNr(persistenceId, fromSequenceNr).map { highest ⇒ ReadHighestSequenceNrSuccess(highest) } recover { case e ⇒ ReadHighestSequenceNrFailure(e) - } pipeTo (processor) - case c @ WriteConfirmations(confirmationsBatch, requestor) ⇒ - asyncWriteConfirmations(confirmationsBatch) onComplete { - case Success(_) ⇒ requestor ! WriteConfirmationsSuccess(confirmationsBatch) - case Failure(e) ⇒ requestor ! 
WriteConfirmationsFailure(e) - } - case d @ DeleteMessages(messageIds, permanent, requestorOption) ⇒ - asyncDeleteMessages(messageIds, permanent) onComplete { - case Success(_) ⇒ - requestorOption.foreach(_ ! DeleteMessagesSuccess(messageIds)) - if (publish) context.system.eventStream.publish(d) - case Failure(e) ⇒ - } + } pipeTo (persistentActor) case d @ DeleteMessagesTo(persistenceId, toSequenceNr, permanent) ⇒ asyncDeleteMessagesTo(persistenceId, toSequenceNr, permanent) onComplete { case Success(_) ⇒ if (publish) context.system.eventStream.publish(d) case Failure(e) ⇒ } - case LoopMessage(message, processor, actorInstanceId) ⇒ - resequencer ! Desequenced(LoopMessageSuccess(message, actorInstanceId), resequencerCounter, processor, sender) - resequencerCounter += 1 } //#journal-plugin-api @@ -93,20 +78,6 @@ trait AsyncWriteJournal extends Actor with WriteJournalBase with AsyncRecovery { */ def asyncWriteMessages(messages: immutable.Seq[PersistentRepr]): Future[Unit] - /** - * Plugin API: asynchronously writes a batch of delivery confirmations to the journal. - */ - @deprecated("writeConfirmations will be removed, since Channels will be removed.", since = "2.3.4") - def asyncWriteConfirmations(confirmations: immutable.Seq[PersistentConfirmation]): Future[Unit] - - /** - * Plugin API: asynchronously deletes messages identified by `messageIds` from the - * journal. If `permanent` is set to `false`, the persistent messages are marked as - * deleted, otherwise they are permanently deleted. - */ - @deprecated("asyncDeleteMessages will be removed.", since = "2.3.4") - def asyncDeleteMessages(messageIds: immutable.Seq[PersistentId], permanent: Boolean): Future[Unit] - /** * Plugin API: asynchronously deletes all persistent messages up to `toSequenceNr` * (inclusive). 
If `permanent` is set to `false`, the persistent messages are marked @@ -136,7 +107,7 @@ private[persistence] object AsyncWriteJournal { private def resequence(d: Desequenced) { if (d.snr == delivered + 1) { delivered = d.snr - d.target tell (d.msg, d.sender) + d.target.tell(d.msg, d.sender) } else { delayed += (d.snr -> d) } diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteProxy.scala b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteProxy.scala index 4d506ebdfc..f00e7c9206 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteProxy.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteProxy.scala @@ -40,12 +40,6 @@ private[persistence] trait AsyncWriteProxy extends AsyncWriteJournal with Stash def asyncWriteMessages(messages: immutable.Seq[PersistentRepr]): Future[Unit] = (store ? WriteMessages(messages)).mapTo[Unit] - def asyncWriteConfirmations(confirmations: immutable.Seq[PersistentConfirmation]): Future[Unit] = - (store ? WriteConfirmations(confirmations)).mapTo[Unit] - - def asyncDeleteMessages(messageIds: immutable.Seq[PersistentId], permanent: Boolean): Future[Unit] = - (store ? DeleteMessages(messageIds, permanent)).mapTo[Unit] - def asyncDeleteMessagesTo(persistenceId: String, toSequenceNr: Long, permanent: Boolean): Future[Unit] = (store ? 
DeleteMessagesTo(persistenceId, toSequenceNr, permanent)).mapTo[Unit] @@ -74,12 +68,6 @@ private[persistence] object AsyncWriteTarget { @SerialVersionUID(1L) final case class WriteMessages(messages: immutable.Seq[PersistentRepr]) - @SerialVersionUID(1L) - final case class WriteConfirmations(confirmations: immutable.Seq[PersistentConfirmation]) - - @SerialVersionUID(1L) - final case class DeleteMessages(messageIds: immutable.Seq[PersistentId], permanent: Boolean) - @SerialVersionUID(1L) final case class DeleteMessagesTo(persistenceId: String, toSequenceNr: Long, permanent: Boolean) @@ -119,4 +107,4 @@ private class ReplayMediator(replayCallback: PersistentRepr ⇒ Unit, replayComp replayCompletionPromise.failure(new AsyncReplayTimeoutException(s"replay timed out after ${replayTimeout.toSeconds} seconds inactivity")) context.stop(self) } -} \ No newline at end of file +} diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/SyncWriteJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/SyncWriteJournal.scala index 8a59176728..546350b0ac 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/SyncWriteJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/SyncWriteJournal.scala @@ -23,58 +23,43 @@ trait SyncWriteJournal extends Actor with WriteJournalBase with AsyncRecovery { private val publish = extension.settings.internal.publishPluginCommands final def receive = { - case WriteMessages(resequenceables, processor, actorInstanceId) ⇒ - Try(writeMessages(preparePersistentBatch(resequenceables))) match { + case WriteMessages(messages, persistentActor, actorInstanceId) ⇒ + Try(writeMessages(preparePersistentBatch(messages))) match { case Success(_) ⇒ - processor ! 
WriteMessagesSuccessful - resequenceables.foreach { - case p: PersistentRepr ⇒ processor.tell(WriteMessageSuccess(p, actorInstanceId), p.sender) - case r ⇒ processor.tell(LoopMessageSuccess(r.payload, actorInstanceId), r.sender) + persistentActor ! WriteMessagesSuccessful + messages.foreach { + case p: PersistentRepr ⇒ persistentActor.tell(WriteMessageSuccess(p, actorInstanceId), p.sender) + case r ⇒ persistentActor.tell(LoopMessageSuccess(r.payload, actorInstanceId), r.sender) } case Failure(e) ⇒ - processor ! WriteMessagesFailed(e) - resequenceables.foreach { - case p: PersistentRepr ⇒ processor tell (WriteMessageFailure(p, e, actorInstanceId), p.sender) - case r ⇒ processor tell (LoopMessageSuccess(r.payload, actorInstanceId), r.sender) + persistentActor ! WriteMessagesFailed(e) + messages.foreach { + case p: PersistentRepr ⇒ persistentActor.tell(WriteMessageFailure(p, e, actorInstanceId), p.sender) + case r ⇒ persistentActor.tell(LoopMessageSuccess(r.payload, actorInstanceId), r.sender) } throw e } - case r @ ReplayMessages(fromSequenceNr, toSequenceNr, max, persistenceId, processor, replayDeleted) ⇒ + case r @ ReplayMessages(fromSequenceNr, toSequenceNr, max, persistenceId, persistentActor, replayDeleted) ⇒ asyncReplayMessages(persistenceId, fromSequenceNr, toSequenceNr, max) { p ⇒ - if (!p.deleted || replayDeleted) processor.tell(ReplayedMessage(p), p.sender) + if (!p.deleted || replayDeleted) persistentActor.tell(ReplayedMessage(p), p.sender) } map { case _ ⇒ ReplayMessagesSuccess } recover { case e ⇒ ReplayMessagesFailure(e) - } pipeTo (processor) onSuccess { + } pipeTo (persistentActor) onSuccess { case _ if publish ⇒ context.system.eventStream.publish(r) } - case ReadHighestSequenceNr(fromSequenceNr, persistenceId, processor) ⇒ + case ReadHighestSequenceNr(fromSequenceNr, persistenceId, persistentActor) ⇒ asyncReadHighestSequenceNr(persistenceId, fromSequenceNr).map { highest ⇒ ReadHighestSequenceNrSuccess(highest) } recover { case e ⇒ 
ReadHighestSequenceNrFailure(e) - } pipeTo (processor) - case WriteConfirmations(confirmationsBatch, requestor) ⇒ - Try(writeConfirmations(confirmationsBatch)) match { - case Success(_) ⇒ requestor ! WriteConfirmationsSuccess(confirmationsBatch) - case Failure(e) ⇒ requestor ! WriteConfirmationsFailure(e) - } - case d @ DeleteMessages(messageIds, permanent, requestorOption) ⇒ - Try(deleteMessages(messageIds, permanent)) match { - case Success(_) ⇒ - requestorOption.foreach(_ ! DeleteMessagesSuccess(messageIds)) - if (publish) context.system.eventStream.publish(d) - case Failure(e) ⇒ - requestorOption.foreach(_ ! DeleteMessagesFailure(e)) - } + } pipeTo (persistentActor) case d @ DeleteMessagesTo(persistenceId, toSequenceNr, permanent) ⇒ Try(deleteMessagesTo(persistenceId, toSequenceNr, permanent)) match { case Success(_) ⇒ if (publish) context.system.eventStream.publish(d) case Failure(e) ⇒ } - case LoopMessage(message, processor, actorInstanceId) ⇒ - processor forward LoopMessageSuccess(message, actorInstanceId) } //#journal-plugin-api @@ -85,20 +70,6 @@ trait SyncWriteJournal extends Actor with WriteJournalBase with AsyncRecovery { */ def writeMessages(messages: immutable.Seq[PersistentRepr]): Unit - /** - * Plugin API: synchronously writes a batch of delivery confirmations to the journal. - */ - @deprecated("writeConfirmations will be removed, since Channels will be removed.", since = "2.3.4") - def writeConfirmations(confirmations: immutable.Seq[PersistentConfirmation]): Unit - - /** - * Plugin API: synchronously deletes messages identified by `messageIds` from the - * journal. If `permanent` is set to `false`, the persistent messages are marked as - * deleted, otherwise they are permanently deleted. - */ - @deprecated("deleteMessages will be removed.", since = "2.3.4") - def deleteMessages(messageIds: immutable.Seq[PersistentId], permanent: Boolean): Unit - /** * Plugin API: synchronously deletes all persistent messages up to `toSequenceNr` * (inclusive). 
If `permanent` is set to `false`, the persistent messages are marked diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/WriteJournalBase.scala b/akka-persistence/src/main/scala/akka/persistence/journal/WriteJournalBase.scala index 79d5e685c6..b28160c98e 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/WriteJournalBase.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/WriteJournalBase.scala @@ -4,17 +4,17 @@ package akka.persistence.journal -import akka.persistence.{ PersistentRepr, Resequenceable } +import akka.persistence.{ PersistentRepr, PersistentEnvelope } import akka.actor.Actor import scala.collection.immutable private[akka] trait WriteJournalBase { this: Actor ⇒ - protected def preparePersistentBatch(rb: immutable.Seq[Resequenceable]): immutable.Seq[PersistentRepr] = + protected def preparePersistentBatch(rb: immutable.Seq[PersistentEnvelope]): immutable.Seq[PersistentRepr] = rb.filter(persistentPrepareWrite).asInstanceOf[immutable.Seq[PersistentRepr]] // filter instead of flatMap to avoid Some allocations - private def persistentPrepareWrite(r: Resequenceable): Boolean = r match { + private def persistentPrepareWrite(r: PersistentEnvelope): Boolean = r match { case p: PersistentRepr ⇒ p.prepareWrite(); true case _ ⇒ diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/inmem/InmemJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/inmem/InmemJournal.scala index 2541892a07..6ee46ba902 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/inmem/InmemJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/inmem/InmemJournal.scala @@ -78,12 +78,6 @@ private[persistence] class InmemStore extends Actor with InmemMessages { def receive = { case WriteMessages(msgs) ⇒ sender() ! msgs.foreach(add) - case WriteConfirmations(cnfs) ⇒ - sender() ! 
cnfs.foreach(cnf ⇒ update(cnf.persistenceId, cnf.sequenceNr)(p ⇒ p.update(confirms = cnf.channelId +: p.confirms))) - case DeleteMessages(msgIds, false) ⇒ - sender() ! msgIds.foreach(msgId ⇒ update(msgId.persistenceId, msgId.sequenceNr)(_.update(deleted = true))) - case DeleteMessages(msgIds, true) ⇒ - sender() ! msgIds.foreach(msgId ⇒ delete(msgId.persistenceId, msgId.sequenceNr)) case DeleteMessagesTo(pid, tsnr, false) ⇒ sender() ! (1L to tsnr foreach { snr ⇒ update(pid, snr)(_.update(deleted = true)) }) case DeleteMessagesTo(pid, tsnr, true) ⇒ diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncWriteJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncWriteJournal.scala index 166ec51570..d3fb5a1e29 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncWriteJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/japi/AsyncWriteJournal.scala @@ -19,12 +19,6 @@ abstract class AsyncWriteJournal extends AsyncRecovery with SAsyncWriteJournal w final def asyncWriteMessages(messages: immutable.Seq[PersistentRepr]) = doAsyncWriteMessages(messages.asJava).map(Unit.unbox) - final def asyncWriteConfirmations(confirmations: immutable.Seq[PersistentConfirmation]) = - doAsyncWriteConfirmations(confirmations.asJava).map(Unit.unbox) - - final def asyncDeleteMessages(messageIds: immutable.Seq[PersistentId], permanent: Boolean) = - doAsyncDeleteMessages(messageIds.asJava, permanent).map(Unit.unbox) - final def asyncDeleteMessagesTo(persistenceId: String, toSequenceNr: Long, permanent: Boolean) = doAsyncDeleteMessagesTo(persistenceId, toSequenceNr, permanent).map(Unit.unbox) } diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/japi/SyncWriteJournal.scala b/akka-persistence/src/main/scala/akka/persistence/journal/japi/SyncWriteJournal.scala index b89c85e07c..18858d88e8 100644 --- 
a/akka-persistence/src/main/scala/akka/persistence/journal/japi/SyncWriteJournal.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/japi/SyncWriteJournal.scala @@ -17,12 +17,6 @@ abstract class SyncWriteJournal extends AsyncRecovery with SSyncWriteJournal wit final def writeMessages(messages: immutable.Seq[PersistentRepr]) = doWriteMessages(messages.asJava) - final def writeConfirmations(confirmations: immutable.Seq[PersistentConfirmation]) = - doWriteConfirmations(confirmations.asJava) - - final def deleteMessages(messageIds: immutable.Seq[PersistentId], permanent: Boolean) = - doDeleteMessages(messageIds.asJava, permanent) - final def deleteMessagesTo(persistenceId: String, toSequenceNr: Long, permanent: Boolean) = doDeleteMessagesTo(persistenceId, toSequenceNr, permanent) } diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbIdMapping.scala b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbIdMapping.scala index b6fcf5ae49..18749d701b 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbIdMapping.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbIdMapping.scala @@ -11,7 +11,7 @@ import akka.actor.Actor /** * INTERNAL API. * - * LevelDB backed persistent mapping of `String`-based persistent actor and channel ids to numeric ids. + * LevelDB backed persistent mapping of `String`-based persistent actor ids to numeric ids. */ private[persistence] trait LeveldbIdMapping extends Actor { this: LeveldbStore ⇒ import Key._ @@ -20,7 +20,7 @@ private[persistence] trait LeveldbIdMapping extends Actor { this: LeveldbStore private var idMap: Map[String, Int] = Map.empty /** - * Get the mapped numeric id for the specified persistent actor or channel `id`. Creates and + * Get the mapped numeric id for the specified persistent actor `id`. Creates and * stores a new mapping if necessary. 
*/ def numericId(id: String): Int = idMap.get(id) match { @@ -29,7 +29,7 @@ private[persistence] trait LeveldbIdMapping extends Actor { this: LeveldbStore } private def readIdMap(): Map[String, Int] = withIterator { iter ⇒ - iter.seek(keyToBytes(idKey(idOffset))) + iter.seek(keyToBytes(mappingKey(idOffset))) readIdMap(Map.empty, iter) } @@ -37,16 +37,16 @@ private[persistence] trait LeveldbIdMapping extends Actor { this: LeveldbStore if (!iter.hasNext) pathMap else { val nextEntry = iter.next() val nextKey = keyFromBytes(nextEntry.getKey) - if (!isIdKey(nextKey)) pathMap else { + if (!isMappingKey(nextKey)) pathMap else { val nextVal = new String(nextEntry.getValue, "UTF-8") - readIdMap(pathMap + (nextVal -> id(nextKey)), iter) + readIdMap(pathMap + (nextVal -> nextKey.mappingId), iter) } } } private def writeIdMapping(id: String, numericId: Int): Int = { idMap = idMap + (id -> numericId) - leveldb.put(keyToBytes(idKey(numericId)), id.getBytes("UTF-8")) + leveldb.put(keyToBytes(mappingKey(numericId)), id.getBytes("UTF-8")) numericId } diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbKey.scala b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbKey.scala index 2756a11827..f8a3357622 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbKey.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbKey.scala @@ -13,14 +13,14 @@ import java.nio.ByteBuffer private[leveldb] final case class Key( persistenceId: Int, sequenceNr: Long, - channelId: Int) + mappingId: Int) private[leveldb] object Key { def keyToBytes(key: Key): Array[Byte] = { val bb = ByteBuffer.allocate(20) bb.putInt(key.persistenceId) bb.putLong(key.sequenceNr) - bb.putInt(key.channelId) + bb.putInt(key.mappingId) bb.array } @@ -28,19 +28,18 @@ private[leveldb] object Key { val bb = ByteBuffer.wrap(bytes) val aid = bb.getInt val snr = bb.getLong - val cid = bb.getInt - new Key(aid, snr, cid) + 
val mid = bb.getInt + new Key(aid, snr, mid) } def counterKey(persistenceId: Int): Key = Key(persistenceId, 0L, 0) def counterToBytes(ctr: Long): Array[Byte] = ByteBuffer.allocate(8).putLong(ctr).array def counterFromBytes(bytes: Array[Byte]): Long = ByteBuffer.wrap(bytes).getLong - def id(key: Key) = key.channelId - def idKey(id: Int) = Key(1, 0L, id) - def isIdKey(key: Key): Boolean = key.persistenceId == 1 + def mappingKey(id: Int) = Key(1, 0L, id) + def isMappingKey(key: Key): Boolean = key.persistenceId == 1 def deletionKey(persistenceId: Int, sequenceNr: Long): Key = Key(persistenceId, sequenceNr, 1) - def isDeletionKey(key: Key): Boolean = key.channelId == 1 + def isDeletionKey(key: Key): Boolean = key.mappingId == 1 } diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbRecovery.scala b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbRecovery.scala index 5f1acef194..bf62730b9a 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbRecovery.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbRecovery.scala @@ -40,34 +40,22 @@ private[persistence] trait LeveldbRecovery extends AsyncRecovery { this: Leveldb val nextKey = keyFromBytes(nextEntry.getKey) if (nextKey.sequenceNr > toSequenceNr) { // end iteration here - } else if (nextKey.channelId != 0) { - // phantom confirmation (just advance iterator) + } else if (isDeletionKey(nextKey)) { + // this case is needed to discard old events with deletion marker go(iter, nextKey, ctr, replayCallback) } else if (key.persistenceId == nextKey.persistenceId) { val msg = persistentFromBytes(nextEntry.getValue) val del = deletion(iter, nextKey) - val cnf = confirms(iter, nextKey, Nil) if (ctr < max) { - replayCallback(msg.update(confirms = cnf, deleted = del)) + replayCallback(msg.update(deleted = del)) go(iter, nextKey, ctr + 1L, replayCallback) } } } } - @scala.annotation.tailrec - def confirms(iter: 
DBIterator, key: Key, channelIds: List[String]): List[String] = { - if (iter.hasNext) { - val nextEntry = iter.peekNext() - val nextKey = keyFromBytes(nextEntry.getKey) - if (key.persistenceId == nextKey.persistenceId && key.sequenceNr == nextKey.sequenceNr) { - val nextValue = new String(nextEntry.getValue, "UTF-8") - iter.next() - confirms(iter, nextKey, nextValue :: channelIds) - } else channelIds - } else channelIds - } - + // need to have this to be able to read journal created with 2.3.x, which + // supported deletion of individual events def deletion(iter: DBIterator, key: Key): Boolean = { if (iter.hasNext) { val nextEntry = iter.peekNext() diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbStore.scala b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbStore.scala index 3283ab3b88..85aa3d4e26 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbStore.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/leveldb/LeveldbStore.scala @@ -43,16 +43,6 @@ private[persistence] trait LeveldbStore extends Actor with LeveldbIdMapping with def writeMessages(messages: immutable.Seq[PersistentRepr]) = withBatch(batch ⇒ messages.foreach(message ⇒ addToMessageBatch(message, batch))) - def writeConfirmations(confirmations: immutable.Seq[PersistentConfirmation]) = - withBatch(batch ⇒ confirmations.foreach(confirmation ⇒ addToConfirmationBatch(confirmation, batch))) - - def deleteMessages(messageIds: immutable.Seq[PersistentId], permanent: Boolean) = withBatch { batch ⇒ - messageIds foreach { messageId ⇒ - if (permanent) batch.delete(keyToBytes(Key(numericId(messageId.persistenceId), messageId.sequenceNr, 0))) - else batch.put(keyToBytes(deletionKey(numericId(messageId.persistenceId), messageId.sequenceNr)), Array.emptyByteArray) - } - } - def deleteMessagesTo(persistenceId: String, toSequenceNr: Long, permanent: Boolean) = withBatch { batch ⇒ val nid = 
numericId(persistenceId) @@ -64,7 +54,7 @@ private[persistence] trait LeveldbStore extends Actor with LeveldbIdMapping with } fromSequenceNr to toSequenceNr foreach { sequenceNr ⇒ - if (permanent) batch.delete(keyToBytes(Key(nid, sequenceNr, 0))) // TODO: delete confirmations and deletion markers, if any. + if (permanent) batch.delete(keyToBytes(Key(nid, sequenceNr, 0))) // TODO: delete deletion markers, if any. else batch.put(keyToBytes(deletionKey(nid, sequenceNr)), Array.emptyByteArray) } } @@ -102,12 +92,6 @@ private[persistence] trait LeveldbStore extends Actor with LeveldbIdMapping with batch.put(keyToBytes(Key(nid, persistent.sequenceNr, 0)), persistentToBytes(persistent)) } - private def addToConfirmationBatch(confirmation: PersistentConfirmation, batch: WriteBatch): Unit = { - val npid = numericId(confirmation.persistenceId) - val ncid = numericId(confirmation.channelId) - batch.put(keyToBytes(Key(npid, confirmation.sequenceNr, ncid)), confirmation.channelId.getBytes("UTF-8")) - } - override def preStart() { leveldb = leveldbFactory.open(leveldbDir, if (nativeLeveldb) leveldbOptions else leveldbOptions.compressionType(CompressionType.NONE)) super.preStart() @@ -129,8 +113,6 @@ class SharedLeveldbStore extends { val configPath = "akka.persistence.journal.le def receive = { case WriteMessages(msgs) ⇒ sender() ! writeMessages(msgs) - case WriteConfirmations(cnfs) ⇒ sender() ! writeConfirmations(cnfs) - case DeleteMessages(messageIds, permanent) ⇒ sender() ! deleteMessages(messageIds, permanent) case DeleteMessagesTo(pid, tsnr, permanent) ⇒ sender() ! deleteMessagesTo(pid, tsnr, permanent) case ReadHighestSequenceNr(pid, fromSequenceNr) ⇒ sender() ! 
readHighestSequenceNr(numericId(pid)) case ReplayMessages(pid, fromSnr, toSnr, max) ⇒ diff --git a/akka-persistence/src/main/scala/akka/persistence/serialization/MessageSerializer.scala b/akka-persistence/src/main/scala/akka/persistence/serialization/MessageSerializer.scala index 21ca6e583b..4fce00eb68 100644 --- a/akka-persistence/src/main/scala/akka/persistence/serialization/MessageSerializer.scala +++ b/akka-persistence/src/main/scala/akka/persistence/serialization/MessageSerializer.scala @@ -21,18 +21,13 @@ import scala.collection.immutable.VectorBuilder trait Message extends Serializable /** - * Protobuf serializer for [[PersistentBatch]], [[PersistentRepr]] and [[Deliver]] messages. + * Protobuf serializer for [[PersistentRepr]] and [[AtLeastOnceDelivery]] messages. */ class MessageSerializer(val system: ExtendedActorSystem) extends Serializer { import PersistentRepr.Undefined - val PersistentBatchClass = classOf[PersistentBatch] val PersistentReprClass = classOf[PersistentRepr] val PersistentImplClass = classOf[PersistentImpl] - val ConfirmablePersistentImplClass = classOf[ConfirmablePersistentImpl] - val DeliveredByTransientChannelClass = classOf[DeliveredByChannel] - val DeliveredByPersistentChannelClass = classOf[DeliveredByPersistentChannel] - val DeliverClass = classOf[Deliver] val AtLeastOnceDeliverySnapshotClass = classOf[AtLeastOnceDeliverySnap] def identifier: Int = 7 @@ -45,35 +40,26 @@ class MessageSerializer(val system: ExtendedActorSystem) extends Serializer { } /** - * Serializes [[PersistentBatch]], [[PersistentRepr]] and [[Deliver]] messages. Delegates - * serialization of a persistent message's payload to a matching `akka.serialization.Serializer`. + * Serializes persistent messages. Delegates serialization of a persistent + * message's payload to a matching `akka.serialization.Serializer`. 
*/ def toBinary(o: AnyRef): Array[Byte] = o match { - case b: PersistentBatch ⇒ persistentMessageBatchBuilder(b).build().toByteArray - case p: PersistentRepr ⇒ persistentMessageBuilder(p).build().toByteArray - case c: DeliveredByChannel ⇒ deliveredMessageBuilder(c).build().toByteArray - case c: DeliveredByPersistentChannel ⇒ deliveredMessageBuilder(c).build().toByteArray - case d: Deliver ⇒ deliverMessageBuilder(d).build.toByteArray - case a: AtLeastOnceDeliverySnap ⇒ atLeastOnceDeliverySnapshotBuilder(a).build.toByteArray - case _ ⇒ throw new IllegalArgumentException(s"Can't serialize object of type ${o.getClass}") + case p: PersistentRepr ⇒ persistentMessageBuilder(p).build().toByteArray + case a: AtLeastOnceDeliverySnap ⇒ atLeastOnceDeliverySnapshotBuilder(a).build.toByteArray + case _ ⇒ throw new IllegalArgumentException(s"Can't serialize object of type ${o.getClass}") } /** - * Deserializes [[PersistentBatch]], [[PersistentRepr]] and [[Deliver]] messages. Delegates - * deserialization of a persistent message's payload to a matching `akka.serialization.Serializer`. + * Deserializes persistent messages. Delegates deserialization of a persistent + * message's payload to a matching `akka.serialization.Serializer`. 
*/ def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): Message = manifest match { case None ⇒ persistent(PersistentMessage.parseFrom(bytes)) case Some(c) ⇒ c match { - case PersistentImplClass ⇒ persistent(PersistentMessage.parseFrom(bytes)) - case ConfirmablePersistentImplClass ⇒ persistent(PersistentMessage.parseFrom(bytes)) - case PersistentReprClass ⇒ persistent(PersistentMessage.parseFrom(bytes)) - case PersistentBatchClass ⇒ persistentBatch(PersistentMessageBatch.parseFrom(bytes)) - case DeliveredByTransientChannelClass ⇒ delivered(DeliveredMessage.parseFrom(bytes)) - case DeliveredByPersistentChannelClass ⇒ delivered(DeliveredMessage.parseFrom(bytes)) - case DeliverClass ⇒ deliver(DeliverMessage.parseFrom(bytes)) - case AtLeastOnceDeliverySnapshotClass ⇒ atLeastOnceDeliverySnapshot(AtLeastOnceDeliverySnapshot.parseFrom(bytes)) - case _ ⇒ throw new IllegalArgumentException(s"Can't deserialize object of type ${c}") + case PersistentImplClass ⇒ persistent(PersistentMessage.parseFrom(bytes)) + case PersistentReprClass ⇒ persistent(PersistentMessage.parseFrom(bytes)) + case AtLeastOnceDeliverySnapshotClass ⇒ atLeastOnceDeliverySnapshot(AtLeastOnceDeliverySnapshot.parseFrom(bytes)) + case _ ⇒ throw new IllegalArgumentException(s"Can't deserialize object of type ${c}") } } @@ -81,13 +67,6 @@ class MessageSerializer(val system: ExtendedActorSystem) extends Serializer { // toBinary helpers // - private def deliverMessageBuilder(deliver: Deliver) = { - val builder = DeliverMessage.newBuilder - builder.setPersistent(persistentMessageBuilder(deliver.persistent.asInstanceOf[PersistentRepr])) - builder.setDestination(deliver.destination.toString) - builder - } - def atLeastOnceDeliverySnapshotBuilder(snap: AtLeastOnceDeliverySnap): AtLeastOnceDeliverySnapshot.Builder = { val builder = AtLeastOnceDeliverySnapshot.newBuilder builder.setCurrentDeliveryId(snap.currentDeliveryId) @@ -115,29 +94,15 @@ class MessageSerializer(val system: ExtendedActorSystem) extends 
Serializer { unconfirmedDeliveries.result()) } - private def persistentMessageBatchBuilder(persistentBatch: PersistentBatch) = { - val builder = PersistentMessageBatch.newBuilder - persistentBatch.batch. - filter(_.isInstanceOf[PersistentRepr]). - foreach(p ⇒ builder.addBatch(persistentMessageBuilder(p.asInstanceOf[PersistentRepr]))) - builder - } - private def persistentMessageBuilder(persistent: PersistentRepr) = { val builder = PersistentMessage.newBuilder if (persistent.persistenceId != Undefined) builder.setPersistenceId(persistent.persistenceId) - if (persistent.confirmMessage != null) builder.setConfirmMessage(deliveredMessageBuilder(persistent.confirmMessage)) - if (persistent.confirmTarget != null) builder.setConfirmTarget(Serialization.serializedActorPath(persistent.confirmTarget)) if (persistent.sender != null) builder.setSender(Serialization.serializedActorPath(persistent.sender)) - persistent.confirms.foreach(builder.addConfirms) - builder.setPayload(persistentPayloadBuilder(persistent.payload.asInstanceOf[AnyRef])) builder.setSequenceNr(persistent.sequenceNr) builder.setDeleted(persistent.deleted) - builder.setRedeliveries(persistent.redeliveries) - builder.setConfirmable(persistent.confirmable) builder } @@ -153,52 +118,23 @@ class MessageSerializer(val system: ExtendedActorSystem) extends Serializer { builder } - // serialize actor references with full address information (defaultAddress) + // serialize actor references with full address information (defaultAddress) transportInformation match { case Some(ti) ⇒ Serialization.currentTransportInformation.withValue(ti) { payloadBuilder() } case None ⇒ payloadBuilder() } } - private def deliveredMessageBuilder(delivered: Delivered) = { - val builder = DeliveredMessage.newBuilder - - if (delivered.channel != null) builder.setChannel(Serialization.serializedActorPath(delivered.channel)) - - builder.setChannelId(delivered.channelId) - builder.setPersistentSequenceNr(delivered.persistentSequenceNr) - 
builder.setDeliverySequenceNr(delivered.deliverySequenceNr) - - delivered match { - case c: DeliveredByChannel ⇒ builder.setPersistenceId(c.persistenceId) - case _ ⇒ builder - } - } - // // fromBinary helpers // - private def deliver(deliverMessage: DeliverMessage): Deliver = { - Deliver( - persistent(deliverMessage.getPersistent), - ActorPath.fromString(deliverMessage.getDestination)) - } - - private def persistentBatch(persistentMessageBatch: PersistentMessageBatch): PersistentBatch = - PersistentBatch(immutableSeq(persistentMessageBatch.getBatchList).map(persistent)) - private def persistent(persistentMessage: PersistentMessage): PersistentRepr = { PersistentRepr( payload(persistentMessage.getPayload), persistentMessage.getSequenceNr, if (persistentMessage.hasPersistenceId) persistentMessage.getPersistenceId else Undefined, persistentMessage.getDeleted, - persistentMessage.getRedeliveries, - immutableSeq(persistentMessage.getConfirmsList), - persistentMessage.getConfirmable, - if (persistentMessage.hasConfirmMessage) delivered(persistentMessage.getConfirmMessage) else null, - if (persistentMessage.hasConfirmTarget) system.provider.resolveActorRef(persistentMessage.getConfirmTarget) else null, if (persistentMessage.hasSender) system.provider.resolveActorRef(persistentMessage.getSender) else null) } @@ -212,22 +148,4 @@ class MessageSerializer(val system: ExtendedActorSystem) extends Serializer { payloadClass).get } - private def delivered(deliveredMessage: DeliveredMessage): Delivered = { - val channel = if (deliveredMessage.hasChannel) system.provider.resolveActorRef(deliveredMessage.getChannel) else null - - if (deliveredMessage.hasPersistenceId) { - DeliveredByChannel( - deliveredMessage.getPersistenceId, - deliveredMessage.getChannelId, - deliveredMessage.getPersistentSequenceNr, - deliveredMessage.getDeliverySequenceNr, - channel) - } else { - DeliveredByPersistentChannel( - deliveredMessage.getChannelId, - deliveredMessage.getPersistentSequenceNr, - 
deliveredMessage.getDeliverySequenceNr, - channel) - } - } } diff --git a/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala b/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala index ca9c627fab..6d34ba5ab9 100644 --- a/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala +++ b/akka-persistence/src/main/scala/akka/persistence/serialization/SnapshotSerializer.scala @@ -14,7 +14,7 @@ import scala.util.Failure /** * Wrapper for snapshot `data`. Snapshot `data` are the actual snapshot objects captured by - * a [[Processor]]. + * the persistent actor. * * @see [[SnapshotSerializer]] */ diff --git a/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotStore.scala b/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotStore.scala index c60eabbd7c..41335dd188 100644 --- a/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotStore.scala +++ b/akka-persistence/src/main/scala/akka/persistence/snapshot/SnapshotStore.scala @@ -55,7 +55,7 @@ trait SnapshotStore extends Actor { /** * Plugin API: asynchronously loads a snapshot. * - * @param persistenceId processor id. + * @param persistenceId id of the persistent actor. * @param criteria selection criteria for loading. */ def loadAsync(persistenceId: String, criteria: SnapshotSelectionCriteria): Future[Option[SelectedSnapshot]] @@ -86,7 +86,7 @@ trait SnapshotStore extends Actor { /** * Plugin API: deletes all snapshots matching `criteria`. * - * @param persistenceId processor id. + * @param persistenceId id of the persistent actor. * @param criteria selection criteria for deleting. 
*/ def delete(persistenceId: String, criteria: SnapshotSelectionCriteria) diff --git a/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliveryCrashSpec.scala b/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliveryCrashSpec.scala index 15dc44c436..acb4a31911 100644 --- a/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliveryCrashSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliveryCrashSpec.scala @@ -12,7 +12,7 @@ object AtLeastOnceDeliveryCrashSpec { class StoppingStrategySupervisor(testProbe: ActorRef) extends Actor { import scala.concurrent.duration._ - override val supervisorStrategy = OneForOneStrategy(maxNrOfRetries = 10, withinTimeRange = 10 seconds) { + override val supervisorStrategy = OneForOneStrategy(maxNrOfRetries = 10, withinTimeRange = 10.seconds) { case _: IllegalStateException ⇒ Stop case t ⇒ super.supervisorStrategy.decider.applyOrElse(t, (_: Any) ⇒ Escalate) } @@ -32,6 +32,8 @@ object AtLeastOnceDeliveryCrashSpec { with AtLeastOnceDelivery with ActorLogging { import CrashingActor._ + override def persistenceId = self.path.name + override def receiveRecover: Receive = { case Message ⇒ send() case CrashMessage ⇒ @@ -68,9 +70,9 @@ class AtLeastOnceDeliveryCrashSpec extends AkkaSpec(PersistenceSpec.config("inme system.stop(superVisor) deathProbe.expectTerminated(superVisor) - testProbe.expectNoMsg(250 millis) + testProbe.expectNoMsg(250.millis) createCrashActorUnderSupervisor() - testProbe.expectNoMsg(1 second) + testProbe.expectNoMsg(1.second) } } -} \ No newline at end of file +} diff --git a/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliveryFailureSpec.scala b/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliveryFailureSpec.scala index c446738b8d..e07ffd0000 100644 --- a/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliveryFailureSpec.scala +++ 
b/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliveryFailureSpec.scala @@ -70,7 +70,7 @@ object AtLeastOnceDeliveryFailureSpec { override def redeliverInterval = 500.milliseconds - override def processorId = "chaosSender" + override def persistenceId = "chaosSender" def receiveCommand: Receive = { case i: Int ⇒ @@ -101,7 +101,7 @@ object AtLeastOnceDeliveryFailureSpec { def receiveRecover: Receive = { case evt: Evt ⇒ updateState(evt) case RecoveryFailure(_) ⇒ - // journal failed during recovery, throw exception to re-recover processor + // journal failed during recovery, throw exception to re-recover persistent actor throw new TestException(debugMessage("recovery failed")) } diff --git a/akka-persistence/src/test/scala/akka/persistence/ChannelSpec.scala b/akka-persistence/src/test/scala/akka/persistence/ChannelSpec.scala deleted file mode 100644 index 61bba0a5ba..0000000000 --- a/akka-persistence/src/test/scala/akka/persistence/ChannelSpec.scala +++ /dev/null @@ -1,195 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package akka.persistence - -import scala.concurrent.duration._ -import scala.language.postfixOps - -import com.typesafe.config._ - -import akka.actor._ -import akka.testkit._ - -object ChannelSpec { - class TestDestinationProcessor(name: String) extends NamedProcessor(name) { - def receive = { - case cp @ ConfirmablePersistent("a", _, _) ⇒ cp.confirm() - case cp @ ConfirmablePersistent("b", _, _) ⇒ cp.confirm() - case cp @ ConfirmablePersistent("boom", _, _) if (recoveryFinished) ⇒ throw new TestException("boom") - } - } - - class TestReceiver(testActor: ActorRef) extends Actor { - def receive = { - case cp @ ConfirmablePersistent(payload, _, _) ⇒ - testActor ! payload - cp.confirm() - } - } - - class TestListener(probe: ActorRef) extends Actor { - def receive = { - case RedeliverFailure(messages) ⇒ messages.foreach(probe ! 
_.payload) - } - } -} - -abstract class ChannelSpec(config: Config) extends AkkaSpec(config) with PersistenceSpec with ImplicitSender { - import ChannelSpec._ - - protected var defaultTestChannel: ActorRef = _ - protected var redeliverTestChannel: ActorRef = _ - - override protected def beforeEach: Unit = { - super.beforeEach() - defaultTestChannel = createDefaultTestChannel() - redeliverTestChannel = createRedeliverTestChannel() - } - - override protected def afterEach(): Unit = { - system.stop(defaultTestChannel) - system.stop(redeliverTestChannel) - super.afterEach() - } - - private def redeliverChannelSettings(listener: Option[ActorRef]): ChannelSettings = - ChannelSettings(redeliverMax = 2, redeliverInterval = 100 milliseconds, redeliverFailureListener = listener) - - def createDefaultTestChannel(): ActorRef = - system.actorOf(Channel.props(s"${name}-default", ChannelSettings())) - - def createRedeliverTestChannel(): ActorRef = - system.actorOf(Channel.props(s"${name}-redeliver", redeliverChannelSettings(None))) - - def createRedeliverTestChannel(listener: Option[ActorRef]): ActorRef = - system.actorOf(Channel.props(s"${name}-redeliver-listener", redeliverChannelSettings(listener))) - - def subscribeToConfirmation(probe: TestProbe): Unit = - system.eventStream.subscribe(probe.ref, classOf[Delivered]) - - def awaitConfirmation(probe: TestProbe): Unit = - probe.expectMsgType[Delivered] - - def actorRefFor(topLevelName: String) = - extension.system.provider.resolveActorRef(RootActorPath(Address("akka", system.name)) / "user" / topLevelName) - - "A channel" must { - "must resolve destination references and preserve message order" in { - val empty = actorRefFor("testDestination") // will be an EmptyLocalActorRef - val probe = TestProbe() - val destination = system.actorOf(Props(classOf[TestReceiver], probe.ref), "testDestination") - - defaultTestChannel ! Deliver(PersistentRepr("a"), empty.path) - defaultTestChannel ! 
Deliver(Persistent("b"), destination.path) - defaultTestChannel ! Deliver(Persistent("c"), destination.path) - - probe.expectMsg("a") - probe.expectMsg("b") - probe.expectMsg("c") - } - "support processors as destination" in { - val destination = system.actorOf(Props(classOf[TestDestinationProcessor], name)) - val confirmProbe = TestProbe() - - subscribeToConfirmation(confirmProbe) - - defaultTestChannel ! Deliver(Persistent("a"), destination.path) - - awaitConfirmation(confirmProbe) - } - "support processors as destination that may fail" in { - val destination = system.actorOf(Props(classOf[TestDestinationProcessor], name)) - val confirmProbe = TestProbe() - - subscribeToConfirmation(confirmProbe) - - defaultTestChannel ! Deliver(Persistent("a"), destination.path) - defaultTestChannel ! Deliver(Persistent("boom"), destination.path) - defaultTestChannel ! Deliver(Persistent("b"), destination.path) - - awaitConfirmation(confirmProbe) - awaitConfirmation(confirmProbe) - } - "accept confirmable persistent messages for delivery" in { - val confirmProbe = TestProbe() - val destinationProbe = TestProbe() - - subscribeToConfirmation(confirmProbe) - - defaultTestChannel ! Deliver(PersistentRepr("a", confirmable = true), destinationProbe.ref.path) - - destinationProbe.expectMsgPF() { case m @ ConfirmablePersistent("a", _, _) ⇒ m.confirm() } - awaitConfirmation(confirmProbe) - } - "redeliver on missing confirmation" in { - val probe = TestProbe() - - redeliverTestChannel ! 
Deliver(Persistent("b"), probe.ref.path) - - probe.expectMsgPF() { case m @ ConfirmablePersistent("b", _, redeliveries) ⇒ redeliveries should be(0) } - probe.expectMsgPF() { case m @ ConfirmablePersistent("b", _, redeliveries) ⇒ redeliveries should be(1) } - probe.expectMsgPF() { case m @ ConfirmablePersistent("b", _, redeliveries) ⇒ redeliveries should be(2); m.confirm() } - } - "redeliver in correct relative order" in { - val deliveries = redeliverChannelSettings(None).redeliverMax + 1 - val interval = redeliverChannelSettings(None).redeliverInterval.toMillis / 5 * 4 - - val probe = TestProbe() - val cycles = 9 - - 1 to cycles foreach { i ⇒ - redeliverTestChannel ! Deliver(Persistent(i), probe.ref.path) - Thread.sleep(interval) - } - - val received = (1 to (cycles * deliveries)).foldLeft(Vector.empty[ConfirmablePersistent]) { - case (acc, _) ⇒ acc :+ probe.expectMsgType[ConfirmablePersistent] - } - - val grouped = received.groupBy(_.redeliveries) - val expected = 1 to 9 toVector - - grouped(0).map(_.payload) should be(expected) - grouped(1).map(_.payload) should be(expected) - grouped(2).map(_.payload) should be(expected) - } - "redeliver not more than redeliverMax on missing confirmation" in { - val probe = TestProbe() - - redeliverTestChannel ! Deliver(PersistentRepr("a"), probe.ref.path) - - probe.expectMsgPF() { case m @ ConfirmablePersistent("a", _, redeliveries) ⇒ redeliveries should be(0) } - probe.expectMsgPF() { case m @ ConfirmablePersistent("a", _, redeliveries) ⇒ redeliveries should be(1) } - probe.expectMsgPF() { case m @ ConfirmablePersistent("a", _, redeliveries) ⇒ redeliveries should be(2) } - probe.expectNoMsg(300 milliseconds) - } - "preserve message order to the same destination" in { - val probe = TestProbe() - val destination = system.actorOf(Props(classOf[TestReceiver], probe.ref)) - - 1 to 10 foreach { i ⇒ - defaultTestChannel ! 
Deliver(PersistentRepr(s"test-${i}"), destination.path) - } - - 1 to 10 foreach { i ⇒ - probe.expectMsg(s"test-${i}") - } - } - "notify redelivery failure listener" in { - val probe = TestProbe() - val listener = system.actorOf(Props(classOf[TestListener], probe.ref)) - val channel = createRedeliverTestChannel(Some(listener)) - - 1 to 3 foreach { i ⇒ channel ! Deliver(Persistent(i), system.deadLetters.path) } - - probe.expectMsgAllOf(1, 2, 3) - system.stop(channel) - } - } -} - -class LeveldbChannelSpec extends ChannelSpec(PersistenceSpec.config("leveldb", "LeveldbChannelSpec")) -class InmemChannelSpec extends ChannelSpec(PersistenceSpec.config("inmem", "InmemChannelSpec")) - diff --git a/akka-persistence/src/test/scala/akka/persistence/FailureSpec.scala b/akka-persistence/src/test/scala/akka/persistence/FailureSpec.scala deleted file mode 100644 index 26053d1a4c..0000000000 --- a/akka-persistence/src/test/scala/akka/persistence/FailureSpec.scala +++ /dev/null @@ -1,157 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. 
- */ - -package akka.persistence - -import scala.concurrent.duration._ -import scala.concurrent.forkjoin.ThreadLocalRandom -import scala.language.postfixOps - -import com.typesafe.config.ConfigFactory - -import akka.actor._ -import akka.testkit._ - -object FailureSpec { - val config = ConfigFactory.parseString( - s""" - akka.persistence.processor.chaos.live-processing-failure-rate = 0.3 - akka.persistence.processor.chaos.replay-processing-failure-rate = 0.1 - akka.persistence.destination.chaos.confirm-failure-rate = 0.3 - akka.persistence.journal.plugin = "akka.persistence.journal.chaos" - akka.persistence.journal.chaos.write-failure-rate = 0.3 - akka.persistence.journal.chaos.confirm-failure-rate = 0.2 - akka.persistence.journal.chaos.delete-failure-rate = 0.3 - akka.persistence.journal.chaos.replay-failure-rate = 0.25 - akka.persistence.journal.chaos.read-highest-failure-rate = 0.1 - akka.persistence.journal.chaos.class = akka.persistence.journal.chaos.ChaosJournal - akka.persistence.snapshot-store.local.dir = "target/snapshots-failure-spec/" - """) - - val numMessages = 10 - - case object Start - final case class Done(ints: Vector[Int]) - - final case class ProcessingFailure(i: Int) - final case class JournalingFailure(i: Int) - - trait ChaosSupport { this: Actor ⇒ - def random = ThreadLocalRandom.current - - var state = Vector.empty[Int] - - def contains(i: Int): Boolean = - state.contains(i) - - def add(i: Int): Unit = { - state :+= i - if (state.length == numMessages) sender() ! 
Done(state) - } - - def shouldFail(rate: Double) = - random.nextDouble() < rate - } - - class ChaosProcessor(destination: ActorRef) extends Processor with ChaosSupport with ActorLogging { - val config = context.system.settings.config.getConfig("akka.persistence.processor.chaos") - val liveProcessingFailureRate = config.getDouble("live-processing-failure-rate") - val replayProcessingFailureRate = config.getDouble("replay-processing-failure-rate") - - val channel = context.actorOf(Channel.props("channel", ChannelSettings(redeliverMax = 10, redeliverInterval = 500 milliseconds)), "channel") - - override def persistenceId = "chaos" - - def receive = { - case p @ Persistent(i: Int, _) ⇒ - val failureRate = if (recoveryRunning) replayProcessingFailureRate else liveProcessingFailureRate - if (contains(i)) { - log.debug(debugMessage(s"ignored duplicate ${i}")) - } else if (shouldFail(failureRate)) { - throw new TestException(debugMessage(s"rejected payload ${i}")) - } else { - add(i) - channel forward Deliver(p, destination.path) - log.debug(debugMessage(s"processed payload ${i}")) - } - case PersistenceFailure(i: Int, _, _) ⇒ - // inform sender about journaling failure so that it can resend - sender() ! JournalingFailure(i) - case RecoveryFailure(_) ⇒ - // journal failed during recovery, throw exception to re-recover processor - throw new TestException(debugMessage("recovery failed")) - } - - override def preRestart(reason: Throwable, message: Option[Any]): Unit = { - message match { - case Some(p @ Persistent(i: Int, _)) if !recoveryRunning ⇒ - deleteMessage(p.sequenceNr) - log.debug(debugMessage(s"requested deletion of payload ${i}")) - // inform sender about processing failure so that it can resend - sender() ! 
ProcessingFailure(i) - case _ ⇒ - } - super.preRestart(reason, message) - } - - private def debugMessage(msg: String): String = - s"[processor] ${msg} (mode = ${if (recoveryRunning) "replay" else "live"} snr = ${lastSequenceNr} state = ${state.sorted})" - } - - class ChaosDestination extends Actor with ChaosSupport with ActorLogging { - val config = context.system.settings.config.getConfig("akka.persistence.destination.chaos") - val confirmFailureRate = config.getDouble("confirm-failure-rate") - - def receive = { - case cp @ ConfirmablePersistent(i: Int, _, _) ⇒ - if (shouldFail(confirmFailureRate)) { - log.error(debugMessage("confirm message failed", cp)) - } else if (contains(i)) { - log.debug(debugMessage("ignored duplicate", cp)) - } else { - add(i) - cp.confirm() - log.debug(debugMessage("received and confirmed message", cp)) - } - } - - private def debugMessage(msg: String, cp: ConfirmablePersistent): String = - s"[destination] ${msg} (message = ConfirmablePersistent(${cp.payload}, ${cp.sequenceNr}, ${cp.redeliveries}), state = ${state.sorted})" - } - - class ChaosProcessorApp(probe: ActorRef) extends Actor with ActorLogging { - val destination = context.actorOf(Props[ChaosDestination], "destination") - val processor = context.actorOf(Props(classOf[ChaosProcessor], destination), "processor") - - def receive = { - case Start ⇒ 1 to numMessages foreach (processor ! Persistent(_)) - case Done(ints) ⇒ probe ! Done(ints) - case ProcessingFailure(i) ⇒ - processor ! Persistent(i) - log.debug(s"resent ${i} after processing failure") - case JournalingFailure(i) ⇒ - processor ! Persistent(i) - log.debug(s"resent ${i} after journaling failure") - } - } -} - -class FailureSpec extends AkkaSpec(FailureSpec.config) with Cleanup with ImplicitSender { - import FailureSpec._ - - "The journaling protocol (= conversation between a processor and a journal)" must { - "tolerate and recover from random failures" in { - system.actorOf(Props(classOf[ChaosProcessorApp], testActor)) ! 
Start - expectDone() // by processor - expectDone() // by destination - - system.actorOf(Props(classOf[ChaosProcessorApp], testActor)) // recovery of new instance should have same outcome - expectDone() // by processor - // destination doesn't receive messages again because all have been confirmed already - } - } - - def expectDone() = - expectMsgPF(numMessages seconds) { case Done(ints) ⇒ ints.sorted should be(1 to numMessages toVector) } -} diff --git a/akka-persistence/src/test/scala/akka/persistence/NumberProcessorSpec.scala b/akka-persistence/src/test/scala/akka/persistence/NumberProcessorSpec.scala deleted file mode 100644 index f02c593dd5..0000000000 --- a/akka-persistence/src/test/scala/akka/persistence/NumberProcessorSpec.scala +++ /dev/null @@ -1,90 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. - */ -package akka.persistence - -import scala.language.postfixOps - -import com.typesafe.config._ - -import scala.concurrent.duration._ - -import akka.actor._ -import akka.persistence._ - -import akka.testkit._ - -object NumberProcessorSpec { - case class SetNumber(number: Int) - case class Add(number: Int) - case class Subtract(number: Int) - case object DecrementAndGet - case object GetNumber - - class NumberProcessorWithPersistentChannel(name: String) extends NamedProcessor(name) { - var num = 0 - - val channel = context.actorOf(PersistentChannel.props(channelId = "stable_id", - PersistentChannelSettings(redeliverInterval = 30 seconds, redeliverMax = 15)), - name = "myPersistentChannel") - - def receive = { - case Persistent(SetNumber(number), _) ⇒ num = number - case Persistent(Add(number), _) ⇒ num = num + number - case Persistent(Subtract(number), _) ⇒ num = num - number - case GetNumber ⇒ channel ! Deliver(Persistent(num), sender().path) - case p @ Persistent(DecrementAndGet, _) ⇒ - num = num - 1 - channel ! 
Deliver(p.withPayload(num), sender().path) - } - } -} - -/* - * This test found the problem described in ticket #3933 - */ -class NumberProcessorSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "NumberProcessorSpec")) - with PersistenceSpec { - import NumberProcessorSpec._ - - "A processor using a persistent channel" must { - - "resurrect with the correct state, not replaying confirmed messages to clients" in { - val deliveredProbe = TestProbe() - system.eventStream.subscribe(deliveredProbe.testActor, classOf[DeliveredByPersistentChannel]) - - val probe = TestProbe() - - val processor = namedProcessor[NumberProcessorWithPersistentChannel] - processor.tell(GetNumber, probe.testActor) - - val zero = probe.expectMsgType[ConfirmablePersistent] - zero.confirm() - zero.payload should equal(0) - - deliveredProbe.expectMsgType[DeliveredByPersistentChannel] - - processor.tell(Persistent(DecrementAndGet), probe.testActor) - - val decrementFrom0 = probe.expectMsgType[ConfirmablePersistent] - decrementFrom0.confirm() - decrementFrom0.payload should equal(-1) - - deliveredProbe.expectMsgType[DeliveredByPersistentChannel] - - watch(processor) - system.stop(processor) - expectMsgType[Terminated] - - val processorResurrected = namedProcessor[NumberProcessorWithPersistentChannel] - processorResurrected.tell(Persistent(DecrementAndGet), probe.testActor) - - val decrementFromMinus1 = probe.expectMsgType[ConfirmablePersistent] - decrementFromMinus1.confirm() - decrementFromMinus1.payload should equal(-2) - - deliveredProbe.expectMsgType[DeliveredByPersistentChannel] - } - } -} - diff --git a/akka-persistence/src/test/scala/akka/persistence/PerformanceSpec.scala b/akka-persistence/src/test/scala/akka/persistence/PerformanceSpec.scala index 5e63294400..c3d003861c 100644 --- a/akka-persistence/src/test/scala/akka/persistence/PerformanceSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/PerformanceSpec.scala @@ -16,90 +16,45 @@ object PerformanceSpec { // accurate 
throughput measurements val config = """ - akka.persistence.performance.cycles.warmup = 300 akka.persistence.performance.cycles.load = 1000 """ - case object StartMeasure case object StopMeasure final case class FailAt(sequenceNr: Long) - trait Measure extends { this: Actor ⇒ - val NanoToSecond = 1000.0 * 1000 * 1000 + class Measure(numberOfMessages: Int) { + private val NanoToSecond = 1000.0 * 1000 * 1000 - var startTime: Long = 0L - var stopTime: Long = 0L - - var startSequenceNr = 0L - var stopSequenceNr = 0L + private var startTime: Long = 0L + private var stopTime: Long = 0L def startMeasure(): Unit = { - startSequenceNr = lastSequenceNr startTime = System.nanoTime } - def stopMeasure(): Unit = { - stopSequenceNr = lastSequenceNr + def stopMeasure(): Double = { stopTime = System.nanoTime - sender() ! (NanoToSecond * (stopSequenceNr - startSequenceNr) / (stopTime - startTime)) - } - - def lastSequenceNr: Long - } - - class PerformanceTestDestination extends Actor with Measure { - var lastSequenceNr = 0L - - val confirm: PartialFunction[Any, Any] = { - case cp @ ConfirmablePersistent(payload, sequenceNr, _) ⇒ - lastSequenceNr = sequenceNr - cp.confirm() - payload - } - - def receive = confirm andThen { - case StartMeasure ⇒ startMeasure() - case StopMeasure ⇒ stopMeasure() - case m ⇒ if (lastSequenceNr % 1000 == 0) print(".") + (NanoToSecond * numberOfMessages / (stopTime - startTime)) } } - abstract class PerformanceTestProcessor(name: String) extends NamedProcessor(name) with Measure { + abstract class PerformanceTestPersistentActor(name: String) extends NamedPersistentActor(name) { var failAt: Long = -1 - val controlBehavior: Receive = { - case StartMeasure ⇒ startMeasure() - case StopMeasure ⇒ stopMeasure() - case FailAt(sequenceNr) ⇒ failAt = sequenceNr - } - - override def postRestart(reason: Throwable) { - super.postRestart(reason) - receive(StartMeasure) - } - } - - class CommandsourcedTestProcessor(name: String) extends PerformanceTestProcessor(name) { 
- def receive = controlBehavior orElse { - case p: Persistent ⇒ - if (lastSequenceNr % 1000 == 0) if (recoveryRunning) print("r") else print(".") - if (lastSequenceNr == failAt) throw new TestException("boom") - } - } - - class CommandsourcedTestPersistentActor(name: String) extends PerformanceTestProcessor(name) with PersistentActor { - - override val controlBehavior: Receive = { - case StartMeasure ⇒ startMeasure() - case StopMeasure ⇒ defer(StopMeasure)(_ ⇒ stopMeasure()) - case FailAt(sequenceNr) ⇒ failAt = sequenceNr - } - - val receiveRecover: Receive = { + override val receiveRecover: Receive = { case _ ⇒ if (lastSequenceNr % 1000 == 0) print("r") } - val receiveCommand: Receive = controlBehavior orElse { + val controlBehavior: Receive = { + case StopMeasure ⇒ defer(StopMeasure)(_ ⇒ sender() ! StopMeasure) + case FailAt(sequenceNr) ⇒ failAt = sequenceNr + } + + } + + class CommandsourcedTestPersistentActor(name: String) extends PerformanceTestPersistentActor(name) { + + override val receiveCommand: Receive = controlBehavior orElse { case cmd ⇒ persistAsync(cmd) { _ ⇒ if (lastSequenceNr % 1000 == 0) print(".") if (lastSequenceNr == failAt) throw new TestException("boom") @@ -107,12 +62,9 @@ object PerformanceSpec { } } - class EventsourcedTestProcessor(name: String) extends PerformanceTestProcessor(name) with PersistentActor { - val receiveRecover: Receive = { - case _ ⇒ if (lastSequenceNr % 1000 == 0) print("r") - } + class EventsourcedTestPersistentActor(name: String) extends PerformanceTestPersistentActor(name) { - val receiveCommand: Receive = controlBehavior orElse { + override val receiveCommand: Receive = controlBehavior orElse { case cmd ⇒ persist(cmd) { _ ⇒ if (lastSequenceNr % 1000 == 0) print(".") if (lastSequenceNr == failAt) throw new TestException("boom") @@ -120,11 +72,27 @@ object PerformanceSpec { } } - class StashingEventsourcedTestProcessor(name: String) extends PerformanceTestProcessor(name) with PersistentActor { - val receiveRecover: 
Receive = { - case _ ⇒ if (lastSequenceNr % 1000 == 0) print("r") + /** + * `persist` every 10th message, otherwise `persistAsync` + */ + class MixedTestPersistentActor(name: String) extends PerformanceTestPersistentActor(name) { + var counter = 0 + + val handler: Any ⇒ Unit = { evt ⇒ + if (lastSequenceNr % 1000 == 0) print(".") + if (lastSequenceNr == failAt) throw new TestException("boom") } + val receiveCommand: Receive = controlBehavior orElse { + case cmd ⇒ + counter += 1 + if (counter % 10 == 0) persist(cmd)(handler) + else persistAsync(cmd)(handler) + } + } + + class StashingEventsourcedTestPersistentActor(name: String) extends PerformanceTestPersistentActor(name) { + val printProgress: PartialFunction[Any, Any] = { case m ⇒ if (lastSequenceNr % 1000 == 0) print("."); m } @@ -146,82 +114,50 @@ object PerformanceSpec { class PerformanceSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "PerformanceSpec", serialization = "off").withFallback(ConfigFactory.parseString(PerformanceSpec.config))) with PersistenceSpec with ImplicitSender { import PerformanceSpec._ - val warmupCycles = system.settings.config.getInt("akka.persistence.performance.cycles.warmup") val loadCycles = system.settings.config.getInt("akka.persistence.performance.cycles.load") - def stressCommandsourcedProcessor(failAt: Option[Long]): Unit = { - val processor = namedProcessor[CommandsourcedTestProcessor] - failAt foreach { processor ! FailAt(_) } - 1 to warmupCycles foreach { i ⇒ processor ! Persistent(s"msg${i}") } - processor ! StartMeasure - 1 to loadCycles foreach { i ⇒ processor ! Persistent(s"msg${i}") } - processor ! StopMeasure - expectMsgPF(100 seconds) { - case throughput: Double ⇒ println(f"\nthroughput = $throughput%.2f persistent processor commands per second") - } + def stressPersistentActor(persistentActor: ActorRef, failAt: Option[Long], description: String): Unit = { + failAt foreach { persistentActor ! 
FailAt(_) } + val m = new Measure(loadCycles) + m.startMeasure() + 1 to loadCycles foreach { i ⇒ persistentActor ! s"msg${i}" } + persistentActor ! StopMeasure + expectMsg(100.seconds, StopMeasure) + println(f"\nthroughput = ${m.stopMeasure()}%.2f $description per second") } def stressCommandsourcedPersistentActor(failAt: Option[Long]): Unit = { - val processor = namedProcessor[CommandsourcedTestPersistentActor] - failAt foreach { processor ! FailAt(_) } - 1 to warmupCycles foreach { i ⇒ processor ! s"msg${i}" } - processor ! StartMeasure - 1 to loadCycles foreach { i ⇒ processor ! s"msg${i}" } - processor ! StopMeasure - expectMsgPF(100 seconds) { - case throughput: Double ⇒ println(f"\nthroughput = $throughput%.2f persistent actor commands per second") - } + val persistentActor = namedPersistentActor[CommandsourcedTestPersistentActor] + stressPersistentActor(persistentActor, failAt, "persistent commands") } - def stressPersistentActor(failAt: Option[Long]): Unit = { - val processor = namedProcessor[EventsourcedTestProcessor] - failAt foreach { processor ! FailAt(_) } - 1 to warmupCycles foreach { i ⇒ processor ! s"msg${i}" } - processor ! StartMeasure - 1 to loadCycles foreach { i ⇒ processor ! s"msg${i}" } - processor ! StopMeasure - expectMsgPF(100 seconds) { - case throughput: Double ⇒ println(f"\nthroughput = $throughput%.2f persistent events per second") - } + def stressEventSourcedPersistentActor(failAt: Option[Long]): Unit = { + val persistentActor = namedPersistentActor[EventsourcedTestPersistentActor] + stressPersistentActor(persistentActor, failAt, "persistent events") + } + + def stressMixedPersistentActor(failAt: Option[Long]): Unit = { + val persistentActor = namedPersistentActor[MixedTestPersistentActor] + stressPersistentActor(persistentActor, failAt, "persistent events & commands") } def stressStashingPersistentActor(): Unit = { - val processor = namedProcessor[StashingEventsourcedTestProcessor] - 1 to warmupCycles foreach { i ⇒ processor ! 
"b" } - processor ! StartMeasure + val persistentActor = namedPersistentActor[StashingEventsourcedTestPersistentActor] + val m = new Measure(loadCycles) + m.startMeasure() val cmds = 1 to (loadCycles / 3) flatMap (_ ⇒ List("a", "b", "c")) - processor ! StartMeasure - cmds foreach (processor ! _) - processor ! StopMeasure - expectMsgPF(100 seconds) { - case throughput: Double ⇒ println(f"\nthroughput = $throughput%.2f persistent events per second") - } + cmds foreach (persistentActor ! _) + persistentActor ! StopMeasure + expectMsg(100.seconds, StopMeasure) + println(f"\nthroughput = ${m.stopMeasure()}%.2f persistent events per second") } - def stressPersistentChannel(): Unit = { - val channel = system.actorOf(PersistentChannel.props()) - val destination = system.actorOf(Props[PerformanceTestDestination]) - 1 to warmupCycles foreach { i ⇒ channel ! Deliver(PersistentRepr(s"msg${i}", persistenceId = "test"), destination.path) } - channel ! Deliver(Persistent(StartMeasure), destination.path) - 1 to loadCycles foreach { i ⇒ channel ! Deliver(PersistentRepr(s"msg${i}", persistenceId = "test"), destination.path) } - channel ! 
Deliver(Persistent(StopMeasure), destination.path) - expectMsgPF(100 seconds) { - case throughput: Double ⇒ println(f"\nthroughput = $throughput%.2f persistent messages per second") + "Warmup persistent actor" should { + "exercise" in { + stressCommandsourcedPersistentActor(None) } - } - - def subscribeToConfirmation(probe: TestProbe): Unit = - system.eventStream.subscribe(probe.ref, classOf[DeliveredByPersistentChannel]) - - def awaitConfirmation(probe: TestProbe): Unit = - probe.expectMsgType[DeliveredByPersistentChannel] - - "A command sourced processor" should { - "have some reasonable throughput" in { - stressCommandsourcedProcessor(None) - } - "have some reasonable throughput under failure conditions" in { - stressCommandsourcedProcessor(Some(warmupCycles + loadCycles / 10)) + "exercise some more" in { + stressCommandsourcedPersistentActor(None) } } @@ -233,26 +169,20 @@ class PerformanceSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "Perfor "An event sourced persistent actor" should { "have some reasonable throughput" in { - stressPersistentActor(None) + stressEventSourcedPersistentActor(None) } "have some reasonable throughput under failure conditions" in { - stressPersistentActor(Some(warmupCycles + loadCycles / 10)) + stressEventSourcedPersistentActor(Some(loadCycles / 10)) } "have some reasonable throughput with stashing and unstashing every 3rd command" in { stressStashingPersistentActor() } } - "A persistent channel" should { + "A mixed command and event sourced persistent actor" should { "have some reasonable throughput" in { - val probe = TestProbe() - subscribeToConfirmation(probe) - - stressPersistentChannel() - - probe.fishForMessage(100.seconds) { - case DeliveredByPersistentChannel(_, snr, _, _) ⇒ snr == warmupCycles + loadCycles + 2 - } + stressMixedPersistentActor(None) } } + } diff --git a/akka-persistence/src/test/scala/akka/persistence/PersistenceSpec.scala b/akka-persistence/src/test/scala/akka/persistence/PersistenceSpec.scala 
index ee760b67f7..395b324e0f 100644 --- a/akka-persistence/src/test/scala/akka/persistence/PersistenceSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/PersistenceSpec.scala @@ -35,9 +35,9 @@ trait PersistenceSpec extends BeforeAndAfterEach with Cleanup { this: AkkaSpec def namePrefix: String = system.name /** - * Creates a processor with current name as constructor argument. + * Creates a persistent actor with current name as constructor argument. */ - def namedProcessor[T <: NamedProcessor: ClassTag] = + def namedPersistentActor[T <: NamedPersistentActor: ClassTag] = system.actorOf(Props(implicitly[ClassTag[T]].runtimeClass, name)) override protected def beforeEach() { @@ -52,7 +52,6 @@ object PersistenceSpec { s""" akka.actor.serialize-creators = ${serialization} akka.actor.serialize-messages = ${serialization} - akka.persistence.publish-confirmations = on akka.persistence.publish-plugin-commands = on akka.persistence.journal.plugin = "akka.persistence.journal.${plugin}" akka.persistence.journal.leveldb.dir = "target/journal-${test}" @@ -76,16 +75,11 @@ trait Cleanup { this: AkkaSpec ⇒ } } -@deprecated("Use NamedPersistentActor instead.", since = "2.3.4") -abstract class NamedProcessor(name: String) extends Processor { - override def persistenceId: String = name -} - abstract class NamedPersistentActor(name: String) extends PersistentActor { override def persistenceId: String = name } -trait TurnOffRecoverOnStart { this: Processor ⇒ +trait TurnOffRecoverOnStart { this: Eventsourced ⇒ override def preStart(): Unit = () } diff --git a/akka-persistence/src/test/scala/akka/persistence/PersistentActorFailureSpec.scala b/akka-persistence/src/test/scala/akka/persistence/PersistentActorFailureSpec.scala index 678fb0e944..f5a7205e48 100644 --- a/akka-persistence/src/test/scala/akka/persistence/PersistentActorFailureSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/PersistentActorFailureSpec.scala @@ -58,8 +58,8 @@ object 
PersistentActorFailureSpec { class PersistentActorFailureSpec extends AkkaSpec(PersistenceSpec.config("inmem", "SnapshotFailureRobustnessSpec", extraConfig = Some( """ - |akka.persistence.journal.inmem.class = "akka.persistence.PersistentActorFailureSpec$FailingInmemJournal" - """.stripMargin))) with PersistenceSpec with ImplicitSender { + akka.persistence.journal.inmem.class = "akka.persistence.PersistentActorFailureSpec$FailingInmemJournal" + """))) with PersistenceSpec with ImplicitSender { import PersistentActorSpec._ import PersistentActorFailureSpec._ @@ -67,15 +67,15 @@ class PersistentActorFailureSpec extends AkkaSpec(PersistenceSpec.config("inmem" override protected def beforeEach() { super.beforeEach() - val processor = namedProcessor[Behavior1Processor] - processor ! Cmd("a") - processor ! GetState + val persistentActor = namedPersistentActor[Behavior1PersistentActor] + persistentActor ! Cmd("a") + persistentActor ! GetState expectMsg(List("a-1", "a-2")) } "A persistent actor" must { "throw ActorKilledException if recovery from persisted events fail" in { - system.actorOf(Props(classOf[Supervisor], testActor)) ! Props(classOf[Behavior1Processor], name) + system.actorOf(Props(classOf[Supervisor], testActor)) ! 
Props(classOf[Behavior1PersistentActor], name) expectMsgType[ActorRef] expectMsgType[ActorKilledException] } diff --git a/akka-persistence/src/test/scala/akka/persistence/PersistentActorSpec.scala b/akka-persistence/src/test/scala/akka/persistence/PersistentActorSpec.scala index 1b0a7efc50..de3360eb9b 100644 --- a/akka-persistence/src/test/scala/akka/persistence/PersistentActorSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/PersistentActorSpec.scala @@ -14,12 +14,15 @@ import akka.testkit.TestProbe import java.util.concurrent.atomic.AtomicInteger import scala.util.Random import scala.util.control.NoStackTrace +import akka.testkit.TestLatch +import scala.concurrent.Await object PersistentActorSpec { final case class Cmd(data: Any) final case class Evt(data: Any) + final case class LatchCmd(latch: TestLatch, data: Any) extends NoSerializationVerificationNeeded - abstract class ExamplePersistentActor(name: String) extends NamedProcessor(name) with PersistentActor { + abstract class ExamplePersistentActor(name: String) extends NamedPersistentActor(name) with PersistentActor { var events: List[Any] = Nil val updateState: Receive = { @@ -34,14 +37,14 @@ object PersistentActorSpec { def receiveRecover = updateState } - class Behavior1Processor(name: String) extends ExamplePersistentActor(name) { + class Behavior1PersistentActor(name: String) extends ExamplePersistentActor(name) { val receiveCommand: Receive = commonBehavior orElse { case Cmd(data) ⇒ persist(Seq(Evt(s"${data}-1"), Evt(s"${data}-2")))(updateState) } } - class Behavior2Processor(name: String) extends ExamplePersistentActor(name) { + class Behavior2PersistentActor(name: String) extends ExamplePersistentActor(name) { val receiveCommand: Receive = commonBehavior orElse { case Cmd(data) ⇒ persist(Seq(Evt(s"${data}-1"), Evt(s"${data}-2")))(updateState) @@ -49,7 +52,7 @@ object PersistentActorSpec { } } - class Behavior3Processor(name: String) extends ExamplePersistentActor(name) { + class 
Behavior3PersistentActor(name: String) extends ExamplePersistentActor(name) { val receiveCommand: Receive = commonBehavior orElse { case Cmd(data) ⇒ persist(Seq(Evt(s"${data}-11"), Evt(s"${data}-12")))(updateState) @@ -57,7 +60,7 @@ object PersistentActorSpec { } } - class ChangeBehaviorInLastEventHandlerProcessor(name: String) extends ExamplePersistentActor(name) { + class ChangeBehaviorInLastEventHandlerPersistentActor(name: String) extends ExamplePersistentActor(name) { val newBehavior: Receive = { case Cmd(data) ⇒ persist(Evt(s"${data}-21"))(updateState) @@ -76,7 +79,7 @@ object PersistentActorSpec { } } - class ChangeBehaviorInFirstEventHandlerProcessor(name: String) extends ExamplePersistentActor(name) { + class ChangeBehaviorInFirstEventHandlerPersistentActor(name: String) extends ExamplePersistentActor(name) { val newBehavior: Receive = { case Cmd(data) ⇒ persist(Evt(s"${data}-21")) { event ⇒ @@ -95,7 +98,7 @@ object PersistentActorSpec { } } - class ChangeBehaviorInCommandHandlerFirstProcessor(name: String) extends ExamplePersistentActor(name) { + class ChangeBehaviorInCommandHandlerFirstPersistentActor(name: String) extends ExamplePersistentActor(name) { val newBehavior: Receive = { case Cmd(data) ⇒ context.unbecome() @@ -110,7 +113,7 @@ object PersistentActorSpec { } } - class ChangeBehaviorInCommandHandlerLastProcessor(name: String) extends ExamplePersistentActor(name) { + class ChangeBehaviorInCommandHandlerLastPersistentActor(name: String) extends ExamplePersistentActor(name) { val newBehavior: Receive = { case Cmd(data) ⇒ persist(Seq(Evt(s"${data}-31"), Evt(s"${data}-32")))(updateState) @@ -137,10 +140,9 @@ object PersistentActorSpec { } def receiveCommand: Receive = commonBehavior orElse { - case c: Cmd ⇒ handleCmd(c) - case SaveSnapshotSuccess(_) ⇒ probe ! "saved" - case "snap" ⇒ saveSnapshot(events) - case ConfirmablePersistent(c: Cmd, _, _) ⇒ handleCmd(c) + case c: Cmd ⇒ handleCmd(c) + case SaveSnapshotSuccess(_) ⇒ probe ! 
"saved" + case "snap" ⇒ saveSnapshot(events) } } @@ -161,14 +163,13 @@ object PersistentActorSpec { } } - class ReplyInEventHandlerProcessor(name: String) extends ExamplePersistentActor(name) { + class ReplyInEventHandlerPersistentActor(name: String) extends ExamplePersistentActor(name) { val receiveCommand: Receive = { - case Cmd("a") ⇒ persist(Evt("a"))(evt ⇒ sender() ! evt.data) - case p: Persistent ⇒ sender() ! p // not expected + case Cmd("a") ⇒ persist(Evt("a"))(evt ⇒ sender() ! evt.data) } } - class UserStashProcessor(name: String) extends ExamplePersistentActor(name) { + class UserStashPersistentActor(name: String) extends ExamplePersistentActor(name) { var stashed = false val receiveCommand: Receive = { case Cmd("a") ⇒ if (!stashed) { stash(); stashed = true } else sender() ! "a" @@ -177,7 +178,7 @@ object PersistentActorSpec { } } - class UserStashManyProcessor(name: String) extends ExamplePersistentActor(name) { + class UserStashManyPersistentActor(name: String) extends ExamplePersistentActor(name) { val receiveCommand: Receive = commonBehavior orElse { case Cmd("a") ⇒ persist(Evt("a")) { evt ⇒ updateState(evt) @@ -197,7 +198,7 @@ object PersistentActorSpec { case other ⇒ stash() } } - class AsyncPersistProcessor(name: String) extends ExamplePersistentActor(name) { + class AsyncPersistPersistentActor(name: String) extends ExamplePersistentActor(name) { var counter = 0 val receiveCommand: Receive = commonBehavior orElse { @@ -213,7 +214,7 @@ object PersistentActorSpec { counter } } - class AsyncPersistThreeTimesProcessor(name: String) extends ExamplePersistentActor(name) { + class AsyncPersistThreeTimesPersistentActor(name: String) extends ExamplePersistentActor(name) { var counter = 0 val receiveCommand: Receive = commonBehavior orElse { @@ -232,7 +233,7 @@ object PersistentActorSpec { counter } } - class AsyncPersistSameEventTwiceProcessor(name: String) extends ExamplePersistentActor(name) { + class AsyncPersistSameEventTwicePersistentActor(name: 
String) extends ExamplePersistentActor(name) { // atomic because used from inside the *async* callbacks val sendMsgCounter = new AtomicInteger() @@ -250,7 +251,7 @@ object PersistentActorSpec { persistAsync(event) { evt ⇒ sender() ! s"${evt.data}-b-${sendMsgCounter.incrementAndGet()}" } } } - class AsyncPersistAndPersistMixedSyncAsyncSyncProcessor(name: String) extends ExamplePersistentActor(name) { + class AsyncPersistAndPersistMixedSyncAsyncSyncPersistentActor(name: String) extends ExamplePersistentActor(name) { var counter = 0 @@ -277,7 +278,7 @@ object PersistentActorSpec { counter } } - class AsyncPersistAndPersistMixedSyncAsyncProcessor(name: String) extends ExamplePersistentActor(name) { + class AsyncPersistAndPersistMixedSyncAsyncPersistentActor(name: String) extends ExamplePersistentActor(name) { var sendMsgCounter = 0 @@ -319,7 +320,7 @@ object PersistentActorSpec { } } - class UserStashFailureProcessor(name: String) extends ExamplePersistentActor(name) { + class UserStashFailurePersistentActor(name: String) extends ExamplePersistentActor(name) { val receiveCommand: Receive = commonBehavior orElse { case Cmd(data) ⇒ if (data == "b-2") throw new TestException("boom") @@ -340,13 +341,13 @@ object PersistentActorSpec { } } - class AnyValEventProcessor(name: String) extends ExamplePersistentActor(name) { + class AnyValEventPersistentActor(name: String) extends ExamplePersistentActor(name) { val receiveCommand: Receive = { case Cmd("a") ⇒ persist(5)(evt ⇒ sender() ! 
evt) } } - class HandleRecoveryFinishedEventProcessor(name: String, probe: ActorRef) extends SnapshottingPersistentActor(name, probe) { + class HandleRecoveryFinishedEventPersistentActor(name: String, probe: ActorRef) extends SnapshottingPersistentActor(name, probe) { val sendingRecover: Receive = { case msg: SnapshotOffer ⇒ // sending ourself a normal message tests @@ -404,6 +405,20 @@ object PersistentActorSpec { } } + class StressOrdering(name: String) extends ExamplePersistentActor(name) { + val receiveCommand: Receive = { + case LatchCmd(latch, data) ⇒ + sender() ! data + Await.ready(latch, 5.seconds) + persistAsync(data)(_ ⇒ ()) + case Cmd(data) ⇒ + sender() ! data + persist(data)(_ ⇒ ()) + case s: String ⇒ + sender() ! s + } + } + class StackableTestPersistentActor(val probe: ActorRef) extends StackableTestPersistentActor.BaseActor with PersistentActor with StackableTestPersistentActor.MixinActor { override def persistenceId: String = "StackableTestPersistentActor" @@ -460,7 +475,7 @@ object PersistentActorSpec { } override protected[akka] def aroundReceive(receive: Receive, message: Any) = { - probe ! "base aroundReceive" + if (message == "restart" && recoveryFinished) { probe ! s"base aroundReceive $message" } super.aroundReceive(receive, message) } } @@ -487,7 +502,7 @@ object PersistentActorSpec { } override protected[akka] def aroundReceive(receive: Receive, message: Any) = { - if (message == "restart" && recoveryFinished) { probe ! "mixin aroundReceive" } + if (message == "restart" && recoveryFinished) { probe ! s"mixin aroundReceive $message" } super.aroundReceive(receive, message) } } @@ -501,194 +516,165 @@ abstract class PersistentActorSpec(config: Config) extends AkkaSpec(config) with override protected def beforeEach() { super.beforeEach() - val processor = namedProcessor[Behavior1Processor] - processor ! Cmd("a") - processor ! GetState + val persistentActor = namedPersistentActor[Behavior1PersistentActor] + persistentActor ! 
Cmd("a") + persistentActor ! GetState expectMsg(List("a-1", "a-2")) } "A persistent actor" must { "recover from persisted events" in { - val processor = namedProcessor[Behavior1Processor] - processor ! GetState + val persistentActor = namedPersistentActor[Behavior1PersistentActor] + persistentActor ! GetState expectMsg(List("a-1", "a-2")) } "handle multiple emitted events in correct order (for a single persist call)" in { - val processor = namedProcessor[Behavior1Processor] - processor ! Cmd("b") - processor ! GetState + val persistentActor = namedPersistentActor[Behavior1PersistentActor] + persistentActor ! Cmd("b") + persistentActor ! GetState expectMsg(List("a-1", "a-2", "b-1", "b-2")) } "handle multiple emitted events in correct order (for multiple persist calls)" in { - val processor = namedProcessor[Behavior2Processor] - processor ! Cmd("b") - processor ! GetState + val persistentActor = namedPersistentActor[Behavior2PersistentActor] + persistentActor ! Cmd("b") + persistentActor ! GetState expectMsg(List("a-1", "a-2", "b-1", "b-2", "b-3", "b-4")) } "receive emitted events immediately after command" in { - val processor = namedProcessor[Behavior3Processor] - processor ! Cmd("b") - processor ! Cmd("c") - processor ! GetState + val persistentActor = namedPersistentActor[Behavior3PersistentActor] + persistentActor ! Cmd("b") + persistentActor ! Cmd("c") + persistentActor ! GetState expectMsg(List("a-1", "a-2", "b-10", "b-11", "b-12", "c-10", "c-11", "c-12")) } "recover on command failure" in { - val processor = namedProcessor[Behavior3Processor] - processor ! Cmd("b") - processor ! "boom" - processor ! Cmd("c") - processor ! GetState + val persistentActor = namedPersistentActor[Behavior3PersistentActor] + persistentActor ! Cmd("b") + persistentActor ! "boom" + persistentActor ! Cmd("c") + persistentActor ! GetState // cmd that was added to state before failure (b-10) is not replayed ... 
expectMsg(List("a-1", "a-2", "b-11", "b-12", "c-10", "c-11", "c-12")) } "allow behavior changes in event handler (when handling first event)" in { - val processor = namedProcessor[ChangeBehaviorInFirstEventHandlerProcessor] - processor ! Cmd("b") - processor ! Cmd("c") - processor ! Cmd("d") - processor ! Cmd("e") - processor ! GetState + val persistentActor = namedPersistentActor[ChangeBehaviorInFirstEventHandlerPersistentActor] + persistentActor ! Cmd("b") + persistentActor ! Cmd("c") + persistentActor ! Cmd("d") + persistentActor ! Cmd("e") + persistentActor ! GetState expectMsg(List("a-1", "a-2", "b-0", "c-21", "c-22", "d-0", "e-21", "e-22")) } "allow behavior changes in event handler (when handling last event)" in { - val processor = namedProcessor[ChangeBehaviorInLastEventHandlerProcessor] - processor ! Cmd("b") - processor ! Cmd("c") - processor ! Cmd("d") - processor ! Cmd("e") - processor ! GetState + val persistentActor = namedPersistentActor[ChangeBehaviorInLastEventHandlerPersistentActor] + persistentActor ! Cmd("b") + persistentActor ! Cmd("c") + persistentActor ! Cmd("d") + persistentActor ! Cmd("e") + persistentActor ! GetState expectMsg(List("a-1", "a-2", "b-0", "c-21", "c-22", "d-0", "e-21", "e-22")) } "allow behavior changes in command handler (as first action)" in { - val processor = namedProcessor[ChangeBehaviorInCommandHandlerFirstProcessor] - processor ! Cmd("b") - processor ! Cmd("c") - processor ! Cmd("d") - processor ! Cmd("e") - processor ! GetState + val persistentActor = namedPersistentActor[ChangeBehaviorInCommandHandlerFirstPersistentActor] + persistentActor ! Cmd("b") + persistentActor ! Cmd("c") + persistentActor ! Cmd("d") + persistentActor ! Cmd("e") + persistentActor ! GetState expectMsg(List("a-1", "a-2", "b-0", "c-30", "c-31", "c-32", "d-0", "e-30", "e-31", "e-32")) } "allow behavior changes in command handler (as last action)" in { - val processor = namedProcessor[ChangeBehaviorInCommandHandlerLastProcessor] - processor ! 
Cmd("b") - processor ! Cmd("c") - processor ! Cmd("d") - processor ! Cmd("e") - processor ! GetState + val persistentActor = namedPersistentActor[ChangeBehaviorInCommandHandlerLastPersistentActor] + persistentActor ! Cmd("b") + persistentActor ! Cmd("c") + persistentActor ! Cmd("d") + persistentActor ! Cmd("e") + persistentActor ! GetState expectMsg(List("a-1", "a-2", "b-0", "c-30", "c-31", "c-32", "d-0", "e-30", "e-31", "e-32")) } "support snapshotting" in { - val processor1 = system.actorOf(Props(classOf[SnapshottingPersistentActor], name, testActor)) - processor1 ! Cmd("b") - processor1 ! "snap" - processor1 ! Cmd("c") + val persistentActor1 = system.actorOf(Props(classOf[SnapshottingPersistentActor], name, testActor)) + persistentActor1 ! Cmd("b") + persistentActor1 ! "snap" + persistentActor1 ! Cmd("c") expectMsg("saved") - processor1 ! GetState + persistentActor1 ! GetState expectMsg(List("a-1", "a-2", "b-41", "b-42", "c-41", "c-42")) - val processor2 = system.actorOf(Props(classOf[SnapshottingPersistentActor], name, testActor)) + val persistentActor2 = system.actorOf(Props(classOf[SnapshottingPersistentActor], name, testActor)) expectMsg("offered") - processor2 ! GetState + persistentActor2 ! GetState expectMsg(List("a-1", "a-2", "b-41", "b-42", "c-41", "c-42")) } "support context.become during recovery" in { - val processor1 = system.actorOf(Props(classOf[SnapshottingPersistentActor], name, testActor)) - processor1 ! Cmd("b") - processor1 ! "snap" - processor1 ! Cmd("c") + val persistentActor1 = system.actorOf(Props(classOf[SnapshottingPersistentActor], name, testActor)) + persistentActor1 ! Cmd("b") + persistentActor1 ! "snap" + persistentActor1 ! Cmd("c") expectMsg("saved") - processor1 ! GetState + persistentActor1 ! 
GetState expectMsg(List("a-1", "a-2", "b-41", "b-42", "c-41", "c-42")) - val processor2 = system.actorOf(Props(classOf[SnapshottingBecomingPersistentActor], name, testActor)) + val persistentActor2 = system.actorOf(Props(classOf[SnapshottingBecomingPersistentActor], name, testActor)) expectMsg("offered") expectMsg("I am becoming") - processor2 ! GetState + persistentActor2 ! GetState expectMsg(List("a-1", "a-2", "b-41", "b-42", "c-41", "c-42")) } - "support confirmable persistent" in { - val processor1 = system.actorOf(Props(classOf[SnapshottingPersistentActor], name, testActor)) - processor1 ! Cmd("b") - processor1 ! "snap" - processor1 ! ConfirmablePersistentImpl(Cmd("c"), 4711, "some-id", false, 0, Seq.empty, null, null, null) - expectMsg("saved") - processor1 ! GetState - expectMsg(List("a-1", "a-2", "b-41", "b-42", "c-41", "c-42")) - - val processor2 = system.actorOf(Props(classOf[SnapshottingPersistentActor], name, testActor)) - expectMsg("offered") - processor2 ! GetState - expectMsg(List("a-1", "a-2", "b-41", "b-42", "c-41", "c-42")) - } - "reject Persistent messages" in { - val probe = TestProbe() - val processor = namedProcessor[ReplyInEventHandlerProcessor] - - EventFilter[UnsupportedOperationException](occurrences = 1) intercept { - processor.tell(Persistent("not allowed"), probe.ref) - } - - processor.tell(Cmd("w"), probe.ref) - processor.tell(Cmd("w"), probe.ref) - processor.tell(Cmd("w"), probe.ref) - EventFilter[UnsupportedOperationException](occurrences = 1) intercept { - processor.tell(Persistent("not allowed when persisting"), probe.ref) - } - } "be able to reply within an event handler" in { - val processor = namedProcessor[ReplyInEventHandlerProcessor] - processor ! Cmd("a") + val persistentActor = namedPersistentActor[ReplyInEventHandlerPersistentActor] + persistentActor ! Cmd("a") expectMsg("a") } "support user stash operations" in { - val processor = namedProcessor[UserStashProcessor] - processor ! Cmd("a") - processor ! 
Cmd("b") - processor ! Cmd("c") + val persistentActor = namedPersistentActor[UserStashPersistentActor] + persistentActor ! Cmd("a") + persistentActor ! Cmd("b") + persistentActor ! Cmd("c") expectMsg("b") expectMsg("c") expectMsg("a") } "support user stash operations with several stashed messages" in { - val processor = namedProcessor[UserStashManyProcessor] + val persistentActor = namedPersistentActor[UserStashManyPersistentActor] val n = 10 val cmds = 1 to n flatMap (_ ⇒ List(Cmd("a"), Cmd("b-1"), Cmd("b-2"), Cmd("c"))) val evts = 1 to n flatMap (_ ⇒ List("a", "c", "b-1", "b-2")) - cmds foreach (processor ! _) - processor ! GetState + cmds foreach (persistentActor ! _) + persistentActor ! GetState expectMsg((List("a-1", "a-2") ++ evts)) } "support user stash operations under failures" in { - val processor = namedProcessor[UserStashFailureProcessor] + val persistentActor = namedPersistentActor[UserStashFailurePersistentActor] val bs = 1 to 10 map ("b-" + _) - processor ! Cmd("a") - bs foreach (processor ! Cmd(_)) - processor ! Cmd("c") - processor ! GetState + persistentActor ! Cmd("a") + bs foreach (persistentActor ! Cmd(_)) + persistentActor ! Cmd("c") + persistentActor ! GetState expectMsg(List("a-1", "a-2", "a", "c") ++ bs.filter(_ != "b-2")) } "be able to persist events that extend AnyVal" in { - val processor = namedProcessor[AnyValEventProcessor] - processor ! Cmd("a") + val persistentActor = namedPersistentActor[AnyValEventPersistentActor] + persistentActor ! Cmd("a") expectMsg(5) } "be able to opt-out from stashing messages until all events have been processed" in { - val processor = namedProcessor[AsyncPersistProcessor] - processor ! Cmd("x") - processor ! Cmd("y") + val persistentActor = namedPersistentActor[AsyncPersistPersistentActor] + persistentActor ! Cmd("x") + persistentActor ! 
Cmd("y") expectMsg("x") expectMsg("y") // "y" command was processed before event persisted expectMsg("x-1") expectMsg("y-2") } "support multiple persistAsync calls for one command, and execute them 'when possible', not hindering command processing" in { - val processor = namedProcessor[AsyncPersistThreeTimesProcessor] + val persistentActor = namedPersistentActor[AsyncPersistThreeTimesPersistentActor] val commands = 1 to 10 map { i ⇒ Cmd(s"c-$i") } commands foreach { i ⇒ Thread.sleep(Random.nextInt(10)) - processor ! i + persistentActor ! i } val all: Seq[String] = this.receiveN(40).asInstanceOf[Seq[String]] // each command = 1 reply + 3 event-replies @@ -701,16 +687,16 @@ abstract class PersistentActorSpec(config: Config) extends AkkaSpec(config) with acks should equal(expectedAcks) } "reply to the original sender() of a command, even when using persistAsync" in { - // sanity check, the setting of sender() for PersistentRepl is handled by Processor currently + // sanity check, the setting of sender() for PersistentRepl is handled by PersistentActor currently // but as we want to remove it soon, keeping the explicit test here. - val processor = namedProcessor[AsyncPersistThreeTimesProcessor] + val persistentActor = namedPersistentActor[AsyncPersistThreeTimesPersistentActor] val commands = 1 to 10 map { i ⇒ Cmd(s"c-$i") } val probes = Vector.fill(10)(TestProbe()) (probes zip commands) foreach { case (p, c) ⇒ - processor.tell(c, p.ref) + persistentActor.tell(c, p.ref) } val ackClass = classOf[String] @@ -719,8 +705,8 @@ abstract class PersistentActorSpec(config: Config) extends AkkaSpec(config) with } } "support the same event being asyncPersist'ed multiple times" in { - val processor = namedProcessor[AsyncPersistSameEventTwiceProcessor] - processor ! Cmd("x") + val persistentActor = namedPersistentActor[AsyncPersistSameEventTwicePersistentActor] + persistentActor ! 
Cmd("x") expectMsg("x") expectMsg("x-a-1") @@ -728,10 +714,10 @@ abstract class PersistentActorSpec(config: Config) extends AkkaSpec(config) with expectNoMsg(100.millis) } "support a mix of persist calls (sync, async, sync) and persist calls in expected order" in { - val processor = namedProcessor[AsyncPersistAndPersistMixedSyncAsyncSyncProcessor] - processor ! Cmd("a") - processor ! Cmd("b") - processor ! Cmd("c") + val persistentActor = namedPersistentActor[AsyncPersistAndPersistMixedSyncAsyncSyncPersistentActor] + persistentActor ! Cmd("a") + persistentActor ! Cmd("b") + persistentActor ! Cmd("c") expectMsg("a") expectMsg("a-e1-1") // persist expectMsg("a-ea2-2") // persistAsync, but ordering enforced by sync persist below @@ -748,10 +734,10 @@ abstract class PersistentActorSpec(config: Config) extends AkkaSpec(config) with expectNoMsg(100.millis) } "support a mix of persist calls (sync, async) and persist calls" in { - val processor = namedProcessor[AsyncPersistAndPersistMixedSyncAsyncProcessor] - processor ! Cmd("a") - processor ! Cmd("b") - processor ! Cmd("c") + val persistentActor = namedPersistentActor[AsyncPersistAndPersistMixedSyncAsyncPersistentActor] + persistentActor ! Cmd("a") + persistentActor ! Cmd("b") + persistentActor ! Cmd("c") expectMsg("a") expectMsg("a-e1-1") // persist, must be before next command @@ -771,16 +757,16 @@ abstract class PersistentActorSpec(config: Config) extends AkkaSpec(config) with expectNoMsg(100.millis) } "correlate persistAsync handlers after restart" in { - val processor = namedProcessor[AsyncPersistHandlerCorrelationCheck] - for (n ← 1 to 100) processor ! Cmd(n) - processor ! "boom" - for (n ← 1 to 20) processor ! Cmd(n) - processor ! Cmd("done") + val persistentActor = namedPersistentActor[AsyncPersistHandlerCorrelationCheck] + for (n ← 1 to 100) persistentActor ! Cmd(n) + persistentActor ! "boom" + for (n ← 1 to 20) persistentActor ! Cmd(n) + persistentActor ! 
Cmd("done") expectMsg(5.seconds, "done") } "allow deferring handlers in order to provide ordered processing in respect to persist handlers" in { - val processor = namedProcessor[DeferringWithPersistActor] - processor ! Cmd("a") + val persistentActor = namedPersistentActor[DeferringWithPersistActor] + persistentActor ! Cmd("a") expectMsg("d-1") expectMsg("a-2") expectMsg("d-3") @@ -788,8 +774,8 @@ abstract class PersistentActorSpec(config: Config) extends AkkaSpec(config) with expectNoMsg(100.millis) } "allow deferring handlers in order to provide ordered processing in respect to asyncPersist handlers" in { - val processor = namedProcessor[DeferringWithAsyncPersistActor] - processor ! Cmd("a") + val persistentActor = namedPersistentActor[DeferringWithAsyncPersistActor] + persistentActor ! Cmd("a") expectMsg("d-a-1") expectMsg("pa-a-2") expectMsg("d-a-3") @@ -797,11 +783,11 @@ abstract class PersistentActorSpec(config: Config) extends AkkaSpec(config) with expectNoMsg(100.millis) } "invoke deferred handlers, in presence of mixed a long series persist / persistAsync calls" in { - val processor = namedProcessor[DeferringMixedCallsPPADDPADPersistActor] + val persistentActor = namedPersistentActor[DeferringMixedCallsPPADDPADPersistActor] val p1, p2 = TestProbe() - processor.tell(Cmd("a"), p1.ref) - processor.tell(Cmd("b"), p2.ref) + persistentActor.tell(Cmd("a"), p1.ref) + persistentActor.tell(Cmd("b"), p2.ref) p1.expectMsg("p-a-1") p1.expectMsg("pa-a-2") p1.expectMsg("d-a-3") @@ -819,19 +805,19 @@ abstract class PersistentActorSpec(config: Config) extends AkkaSpec(config) with expectNoMsg(100.millis) } "invoke deferred handlers right away, if there are no pending persist handlers registered" in { - val processor = namedProcessor[DeferringWithNoPersistCallsPersistActor] - processor ! Cmd("a") + val persistentActor = namedPersistentActor[DeferringWithNoPersistCallsPersistActor] + persistentActor ! 
Cmd("a") expectMsg("d-1") expectMsg("d-2") expectMsg("d-3") expectNoMsg(100.millis) } "invoke deferred handlers, perserving the original sender references" in { - val processor = namedProcessor[DeferringWithAsyncPersistActor] + val persistentActor = namedPersistentActor[DeferringWithAsyncPersistActor] val p1, p2 = TestProbe() - processor.tell(Cmd("a"), p1.ref) - processor.tell(Cmd("b"), p2.ref) + persistentActor.tell(Cmd("a"), p1.ref) + persistentActor.tell(Cmd("b"), p2.ref) p1.expectMsg("d-a-1") p1.expectMsg("pa-a-2") p1.expectMsg("d-a-3") @@ -844,22 +830,35 @@ abstract class PersistentActorSpec(config: Config) extends AkkaSpec(config) with expectNoMsg(100.millis) } "receive RecoveryFinished if it is handled after all events have been replayed" in { - val processor1 = system.actorOf(Props(classOf[SnapshottingPersistentActor], name, testActor)) - processor1 ! Cmd("b") - processor1 ! "snap" - processor1 ! Cmd("c") + val persistentActor1 = system.actorOf(Props(classOf[SnapshottingPersistentActor], name, testActor)) + persistentActor1 ! Cmd("b") + persistentActor1 ! "snap" + persistentActor1 ! Cmd("c") expectMsg("saved") - processor1 ! GetState + persistentActor1 ! GetState expectMsg(List("a-1", "a-2", "b-41", "b-42", "c-41", "c-42")) - val processor2 = system.actorOf(Props(classOf[HandleRecoveryFinishedEventProcessor], name, testActor)) + val persistentActor2 = system.actorOf(Props(classOf[HandleRecoveryFinishedEventPersistentActor], name, testActor)) expectMsg("offered") expectMsg(RecoveryCompleted) expectMsg("I am the stashed") expectMsg("I am the recovered") - processor2 ! GetState + persistentActor2 ! GetState expectMsg(List("a-1", "a-2", "b-41", "b-42", "c-41", "c-42", RecoveryCompleted)) } + "preserv order of incoming messages" in { + val persistentActor = namedPersistentActor[StressOrdering] + persistentActor ! Cmd("a") + val latch = TestLatch(1) + persistentActor ! LatchCmd(latch, "b") + persistentActor ! 
"c" + expectMsg("a") + expectMsg("b") + persistentActor ! "d" + latch.countDown() + expectMsg("c") + expectMsg("d") + } "be used as a stackable modification" in { val persistentActor = system.actorOf(Props(classOf[StackableTestPersistentActor], testActor)) expectMsg("mixin aroundPreStart") @@ -867,8 +866,8 @@ abstract class PersistentActorSpec(config: Config) extends AkkaSpec(config) with expectMsg("preStart") persistentActor ! "restart" - expectMsg("mixin aroundReceive") - expectMsg("base aroundReceive") + expectMsg("mixin aroundReceive restart") + expectMsg("base aroundReceive restart") expectMsg("mixin aroundPreRestart") expectMsg("base aroundPreRestart") @@ -887,6 +886,7 @@ abstract class PersistentActorSpec(config: Config) extends AkkaSpec(config) with expectNoMsg(100.millis) } + } } diff --git a/akka-persistence/src/test/scala/akka/persistence/PersistentChannelSpec.scala b/akka-persistence/src/test/scala/akka/persistence/PersistentChannelSpec.scala deleted file mode 100644 index bc0e273f37..0000000000 --- a/akka-persistence/src/test/scala/akka/persistence/PersistentChannelSpec.scala +++ /dev/null @@ -1,151 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package akka.persistence - -import scala.concurrent.duration._ -import scala.language.postfixOps - -import com.typesafe.config._ - -import akka.actor._ -import akka.testkit._ - -object PersistentChannelSpec { - class SlowDestination(probe: ActorRef, maxReceived: Long) extends Actor { - import context.dispatcher - - val delay = 100.millis - var received = Vector.empty[ConfirmablePersistent] - - def receive = { - case cp: ConfirmablePersistent ⇒ - if (received.isEmpty) context.system.scheduler.scheduleOnce(delay, self, "confirm") - received :+= cp - case "confirm" ⇒ - if (received.size > maxReceived) probe ! s"number of received messages to high: ${received.size}" - else probe ! 
received.head.payload - received.head.confirm() - received = received.tail - if (received.nonEmpty) context.system.scheduler.scheduleOnce(delay, self, "confirm") - } - } -} - -abstract class PersistentChannelSpec(config: Config) extends ChannelSpec(config) { - import PersistentChannelSpec._ - - private def redeliverChannelSettings(listener: Option[ActorRef]): PersistentChannelSettings = - PersistentChannelSettings(redeliverMax = 2, redeliverInterval = 100 milliseconds, redeliverFailureListener = listener, idleTimeout = 5.seconds) - - private def createDefaultTestChannel(name: String): ActorRef = - system.actorOf(PersistentChannel.props(s"${name}-default", PersistentChannelSettings(idleTimeout = 5.seconds))) - - override def createDefaultTestChannel(): ActorRef = - createDefaultTestChannel(name) - - override def createRedeliverTestChannel(): ActorRef = - system.actorOf(PersistentChannel.props(s"${name}-redeliver", redeliverChannelSettings(None))) - - override def createRedeliverTestChannel(listener: Option[ActorRef]): ActorRef = - system.actorOf(PersistentChannel.props(s"${name}-redeliver-listener", redeliverChannelSettings(listener))) - - "A persistent channel" must { - "support Persistent replies to Deliver senders" in { - val destProbe = TestProbe() - val replyProbe = TestProbe() - - val channel1 = system.actorOf(PersistentChannel.props(s"${name}-with-reply", PersistentChannelSettings(replyPersistent = true))) - - channel1 tell (Deliver(Persistent("a"), destProbe.ref.path), replyProbe.ref) - destProbe.expectMsgPF() { case cp @ ConfirmablePersistent("a", _, _) ⇒ cp.confirm() } - replyProbe.expectMsgPF() { case Persistent("a", _) ⇒ } - - channel1 tell (Deliver(PersistentRepr("b", sequenceNr = 13), destProbe.ref.path), replyProbe.ref) - destProbe.expectMsgPF() { case cp @ ConfirmablePersistent("b", 13, _) ⇒ cp.confirm() } - replyProbe.expectMsgPF() { case Persistent("b", 13) ⇒ } - - system.stop(channel1) - } - "not modify certain persistent message fields" in { - 
val destProbe = TestProbe() - - val persistent1 = PersistentRepr(payload = "a", persistenceId = "p1", confirms = List("c1", "c2"), sender = defaultTestChannel, sequenceNr = 13) - val persistent2 = PersistentRepr(payload = "b", persistenceId = "p1", confirms = List("c1", "c2"), sender = defaultTestChannel) - - defaultTestChannel ! Deliver(persistent1, destProbe.ref.path) - defaultTestChannel ! Deliver(persistent2, destProbe.ref.path) - - destProbe.expectMsgPF() { case cp @ ConfirmablePersistentImpl("a", 13, "p1", _, _, Seq("c1", "c2"), _, _, channel) ⇒ cp.confirm() } - destProbe.expectMsgPF() { case cp @ ConfirmablePersistentImpl("b", 2, "p1", _, _, Seq("c1", "c2"), _, _, channel) ⇒ cp.confirm() } - } - "redeliver un-confirmed stored messages during recovery" in { - val confirmProbe = TestProbe() - val forwardProbe = TestProbe() - - subscribeToConfirmation(confirmProbe) - - val channel1 = createDefaultTestChannel("extra") - channel1 tell (Deliver(Persistent("a1"), forwardProbe.ref.path), null) - channel1 tell (Deliver(Persistent("a2"), forwardProbe.ref.path), null) - - forwardProbe.expectMsgPF() { case m @ ConfirmablePersistent("a1", _, _) ⇒ /* no confirmation */ } - forwardProbe.expectMsgPF() { case m @ ConfirmablePersistent("a2", _, _) ⇒ m.confirm() } - - awaitConfirmation(confirmProbe) - - system.stop(channel1) - - val channel2 = createDefaultTestChannel("extra") - channel2 tell (Deliver(Persistent("a3"), forwardProbe.ref.path), null) - - forwardProbe.expectMsgPF() { case m @ ConfirmablePersistent("a1", _, _) ⇒ m.confirm() } - forwardProbe.expectMsgPF() { case m @ ConfirmablePersistent("a3", _, _) ⇒ m.confirm() } - - awaitConfirmation(confirmProbe) - awaitConfirmation(confirmProbe) - - system.stop(channel2) - } - "not flood destinations" in { - val probe = TestProbe() - val settings = PersistentChannelSettings( - redeliverMax = 0, - redeliverInterval = 1.minute, - pendingConfirmationsMax = 4, - pendingConfirmationsMin = 2) - - val channel = 
system.actorOf(PersistentChannel.props(s"${name}-watermark", settings)) - val destination = system.actorOf(Props(classOf[SlowDestination], probe.ref, settings.pendingConfirmationsMax)) - - 1 to 10 foreach { i ⇒ channel ! Deliver(Persistent(i), destination.path) } - 1 to 10 foreach { i ⇒ probe.expectMsg(i) } - - system.stop(channel) - } - "redeliver on reset" in { - val probe = TestProbe() - val settings = PersistentChannelSettings( - redeliverMax = 0, - redeliverInterval = 1.minute, - pendingConfirmationsMax = 4, - pendingConfirmationsMin = 2) - - val channel = system.actorOf(PersistentChannel.props(s"${name}-reset", settings)) - - 1 to 3 foreach { i ⇒ channel ! Deliver(Persistent(i), probe.ref.path) } - 1 to 3 foreach { i ⇒ probe.expectMsgPF() { case ConfirmablePersistent(`i`, _, _) ⇒ } } - - channel ! Reset - - 1 to 3 foreach { i ⇒ probe.expectMsgPF() { case ConfirmablePersistent(`i`, _, _) ⇒ } } - - system.stop(channel) - } - } -} - -class LeveldbPersistentChannelSpec extends PersistentChannelSpec(PersistenceSpec.config("leveldb", "LeveldbPersistentChannelSpec")) -class InmemPersistentChannelSpec extends PersistentChannelSpec(PersistenceSpec.config("inmem", "InmemPersistentChannelSpec")) - diff --git a/akka-persistence/src/test/scala/akka/persistence/PersistentViewSpec.scala b/akka-persistence/src/test/scala/akka/persistence/PersistentViewSpec.scala index 61b8dfe878..c1212782b4 100644 --- a/akka-persistence/src/test/scala/akka/persistence/PersistentViewSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/PersistentViewSpec.scala @@ -177,22 +177,16 @@ abstract class PersistentViewSpec(config: Config) extends AkkaSpec(config) with super.afterEach() } - def subscribeToConfirmation(probe: TestProbe): Unit = - system.eventStream.subscribe(probe.ref, classOf[Delivered]) - - def awaitConfirmation(probe: TestProbe): Unit = - probe.expectMsgType[Delivered] - def subscribeToReplay(probe: TestProbe): Unit = system.eventStream.subscribe(probe.ref, 
classOf[ReplayMessages]) "A persistent view" must { - "receive past updates from a processor" in { + "receive past updates from a persistent actor" in { view = system.actorOf(Props(classOf[TestPersistentView], name, viewProbe.ref)) viewProbe.expectMsg("replicated-a-1") viewProbe.expectMsg("replicated-b-2") } - "receive live updates from a processor" in { + "receive live updates from a persistent actor" in { view = system.actorOf(Props(classOf[TestPersistentView], name, viewProbe.ref)) viewProbe.expectMsg("replicated-a-1") viewProbe.expectMsg("replicated-b-2") diff --git a/akka-persistence/src/test/scala/akka/persistence/ProcessorChannelSpec.scala b/akka-persistence/src/test/scala/akka/persistence/ProcessorChannelSpec.scala deleted file mode 100644 index 4a39e3e9c5..0000000000 --- a/akka-persistence/src/test/scala/akka/persistence/ProcessorChannelSpec.scala +++ /dev/null @@ -1,210 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package akka.persistence - -import scala.concurrent.duration._ -import scala.language.postfixOps - -import com.typesafe.config._ - -import akka.actor._ -import akka.testkit._ - -object ProcessorChannelSpec { - class TestProcessor(name: String, channelProps: Props) extends NamedProcessor(name) { - val destination = context.actorOf(Props[TestDestination]) - val channel = context.actorOf(channelProps) - - def receive = { - case m @ Persistent(s: String, _) if s.startsWith("a") ⇒ - // forward to destination via channel, - // destination replies to initial sender - channel forward Deliver(m.withPayload(s"fw: ${s}"), destination.path) - case m @ Persistent(s: String, _) if s.startsWith("b") ⇒ - // reply to sender via channel - channel ! Deliver(m.withPayload(s"re: ${s}"), sender().path) - case m @ Persistent(s: String, _) if s.startsWith("c") ⇒ - // don't use channel - sender() ! 
s"got: ${s}" - case "replay" ⇒ throw new TestException("replay requested") - } - } - - class TestDestination extends Actor { - def receive = { - case m: Persistent ⇒ sender() ! m - } - } - - class ResendingProcessor(name: String, channelProps: Props, destination: ActorRef) extends NamedProcessor(name) { - val channel = context.actorOf(channelProps) - - def receive = { - case p: Persistent ⇒ channel ! Deliver(p, destination.path) - case "replay" ⇒ throw new TestException("replay requested") - } - } - - class ResendingPersistentActor(name: String, channelProps: Props, destination: ActorRef) extends NamedProcessor(name) with PersistentActor { - val channel = context.actorOf(channelProps) - - var events: List[String] = Nil - - def handleEvent(event: String) = { - events = event :: events - channel ! Deliver(Persistent(event), destination.path) - } - - def receiveRecover: Receive = { - case event: String ⇒ handleEvent(event) - } - - def receiveCommand: Receive = { - case "cmd" ⇒ persist("evt")(handleEvent) - case "replay" ⇒ throw new TestException("replay requested") - } - } -} - -abstract class ProcessorChannelSpec(config: Config) extends AkkaSpec(config) with PersistenceSpec with ImplicitSender { - import ProcessorChannelSpec._ - - private var processor: ActorRef = _ - - override protected def beforeEach(): Unit = { - super.beforeEach() - setupTestProcessorData() - processor = createTestProcessor() - } - - override protected def afterEach(): Unit = { - system.stop(processor) - super.afterEach() - } - - def subscribeToConfirmation(probe: TestProbe): Unit = - system.eventStream.subscribe(probe.ref, classOf[Delivered]) - - def awaitConfirmation(probe: TestProbe): Unit = - probe.expectMsgType[Delivered] - - def createTestProcessor(): ActorRef = - system.actorOf(Props(classOf[TestProcessor], name, testChannelProps)) - - def testChannelProps: Props - - def testResendingChannelProps: Props - - def setupTestProcessorData(): Unit = { - val confirmProbe = TestProbe() - val 
forwardProbe = TestProbe() - val replyProbe = TestProbe() - val senderProbe = TestProbe() - - val processor = createTestProcessor() - - subscribeToConfirmation(confirmProbe) - - processor tell (Persistent("a1"), forwardProbe.ref) - processor tell (Persistent("b1"), replyProbe.ref) - processor tell (Persistent("c1"), senderProbe.ref) - - forwardProbe.expectMsgPF() { case m @ ConfirmablePersistent("fw: a1", _, _) ⇒ m.confirm() } - replyProbe.expectMsgPF() { case m @ ConfirmablePersistent("re: b1", _, _) ⇒ m.confirm() } - senderProbe.expectMsg("got: c1") - - awaitConfirmation(confirmProbe) - awaitConfirmation(confirmProbe) - - system.stop(processor) - } - - "A processor that uses a channel" can { - "forward new messages to destination" in { - processor ! Persistent("a2") - expectMsgPF() { case m @ ConfirmablePersistent("fw: a2", _, _) ⇒ m.confirm() } - } - "reply new messages to senders" in { - processor ! Persistent("b2") - expectMsgPF() { case m @ ConfirmablePersistent("re: b2", _, _) ⇒ m.confirm() } - } - "de-duplicate confirmed messages on restart" in { - processor ! Persistent("c3") - expectMsg("got: c3") - processor ! Persistent("a3") - expectMsgPF() { case m @ ConfirmablePersistent("fw: a3", _, _) ⇒ m.confirm() } - - processor ! "replay" - expectMsg("got: c3") - expectNoMsg(1.second) - } - "de-duplicate confirmed messages on starting new with same processor id" in { - processor ! Persistent("c4") - expectMsg("got: c4") - processor ! Persistent("a4") - expectMsgPF() { case m @ ConfirmablePersistent("fw: a4", _, _) ⇒ m.confirm() } - - val p2 = createTestProcessor() - expectMsg("got: c4") - expectNoMsg(1.second) - } - "resend unconfirmed messages on restart" in { - val probe = TestProbe() - val p = system.actorOf(Props(classOf[ResendingProcessor], "rp", testResendingChannelProps, probe.ref)) - - p ! 
Persistent("a") - - probe.expectMsgPF() { case cp @ ConfirmablePersistent("a", 1L, 0) ⇒ } - probe.expectMsgPF() { case cp @ ConfirmablePersistent("a", 1L, 1) ⇒ } - probe.expectNoMsg(200 milliseconds) - - p ! "replay" - - probe.expectMsgPF() { case cp @ ConfirmablePersistent("a", 1L, 0) ⇒ } - probe.expectMsgPF() { case cp @ ConfirmablePersistent("a", 1L, 1) ⇒ cp.confirm() } - } - } - - "A persistent actor that uses a channel" can { - "reliably deliver events" in { - val probe = TestProbe() - val ep = system.actorOf(Props(classOf[ResendingPersistentActor], "rep", testResendingChannelProps, probe.ref)) - - ep ! "cmd" - - probe.expectMsgPF() { case cp @ ConfirmablePersistent("evt", _, 0) ⇒ } - probe.expectMsgPF() { case cp @ ConfirmablePersistent("evt", _, 1) ⇒ } - probe.expectNoMsg(200 milliseconds) - - ep ! "replay" - - probe.expectMsgPF() { case cp @ ConfirmablePersistent("evt", _, 0) ⇒ } - probe.expectMsgPF() { case cp @ ConfirmablePersistent("evt", _, 1) ⇒ cp.confirm() } - } - } -} - -class LeveldbProcessorChannelSpec extends ProcessorChannelSpec(PersistenceSpec.config("leveldb", "LeveldbProcessorChannelSpec")) { - def testChannelProps: Props = Channel.props(s"${name}-channel") - def testResendingChannelProps: Props = - Channel.props("channel", ChannelSettings(redeliverMax = 1, redeliverInterval = 100 milliseconds)) -} -class InmemProcessorChannelSpec extends ProcessorChannelSpec(PersistenceSpec.config("inmem", "InmemProcessorChannelSpec")) { - def testChannelProps: Props = Channel.props(s"${name}-channel") - def testResendingChannelProps: Props = - Channel.props("channel", ChannelSettings(redeliverMax = 1, redeliverInterval = 100 milliseconds)) -} - -class LeveldbProcessorPersistentChannelSpec extends ProcessorChannelSpec(PersistenceSpec.config("leveldb", "LeveldbProcessorPersistentChannelSpec")) { - def testChannelProps: Props = PersistentChannel.props(s"${name}-channel") - def testResendingChannelProps: Props = - PersistentChannel.props("channel", 
PersistentChannelSettings(redeliverMax = 1, redeliverInterval = 100 milliseconds)) -} -class InmemProcessorPersistentChannelSpec extends ProcessorChannelSpec(PersistenceSpec.config("inmem", "InmemProcessorPersistentChannelSpec")) { - def testChannelProps: Props = PersistentChannel.props(s"${name}-channel") - def testResendingChannelProps: Props = - PersistentChannel.props("channel", PersistentChannelSettings(redeliverMax = 1, redeliverInterval = 100 milliseconds)) -} - diff --git a/akka-persistence/src/test/scala/akka/persistence/ProcessorSpec.scala b/akka-persistence/src/test/scala/akka/persistence/ProcessorSpec.scala deleted file mode 100644 index fd5a48229f..0000000000 --- a/akka-persistence/src/test/scala/akka/persistence/ProcessorSpec.scala +++ /dev/null @@ -1,473 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package akka.persistence - -import akka.actor._ -import akka.testkit._ -import com.typesafe.config._ -import scala.concurrent.duration._ -import scala.collection.immutable.Seq -import scala.util.control.NoStackTrace - -object ProcessorSpec { - class RecoverTestProcessor(name: String) extends NamedProcessor(name) { - var state = List.empty[String] - def receive = { - case "boom" ⇒ throw new TestException("boom") - case Persistent("boom", _) ⇒ throw new TestException("boom") - case Persistent(payload, snr) ⇒ state = s"${payload}-${snr}" :: state - case GetState ⇒ sender() ! state.reverse - } - - override def preRestart(reason: Throwable, message: Option[Any]) = { - message match { - case Some(m: Persistent) ⇒ deleteMessage(m.sequenceNr) // delete message from journal - case _ ⇒ // ignore - } - super.preRestart(reason, message) - } - } - - class RecoverOffTestProcessor(name: String) extends RecoverTestProcessor(name) with TurnOffRecoverOnStart - - class StoredSenderTestProcessor(name: String) extends NamedProcessor(name) { - def receive = { - case Persistent(payload, _) ⇒ sender() ! 
payload - } - } - - class RecoveryStatusTestProcessor(name: String) extends NamedProcessor(name) { - def receive = { - case Persistent("c", _) if !recoveryRunning ⇒ sender() ! "c" - case Persistent(payload, _) if recoveryRunning ⇒ sender() ! payload - } - } - - class BehaviorChangeTestProcessor(name: String) extends NamedProcessor(name) { - val acceptA: Actor.Receive = { - case Persistent("a", _) ⇒ - sender() ! "a" - context.become(acceptB) - } - - val acceptB: Actor.Receive = { - case Persistent("b", _) ⇒ - sender() ! "b" - context.become(acceptA) - } - - def receive = acceptA - } - - class FsmTestProcessor(name: String) extends NamedProcessor(name) with FSM[String, Int] { - startWith("closed", 0) - - when("closed") { - case Event(Persistent("a", _), counter) ⇒ - goto("open") using (counter + 1) replying (counter) - } - - when("open") { - case Event(Persistent("b", _), counter) ⇒ - goto("closed") using (counter + 1) replying (counter) - } - } - - class OutboundMessageTestProcessor(name: String) extends NamedProcessor(name) { - def receive = { - case Persistent(payload, snr) ⇒ sender() ! Persistent(snr) - } - } - - class ResumeTestException extends TestException("test") - - class ResumeTestSupervisor(name: String) extends Actor { - val processor = context.actorOf(Props(classOf[ResumeTestProcessor], name)) - - override val supervisorStrategy = - OneForOneStrategy() { - case _: ResumeTestException ⇒ SupervisorStrategy.Resume - } - - def receive = { - case m ⇒ processor forward m - } - } - - class ResumeTestProcessor(name: String) extends NamedProcessor(name) { - var state: List[String] = Nil - def receive = { - case "boom" ⇒ throw new ResumeTestException - case Persistent(payload, snr) ⇒ state = s"${payload}-${snr}" :: state - case GetState ⇒ sender() ! 
state.reverse - } - } - - class LastReplayedMsgFailsTestProcessor(name: String) extends RecoverTestProcessor(name) { - override def preRestart(reason: Throwable, message: Option[Any]) = { - message match { - case Some(m: Persistent) ⇒ if (recoveryRunning) deleteMessage(m.sequenceNr) - case _ ⇒ - } - super.preRestart(reason, message) - } - } - - class AnyReplayedMsgFailsTestProcessor(name: String) extends RecoverTestProcessor(name) { - val failOnReplayedA: Actor.Receive = { - case Persistent("a", _) if recoveryRunning ⇒ throw new TestException("boom") - } - - override def receive = failOnReplayedA orElse super.receive - } - - final case class Delete1(snr: Long) - final case class DeleteN(toSnr: Long) - - class DeleteMessageTestProcessor(name: String) extends RecoverTestProcessor(name) { - override def receive = deleteReceive orElse super.receive - - def deleteReceive: Actor.Receive = { - case Delete1(snr) ⇒ deleteMessage(snr) - case DeleteN(toSnr) ⇒ deleteMessages(toSnr) - } - } - - class StackableTestProcessor(val probe: ActorRef) extends StackableTestProcessor.BaseActor with Processor with StackableTestProcessor.MixinActor { - override def persistenceId: String = "StackableTestPersistentActor" - - def receive = { - case "restart" ⇒ throw new Exception("triggering restart") with NoStackTrace { override def toString = "Boom!" } - } - - override def preStart(): Unit = { - probe ! "preStart" - super.preStart() - } - - override def postStop(): Unit = { - probe ! "postStop" - super.postStop() - } - - override def preRestart(reason: Throwable, message: Option[Any]): Unit = { - probe ! "preRestart" - super.preRestart(reason, message) - } - - override def postRestart(reason: Throwable): Unit = { - probe ! "postRestart" - super.postRestart(reason) - } - } - - object StackableTestProcessor { - trait BaseActor extends Actor { this: StackableTestProcessor ⇒ - override protected[akka] def aroundPreStart() = { - probe ! 
"base aroundPreStart" - super.aroundPreStart() - } - - override protected[akka] def aroundPostStop() = { - probe ! "base aroundPostStop" - super.aroundPostStop() - } - - override protected[akka] def aroundPreRestart(reason: Throwable, message: Option[Any]) = { - probe ! "base aroundPreRestart" - super.aroundPreRestart(reason, message) - } - - override protected[akka] def aroundPostRestart(reason: Throwable) = { - probe ! "base aroundPostRestart" - super.aroundPostRestart(reason) - } - - override protected[akka] def aroundReceive(receive: Receive, message: Any) = { - probe ! "base aroundReceive" - super.aroundReceive(receive, message) - } - } - - trait MixinActor extends Actor { this: StackableTestProcessor ⇒ - override protected[akka] def aroundPreStart() = { - probe ! "mixin aroundPreStart" - super.aroundPreStart() - } - - override protected[akka] def aroundPostStop() = { - probe ! "mixin aroundPostStop" - super.aroundPostStop() - } - - override protected[akka] def aroundPreRestart(reason: Throwable, message: Option[Any]) = { - probe ! "mixin aroundPreRestart" - super.aroundPreRestart(reason, message) - } - - override protected[akka] def aroundPostRestart(reason: Throwable) = { - probe ! "mixin aroundPostRestart" - super.aroundPostRestart(reason) - } - - override protected[akka] def aroundReceive(receive: Receive, message: Any) = { - if (message == "restart" && recoveryFinished) { probe ! "mixin aroundReceive" } - super.aroundReceive(receive, message) - } - } - } -} - -abstract class ProcessorSpec(config: Config) extends AkkaSpec(config) with PersistenceSpec with ImplicitSender { - import JournalProtocol._ - import ProcessorSpec._ - - override protected def beforeEach() { - super.beforeEach() - - val processor = namedProcessor[RecoverTestProcessor] - processor ! Persistent("a") - processor ! Persistent("b") - processor ! 
GetState - expectMsg(List("a-1", "b-2")) - } - - "A processor" must { - "recover state on explicit request" in { - val processor = namedProcessor[RecoverOffTestProcessor] - processor ! Recover() - processor ! GetState - expectMsg(List("a-1", "b-2")) - } - "recover state automatically" in { - val processor = namedProcessor[RecoverTestProcessor] - processor ! GetState - expectMsg(List("a-1", "b-2")) - } - "recover state automatically on restart" in { - val processor = namedProcessor[RecoverTestProcessor] - processor ! "boom" - processor ! GetState - expectMsg(List("a-1", "b-2")) - } - "buffer new messages until recovery completed" in { - val processor = namedProcessor[RecoverOffTestProcessor] - processor ! Persistent("c") - processor ! Recover() - processor ! Persistent("d") - processor ! GetState - expectMsg(List("a-1", "b-2", "c-3", "d-4")) - } - "ignore redundant recovery requests" in { - val processor = namedProcessor[RecoverOffTestProcessor] - processor ! Persistent("c") - processor ! Recover() - processor ! Persistent("d") - processor ! Recover() - processor ! Persistent("e") - processor ! GetState - expectMsg(List("a-1", "b-2", "c-3", "d-4", "e-5")) - } - "buffer new messages until restart-recovery completed" in { - val processor = namedProcessor[RecoverTestProcessor] - processor ! "boom" - processor ! Persistent("c") - processor ! Persistent("d") - processor ! GetState - expectMsg(List("a-1", "b-2", "c-3", "d-4")) - } - "allow deletion of journaled messages on failure" in { - val processor = namedProcessor[RecoverTestProcessor] - processor ! Persistent("boom") // journaled message causes failure and will be deleted - processor ! GetState - expectMsg(List("a-1", "b-2")) - } - "allow deletion of journaled messages on failure and buffer new messages until restart-recovery completed" in { - val processor = namedProcessor[RecoverTestProcessor] - processor ! Persistent("boom") // journaled message causes failure and will be deleted - processor ! 
Persistent("c") - processor ! Persistent("d") - processor ! GetState - expectMsg(List("a-1", "b-2", "c-4", "d-5")) // deleted message leaves gap in sequence - } - "store sender references and restore them for replayed messages" in { - namedProcessor[StoredSenderTestProcessor] - List("a", "b") foreach (expectMsg(_)) - } - "properly indicate its recovery status" in { - val processor = namedProcessor[RecoveryStatusTestProcessor] - processor ! Persistent("c") - List("a", "b", "c") foreach (expectMsg(_)) - } - "continue journaling when changing behavior" in { - val processor = namedProcessor[BehaviorChangeTestProcessor] - processor ! Persistent("a") - processor ! Persistent("b") - List("a", "b", "a", "b") foreach (expectMsg(_)) - } - "derive outbound messages from the current message" in { - val processor = namedProcessor[OutboundMessageTestProcessor] - processor ! Persistent("c") - 1 to 3 foreach { _ ⇒ expectMsgPF() { case Persistent(payload, snr) ⇒ payload should be(snr) } } - } - "support recovery with upper sequence number bound" in { - val processor = namedProcessor[RecoverOffTestProcessor] - processor ! Recover(toSequenceNr = 1L) - processor ! GetState - expectMsg(List("a-1")) - } - "never replace journaled messages" in { - val processor1 = namedProcessor[RecoverOffTestProcessor] - processor1 ! Recover(toSequenceNr = 1L) - processor1 ! Persistent("c") - processor1 ! GetState - expectMsg(List("a-1", "c-3")) - - val processor2 = namedProcessor[RecoverOffTestProcessor] - processor2 ! Recover() - processor2 ! GetState - expectMsg(List("a-1", "b-2", "c-3")) - } - "be able to skip restart recovery when being resumed" in { - val supervisor1 = system.actorOf(Props(classOf[ResumeTestSupervisor], "processor")) - supervisor1 ! Persistent("a") - supervisor1 ! Persistent("b") - supervisor1 ! GetState - expectMsg(List("a-1", "b-2")) - - val supervisor2 = system.actorOf(Props(classOf[ResumeTestSupervisor], "processor")) - supervisor2 ! Persistent("c") - supervisor2 ! 
"boom" - supervisor2 ! Persistent("d") - supervisor2 ! GetState - expectMsg(List("a-1", "b-2", "c-3", "d-4")) - - val supervisor3 = system.actorOf(Props(classOf[ResumeTestSupervisor], "processor")) - supervisor3 ! GetState - expectMsg(List("a-1", "b-2", "c-3", "d-4")) - } - "be able to re-run restart recovery when it fails with last replayed message" in { - val processor = namedProcessor[LastReplayedMsgFailsTestProcessor] - processor ! Persistent("c") - processor ! Persistent("boom") - processor ! Persistent("d") - processor ! GetState - expectMsg(List("a-1", "b-2", "c-3", "d-5")) - } - "be able to re-run initial recovery when it fails with a message that is not the last replayed message" in { - val processor = namedProcessor[AnyReplayedMsgFailsTestProcessor] - processor ! Persistent("c") - processor ! GetState - expectMsg(List("b-2", "c-3")) - } - "be able to re-run restart recovery when it fails with a message that is not the last replayed message" in { - val processor = system.actorOf(Props(classOf[AnyReplayedMsgFailsTestProcessor], "other")) // new processor, no initial replay - processor ! Persistent("b") - processor ! Persistent("a") - processor ! Persistent("c") - processor ! Persistent("d") - processor ! Persistent("e") - processor ! Persistent("f") - processor ! Persistent("g") - processor ! Persistent("h") - processor ! Persistent("i") - processor ! "boom" - processor ! Persistent("j") - processor ! GetState - expectMsg(List("b-1", "c-3", "d-4", "e-5", "f-6", "g-7", "h-8", "i-9", "j-10")) - } - "support batch writes" in { - val processor = namedProcessor[RecoverTestProcessor] - processor ! PersistentBatch(Seq(Persistent("c"), Persistent("d"), Persistent("e"))) - processor ! Persistent("f") - processor ! 
GetState - expectMsg(List("a-1", "b-2", "c-3", "d-4", "e-5", "f-6")) - } - "support single message deletions" in { - val deleteProbe = TestProbe() - - system.eventStream.subscribe(deleteProbe.ref, classOf[DeleteMessages]) - - val processor1 = namedProcessor[DeleteMessageTestProcessor] - processor1 ! Persistent("c") - processor1 ! Persistent("d") - processor1 ! Persistent("e") - processor1 ! Delete1(4) - deleteProbe.expectMsgType[DeleteMessages] - - val processor2 = namedProcessor[DeleteMessageTestProcessor] - processor2 ! GetState - - expectMsg(List("a-1", "b-2", "c-3", "e-5")) - } - "support bulk message deletions" in { - val deleteProbe = TestProbe() - - system.eventStream.subscribe(deleteProbe.ref, classOf[DeleteMessagesTo]) - - val processor1 = namedProcessor[DeleteMessageTestProcessor] - processor1 ! Persistent("c") - processor1 ! Persistent("d") - processor1 ! Persistent("e") - processor1 ! DeleteN(4) - deleteProbe.expectMsgType[DeleteMessagesTo] - - val processor2 = namedProcessor[DeleteMessageTestProcessor] - processor2 ! GetState - - expectMsg(List("e-5")) - - processor2 ! Persistent("f") - processor2 ! Persistent("g") - processor2 ! DeleteN(6) - deleteProbe.expectMsgType[DeleteMessagesTo] - - val processor3 = namedProcessor[DeleteMessageTestProcessor] - processor3 ! GetState - - expectMsg(List("g-7")) - } - } - - "A processor" can { - "be a finite state machine" in { - val processor = namedProcessor[FsmTestProcessor] - processor ! Persistent("a") - processor ! Persistent("b") - List(0, 1, 2, 3) foreach (expectMsg(_)) - } - - "be used as a stackable modification" in { - val processor = system.actorOf(Props(classOf[StackableTestProcessor], testActor)) - expectMsg("mixin aroundPreStart") - expectMsg("base aroundPreStart") - expectMsg("preStart") - - processor ! 
"restart" - expectMsg("mixin aroundReceive") - expectMsg("base aroundReceive") - - expectMsg("mixin aroundPreRestart") - expectMsg("base aroundPreRestart") - expectMsg("preRestart") - expectMsg("postStop") - - expectMsg("mixin aroundPostRestart") - expectMsg("base aroundPostRestart") - expectMsg("postRestart") - expectMsg("preStart") - - processor ! PoisonPill - expectMsg("mixin aroundPostStop") - expectMsg("base aroundPostStop") - expectMsg("postStop") - - expectNoMsg(100.millis) - } - } -} - -class LeveldbProcessorSpec extends ProcessorSpec(PersistenceSpec.config("leveldb", "LeveldbProcessorSpec")) -class InmemProcessorSpec extends ProcessorSpec(PersistenceSpec.config("inmem", "InmemProcessorSpec")) diff --git a/akka-persistence/src/test/scala/akka/persistence/ProcessorStashSpec.scala b/akka-persistence/src/test/scala/akka/persistence/ProcessorStashSpec.scala deleted file mode 100644 index 46bf95fa76..0000000000 --- a/akka-persistence/src/test/scala/akka/persistence/ProcessorStashSpec.scala +++ /dev/null @@ -1,132 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -package akka.persistence - -import com.typesafe.config._ - -import akka.actor._ -import akka.testkit._ - -object ProcessorStashSpec { - class StashingProcessor(name: String) extends NamedProcessor(name) { - var state: List[String] = Nil - - val behaviorA: Actor.Receive = { - case Persistent("a", snr) ⇒ - update("a", snr) - context.become(behaviorB) - case Persistent("b", snr) ⇒ - update("b", snr) - case Persistent("c", snr) ⇒ - update("c", snr) - unstashAll() - case "x" ⇒ - update("x") - case "boom" ⇒ throw new TestException("boom") - case Persistent("boom", _) ⇒ throw new TestException("boom") - case GetState ⇒ sender() ! 
state.reverse - } - - val behaviorB: Actor.Receive = { - case Persistent("b", _) ⇒ - stash() - context.become(behaviorA) - case "x" ⇒ - stash() - } - - def receive = behaviorA - - def update(payload: String, snr: Long = 0L) { - state = s"${payload}-${snr}" :: state - } - } - - class RecoveryFailureStashingProcessor(name: String) extends StashingProcessor(name) { - override def preRestart(reason: Throwable, message: Option[Any]) = { - message match { - case Some(m: Persistent) ⇒ if (recoveryRunning) deleteMessage(m.sequenceNr) - case _ ⇒ - } - super.preRestart(reason, message) - } - } -} - -abstract class ProcessorStashSpec(config: Config) extends AkkaSpec(config) with PersistenceSpec with ImplicitSender { - import ProcessorStashSpec._ - - "A processor" must { - "support user stash and unstash operations for persistent messages" in { - val p1 = namedProcessor[StashingProcessor] - p1 ! Persistent("a") - p1 ! Persistent("b") - p1 ! Persistent("c") - p1 ! GetState - expectMsg(List("a-1", "c-3", "b-2")) - - val p2 = namedProcessor[StashingProcessor] - p2 ! Persistent("a") - p2 ! Persistent("b") - p2 ! Persistent("c") - p2 ! GetState - expectMsg(List("a-1", "c-3", "b-2", "a-4", "c-6", "b-5")) - } - "support user stash and unstash operations for persistent and transient messages" in { - val p1 = namedProcessor[StashingProcessor] - p1 ! Persistent("a") - p1 ! "x" - p1 ! Persistent("b") - p1 ! Persistent("c") - p1 ! GetState - expectMsg(List("a-1", "c-3", "x-0", "b-2")) - - val p2 = namedProcessor[StashingProcessor] - p2 ! Persistent("a") - p2 ! "x" - p2 ! Persistent("b") - p2 ! Persistent("c") - p2 ! GetState - expectMsg(List("a-1", "c-3", "b-2", "a-4", "c-6", "x-0", "b-5")) - } - "support restarts between user stash and unstash operations" in { - val p1 = namedProcessor[StashingProcessor] - p1 ! Persistent("a") - p1 ! Persistent("b") - p1 ! "boom" - p1 ! Persistent("c") - p1 ! 
GetState - expectMsg(List("a-1", "c-3", "b-2")) - - val p2 = namedProcessor[StashingProcessor] - p2 ! Persistent("a") - p2 ! Persistent("b") - p2 ! "boom" - p2 ! Persistent("c") - p2 ! GetState - expectMsg(List("a-1", "c-3", "b-2", "a-4", "c-6", "b-5")) - } - "support multiple restarts between user stash and unstash operations" in { - val p1 = namedProcessor[RecoveryFailureStashingProcessor] - p1 ! Persistent("a") - p1 ! Persistent("b") - p1 ! Persistent("boom") - p1 ! Persistent("c") - p1 ! GetState - expectMsg(List("a-1", "c-4", "b-2")) - - val p2 = namedProcessor[RecoveryFailureStashingProcessor] - p2 ! Persistent("a") - p2 ! Persistent("b") - p2 ! Persistent("boom") - p2 ! Persistent("c") - p2 ! GetState - expectMsg(List("a-1", "c-4", "b-2", "a-5", "c-8", "b-6")) - } - } -} - -class LeveldbProcessorStashSpec extends ProcessorStashSpec(PersistenceSpec.config("leveldb", "LeveldbProcessorStashSpec")) -class InmemProcessorStashSpec extends ProcessorStashSpec(PersistenceSpec.config("inmem", "InmemProcessorStashSpec")) diff --git a/akka-persistence/src/test/scala/akka/persistence/SnapshotDirectoryFailureSpec.scala b/akka-persistence/src/test/scala/akka/persistence/SnapshotDirectoryFailureSpec.scala index 8b4863e2c5..fc17b7e22d 100644 --- a/akka-persistence/src/test/scala/akka/persistence/SnapshotDirectoryFailureSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/SnapshotDirectoryFailureSpec.scala @@ -11,13 +11,17 @@ import akka.actor.{ ActorInitializationException, Props, ActorRef } object SnapshotDirectoryFailureSpec { val inUseSnapshotPath = "target/inUseSnapshotPath" - class TestProcessor(name: String, probe: ActorRef) extends Processor { + class TestPersistentActor(name: String, probe: ActorRef) extends PersistentActor { override def persistenceId: String = name override def preStart(): Unit = () - def receive = { + override def receiveRecover: Receive = { + case SnapshotOffer(md, s) ⇒ probe ! 
((md, s)) + } + + override def receiveCommand = { case s: String ⇒ saveSnapshot(s) case SaveSnapshotSuccess(md) ⇒ probe ! md.sequenceNr case other ⇒ probe ! other @@ -27,8 +31,8 @@ object SnapshotDirectoryFailureSpec { class SnapshotDirectoryFailureSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "SnapshotDirectoryFailureSpec", extraConfig = Some( s""" - |akka.persistence.snapshot-store.local.dir = "${SnapshotDirectoryFailureSpec.inUseSnapshotPath}" - """.stripMargin))) with ImplicitSender { + akka.persistence.snapshot-store.local.dir = "${SnapshotDirectoryFailureSpec.inUseSnapshotPath}" + """))) with ImplicitSender { import SnapshotDirectoryFailureSpec._ @@ -45,8 +49,8 @@ class SnapshotDirectoryFailureSpec extends AkkaSpec(PersistenceSpec.config("leve "A local snapshot store configured with an failing directory name " must { "throw an exception at startup" in { EventFilter[ActorInitializationException](occurrences = 1).intercept { - val processor = system.actorOf(Props(classOf[TestProcessor], "SnapshotDirectoryFailureSpec-1", testActor)) - processor ! "blahonga" + val p = system.actorOf(Props(classOf[TestPersistentActor], "SnapshotDirectoryFailureSpec-1", testActor)) + p ! "blahonga" } } } diff --git a/akka-persistence/src/test/scala/akka/persistence/SnapshotFailureRobustnessSpec.scala b/akka-persistence/src/test/scala/akka/persistence/SnapshotFailureRobustnessSpec.scala index 0746352130..22cdb00695 100644 --- a/akka-persistence/src/test/scala/akka/persistence/SnapshotFailureRobustnessSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/SnapshotFailureRobustnessSpec.scala @@ -15,20 +15,35 @@ import scala.language.postfixOps object SnapshotFailureRobustnessSpec { - class SaveSnapshotTestProcessor(name: String, probe: ActorRef) extends NamedProcessor(name) { - def receive = { - case Persistent(payload, snr) ⇒ saveSnapshot(payload) - case SaveSnapshotSuccess(md) ⇒ probe ! md.sequenceNr - case SnapshotOffer(md, s) ⇒ probe ! 
((md, s)) - case other ⇒ probe ! other + case class Cmd(payload: String) + + class SaveSnapshotTestPersistentActor(name: String, probe: ActorRef) extends NamedPersistentActor(name) { + override def receiveRecover: Receive = { + case SnapshotOffer(md, s) ⇒ probe ! ((md, s)) + case other ⇒ probe ! other + } + + override def receiveCommand = { + case Cmd(payload) ⇒ persist(payload)(_ ⇒ saveSnapshot(payload)) + case SaveSnapshotSuccess(md) ⇒ probe ! md.sequenceNr + case other ⇒ probe ! other } } - class LoadSnapshotTestProcessor(name: String, probe: ActorRef) extends NamedProcessor(name) { - def receive = { - case Persistent(payload, snr) ⇒ probe ! s"${payload}-${snr}" - case SnapshotOffer(md, s) ⇒ probe ! ((md, s)) - case other ⇒ probe ! other + class LoadSnapshotTestPersistentActor(name: String, probe: ActorRef) extends NamedPersistentActor(name) { + override def receiveRecover: Receive = { + case SnapshotOffer(md, s) ⇒ probe ! ((md, s)) + case payload: String ⇒ probe ! s"${payload}-${lastSequenceNr}" + case other ⇒ probe ! other + } + + override def receiveCommand = { + case Cmd(payload) ⇒ + persist(payload) { _ ⇒ + probe ! s"${payload}-${lastSequenceNr}" + } + case SnapshotOffer(md, s) ⇒ probe ! ((md, s)) + case other ⇒ probe ! 
other } override def preStart() = () } @@ -45,27 +60,27 @@ object SnapshotFailureRobustnessSpec { class SnapshotFailureRobustnessSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "SnapshotFailureRobustnessSpec", serialization = "off", extraConfig = Some( """ - |akka.persistence.snapshot-store.local.class = "akka.persistence.SnapshotFailureRobustnessSpec$FailingLocalSnapshotStore" - """.stripMargin))) with PersistenceSpec with ImplicitSender { + akka.persistence.snapshot-store.local.class = "akka.persistence.SnapshotFailureRobustnessSpec$FailingLocalSnapshotStore" + """))) with PersistenceSpec with ImplicitSender { import SnapshotFailureRobustnessSpec._ - "A processor with a failing snapshot" must { + "A persistentActor with a failing snapshot" must { "recover state starting from the most recent complete snapshot" in { - val sProcessor = system.actorOf(Props(classOf[SaveSnapshotTestProcessor], name, testActor)) + val sPersistentActor = system.actorOf(Props(classOf[SaveSnapshotTestPersistentActor], name, testActor)) val persistenceId = name expectMsg(RecoveryCompleted) - sProcessor ! Persistent("blahonga") + sPersistentActor ! Cmd("blahonga") expectMsg(1) - sProcessor ! Persistent("kablama") + sPersistentActor ! Cmd("kablama") expectMsg(2) system.eventStream.publish(TestEvent.Mute( EventFilter.error(start = "Error loading snapshot ["))) system.eventStream.subscribe(testActor, classOf[Logging.Error]) try { - val lProcessor = system.actorOf(Props(classOf[LoadSnapshotTestProcessor], name, testActor)) - lProcessor ! Recover() + val lPersistentActor = system.actorOf(Props(classOf[LoadSnapshotTestPersistentActor], name, testActor)) + lPersistentActor ! 
Recover() expectMsgPF() { case (SnapshotMetadata(`persistenceId`, 1, timestamp), state) ⇒ state should be("blahonga") diff --git a/akka-persistence/src/test/scala/akka/persistence/SnapshotSerializationSpec.scala b/akka-persistence/src/test/scala/akka/persistence/SnapshotSerializationSpec.scala index f776da7490..73dfd6d891 100644 --- a/akka-persistence/src/test/scala/akka/persistence/SnapshotSerializationSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/SnapshotSerializationSpec.scala @@ -47,12 +47,17 @@ object SnapshotSerializationSpec { } } - class TestProcessor(name: String, probe: ActorRef) extends NamedProcessor(name) { - def receive = { + class TestPersistentActor(name: String, probe: ActorRef) extends NamedPersistentActor(name) { + + override def receiveRecover: Receive = { + case SnapshotOffer(md, s) ⇒ probe ! ((md, s)) + case RecoveryCompleted ⇒ // ignore + case other ⇒ probe ! other + } + + override def receiveCommand = { case s: String ⇒ saveSnapshot(new MySnapshot(s)) case SaveSnapshotSuccess(md) ⇒ probe ! md.sequenceNr - case SnapshotOffer(md, s) ⇒ probe ! ((md, s)) - case RecoveryCompleted ⇒ // ignore case other ⇒ probe ! 
other } } @@ -61,28 +66,28 @@ object SnapshotSerializationSpec { class SnapshotSerializationSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "SnapshotSerializationSpec", serialization = "off", extraConfig = Some( """ - |akka.actor { - | serializers { - | my-snapshot = "akka.persistence.SnapshotSerializationSpec$MySerializer" - | } - | serialization-bindings { - | "akka.persistence.SnapshotSerializationSpec$SerializationMarker" = my-snapshot - | } - |} - """.stripMargin))) with PersistenceSpec with ImplicitSender { + akka.actor { + serializers { + my-snapshot = "akka.persistence.SnapshotSerializationSpec$MySerializer" + } + serialization-bindings { + "akka.persistence.SnapshotSerializationSpec$SerializationMarker" = my-snapshot + } + } + """))) with PersistenceSpec with ImplicitSender { import SnapshotSerializationSpec._ import SnapshotSerializationSpec.XXXXXXXXXXXXXXXXXXXX._ - "A processor with custom Serializer" must { + "A PersistentActor with custom Serializer" must { "be able to handle serialization header of more than 255 bytes" in { - val sProcessor = system.actorOf(Props(classOf[TestProcessor], name, testActor)) + val sPersistentActor = system.actorOf(Props(classOf[TestPersistentActor], name, testActor)) val persistenceId = name - sProcessor ! "blahonga" + sPersistentActor ! "blahonga" expectMsg(0) - val lProcessor = system.actorOf(Props(classOf[TestProcessor], name, testActor)) - lProcessor ! Recover() + val lPersistentActor = system.actorOf(Props(classOf[TestPersistentActor], name, testActor)) + lPersistentActor ! 
Recover() expectMsgPF() { case (SnapshotMetadata(`persistenceId`, 0, timestamp), state) ⇒ state should be(new MySnapshot("blahonga")) diff --git a/akka-persistence/src/test/scala/akka/persistence/SnapshotSpec.scala b/akka-persistence/src/test/scala/akka/persistence/SnapshotSpec.scala index 2df4db483f..dfca6a7b76 100644 --- a/akka-persistence/src/test/scala/akka/persistence/SnapshotSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/SnapshotSpec.scala @@ -10,21 +10,40 @@ import akka.testkit._ object SnapshotSpec { case object TakeSnapshot - class SaveSnapshotTestProcessor(name: String, probe: ActorRef) extends NamedProcessor(name) { + class SaveSnapshotTestPersistentActor(name: String, probe: ActorRef) extends NamedPersistentActor(name) { var state = List.empty[String] - def receive = { - case Persistent(payload, snr) ⇒ state = s"${payload}-${snr}" :: state - case TakeSnapshot ⇒ saveSnapshot(state) - case SaveSnapshotSuccess(md) ⇒ probe ! md.sequenceNr - case GetState ⇒ probe ! state.reverse + + override def receiveRecover: Receive = { + case payload: String ⇒ state = s"${payload}-${lastSequenceNr}" :: state + case SnapshotOffer(_, snapshot: List[String]) ⇒ state = snapshot + } + + override def receiveCommand = { + case payload: String ⇒ + persist(payload) { _ ⇒ + state = s"${payload}-${lastSequenceNr}" :: state + } + case TakeSnapshot ⇒ saveSnapshot(state) + case SaveSnapshotSuccess(md) ⇒ probe ! md.sequenceNr + case GetState ⇒ probe ! state.reverse } } - class LoadSnapshotTestProcessor(name: String, probe: ActorRef) extends NamedProcessor(name) { - def receive = { - case Persistent(payload, snr) ⇒ probe ! s"${payload}-${snr}" - case SnapshotOffer(md, s) ⇒ probe ! ((md, s)) - case other ⇒ probe ! other + class LoadSnapshotTestPersistentActor(name: String, probe: ActorRef) extends NamedPersistentActor(name) { + override def receiveRecover: Receive = { + case payload: String ⇒ probe ! s"${payload}-${lastSequenceNr}" + case SnapshotOffer(md, s) ⇒ probe ! 
((md, s)) + case other ⇒ probe ! other + } + + override def receiveCommand = { + case "done" ⇒ probe ! "done" + case payload: String ⇒ + persist(payload) { _ ⇒ + probe ! s"${payload}-${lastSequenceNr}" + } + case SnapshotOffer(md, s) ⇒ probe ! ((md, s)) + case other ⇒ probe ! other } override def preStart() = () } @@ -32,8 +51,8 @@ object SnapshotSpec { final case class Delete1(metadata: SnapshotMetadata) final case class DeleteN(criteria: SnapshotSelectionCriteria) - class DeleteSnapshotTestProcessor(name: String, probe: ActorRef) extends LoadSnapshotTestProcessor(name, probe) { - override def receive = receiveDelete orElse super.receive + class DeleteSnapshotTestPersistentActor(name: String, probe: ActorRef) extends LoadSnapshotTestPersistentActor(name, probe) { + override def receiveCommand = receiveDelete orElse super.receiveCommand def receiveDelete: Receive = { case Delete1(metadata) ⇒ deleteSnapshot(metadata.sequenceNr, metadata.timestamp) case DeleteN(criteria) ⇒ deleteSnapshots(criteria) @@ -48,25 +67,25 @@ class SnapshotSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "SnapshotS override protected def beforeEach() { super.beforeEach() - val processor = system.actorOf(Props(classOf[SaveSnapshotTestProcessor], name, testActor)) - processor ! Persistent("a") - processor ! TakeSnapshot - processor ! Persistent("b") - processor ! TakeSnapshot - processor ! Persistent("c") - processor ! Persistent("d") - processor ! TakeSnapshot - processor ! Persistent("e") - processor ! Persistent("f") + val persistentActor = system.actorOf(Props(classOf[SaveSnapshotTestPersistentActor], name, testActor)) + persistentActor ! "a" + persistentActor ! TakeSnapshot + persistentActor ! "b" + persistentActor ! TakeSnapshot + persistentActor ! "c" + persistentActor ! "d" + persistentActor ! TakeSnapshot + persistentActor ! "e" + persistentActor ! 
"f" expectMsgAllOf(1L, 2L, 4L) } - "A processor" must { + "A persistentActor" must { "recover state starting from the most recent snapshot" in { - val processor = system.actorOf(Props(classOf[LoadSnapshotTestProcessor], name, testActor)) + val persistentActor = system.actorOf(Props(classOf[LoadSnapshotTestPersistentActor], name, testActor)) val persistenceId = name - processor ! Recover() + persistentActor ! Recover() expectMsgPF() { case (SnapshotMetadata(`persistenceId`, 4, timestamp), state) ⇒ @@ -78,10 +97,10 @@ class SnapshotSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "SnapshotS expectMsg(RecoveryCompleted) } "recover state starting from the most recent snapshot matching an upper sequence number bound" in { - val processor = system.actorOf(Props(classOf[LoadSnapshotTestProcessor], name, testActor)) + val persistentActor = system.actorOf(Props(classOf[LoadSnapshotTestPersistentActor], name, testActor)) val persistenceId = name - processor ! Recover(toSequenceNr = 3) + persistentActor ! Recover(toSequenceNr = 3) expectMsgPF() { case (SnapshotMetadata(`persistenceId`, 2, timestamp), state) ⇒ @@ -92,11 +111,11 @@ class SnapshotSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "SnapshotS expectMsg(RecoveryCompleted) } "recover state starting from the most recent snapshot matching an upper sequence number bound (without further replay)" in { - val processor = system.actorOf(Props(classOf[LoadSnapshotTestProcessor], name, testActor)) + val persistentActor = system.actorOf(Props(classOf[LoadSnapshotTestPersistentActor], name, testActor)) val persistenceId = name - processor ! Recover(toSequenceNr = 4) - processor ! "done" + persistentActor ! Recover(toSequenceNr = 4) + persistentActor ! 
"done" expectMsgPF() { case (SnapshotMetadata(`persistenceId`, 4, timestamp), state) ⇒ @@ -107,10 +126,10 @@ class SnapshotSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "SnapshotS expectMsg("done") } "recover state starting from the most recent snapshot matching criteria" in { - val processor = system.actorOf(Props(classOf[LoadSnapshotTestProcessor], name, testActor)) + val persistentActor = system.actorOf(Props(classOf[LoadSnapshotTestPersistentActor], name, testActor)) val persistenceId = name - processor ! Recover(fromSnapshot = SnapshotSelectionCriteria(maxSequenceNr = 2)) + persistentActor ! Recover(fromSnapshot = SnapshotSelectionCriteria(maxSequenceNr = 2)) expectMsgPF() { case (SnapshotMetadata(`persistenceId`, 2, timestamp), state) ⇒ @@ -124,10 +143,10 @@ class SnapshotSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "SnapshotS expectMsg(RecoveryCompleted) } "recover state starting from the most recent snapshot matching criteria and an upper sequence number bound" in { - val processor = system.actorOf(Props(classOf[LoadSnapshotTestProcessor], name, testActor)) + val persistentActor = system.actorOf(Props(classOf[LoadSnapshotTestPersistentActor], name, testActor)) val persistenceId = name - processor ! Recover(fromSnapshot = SnapshotSelectionCriteria(maxSequenceNr = 2), toSequenceNr = 3) + persistentActor ! Recover(fromSnapshot = SnapshotSelectionCriteria(maxSequenceNr = 2), toSequenceNr = 3) expectMsgPF() { case (SnapshotMetadata(`persistenceId`, 2, timestamp), state) ⇒ @@ -138,9 +157,9 @@ class SnapshotSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "SnapshotS expectMsg(RecoveryCompleted) } "recover state from scratch if snapshot based recovery is disabled" in { - val processor = system.actorOf(Props(classOf[LoadSnapshotTestProcessor], name, testActor)) + val persistentActor = system.actorOf(Props(classOf[LoadSnapshotTestPersistentActor], name, testActor)) - processor ! 
Recover(fromSnapshot = SnapshotSelectionCriteria.None, toSequenceNr = 3) + persistentActor ! Recover(fromSnapshot = SnapshotSelectionCriteria.None, toSequenceNr = 3) expectMsg("a-1") expectMsg("b-2") @@ -150,14 +169,14 @@ class SnapshotSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "SnapshotS "support single message deletions" in { val deleteProbe = TestProbe() - val processor1 = system.actorOf(Props(classOf[DeleteSnapshotTestProcessor], name, testActor)) + val persistentActor1 = system.actorOf(Props(classOf[DeleteSnapshotTestPersistentActor], name, testActor)) val persistenceId = name system.eventStream.subscribe(deleteProbe.ref, classOf[DeleteSnapshot]) - // recover processor from 3rd snapshot and then delete snapshot - processor1 ! Recover(toSequenceNr = 4) - processor1 ! "done" + // recover persistentActor from 3rd snapshot and then delete snapshot + persistentActor1 ! Recover(toSequenceNr = 4) + persistentActor1 ! "done" val metadata = expectMsgPF() { case (md @ SnapshotMetadata(`persistenceId`, 4, _), state) ⇒ @@ -167,13 +186,13 @@ class SnapshotSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "SnapshotS expectMsg(RecoveryCompleted) expectMsg("done") - processor1 ! Delete1(metadata) + persistentActor1 ! Delete1(metadata) deleteProbe.expectMsgType[DeleteSnapshot] - // recover processor from 2nd snapshot (3rd was deleted) plus replayed messages - val processor2 = system.actorOf(Props(classOf[DeleteSnapshotTestProcessor], name, testActor)) + // recover persistentActor from 2nd snapshot (3rd was deleted) plus replayed messages + val persistentActor2 = system.actorOf(Props(classOf[DeleteSnapshotTestPersistentActor], name, testActor)) - processor2 ! Recover(toSequenceNr = 4) + persistentActor2 ! 
Recover(toSequenceNr = 4) expectMsgPF() { case (md @ SnapshotMetadata(`persistenceId`, 2, _), state) ⇒ state should be(List("a-1", "b-2").reverse) @@ -186,14 +205,14 @@ class SnapshotSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "SnapshotS "support bulk message deletions" in { val deleteProbe = TestProbe() - val processor1 = system.actorOf(Props(classOf[DeleteSnapshotTestProcessor], name, testActor)) + val persistentActor1 = system.actorOf(Props(classOf[DeleteSnapshotTestPersistentActor], name, testActor)) val persistenceId = name system.eventStream.subscribe(deleteProbe.ref, classOf[DeleteSnapshots]) - // recover processor and the delete first three (= all) snapshots - processor1 ! Recover(toSequenceNr = 4) - processor1 ! DeleteN(SnapshotSelectionCriteria(maxSequenceNr = 4)) + // recover persistentActor and the delete first three (= all) snapshots + persistentActor1 ! Recover(toSequenceNr = 4) + persistentActor1 ! DeleteN(SnapshotSelectionCriteria(maxSequenceNr = 4)) expectMsgPF() { case (md @ SnapshotMetadata(`persistenceId`, 4, _), state) ⇒ state should be(List("a-1", "b-2", "c-3", "d-4").reverse) @@ -201,10 +220,10 @@ class SnapshotSpec extends AkkaSpec(PersistenceSpec.config("leveldb", "SnapshotS expectMsg(RecoveryCompleted) deleteProbe.expectMsgType[DeleteSnapshots] - // recover processor from replayed messages (all snapshots deleted) - val processor2 = system.actorOf(Props(classOf[DeleteSnapshotTestProcessor], name, testActor)) + // recover persistentActor from replayed messages (all snapshots deleted) + val persistentActor2 = system.actorOf(Props(classOf[DeleteSnapshotTestPersistentActor], name, testActor)) - processor2 ! Recover(toSequenceNr = 4) + persistentActor2 ! 
Recover(toSequenceNr = 4) expectMsg("a-1") expectMsg("b-2") expectMsg("c-3") diff --git a/akka-persistence/src/test/scala/akka/persistence/ViewSpec.scala b/akka-persistence/src/test/scala/akka/persistence/ViewSpec.scala deleted file mode 100644 index bad21e7bc2..0000000000 --- a/akka-persistence/src/test/scala/akka/persistence/ViewSpec.scala +++ /dev/null @@ -1,321 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. - */ -package akka.persistence - -import akka.actor._ -import akka.persistence.JournalProtocol.ReplayMessages -import akka.testkit._ -import com.typesafe.config.Config - -import scala.concurrent.duration._ - -object ViewSpec { - private class TestPersistentActor(name: String, probe: ActorRef) extends NamedPersistentActor(name) { - def receiveCommand = { - case msg ⇒ - persist(msg) { m ⇒ probe ! s"${m}-${lastSequenceNr}" } - } - - override def receiveRecover: Receive = { - case _ ⇒ - } - } - - private class TestView(name: String, probe: ActorRef, interval: FiniteDuration, var failAt: Option[String]) extends View { - def this(name: String, probe: ActorRef, interval: FiniteDuration) = - this(name, probe, interval, None) - - def this(name: String, probe: ActorRef) = - this(name, probe, 100.milliseconds) - - override def autoUpdateInterval: FiniteDuration = interval.dilated(context.system) - override val processorId: String = name - - var last: String = _ - - def receive = { - case "get" ⇒ - probe ! last - case "boom" ⇒ - throw new TestException("boom") - case Persistent(payload, _) if Some(payload) == failAt ⇒ - throw new TestException("boom") - case Persistent(payload, sequenceNr) ⇒ - last = s"replicated-${payload}-${sequenceNr}" - probe ! 
last - } - - override def postRestart(reason: Throwable): Unit = { - super.postRestart(reason) - failAt = None - } - } - - private class PassiveTestView(name: String, probe: ActorRef, var failAt: Option[String]) extends View { - override val persistenceId: String = name - - override def autoUpdate: Boolean = false - override def autoUpdateReplayMax: Long = 0L // no message replay during initial recovery - - var last: String = _ - - def receive = { - case "get" ⇒ - probe ! last - case Persistent(payload, _) if Some(payload) == failAt ⇒ - throw new TestException("boom") - case Persistent(payload, sequenceNr) ⇒ - last = s"replicated-${payload}-${sequenceNr}" - } - - override def postRestart(reason: Throwable): Unit = { - super.postRestart(reason) - failAt = None - } - - } - - private class ActiveTestView(name: String, probe: ActorRef) extends View { - override val persistenceId: String = name - override def autoUpdateInterval: FiniteDuration = 50.millis - override def autoUpdateReplayMax: Long = 2 - - def receive = { - case Persistent(payload, sequenceNr) ⇒ - probe ! s"replicated-${payload}-${sequenceNr}" - } - } - - private class TestDestination(probe: ActorRef) extends Actor { - def receive = { - case cp @ ConfirmablePersistent(payload, sequenceNr, _) ⇒ - cp.confirm() - probe ! s"${payload}-${sequenceNr}" - } - } - - private class EmittingView(name: String, destination: ActorRef) extends View { - override val persistenceId: String = name - override def autoUpdateInterval: FiniteDuration = 100.milliseconds.dilated(context.system) - - val channel = context.actorOf(Channel.props(s"${name}-channel")) - - def receive = { - case "restart" ⇒ - throw new TestException("restart requested") - case Persistent(payload, sequenceNr) ⇒ - channel ! 
Deliver(Persistent(s"emitted-${payload}"), destination.path) - } - } - - private class SnapshottingView(name: String, probe: ActorRef) extends View { - override val persistenceId: String = name - override val viewId: String = s"${name}-replicator" - - override def autoUpdateInterval: FiniteDuration = 100.microseconds.dilated(context.system) - - var last: String = _ - - def receive = { - case "get" ⇒ - probe ! last - case "snap" ⇒ - saveSnapshot(last) - case "restart" ⇒ - throw new TestException("restart requested") - case SaveSnapshotSuccess(_) ⇒ - probe ! "snapped" - case SnapshotOffer(metadata, snapshot: String) ⇒ - last = snapshot - probe ! last - case Persistent(payload, sequenceNr) ⇒ - last = s"replicated-${payload}-${sequenceNr}" - probe ! last - } - } -} - -abstract class ViewSpec(config: Config) extends AkkaSpec(config) with PersistenceSpec with ImplicitSender { - import akka.persistence.ViewSpec._ - - var persistor: ActorRef = _ - var view: ActorRef = _ - - var processorProbe: TestProbe = _ - var viewProbe: TestProbe = _ - - override protected def beforeEach(): Unit = { - super.beforeEach() - - processorProbe = TestProbe() - viewProbe = TestProbe() - - persistor = system.actorOf(Props(classOf[TestPersistentActor], name, processorProbe.ref)) - persistor ! "a" - persistor ! 
"b" - - processorProbe.expectMsg("a-1") - processorProbe.expectMsg("b-2") - } - - override protected def afterEach(): Unit = { - system.stop(persistor) - system.stop(view) - super.afterEach() - } - - def subscribeToConfirmation(probe: TestProbe): Unit = - system.eventStream.subscribe(probe.ref, classOf[Delivered]) - - def awaitConfirmation(probe: TestProbe): Unit = - probe.expectMsgType[Delivered] - - def subscribeToReplay(probe: TestProbe): Unit = - system.eventStream.subscribe(probe.ref, classOf[ReplayMessages]) - - "A view" must { - "receive past updates from a processor" in { - view = system.actorOf(Props(classOf[TestView], name, viewProbe.ref)) - viewProbe.expectMsg("replicated-a-1") - viewProbe.expectMsg("replicated-b-2") - } - "receive live updates from a processor" in { - view = system.actorOf(Props(classOf[TestView], name, viewProbe.ref)) - viewProbe.expectMsg("replicated-a-1") - viewProbe.expectMsg("replicated-b-2") - persistor ! "c" - viewProbe.expectMsg("replicated-c-3") - } - "run updates at specified interval" in { - view = system.actorOf(Props(classOf[TestView], name, viewProbe.ref, 2.seconds)) - // initial update is done on start - viewProbe.expectMsg("replicated-a-1") - viewProbe.expectMsg("replicated-b-2") - // live updates takes 5 seconds to replicate - persistor ! "c" - viewProbe.expectNoMsg(1.second) - viewProbe.expectMsg("replicated-c-3") - } - "run updates on user request" in { - view = system.actorOf(Props(classOf[TestView], name, viewProbe.ref, 5.seconds)) - viewProbe.expectMsg("replicated-a-1") - viewProbe.expectMsg("replicated-b-2") - persistor ! "c" - processorProbe.expectMsg("c-3") - view ! Update(await = false) - viewProbe.expectMsg("replicated-c-3") - } - "run updates on user request and await update" in { - view = system.actorOf(Props(classOf[TestView], name, viewProbe.ref, 5.seconds)) - viewProbe.expectMsg("replicated-a-1") - viewProbe.expectMsg("replicated-b-2") - persistor ! "c" - processorProbe.expectMsg("c-3") - view ! 
Update(await = true) - view ! "get" - viewProbe.expectMsg("replicated-c-3") - } - "run updates again on failure outside an update cycle" in { - view = system.actorOf(Props(classOf[TestView], name, viewProbe.ref, 5.seconds)) - viewProbe.expectMsg("replicated-a-1") - viewProbe.expectMsg("replicated-b-2") - view ! "boom" - viewProbe.expectMsg("replicated-a-1") - viewProbe.expectMsg("replicated-b-2") - } - "run updates again on failure during an update cycle" in { - persistor ! "c" - processorProbe.expectMsg("c-3") - view = system.actorOf(Props(classOf[TestView], name, viewProbe.ref, 5.seconds, Some("b"))) - viewProbe.expectMsg("replicated-a-1") - viewProbe.expectMsg("replicated-a-1") - viewProbe.expectMsg("replicated-b-2") - viewProbe.expectMsg("replicated-c-3") - } - "run size-limited updates on user request" in { - persistor ! "c" - persistor ! "d" - persistor ! "e" - persistor ! "f" - - processorProbe.expectMsg("c-3") - processorProbe.expectMsg("d-4") - processorProbe.expectMsg("e-5") - processorProbe.expectMsg("f-6") - - view = system.actorOf(Props(classOf[PassiveTestView], name, viewProbe.ref, None)) - - view ! Update(await = true, replayMax = 2) - view ! "get" - viewProbe.expectMsg("replicated-b-2") - - view ! Update(await = true, replayMax = 1) - view ! "get" - viewProbe.expectMsg("replicated-c-3") - - view ! Update(await = true, replayMax = 4) - view ! "get" - viewProbe.expectMsg("replicated-f-6") - } - "run size-limited updates automatically" in { - val replayProbe = TestProbe() - - persistor ! "c" - persistor ! 
"d" - - processorProbe.expectMsg("c-3") - processorProbe.expectMsg("d-4") - - subscribeToReplay(replayProbe) - - view = system.actorOf(Props(classOf[ActiveTestView], name, viewProbe.ref)) - - viewProbe.expectMsg("replicated-a-1") - viewProbe.expectMsg("replicated-b-2") - viewProbe.expectMsg("replicated-c-3") - viewProbe.expectMsg("replicated-d-4") - - replayProbe.expectMsgPF() { case ReplayMessages(1L, _, 2L, _, _, _) ⇒ } - replayProbe.expectMsgPF() { case ReplayMessages(3L, _, 2L, _, _, _) ⇒ } - replayProbe.expectMsgPF() { case ReplayMessages(5L, _, 2L, _, _, _) ⇒ } - } - } - - "A view" can { - "use channels" in { - val confirmProbe = TestProbe() - val destinationProbe = TestProbe() - val destination = system.actorOf(Props(classOf[TestDestination], destinationProbe.ref)) - - subscribeToConfirmation(confirmProbe) - - view = system.actorOf(Props(classOf[EmittingView], name, destination)) - destinationProbe.expectMsg("emitted-a-1") - destinationProbe.expectMsg("emitted-b-2") - awaitConfirmation(confirmProbe) - awaitConfirmation(confirmProbe) - - view ! "restart" - persistor ! "c" - - destinationProbe.expectMsg("emitted-c-3") - awaitConfirmation(confirmProbe) - } - "take snapshots" in { - view = system.actorOf(Props(classOf[SnapshottingView], name, viewProbe.ref)) - viewProbe.expectMsg("replicated-a-1") - viewProbe.expectMsg("replicated-b-2") - view ! "snap" - viewProbe.expectMsg("snapped") - view ! "restart" - persistor ! 
"c" - viewProbe.expectMsg("replicated-b-2") - viewProbe.expectMsg("replicated-c-3") - } - } -} - -class LeveldbViewSpec extends ViewSpec(PersistenceSpec.config("leveldb", "LeveldbViewSpec")) -class InmemViewSpec extends ViewSpec(PersistenceSpec.config("inmem", "InmemViewSpec")) - diff --git a/akka-persistence/src/test/scala/akka/persistence/journal/chaos/ChaosJournal.scala b/akka-persistence/src/test/scala/akka/persistence/journal/chaos/ChaosJournal.scala index cab45a07f8..28d5c4e4fd 100644 --- a/akka-persistence/src/test/scala/akka/persistence/journal/chaos/ChaosJournal.scala +++ b/akka-persistence/src/test/scala/akka/persistence/journal/chaos/ChaosJournal.scala @@ -15,18 +15,12 @@ import akka.persistence.journal.inmem.InmemMessages class WriteFailedException(ps: Seq[PersistentRepr]) extends TestException(s"write failed for payloads = [${ps.map(_.payload)}]") -class ConfirmFailedException(cs: Seq[PersistentConfirmation]) - extends TestException(s"write failed for confirmations = [${cs.map(c ⇒ s"${c.persistenceId}-${c.sequenceNr}-${c.channelId}")}]") - class ReplayFailedException(ps: Seq[PersistentRepr]) extends TestException(s"recovery failed after replaying payloads = [${ps.map(_.payload)}]") class ReadHighestFailedException extends TestException(s"recovery failed when reading highest sequence number") -class DeleteFailedException(messageIds: immutable.Seq[PersistentId]) - extends TestException(s"delete failed for message ids = [${messageIds}]") - /** * Keep [[ChaosJournal]] state in an external singleton so that it survives journal restarts. * The journal itself uses a dedicated dispatcher, so there won't be any visibility issues. 
@@ -38,7 +32,6 @@ class ChaosJournal extends SyncWriteJournal { val config = context.system.settings.config.getConfig("akka.persistence.journal.chaos") val writeFailureRate = config.getDouble("write-failure-rate") - val confirmFailureRate = config.getDouble("confirm-failure-rate") val deleteFailureRate = config.getDouble("delete-failure-rate") val replayFailureRate = config.getDouble("replay-failure-rate") val readHighestFailureRate = config.getDouble("read-highest-failure-rate") @@ -49,17 +42,12 @@ class ChaosJournal extends SyncWriteJournal { if (shouldFail(writeFailureRate)) throw new WriteFailedException(messages) else messages.foreach(add) - def writeConfirmations(confirmations: immutable.Seq[PersistentConfirmation]): Unit = - if (shouldFail(confirmFailureRate)) throw new ConfirmFailedException(confirmations) - else confirmations.foreach(cnf ⇒ update(cnf.persistenceId, cnf.sequenceNr)(p ⇒ p.update(confirms = cnf.channelId +: p.confirms))) - - def deleteMessages(messageIds: immutable.Seq[PersistentId], permanent: Boolean): Unit = - if (shouldFail(deleteFailureRate)) throw new DeleteFailedException(messageIds) - else if (permanent) messageIds.foreach(mid ⇒ update(mid.persistenceId, mid.sequenceNr)(_.update(deleted = true))) - else messageIds.foreach(mid ⇒ del(mid.persistenceId, mid.sequenceNr)) - - def deleteMessagesTo(persistenceId: String, toSequenceNr: Long, permanent: Boolean): Unit = - (1L to toSequenceNr).map(PersistentIdImpl(persistenceId, _)) + def deleteMessagesTo(persistenceId: String, toSequenceNr: Long, permanent: Boolean): Unit = { + (1L to toSequenceNr).foreach { snr ⇒ + if (permanent) update(persistenceId, snr)(_.update(deleted = true)) + else del(persistenceId, snr) + } + } def asyncReplayMessages(persistenceId: String, fromSequenceNr: Long, toSequenceNr: Long, max: Long)(replayCallback: (PersistentRepr) ⇒ Unit): Future[Unit] = if (shouldFail(replayFailureRate)) { diff --git 
a/akka-persistence/src/test/scala/akka/persistence/journal/leveldb/SharedLeveldbJournalSpec.scala b/akka-persistence/src/test/scala/akka/persistence/journal/leveldb/SharedLeveldbJournalSpec.scala index 6028625426..8d02998903 100644 --- a/akka-persistence/src/test/scala/akka/persistence/journal/leveldb/SharedLeveldbJournalSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/journal/leveldb/SharedLeveldbJournalSpec.scala @@ -39,18 +39,24 @@ object SharedLeveldbJournalSpec { } """ - class ExampleProcessor(probe: ActorRef, name: String) extends NamedProcessor(name) { - def receive = { - case Persistent(payload, _) ⇒ probe ! payload + class ExamplePersistentActor(probe: ActorRef, name: String) extends NamedPersistentActor(name) { + override def receiveRecover = { + case RecoveryCompleted ⇒ // ignore + case payload ⇒ probe ! payload + } + override def receiveCommand = { + case payload ⇒ persist(payload) { _ ⇒ + probe ! payload + } } } class ExampleApp(probe: ActorRef, storePath: ActorPath) extends Actor { - val processor = context.actorOf(Props(classOf[ExampleProcessor], probe, context.system.name)) + val p = context.actorOf(Props(classOf[ExamplePersistentActor], probe, context.system.name)) def receive = { case ActorIdentity(1, Some(store)) ⇒ SharedLeveldbJournal.setStore(store, context.system) - case m ⇒ processor forward m + case m ⇒ p forward m } override def preStart(): Unit = { @@ -63,44 +69,44 @@ object SharedLeveldbJournalSpec { class SharedLeveldbJournalSpec extends AkkaSpec(SharedLeveldbJournalSpec.config) with Cleanup { import SharedLeveldbJournalSpec._ - val processorASystem = ActorSystem("processorA", system.settings.config) - val processorBSystem = ActorSystem("processorB", system.settings.config) + val systemA = ActorSystem("SysA", system.settings.config) + val systemB = ActorSystem("SysB", system.settings.config) override protected def afterTermination() { - shutdown(processorASystem) - shutdown(processorBSystem) + shutdown(systemA) + 
shutdown(systemB) super.afterTermination() } "A LevelDB store" can { "be shared by multiple actor systems" in { - val processorAProbe = new TestProbe(processorASystem) - val processorBProbe = new TestProbe(processorBSystem) + val probeA = new TestProbe(systemA) + val probeB = new TestProbe(systemB) system.actorOf(Props[SharedLeveldbStore], "store") val storePath = RootActorPath(system.asInstanceOf[ExtendedActorSystem].provider.getDefaultAddress) / "user" / "store" - val appA = processorASystem.actorOf(Props(classOf[ExampleApp], processorAProbe.ref, storePath)) - val appB = processorBSystem.actorOf(Props(classOf[ExampleApp], processorBProbe.ref, storePath)) + val appA = systemA.actorOf(Props(classOf[ExampleApp], probeA.ref, storePath)) + val appB = systemB.actorOf(Props(classOf[ExampleApp], probeB.ref, storePath)) - appA ! Persistent("a1") - appB ! Persistent("b1") + appA ! "a1" + appB ! "b1" - processorAProbe.expectMsg("a1") - processorBProbe.expectMsg("b1") + probeA.expectMsg("a1") + probeB.expectMsg("b1") - val recoveredAppA = processorASystem.actorOf(Props(classOf[ExampleApp], processorAProbe.ref, storePath)) - val recoveredAppB = processorBSystem.actorOf(Props(classOf[ExampleApp], processorBProbe.ref, storePath)) + val recoveredAppA = systemA.actorOf(Props(classOf[ExampleApp], probeA.ref, storePath)) + val recoveredAppB = systemB.actorOf(Props(classOf[ExampleApp], probeB.ref, storePath)) - recoveredAppA ! Persistent("a2") - recoveredAppB ! Persistent("b2") + recoveredAppA ! "a2" + recoveredAppB ! 
"b2" - processorAProbe.expectMsg("a1") - processorAProbe.expectMsg("a2") + probeA.expectMsg("a1") + probeA.expectMsg("a2") - processorBProbe.expectMsg("b1") - processorBProbe.expectMsg("b2") + probeB.expectMsg("b1") + probeB.expectMsg("b2") } } } diff --git a/akka-persistence/src/test/scala/akka/persistence/serialization/SerializerSpec.scala b/akka-persistence/src/test/scala/akka/persistence/serialization/SerializerSpec.scala index 09680172ef..8c753639f0 100644 --- a/akka-persistence/src/test/scala/akka/persistence/serialization/SerializerSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/serialization/SerializerSpec.scala @@ -99,17 +99,8 @@ class MessageSerializerPersistenceSpec extends AkkaSpec(customSerializers) { "A message serializer" when { "not given a manifest" must { - "handle custom ConfirmablePersistent message serialization" in { - val persistent = PersistentRepr(MyPayload("a"), 13, "p1", true, 3, List("c1", "c2"), confirmable = true, DeliveredByChannel("p2", "c2", 14), testActor, testActor) - val serializer = serialization.findSerializerFor(persistent) - - val bytes = serializer.toBinary(persistent) - val deserialized = serializer.fromBinary(bytes, None) - - deserialized should be(persistent.withPayload(MyPayload(".a."))) - } "handle custom Persistent message serialization" in { - val persistent = PersistentRepr(MyPayload("a"), 13, "p1", true, 0, List("c1", "c2"), confirmable = false, DeliveredByChannel("p2", "c2", 14), testActor, testActor) + val persistent = PersistentRepr(MyPayload("a"), 13, "p1", true, testActor) val serializer = serialization.findSerializerFor(persistent) val bytes = serializer.toBinary(persistent) @@ -119,17 +110,8 @@ class MessageSerializerPersistenceSpec extends AkkaSpec(customSerializers) { } } "given a PersistentRepr manifest" must { - "handle custom ConfirmablePersistent message serialization" in { - val persistent = PersistentRepr(MyPayload("b"), 13, "p1", true, 3, List("c1", "c2"), confirmable = true, 
DeliveredByChannel("p2", "c2", 14), testActor, testActor) - val serializer = serialization.findSerializerFor(persistent) - - val bytes = serializer.toBinary(persistent) - val deserialized = serializer.fromBinary(bytes, Some(classOf[PersistentRepr])) - - deserialized should be(persistent.withPayload(MyPayload(".b."))) - } "handle custom Persistent message serialization" in { - val persistent = PersistentRepr(MyPayload("b"), 13, "p1", true, 3, List("c1", "c2"), confirmable = true, DeliveredByChannel("p2", "c2", 14), testActor, testActor) + val persistent = PersistentRepr(MyPayload("b"), 13, "p1", true, testActor) val serializer = serialization.findSerializerFor(persistent) val bytes = serializer.toBinary(persistent) @@ -138,26 +120,6 @@ class MessageSerializerPersistenceSpec extends AkkaSpec(customSerializers) { deserialized should be(persistent.withPayload(MyPayload(".b."))) } } - "given a Confirm manifest" must { - "handle DeliveryByChannel message serialization" in { - val confirmation = DeliveredByChannel("p2", "c2", 14) - val serializer = serialization.findSerializerFor(confirmation) - - val bytes = serializer.toBinary(confirmation) - val deserialized = serializer.fromBinary(bytes, Some(classOf[DeliveredByChannel])) - - deserialized should be(confirmation) - } - "handle DeliveredByPersistentChannel message serialization" in { - val confirmation = DeliveredByPersistentChannel("c2", 14) - val serializer = serialization.findSerializerFor(confirmation) - - val bytes = serializer.toBinary(confirmation) - val deserialized = serializer.fromBinary(bytes, Some(classOf[DeliveredByPersistentChannel])) - - deserialized should be(confirmation) - } - } "given AtLeastOnceDeliverySnapshot" must { "handle empty unconfirmed" in { @@ -199,12 +161,7 @@ object MessageSerializerRemotingSpec { class RemoteActor extends Actor { def receive = { - case PersistentBatch(Persistent(MyPayload(data), _) +: tail) ⇒ sender() ! 
s"b${data}" - case ConfirmablePersistent(MyPayload(data), _, _) ⇒ sender() ! s"c${data}" - case Persistent(MyPayload(data), _) ⇒ sender() ! s"p${data}" - case DeliveredByChannel(pid, cid, msnr, dsnr, ep) ⇒ sender() ! s"${pid},${cid},${msnr},${dsnr},${ep.path.name.startsWith("testActor")}" - case DeliveredByPersistentChannel(cid, msnr, dsnr, ep) ⇒ sender() ! s"${cid},${msnr},${dsnr},${ep.path.name.startsWith("testActor")}" - case Deliver(Persistent(payload, _), dp) ⇒ context.actorSelection(dp) ! payload + case PersistentRepr(MyPayload(data), _) ⇒ sender() ! s"p${data}" } } @@ -231,29 +188,9 @@ class MessageSerializerRemotingSpec extends AkkaSpec(remote.withFallback(customS "A message serializer" must { "custom-serialize Persistent messages during remoting" in { - localActor ! Persistent(MyPayload("a")) + localActor ! PersistentRepr(MyPayload("a")) expectMsg("p.a.") } - "custom-serialize ConfirmablePersistent messages during remoting" in { - localActor ! PersistentRepr(MyPayload("a"), confirmable = true) - expectMsg("c.a.") - } - "custom-serialize Persistent message batches during remoting" in { - localActor ! PersistentBatch(immutable.Seq(Persistent(MyPayload("a")))) - expectMsg("b.a.") - } - "serialize DeliveredByChannel messages during remoting" in { - localActor ! DeliveredByChannel("a", "b", 2, 3, testActor) - expectMsg("a,b,2,3,true") - } - "serialize DeliveredByPersistentChannel messages during remoting" in { - localActor ! DeliveredByPersistentChannel("c", 2, 3, testActor) - expectMsg("c,2,3,true") - } - "serialize Deliver messages during remoting" in { - localActor ! 
Deliver(Persistent("a"), ActorPath.fromString(testActor.path.toStringWithAddress(address(system)))) - expectMsg("a") - } } } diff --git a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/doc/LambdaPersistenceDocTest.java b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/doc/LambdaPersistenceDocTest.java index b8ec3e735b..19c2fd8c1a 100644 --- a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/doc/LambdaPersistenceDocTest.java +++ b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/doc/LambdaPersistenceDocTest.java @@ -25,7 +25,7 @@ public class LambdaPersistenceDocTest { public interface SomeOtherMessage {} - public interface ProcessorMethods { + public interface PersistentActorMethods { //#persistence-id public String persistenceId(); //#persistence-id @@ -33,63 +33,22 @@ public class LambdaPersistenceDocTest { public boolean recoveryRunning(); public boolean recoveryFinished(); //#recovery-status - //#current-message - public Persistent getCurrentPersistentMessage(); - //#current-message } static Object o1 = new Object() { - //#definition - class MyProcessor extends AbstractProcessor { - public MyProcessor() { - receive(ReceiveBuilder. - match(Persistent.class, p -> { - // message successfully written to journal - Object payload = p.payload(); - Long sequenceNr = p.sequenceNr(); - // ... - }). - match(PersistenceFailure.class, failure -> { - // message failed to be written to journal - Object payload = failure.payload(); - Long sequenceNr = failure.sequenceNr(); - Throwable cause = failure.cause(); - // ... - }). 
- match(SomeOtherMessage.class, message -> { - // message not written to journal - }).build() - ); - } - } - //#definition - - class MyActor extends AbstractActor { - ActorRef processor; - - public MyActor() { - //#usage - processor = context().actorOf(Props.create(MyProcessor.class), "myProcessor"); - - processor.tell(Persistent.create("foo"), null); - processor.tell("bar", null); - //#usage - - receive(ReceiveBuilder. - match(Persistent.class, received -> {/* ... */}).build() - ); - } - - private void recover() { - //#recover-explicit - processor.tell(Recover.create(), null); - //#recover-explicit - } + + private void recover() { + ActorRef persistentActor =null; + + //#recover-explicit + persistentActor.tell(Recover.create(), null); + //#recover-explicit } + }; static Object o2 = new Object() { - abstract class MyProcessor1 extends AbstractProcessor { + abstract class MyPersistentActor1 extends AbstractPersistentActor { //#recover-on-start-disabled @Override public void preStart() {} @@ -101,7 +60,7 @@ public class LambdaPersistenceDocTest { //#recover-on-restart-disabled } - abstract class MyProcessor2 extends AbstractProcessor { + abstract class MyPersistentActor2 extends AbstractPersistentActor { //#recover-on-start-custom @Override public void preStart() { @@ -110,19 +69,7 @@ public class LambdaPersistenceDocTest { //#recover-on-start-custom } - abstract class MyProcessor3 extends AbstractProcessor { - //#deletion - @Override - public void preRestart(Throwable reason, Option message) { - if (message.isDefined() && message.get() instanceof Persistent) { - deleteMessage(((Persistent) message.get()).sequenceNr()); - } - super.preRestart(reason, message); - } - //#deletion - } - - class MyProcessor4 extends AbstractProcessor implements ProcessorMethods { + class MyPersistentActor4 extends AbstractPersistentActor implements PersistentActorMethods { //#persistence-id-override @Override public String persistenceId() { @@ -130,11 +77,19 @@ public class 
LambdaPersistenceDocTest { } //#persistence-id-override - public MyProcessor4() { - receive(ReceiveBuilder. - match(Persistent.class, received -> {/* ... */}).build() - ); + + @Override + public PartialFunction receiveCommand() { + return ReceiveBuilder. + match(String.class, cmd -> {/* ... */}).build(); } + + @Override + public PartialFunction receiveRecover() { + return ReceiveBuilder. + match(String.class, evt -> {/* ... */}).build(); + } + } //#recovery-completed @@ -147,7 +102,8 @@ public class LambdaPersistenceDocTest { @Override public PartialFunction receiveRecover() { return ReceiveBuilder. match(RecoveryCompleted.class, r -> { - recoveryCompleted(); + // perform init after recovery, before any other messages + // ... }). match(String.class, this::handleEvent).build(); } @@ -158,11 +114,6 @@ public class LambdaPersistenceDocTest { s -> persist("evt", this::handleEvent)).build(); } - private void recoveryCompleted() { - // perform init after recovery, before any other messages - // ... - } - private void handleEvent(String event) { // update state // ... 
@@ -172,10 +123,20 @@ public class LambdaPersistenceDocTest { //#recovery-completed }; + static Object fullyDisabledRecoveyExample = new Object() { + abstract class MyPersistentActor1 extends UntypedPersistentActor { + //#recover-fully-disabled + @Override + public void preStart() { getSelf().tell(Recover.create(0L), getSelf()); } + //#recover-fully-disabled + } + }; + static Object atLeastOnceExample = new Object() { //#at-least-once-example class Msg implements Serializable { + private static final long serialVersionUID = 1L; public final long deliveryId; public final String s; @@ -186,6 +147,7 @@ public class LambdaPersistenceDocTest { } class Confirm implements Serializable { + private static final long serialVersionUID = 1L; public final long deliveryId; public Confirm(long deliveryId) { @@ -195,6 +157,7 @@ public class LambdaPersistenceDocTest { class MsgSent implements Serializable { + private static final long serialVersionUID = 1L; public final String s; public MsgSent(String s) { @@ -202,6 +165,7 @@ public class LambdaPersistenceDocTest { } } class MsgConfirmed implements Serializable { + private static final long serialVersionUID = 1L; public final long deliveryId; public MsgConfirmed(long deliveryId) { @@ -216,6 +180,10 @@ public class LambdaPersistenceDocTest { this.destination = destination; } + @Override public String persistenceId() { + return "persistence-id"; + } + @Override public PartialFunction receiveCommand() { return ReceiveBuilder. @@ -256,98 +224,17 @@ public class LambdaPersistenceDocTest { } } //#at-least-once-example - }; - - - static Object o3 = new Object() { - //#channel-example - class MyProcessor extends AbstractProcessor { - private final ActorRef destination; - private final ActorRef channel; - - public MyProcessor() { - this.destination = context().actorOf(Props.create(MyDestination.class)); - this.channel = context().actorOf(Channel.props(), "myChannel"); - - receive(ReceiveBuilder. 
- match(Persistent.class, p -> { - Persistent out = p.withPayload("done " + p.payload()); - channel.tell(Deliver.create(out, destination.path()), self()); - }).build() - ); - } - } - - class MyDestination extends AbstractActor { - public MyDestination() { - receive(ReceiveBuilder. - match(ConfirmablePersistent.class, p -> { - Object payload = p.payload(); - Long sequenceNr = p.sequenceNr(); - int redeliveries = p.redeliveries(); - // ... - p.confirm(); - }).build() - ); - } - } - //#channel-example - - class MyProcessor2 extends AbstractProcessor { - private final ActorRef destination; - private final ActorRef channel; - - public MyProcessor2(ActorRef destination) { - this.destination = context().actorOf(Props.create(MyDestination.class)); - //#channel-id-override - this.channel = context().actorOf(Channel.props("my-stable-channel-id")); - //#channel-id-override - - //#channel-custom-settings - context().actorOf( - Channel.props(ChannelSettings.create() - .withRedeliverInterval(Duration.create(30, TimeUnit.SECONDS)) - .withRedeliverMax(15))); - //#channel-custom-settings - - //#channel-custom-listener - class MyListener extends AbstractActor { - public MyListener() { - receive(ReceiveBuilder. - match(RedeliverFailure.class, r -> { - Iterable messages = r.getMessages(); - // ... - }).build() - ); - } - } - - final ActorRef myListener = context().actorOf(Props.create(MyListener.class)); - context().actorOf(Channel.props( - ChannelSettings.create().withRedeliverFailureListener(null))); - //#channel-custom-listener - - receive(ReceiveBuilder. 
- match(Persistent.class, p -> { - Persistent out = p.withPayload("done " + p.payload()); - channel.tell(Deliver.create(out, destination.path()), self()); - - //#channel-example-reply - channel.tell(Deliver.create(out, sender().path()), self()); - //#channel-example-reply - }).build() - ); - } - } + }; static Object o4 = new Object() { - //#save-snapshot - class MyProcessor extends AbstractProcessor { + class MyPersistentActor extends AbstractPersistentActor { + + //#save-snapshot private Object state; - public MyProcessor() { - receive(ReceiveBuilder. + @Override public PartialFunction receiveCommand() { + return ReceiveBuilder. match(String.class, s -> s.equals("snap"), s -> saveSnapshot(state)). match(SaveSnapshotSuccess.class, ss -> { @@ -357,35 +244,54 @@ public class LambdaPersistenceDocTest { match(SaveSnapshotFailure.class, sf -> { SnapshotMetadata metadata = sf.metadata(); // ... - }).build() - ); + }).build(); } + //#save-snapshot + + @Override public String persistenceId() { + return "persistence-id"; + } + + @Override public PartialFunction receiveRecover() { + return ReceiveBuilder. + match(RecoveryCompleted.class, r -> {/* ...*/}).build(); + } + } - //#save-snapshot }; static Object o5 = new Object() { - //#snapshot-offer - class MyProcessor extends AbstractProcessor { + + class MyPersistentActor extends AbstractPersistentActor { + //#snapshot-offer private Object state; - public MyProcessor() { - receive(ReceiveBuilder. + @Override public PartialFunction receiveRecover() { + return ReceiveBuilder. match(SnapshotOffer.class, s -> { state = s.snapshot(); // ... }). - match(Persistent.class, p -> {/* ...*/}).build() - ); + match(String.class, s -> {/* ...*/}).build(); } + //#snapshot-offer + + @Override public String persistenceId() { + return "persistence-id"; + } + + @Override public PartialFunction receiveCommand() { + return ReceiveBuilder. 
+ match(String.class, s -> {/* ...*/}).build(); + } } - //#snapshot-offer + class MyActor extends AbstractActor { - ActorRef processor; + ActorRef persistentActor; public MyActor() { - processor = context().actorOf(Props.create(MyProcessor.class)); + persistentActor = context().actorOf(Props.create(MyPersistentActor.class)); receive(ReceiveBuilder. match(Object.class, o -> {/* ... */}).build() ); @@ -393,7 +299,7 @@ public class LambdaPersistenceDocTest { private void recover() { //#snapshot-criteria - processor.tell(Recover.create( + persistentActor.tell(Recover.create( SnapshotSelectionCriteria .create(457L, System.currentTimeMillis())), null); //#snapshot-criteria @@ -401,99 +307,6 @@ public class LambdaPersistenceDocTest { } }; - static Object o6 = new Object() { - //#batch-write - class MyProcessor extends AbstractProcessor { - public MyProcessor() { - receive(ReceiveBuilder. - match(Persistent.class, p -> p.payload().equals("a"), - p -> {/* ... */}). - match(Persistent.class, p -> p.payload().equals("b"), - p -> {/* ... */}).build() - ); - } - } - - class Example { - final ActorSystem system = ActorSystem.create("example"); - final ActorRef processor = system.actorOf(Props.create(MyProcessor.class)); - - public void batchWrite() { - processor.tell(PersistentBatch - .create(asList(Persistent.create("a"), - Persistent.create("b"))), null); - } - - // ... 
- } - //#batch-write - }; - - static Object o7 = new Object() { - abstract class MyProcessor extends AbstractProcessor { - ActorRef destination; - - public void foo() { - //#persistent-channel-example - final ActorRef channel = context().actorOf( - PersistentChannel.props( - PersistentChannelSettings.create() - .withRedeliverInterval(Duration.create(30, TimeUnit.SECONDS)) - .withRedeliverMax(15)), - "myPersistentChannel"); - - channel.tell(Deliver.create(Persistent.create("example"), destination.path()), self()); - //#persistent-channel-example - //#persistent-channel-watermarks - PersistentChannelSettings.create() - .withPendingConfirmationsMax(10000) - .withPendingConfirmationsMin(2000); - //#persistent-channel-watermarks - //#persistent-channel-reply - PersistentChannelSettings.create().withReplyPersistent(true); - //#persistent-channel-reply - } - } - }; - - static Object o8 = new Object() { - //#reliable-event-delivery - class MyEventsourcedProcessor extends AbstractPersistentActor { - private ActorRef destination; - private ActorRef channel; - - public MyEventsourcedProcessor(ActorRef destination) { - this.destination = destination; - this.channel = context().actorOf(Channel.props(), "channel"); - } - - @Override public String persistenceId() { - return "my-stable-persistence-id"; - } - - private void handleEvent(String event) { - // update state - // ... - // reliably deliver events - channel.tell(Deliver.create( - Persistent.create(event, getCurrentPersistentMessage()), - destination.path()), self()); - } - - @Override public PartialFunction receiveRecover() { - return ReceiveBuilder. - match(String.class, this::handleEvent).build(); - } - - @Override public PartialFunction receiveCommand() { - return ReceiveBuilder. 
- match(String.class, s -> s.equals("cmd"), - s -> persist("evt", this::handleEvent)).build(); - } - } - //#reliable-event-delivery - }; - static Object o9 = new Object() { //#persist-async class MyPersistentActor extends AbstractPersistentActor { @@ -528,9 +341,9 @@ public class LambdaPersistenceDocTest { public void usage() { final ActorSystem system = ActorSystem.create("example"); //#persist-async-usage - final ActorRef processor = system.actorOf(Props.create(MyPersistentActor.class)); - processor.tell("a", null); - processor.tell("b", null); + final ActorRef persistentActor = system.actorOf(Props.create(MyPersistentActor.class)); + persistentActor.tell("a", null); + persistentActor.tell("b", null); // possible order of received messages: // a @@ -581,9 +394,9 @@ public class LambdaPersistenceDocTest { final ActorSystem system = ActorSystem.create("example"); final ActorRef sender = null; // your imaginary sender here //#defer-caller - final ActorRef processor = system.actorOf(Props.create(MyPersistentActor.class)); - processor.tell("a", sender); - processor.tell("b", sender); + final ActorRef persistentActor = system.actorOf(Props.create(MyPersistentActor.class)); + persistentActor.tell("a", sender); + persistentActor.tell("b", sender); // order of received messages: // a diff --git a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/doc/LambdaPersistencePluginDocTest.java b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/doc/LambdaPersistencePluginDocTest.java index 0aa29404c6..4a4bfd65fa 100644 --- a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/doc/LambdaPersistencePluginDocTest.java +++ b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/doc/LambdaPersistencePluginDocTest.java @@ -5,17 +5,19 @@ package doc; //#plugin-imports -import akka.japi.pf.ReceiveBuilder; -import scala.concurrent.Future; -import akka.japi.Option; -import akka.japi.Procedure; import akka.persistence.*; import 
akka.persistence.journal.japi.*; import akka.persistence.snapshot.japi.*; //#plugin-imports + import akka.actor.*; import akka.persistence.journal.leveldb.SharedLeveldbJournal; import akka.persistence.journal.leveldb.SharedLeveldbStore; +import akka.japi.pf.ReceiveBuilder; +import scala.concurrent.Future; +import akka.japi.Option; +import akka.japi.Procedure; + public class LambdaPersistencePluginDocTest { @@ -81,16 +83,6 @@ public class LambdaPersistencePluginDocTest { return null; } - @Override - public Future doAsyncWriteConfirmations(Iterable confirmations) { - return null; - } - - @Override - public Future doAsyncDeleteMessages(Iterable messageIds, boolean permanent) { - return null; - } - @Override public Future doAsyncDeleteMessagesTo(String persistenceId, long toSequenceNr, boolean permanent) { return null; diff --git a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/PersistentActorExample.java b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/PersistentActorExample.java index e3bb11710c..b77c9a825a 100644 --- a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/PersistentActorExample.java +++ b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/PersistentActorExample.java @@ -21,6 +21,7 @@ import java.util.ArrayList; import static java.util.Arrays.asList; class Cmd implements Serializable { + private static final long serialVersionUID = 1L; private final String data; public Cmd(String data) { @@ -33,6 +34,7 @@ class Cmd implements Serializable { } class Evt implements Serializable { + private static final long serialVersionUID = 1L; private final String data; public Evt(String data) { @@ -45,6 +47,7 @@ class Evt implements Serializable { } class ExampleState implements Serializable { + private static final long serialVersionUID = 1L; private final ArrayList events; public ExampleState() { @@ -116,13 +119,13 @@ class 
ExamplePersistentActor extends AbstractPersistentActor { public class PersistentActorExample { public static void main(String... args) throws Exception { final ActorSystem system = ActorSystem.create("example"); - final ActorRef processor = system.actorOf(Props.create(ExamplePersistentActor.class), "processor-4-java8"); - processor.tell(new Cmd("foo"), null); - processor.tell(new Cmd("baz"), null); - processor.tell(new Cmd("bar"), null); - processor.tell("snap", null); - processor.tell(new Cmd("buzz"), null); - processor.tell("print", null); + final ActorRef persistentActor = system.actorOf(Props.create(ExamplePersistentActor.class), "persistentActor-4-java8"); + persistentActor.tell(new Cmd("foo"), null); + persistentActor.tell(new Cmd("baz"), null); + persistentActor.tell(new Cmd("bar"), null); + persistentActor.tell("snap", null); + persistentActor.tell(new Cmd("buzz"), null); + persistentActor.tell("print", null); Thread.sleep(1000); system.terminate(); diff --git a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/PersistentActorFailureExample.java b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/PersistentActorFailureExample.java index 7d87d97363..ee58240804 100644 --- a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/PersistentActorFailureExample.java +++ b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/PersistentActorFailureExample.java @@ -72,6 +72,6 @@ public class PersistentActorFailureExample { // etc ... 
Thread.sleep(1000); - system.shutdown(); + system.terminate(); } } diff --git a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/SnapshotExample.java b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/SnapshotExample.java index 8e7f4af8cc..cc8c43d4f5 100644 --- a/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/SnapshotExample.java +++ b/akka-samples/akka-sample-persistence-java-lambda/src/main/java/sample/persistence/SnapshotExample.java @@ -18,6 +18,7 @@ import java.util.ArrayList; public class SnapshotExample { public static class ExampleState implements Serializable { + private static final long serialVersionUID = 1L; private final ArrayList received; public ExampleState() { diff --git a/akka-samples/akka-sample-persistence-java/src/main/java/sample/persistence/PersistentActorExample.java b/akka-samples/akka-sample-persistence-java/src/main/java/sample/persistence/PersistentActorExample.java index a10533d5b0..d5c6a1b8f2 100644 --- a/akka-samples/akka-sample-persistence-java/src/main/java/sample/persistence/PersistentActorExample.java +++ b/akka-samples/akka-sample-persistence-java/src/main/java/sample/persistence/PersistentActorExample.java @@ -13,6 +13,7 @@ import java.util.ArrayList; import static java.util.Arrays.asList; class Cmd implements Serializable { + private static final long serialVersionUID = 1L; private final String data; public Cmd(String data) { @@ -25,6 +26,7 @@ class Cmd implements Serializable { } class Evt implements Serializable { + private static final long serialVersionUID = 1L; private final String data; public Evt(String data) { @@ -37,6 +39,7 @@ class Evt implements Serializable { } class ExampleState implements Serializable { + private static final long serialVersionUID = 1L; private final ArrayList events; public ExampleState() { diff --git 
a/akka-samples/akka-sample-persistence-java/src/main/java/sample/persistence/PersistentActorFailureExample.java b/akka-samples/akka-sample-persistence-java/src/main/java/sample/persistence/PersistentActorFailureExample.java index 0db0bbc4fa..94654db5f3 100644 --- a/akka-samples/akka-sample-persistence-java/src/main/java/sample/persistence/PersistentActorFailureExample.java +++ b/akka-samples/akka-sample-persistence-java/src/main/java/sample/persistence/PersistentActorFailureExample.java @@ -71,6 +71,6 @@ public class PersistentActorFailureExample { // etc ... Thread.sleep(1000); - system.shutdown(); + system.terminate(); } } diff --git a/akka-samples/akka-sample-persistence-java/src/main/java/sample/persistence/SnapshotExample.java b/akka-samples/akka-sample-persistence-java/src/main/java/sample/persistence/SnapshotExample.java index 65cdeae01c..789bbc9fba 100644 --- a/akka-samples/akka-sample-persistence-java/src/main/java/sample/persistence/SnapshotExample.java +++ b/akka-samples/akka-sample-persistence-java/src/main/java/sample/persistence/SnapshotExample.java @@ -14,6 +14,7 @@ import java.util.ArrayList; public class SnapshotExample { public static class ExampleState implements Serializable { + private static final long serialVersionUID = 1L; private final ArrayList received; public ExampleState() { diff --git a/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/PersistentActorFailureExample.scala b/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/PersistentActorFailureExample.scala index 2c9469059e..56b57cd1af 100644 --- a/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/PersistentActorFailureExample.scala +++ b/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/PersistentActorFailureExample.scala @@ -9,7 +9,7 @@ object PersistentActorFailureExample extends App { var received: List[String] = Nil // state - def receiveCommand: Actor.Receive = { + def 
receiveCommand: Receive = { case "print" => println(s"received ${received.reverse}") case "boom" => throw new Exception("boom") case payload: String => @@ -17,7 +17,7 @@ object PersistentActorFailureExample extends App { } - def receiveRecover: Actor.Receive = { + def receiveRecover: Receive = { case s: String => received = s :: received } } @@ -49,5 +49,5 @@ object PersistentActorFailureExample extends App { // etc ... Thread.sleep(1000) - system.shutdown() + system.terminate() } diff --git a/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/SnapshotExample.scala b/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/SnapshotExample.scala index 73e01a2137..9d3c6b7ae6 100644 --- a/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/SnapshotExample.scala +++ b/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/SnapshotExample.scala @@ -14,7 +14,7 @@ object SnapshotExample extends App { var state = ExampleState() - def receiveCommand: Actor.Receive = { + def receiveCommand: Receive = { case "print" => println("current state = " + state) case "snap" => saveSnapshot(state) case SaveSnapshotSuccess(metadata) => // ... 
@@ -23,7 +23,7 @@ object SnapshotExample extends App { persist(s) { evt => state = state.updated(evt) } } - def receiveRecover: Actor.Receive = { + def receiveRecover: Receive = { case SnapshotOffer(_, s: ExampleState) => println("offered state = " + s) state = s diff --git a/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/ViewExample.scala b/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/ViewExample.scala index bf6ad89078..fb385d9728 100644 --- a/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/ViewExample.scala +++ b/akka-samples/akka-sample-persistence-scala/src/main/scala/sample/persistence/ViewExample.scala @@ -11,7 +11,7 @@ object ViewExample extends App { var count = 1 - def receiveCommand: Actor.Receive = { + def receiveCommand: Receive = { case payload: String => println(s"persistentActor received ${payload} (nr = ${count})") persist(payload + count) { evt => @@ -19,7 +19,7 @@ object ViewExample extends App { } } - def receiveRecover: Actor.Receive = { + def receiveRecover: Receive = { case _: String => count += 1 } } @@ -32,15 +32,20 @@ object ViewExample extends App { def receive = { case "snap" => + println(s"view saving snapshot") saveSnapshot(numReplicated) case SnapshotOffer(metadata, snapshot: Int) => numReplicated = snapshot println(s"view received snapshot offer ${snapshot} (metadata = ${metadata})") case payload if isPersistent => numReplicated += 1 - println(s"view received persistent ${payload} (num replicated = ${numReplicated})") + println(s"view replayed event ${payload} (num replicated = ${numReplicated})") + case SaveSnapshotSuccess(metadata) => + println(s"view saved snapshot (metadata = ${metadata})") + case SaveSnapshotFailure(metadata, reason) => + println(s"view snapshot failure (metadata = ${metadata}), caused by ${reason}") case payload => - println(s"view received not persitent ${payload}") + println(s"view received other message ${payload}") } }