!per #3729 Journaling protocol optimization

- internal batching of individually received Persistent messages
- testing fault tolerance of Processor in presence of random
  * journaling failures
  * processing failures
Martin Krasser 2013-11-20 13:47:42 +01:00
parent 5af6d0711a
commit 6e2f80bab0
39 changed files with 563 additions and 364 deletions
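A rough sketch (not from this commit) of what the two commit-message points mean for users: Persistent messages sent one at a time can now be grouped by the journal into a single internal write batch, presumably capped by the new akka.persistence.journal.max-batch-size setting introduced further down, while explicit PersistentBatch writes keep working as before. MyProcessor mirrors the doc-spec example changed in this commit; BatchingSketch is an illustrative name.

import akka.actor.{ ActorSystem, Props }
import akka.persistence.{ Persistent, PersistentBatch, Processor }

// Modelled on the MyProcessor doc-spec example changed in this commit.
class MyProcessor extends Processor {
  def receive = {
    case Persistent(payload, sequenceNr) ⇒ // message successfully written to journal
    case other                           ⇒ // message not written to journal
  }
}

object BatchingSketch extends App {
  val system = ActorSystem("example")
  val processor = system.actorOf(Props[MyProcessor])

  // Individually sent Persistent messages may now be written to the journal in one
  // internal batch, bounded by akka.persistence.journal.max-batch-size (see below).
  processor ! Persistent("a")
  processor ! Persistent("b")

  // Explicit batches are unchanged and still written in a single journal call.
  processor ! PersistentBatch(List(Persistent("c"), Persistent("d")))

  system.shutdown()
}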

View file

@@ -60,14 +60,14 @@ object LoggingDocSpec {
reqId += 1
val always = Map("requestId" -> reqId)
val perMessage = currentMessage match {
case r: Req => Map("visitorId" -> r.visitorId)
case _ => Map()
case r: Req Map("visitorId" -> r.visitorId)
case _ Map()
}
always ++ perMessage
}
def receive: Receive = {
case r: Req => {
case r: Req ⇒ {
log.info(s"Starting new request: ${r.work}")
}
}
@@ -75,7 +75,6 @@ object LoggingDocSpec {
//#mdc-actor
//#my-event-listener
import akka.event.Logging.InitializeLogger
import akka.event.Logging.LoggerInitialized
@@ -117,7 +116,7 @@ object LoggingDocSpec {
class LoggingDocSpec extends AkkaSpec {
import LoggingDocSpec.{MyActor, MdcActor, MdcActorMixin, Req}
import LoggingDocSpec.{ MyActor, MdcActor, MdcActorMixin, Req }
"use a logging actor" in {
val myActor = system.actorOf(Props[MyActor])

View file

@@ -18,15 +18,12 @@ trait PersistenceDocSpec {
class MyProcessor extends Processor {
def receive = {
case Persistent(payload, sequenceNr) ⇒ {
// message successfully written to journal
}
case PersistenceFailure(payload, sequenceNr, cause) ⇒ {
// message failed to be written to journal
}
case other ⇒ {
// message not written to journal
}
case Persistent(payload, sequenceNr) ⇒
// message successfully written to journal
case PersistenceFailure(payload, sequenceNr, cause) ⇒
// message failed to be written to journal
case other ⇒
// message not written to journal
}
}
//#definition
@@ -109,18 +106,16 @@ trait PersistenceDocSpec {
val channel = context.actorOf(Channel.props(), name = "myChannel")
def receive = {
case p @ Persistent(payload, _) ⇒ {
case p @ Persistent(payload, _) ⇒
channel ! Deliver(p.withPayload(s"processed ${payload}"), destination)
}
}
}
class MyDestination extends Actor {
def receive = {
case p @ ConfirmablePersistent(payload, _) ⇒ {
case p @ ConfirmablePersistent(payload, _) ⇒
println(s"received ${payload}")
p.confirm()
}
}
}
//#channel-example
@@ -135,7 +130,7 @@ trait PersistenceDocSpec {
//#channel-id-override
def receive = {
case p @ Persistent(payload, _) ⇒ {
case p @ Persistent(payload, _) ⇒
//#channel-example-reply
channel ! Deliver(p.withPayload(s"processed ${payload}"), sender)
//#channel-example-reply
@@ -144,8 +139,7 @@ trait PersistenceDocSpec {
//#resolve-destination
//#resolve-sender
channel forward Deliver(p, destination, Resolve.Sender)
//#resolve-sender
}
//#resolve-sender
}
}
@@ -175,15 +169,13 @@ trait PersistenceDocSpec {
startWith("closed", 0)
when("closed") {
case Event(Persistent("open", _), counter) {
case Event(Persistent("open", _), counter)
goto("open") using (counter + 1) replying (counter)
}
}
when("open") {
case Event(Persistent("close", _), counter) {
case Event(Persistent("close", _), counter)
goto("closed") using (counter + 1) replying (counter)
}
}
}
//#fsm-example
@@ -239,7 +231,7 @@ trait PersistenceDocSpec {
val system = ActorSystem("example")
val processor = system.actorOf(Props[MyProcessor])
processor ! PersistentBatch(Vector(Persistent("a"), Persistent("b")))
processor ! PersistentBatch(List(Persistent("a"), Persistent("b")))
//#batch-write
system.shutdown()
}

View file

@@ -23,6 +23,9 @@ import akka.persistence.snapshot._
object PersistencePluginDocSpec {
val config =
"""
//#max-batch-size
akka.persistence.journal.max-batch-size = 200
//#max-batch-size
//#journal-config
akka.persistence.journal.leveldb.dir = "target/journal"
//#journal-config
@@ -69,8 +72,7 @@ class PersistencePluginDocSpec extends WordSpec {
}
class MyJournal extends AsyncWriteJournal {
def writeAsync(persistent: PersistentRepr): Future[Unit] = ???
def writeBatchAsync(persistentBatch: Seq[PersistentRepr]): Future[Unit] = ???
def writeAsync(persistentBatch: Seq[PersistentRepr]): Future[Unit] = ???
def deleteAsync(processorId: String, fromSequenceNr: Long, toSequenceNr: Long, permanent: Boolean): Future[Unit] = ???
def confirmAsync(processorId: String, sequenceNr: Long, channelId: String): Future[Unit] = ???
def replayAsync(processorId: String, fromSequenceNr: Long, toSequenceNr: Long)(replayCallback: (PersistentRepr) ⇒ Unit): Future[Long] = ???
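
The plugin SPI change above folds writeBatchAsync into writeAsync, which now receives the whole batch in one call. Below is a minimal in-memory sketch of the new signature, not taken from this commit: the class name and buffer are made up, and it assumes the Seq in the plugin API is scala.collection.immutable.Seq, as is usual in Akka.

import scala.collection.immutable.Seq
import scala.concurrent.Future
import akka.persistence.PersistentRepr
import akka.persistence.journal.AsyncWriteJournal

// Illustrative journal: only writeAsync has a body; the other methods stay
// unimplemented, like the MyJournal stub above.
class InMemoryJournal extends AsyncWriteJournal {
  private var store = Vector.empty[PersistentRepr]

  def writeAsync(persistentBatch: Seq[PersistentRepr]): Future[Unit] = {
    store ++= persistentBatch // one call now persists the whole batch
    Future.successful(())
  }

  def deleteAsync(processorId: String, fromSequenceNr: Long, toSequenceNr: Long, permanent: Boolean): Future[Unit] = ???
  def confirmAsync(processorId: String, sequenceNr: Long, channelId: String): Future[Unit] = ???
  def replayAsync(processorId: String, fromSequenceNr: Long, toSequenceNr: Long)(replayCallback: (PersistentRepr) ⇒ Unit): Future[Long] = ???
}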