replace unicode arrows

* ⇒, →, ←
* because we don't want to show them in documentation snippets, and it's
  complicated to avoid that when the snippets are
  located in src/test/scala in individual modules
* don't replace the object `→` in FSM.scala and PersistentFSM.scala
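For reference, the Unicode and ASCII spellings are equivalent to the Scala 2 compiler (`⇒` and `←` were language-level aliases, later deprecated; `→` comes from `Predef.ArrowAssoc`), so the change below is purely cosmetic. A minimal sketch of the equivalence:

```scala
// Both spellings compile identically; only the ASCII form is kept in the docs.
val double: Int => Int = x => x * 2   // was: Int ⇒ Int, x ⇒ x * 2
val entry = "a" -> 1                  // was: "a" → 1
for (i <- 1 to 3) println(double(i))  // was: i ← 1 to 3
```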
Patrik Nordwall 2019-02-09 15:25:39 +01:00
parent e4d38f92a4
commit 5c96a5f556
1521 changed files with 18846 additions and 18786 deletions


@@ -12,7 +12,7 @@ For example, the following snippet will fail with a timeout exception:
```scala
...
.toMat(StreamConverters.asInputStream().mapMaterializedValue { inputStream ⇒
.toMat(StreamConverters.asInputStream().mapMaterializedValue { inputStream =>
inputStream.read() // this could block forever
...
}).run()
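For context, a self-contained sketch of the corrected pattern (the object name and the `Source.single` input are assumptions for illustration, and Akka 2.5-style materialization is assumed):

```scala
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Keep, Source, StreamConverters }
import akka.util.ByteString

object InputStreamSketch extends App {
  implicit val system: ActorSystem = ActorSystem("sketch")
  implicit val mat: ActorMaterializer = ActorMaterializer()

  val inputStream = Source.single(ByteString("data"))
    .toMat(StreamConverters.asInputStream())(Keep.right)
    .run()

  // Safe: read() happens after run(), on a running stream. Doing the
  // read() inside mapMaterializedValue would block materialization
  // itself, which is what the snippet above warns about.
  println(inputStream.read())
  system.terminate()
}
```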


@@ -115,7 +115,7 @@ Akka circuit breaker provides a way to achieve such a use case:
All methods above accept an argument `defineFailureFn`.
Type of `defineFailureFn`: @scala[`Try[T] ⇒ Boolean`]@java[`BiFunction[Optional[T], Optional[Throwable], java.lang.Boolean]`]
Type of `defineFailureFn`: @scala[`Try[T] => Boolean`]@java[`BiFunction[Optional[T], Optional[Throwable], java.lang.Boolean]`]
@scala[This is a function which takes in a `Try[T]` and returns a `Boolean`. The `Try[T]` corresponds to the `Future[T]` of the protected call.]
@java[The response of a protected call is modelled using `Optional[T]` for a successful return value and `Optional[Throwable]` for exceptions.] This function should return `true` if the call should increase the failure count, and `false` otherwise.
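As an illustration, a minimal Scala definition of such a function (treating an empty successful response as a failure is an assumption made up for this sketch):

```scala
import scala.util.{ Failure, Success, Try }

// Count thrown exceptions as failures, and also count a successful
// but empty response as a failure so it trips the breaker.
val defineFailureFn: Try[String] => Boolean = {
  case Success(body) => body.isEmpty
  case Failure(_)    => true
}
```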


@@ -82,7 +82,7 @@ For example, the following snippet will fail with a timeout exception:
```scala
...
.toMat(StreamConverters.asInputStream().mapMaterializedValue { inputStream ⇒
.toMat(StreamConverters.asInputStream().mapMaterializedValue { inputStream =>
inputStream.read() // this could block forever
...
}).run()


@@ -9,5 +9,5 @@ trait CompileOnlySpec {
* Given a block of code... does NOT execute it.
* Useful when writing code samples in tests, which should only be compiled.
*/
def compileOnlySpec(body: ⇒ Unit) = ()
def compileOnlySpec(body: => Unit) = ()
}
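The arrow being rewritten here is the by-name marker in `body: => Unit`: the block passed to `compileOnlySpec` is type-checked but never evaluated. A small illustration (the exception message is made up):

```scala
def compileOnlySpec(body: => Unit): Unit = ()

// Compiles, and does not throw: the by-name block is never run.
compileOnlySpec {
  throw new RuntimeException("never thrown")
}
```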


@@ -31,8 +31,8 @@ class MyActor extends Actor {
val log = Logging(context.system, this)
def receive = {
case "test" ⇒ log.info("received test")
case _ ⇒ log.info("received unknown message")
case "test" => log.info("received test")
case _ => log.info("received unknown message")
}
}
//#my-actor
@@ -45,20 +45,20 @@ class FirstActor extends Actor {
val child = context.actorOf(Props[MyActor], name = "myChild")
//#plus-some-behavior
def receive = {
case x ⇒ sender() ! x
case x => sender() ! x
}
//#plus-some-behavior
}
//#context-actorOf
class ActorWithArgs(arg: String) extends Actor {
def receive = { case _ ⇒ () }
def receive = { case _ => () }
}
//#actor-with-value-class-argument
class Argument(val value: String) extends AnyVal
class ValueClassActor(arg: Argument) extends Actor {
def receive = { case _ ⇒ () }
def receive = { case _ => () }
}
object ValueClassActor {
@@ -83,7 +83,7 @@ class DemoActorWrapper extends Actor {
class DemoActor(magicNumber: Int) extends Actor {
def receive = {
case x: Int ⇒ sender() ! (x + magicNumber)
case x: Int => sender() ! (x + magicNumber)
}
}
@@ -93,7 +93,7 @@ class DemoActorWrapper extends Actor {
// ...
//#props-factory
def receive = {
case msg ⇒
case msg =>
}
//#props-factory
}
@@ -111,8 +111,8 @@ class ActorWithMessagesWrapper {
class MyActor extends Actor with ActorLogging {
import MyActor._
def receive = {
case Greeting(greeter) ⇒ log.info(s"I was greeted by $greeter.")
case Goodbye ⇒ log.info("Someone said goodbye to me.")
case Greeting(greeter) => log.info(s"I was greeted by $greeter.")
case Goodbye => log.info("Someone said goodbye to me.")
}
}
//#messages-in-companion
@@ -139,13 +139,13 @@ class Hook extends Actor {
class ReplyException extends Actor {
def receive = {
case _ ⇒
case _ =>
//#reply-exception
try {
val result = operation()
sender() ! result
} catch {
case e: Exception ⇒
case e: Exception =>
sender() ! akka.actor.Status.Failure(e)
throw e
}
@@ -163,10 +163,10 @@ class StoppingActorsWrapper {
val child: ActorRef = ???
def receive = {
case "interrupt-child" ⇒
case "interrupt-child" =>
context stop child
case "done" ⇒
case "done" =>
context stop self
}
@@ -185,15 +185,15 @@ class Manager extends Actor {
val worker = context.watch(context.actorOf(Props[Cruncher], "worker"))
def receive = {
case "job" ⇒ worker ! "crunch"
case Shutdown ⇒
case "job" => worker ! "crunch"
case Shutdown =>
worker ! PoisonPill
context become shuttingDown
}
def shuttingDown: Receive = {
case "job" ⇒ sender() ! "service unavailable, shutting down"
case Terminated(`worker`) ⇒
case "job" => sender() ! "service unavailable, shutting down"
case Terminated(`worker`) =>
context stop self
}
}
@@ -201,7 +201,7 @@ class Manager extends Actor {
class Cruncher extends Actor {
def receive = {
case "crunch" ⇒ // crunch...
case "crunch" => // crunch...
}
}
@@ -212,10 +212,10 @@ class Swapper extends Actor {
val log = Logging(system, this)
def receive = {
case Swap ⇒
case Swap =>
log.info("Hi")
become({
case Swap ⇒
case Swap =>
log.info("Ho")
unbecome() // resets the latest 'become' (just for fun)
}, discardOld = false) // push on top instead of replace
@@ -237,22 +237,22 @@ object SwapperApp extends App {
//#receive-orElse
trait ProducerBehavior {
this: Actor ⇒
this: Actor =>
val producerBehavior: Receive = {
case GiveMeThings ⇒
case GiveMeThings =>
sender() ! Give("thing")
}
}
trait ConsumerBehavior {
this: Actor with ActorLogging ⇒
this: Actor with ActorLogging =>
val consumerBehavior: Receive = {
case ref: ActorRef ⇒
case ref: ActorRef =>
ref ! GiveMeThings
case Give(thing) ⇒
case Give(thing) =>
log.info("Got a thing! It's {}", thing)
}
}
@@ -289,7 +289,7 @@ class Pinger extends Actor {
var countDown = 100
def receive = {
case Pong ⇒
case Pong =>
println(s"${self.path} received pong, count down $countDown")
if (countDown > 0) {
@@ -304,7 +304,7 @@ class Pinger extends Actor {
class Ponger(pinger: ActorRef) extends Actor {
def receive = {
case Ping ⇒
case Ping =>
println(s"${self.path} received ping")
pinger ! Pong
}
@@ -331,7 +331,7 @@ class ActorDocSpec extends AkkaSpec("""
import context._
val myActor = actorOf(Props[MyActor], name = "myactor")
def receive = {
case x ⇒ myActor ! x
case x => myActor ! x
}
}
//#import-context
@@ -348,17 +348,17 @@ class ActorDocSpec extends AkkaSpec("""
// TODO: convert docs to AkkaSpec(Map(...))
val filter = EventFilter.custom {
case e: Logging.Info ⇒ true
case _ ⇒ false
case e: Logging.Info => true
case _ => false
}
system.eventStream.publish(TestEvent.Mute(filter))
system.eventStream.subscribe(testActor, classOf[Logging.Info])
myActor ! "test"
expectMsgPF(1 second) { case Logging.Info(_, _, "received test") ⇒ true }
expectMsgPF(1 second) { case Logging.Info(_, _, "received test") => true }
myActor ! "unknown"
expectMsgPF(1 second) { case Logging.Info(_, _, "received unknown message") ⇒ true }
expectMsgPF(1 second) { case Logging.Info(_, _, "received unknown message") => true }
system.eventStream.unsubscribe(testActor)
system.eventStream.publish(TestEvent.UnMute(filter))
@@ -436,8 +436,8 @@ class ActorDocSpec extends AkkaSpec("""
"creating actor with IndirectActorProducer" in {
class Echo(name: String) extends Actor {
def receive = {
case n: Int ⇒ sender() ! name
case message ⇒
case n: Int => sender() ! name
case message =>
val target = testActor
//#forward
target forward message
@@ -514,10 +514,10 @@ class ActorDocSpec extends AkkaSpec("""
// To set an initial delay
context.setReceiveTimeout(30 milliseconds)
def receive = {
case "Hello" ⇒
case "Hello" =>
// To set in a response to a message
context.setReceiveTimeout(100 milliseconds)
case ReceiveTimeout ⇒
case ReceiveTimeout =>
// To turn it off
context.setReceiveTimeout(Duration.Undefined)
throw new RuntimeException("Receive timed out")
@@ -530,18 +530,18 @@ class ActorDocSpec extends AkkaSpec("""
class HotSwapActor extends Actor {
import context._
def angry: Receive = {
case "foo" ⇒ sender() ! "I am already angry?"
case "bar" ⇒ become(happy)
case "foo" => sender() ! "I am already angry?"
case "bar" => become(happy)
}
def happy: Receive = {
case "bar" ⇒ sender() ! "I am already happy :-)"
case "foo" ⇒ become(angry)
case "bar" => sender() ! "I am already happy :-)"
case "foo" => become(angry)
}
def receive = {
case "foo" ⇒ become(angry)
case "bar" ⇒ become(happy)
case "foo" => become(angry)
case "bar" => become(happy)
}
}
//#hot-swap-actor
@@ -555,16 +555,16 @@ class ActorDocSpec extends AkkaSpec("""
import akka.actor.Stash
class ActorWithProtocol extends Actor with Stash {
def receive = {
case "open" ⇒
case "open" =>
unstashAll()
context.become({
case "write" ⇒ // do writing...
case "close" ⇒
case "write" => // do writing...
case "close" =>
unstashAll()
context.unbecome()
case msg ⇒ stash()
case msg => stash()
}, discardOld = false) // stack on top instead of replacing
case msg ⇒ stash()
case msg => stash()
}
}
//#stash
@@ -581,9 +581,9 @@ class ActorDocSpec extends AkkaSpec("""
var lastSender = context.system.deadLetters
def receive = {
case "kill" ⇒
case "kill" =>
context.stop(child); lastSender = sender()
case Terminated(`child`) ⇒ lastSender ! "finished"
case Terminated(`child`) => lastSender ! "finished"
}
}
//#watch
@@ -606,7 +606,7 @@ class ActorDocSpec extends AkkaSpec("""
victim ! Kill
expectMsgPF(hint = "expecting victim to terminate") {
case Terminated(v) if v == victim ⇒ v // the Actor has indeed terminated
case Terminated(v) if v == victim => v // the Actor has indeed terminated
}
//#kill
}
@@ -640,15 +640,15 @@ class ActorDocSpec extends AkkaSpec("""
context.actorSelection("/user/another") ! Identify(identifyId)
def receive = {
case ActorIdentity(`identifyId`, Some(ref)) ⇒
case ActorIdentity(`identifyId`, Some(ref)) =>
context.watch(ref)
context.become(active(ref))
case ActorIdentity(`identifyId`, None) ⇒ context.stop(self)
case ActorIdentity(`identifyId`, None) => context.stop(self)
}
def active(another: ActorRef): Actor.Receive = {
case Terminated(`another`) ⇒ context.stop(self)
case Terminated(`another`) => context.stop(self)
}
}
//#identify
@@ -673,7 +673,7 @@ class ActorDocSpec extends AkkaSpec("""
// the actor has been stopped
} catch {
// the actor wasn't stopped within 5 seconds
case e: akka.pattern.AskTimeoutException ⇒
case e: akka.pattern.AskTimeoutException =>
}
//#gracefulStop
}
@@ -690,9 +690,9 @@ class ActorDocSpec extends AkkaSpec("""
val f: Future[Result] =
for {
x ← ask(actorA, Request).mapTo[Int] // call pattern directly
s ← (actorB ask Request).mapTo[String] // call by implicit conversion
d ← (actorC ? Request).mapTo[Double] // call by symbolic name
x <- ask(actorA, Request).mapTo[Int] // call pattern directly
s <- (actorB ask Request).mapTo[String] // call by implicit conversion
d <- (actorC ? Request).mapTo[Double] // call by symbolic name
} yield Result(x, s, d)
f pipeTo actorD // .. or ..
@@ -702,12 +702,12 @@ class ActorDocSpec extends AkkaSpec("""
class Replier extends Actor {
def receive = {
case ref: ActorRef ⇒
case ref: ActorRef =>
//#reply-with-sender
sender().tell("reply", context.parent) // replies will go back to parent
sender().!("reply")(context.parent) // alternative syntax (beware of the parens!)
//#reply-with-sender
case x ⇒
case x =>
//#reply-without-sender
sender() ! x // replies will go to this actor
//#reply-without-sender
@@ -730,8 +730,8 @@ class ActorDocSpec extends AkkaSpec("""
"using ActorDSL outside of akka.actor package" in {
import akka.actor.ActorDSL._
actor(new Act {
superviseWith(OneForOneStrategy() { case _ ⇒ Stop; Restart; Resume; Escalate })
superviseWith(AllForOneStrategy() { case _ ⇒ Stop; Restart; Resume; Escalate })
superviseWith(OneForOneStrategy() { case _ => Stop; Restart; Resume; Escalate })
superviseWith(AllForOneStrategy() { case _ => Stop; Restart; Resume; Escalate })
})
}
@@ -739,11 +739,11 @@ class ActorDocSpec extends AkkaSpec("""
val someActor = system.actorOf(Props(classOf[Replier], this))
//#coordinated-shutdown-addTask
CoordinatedShutdown(system).addTask(
CoordinatedShutdown.PhaseBeforeServiceUnbind, "someTaskName") { () ⇒
CoordinatedShutdown.PhaseBeforeServiceUnbind, "someTaskName") { () =>
import akka.pattern.ask
import system.dispatcher
implicit val timeout = Timeout(5.seconds)
(someActor ? "stop").map(_ ⇒ Done)
(someActor ? "stop").map(_ => Done)
}
//#coordinated-shutdown-addTask


@@ -12,7 +12,7 @@ import scala.concurrent.{ ExecutionContext, Future }
// #blocking-in-actor
class BlockingActor extends Actor {
def receive = {
case i: Int ⇒
case i: Int =>
Thread.sleep(5000) //block for 5 seconds, representing blocking I/O, etc
println(s"Blocking operation finished: ${i}")
}
@@ -24,7 +24,7 @@ class BlockingFutureActor extends Actor {
implicit val executionContext: ExecutionContext = context.dispatcher
def receive = {
case i: Int ⇒
case i: Int =>
println(s"Calling blocking Future: ${i}")
Future {
Thread.sleep(5000) //block for 5 seconds
@@ -39,7 +39,7 @@ class SeparateDispatcherFutureActor extends Actor {
implicit val executionContext: ExecutionContext = context.system.dispatchers.lookup("my-blocking-dispatcher")
def receive = {
case i: Int ⇒
case i: Int =>
println(s"Calling blocking Future: ${i}")
Future {
Thread.sleep(5000) //block for 5 seconds
@@ -52,7 +52,7 @@ class SeparateDispatcherFutureActor extends Actor {
// #print-actor
class PrintActor extends Actor {
def receive = {
case i: Int ⇒
case i: Int =>
println(s"PrintActor: ${i}")
}
}
@@ -67,7 +67,7 @@ object BlockingDispatcherSample {
val actor1 = system.actorOf(Props(new BlockingFutureActor))
val actor2 = system.actorOf(Props(new PrintActor))
for (i ← 1 to 100) {
for (i <- 1 to 100) {
actor1 ! i
actor2 ! i
}
@@ -103,7 +103,7 @@ object SeparateDispatcherSample {
val actor1 = system.actorOf(Props(new SeparateDispatcherFutureActor))
val actor2 = system.actorOf(Props(new PrintActor))
for (i ← 1 to 100) {
for (i <- 1 to 100) {
actor1 ! i
actor2 ! i
}
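For completeness, the `"my-blocking-dispatcher"` looked up above must exist in configuration; a typical definition along these lines (the pool size is an assumption, not part of this diff):

```hocon
# application.conf: dedicated thread pool for blocking operations
my-blocking-dispatcher {
  type = Dispatcher
  executor = "thread-pool-executor"
  thread-pool-executor {
    fixed-pool-size = 16
  }
  throughput = 1
}
```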


@@ -6,7 +6,7 @@ package docs.actor
import language.postfixOps
import akka.testkit.{ AkkaSpec ⇒ MyFavoriteTestFrameWorkPlusAkkaTestKit }
import akka.testkit.{ AkkaSpec => MyFavoriteTestFrameWorkPlusAkkaTestKit }
import akka.util.ByteString
//#test-code
@@ -55,24 +55,24 @@ class FSMDocSpec extends MyFavoriteTestFrameWorkPlusAkkaTestKit {
//#when-syntax
when(Idle) {
case Event(SetTarget(ref), Uninitialized) ⇒
case Event(SetTarget(ref), Uninitialized) =>
stay using Todo(ref, Vector.empty)
}
//#when-syntax
//#transition-elided
onTransition {
case Active -> Idle ⇒
case Active -> Idle =>
stateData match {
case Todo(ref, queue) ⇒ ref ! Batch(queue)
case _ ⇒ // nothing to do
case Todo(ref, queue) => ref ! Batch(queue)
case _ => // nothing to do
}
}
//#transition-elided
//#when-syntax
when(Active, stateTimeout = 1 second) {
case Event(Flush | StateTimeout, t: Todo) ⇒
case Event(Flush | StateTimeout, t: Todo) =>
goto(Idle) using t.copy(queue = Vector.empty)
}
//#when-syntax
@@ -80,10 +80,10 @@ class FSMDocSpec extends MyFavoriteTestFrameWorkPlusAkkaTestKit {
//#unhandled-elided
whenUnhandled {
// common code for both states
case Event(Queue(obj), t @ Todo(_, v)) ⇒
case Event(Queue(obj), t @ Todo(_, v)) =>
goto(Active) using t.copy(queue = v :+ obj)
case Event(e, s) ⇒
case Event(e, s) =>
log.warning("received unhandled request {} in state {}/{}", e, stateName, s)
stay
}
@@ -109,16 +109,16 @@ class FSMDocSpec extends MyFavoriteTestFrameWorkPlusAkkaTestKit {
//#modifier-syntax
when(SomeState) {
case Event(msg, _) ⇒
case Event(msg, _) =>
goto(Processing) using (newData) forMax (5 seconds) replying (WillDo)
}
//#modifier-syntax
//#transition-syntax
onTransition {
case Idle -> Active ⇒ setTimer("timeout", Tick, 1 second, repeat = true)
case Active -> _ ⇒ cancelTimer("timeout")
case x -> Idle ⇒ log.info("entering Idle from " + x)
case Idle -> Active => setTimer("timeout", Tick, 1 second, repeat = true)
case Active -> _ => cancelTimer("timeout")
case x -> Idle => log.info("entering Idle from " + x)
}
//#transition-syntax
@@ -132,7 +132,7 @@ class FSMDocSpec extends MyFavoriteTestFrameWorkPlusAkkaTestKit {
//#stop-syntax
when(Error) {
case Event("stop", _) ⇒
case Event("stop", _) =>
// do cleanup ...
stop()
}
@@ -140,38 +140,38 @@ class FSMDocSpec extends MyFavoriteTestFrameWorkPlusAkkaTestKit {
//#transform-syntax
when(SomeState)(transform {
case Event(bytes: ByteString, read) ⇒ stay using (read + bytes.length)
case Event(bytes: ByteString, read) => stay using (read + bytes.length)
} using {
case s @ FSM.State(state, read, timeout, stopReason, replies) if read > 1000 ⇒
case s @ FSM.State(state, read, timeout, stopReason, replies) if read > 1000 =>
goto(Processing)
})
//#transform-syntax
//#alt-transform-syntax
val processingTrigger: PartialFunction[State, State] = {
case s @ FSM.State(state, read, timeout, stopReason, replies) if read > 1000 ⇒
case s @ FSM.State(state, read, timeout, stopReason, replies) if read > 1000 =>
goto(Processing)
}
when(SomeState)(transform {
case Event(bytes: ByteString, read) ⇒ stay using (read + bytes.length)
case Event(bytes: ByteString, read) => stay using (read + bytes.length)
} using processingTrigger)
//#alt-transform-syntax
//#termination-syntax
onTermination {
case StopEvent(FSM.Normal, state, data) ⇒ // ...
case StopEvent(FSM.Shutdown, state, data) ⇒ // ...
case StopEvent(FSM.Failure(cause), state, data) ⇒ // ...
case StopEvent(FSM.Normal, state, data) => // ...
case StopEvent(FSM.Shutdown, state, data) => // ...
case StopEvent(FSM.Failure(cause), state, data) => // ...
}
//#termination-syntax
//#unhandled-syntax
whenUnhandled {
case Event(x: X, data) ⇒
case Event(x: X, data) =>
log.info("Received unhandled event: " + x)
stay
case Event(msg, _) ⇒
case Event(msg, _) =>
log.warning("Received unknown event: " + msg)
goto(Error)
}
@@ -185,7 +185,7 @@ class FSMDocSpec extends MyFavoriteTestFrameWorkPlusAkkaTestKit {
//#body-elided
override def logDepth = 12
onTermination {
case StopEvent(FSM.Failure(_), state, data) ⇒
case StopEvent(FSM.Failure(_), state, data) =>
val lastEvents = getLog.mkString("\n\t")
log.warning("Failure in state " + state + " with data " + data + "\n" +
"Events leading up to this point:\n\t" + lastEvents)


@@ -50,14 +50,14 @@ class Listener extends Actor with ActorLogging {
context.setReceiveTimeout(15 seconds)
def receive = {
case Progress(percent) ⇒
case Progress(percent) =>
log.info("Current progress: {} %", percent)
if (percent >= 100.0) {
log.info("That's all, shutting down")
context.system.terminate()
}
case ReceiveTimeout ⇒
case ReceiveTimeout =>
// No progress within 15 seconds, ServiceUnavailable
log.error("Shutting down due to unavailable service")
context.system.terminate()
@@ -84,7 +84,7 @@ class Worker extends Actor with ActorLogging {
// Stop the CounterService child if it throws ServiceUnavailable
override val supervisorStrategy = OneForOneStrategy() {
case _: CounterService.ServiceUnavailable ⇒ Stop
case _: CounterService.ServiceUnavailable => Stop
}
// The sender of the initial Start message will continuously be notified
@@ -95,18 +95,18 @@ class Worker extends Actor with ActorLogging {
import context.dispatcher // Use this Actors' Dispatcher as ExecutionContext
def receive = LoggingReceive {
case Start if progressListener.isEmpty ⇒
case Start if progressListener.isEmpty =>
progressListener = Some(sender())
context.system.scheduler.schedule(Duration.Zero, 1 second, self, Do)
case Do ⇒
case Do =>
counterService ! Increment(1)
counterService ! Increment(1)
counterService ! Increment(1)
// Send current progress to the initial sender
counterService ? GetCurrentCount map {
case CurrentCount(_, count) ⇒ Progress(100.0 * count / totalCount)
case CurrentCount(_, count) => Progress(100.0 * count / totalCount)
} pipeTo progressListener.get
}
}
@@ -138,7 +138,7 @@ class CounterService extends Actor {
override val supervisorStrategy = OneForOneStrategy(
maxNrOfRetries = 3,
withinTimeRange = 5 seconds) {
case _: Storage.StorageException ⇒ Restart
case _: Storage.StorageException => Restart
}
val key = self.path.name
@@ -169,21 +169,21 @@ class CounterService extends Actor {
def receive = LoggingReceive {
case Entry(k, v) if k == key && counter == None ⇒
case Entry(k, v) if k == key && counter == None =>
// Reply from Storage of the initial value, now we can create the Counter
val c = context.actorOf(Props(classOf[Counter], key, v))
counter = Some(c)
// Tell the counter to use current storage
c ! UseStorage(storage)
// and send the buffered backlog to the counter
for ((replyTo, msg) ← backlog) c.tell(msg, sender = replyTo)
for ((replyTo, msg) <- backlog) c.tell(msg, sender = replyTo)
backlog = IndexedSeq.empty
case msg: Increment ⇒ forwardOrPlaceInBacklog(msg)
case msg: Increment => forwardOrPlaceInBacklog(msg)
case msg: GetCurrentCount ⇒ forwardOrPlaceInBacklog(msg)
case msg: GetCurrentCount => forwardOrPlaceInBacklog(msg)
case Terminated(actorRef) if Some(actorRef) == storage ⇒
case Terminated(actorRef) if Some(actorRef) == storage =>
// After 3 restarts the storage child is stopped.
// We receive Terminated because we watch the child, see initStorage.
storage = None
@@ -192,7 +192,7 @@ class CounterService extends Actor {
// Try to re-establish storage after while
context.system.scheduler.scheduleOnce(10 seconds, self, Reconnect)
case Reconnect ⇒
case Reconnect =>
// Re-establish storage after the scheduled delay
initStorage()
}
@@ -202,8 +202,8 @@ class CounterService extends Actor {
// the counter. Before that we place the messages in a backlog, to be sent
// to the counter when it is initialized.
counter match {
case Some(c) ⇒ c forward msg
case None ⇒
case Some(c) => c forward msg
case None =>
if (backlog.size >= MaxBacklog)
throw new ServiceUnavailable(
"CounterService not available, lack of initial value")
@@ -233,15 +233,15 @@ class Counter(key: String, initialValue: Long) extends Actor {
var storage: Option[ActorRef] = None
def receive = LoggingReceive {
case UseStorage(s) ⇒
case UseStorage(s) =>
storage = s
storeCount()
case Increment(n) ⇒
case Increment(n) =>
count += n
storeCount()
case GetCurrentCount ⇒
case GetCurrentCount =>
sender() ! CurrentCount(key, count)
}
@@ -274,8 +274,8 @@ class Storage extends Actor {
val db = DummyDB
def receive = LoggingReceive {
case Store(Entry(key, count)) ⇒ db.save(key, count)
case Get(key) ⇒ sender() ! Entry(key, db.load(key).getOrElse(0L))
case Store(Entry(key, count)) => db.save(key, count)
case Get(key) => sender() ! Entry(key, db.load(key).getOrElse(0L))
}
}


@@ -29,15 +29,15 @@ object FaultHandlingDocSpec {
override val supervisorStrategy =
OneForOneStrategy(maxNrOfRetries = 10, withinTimeRange = 1 minute) {
case _: ArithmeticException ⇒ Resume
case _: NullPointerException ⇒ Restart
case _: IllegalArgumentException ⇒ Stop
case _: Exception ⇒ Escalate
case _: ArithmeticException => Resume
case _: NullPointerException => Restart
case _: IllegalArgumentException => Stop
case _: Exception => Escalate
}
//#strategy
def receive = {
case p: Props ⇒ sender() ! context.actorOf(p)
case p: Props => sender() ! context.actorOf(p)
}
}
//#supervisor
@@ -51,15 +51,15 @@ object FaultHandlingDocSpec {
override val supervisorStrategy =
OneForOneStrategy(maxNrOfRetries = 10, withinTimeRange = 1 minute) {
case _: ArithmeticException ⇒ Resume
case _: NullPointerException ⇒ Restart
case _: IllegalArgumentException ⇒ Stop
case _: Exception ⇒ Escalate
case _: ArithmeticException => Resume
case _: NullPointerException => Restart
case _: IllegalArgumentException => Stop
case _: Exception => Escalate
}
//#strategy2
def receive = {
case p: Props ⇒ sender() ! context.actorOf(p)
case p: Props => sender() ! context.actorOf(p)
}
// override default to kill all children during restart
override def preRestart(cause: Throwable, msg: Option[Any]): Unit = {}
@@ -74,9 +74,9 @@ object FaultHandlingDocSpec {
override val supervisorStrategy =
OneForOneStrategy(maxNrOfRetries = 10, withinTimeRange = 1 minute) {
case _: ArithmeticException ⇒ Resume
case t ⇒
super.supervisorStrategy.decider.applyOrElse(t, (_: Any) ⇒ Escalate)
case _: ArithmeticException => Resume
case t =>
super.supervisorStrategy.decider.applyOrElse(t, (_: Any) => Escalate)
}
//#default-strategy-fallback
@@ -87,9 +87,9 @@ object FaultHandlingDocSpec {
class Child extends Actor {
var state = 0
def receive = {
case ex: Exception ⇒ throw ex
case x: Int ⇒ state = x
case "get" ⇒ sender() ! state
case ex: Exception => throw ex
case x: Int => state = x
case "get" => sender() ! state
}
}
//#child
@@ -149,7 +149,7 @@ class FaultHandlingDocSpec(_system: ActorSystem) extends TestKit(_system)
//#stop
watch(child) // have testActor watch child
child ! new IllegalArgumentException // break it
expectMsgPF() { case Terminated(`child`) ⇒ () }
expectMsgPF() { case Terminated(`child`) => () }
//#stop
}
EventFilter[Exception]("CRASH", occurrences = 2) intercept {
@@ -162,7 +162,7 @@ class FaultHandlingDocSpec(_system: ActorSystem) extends TestKit(_system)
child2 ! new Exception("CRASH") // escalate failure
expectMsgPF() {
case t @ Terminated(`child2`) if t.existenceConfirmed ⇒ ()
case t @ Terminated(`child2`) if t.existenceConfirmed => ()
}
//#escalate-kill
//#escalate-restart


@@ -11,7 +11,7 @@ object InitializationDocSpec {
class PreStartInitExample extends Actor {
override def receive = {
case _ ⇒ // Ignore
case _ => // Ignore
}
//#preStartInit
@@ -38,14 +38,14 @@ object InitializationDocSpec {
var initializeMe: Option[String] = None
override def receive = {
case "init" ⇒
case "init" =>
initializeMe = Some("Up and running")
context.become(initialized, discardOld = true)
}
def initialized: Receive = {
case "U OK?" ⇒ initializeMe foreach { sender() ! _ }
case "U OK?" => initializeMe foreach { sender() ! _ }
}
//#messageInit


@@ -18,7 +18,7 @@ class PropsEdgeCaseSpec extends WordSpec with CompileOnlySpec {
//#props-edge-cases-value-class-example
class ValueActor(value: MyValueClass) extends Actor {
def receive = {
case multiplier: Long ⇒ sender() ! (value.v * multiplier)
case multiplier: Long => sender() ! (value.v * multiplier)
}
}
val valueClassProp = Props(classOf[ValueActor], MyValueClass(5)) // Unsupported
@@ -27,7 +27,7 @@ class PropsEdgeCaseSpec extends WordSpec with CompileOnlySpec {
//#props-edge-cases-default-values
class DefaultValueActor(a: Int, b: Int = 5) extends Actor {
def receive = {
case x: Int ⇒ sender() ! ((a + x) * b)
case x: Int => sender() ! ((a + x) * b)
}
}
@@ -35,7 +35,7 @@ class PropsEdgeCaseSpec extends WordSpec with CompileOnlySpec {
class DefaultValueActor2(b: Int = 5) extends Actor {
def receive = {
case x: Int ⇒ sender() ! (x * b)
case x: Int => sender() ! (x * b)
}
}
val defaultValueProp2 = Props[DefaultValueActor2] // Unsupported


@@ -44,7 +44,7 @@ class SchedulerDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) {
val Tick = "tick"
class TickActor extends Actor {
def receive = {
case Tick ⇒ //Do something
case Tick => //Do something
}
}
val tickActor = system.actorOf(Props(classOf[TickActor], this))


@@ -19,13 +19,13 @@ class SharedMutableStateDocSpec {
class EchoActor extends Actor {
def receive = {
case msg ⇒ sender() ! msg
case msg => sender() ! msg
}
}
class CleanUpActor extends Actor {
def receive = {
case set: mutable.Set[_] ⇒ set.clear()
case set: mutable.Set[_] => set.clear()
}
}
@@ -44,7 +44,7 @@ class SharedMutableStateDocSpec {
}
def receive = {
case _ ⇒
case _ =>
implicit val ec = context.dispatcher
implicit val timeout = Timeout(5 seconds) // needed for `?` below
@@ -53,7 +53,7 @@ class SharedMutableStateDocSpec {
// application to break in weird ways
Future { state = "This will race" }
((echoActor ? Message("With this other one")).mapTo[Message])
.foreach { received ⇒ state = received.msg }
.foreach { received => state = received.msg }
// Very bad: shared mutable object allows
// the other actor to mutate your own state,


@@ -22,10 +22,10 @@ object TimerDocSpec {
timers.startSingleTimer(TickKey, FirstTick, 500.millis)
def receive = {
case FirstTick ⇒
case FirstTick =>
// do something useful here
timers.startPeriodicTimer(TickKey, Tick, 1.second)
case Tick ⇒
case Tick =>
// do something useful here
}
}


@@ -5,7 +5,7 @@
package docs.actor
//#imports
import java.lang.String.{ valueOf ⇒ println }
import java.lang.String.{ valueOf => println }
import akka.actor.{ ActorContext, ActorRef, TypedActor, TypedProps }
import akka.routing.RoundRobinGroup
@@ -16,7 +16,7 @@ import scala.concurrent.duration._
//#imports
//Mr funny man avoids printing to stdout AND keeping docs alright
import java.lang.String.{ valueOf ⇒ println }
import java.lang.String.{ valueOf => println }
//#typed-actor-iface
trait Squarer {
@@ -112,7 +112,7 @@ class TypedActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) {
//#typed-actor-extension-tools
} catch {
case e: Exception ⇒ //dun care
case e: Exception => //dun care
}
}
@@ -182,7 +182,7 @@ class TypedActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) {
//Use "childSquarer" as a Squarer
//#typed-actor-hierarchy
} catch {
case e: Exception ⇒ //ignore
case e: Exception => //ignore
}
}
@@ -205,7 +205,7 @@ class TypedActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) {
// prepare routees
val routees: List[HasName] = List.fill(5) { namedActor() }
val routeePaths = routees map { r ⇒
val routeePaths = routees map { r =>
TypedActor(system).getActorRefFor(r).path.toStringWithoutAddress
}


@@ -35,16 +35,16 @@ class UnnestedReceives extends Actor {
}
def receive = {
case 'Replay ⇒ //Our first message should be a 'Replay message, all others are invalid
case 'Replay => //Our first message should be a 'Replay message, all others are invalid
allOldMessages() foreach process //Process all old messages/events
become { //Switch behavior to look for the GoAhead signal
case 'GoAhead ⇒ //When we get the GoAhead signal we process all our buffered messages/events
case 'GoAhead => //When we get the GoAhead signal we process all our buffered messages/events
queue foreach process
queue.clear
become { //Then we change behavior to process incoming messages/events as they arrive
case msg ⇒ process(msg)
case msg => process(msg)
}
case msg ⇒ //While we haven't gotten the GoAhead signal, buffer all incoming messages
case msg => //While we haven't gotten the GoAhead signal, buffer all incoming messages
queue += msg //Here you have full control, you can handle overflow etc
}
}


@@ -60,7 +60,7 @@ class AgentDocSpec extends AkkaSpec {
agent send (_ * 2)
//#send
def longRunningOrBlockingFunction = (i: Int) ⇒ i * 1 // Just for the example code
def longRunningOrBlockingFunction = (i: Int) => i * 1 // Just for the example code
def someExecutionContext() = scala.concurrent.ExecutionContext.Implicits.global // Just for the example code
//#send-off
// the ExecutionContext you want to run the function on
@@ -83,7 +83,7 @@ class AgentDocSpec extends AkkaSpec {
val f3: Future[Int] = agent alter (_ * 2)
//#alter
def longRunningOrBlockingFunction = (i: Int) ⇒ i * 1 // Just for the example code
def longRunningOrBlockingFunction = (i: Int) => i * 1 // Just for the example code
def someExecutionContext() = ExecutionContext.global // Just for the example code
//#alter-off
@@ -104,7 +104,7 @@ class AgentDocSpec extends AkkaSpec {
import scala.concurrent.stm._
def transfer(from: Agent[Int], to: Agent[Int], amount: Int): Boolean = {
atomic { txn ⇒
atomic { txn =>
if (from.get < amount) false
else {
from send (_ - amount)
@@ -135,19 +135,19 @@ class AgentDocSpec extends AkkaSpec {
val agent2 = Agent(5)
// uses foreach
for (value ← agent1)
for (value <- agent1)
println(value)
// uses map
val agent3 = for (value ← agent1) yield value + 1
val agent3 = for (value <- agent1) yield value + 1
// or using map directly
val agent4 = agent1 map (_ + 1)
// uses flatMap
val agent5 = for {
value1 ← agent1
value2 ← agent2
value1 <- agent1
value2 <- agent2
} yield value1 + value2
//#monadic-example


@@ -15,7 +15,7 @@ object Consumers {
def endpointUri = "file:data/input/actor"
def receive = {
case msg: CamelMessage ⇒ println("received %s" format msg.bodyAs[String])
case msg: CamelMessage => println("received %s" format msg.bodyAs[String])
}
}
//#Consumer1
@@ -28,7 +28,7 @@ object Consumers {
def endpointUri = "jetty:http://localhost:8877/camel/default"
def receive = {
case msg: CamelMessage ⇒ sender() ! ("Hello %s" format msg.bodyAs[String])
case msg: CamelMessage => sender() ! ("Hello %s" format msg.bodyAs[String])
}
}
//#Consumer2
@@ -45,7 +45,7 @@ object Consumers {
def endpointUri = "jms:queue:test"
def receive = {
case msg: CamelMessage ⇒
case msg: CamelMessage =>
sender() ! Ack
// on success
// ..
@@ -65,7 +65,7 @@ object Consumers {
def endpointUri = "jetty:http://localhost:8877/camel/default"
override def replyTimeout = 500 millis
def receive = {
case msg: CamelMessage ⇒ sender() ! ("Hello %s" format msg.bodyAs[String])
case msg: CamelMessage => sender() ! ("Hello %s" format msg.bodyAs[String])
}
}
//#Consumer4


@@ -18,9 +18,9 @@ object CustomRoute {
import akka.camel._
class Responder extends Actor {
def receive = {
case msg: CamelMessage ⇒
case msg: CamelMessage =>
sender() ! (msg.mapBody {
body: String ⇒ "received %s" format body
body: String => "received %s" format body
})
}
}
@@ -47,9 +47,9 @@ object CustomRoute {
class ErrorThrowingConsumer(override val endpointUri: String) extends Consumer {
def receive = {
case msg: CamelMessage ⇒ throw new Exception("error: %s" format msg.body)
case msg: CamelMessage => throw new Exception("error: %s" format msg.body)
}
override def onRouteDefinition = (rd) ⇒ rd.onException(classOf[Exception]).
override def onRouteDefinition = (rd) => rd.onException(classOf[Exception]).
handled(true).transform(Builder.exceptionMessage).end
final override def preRestart(reason: Throwable, message: Option[Any]): Unit = {


@@ -21,8 +21,8 @@ object Introduction {
def endpointUri = "mina2:tcp://localhost:6200?textline=true"
def receive = {
case msg: CamelMessage ⇒ { /* ... */ }
case _ ⇒ { /* ... */ }
case msg: CamelMessage => { /* ... */ }
case _ => { /* ... */ }
}
}
@@ -41,8 +41,8 @@ object Introduction {
def endpointUri = "jetty:http://localhost:8877/example"
def receive = {
case msg: CamelMessage ⇒ { /* ... */ }
case _ ⇒ { /* ... */ }
case msg: CamelMessage => { /* ... */ }
case _ => { /* ... */ }
}
}
//#Consumer
@@ -91,8 +91,8 @@ object Introduction {
def endpointUri = "mina2:tcp://localhost:6200?textline=true"
def receive = {
case msg: CamelMessage ⇒ { /* ... */ }
case _ ⇒ { /* ... */ }
case msg: CamelMessage => { /* ... */ }
case _ => { /* ... */ }
}
}
val system = ActorSystem("some-system")


@@ -37,7 +37,7 @@ object Producers {
class ResponseReceiver extends Actor {
def receive = {
case msg: CamelMessage ⇒
case msg: CamelMessage =>
// do something with the forwarded response
}
}
@@ -65,11 +65,11 @@ object Producers {
def endpointUri = uri
def upperCase(msg: CamelMessage) = msg.mapBody {
body: String ⇒ body.toUpperCase
body: String => body.toUpperCase
}
override def transformOutgoingMessage(msg: Any) = msg match {
case msg: CamelMessage ⇒ upperCase(msg)
case msg: CamelMessage => upperCase(msg)
}
}
//#TransformOutgoingMessage
@@ -110,7 +110,7 @@ object Producers {
import akka.actor.Actor
class MyActor extends Actor {
def receive = {
case msg ⇒
case msg =>
val template = CamelExtension(context.system).template
template.sendBody("direct:news", msg)
}
@@ -122,7 +122,7 @@ object Producers {
import akka.actor.Actor
class MyActor extends Actor {
def receive = {
case msg ⇒
case msg =>
val template = CamelExtension(context.system).template
sender() ! template.requestBody("direct:news", msg)
}


@@ -13,7 +13,7 @@ object PublishSubscribe {
def endpointUri = uri
def receive = {
case msg: CamelMessage ⇒ println("%s received: %s" format (name, msg.body))
case msg: CamelMessage => println("%s received: %s" format (name, msg.body))
}
}
@@ -29,7 +29,7 @@ object PublishSubscribe {
def endpointUri = uri
def receive = {
case msg: CamelMessage ⇒ {
case msg: CamelMessage => {
publisher ! msg.bodyAs[String]
sender() ! ("message published")
}


@@ -36,9 +36,9 @@ class DangerousActor extends Actor with ActorLogging {
def dangerousCall: String = "This really isn't that dangerous of a call after all"
def receive = {
case "is my middle name" ⇒
case "is my middle name" =>
breaker.withCircuitBreaker(Future(dangerousCall)) pipeTo sender()
case "block for me" ⇒
case "block for me" =>
sender() ! breaker.withSyncCircuitBreaker(dangerousCall)
}
//#circuit-breaker-usage
@@ -62,16 +62,16 @@ class TellPatternActor(recipient: ActorRef) extends Actor with ActorLogging {
import akka.actor.ReceiveTimeout
def receive = {
case "call" if breaker.isClosed ⇒ {
case "call" if breaker.isClosed => {
recipient ! "message"
}
case "response" ⇒ {
case "response" => {
breaker.succeed()
}
case err: Throwable ⇒ {
case err: Throwable => {
breaker.fail()
}
case ReceiveTimeout ⇒ {
case ReceiveTimeout => {
breaker.fail()
}
}
@@ -82,9 +82,9 @@ class EvenNoFailureActor extends Actor {
import context.dispatcher
//#even-no-as-failure
def luckyNumber(): Future[Int] = {
val evenNumberAsFailure: Try[Int] ⇒ Boolean = {
case Success(n) ⇒ n % 2 == 0
case Failure(_) ⇒ true
val evenNumberAsFailure: Try[Int] => Boolean = {
case Success(n) => n % 2 == 0
case Failure(_) => true
}
val breaker =
@@ -100,6 +100,6 @@ class EvenNoFailureActor extends Actor {
//#even-no-as-failure
override def receive = {
case x: Int ⇒
case x: Int =>
}
}


@@ -19,8 +19,8 @@ class FactorialBackend extends Actor with ActorLogging {
import context.dispatcher
def receive = {
case (n: Int) ⇒
Future(factorial(n)) map { result ⇒ (n, result) } pipeTo sender()
case (n: Int) =>
Future(factorial(n)) map { result => (n, result) } pipeTo sender()
}
def factorial(n: Int): BigInt = {


@@ -31,13 +31,13 @@ class FactorialFrontend(upToN: Int, repeat: Boolean) extends Actor with ActorLog
}
def receive = {
case (n: Int, factorial: BigInt) ⇒
case (n: Int, factorial: BigInt) =>
if (n == upToN) {
log.debug("{}! = {}", n, factorial)
if (repeat) sendJobs()
else context.stop(self)
}
case ReceiveTimeout ⇒
case ReceiveTimeout =>
log.info("Timeout")
sendJobs()
}


@@ -27,24 +27,24 @@ class MetricsListener extends Actor with ActorLogging {
override def postStop(): Unit = extension.unsubscribe(self)
def receive = {
case ClusterMetricsChanged(clusterMetrics) ⇒
clusterMetrics.filter(_.address == selfAddress) foreach { nodeMetrics ⇒
case ClusterMetricsChanged(clusterMetrics) =>
clusterMetrics.filter(_.address == selfAddress) foreach { nodeMetrics =>
logHeap(nodeMetrics)
logCpu(nodeMetrics)
}
case state: CurrentClusterState ⇒ // Ignore.
case state: CurrentClusterState => // Ignore.
}
def logHeap(nodeMetrics: NodeMetrics): Unit = nodeMetrics match {
case HeapMemory(address, timestamp, used, committed, max) ⇒
case HeapMemory(address, timestamp, used, committed, max) =>
log.info("Used heap: {} MB", used.doubleValue / 1024 / 1024)
case _ ⇒ // No heap info.
case _ => // No heap info.
}
def logCpu(nodeMetrics: NodeMetrics): Unit = nodeMetrics match {
case Cpu(address, timestamp, Some(systemLoadAverage), cpuCombined, cpuStolen, processors) ⇒
case Cpu(address, timestamp, Some(systemLoadAverage), cpuCombined, cpuStolen, processors) =>
log.info("Load: {} ({} processors)", systemLoadAverage, processors)
case _ ⇒ // No cpu info.
case _ => // No cpu info.
}
}
//#metrics-listener


@@ -23,14 +23,14 @@ class SimpleClusterListener extends Actor with ActorLogging {
override def postStop(): Unit = cluster.unsubscribe(self)
def receive = {
case MemberUp(member) ⇒
case MemberUp(member) =>
log.info("Member is Up: {}", member.address)
case UnreachableMember(member) ⇒
case UnreachableMember(member) =>
log.info("Member detected as unreachable: {}", member)
case MemberRemoved(member, previousStatus) ⇒
case MemberRemoved(member, previousStatus) =>
log.info(
"Member is Removed: {} after {}",
member.address, previousStatus)
case _: MemberEvent ⇒ // ignore
case _: MemberEvent => // ignore
}
}


@@ -33,16 +33,16 @@ class SimpleClusterListener2 extends Actor with ActorLogging {
override def postStop(): Unit = cluster.unsubscribe(self)
def receive = {
case state: CurrentClusterState ⇒
case state: CurrentClusterState =>
log.info("Current members: {}", state.members.mkString(", "))
case MemberUp(member) ⇒
case MemberUp(member) =>
log.info("Member is Up: {}", member.address)
case UnreachableMember(member) ⇒
case UnreachableMember(member) =>
log.info("Member detected as unreachable: {}", member)
case MemberRemoved(member, previousStatus) ⇒
case MemberRemoved(member, previousStatus) =>
log.info(
"Member is Removed: {} after {}",
member.address, previousStatus)
case _: MemberEvent ⇒ // ignore
case _: MemberEvent => // ignore
}
}


@@ -29,10 +29,10 @@ class TransformationBackend extends Actor {
override def postStop(): Unit = cluster.unsubscribe(self)
def receive = {
case TransformationJob(text) ⇒ sender() ! TransformationResult(text.toUpperCase)
case state: CurrentClusterState ⇒
case TransformationJob(text) => sender() ! TransformationResult(text.toUpperCase)
case state: CurrentClusterState =>
state.members.filter(_.status == MemberStatus.Up) foreach register
case MemberUp(m) ⇒ register(m)
case MemberUp(m) => register(m)
}
def register(member: Member): Unit =


@@ -24,18 +24,18 @@ class TransformationFrontend extends Actor {
var jobCounter = 0
def receive = {
case job: TransformationJob if backends.isEmpty ⇒
case job: TransformationJob if backends.isEmpty =>
sender() ! JobFailed("Service unavailable, try again later", job)
case job: TransformationJob ⇒
case job: TransformationJob =>
jobCounter += 1
backends(jobCounter % backends.size) forward job
case BackendRegistration if !backends.contains(sender()) ⇒
case BackendRegistration if !backends.contains(sender()) =>
context watch sender()
backends = backends :+ sender()
case Terminated(a) ⇒
case Terminated(a) =>
backends = backends.filterNot(_ == a)
}
}
@@ -57,7 +57,7 @@ object TransformationFrontend {
system.scheduler.schedule(2.seconds, 2.seconds) {
implicit val timeout = Timeout(5 seconds)
(frontend ? TransformationJob("hello-" + counter.incrementAndGet()))
.foreach { result ⇒ println(result) }
.foreach { result => println(result) }
}
}


@@ -10,7 +10,7 @@ class SupervisorActor(childProps: Props, override val supervisorStrategy: Superv
val child = context.actorOf(childProps, "supervised-child")
def receive = {
case msg ⇒ child forward msg
case msg => child forward msg
}
}
//#singleton-supervisor-actor


@@ -73,7 +73,7 @@ object DistributedDataDocSpec {
replicator ! Subscribe(DataKey, self)
def receive = {
case Tick ⇒
case Tick =>
val s = ThreadLocalRandom.current().nextInt(97, 123).toChar.toString
if (ThreadLocalRandom.current().nextBoolean()) {
// add
@@ -85,9 +85,9 @@ object DistributedDataDocSpec {
replicator ! Update(DataKey, ORSet.empty[String], WriteLocal)(_ remove s)
}
case _: UpdateResponse[_] ⇒ // ignore
case _: UpdateResponse[_] => // ignore
case c @ Changed(DataKey) ⇒
case c @ Changed(DataKey) =>
val data = c.get(DataKey)
log.info("Current elements: {}", data.elements)
}
@@ -129,19 +129,19 @@ class DistributedDataDocSpec extends AkkaSpec(DistributedDataDocSpec.config) {
probe.expectMsgType[UpdateResponse[_]] match {
//#update-response1
case UpdateSuccess(Counter1Key, req) ⇒ // ok
case UpdateSuccess(Counter1Key, req) => // ok
//#update-response1
case unexpected ⇒ fail("Unexpected response: " + unexpected)
case unexpected => fail("Unexpected response: " + unexpected)
}
probe.expectMsgType[UpdateResponse[_]] match {
//#update-response2
case UpdateSuccess(Set1Key, req) ⇒ // ok
case UpdateTimeout(Set1Key, req) ⇒
case UpdateSuccess(Set1Key, req) => // ok
case UpdateTimeout(Set1Key, req) =>
// write to 3 nodes failed within 1.second
//#update-response2
case UpdateSuccess(Set2Key, None) ⇒
case unexpected ⇒ fail("Unexpected response: " + unexpected)
case UpdateSuccess(Set2Key, None) =>
case unexpected => fail("Unexpected response: " + unexpected)
}
}
@@ -158,14 +158,14 @@ class DistributedDataDocSpec extends AkkaSpec(DistributedDataDocSpec.config) {
val Counter1Key = PNCounterKey("counter1")
def receive: Receive = {
case "increment" ⇒
case "increment" =>
// incoming command to increase the counter
val upd = Update(Counter1Key, PNCounter(), writeTwo, request = Some(sender()))(_ :+ 1)
replicator ! upd
case UpdateSuccess(Counter1Key, Some(replyTo: ActorRef)) ⇒
case UpdateSuccess(Counter1Key, Some(replyTo: ActorRef)) =>
replyTo ! "ack"
case UpdateTimeout(Counter1Key, Some(replyTo: ActorRef)) ⇒
case UpdateTimeout(Counter1Key, Some(replyTo: ActorRef)) =>
replyTo ! "nack"
}
//#update-request-context
@@ -196,24 +196,24 @@ class DistributedDataDocSpec extends AkkaSpec(DistributedDataDocSpec.config) {
probe.expectMsgType[GetResponse[_]] match {
//#get-response1
case g @ GetSuccess(Counter1Key, req) ⇒
case g @ GetSuccess(Counter1Key, req) =>
val value = g.get(Counter1Key).value
case NotFound(Counter1Key, req) ⇒ // key counter1 does not exist
case NotFound(Counter1Key, req) => // key counter1 does not exist
//#get-response1
case unexpected ⇒ fail("Unexpected response: " + unexpected)
case unexpected => fail("Unexpected response: " + unexpected)
}
probe.expectMsgType[GetResponse[_]] match {
//#get-response2
case g @ GetSuccess(Set1Key, req) ⇒
case g @ GetSuccess(Set1Key, req) =>
val elements = g.get(Set1Key).elements
case GetFailure(Set1Key, req) ⇒
case GetFailure(Set1Key, req) =>
// read from 3 nodes failed within 1.second
case NotFound(Set1Key, req) ⇒ // key set1 does not exist
case NotFound(Set1Key, req) => // key set1 does not exist
//#get-response2
case g @ GetSuccess(Set2Key, None) ⇒
case g @ GetSuccess(Set2Key, None) =>
val elements = g.get(Set2Key).elements
case unexpected ⇒ fail("Unexpected response: " + unexpected)
case unexpected => fail("Unexpected response: " + unexpected)
}
}
@@ -230,16 +230,16 @@ class DistributedDataDocSpec extends AkkaSpec(DistributedDataDocSpec.config) {
val Counter1Key = PNCounterKey("counter1")
def receive: Receive = {
case "get-count" ⇒
case "get-count" =>
// incoming request to retrieve current value of the counter
replicator ! Get(Counter1Key, readTwo, request = Some(sender()))
case g @ GetSuccess(Counter1Key, Some(replyTo: ActorRef)) ⇒
case g @ GetSuccess(Counter1Key, Some(replyTo: ActorRef)) =>
val value = g.get(Counter1Key).value.longValue
replyTo ! value
case GetFailure(Counter1Key, Some(replyTo: ActorRef)) ⇒
case GetFailure(Counter1Key, Some(replyTo: ActorRef)) =>
replyTo ! -1L
case NotFound(Counter1Key, Some(replyTo: ActorRef)) ⇒
case NotFound(Counter1Key, Some(replyTo: ActorRef)) =>
replyTo ! 0L
}
//#get-request-context
@@ -259,9 +259,9 @@ class DistributedDataDocSpec extends AkkaSpec(DistributedDataDocSpec.config) {
var currentValue = BigInt(0)
def receive: Receive = {
case c @ Changed(Counter1Key) ⇒
case c @ Changed(Counter1Key) =>
currentValue = c.get(Counter1Key).value
case "get-count" ⇒
case "get-count" =>
// incoming request to retrieve current value of the counter
sender() ! currentValue
}
@@ -306,7 +306,7 @@ class DistributedDataDocSpec extends AkkaSpec(DistributedDataDocSpec.config) {
val m2 = m1.decrement(node, "a", 2)
val m3 = m2.increment(node, "b", 1)
println(m3.get("a")) // 5
m3.entries.foreach { case (key, value) ⇒ println(s"$key -> $value") }
m3.entries.foreach { case (key, value) => println(s"$key -> $value") }
//#pncountermap
}


@@ -49,18 +49,18 @@ class ShoppingCart(userId: String) extends Actor {
//#get-cart
def receiveGetCart: Receive = {
case GetCart ⇒
case GetCart =>
replicator ! Get(DataKey, readMajority, Some(sender()))
case g @ GetSuccess(DataKey, Some(replyTo: ActorRef)) ⇒
case g @ GetSuccess(DataKey, Some(replyTo: ActorRef)) =>
val data = g.get(DataKey)
val cart = Cart(data.entries.values.toSet)
replyTo ! cart
case NotFound(DataKey, Some(replyTo: ActorRef)) ⇒
case NotFound(DataKey, Some(replyTo: ActorRef)) =>
replyTo ! Cart(Set.empty)
case GetFailure(DataKey, Some(replyTo: ActorRef)) ⇒
case GetFailure(DataKey, Some(replyTo: ActorRef)) =>
// ReadMajority failure, try again with local read
replicator ! Get(DataKey, ReadLocal, Some(replyTo))
}
@@ -68,9 +68,9 @@ class ShoppingCart(userId: String) extends Actor {
//#add-item
def receiveAddItem: Receive = {
case cmd @ AddItem(item) ⇒
case cmd @ AddItem(item) =>
val update = Update(DataKey, LWWMap.empty[String, LineItem], writeMajority, Some(cmd)) {
cart ⇒ updateCart(cart, item)
cart => updateCart(cart, item)
}
replicator ! update
}
@@ -78,38 +78,38 @@ class ShoppingCart(userId: String) extends Actor {
def updateCart(data: LWWMap[String, LineItem], item: LineItem): LWWMap[String, LineItem] =
data.get(item.productId) match {
case Some(LineItem(_, _, existingQuantity)) ⇒
case Some(LineItem(_, _, existingQuantity)) =>
data :+ (item.productId -> item.copy(quantity = existingQuantity + item.quantity))
case None ⇒ data :+ (item.productId -> item)
case None => data :+ (item.productId -> item)
}
//#remove-item
def receiveRemoveItem: Receive = {
case cmd @ RemoveItem(productId) ⇒
case cmd @ RemoveItem(productId) =>
// Try to fetch latest from a majority of nodes first, since ORMap
// remove must have seen the item to be able to remove it.
replicator ! Get(DataKey, readMajority, Some(cmd))
case GetSuccess(DataKey, Some(RemoveItem(productId))) ⇒
case GetSuccess(DataKey, Some(RemoveItem(productId))) =>
replicator ! Update(DataKey, LWWMap(), writeMajority, None) {
_.remove(node, productId)
}
case GetFailure(DataKey, Some(RemoveItem(productId))) ⇒
case GetFailure(DataKey, Some(RemoveItem(productId))) =>
// ReadMajority failed, fall back to best effort local value
replicator ! Update(DataKey, LWWMap(), writeMajority, None) {
_.remove(node, productId)
}
case NotFound(DataKey, Some(RemoveItem(productId))) ⇒
case NotFound(DataKey, Some(RemoveItem(productId))) =>
// nothing to remove
}
//#remove-item
def receiveOther: Receive = {
case _: UpdateSuccess[_] | _: UpdateTimeout[_] ⇒
case _: UpdateSuccess[_] | _: UpdateTimeout[_] =>
// UpdateTimeout, will eventually be replicated
case e: UpdateFailure[_] ⇒ throw new IllegalStateException("Unexpected failure: " + e)
case e: UpdateFailure[_] => throw new IllegalStateException("Unexpected failure: " + e)
}
}


@@ -23,8 +23,8 @@ class TwoPhaseSetSerializer(val system: ExtendedActorSystem)
override def identifier = 99999
override def toBinary(obj: AnyRef): Array[Byte] = obj match {
case m: TwoPhaseSet ⇒ twoPhaseSetToProto(m).toByteArray
case _ ⇒ throw new IllegalArgumentException(
case m: TwoPhaseSet => twoPhaseSetToProto(m).toByteArray
case _ => throw new IllegalArgumentException(
s"Can't serialize object of type ${obj.getClass}")
}
@@ -54,8 +54,8 @@ class TwoPhaseSetSerializer(val system: ExtendedActorSystem)
val msg = TwoPhaseSetMessages.TwoPhaseSet.parseFrom(bytes)
val addsSet = msg.getAddsList.iterator.asScala.toSet
val removalsSet = msg.getRemovalsList.iterator.asScala.toSet
val adds = addsSet.foldLeft(GSet.empty[String])((acc, el) ⇒ acc.add(el))
val removals = removalsSet.foldLeft(GSet.empty[String])((acc, el) ⇒ acc.add(el))
val adds = addsSet.foldLeft(GSet.empty[String])((acc, el) => acc.add(el))
val removals = removalsSet.foldLeft(GSet.empty[String])((acc, el) => acc.add(el))
// GSet will accumulate deltas when adding elements,
// but those are not of interest in the result of the deserialization
TwoPhaseSet(adds.resetDelta, removals.resetDelta)
@@ -67,8 +67,8 @@ class TwoPhaseSetSerializerWithCompression(system: ExtendedActorSystem)
extends TwoPhaseSetSerializer(system) {
//#compression
override def toBinary(obj: AnyRef): Array[Byte] = obj match {
case m: TwoPhaseSet ⇒ compress(twoPhaseSetToProto(m))
case _ ⇒ throw new IllegalArgumentException(
case m: TwoPhaseSet => compress(twoPhaseSetToProto(m))
case _ => throw new IllegalArgumentException(
s"Can't serialize object of type ${obj.getClass}")
}


@@ -23,8 +23,8 @@ class TwoPhaseSetSerializer2(val system: ExtendedActorSystem)
val replicatedDataSerializer = new ReplicatedDataSerializer(system)
override def toBinary(obj: AnyRef): Array[Byte] = obj match {
case m: TwoPhaseSet ⇒ twoPhaseSetToProto(m).toByteArray
case _ ⇒ throw new IllegalArgumentException(
case m: TwoPhaseSet => twoPhaseSetToProto(m).toByteArray
case _ => throw new IllegalArgumentException(
s"Can't serialize object of type ${obj.getClass}")
}


@@ -229,16 +229,16 @@ object DispatcherDocSpec {
// Create a new PriorityGenerator, lower prio means more important
PriorityGenerator {
// 'highpriority messages should be treated first if possible
case 'highpriority ⇒ 0
case 'highpriority => 0
// 'lowpriority messages should be treated last if possible
case 'lowpriority ⇒ 2
case 'lowpriority => 2
// PoisonPill when no other left
case PoisonPill ⇒ 3
case PoisonPill => 3
// We default to 1, which is in between high and low
case otherwise ⇒ 1
case otherwise => 1
})
//#prio-mailbox
@@ -250,7 +250,7 @@ object DispatcherDocSpec {
class MyActor extends Actor {
def receive = {
case x ⇒
case x =>
}
}
@@ -267,7 +267,7 @@ object DispatcherDocSpec {
with RequiresMessageQueue[MyUnboundedMessageQueueSemantics] {
//#require-mailbox-on-actor
def receive = {
case _ ⇒
case _ =>
}
//#require-mailbox-on-actor
// ...
@@ -370,7 +370,7 @@ class DispatcherDocSpec extends AkkaSpec(DispatcherDocSpec.config) {
self ! PoisonPill
def receive = {
case x ⇒ log.info(x.toString)
case x => log.info(x.toString)
}
}
val a = system.actorOf(Props(classOf[Logger], this).withDispatcher(
@@ -389,7 +389,7 @@ class DispatcherDocSpec extends AkkaSpec(DispatcherDocSpec.config) {
//#prio-dispatcher
watch(a)
expectMsgPF() { case Terminated(`a`) ⇒ () }
expectMsgPF() { case Terminated(`a`) => () }
}
}
@@ -407,7 +407,7 @@ class DispatcherDocSpec extends AkkaSpec(DispatcherDocSpec.config) {
self ! PoisonPill
def receive = {
case x ⇒ log.info(x.toString)
case x => log.info(x.toString)
}
}
val a = system.actorOf(Props(classOf[Logger], this).withDispatcher(
@@ -422,7 +422,7 @@ class DispatcherDocSpec extends AkkaSpec(DispatcherDocSpec.config) {
//#control-aware-dispatcher
watch(a)
expectMsgPF() { case Terminated(`a`) ⇒ () }
expectMsgPF() { case Terminated(`a`) => () }
}
}
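The `PriorityGenerator` hunk above only takes effect once the mailbox is registered in configuration and a dispatcher points at it; a sketch of that wiring (the mailbox class name is an assumption based on the surrounding spec):

```hocon
# application.conf: dispatcher backed by the priority mailbox
prio-dispatcher {
  mailbox-type = "docs.dispatcher.DispatcherDocSpec$MyPrioMailbox"
}
```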


@@ -22,8 +22,8 @@ object LoggingDocSpec {
reason.getMessage, message.getOrElse(""))
}
def receive = {
case "test" ⇒ log.info("Received test")
case x ⇒ log.warning("Received unknown message: {}", x)
case "test" => log.info("Received test")
case x => log.warning("Received unknown message: {}", x)
}
}
//#my-actor
@@ -34,7 +34,7 @@ object LoggingDocSpec {
val log = Logging(this)
def receive = {
case _ ⇒ {
case _ => {
//#mdc
val mdc = Map("requestId" -> 1234, "visitorId" -> 5678)
log.mdc(mdc)
@@ -60,14 +60,14 @@ object LoggingDocSpec {
reqId += 1
val always = Map("requestId" -> reqId)
val perMessage = currentMessage match {
case r: Req ⇒ Map("visitorId" -> r.visitorId)
case _ ⇒ Map()
case r: Req => Map("visitorId" -> r.visitorId)
case _ => Map()
}
always ++ perMessage
}
def receive: Receive = {
case r: Req ⇒ {
case r: Req => {
log.info(s"Starting new request: ${r.work}")
}
}
@@ -85,11 +85,11 @@ object LoggingDocSpec {
class MyEventListener extends Actor {
def receive = {
case InitializeLogger(_) ⇒ sender() ! LoggerInitialized
case Error(cause, logSource, logClass, message) ⇒ // ...
case Warning(logSource, logClass, message) ⇒ // ...
case Info(logSource, logClass, message) ⇒ // ...
case Debug(logSource, logClass, message) ⇒ // ...
case InitializeLogger(_) => sender() ! LoggerInitialized
case Error(cause, logSource, logClass, message) => // ...
case Warning(logSource, logClass, message) => // ...
case Info(logSource, logClass, message) => // ...
case Debug(logSource, logClass, message) => // ...
}
}
//#my-event-listener
@@ -121,7 +121,7 @@ object LoggingDocSpec {
class DeadLetterListener extends Actor {
def receive = {
case d: DeadLetter ⇒ println(d)
case d: DeadLetter => println(d)
}
}
//#deadletters
@@ -133,8 +133,8 @@ object LoggingDocSpec {
class Listener extends Actor {
def receive = {
case m: Jazz ⇒ println(s"${self.path.name} is listening to: ${m.artist}")
case m: Electronic ⇒ println(s"${self.path.name} is listening to: ${m.artist}")
case m: Jazz => println(s"${self.path.name} is listening to: ${m.artist}")
case m: Electronic => println(s"${self.path.name} is listening to: ${m.artist}")
}
}
//#superclass-subscription-eventstream


@@ -61,7 +61,7 @@ object ExtensionDocSpec {
class MyActor extends Actor {
def receive = {
case someMessage ⇒
case someMessage =>
CountExtension(context.system).increment()
}
}
@@ -69,12 +69,12 @@ object ExtensionDocSpec {
//#extension-usage-actor-trait
trait Counting { self: Actor ⇒
trait Counting { self: Actor =>
def increment() = CountExtension(context.system).increment()
}
class MyCounterActor extends Actor with Counting {
def receive = {
case someMessage ⇒ increment()
case someMessage => increment()
}
}
//#extension-usage-actor-trait


@@ -67,7 +67,7 @@ object SettingsExtensionDocSpec {
//#extension-usage-actor
def receive = {
case someMessage ⇒
case someMessage =>
}
def connect(dbUri: String, circuitBreakerTimeout: Duration) = {


@ -21,12 +21,12 @@ object MyActor {
class MyActor extends Actor {
import MyActor._
def receive = {
case message: Message message match {
case BarMessage(bar) sender() ! BazMessage("Got " + bar)
case message: Message => message match {
case BarMessage(bar) => sender() ! BazMessage("Got " + bar)
// warning here:
// "match may not be exhaustive. It would fail on the following input: FooMessage(_)"
//#exhaustiveness-check
case FooMessage(_) ⇒ // avoid the warning in our build logs
case FooMessage(_) => // avoid the warning in our build logs
//#exhaustiveness-check
}
}

View file

@ -18,9 +18,9 @@ object FutureDocSpec {
class MyActor extends Actor {
def receive = {
case x: String ⇒ sender() ! x.toUpperCase
case x: Int if x < 0 ⇒ sender() ! Status.Failure(new ArithmeticException("Negative values not supported"))
case x: Int ⇒ sender() ! x
case x: String => sender() ! x.toUpperCase
case x: Int if x < 0 => sender() ! Status.Failure(new ArithmeticException("Negative values not supported"))
case x: Int => sender() ! x
}
}
@ -29,7 +29,7 @@ object FutureDocSpec {
class OddActor extends Actor {
var n = 1
def receive = {
case GetNext ⇒
case GetNext =>
sender() ! n
n += 2
}
@ -44,7 +44,7 @@ object FutureDocSpec {
implicit val timeout: Timeout = 5.seconds
def receive = {
case _ ⇒
case _ =>
val future = target ? "some message"
future pipeTo sender() // use the pipe pattern
}
@ -64,7 +64,7 @@ object FutureDocSpec {
var internalData: UserData = UserData("initial data")
def receive = {
case Get ⇒
case Get =>
sender() ! internalData
}
}
@ -85,7 +85,7 @@ object FutureDocSpec {
implicit val ec: ExecutionContext = context.dispatcher
def receive = {
case Get ⇒
case Get =>
// user's historical activities are retrieved
// via the separate repository
repository.queryHistoricalActivities(userId) pipeTo sender()
@ -109,9 +109,9 @@ object FutureDocSpec {
implicit val timeout = Timeout(5 seconds)
def receive = {
case GetUserData ⇒
case GetUserData =>
(userData ? UserDataActor.Get) pipeTo sender()
case GetUserActivities ⇒
case GetUserActivities =>
(userActivities ? UserActivityActor.Get) pipeTo sender()
}
}
@ -131,7 +131,7 @@ class FutureDocSpec extends AkkaSpec {
import FutureDocSpec._
import system.dispatcher
val println: PartialFunction[Any, Unit] = { case _ ⇒ }
val println: PartialFunction[Any, Unit] = { case _ => }
"demonstrate usage custom ExecutionContext" in {
val yourExecutorServiceGoesHere = java.util.concurrent.Executors.newSingleThreadExecutor()
@ -198,7 +198,7 @@ class FutureDocSpec extends AkkaSpec {
val f1 = Future {
"Hello" + "World"
}
val f2 = f1 map { x ⇒
val f2 = f1 map { x =>
x.length
}
f2 foreach println
@ -214,8 +214,8 @@ class FutureDocSpec extends AkkaSpec {
"Hello" + "World"
}
val f2 = Future.successful(3)
val f3 = f1 map { x ⇒
f2 map { y ⇒
val f3 = f1 map { x =>
f2 map { y =>
x.length * y
}
}
@ -230,8 +230,8 @@ class FutureDocSpec extends AkkaSpec {
"Hello" + "World"
}
val f2 = Future.successful(3)
val f3 = f1 flatMap { x ⇒
f2 map { y ⇒
val f3 = f1 flatMap { x =>
f2 map { y =>
x.length * y
}
}
@ -250,7 +250,7 @@ class FutureDocSpec extends AkkaSpec {
val failedFilter = future1.filter(_ % 2 == 1).recover {
// When filter fails, it will have a java.util.NoSuchElementException
case m: NoSuchElementException ⇒ 0
case m: NoSuchElementException => 0
}
failedFilter foreach println
@ -264,9 +264,9 @@ class FutureDocSpec extends AkkaSpec {
"demonstrate usage of for comprehension" in {
//#for-comprehension
val f = for {
a ← Future(10 / 2) // 10 / 2 = 5
b ← Future(a + 1) // 5 + 1 = 6
c ← Future(a - 1) // 5 - 1 = 4
a <- Future(10 / 2) // 10 / 2 = 5
b <- Future(a + 1) // 5 + 1 = 6
c <- Future(a - 1) // 5 - 1 = 4
if c > 3 // Future.filter
} yield b * c // 6 * 4 = 24
@ -318,9 +318,9 @@ class FutureDocSpec extends AkkaSpec {
val f2 = ask(actor2, msg2)
val f3 = for {
a ← f1.mapTo[Int]
b ← f2.mapTo[Int]
c ← ask(actor3, (a + b)).mapTo[Int]
a <- f1.mapTo[Int]
b <- f2.mapTo[Int]
c <- ask(actor3, (a + b)).mapTo[Int]
} yield c
f3 foreach println
@ -348,7 +348,7 @@ class FutureDocSpec extends AkkaSpec {
"demonstrate usage of sequence" in {
//#sequence
val futureList = Future.sequence((1 to 100).toList.map(x ⇒ Future(x * 2 - 1)))
val futureList = Future.sequence((1 to 100).toList.map(x => Future(x * 2 - 1)))
val oddSum = futureList.map(_.sum)
oddSum foreach println
//#sequence
@ -357,7 +357,7 @@ class FutureDocSpec extends AkkaSpec {
"demonstrate usage of traverse" in {
//#traverse
val futureList = Future.traverse((1 to 100).toList)(x ⇒ Future(x * 2 - 1))
val futureList = Future.traverse((1 to 100).toList)(x => Future(x * 2 - 1))
val oddSum = futureList.map(_.sum)
oddSum foreach println
//#traverse
@ -367,7 +367,7 @@ class FutureDocSpec extends AkkaSpec {
"demonstrate usage of fold" in {
//#fold
// Create a sequence of Futures
val futures = for (i ← 1 to 1000) yield Future(i * 2)
val futures = for (i <- 1 to 1000) yield Future(i * 2)
val futureSum = Future.fold(futures)(0)(_ + _)
futureSum foreach println
//#fold
@ -377,7 +377,7 @@ class FutureDocSpec extends AkkaSpec {
"demonstrate usage of reduce" in {
//#reduce
// Create a sequence of Futures
val futures = for (i ← 1 to 1000) yield Future(i * 2)
val futures = for (i <- 1 to 1000) yield Future(i * 2)
val futureSum = Future.reduce(futures)(_ + _)
futureSum foreach println
//#reduce
@ -390,7 +390,7 @@ class FutureDocSpec extends AkkaSpec {
val msg1 = -1
//#recover
val future = akka.pattern.ask(actor, msg1) recover {
case e: ArithmeticException ⇒ 0
case e: ArithmeticException => 0
}
future foreach println
//#recover
@ -403,8 +403,8 @@ class FutureDocSpec extends AkkaSpec {
val msg1 = -1
//#try-recover
val future = akka.pattern.ask(actor, msg1) recoverWith {
case e: ArithmeticException ⇒ Future.successful(0)
case foo: IllegalArgumentException ⇒
case e: ArithmeticException => Future.successful(0)
case foo: IllegalArgumentException =>
Future.failed[Int](new IllegalStateException("All br0ken!"))
}
future foreach println
@ -416,7 +416,7 @@ class FutureDocSpec extends AkkaSpec {
val future1 = Future { "foo" }
val future2 = Future { "bar" }
//#zip
val future3 = future1 zip future2 map { case (a, b) ⇒ a + " " + b }
val future3 = future1 zip future2 map { case (a, b) => a + " " + b }
future3 foreach println
//#zip
Await.result(future3, 3 seconds) should be("foo bar")
@ -429,9 +429,9 @@ class FutureDocSpec extends AkkaSpec {
def watchSomeTV(): Unit = ()
//#and-then
val result = Future { loadPage(url) } andThen {
case Failure(exception) ⇒ log(exception)
case Failure(exception) => log(exception)
} andThen {
case _ ⇒ watchSomeTV()
case _ => watchSomeTV()
}
result foreach println
//#and-then
@ -455,8 +455,8 @@ class FutureDocSpec extends AkkaSpec {
def doSomethingOnFailure(t: Throwable) = ()
//#onComplete
future onComplete {
case Success(result) ⇒ doSomethingOnSuccess(result)
case Failure(failure) ⇒ doSomethingOnFailure(failure)
case Success(result) => doSomethingOnSuccess(result)
case Failure(failure) => doSomethingOnFailure(failure)
}
//#onComplete
Await.result(future, 3 seconds) should be("foo")
@ -505,7 +505,7 @@ class FutureDocSpec extends AkkaSpec {
}
//Return a new future that will retry up to 10 times
val retried = akka.pattern.retry(
() ⇒ attempt(),
() => attempt(),
10,
100 milliseconds)
//#retry
@ -520,7 +520,7 @@ class FutureDocSpec extends AkkaSpec {
val f = Future("hello")
def receive = {
//#receive-omitted
case _ ⇒
case _ =>
//#receive-omitted
}
}

View file

@ -53,15 +53,15 @@ class EchoManager(handlerClass: Class[_]) extends Actor with ActorLogging {
override def postRestart(thr: Throwable): Unit = context stop self
def receive = {
case Bound(localAddress) ⇒
case Bound(localAddress) =>
log.info("listening on port {}", localAddress.getPort)
case CommandFailed(Bind(_, local, _, _, _)) ⇒
case CommandFailed(Bind(_, local, _, _, _)) =>
log.warning(s"cannot bind to [$local]")
context stop self
//#echo-manager
case Connected(remote, local) ⇒
case Connected(remote, local) =>
log.info("received connection from {}", remote)
val handler = context.actorOf(Props(handlerClass, sender(), remote))
sender() ! Register(handler, keepOpenOnPeerClosed = true)
@ -92,18 +92,18 @@ class EchoHandler(connection: ActorRef, remote: InetSocketAddress)
//#writing
def writing: Receive = {
case Received(data) ⇒
case Received(data) =>
connection ! Write(data, Ack(currentOffset))
buffer(data)
case Ack(ack) ⇒
case Ack(ack) =>
acknowledge(ack)
case CommandFailed(Write(_, Ack(ack))) ⇒
case CommandFailed(Write(_, Ack(ack))) =>
connection ! ResumeWriting
context become buffering(ack)
case PeerClosed ⇒
case PeerClosed =>
if (storage.isEmpty) context stop self
else context become closing
}
@ -115,11 +115,11 @@ class EchoHandler(connection: ActorRef, remote: InetSocketAddress)
var peerClosed = false
{
case Received(data) ⇒ buffer(data)
case WritingResumed ⇒ writeFirst()
case PeerClosed ⇒ peerClosed = true
case Ack(ack) if ack < nack ⇒ acknowledge(ack)
case Ack(ack) ⇒
case Received(data) => buffer(data)
case WritingResumed => writeFirst()
case PeerClosed => peerClosed = true
case Ack(ack) if ack < nack => acknowledge(ack)
case Ack(ack) =>
acknowledge(ack)
if (storage.nonEmpty) {
if (toAck > 0) {
@ -139,19 +139,19 @@ class EchoHandler(connection: ActorRef, remote: InetSocketAddress)
//#closing
def closing: Receive = {
case CommandFailed(_: Write) ⇒
case CommandFailed(_: Write) =>
connection ! ResumeWriting
context.become({
case WritingResumed ⇒
case WritingResumed =>
writeAll()
context.unbecome()
case ack: Int ⇒ acknowledge(ack)
case ack: Int => acknowledge(ack)
}, discardOld = false)
case Ack(ack) ⇒
case Ack(ack) =>
acknowledge(ack)
if (storage.isEmpty) context stop self
}
@ -214,7 +214,7 @@ class EchoHandler(connection: ActorRef, remote: InetSocketAddress)
}
private def writeAll(): Unit = {
for ((data, i) ← storage.zipWithIndex) {
for ((data, i) <- storage.zipWithIndex) {
connection ! Write(data, Ack(storageOffset + i))
}
}
@ -235,17 +235,17 @@ class SimpleEchoHandler(connection: ActorRef, remote: InetSocketAddress)
case object Ack extends Event
def receive = {
case Received(data) ⇒
case Received(data) =>
buffer(data)
connection ! Write(data, Ack)
context.become({
case Received(data) ⇒ buffer(data)
case Ack ⇒ acknowledge()
case PeerClosed ⇒ closing = true
case Received(data) => buffer(data)
case Ack => acknowledge()
case PeerClosed => closing = true
}, discardOld = false)
case PeerClosed ⇒ context stop self
case PeerClosed => context stop self
}
//#storage-omitted

View file

@ -34,14 +34,14 @@ class Server extends Actor {
IO(Tcp) ! Bind(self, new InetSocketAddress("localhost", 0))
def receive = {
case b @ Bound(localAddress) ⇒
case b @ Bound(localAddress) =>
//#do-some-logging-or-setup
context.parent ! b
//#do-some-logging-or-setup
case CommandFailed(_: Bind) ⇒ context stop self
case CommandFailed(_: Bind) => context stop self
case c @ Connected(remote, local) ⇒
case c @ Connected(remote, local) =>
//#server
context.parent ! c
//#server
@ -57,8 +57,8 @@ class Server extends Actor {
class SimplisticHandler extends Actor {
import Tcp._
def receive = {
case Received(data) ⇒ sender() ! Write(data)
case PeerClosed ⇒ context stop self
case Received(data) => sender() ! Write(data)
case PeerClosed => context stop self
}
}
//#simplistic-handler
@ -77,25 +77,25 @@ class Client(remote: InetSocketAddress, listener: ActorRef) extends Actor {
IO(Tcp) ! Connect(remote)
def receive = {
case CommandFailed(_: Connect) ⇒
case CommandFailed(_: Connect) =>
listener ! "connect failed"
context stop self
case c @ Connected(remote, local) ⇒
case c @ Connected(remote, local) =>
listener ! c
val connection = sender()
connection ! Register(self)
context become {
case data: ByteString ⇒
case data: ByteString =>
connection ! Write(data)
case CommandFailed(w: Write) ⇒
case CommandFailed(w: Write) =>
// O/S buffer was full
listener ! "write failed"
case Received(data) ⇒
case Received(data) =>
listener ! data
case "close"
case "close" =>
connection ! Close
case _: ConnectionClosed ⇒
case _: ConnectionClosed =>
listener ! "connection closed"
context stop self
}
@ -108,7 +108,7 @@ class IODocSpec extends AkkaSpec {
class Parent extends Actor {
context.actorOf(Props[Server], "server")
def receive = {
case msg ⇒ testActor forward msg
case msg => testActor forward msg
}
}

View file

@ -27,7 +27,7 @@ object PullReadingExample {
def receive = {
//#pull-accepting
case Bound(localAddress) ⇒
case Bound(localAddress) =>
// Accept connections one by one
sender() ! ResumeAccepting(batchSize = 1)
context.become(listening(sender()))
@ -37,7 +37,7 @@ object PullReadingExample {
//#pull-accepting-cont
def listening(listener: ActorRef): Receive = {
case Connected(remote, local) ⇒
case Connected(remote, local) =>
val handler = context.actorOf(Props(classOf[PullEcho], sender()))
sender() ! Register(handler, keepOpenOnPeerClosed = true)
listener ! ResumeAccepting(batchSize = 1)
@ -54,8 +54,8 @@ object PullReadingExample {
override def preStart: Unit = connection ! ResumeReading
def receive = {
case Received(data) ⇒ connection ! Write(data, Ack)
case Ack ⇒ connection ! ResumeReading
case Received(data) => connection ! Write(data, Ack)
case Ack => connection ! ResumeReading
}
//#pull-reading-echo
}

View file

@ -38,10 +38,10 @@ class Listener(iface: String, group: String, port: Int, sink: ActorRef) extends
//#bind
def receive = {
case b @ Udp.Bound(to) ⇒
case b @ Udp.Bound(to) =>
log.info("Bound to {}", to)
sink ! (b)
case Udp.Received(data, remote) ⇒
case Udp.Received(data, remote) =>
val msg = data.decodeString("utf-8")
log.info("Received '{}' from {}", msg, remote)
sink ! msg
@ -53,7 +53,7 @@ class Sender(iface: String, group: String, port: Int, msg: String) extends Actor
IO(Udp) ! Udp.SimpleSender(List(Inet6ProtocolFamily()))
def receive = {
case Udp.SimpleSenderReady ⇒ {
case Udp.SimpleSenderReady => {
val remote = new InetSocketAddress(s"$group%$iface", port)
log.info("Sending message to {}", remote)
sender() ! Udp.Send(ByteString(msg), remote)

View file

@ -20,7 +20,7 @@ class ScalaUdpMulticastSpec extends TestKit(ActorSystem("ScalaUdpMulticastSpec")
"listener" should {
"send message back to sink" in {
val ipv6ifaces =
NetworkInterface.getNetworkInterfaces.asScala.toSeq.filter(iface ⇒
NetworkInterface.getNetworkInterfaces.asScala.toSeq.filter(iface =>
iface.supportsMulticast &&
iface.isUp &&
iface.getInetAddresses.asScala.exists(_.isInstanceOf[Inet6Address]))
@ -33,7 +33,7 @@ class ScalaUdpMulticastSpec extends TestKit(ActorSystem("ScalaUdpMulticastSpec")
// on the platform (awsdl0 can't be used on OSX, docker[0-9] can't be used in a docker machine etc.)
// therefore: try hard to find an interface that _does_ work, and only fail if there was any potentially
// working interfaces but all failed
ipv6ifaces.exists { ipv6iface ⇒
ipv6ifaces.exists { ipv6iface =>
// host assigned link local multicast address http://tools.ietf.org/html/rfc3307#section-4.3.2
// generate a random 32 bit multicast address with the high order bit set
val randomAddress: String = (Random.nextInt().abs.toLong | (1L << 31)).toHexString.toUpperCase
@ -51,7 +51,7 @@ class ScalaUdpMulticastSpec extends TestKit(ActorSystem("ScalaUdpMulticastSpec")
true
} catch {
case _: AssertionError ⇒
case _: AssertionError =>
system.log.info("Failed to run test on interface {}", ipv6iface.getDisplayName)
false

View file

@ -25,7 +25,7 @@ object ScalaUdpDocSpec {
IO(Udp) ! Udp.SimpleSender
def receive = {
case Udp.SimpleSenderReady ⇒
case Udp.SimpleSenderReady =>
context.become(ready(sender()))
//#sender
sender() ! Udp.Send(ByteString("hello"), remote)
@ -33,7 +33,7 @@ object ScalaUdpDocSpec {
}
def ready(send: ActorRef): Receive = {
case msg: String ⇒
case msg: String =>
send ! Udp.Send(ByteString(msg), remote)
//#sender
if (msg == "world") send ! PoisonPill
@ -48,7 +48,7 @@ object ScalaUdpDocSpec {
IO(Udp) ! Udp.Bind(self, new InetSocketAddress("localhost", 0))
def receive = {
case Udp.Bound(local) ⇒
case Udp.Bound(local) =>
//#listener
nextActor forward local
//#listener
@ -56,15 +56,15 @@ object ScalaUdpDocSpec {
}
def ready(socket: ActorRef): Receive = {
case Udp.Received(data, remote) ⇒
case Udp.Received(data, remote) =>
val processed = // parse data etc., e.g. using PipelineStage
//#listener
data.utf8String
//#listener
socket ! Udp.Send(data, remote) // example server echoes back
nextActor ! processed
case Udp.Unbind ⇒ socket ! Udp.Unbind
case Udp.Unbound ⇒ context.stop(self)
case Udp.Unbind => socket ! Udp.Unbind
case Udp.Unbound => context.stop(self)
}
}
//#listener
@ -75,7 +75,7 @@ object ScalaUdpDocSpec {
IO(UdpConnected) ! UdpConnected.Connect(self, remote)
def receive = {
case UdpConnected.Connected ⇒
case UdpConnected.Connected =>
context.become(ready(sender()))
//#connected
sender() ! UdpConnected.Send(ByteString("hello"))
@ -83,17 +83,17 @@ object ScalaUdpDocSpec {
}
def ready(connection: ActorRef): Receive = {
case UdpConnected.Received(data) ⇒
case UdpConnected.Received(data) =>
// process data, send it on, etc.
//#connected
if (data.utf8String == "hello")
connection ! UdpConnected.Send(ByteString("world"))
//#connected
case msg: String ⇒
case msg: String =>
connection ! UdpConnected.Send(ByteString(msg))
case UdpConnected.Disconnect ⇒
case UdpConnected.Disconnect =>
connection ! UdpConnected.Disconnect
case UdpConnected.Disconnected ⇒ context.stop(self)
case UdpConnected.Disconnected => context.stop(self)
}
}
//#connected

View file

@ -90,8 +90,8 @@ class BackoffSupervisorDocSpec {
).withAutoReset(10.seconds) // reset if the child does not throw any errors within 10 seconds
.withSupervisorStrategy(
OneForOneStrategy() {
case _: MyException ⇒ SupervisorStrategy.Restart
case _ ⇒ SupervisorStrategy.Escalate
case _: MyException => SupervisorStrategy.Restart
case _ => SupervisorStrategy.Escalate
}))
//#backoff-custom-fail

View file

@ -48,14 +48,14 @@ object PersistenceDocSpec {
//#recovery-completed
override def receiveRecover: Receive = {
case RecoveryCompleted ⇒
case RecoveryCompleted =>
// perform init after recovery, before any other messages
//...
case evt ⇒ //...
case evt => //...
}
override def receiveCommand: Receive = {
case msg ⇒ //...
case msg => //...
}
//#recovery-completed
}
@ -84,10 +84,10 @@ object PersistenceDocSpec {
//#persistence-id-override
override def receiveRecover: Receive = {
case _ ⇒
case _ =>
}
override def receiveCommand: Receive = {
case _ ⇒
case _ =>
}
}
}
@ -128,25 +128,25 @@ object PersistenceDocSpec {
override def persistenceId: String = "persistence-id"
override def receiveCommand: Receive = {
case s: String ⇒ persist(MsgSent(s))(updateState)
case Confirm(deliveryId) ⇒ persist(MsgConfirmed(deliveryId))(updateState)
case s: String => persist(MsgSent(s))(updateState)
case Confirm(deliveryId) => persist(MsgConfirmed(deliveryId))(updateState)
}
override def receiveRecover: Receive = {
case evt: Evt ⇒ updateState(evt)
case evt: Evt => updateState(evt)
}
def updateState(evt: Evt): Unit = evt match {
case MsgSent(s) ⇒
deliver(destination)(deliveryId ⇒ Msg(deliveryId, s))
case MsgSent(s) =>
deliver(destination)(deliveryId => Msg(deliveryId, s))
case MsgConfirmed(deliveryId) ⇒ confirmDelivery(deliveryId)
case MsgConfirmed(deliveryId) => confirmDelivery(deliveryId)
}
}
class MyDestination extends Actor {
def receive = {
case Msg(deliveryId, s) ⇒
case Msg(deliveryId, s) =>
// ...
sender() ! Confirm(deliveryId)
}
@ -166,10 +166,10 @@ object PersistenceDocSpec {
val snapShotInterval = 1000
override def receiveCommand: Receive = {
case SaveSnapshotSuccess(metadata) ⇒ // ...
case SaveSnapshotFailure(metadata, reason) ⇒ // ...
case cmd: String ⇒
persist(s"evt-$cmd") { e ⇒
case SaveSnapshotSuccess(metadata) => // ...
case SaveSnapshotFailure(metadata, reason) => // ...
case cmd: String =>
persist(s"evt-$cmd") { e =>
updateState(e)
if (lastSequenceNr % snapShotInterval == 0 && lastSequenceNr != 0)
saveSnapshot(state)
@ -195,9 +195,9 @@ object PersistenceDocSpec {
var state: Any = _
override def receiveRecover: Receive = {
case SnapshotOffer(metadata, offeredSnapshot) ⇒ state = offeredSnapshot
case RecoveryCompleted ⇒
case event ⇒ // ...
case SnapshotOffer(metadata, offeredSnapshot) => state = offeredSnapshot
case RecoveryCompleted =>
case event => // ...
}
//#snapshot-offer
@ -214,14 +214,14 @@ object PersistenceDocSpec {
override def persistenceId = "my-stable-persistence-id"
override def receiveRecover: Receive = {
case _ ⇒ // handle recovery here
case _ => // handle recovery here
}
override def receiveCommand: Receive = {
case c: String ⇒ {
case c: String => {
sender() ! c
persistAsync(s"evt-$c-1") { e sender() ! e }
persistAsync(s"evt-$c-2") { e sender() ! e }
persistAsync(s"evt-$c-1") { e => sender() ! e }
persistAsync(s"evt-$c-2") { e => sender() ! e }
}
}
}
@ -249,15 +249,15 @@ object PersistenceDocSpec {
override def persistenceId = "my-stable-persistence-id"
override def receiveRecover: Receive = {
case _ ⇒ // handle recovery here
case _ => // handle recovery here
}
override def receiveCommand: Receive = {
case c: String ⇒ {
case c: String => {
sender() ! c
persistAsync(s"evt-$c-1") { e sender() ! e }
persistAsync(s"evt-$c-2") { e sender() ! e }
deferAsync(s"evt-$c-3") { e sender() ! e }
persistAsync(s"evt-$c-1") { e => sender() ! e }
persistAsync(s"evt-$c-2") { e => sender() ! e }
deferAsync(s"evt-$c-3") { e => sender() ! e }
}
}
}
@ -287,15 +287,15 @@ object PersistenceDocSpec {
override def persistenceId = "my-stable-persistence-id"
override def receiveRecover: Receive = {
case _ ⇒ // handle recovery here
case _ => // handle recovery here
}
override def receiveCommand: Receive = {
case c: String ⇒ {
case c: String => {
sender() ! c
persist(s"evt-$c-1") { e sender() ! e }
persist(s"evt-$c-2") { e sender() ! e }
defer(s"evt-$c-3") { e sender() ! e }
persist(s"evt-$c-1") { e => sender() ! e }
persist(s"evt-$c-2") { e => sender() ! e }
defer(s"evt-$c-3") { e => sender() ! e }
}
}
}
@ -308,24 +308,24 @@ object PersistenceDocSpec {
override def persistenceId = "my-stable-persistence-id"
override def receiveRecover: Receive = {
case _ ⇒ // handle recovery here
case _ => // handle recovery here
}
//#nested-persist-persist
override def receiveCommand: Receive = {
case c: String ⇒
case c: String =>
sender() ! c
persist(s"$c-1-outer") { outer1
persist(s"$c-1-outer") { outer1 =>
sender() ! outer1
persist(s"$c-1-inner") { inner1
persist(s"$c-1-inner") { inner1 =>
sender() ! inner1
}
}
persist(s"$c-2-outer") { outer2
persist(s"$c-2-outer") { outer2 =>
sender() ! outer2
persist(s"$c-2-inner") { inner2
persist(s"$c-2-inner") { inner2 =>
sender() ! inner2
}
}
@ -356,20 +356,20 @@ object PersistenceDocSpec {
override def persistenceId = "my-stable-persistence-id"
override def receiveRecover: Receive = {
case _ ⇒ // handle recovery here
case _ => // handle recovery here
}
//#nested-persistAsync-persistAsync
override def receiveCommand: Receive = {
case c: String ⇒
case c: String =>
sender() ! c
persistAsync(c + "-outer-1") { outer
persistAsync(c + "-outer-1") { outer =>
sender() ! outer
persistAsync(c + "-inner-1") { inner sender() ! inner }
persistAsync(c + "-inner-1") { inner => sender() ! inner }
}
persistAsync(c + "-outer-2") { outer
persistAsync(c + "-outer-2") { outer =>
sender() ! outer
persistAsync(c + "-inner-2") { inner sender() ! inner }
persistAsync(c + "-inner-2") { inner => sender() ! inner }
}
}
//#nested-persistAsync-persistAsync
@ -408,15 +408,15 @@ object PersistenceDocSpec {
override def persistenceId = "safe-actor"
override def receiveCommand: Receive = {
case c: String ⇒
case c: String =>
println(c)
persist(s"handle-$c") { println(_) }
case Shutdown ⇒
case Shutdown =>
context.stop(self)
}
override def receiveRecover: Receive = {
case _ ⇒ // handle recovery here
case _ => // handle recovery here
}
}
//#safe-shutdown

View file

@ -76,12 +76,12 @@ class PersistenceEventAdapterDocSpec(config: String) extends AkkaSpec(config) {
override def journalPluginId: String = "akka.persistence.journal.auto-json-store"
override def receiveRecover: Receive = {
case RecoveryCompleted ⇒ // ignore...
case e ⇒ p.ref ! e
case RecoveryCompleted => // ignore...
case e => p.ref ! e
}
override def receiveCommand: Receive = {
case c ⇒ persist(c) { e ⇒ p.ref ! e }
case c => persist(c) { e => p.ref ! e }
}
})
@ -108,12 +108,12 @@ class PersistenceEventAdapterDocSpec(config: String) extends AkkaSpec(config) {
override def journalPluginId: String = "akka.persistence.journal.manual-json-store"
override def receiveRecover: Receive = {
case RecoveryCompleted ⇒ // ignore...
case e ⇒ p.ref ! e
case RecoveryCompleted => // ignore...
case e => p.ref ! e
}
override def receiveCommand: Receive = {
case c ⇒ persist(c) { e ⇒ p.ref ! e }
case c => persist(c) { e => p.ref ! e }
}
})
@ -165,7 +165,7 @@ class MyAutoJsonEventAdapter(system: ExtendedActorSystem) extends EventAdapter {
override def fromJournal(event: Any, manifest: String): EventSeq = EventSeq.single {
event match {
case json: JsonElement ⇒
case json: JsonElement =>
val clazz = system.dynamicAccess.getClassFor[Any](manifest).get
gson.fromJson(json, clazz)
}
@ -202,7 +202,7 @@ class MyManualJsonEventAdapter(system: ExtendedActorSystem) extends EventAdapter
}
override def fromJournal(event: Any, m: String): EventSeq = event match {
case json: JsonElement ⇒
case json: JsonElement =>
val manifest = json.getAsJsonObject.get("_manifest").getAsString
val clazz = system.dynamicAccess.getClassFor[Any](manifest).get
@ -214,14 +214,14 @@ class MyTaggingEventAdapter(system: ExtendedActorSystem) extends EventAdapter {
override def manifest(event: Any): String = ""
override def fromJournal(event: Any, manifest: String): EventSeq = event match {
case j: MyTaggingJournalModel ⇒ EventSeq.single(j)
case j: MyTaggingJournalModel => EventSeq.single(j)
}
override def toJournal(event: Any): Any = {
event match {
case Person(_, age) if age >= 18 ⇒ MyTaggingJournalModel(event, tags = Set("adult"))
case Person(_, age) ⇒ MyTaggingJournalModel(event, tags = Set("minor"))
case _ ⇒ MyTaggingJournalModel(event, tags = Set.empty)
case Person(_, age) if age >= 18 => MyTaggingJournalModel(event, tags = Set("adult"))
case Person(_, age) => MyTaggingJournalModel(event, tags = Set("minor"))
case _ => MyTaggingJournalModel(event, tags = Set.empty)
}
}
}

View file

@ -129,7 +129,7 @@ object SharedLeveldbPluginDocSpec {
}
def receive = {
case ActorIdentity(1, Some(store)) ⇒
case ActorIdentity(1, Some(store)) =>
SharedLeveldbJournal.setStore(store, context.system)
}
}
@ -161,7 +161,7 @@ class MyJournal extends AsyncWriteJournal {
def asyncDeleteMessagesTo(persistenceId: String, toSequenceNr: Long): Future[Unit] = ???
def asyncReplayMessages(persistenceId: String, fromSequenceNr: Long,
toSequenceNr: Long, max: Long)(
replayCallback: (PersistentRepr) ⇒ Unit): Future[Unit] = ???
replayCallback: (PersistentRepr) => Unit): Future[Unit] = ???
def asyncReadHighestSequenceNr(
persistenceId: String,
fromSequenceNr: Long): Future[Long] = ???

View file

@ -53,10 +53,10 @@ class ProtobufReadOptional {
sealed abstract class SeatType { def code: String }
object SeatType {
def fromString(s: String) = s match {
case Window.code ⇒ Window
case Aisle.code ⇒ Aisle
case Other.code ⇒ Other
case _ ⇒ Unknown
case Window.code => Window
case Aisle.code => Aisle
case Other.code => Other
case _ => Unknown
}
case object Window extends SeatType { override val code = "W" }
case object Aisle extends SeatType { override val code = "A" }
@ -82,15 +82,15 @@ class ProtobufReadOptional {
override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef =
manifest match {
case SeatReservedManifest ⇒
case SeatReservedManifest =>
// use generated protobuf serializer
seatReserved(FlightAppModels.SeatReserved.parseFrom(bytes))
case _ ⇒
case _ =>
throw new NotSerializableException("Unable to handle manifest: " + manifest)
}
override def toBinary(o: AnyRef): Array[Byte] = o match {
case s: SeatReserved ⇒
case s: SeatReserved =>
FlightAppModels.SeatReserved.newBuilder
.setRow(s.row)
.setLetter(s.letter)
@ -142,12 +142,12 @@ class RenamePlainJson {
marshaller.toJson(event)
override def fromJournal(event: Any, manifest: String): EventSeq = event match {
case json: JsObject ⇒ EventSeq(marshaller.fromJson(manifest match {
case V1 ⇒ rename(json, "code", "seatNr")
case V2 ⇒ json // pass-through
case unknown ⇒ throw new IllegalArgumentException(s"Unknown manifest: $unknown")
case json: JsObject => EventSeq(marshaller.fromJson(manifest match {
case V1 => rename(json, "code", "seatNr")
case V2 => json // pass-through
case unknown => throw new IllegalArgumentException(s"Unknown manifest: $unknown")
}))
case _ ⇒
case _ =>
val c = event.getClass
throw new IllegalArgumentException("Can only work with JSON, was: %s".format(c))
}
@ -189,19 +189,19 @@ object SimplestCustomSerializer {
// serialize the object
override def toBinary(obj: AnyRef): Array[Byte] = obj match {
case p: Person s"""${p.name}|${p.surname}""".getBytes(Utf8)
case _ throw new IllegalArgumentException(
case p: Person => s"""${p.name}|${p.surname}""".getBytes(Utf8)
case _ => throw new IllegalArgumentException(
s"Unable to serialize to bytes, clazz was: ${obj.getClass}!")
}
// deserialize the object, using the manifest to indicate which logic to apply
override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef =
manifest match {
case PersonManifest ⇒
case PersonManifest =>
val nameAndSurname = new String(bytes, Utf8)
val Array(name, surname) = nameAndSurname.split("[|]")
Person(name, surname)
case _ ⇒ throw new NotSerializableException(
case _ => throw new NotSerializableException(
s"Unable to deserialize from bytes, manifest was: $manifest! Bytes length: " +
bytes.length)
}
@ -248,13 +248,13 @@ class UserEventsAdapter extends EventAdapter {
override def manifest(event: Any): String = ""
override def fromJournal(event: Any, manifest: String): EventSeq = event match {
case UserDetailsChanged(null, address) ⇒ EventSeq(UserAddressChanged(address))
case UserDetailsChanged(name, null) ⇒ EventSeq(UserNameChanged(name))
case UserDetailsChanged(name, address) ⇒
case UserDetailsChanged(null, address) => EventSeq(UserAddressChanged(address))
case UserDetailsChanged(name, null) => EventSeq(UserNameChanged(name))
case UserDetailsChanged(name, address) =>
EventSeq(
UserNameChanged(name),
UserAddressChanged(address))
case event: V2 ⇒ EventSeq(event)
case event: V2 => EventSeq(event)
}
override def toJournal(event: Any): Any = event
@ -277,15 +277,15 @@ class RemovedEventsAwareSerializer extends SerializerWithStringManifest {
override def manifest(o: AnyRef): String = o.getClass.getName
override def toBinary(o: AnyRef): Array[Byte] = o match {
case _ ⇒ o.toString.getBytes(utf8) // example serialization
case _ => o.toString.getBytes(utf8) // example serialization
}
override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef =
manifest match {
case m if SkipEventManifestsEvents.contains(m) ⇒
case m if SkipEventManifestsEvents.contains(m) =>
EventDeserializationSkipped
case other ⇒ new String(bytes, utf8)
case other => new String(bytes, utf8)
}
}
//#string-serializer-skip-deleved-event-by-manifest
@ -296,8 +296,8 @@ class SkippedEventsAwareAdapter extends EventAdapter {
override def toJournal(event: Any) = event
override def fromJournal(event: Any, manifest: String) = event match {
case EventDeserializationSkipped ⇒ EventSeq.empty
case _ ⇒ EventSeq(event)
case EventDeserializationSkipped => EventSeq.empty
case _ => EventSeq(event)
}
}
//#string-serializer-skip-deleved-event-by-manifest-adapter
@ -313,15 +313,15 @@ class RenamedEventAwareSerializer extends SerializerWithStringManifest {
override def manifest(o: AnyRef): String = o.getClass.getName
override def toBinary(o: AnyRef): Array[Byte] = o match {
case SamplePayload(data) s"""$data""".getBytes(Utf8)
case SamplePayload(data) => s"""$data""".getBytes(Utf8)
// previously also handled "old" events here.
}
override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef =
manifest match {
case OldPayloadClassName ⇒ SamplePayload(new String(bytes, Utf8))
case MyPayloadClassName ⇒ SamplePayload(new String(bytes, Utf8))
case other ⇒ throw new NotSerializableException(s"unexpected manifest [$other]")
case OldPayloadClassName => SamplePayload(new String(bytes, Utf8))
case MyPayloadClassName => SamplePayload(new String(bytes, Utf8))
case other => throw new NotSerializableException(s"unexpected manifest [$other]")
}
}
//#string-serializer-handle-rename
@ -348,11 +348,11 @@ class DetachedModelsAdapter extends EventAdapter {
override def manifest(event: Any): String = ""
override def toJournal(event: Any): Any = event match {
case DomainModel.SeatBooked(code, customer) ⇒
case DomainModel.SeatBooked(code, customer) =>
DataModel.SeatBooked(code, customer.name)
}
override def fromJournal(event: Any, manifest: String): EventSeq = event match {
case DataModel.SeatBooked(code, customerName) ⇒
case DataModel.SeatBooked(code, customerName) =>
EventSeq(DomainModel.SeatBooked(code, DomainModel.Customer(customerName)))
}
}
@ -374,9 +374,9 @@ class JsonDataModelAdapter extends EventAdapter {
marshaller.toJson(event)
override def fromJournal(event: Any, manifest: String): EventSeq = event match {
case json: JsObject ⇒
case json: JsObject =>
EventSeq(marshaller.fromJson(json))
case _ ⇒
case _ =>
throw new IllegalArgumentException(
"Unable to fromJournal a non-JSON object! Was: " + event.getClass)
}

View file

@ -29,20 +29,20 @@ class ExamplePersistentActor extends PersistentActor {
state.size
val receiveRecover: Receive = {
case evt: Evt ⇒ updateState(evt)
case SnapshotOffer(_, snapshot: ExampleState) ⇒ state = snapshot
case evt: Evt => updateState(evt)
case SnapshotOffer(_, snapshot: ExampleState) => state = snapshot
}
val snapShotInterval = 1000
val receiveCommand: Receive = {
case Cmd(data) ⇒
persist(Evt(s"${data}-${numEvents}")) { event ⇒
case Cmd(data) =>
persist(Evt(s"${data}-${numEvents}")) { event =>
updateState(event)
context.system.eventStream.publish(event)
if (lastSequenceNr % snapShotInterval == 0 && lastSequenceNr != 0)
saveSnapshot(state)
}
case "print" println(state)
case "print" => println(state)
}
}

View file

@ -24,13 +24,13 @@ object LeveldbPersistenceQueryDocSpec {
class MyTaggingEventAdapter extends WriteEventAdapter {
val colors = Set("green", "black", "blue")
override def toJournal(event: Any): Any = event match {
case s: String ⇒
var tags = colors.foldLeft(Set.empty[String]) { (acc, c) ⇒
case s: String =>
var tags = colors.foldLeft(Set.empty[String]) { (acc, c) =>
if (s.contains(c)) acc + c else acc
}
if (tags.isEmpty) event
else Tagged(event, tags)
case _ ⇒ event
case _ => event
}
override def manifest(event: Any): String = ""

View file

@ -39,11 +39,11 @@ class MyEventsByTagPublisher(tag: String, offset: Long, refreshInterval: FiniteDuration
}
def receive = {
case _: Request | Continue ⇒
case _: Request | Continue =>
query()
deliverBuf()
case Cancel ⇒
case Cancel =>
context.stop(self)
}
@ -79,12 +79,12 @@ class MyEventsByTagPublisher(tag: String, offset: Long, refreshInterval: FiniteDuration
val serialization = SerializationExtension(context.system)
buf = result.map {
case (id, bytes) ⇒
case (id, bytes) =>
val p = serialization.deserialize(bytes, classOf[PersistentRepr]).get
EventEnvelope(offset = Sequence(id), p.persistenceId, p.sequenceNr, p.payload)
}
} catch {
case e: Exception ⇒
case e: Exception =>
onErrorThenStop(e)
}
}

View file

@ -67,12 +67,12 @@ object PersistenceQueryDocSpec {
*/
override def eventsByTag(
tag: String, offset: Offset): Source[EventEnvelope, NotUsed] = offset match {
case Sequence(offsetValue) ⇒
case Sequence(offsetValue) =>
val props = MyEventsByTagPublisher.props(tag, offsetValue, refreshInterval)
Source.actorPublisher[EventEnvelope](props)
.mapMaterializedValue(_ ⇒ NotUsed)
case NoOffset ⇒ eventsByTag(tag, Sequence(0L)) //recursive
case _ ⇒
.mapMaterializedValue(_ => NotUsed)
case NoOffset => eventsByTag(tag, Sequence(0L)) //recursive
case _ =>
throw new IllegalArgumentException("LevelDB does not support " + offset.getClass.getName + " offsets")
}
@ -166,7 +166,7 @@ object PersistenceQueryDocSpec {
// Using an example (Reactive Streams) Database driver
readJournal
.eventsByPersistenceId("user-1337", fromSequenceNr = 0L, toSequenceNr = Long.MaxValue)
.map(envelope ⇒ envelope.event)
.map(envelope => envelope.event)
.map(convertToReadSideTypes) // convert to datatype
.grouped(20) // batch inserts into groups of 20
.runWith(Sink.fromSubscriber(dbBatchWriter)) // write batches to read-side database
@ -180,7 +180,7 @@ object PersistenceQueryDocSpec {
var state: ComplexState = ComplexState()
def receive = {
case m ⇒
case m =>
state = updateState(state, m)
if (state.readyToSave) store.save(Record(state))
}
@ -223,7 +223,7 @@ class PersistenceQueryDocSpec(s: String) extends AkkaSpec(s) {
// materialize stream, consuming events
implicit val mat = ActorMaterializer()
source.runForeach { event ⇒ println("Event: " + event) }
source.runForeach { event => println("Event: " + event) }
//#basic-usage
//#all-persistence-ids-live
@ -261,12 +261,12 @@ class PersistenceQueryDocSpec(s: String) extends AkkaSpec(s) {
readJournal.byTagsWithMeta(Set("red", "blue"))
query
.mapMaterializedValue { meta ⇒
.mapMaterializedValue { meta =>
println(s"The query is: " +
s"ordered deterministically: ${meta.deterministicOrder}, " +
s"infinite: ${meta.infinite}")
}
.map { event ⇒ println(s"Event payload: ${event.payload}") }
.map { event => println(s"Event payload: ${event.payload}") }
.runWith(Sink.ignore)
//#advanced-journal-query-usage
@ -293,11 +293,11 @@ class PersistenceQueryDocSpec(s: String) extends AkkaSpec(s) {
val writerProps = Props(classOf[TheOneWhoWritesToQueryJournal], "bid")
val writer = system.actorOf(writerProps, "bid-projection-writer")
bidProjection.latestOffset.foreach { startFromOffset ⇒
bidProjection.latestOffset.foreach { startFromOffset =>
readJournal
.eventsByTag("bid", Sequence(startFromOffset))
.mapAsync(8) { envelope ⇒ (writer ? envelope.event).map(_ ⇒ envelope.offset) }
.mapAsync(1) { offset ⇒ bidProjection.saveProgress(offset) }
.mapAsync(8) { envelope => (writer ? envelope.event).map(_ => envelope.offset) }
.mapAsync(1) { offset => bidProjection.saveProgress(offset) }
.runWith(Sink.ignore)
}
//#projection-into-different-store-actor-run
@ -319,7 +319,7 @@ class PersistenceQueryDocSpec(s: String) extends AkkaSpec(s) {
readJournal
.eventsByTag("bid", NoOffset)
.mapAsync(1) { e ⇒ store.save(e) }
.mapAsync(1) { e => store.save(e) }
.runWith(Sink.ignore)
//#projection-into-different-store-simple
}

View file

@ -14,7 +14,7 @@ import akka.remote.RemoteScope
object RemoteDeploymentDocSpec {
class SampleActor extends Actor {
def receive = { case _ ⇒ sender() ! self }
def receive = { case _ => sender() ! self }
}
}

View file

@ -19,9 +19,9 @@ object ConsistentHashingRouterDocSpec {
var cache = Map.empty[String, String]
def receive = {
case Entry(key, value) ⇒ cache += (key -> value)
case Get(key) ⇒ sender() ! cache.get(key)
case Evict(key) ⇒ cache -= key
case Entry(key, value) => cache += (key -> value)
case Get(key) => sender() ! cache.get(key)
case Evict(key) => cache -= key
}
}
@ -51,7 +51,7 @@ class ConsistentHashingRouterDocSpec extends AkkaSpec with ImplicitSender {
import akka.routing.ConsistentHashingRouter.ConsistentHashableEnvelope
def hashMapping: ConsistentHashMapping = {
case Evict(key) ⇒ key
case Evict(key) => key
}
val cache: ActorRef =

View file

@ -51,7 +51,7 @@ akka.actor.deployment {
class RedundancyRoutingLogic(nbrCopies: Int) extends RoutingLogic {
val roundRobin = RoundRobinRoutingLogic()
def select(message: Any, routees: immutable.IndexedSeq[Routee]): Routee = {
val targets = (1 to nbrCopies).map(_ ⇒ roundRobin.select(message, routees))
val targets = (1 to nbrCopies).map(_ => roundRobin.select(message, routees))
SeveralRoutees(targets)
}
}
@ -59,7 +59,7 @@ akka.actor.deployment {
class Storage extends Actor {
def receive = {
case x ⇒ sender() ! x
case x => sender() ! x
}
}
@ -102,7 +102,7 @@ class CustomRouterDocSpec extends AkkaSpec(CustomRouterDocSpec.config) with Impl
//#unit-test-logic
val logic = new RedundancyRoutingLogic(nbrCopies = 3)
val routees = for (n ← 1 to 7) yield TestRoutee(n)
val routees = for (n <- 1 to 7) yield TestRoutee(n)
val r1 = logic.select("msg", routees)
r1.asInstanceOf[SeveralRoutees].routees should be(
@ -121,9 +121,9 @@ class CustomRouterDocSpec extends AkkaSpec(CustomRouterDocSpec.config) with Impl
"demonstrate usage of custom router" in {
//#usage-1
for (n ← 1 to 10) system.actorOf(Props[Storage], "s" + n)
for (n <- 1 to 10) system.actorOf(Props[Storage], "s" + n)
val paths = for (n ← 1 to 10) yield ("/user/s" + n)
val paths = for (n <- 1 to 10) yield ("/user/s" + n)
val redundancy1: ActorRef =
system.actorOf(
RedundancyGroup(paths, nbrCopies = 3).props(),
@ -131,7 +131,7 @@ class CustomRouterDocSpec extends AkkaSpec(CustomRouterDocSpec.config) with Impl
redundancy1 ! "important"
//#usage-1
for (_ ← 1 to 3) expectMsg("important")
for (_ <- 1 to 3) expectMsg("important")
//#usage-2
val redundancy2: ActorRef = system.actorOf(
@ -140,7 +140,7 @@ class CustomRouterDocSpec extends AkkaSpec(CustomRouterDocSpec.config) with Impl
redundancy2 ! "very important"
//#usage-2
for (_ ← 1 to 5) expectMsg("very important")
for (_ <- 1 to 5) expectMsg("very important")
}

View file

@ -307,9 +307,9 @@ router-dispatcher {}
}
def receive = {
case w: Work ⇒
case w: Work =>
router.route(w, sender())
case Terminated(a) ⇒
case Terminated(a) =>
router = router.removeRoutee(a)
val r = context.actorOf(Props[Worker])
context watch r
@ -320,7 +320,7 @@ router-dispatcher {}
class Worker extends Actor {
def receive = {
case _ ⇒
case _ =>
}
}
@ -333,7 +333,7 @@ router-dispatcher {}
//#create-worker-actors
def receive = {
case _ ⇒
case _ =>
}
}
@ -397,7 +397,7 @@ router-dispatcher {}
val router10b: ActorRef =
context.actorOf(BalancingPool(20).props(Props[Worker]), "router10b")
//#balancing-pool-3
for (i ← 1 to 100) router10b ! i
for (i <- 1 to 100) router10b ! i
val threads10b = Thread.getAllStackTraces.keySet.asScala.filter { _.getName contains "router10b" }
val threads10bNr = threads10b.size
require(threads10bNr == 5, s"Expected 5 threads for router10b, had $threads10bNr! Got: ${threads10b.map(_.getName)}")
@ -519,14 +519,14 @@ router-dispatcher {}
//#optimal-size-exploring-resize-pool
def receive = {
case _ ⇒
case _ =>
}
}
class Echo extends Actor {
def receive = {
case m ⇒ sender() ! m
case m => sender() ! m
}
}
}

View file

@ -70,16 +70,16 @@ package docs.serialization {
// Use `""` if manifest is not needed.
def manifest(obj: AnyRef): String =
obj match {
case _: Customer ⇒ CustomerManifest
case _: User ⇒ UserManifest
case _: Customer => CustomerManifest
case _: User => UserManifest
}
// "toBinary" serializes the given object to an Array of Bytes
def toBinary(obj: AnyRef): Array[Byte] = {
// Put the real code that serializes the object here
obj match {
case Customer(name) ⇒ name.getBytes(UTF_8)
case User(name) ⇒ name.getBytes(UTF_8)
case Customer(name) => name.getBytes(UTF_8)
case User(name) => name.getBytes(UTF_8)
}
}
@ -88,9 +88,9 @@ package docs.serialization {
def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = {
// Put the real code that deserializes here
manifest match {
case CustomerManifest ⇒
case CustomerManifest =>
Customer(new String(bytes, UTF_8))
case UserManifest ⇒
case UserManifest =>
User(new String(bytes, UTF_8))
}
}

View file

@ -30,9 +30,9 @@ object ActorPublisherDocSpec {
var buf = Vector.empty[Job]
def receive = {
case job: Job if buf.size == MaxBufferSize ⇒
case job: Job if buf.size == MaxBufferSize =>
sender() ! JobDenied
case job: Job ⇒
case job: Job =>
sender() ! JobAccepted
if (buf.isEmpty && totalDemand > 0)
onNext(job)
@ -40,9 +40,9 @@ object ActorPublisherDocSpec {
buf :+= job
deliverBuf()
}
case Request(_) ⇒
case Request(_) =>
deliverBuf()
case Cancel ⇒
case Cancel =>
context.stop(self)
}
@ -80,7 +80,7 @@ class ActorPublisherDocSpec extends AkkaSpec {
val jobManagerSource = Source.actorPublisher[JobManager.Job](JobManager.props)
val ref = Flow[JobManager.Job]
.map(_.payload.toUpperCase)
.map { elem ⇒ println(elem); elem }
.map { elem => println(elem); elem }
.to(Sink.ignore)
.runWith(jobManagerSource)

View file

@ -49,17 +49,17 @@ object ActorSubscriberDocSpec {
}
def receive = {
case OnNext(Msg(id, replyTo)) ⇒
case OnNext(Msg(id, replyTo)) =>
queue += (id -> replyTo)
assert(queue.size <= MaxQueueSize, s"queued too many: ${queue.size}")
router.route(Work(id), self)
case Reply(id) ⇒
case Reply(id) =>
queue(id) ! Done(id)
queue -= id
if (canceled && queue.isEmpty) {
context.stop(self)
}
case OnComplete ⇒
case OnComplete =>
if (queue.isEmpty) {
context.stop(self)
}
@ -69,7 +69,7 @@ object ActorSubscriberDocSpec {
class Worker extends Actor {
import WorkerPool._
def receive = {
case Work(id) ⇒
case Work(id) =>
// ...
sender() ! Reply(id)
}

View file

@ -24,8 +24,8 @@ object BidiFlowDocSpec {
//#implementation-details-elided
implicit val order = ByteOrder.LITTLE_ENDIAN
msg match {
case Ping(id) ⇒ ByteString.newBuilder.putByte(1).putInt(id).result()
case Pong(id) ⇒ ByteString.newBuilder.putByte(2).putInt(id).result()
case Ping(id) => ByteString.newBuilder.putByte(1).putInt(id).result()
case Pong(id) => ByteString.newBuilder.putByte(2).putInt(id).result()
}
//#implementation-details-elided
}
@ -35,15 +35,15 @@ object BidiFlowDocSpec {
implicit val order = ByteOrder.LITTLE_ENDIAN
val it = bytes.iterator
it.getByte match {
case 1 ⇒ Ping(it.getInt)
case 2 ⇒ Pong(it.getInt)
case other ⇒ throw new RuntimeException(s"parse error: expected 1|2 got $other")
case 1 => Ping(it.getInt)
case 2 => Pong(it.getInt)
case other => throw new RuntimeException(s"parse error: expected 1|2 got $other")
}
//#implementation-details-elided
}
//#codec-impl
val codecVerbose = BidiFlow.fromGraph(GraphDSL.create() { b ⇒
val codecVerbose = BidiFlow.fromGraph(GraphDSL.create() { b =>
// construct and add the top flow, going outbound
val outbound = b.add(Flow[Message].map(toBytes))
// construct and add the bottom flow, going inbound
@ -57,7 +57,7 @@ object BidiFlowDocSpec {
//#codec
//#framing
val framing = BidiFlow.fromGraph(GraphDSL.create() { b ⇒
val framing = BidiFlow.fromGraph(GraphDSL.create() { b =>
implicit val order = ByteOrder.LITTLE_ENDIAN
def addLengthHeader(bytes: ByteString) = {
@ -134,12 +134,12 @@ object BidiFlowDocSpec {
})
//#framing
val chopUp = BidiFlow.fromGraph(GraphDSL.create() { b ⇒
val chopUp = BidiFlow.fromGraph(GraphDSL.create() { b =>
val f = Flow[ByteString].mapConcat(_.map(ByteString(_)))
BidiShape.fromFlows(b.add(f), b.add(f))
})
val accumulate = BidiFlow.fromGraph(GraphDSL.create() { b ⇒
val accumulate = BidiFlow.fromGraph(GraphDSL.create() { b =>
val f = Flow[ByteString].grouped(1000).map(_.fold(ByteString.empty)(_ ++ _))
BidiShape.fromFlows(b.add(f), b.add(f))
})
@ -168,7 +168,7 @@ class BidiFlowDocSpec extends AkkaSpec {
val stack = codec.atop(framing)
// test it by plugging it into its own inverse and closing the right end
val pingpong = Flow[Message].collect { case Ping(id) ⇒ Pong(id) }
val pingpong = Flow[Message].collect { case Ping(id) => Pong(id) }
val flow = stack.atop(stack.reversed).join(pingpong)
val result = Source((0 to 9).map(Ping)).via(flow).limit(20).runWith(Sink.seq)
Await.result(result, 1.second) should ===((0 to 9).map(Pong))
@ -177,14 +177,14 @@ class BidiFlowDocSpec extends AkkaSpec {
"work when chopped up" in {
val stack = codec.atop(framing)
val flow = stack.atop(chopUp).atop(stack.reversed).join(Flow[Message].map { case Ping(id) ⇒ Pong(id) })
val flow = stack.atop(chopUp).atop(stack.reversed).join(Flow[Message].map { case Ping(id) => Pong(id) })
val f = Source((0 to 9).map(Ping)).via(flow).limit(20).runWith(Sink.seq)
Await.result(f, 1.second) should ===((0 to 9).map(Pong))
}
"work when accumulated" in {
val stack = codec.atop(framing)
val flow = stack.atop(accumulate).atop(stack.reversed).join(Flow[Message].map { case Ping(id) ⇒ Pong(id) })
val flow = stack.atop(accumulate).atop(stack.reversed).join(Flow[Message].map { case Ping(id) => Pong(id) })
val f = Source((0 to 9).map(Ping)).via(flow).limit(20).runWith(Sink.seq)
Await.result(f, 1.second) should ===((0 to 9).map(Pong))
}

View file

@ -169,7 +169,7 @@ class CompositionDocSpec extends AkkaSpec {
"closed graph" in {
//#embed-closed
val closed1 = Source.single(0).to(Sink.foreach(println))
val closed2 = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder ⇒
val closed2 = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
val embeddedClosed: ClosedShape = builder.add(closed1)
//
embeddedClosed
@ -192,7 +192,7 @@ class CompositionDocSpec extends AkkaSpec {
//#mat-combine-2
// Materializes to NotUsed (orange)
val flow2: Flow[Int, ByteString, NotUsed] = Flow[Int].map { i ⇒ ByteString(i.toString) }
val flow2: Flow[Int, ByteString, NotUsed] = Flow[Int].map { i => ByteString(i.toString) }
// Materializes to Future[OutgoingConnection] (yellow)
val flow3: Flow[ByteString, ByteString, Future[OutgoingConnection]] =

View file

@ -27,10 +27,10 @@ class FlowDocSpec extends AkkaSpec with CompileOnlySpec {
"source is immutable" in {
//#source-immutable
val source = Source(1 to 10)
source.map(_ ⇒ 0) // has no effect on source, since it's immutable
source.map(_ => 0) // has no effect on source, since it's immutable
source.runWith(Sink.fold(0)(_ + _)) // 55
val zeroes = source.map(_ ⇒ 0) // returns new Source[Int], with `map()` appended
val zeroes = source.map(_ => 0) // returns new Source[Int], with `map()` appended
zeroes.runWith(Sink.fold(0)(_ + _)) // 0
//#source-immutable
}
@ -81,12 +81,12 @@ class FlowDocSpec extends AkkaSpec with CompileOnlySpec {
import scala.concurrent.duration._
case object Tick
val timer = Source.tick(initialDelay = 1.second, interval = 1.seconds, tick = () ⇒ Tick)
val timer = Source.tick(initialDelay = 1.second, interval = 1.seconds, tick = () => Tick)
val timerCancel: Cancellable = Sink.ignore.runWith(timer)
timerCancel.cancel()
val timerMap = timer.map(tick "tick")
val timerMap = timer.map(tick => "tick")
// materialize the flow and retrieve the timers Cancellable
val timerCancellable = Sink.ignore.runWith(timerMap)
timerCancellable.cancel()
@ -152,7 +152,7 @@ class FlowDocSpec extends AkkaSpec with CompileOnlySpec {
"various ways of transforming materialized values" in {
import scala.concurrent.duration._
val throttler = Flow.fromGraph(GraphDSL.create(Source.tick(1.second, 1.second, "test")) { implicit builder ⇒ tickSource ⇒
val throttler = Flow.fromGraph(GraphDSL.create(Source.tick(1.second, 1.second, "test")) { implicit builder => tickSource =>
import GraphDSL.Implicits._
val zip = builder.add(ZipWith[String, Int, Int](Keep.right))
tickSource ~> zip.in0
@ -200,7 +200,7 @@ class FlowDocSpec extends AkkaSpec with CompileOnlySpec {
// doubly nested pair, but we want to flatten it out
val r11: RunnableGraph[(Promise[Option[Int]], Cancellable, Future[Int])] =
r9.mapMaterializedValue {
case ((promise, cancellable), future) ⇒
case ((promise, cancellable), future) =>
(promise, cancellable, future)
}
@ -214,7 +214,7 @@ class FlowDocSpec extends AkkaSpec with CompileOnlySpec {
// The result of r11 can be also achieved by using the Graph API
val r12: RunnableGraph[(Promise[Option[Int]], Cancellable, Future[Int])] =
RunnableGraph.fromGraph(GraphDSL.create(source, flow, sink)((_, _, _)) { implicit builder ⇒ (src, f, dst) ⇒
RunnableGraph.fromGraph(GraphDSL.create(source, flow, sink)((_, _, _)) { implicit builder => (src, f, dst) =>
import GraphDSL.Implicits._
src ~> f ~> dst
ClosedShape
@ -263,12 +263,12 @@ object FlowDocSpec {
Source.maybe
.runWith(Sink.onComplete {
case Success(done) ⇒ println(s"Completed: $done")
case Failure(ex) ⇒ println(s"Failed: ${ex.getMessage}")
case Success(done) => println(s"Completed: $done")
case Failure(ex) => println(s"Failed: ${ex.getMessage}")
})
def receive = {
case "boom"
case "boom" =>
context.stop(self) // will also terminate the stream
}
}
@ -279,12 +279,12 @@ object FlowDocSpec {
Source.maybe
.runWith(Sink.onComplete {
case Success(done) ⇒ println(s"Completed: $done")
case Failure(ex) ⇒ println(s"Failed: ${ex.getMessage}")
case Success(done) => println(s"Completed: $done")
case Failure(ex) => println(s"Failed: ${ex.getMessage}")
})
def receive = {
case "boom"
case "boom" =>
context.stop(self) // will NOT terminate the stream (it's bound to the system!)
}
}

View file

@ -33,8 +33,8 @@ class FlowErrorDocSpec extends AkkaSpec {
"demonstrate resume stream" in {
//#resume
val decider: Supervision.Decider = {
case _: ArithmeticException ⇒ Supervision.Resume
case _ ⇒ Supervision.Stop
case _: ArithmeticException => Supervision.Resume
case _ => Supervision.Stop
}
implicit val materializer = ActorMaterializer(
ActorMaterializerSettings(system).withSupervisionStrategy(decider))
@ -51,11 +51,11 @@ class FlowErrorDocSpec extends AkkaSpec {
//#resume-section
implicit val materializer = ActorMaterializer()
val decider: Supervision.Decider = {
case _: ArithmeticException ⇒ Supervision.Resume
case _ ⇒ Supervision.Stop
case _: ArithmeticException => Supervision.Resume
case _ => Supervision.Stop
}
val flow = Flow[Int]
.filter(100 / _ < 50).map(elem ⇒ 100 / (5 - elem))
.filter(100 / _ < 50).map(elem => 100 / (5 - elem))
.withAttributes(ActorAttributes.supervisionStrategy(decider))
val source = Source(0 to 5).via(flow)
@ -71,11 +71,11 @@ class FlowErrorDocSpec extends AkkaSpec {
//#restart-section
implicit val materializer = ActorMaterializer()
val decider: Supervision.Decider = {
case _: IllegalArgumentException ⇒ Supervision.Restart
case _ ⇒ Supervision.Stop
case _: IllegalArgumentException => Supervision.Restart
case _ => Supervision.Stop
}
val flow = Flow[Int]
.scan(0) { (acc, elem) ⇒
.scan(0) { (acc, elem) =>
if (elem < 0) throw new IllegalArgumentException("negative not allowed")
else acc + elem
}
@ -93,11 +93,11 @@ class FlowErrorDocSpec extends AkkaSpec {
"demonstrate recover" in {
implicit val materializer = ActorMaterializer()
//#recover
Source(0 to 6).map(n ⇒
Source(0 to 6).map(n =>
if (n < 5) n.toString
else throw new RuntimeException("Boom!")
).recover {
case _: RuntimeException "stream truncated"
case _: RuntimeException => "stream truncated"
}.runForeach(println)
//#recover
@ -119,11 +119,11 @@ stream truncated
//#recoverWithRetries
val planB = Source(List("five", "six", "seven", "eight"))
Source(0 to 10).map(n ⇒
Source(0 to 10).map(n =>
if (n < 5) n.toString
else throw new RuntimeException("Boom!")
).recoverWithRetries(attempts = 1, {
case _: RuntimeException ⇒ planB
case _: RuntimeException => planB
}).runForeach(println)
//#recoverWithRetries

View file

@ -41,9 +41,9 @@ class FlowParallelismDocSpec extends AkkaSpec {
"Demonstrate parallel processing" in {
//#parallelism
val fryingPan: Flow[ScoopOfBatter, Pancake, NotUsed] =
Flow[ScoopOfBatter].map { batter ⇒ Pancake() }
Flow[ScoopOfBatter].map { batter => Pancake() }
val pancakeChef: Flow[ScoopOfBatter, Pancake, NotUsed] = Flow.fromGraph(GraphDSL.create() { implicit builder ⇒
val pancakeChef: Flow[ScoopOfBatter, Pancake, NotUsed] = Flow.fromGraph(GraphDSL.create() { implicit builder =>
val dispatchBatter = builder.add(Balance[ScoopOfBatter](2))
val mergePancakes = builder.add(Merge[Pancake](2))
@ -64,7 +64,7 @@ class FlowParallelismDocSpec extends AkkaSpec {
"Demonstrate parallelized pipelines" in {
//#parallel-pipeline
val pancakeChef: Flow[ScoopOfBatter, Pancake, NotUsed] =
Flow.fromGraph(GraphDSL.create() { implicit builder ⇒
Flow.fromGraph(GraphDSL.create() { implicit builder =>
val dispatchBatter = builder.add(Balance[ScoopOfBatter](2))
val mergePancakes = builder.add(Merge[Pancake](2))
@ -82,7 +82,7 @@ class FlowParallelismDocSpec extends AkkaSpec {
"Demonstrate pipelined parallel processing" in {
//#pipelined-parallel
val pancakeChefs1: Flow[ScoopOfBatter, HalfCookedPancake, NotUsed] =
Flow.fromGraph(GraphDSL.create() { implicit builder ⇒
Flow.fromGraph(GraphDSL.create() { implicit builder =>
val dispatchBatter = builder.add(Balance[ScoopOfBatter](2))
val mergeHalfPancakes = builder.add(Merge[HalfCookedPancake](2))
@ -95,7 +95,7 @@ class FlowParallelismDocSpec extends AkkaSpec {
})
val pancakeChefs2: Flow[HalfCookedPancake, Pancake, NotUsed] =
Flow.fromGraph(GraphDSL.create() { implicit builder ⇒
Flow.fromGraph(GraphDSL.create() { implicit builder =>
val dispatchHalfPancakes = builder.add(Balance[HalfCookedPancake](2))
val mergePancakes = builder.add(Merge[Pancake](2))

View file

@ -27,7 +27,7 @@ class FlowStreamRefsDocSpec extends AkkaSpec with CompileOnlySpec {
implicit val mat = ActorMaterializer()(context)
def receive = {
case RequestLogs(streamId) ⇒
case RequestLogs(streamId) =>
// obtain the source you want to offer:
val source: Source[String, NotUsed] = streamLogs(streamId)
@ -74,7 +74,7 @@ class FlowStreamRefsDocSpec extends AkkaSpec with CompileOnlySpec {
implicit val mat = ActorMaterializer()(context)
def receive = {
case PrepareUpload(nodeId) ⇒
case PrepareUpload(nodeId) =>
// obtain the source you want to offer:
val sink: Sink[String, NotUsed] = logsSinkFor(nodeId)

View file

@ -13,7 +13,7 @@ class GraphCyclesSpec extends AkkaSpec {
implicit val materializer = ActorMaterializer()
"Cycle demonstration" must {
val source = Source.fromIterator(() ⇒ Iterator.from(0))
val source = Source.fromIterator(() => Iterator.from(0))
"include a deadlocked cycle" in {

View file

@ -47,7 +47,7 @@ class GraphDSLDocSpec extends AkkaSpec {
"flow connection errors" in {
intercept[IllegalStateException] {
//#simple-graph
RunnableGraph.fromGraph(GraphDSL.create() { implicit builder ⇒
RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
import GraphDSL.Implicits._
val source1 = Source(1 to 10)
val source2 = Source(1 to 10)
@ -125,7 +125,7 @@ class GraphDSLDocSpec extends AkkaSpec {
worker: Flow[In, Out, Any],
workerCount: Int): Graph[PriorityWorkerPoolShape[In, Out], NotUsed] = {
GraphDSL.create() { implicit b ⇒
GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val priorityMerge = b.add(MergePreferred[In](1))
@@ -137,7 +137,7 @@ class GraphDSLDocSpec extends AkkaSpec {
// Wire up each of the outputs of the balancer to a worker flow
// then merge them back
for (i ← 0 until workerCount)
for (i <- 0 until workerCount)
balance.out(i) ~> worker ~> resultsMerge.in(i)
// We now expose the input ports of the priorityMerge and the output
@@ -160,7 +160,7 @@ class GraphDSLDocSpec extends AkkaSpec {
val worker1 = Flow[String].map("step 1 " + _)
val worker2 = Flow[String].map("step 2 " + _)
RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒
RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val priorityPool1 = b.add(PriorityWorkerPool(worker1, 4))
@@ -195,7 +195,7 @@ class GraphDSLDocSpec extends AkkaSpec {
"access to materialized value" in {
//#graph-dsl-matvalue
import GraphDSL.Implicits._
val foldFlow: Flow[Int, Int, Future[Int]] = Flow.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) { implicit builder ⇒ fold ⇒
val foldFlow: Flow[Int, Int, Future[Int]] = Flow.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) { implicit builder => fold =>
FlowShape(fold.in, builder.materializedValue.mapAsync(4)(identity).outlet)
})
//#graph-dsl-matvalue
@@ -205,7 +205,7 @@ class GraphDSLDocSpec extends AkkaSpec {
//#graph-dsl-matvalue-cycle
import GraphDSL.Implicits._
// This cannot produce any value:
val cyclicFold: Source[Int, Future[Int]] = Source.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) { implicit builder ⇒ fold ⇒
val cyclicFold: Source[Int, Future[Int]] = Source.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) { implicit builder => fold =>
// - Fold cannot complete until its upstream mapAsync completes
// - mapAsync cannot complete until the materialized Future produced by
// fold completes

View file

@@ -121,7 +121,7 @@ class GraphStageDocSpec extends AkkaSpec {
}
//#one-to-one
class Map[A, B](f: A ⇒ B) extends GraphStage[FlowShape[A, B]] {
class Map[A, B](f: A => B) extends GraphStage[FlowShape[A, B]] {
val in = Inlet[A]("Map.in")
val out = Outlet[B]("Map.out")
@@ -151,13 +151,13 @@ class GraphStageDocSpec extends AkkaSpec {
val result =
Source(Vector("one", "two", "three"))
.via(stringLength)
.runFold(Seq.empty[Int])((elem, acc) ⇒ elem :+ acc)
.runFold(Seq.empty[Int])((elem, acc) => elem :+ acc)
Await.result(result, 3.seconds) should ===(Seq(3, 3, 5))
}
//#many-to-one
class Filter[A](p: A ⇒ Boolean) extends GraphStage[FlowShape[A, A]] {
class Filter[A](p: A => Boolean) extends GraphStage[FlowShape[A, A]] {
val in = Inlet[A]("Filter.in")
val out = Outlet[A]("Filter.out")
@@ -190,7 +190,7 @@ class GraphStageDocSpec extends AkkaSpec {
val result =
Source(Vector(1, 2, 3, 4, 5, 6))
.via(evenFilter)
.runFold(Seq.empty[Int])((elem, acc) ⇒ elem :+ acc)
.runFold(Seq.empty[Int])((elem, acc) => elem :+ acc)
Await.result(result, 3.seconds) should ===(Seq(2, 4, 6))
}
@@ -243,7 +243,7 @@ class GraphStageDocSpec extends AkkaSpec {
val result =
Source(Vector(1, 2, 3))
.via(duplicator)
.runFold(Seq.empty[Int])((elem, acc) ⇒ elem :+ acc)
.runFold(Seq.empty[Int])((elem, acc) => elem :+ acc)
Await.result(result, 3.seconds) should ===(Seq(1, 1, 2, 2, 3, 3))
}
@@ -283,14 +283,14 @@ class GraphStageDocSpec extends AkkaSpec {
val result =
Source(Vector(1, 2, 3))
.via(duplicator)
.runFold(Seq.empty[Int])((elem, acc) ⇒ elem :+ acc)
.runFold(Seq.empty[Int])((elem, acc) => elem :+ acc)
Await.result(result, 3.seconds) should ===(Seq(1, 1, 2, 2, 3, 3))
}
"Demonstrate chaining of graph stages" in {
val sink = Sink.fold[List[Int], Int](List.empty[Int])((acc, n) ⇒ acc :+ n)
val sink = Sink.fold[List[Int], Int](List.empty[Int])((acc, n) => acc :+ n)
//#graph-operator-chain
val resultFuture = Source(1 to 5)
@@ -320,7 +320,7 @@ class GraphStageDocSpec extends AkkaSpec {
new GraphStageLogic(shape) {
override def preStart(): Unit = {
val callback = getAsyncCallback[Unit] { (_) ⇒
val callback = getAsyncCallback[Unit] { (_) =>
completeStage()
}
switch.foreach(callback.invoke)
@@ -407,7 +407,7 @@ class GraphStageDocSpec extends AkkaSpec {
Source(Vector(1, 2, 3))
.via(new TimedGate[Int](2.second))
.takeWithin(250.millis)
.runFold(Seq.empty[Int])((elem, acc) ⇒ elem :+ acc)
.runFold(Seq.empty[Int])((elem, acc) => elem :+ acc)
Await.result(result, 3.seconds) should ===(Seq(1))
}
@@ -532,7 +532,7 @@ class GraphStageDocSpec extends AkkaSpec {
// tests:
val result1 = Source(Vector(1, 2, 3))
.via(new TwoBuffer)
.runFold(Vector.empty[Int])((acc, n) ⇒ acc :+ n)
.runFold(Vector.empty[Int])((acc, n) => acc :+ n)
Await.result(result1, 3.seconds) should ===(Vector(1, 2, 3))

View file

@@ -61,8 +61,8 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
val fromProducer: Source[String, NotUsed] = runnableGraph.run()
// Print out messages from the producer in two independent consumers
fromProducer.runForeach(msg println("consumer1: " + msg))
fromProducer.runForeach(msg println("consumer2: " + msg))
fromProducer.runForeach(msg => println("consumer1: " + msg))
fromProducer.runForeach(msg => println("consumer2: " + msg))
//#broadcast-hub
}
@@ -110,7 +110,7 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
//#partition-hub
// A simple producer that publishes a new "message-" every second
val producer = Source.tick(1.second, 1.second, "message")
.zipWith(Source(1 to 100))((a, b) ⇒ s"$a-$b")
.zipWith(Source(1 to 100))((a, b) => s"$a-$b")
// Attach a PartitionHub Sink to the producer. This will materialize to a
// corresponding Source.
@@ -118,7 +118,7 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
// value to the left is used)
val runnableGraph: RunnableGraph[Source[String, NotUsed]] =
producer.toMat(PartitionHub.sink(
(size, elem) ⇒ math.abs(elem.hashCode % size),
(size, elem) => math.abs(elem.hashCode % size),
startAfterNrOfConsumers = 2, bufferSize = 256))(Keep.right)
// By running/materializing the producer, we get back a Source, which
@@ -126,8 +126,8 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
val fromProducer: Source[String, NotUsed] = runnableGraph.run()
// Print out messages from the producer in two independent consumers
fromProducer.runForeach(msg println("consumer1: " + msg))
fromProducer.runForeach(msg println("consumer2: " + msg))
fromProducer.runForeach(msg => println("consumer1: " + msg))
fromProducer.runForeach(msg => println("consumer2: " + msg))
//#partition-hub
}
@@ -135,14 +135,14 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
//#partition-hub-stateful
// A simple producer that publishes a new "message-" every second
val producer = Source.tick(1.second, 1.second, "message")
.zipWith(Source(1 to 100))((a, b) ⇒ s"$a-$b")
.zipWith(Source(1 to 100))((a, b) => s"$a-$b")
// New instance of the partitioner function and its state is created
// for each materialization of the PartitionHub.
def roundRobin(): (PartitionHub.ConsumerInfo, String) ⇒ Long = {
def roundRobin(): (PartitionHub.ConsumerInfo, String) => Long = {
var i = -1L
(info, elem) ⇒ {
(info, elem) => {
i += 1
info.consumerIdByIdx((i % info.size).toInt)
}
@@ -154,7 +154,7 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
// value to the left is used)
val runnableGraph: RunnableGraph[Source[String, NotUsed]] =
producer.toMat(PartitionHub.statefulSink(
() ⇒ roundRobin(),
() => roundRobin(),
startAfterNrOfConsumers = 2, bufferSize = 256))(Keep.right)
// By running/materializing the producer, we get back a Source, which
@@ -162,8 +162,8 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
val fromProducer: Source[String, NotUsed] = runnableGraph.run()
// Print out messages from the producer in two independent consumers
fromProducer.runForeach(msg println("consumer1: " + msg))
fromProducer.runForeach(msg println("consumer2: " + msg))
fromProducer.runForeach(msg => println("consumer1: " + msg))
fromProducer.runForeach(msg => println("consumer2: " + msg))
//#partition-hub-stateful
}
@@ -175,14 +175,14 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
// Note that this is a moving target since the elements are consumed concurrently.
val runnableGraph: RunnableGraph[Source[Int, NotUsed]] =
producer.toMat(PartitionHub.statefulSink(
() ⇒ (info, elem) ⇒ info.consumerIds.minBy(id ⇒ info.queueSize(id)),
() => (info, elem) => info.consumerIds.minBy(id => info.queueSize(id)),
startAfterNrOfConsumers = 2, bufferSize = 16))(Keep.right)
val fromProducer: Source[Int, NotUsed] = runnableGraph.run()
fromProducer.runForeach(msg println("consumer1: " + msg))
fromProducer.runForeach(msg => println("consumer1: " + msg))
fromProducer.throttle(10, 100.millis)
.runForeach(msg println("consumer2: " + msg))
.runForeach(msg => println("consumer2: " + msg))
//#partition-hub-fastest
}

View file

@@ -92,7 +92,7 @@ object IntegrationDocSpec {
class DatabaseService(probe: ActorRef) extends Actor {
override def receive = {
case Save(tweet: Tweet) ⇒
case Save(tweet: Tweet) =>
probe ! tweet.author.handle
sender() ! SaveDone
}
@@ -123,7 +123,7 @@ object IntegrationDocSpec {
//#ask-actor
class Translator extends Actor {
def receive = {
case word: String ⇒
case word: String =>
// ... process message
val reply = word.toUpperCase
sender() ! reply // reply to the ask
@@ -169,14 +169,14 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
//#email-addresses-mapAsync
val emailAddresses: Source[String, NotUsed] =
authors
.mapAsync(4)(author ⇒ addressSystem.lookupEmail(author.handle))
.collect { case Some(emailAddress) ⇒ emailAddress }
.mapAsync(4)(author => addressSystem.lookupEmail(author.handle))
.collect { case Some(emailAddress) => emailAddress }
//#email-addresses-mapAsync
//#send-emails
val sendEmails: RunnableGraph[NotUsed] =
emailAddresses
.mapAsync(4)(address ⇒ {
.mapAsync(4)(address => {
emailServer.send(
Email(to = address, title = "Akka", body = "I like your tweet"))
})
@@ -205,7 +205,7 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
// sent from stream to actor to indicate start, end or failure of stream:
val InitMessage = AckingReceiver.StreamInitialized
val OnCompleteMessage = AckingReceiver.StreamCompleted
val onErrorMessage = (ex: Throwable) ⇒ AckingReceiver.StreamFailure(ex)
val onErrorMessage = (ex: Throwable) => AckingReceiver.StreamFailure(ex)
val probe = TestProbe()
val receiver = system.actorOf(
@@ -242,20 +242,20 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
import AckingReceiver._
def receive: Receive = {
case StreamInitialized ⇒
case StreamInitialized =>
log.info("Stream initialized!")
probe ! "Stream initialized!"
sender() ! Ack // ack to allow the stream to proceed sending more elements
case el: String ⇒
case el: String =>
log.info("Received element: {}", el)
probe ! el
sender() ! Ack // ack to allow the stream to proceed sending more elements
case StreamCompleted ⇒
case StreamCompleted =>
log.info("Stream completed!")
probe ! "Stream completed!"
case StreamFailure(ex) ⇒
case StreamFailure(ex) =>
log.error(ex, "Stream failed!")
}
}
@@ -272,7 +272,7 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
val emailAddresses: Source[String, NotUsed] =
authors.via(
Flow[Author].mapAsync(4)(author ⇒ addressSystem.lookupEmail(author.handle))
Flow[Author].mapAsync(4)(author => addressSystem.lookupEmail(author.handle))
.withAttributes(supervisionStrategy(resumingDecider)))
//#email-addresses-mapAsync-supervision
}
@@ -288,12 +288,12 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
val emailAddresses: Source[String, NotUsed] =
authors
.mapAsyncUnordered(4)(author ⇒ addressSystem.lookupEmail(author.handle))
.collect { case Some(emailAddress) ⇒ emailAddress }
.mapAsyncUnordered(4)(author => addressSystem.lookupEmail(author.handle))
.collect { case Some(emailAddress) => emailAddress }
val sendEmails: RunnableGraph[NotUsed] =
emailAddresses
.mapAsyncUnordered(4)(address ⇒ {
.mapAsyncUnordered(4)(address => {
emailServer.send(
Email(to = address, title = "Akka", body = "I like your tweet"))
})
@@ -320,15 +320,15 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
val authors = tweets.filter(_.hashtags.contains(akkaTag)).map(_.author)
val phoneNumbers =
authors.mapAsync(4)(author ⇒ addressSystem.lookupPhoneNumber(author.handle))
.collect { case Some(phoneNo) ⇒ phoneNo }
authors.mapAsync(4)(author => addressSystem.lookupPhoneNumber(author.handle))
.collect { case Some(phoneNo) => phoneNo }
//#blocking-mapAsync
val blockingExecutionContext = system.dispatchers.lookup("blocking-dispatcher")
val sendTextMessages: RunnableGraph[NotUsed] =
phoneNumbers
.mapAsync(4)(phoneNo ⇒ {
.mapAsync(4)(phoneNo => {
Future {
smsServer.send(
TextMessage(to = phoneNo, body = "I like your tweet"))
@@ -357,12 +357,12 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
val authors = tweets.filter(_.hashtags.contains(akkaTag)).map(_.author)
val phoneNumbers =
authors.mapAsync(4)(author ⇒ addressSystem.lookupPhoneNumber(author.handle))
.collect { case Some(phoneNo) ⇒ phoneNo }
authors.mapAsync(4)(author => addressSystem.lookupPhoneNumber(author.handle))
.collect { case Some(phoneNo) => phoneNo }
//#blocking-map
val send = Flow[String]
.map { phoneNo ⇒
.map { phoneNo =>
smsServer.send(TextMessage(to = phoneNo, body = "I like your tweet"))
}
.withAttributes(ActorAttributes.dispatcher("blocking-dispatcher"))
@@ -393,7 +393,7 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
implicit val timeout = Timeout(3.seconds)
val saveTweets: RunnableGraph[NotUsed] =
akkaTweets
.mapAsync(4)(tweet ⇒ database ? Save(tweet))
.mapAsync(4)(tweet => database ? Save(tweet))
.to(Sink.ignore)
//#save-tweets
@@ -423,9 +423,9 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
ActorMaterializerSettings(system).withInputBuffer(initialSize = 4, maxSize = 4))
Source(List("a", "B", "C", "D", "e", "F", "g", "H", "i", "J"))
.map(elem { println(s"before: $elem"); elem })
.map(elem => { println(s"before: $elem"); elem })
.mapAsync(4)(service.convert)
.runForeach(elem println(s"after: $elem"))
.runForeach(elem => println(s"after: $elem"))
//#sometimes-slow-mapAsync
probe.expectMsg("after: A")
@@ -455,9 +455,9 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
ActorMaterializerSettings(system).withInputBuffer(initialSize = 4, maxSize = 4))
Source(List("a", "B", "C", "D", "e", "F", "g", "H", "i", "J"))
.map(elem { println(s"before: $elem"); elem })
.map(elem => { println(s"before: $elem"); elem })
.mapAsyncUnordered(4)(service.convert)
.runForeach(elem println(s"after: $elem"))
.runForeach(elem => println(s"after: $elem"))
//#sometimes-slow-mapAsyncUnordered
probe.receiveN(10).toSet should be(Set(
@@ -481,19 +481,19 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
val queue = Source
.queue[Int](bufferSize, OverflowStrategy.backpressure)
.throttle(elementsToProcess, 3.second)
.map(x ⇒ x * x)
.toMat(Sink.foreach(x ⇒ println(s"completed $x")))(Keep.left)
.map(x => x * x)
.toMat(Sink.foreach(x => println(s"completed $x")))(Keep.left)
.run()
val source = Source(1 to 10)
implicit val ec = system.dispatcher
source.mapAsync(1)(x ⇒ {
source.mapAsync(1)(x => {
queue.offer(x).map {
case QueueOfferResult.Enqueued ⇒ println(s"enqueued $x")
case QueueOfferResult.Dropped ⇒ println(s"dropped $x")
case QueueOfferResult.Failure(ex) ⇒ println(s"Offer failed ${ex.getMessage}")
case QueueOfferResult.QueueClosed ⇒ println("Source Queue closed")
case QueueOfferResult.Enqueued => println(s"enqueued $x")
case QueueOfferResult.Dropped => println(s"dropped $x")
case QueueOfferResult.Failure(ex) => println(s"Offer failed ${ex.getMessage}")
case QueueOfferResult.QueueClosed => println("Source Queue closed")
}
}).runWith(Sink.ignore)
//#source-queue
@@ -505,8 +505,8 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
val ref = Source
.actorRef[Int](bufferSize, OverflowStrategy.fail) // note: backpressure is not supported
.map(x ⇒ x * x)
.toMat(Sink.foreach(x ⇒ println(s"completed $x")))(Keep.left)
.map(x => x * x)
.toMat(Sink.foreach(x => println(s"completed $x")))(Keep.left)
.run()
ref ! 1

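The actor-ref source hunk ends at `ref ! 1`; a hedged sketch of how such a materialized `ActorRef` is typically driven to completion (the extra messages are illustrative, not the file's code):

```scala
ref ! 2
ref ! 3
// A Source.actorRef stream completes when the materialized ActorRef receives
// akka.actor.Status.Success, and fails on akka.actor.Status.Failure.
ref ! akka.actor.Status.Success("done")
```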
View file

@@ -16,7 +16,7 @@ class MigrationsScala extends AkkaSpec {
Flow[Int].expand(Iterator.continually(_))
//#expand-continually
//#expand-state
Flow[Int].expand(i ⇒ {
Flow[Int].expand(i => {
var state = 0
Iterator.continually({
state += 1

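The expand-state snippet is cut off by the diff view. A sketch of how such a stateful `expand` plausibly completes (the emitted `(i, state)` pair is an assumption based on the pattern, not the file's exact code):

```scala
// Each upstream element i is expanded into an unbounded stream of (i, n)
// pairs, where n counts how often i has been repeated so far.
Flow[Int].expand(i => {
  var state = 0
  Iterator.continually({
    state += 1
    (i, state)
  })
})
```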
View file

@@ -43,15 +43,15 @@ class QuickStartDocSpec extends WordSpec with BeforeAndAfterAll with ScalaFuture
//#create-source
//#run-source
source.runForeach(i ⇒ println(i))(materializer)
source.runForeach(i => println(i))(materializer)
//#run-source
//#transform-source
val factorials = source.scan(BigInt(1))((acc, next) ⇒ acc * next)
val factorials = source.scan(BigInt(1))((acc, next) => acc * next)
val result: Future[IOResult] =
factorials
.map(num ⇒ ByteString(s"$num\n"))
.map(num => ByteString(s"$num\n"))
.runWith(FileIO.toPath(Paths.get("factorials.txt")))
//#transform-source
@@ -61,7 +61,7 @@ class QuickStartDocSpec extends WordSpec with BeforeAndAfterAll with ScalaFuture
//#add-streams
factorials
.zipWith(Source(0 to 100))((num, idx) ⇒ s"$idx! = $num")
.zipWith(Source(0 to 100))((num, idx) => s"$idx! = $num")
.throttle(1, 1.second)
//#add-streams
.take(3)
@@ -70,10 +70,10 @@ class QuickStartDocSpec extends WordSpec with BeforeAndAfterAll with ScalaFuture
//#add-streams
//#run-source-and-terminate
val done: Future[Done] = source.runForeach(i ⇒ println(i))(materializer)
val done: Future[Done] = source.runForeach(i => println(i))(materializer)
implicit val ec = system.dispatcher
done.onComplete(_ ⇒ system.terminate())
done.onComplete(_ => system.terminate())
//#run-source-and-terminate
done.futureValue
@@ -82,7 +82,7 @@ class QuickStartDocSpec extends WordSpec with BeforeAndAfterAll with ScalaFuture
//#transform-sink
def lineSink(filename: String): Sink[String, Future[IOResult]] =
Flow[String]
.map(s ⇒ ByteString(s + "\n"))
.map(s => ByteString(s + "\n"))
.toMat(FileIO.toPath(Paths.get(filename)))(Keep.right)
//#transform-sink

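For context, a hedged usage sketch of the `lineSink` helper above, reusing the `factorials` source defined earlier in the same spec (the file name is chosen for illustration):

```scala
// Each factorial is rendered as a line and written through the reusable sink
factorials.map(_.toString).runWith(lineSink("factorial2.txt"))
```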
View file

@@ -24,15 +24,15 @@ class RateTransformationDocSpec extends AkkaSpec {
//#conflate-summarize
val statsFlow = Flow[Double]
.conflateWithSeed(immutable.Seq(_))(_ :+ _)
.map { s ⇒
.map { s =>
val μ = s.sum / s.size
val se = s.map(x ⇒ pow(x - μ, 2))
val se = s.map(x => pow(x - μ, 2))
val σ = sqrt(se.sum / se.size)
(σ, μ, s.size)
}
//#conflate-summarize
val fut = Source.fromIterator(() ⇒ Iterator.continually(Random.nextGaussian))
val fut = Source.fromIterator(() => Iterator.continually(Random.nextGaussian))
.via(statsFlow)
.grouped(10)
.runWith(Sink.head)
@@ -45,8 +45,8 @@ class RateTransformationDocSpec extends AkkaSpec {
val p = 0.01
val sampleFlow = Flow[Double]
.conflateWithSeed(immutable.Seq(_)) {
case (acc, elem) if Random.nextDouble < p ⇒ acc :+ elem
case (acc, _) ⇒ acc
case (acc, elem) if Random.nextDouble < p => acc :+ elem
case (acc, _) => acc
}
.mapConcat(identity)
//#conflate-sample
@@ -97,11 +97,11 @@ class RateTransformationDocSpec extends AkkaSpec {
"extrapolate should track drift" in {
//#extrapolate-drift
val driftFlow = Flow[Double].map(_ -> 0)
.extrapolate[(Double, Int)] { case (i, _) ⇒ Iterator.from(1).map(i -> _) }
.extrapolate[(Double, Int)] { case (i, _) => Iterator.from(1).map(i -> _) }
//#extrapolate-drift
val latch = TestLatch(2)
val realDriftFlow = Flow[Double].map(d ⇒ { latch.countDown(); d -> 0; })
.extrapolate[(Double, Int)] { case (d, _) ⇒ latch.countDown(); Iterator.from(1).map(d -> _) }
val realDriftFlow = Flow[Double].map(d => { latch.countDown(); d -> 0; })
.extrapolate[(Double, Int)] { case (d, _) => latch.countDown(); Iterator.from(1).map(d -> _) }
val (pub, sub) = TestSource.probe[Double]
.via(realDriftFlow)
@@ -123,11 +123,11 @@ class RateTransformationDocSpec extends AkkaSpec {
"expand should track drift" in {
//#expand-drift
val driftFlow = Flow[Double]
.expand(i ⇒ Iterator.from(0).map(i -> _))
.expand(i => Iterator.from(0).map(i -> _))
//#expand-drift
val latch = TestLatch(2)
val realDriftFlow = Flow[Double]
.expand(d ⇒ { latch.countDown(); Iterator.from(0).map(d -> _) })
.expand(d => { latch.countDown(); Iterator.from(0).map(d -> _) })
val (pub, sub) = TestSource.probe[Double]
.via(realDriftFlow)

View file

@@ -142,7 +142,7 @@ class ReactiveStreamsDocSpec extends AkkaSpec {
// An example Processor factory
def createProcessor: Processor[Int, Int] = Flow[Int].toProcessor.run()
val flow: Flow[Int, Int, NotUsed] = Flow.fromProcessor(() ⇒ createProcessor)
val flow: Flow[Int, Int, NotUsed] = Flow.fromProcessor(() => createProcessor)
//#use-processor
}

View file

@@ -40,7 +40,7 @@ class RestartDocSpec extends AkkaSpec with CompileOnlySpec {
maxBackoff = 30.seconds,
randomFactor = 0.2, // adds 20% "noise" to vary the intervals slightly
maxRestarts = 20 // limits the amount of restarts to 20
) { () ⇒
) { () =>
// Create a source from a future of a source
Source.fromFutureSource {
// Make a single request with akka-http
@@ -56,7 +56,7 @@ class RestartDocSpec extends AkkaSpec with CompileOnlySpec {
//#with-kill-switch
val killSwitch = restartSource
.viaMat(KillSwitches.single)(Keep.right)
.toMat(Sink.foreach(event ⇒ println(s"Got event: $event")))(Keep.left)
.toMat(Sink.foreach(event => println(s"Got event: $event")))(Keep.left)
.run()
doSomethingElse()

View file

@@ -16,9 +16,9 @@ class StreamBuffersRateSpec extends AkkaSpec {
def println(s: Any) = ()
//#pipelining
Source(1 to 3)
.map { i println(s"A: $i"); i }.async
.map { i println(s"B: $i"); i }.async
.map { i println(s"C: $i"); i }.async
.map { i => println(s"A: $i"); i }.async
.map { i => println(s"B: $i"); i }.async
.map { i => println(s"C: $i"); i }.async
.runWith(Sink.ignore)
//#pipelining
}
@@ -44,16 +44,16 @@ class StreamBuffersRateSpec extends AkkaSpec {
import scala.concurrent.duration._
case class Tick()
RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒
RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
// this is the asynchronous stage in this graph
val zipper = b.add(ZipWith[Tick, Int, Int]((tick, count) ⇒ count).async)
val zipper = b.add(ZipWith[Tick, Int, Int]((tick, count) => count).async)
Source.tick(initialDelay = 3.second, interval = 3.second, Tick()) ~> zipper.in0
Source.tick(initialDelay = 1.second, interval = 1.second, "message!")
.conflateWithSeed(seed = (_) ⇒ 1)((count, _) ⇒ count + 1) ~> zipper.in1
.conflateWithSeed(seed = (_) => 1)((count, _) => count + 1) ~> zipper.in1
zipper.out ~> Sink.foreach(println)
ClosedShape

View file

@@ -20,7 +20,7 @@ class StreamPartialGraphDSLDocSpec extends AkkaSpec {
"build with open ports" in {
//#simple-partial-graph-dsl
val pickMaxOfThree = GraphDSL.create() { implicit b ⇒
val pickMaxOfThree = GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val zip1 = b.add(ZipWith[Int, Int, Int](math.max _))
@@ -32,7 +32,7 @@ class StreamPartialGraphDSLDocSpec extends AkkaSpec {
val resultSink = Sink.head[Int]
val g = RunnableGraph.fromGraph(GraphDSL.create(resultSink) { implicit b ⇒ sink ⇒
val g = RunnableGraph.fromGraph(GraphDSL.create(resultSink) { implicit b => sink =>
import GraphDSL.Implicits._
// importing the partial graph will return its shape (inlets & outlets)
@@ -52,12 +52,12 @@ class StreamPartialGraphDSLDocSpec extends AkkaSpec {
"build source from partial graph" in {
//#source-from-partial-graph-dsl
val pairs = Source.fromGraph(GraphDSL.create() { implicit b ⇒
val pairs = Source.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
// prepare graph elements
val zip = b.add(Zip[Int, Int]())
def ints = Source.fromIterator(() ⇒ Iterator.from(1))
def ints = Source.fromIterator(() => Iterator.from(1))
// connect the graph
ints.filter(_ % 2 != 0) ~> zip.in0
@@ -75,7 +75,7 @@ class StreamPartialGraphDSLDocSpec extends AkkaSpec {
"build flow from partial graph" in {
//#flow-from-partial-graph-dsl
val pairUpWithToString =
Flow.fromGraph(GraphDSL.create() { implicit b ⇒
Flow.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
// prepare graph elements
@@ -117,7 +117,7 @@ class StreamPartialGraphDSLDocSpec extends AkkaSpec {
val actorRef: ActorRef = testActor
//#sink-combine
val sendRmotely = Sink.actorRef(actorRef, "Done")
val localProcessing = Sink.foreach[Int](_ ⇒ /* do something useful */ ())
val localProcessing = Sink.foreach[Int](_ => /* do something useful */ ())
val sink = Sink.combine(sendRmotely, localProcessing)(Broadcast[Int](_))

View file

@@ -138,7 +138,7 @@ class StreamTestKitDocSpec extends AkkaSpec {
"test source and a sink" in {
import system.dispatcher
//#test-source-and-sink
val flowUnderTest = Flow[Int].mapAsyncUnordered(2) { sleep ⇒
val flowUnderTest = Flow[Int].mapAsyncUnordered(2) { sleep =>
pattern.after(10.millis * sleep, using = system.scheduler)(Future.successful(sleep))
}

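Only the flow under test survives in this hunk; a hedged sketch of how it would be exercised with the stream testkit (probe wiring assumed, not the file's literal assertions):

```scala
// Drive the flow with testkit probes; mapAsyncUnordered does not preserve
// ordering, so the results are asserted as an unordered set.
val (pub, sub) = TestSource.probe[Int]
  .via(flowUnderTest)
  .toMat(TestSink.probe[Int])(Keep.both)
  .run()

sub.request(3)
pub.sendNext(3).sendNext(2).sendNext(1)
sub.expectNextUnordered(1, 2, 3)
pub.sendComplete()
sub.expectComplete()
```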
View file

@@ -57,7 +57,7 @@ class SubstreamDocSpec extends AkkaSpec {
val charCount = Source(text.toList)
.splitAfter { _ == '\n' }
.filter(_ != '\n')
.map(_ ⇒ 1)
.map(_ => 1)
.reduce(_ + _)
.to(Sink.foreach(println))
.run()
@@ -67,13 +67,13 @@ class SubstreamDocSpec extends AkkaSpec {
"generate substreams by flatMapConcat and flatMapMerge" in {
//#flatMapConcat
Source(1 to 2)
.flatMapConcat(i ⇒ Source(List.fill(3)(i)))
.flatMapConcat(i => Source(List.fill(3)(i)))
.runWith(Sink.ignore)
//#flatMapConcat
//#flatMapMerge
Source(1 to 2)
.flatMapMerge(2, i ⇒ Source(List.fill(3)(i)))
.flatMapMerge(2, i => Source(List.fill(3)(i)))
.runWith(Sink.ignore)
//#flatMapMerge
}

View file

@@ -32,7 +32,7 @@ object TwitterStreamQuickstartDocSpec {
final case class Tweet(author: Author, timestamp: Long, body: String) {
def hashtags: Set[Hashtag] = body.split(" ").collect {
case t if t.startsWith("#") Hashtag(t.replaceAll("[^#\\w]", ""))
case t if t.startsWith("#") => Hashtag(t.replaceAll("[^#\\w]", ""))
}.toSet
}
@@ -100,7 +100,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
trait Example3 {
//#authors-collect
val authors: Source[Author, NotUsed] =
tweets.collect { case t if t.hashtags.contains(akkaTag) ⇒ t.author }
tweets.collect { case t if t.hashtags.contains(akkaTag) => t.author }
//#authors-collect
}
@@ -185,8 +185,8 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
//#backpressure-by-readline
val completion: Future[Done] =
Source(1 to 10)
.map(i { println(s"map => $i"); i })
.runForeach { i readLine(s"Element = $i; continue reading? [press enter]\n") }
.map(i => { println(s"map => $i"); i })
.runForeach { i => readLine(s"Element = $i; continue reading? [press enter]\n") }
Await.ready(completion, 1.minute)
//#backpressure-by-readline
@@ -195,7 +195,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
"count elements on finite stream" in {
//#tweets-fold-count
val count: Flow[Tweet, Int, NotUsed] = Flow[Tweet].map(_ ⇒ 1)
val count: Flow[Tweet, Int, NotUsed] = Flow[Tweet].map(_ => 1)
val sumSink: Sink[Int, Future[Int]] = Sink.fold[Int, Int](0)(_ + _)
@@ -206,12 +206,12 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
val sum: Future[Int] = counterGraph.run()
sum.foreach(c ⇒ println(s"Total tweets processed: $c"))
sum.foreach(c => println(s"Total tweets processed: $c"))
//#tweets-fold-count
new AnyRef {
//#tweets-fold-count-oneline
val sum: Future[Int] = tweets.map(t ⇒ 1).runWith(sumSink)
val sum: Future[Int] = tweets.map(t => 1).runWith(sumSink)
//#tweets-fold-count-oneline
}
}
@@ -224,7 +224,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
val counterRunnableGraph: RunnableGraph[Future[Int]] =
tweetsInMinuteFromNow
.filter(_.hashtags contains akkaTag)
.map(t ⇒ 1)
.map(t => 1)
.toMat(sumSink)(Keep.right)
// materialize the stream once in the morning
@ -236,7 +236,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
val sum: Future[Int] = counterRunnableGraph.run()
sum.map { c ⇒ println(s"Total tweets processed: $c") }
sum.map { c => println(s"Total tweets processed: $c") }
}
}

View file

@@ -19,9 +19,9 @@ class RecipeAdhocSource extends RecipeSpec {
//#adhoc-source
def adhocSource[T](source: Source[T, _], timeout: FiniteDuration, maxRetries: Int): Source[T, _] =
Source.lazily(
() ⇒ source.backpressureTimeout(timeout).recoverWithRetries(maxRetries, {
case t: TimeoutException ⇒
Source.lazily(() ⇒ source.backpressureTimeout(timeout)).mapMaterializedValue(_ ⇒ NotUsed)
() => source.backpressureTimeout(timeout).recoverWithRetries(maxRetries, {
case t: TimeoutException =>
Source.lazily(() => source.backpressureTimeout(timeout)).mapMaterializedValue(_ => NotUsed)
})
)
//#adhoc-source
@@ -29,7 +29,7 @@ class RecipeAdhocSource extends RecipeSpec {
"Recipe for adhoc source" must {
"not start the source if there is no demand" taggedAs TimingTest in {
val isStarted = new AtomicBoolean()
adhocSource(Source.empty.mapMaterializedValue(_ ⇒ isStarted.set(true)), 200.milliseconds, 3)
adhocSource(Source.empty.mapMaterializedValue(_ => isStarted.set(true)), 200.milliseconds, 3)
.runWith(TestSink.probe[Int])
Thread.sleep(300)
isStarted.get() should be(false)
@@ -44,7 +44,7 @@ class RecipeAdhocSource extends RecipeSpec {
"shut down the source when the next demand times out" taggedAs TimingTest in {
val shutdown = Promise[Done]()
val sink = adhocSource(
Source.repeat("a").watchTermination() { (_, term)
Source.repeat("a").watchTermination() { (_, term) =>
shutdown.completeWith(term)
}, 200.milliseconds, 3)
.runWith(TestSink.probe[String])
@@ -57,7 +57,7 @@ class RecipeAdhocSource extends RecipeSpec {
"not shut down the source when there are still demands" taggedAs TimingTest in {
val shutdown = Promise[Done]()
val sink = adhocSource(
Source.repeat("a").watchTermination() { (_, term)
Source.repeat("a").watchTermination() { (_, term) =>
shutdown.completeWith(term)
}, 200.milliseconds, 3)
.runWith(TestSink.probe[String])
@@ -81,10 +81,10 @@ class RecipeAdhocSource extends RecipeSpec {
val startedCount = new AtomicInteger(0)
val source = Source
.empty.mapMaterializedValue(_ ⇒ startedCount.incrementAndGet())
.empty.mapMaterializedValue(_ => startedCount.incrementAndGet())
.concat(Source.repeat("a"))
val sink = adhocSource(source.watchTermination() { (_, term) ⇒
val sink = adhocSource(source.watchTermination() { (_, term) =>
shutdown.completeWith(term)
}, 200.milliseconds, 3)
.runWith(TestSink.probe[String])
@@ -100,10 +100,10 @@ class RecipeAdhocSource extends RecipeSpec {
val startedCount = new AtomicInteger(0)
val source = Source
.empty.mapMaterializedValue(_ ⇒ startedCount.incrementAndGet())
.empty.mapMaterializedValue(_ => startedCount.incrementAndGet())
.concat(Source.repeat("a"))
val sink = adhocSource(source.watchTermination() { (_, term) ⇒
val sink = adhocSource(source.watchTermination() { (_, term) =>
shutdown.completeWith(term)
}, 200.milliseconds, 3)
.runWith(TestSink.probe[String])

View file

@@ -24,7 +24,7 @@ class RecipeDroppyBroadcast extends RecipeSpec {
val mySink3 = Sink.fromSubscriber(sub3)
//#droppy-bcast
val graph = RunnableGraph.fromGraph(GraphDSL.create(mySink1, mySink2, mySink3)((_, _, _)) { implicit b ⇒ (sink1, sink2, sink3) ⇒
val graph = RunnableGraph.fromGraph(GraphDSL.create(mySink1, mySink2, mySink3)((_, _, _)) { implicit b => (sink1, sink2, sink3) =>
import GraphDSL.Implicits._
val bcast = b.add(Broadcast[Int](3))
@@ -40,7 +40,7 @@ class RecipeDroppyBroadcast extends RecipeSpec {
graph.run()
sub3.request(100)
for (i ← 1 to 100) {
for (i <- 1 to 100) {
pub.sendNext(i)
sub3.expectNext(i)
}
@@ -50,7 +50,7 @@ class RecipeDroppyBroadcast extends RecipeSpec {
sub1.expectSubscription().request(10)
sub2.expectSubscription().request(10)
for (i ← 91 to 100) {
for (i <- 91 to 100) {
sub1.expectNext(i)
sub2.expectNext(i)
}

View file

@@ -49,19 +49,19 @@ class RecipeGlobalRateLimit extends RecipeSpec {
override def receive: Receive = open
val open: Receive = {
case ReplenishTokens ⇒
case ReplenishTokens =>
permitTokens = math.min(permitTokens + tokenRefreshAmount, maxAvailableTokens)
case WantToPass ⇒
case WantToPass =>
permitTokens -= 1
sender() ! MayPass
if (permitTokens == 0) context.become(closed)
}
val closed: Receive = {
case ReplenishTokens ⇒
case ReplenishTokens =>
permitTokens = math.min(permitTokens + tokenRefreshAmount, maxAvailableTokens)
releaseWaiting()
case WantToPass ⇒
case WantToPass =>
waitQueue = waitQueue.enqueue(sender())
}
@@ -86,11 +86,11 @@ class RecipeGlobalRateLimit extends RecipeSpec {
def limitGlobal[T](limiter: ActorRef, maxAllowedWait: FiniteDuration): Flow[T, T, NotUsed] = {
import akka.pattern.ask
import akka.util.Timeout
Flow[T].mapAsync(4)((element: T) ⇒ {
Flow[T].mapAsync(4)((element: T) => {
import system.dispatcher
implicit val triggerTimeout = Timeout(maxAllowedWait)
val limiterTriggerFuture = limiter ? Limiter.WantToPass
limiterTriggerFuture.map((_) ⇒ element)
limiterTriggerFuture.map((_) => element)
})
}
@@ -99,12 +99,12 @@ class RecipeGlobalRateLimit extends RecipeSpec {
// Use a large period and emulate the timer by hand instead
val limiter = system.actorOf(Limiter.props(2, 100.days, 1), "limiter")
val source1 = Source.fromIterator(() Iterator.continually("E1")).via(limitGlobal(limiter, 2.seconds.dilated))
val source2 = Source.fromIterator(() Iterator.continually("E2")).via(limitGlobal(limiter, 2.seconds.dilated))
val source1 = Source.fromIterator(() => Iterator.continually("E1")).via(limitGlobal(limiter, 2.seconds.dilated))
val source2 = Source.fromIterator(() => Iterator.continually("E2")).via(limitGlobal(limiter, 2.seconds.dilated))
val probe = TestSubscriber.manualProbe[String]()
RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒
RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val merge = b.add(Merge[String](2))
source1 ~> merge ~> Sink.fromSubscriber(probe)
@@ -123,7 +123,7 @@ class RecipeGlobalRateLimit extends RecipeSpec {
probe.expectNoMsg(500.millis)
var resultSet = Set.empty[String]
for (_ ← 1 to 100) {
for (_ <- 1 to 100) {
limiter ! Limiter.ReplenishTokens
resultSet += probe.expectNext()
}

View file

@@ -18,7 +18,7 @@ class RecipeKeepAlive extends RecipeSpec {
//#inject-keepalive
import scala.concurrent.duration._
val injectKeepAlive: Flow[ByteString, ByteString, NotUsed] =
Flow[ByteString].keepAlive(1.second, () ⇒ keepaliveMessage)
Flow[ByteString].keepAlive(1.second, () => keepaliveMessage)
//#inject-keepalive
// No need to test, this is a built-in stage with proper tests

View file

@@ -20,7 +20,7 @@ class RecipeLoggingElements extends RecipeSpec {
val mySource = Source(List("1", "2", "3"))
//#println-debug
val loggedSource = mySource.map { elem ⇒ println(elem); elem }
val loggedSource = mySource.map { elem => println(elem); elem }
//#println-debug
loggedSource.runWith(Sink.ignore)

View file

@@ -23,12 +23,12 @@ class RecipeManualTrigger extends RecipeSpec {
val sink = Sink.fromSubscriber(sub)
//#manually-triggered-stream
val graph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder ⇒
val graph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
import GraphDSL.Implicits._
val zip = builder.add(Zip[Message, Trigger]())
elements ~> zip.in0
triggerSource ~> zip.in1
zip.out ~> Flow[(Message, Trigger)].map { case (msg, trigger) ⇒ msg } ~> sink
zip.out ~> Flow[(Message, Trigger)].map { case (msg, trigger) => msg } ~> sink
ClosedShape
})
//#manually-triggered-stream
@@ -62,9 +62,9 @@ class RecipeManualTrigger extends RecipeSpec {
val sink = Sink.fromSubscriber(sub)
//#manually-triggered-stream-zipwith
val graph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder ⇒
val graph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
import GraphDSL.Implicits._
val zip = builder.add(ZipWith((msg: Message, trigger: Trigger) ⇒ msg))
val zip = builder.add(ZipWith((msg: Message, trigger: Trigger) => msg))
elements ~> zip.in0
triggerSource ~> zip.in1

View file

@@ -25,13 +25,13 @@ class RecipeMissedTicks extends RecipeSpec {
//#missed-ticks
val missedTicks: Flow[Tick, Int, NotUsed] =
Flow[Tick].conflateWithSeed(seed = (_) ⇒ 0)(
(missedTicks, tick) ⇒ missedTicks + 1)
Flow[Tick].conflateWithSeed(seed = (_) => 0)(
(missedTicks, tick) => missedTicks + 1)
//#missed-ticks
val latch = TestLatch(3)
val realMissedTicks: Flow[Tick, Int, NotUsed] =
Flow[Tick].conflateWithSeed(seed = (_) ⇒ 0)(
(missedTicks, tick) ⇒ { latch.countDown(); missedTicks + 1 })
Flow[Tick].conflateWithSeed(seed = (_) => 0)(
(missedTicks, tick) => { latch.countDown(); missedTicks + 1 })
tickStream.via(realMissedTicks).to(sink).run()

View file

@@ -20,15 +20,15 @@ class RecipeMultiGroupBy extends RecipeSpec {
case class Topic(name: String)
val elems = Source(List("1: a", "1: b", "all: c", "all: d", "1: e"))
val extractTopics = { msg: Message ⇒
val extractTopics = { msg: Message =>
if (msg.startsWith("1")) List(Topic("1"))
else List(Topic("1"), Topic("2"))
}
//#multi-groupby
val topicMapper: (Message) ⇒ immutable.Seq[Topic] = extractTopics
val topicMapper: (Message) => immutable.Seq[Topic] = extractTopics
val messageAndTopic: Source[(Message, Topic), NotUsed] = elems.mapConcat { msg: Message ⇒
val messageAndTopic: Source[(Message, Topic), NotUsed] = elems.mapConcat { msg: Message =>
val topicsForMessage = topicMapper(msg)
// Create a (Msg, Topic) pair for each of the topics
// the message belongs to
@@ -37,7 +37,7 @@ class RecipeMultiGroupBy extends RecipeSpec {
val multiGroups = messageAndTopic
.groupBy(2, _._2).map {
case (msg, topic) ⇒
case (msg, topic) =>
// do what needs to be done
//#multi-groupby
(msg, topic)
@@ -48,7 +48,7 @@ class RecipeMultiGroupBy extends RecipeSpec {
val result = multiGroups
.grouped(10)
.mergeSubstreams
.map(g ⇒ g.head._2.name + g.map(_._1).mkString("[", ", ", "]"))
.map(g => g.head._2.name + g.map(_._1).mkString("[", ", ", "]"))
.limit(10)
.runWith(Sink.seq)

View file

@@ -26,7 +26,7 @@ class RecipeReduceByKey extends RecipeSpec {
//transform each element to pair with number of words in it
.map(_ -> 1)
// add counting logic to the streams
.reduce((l, r) ⇒ (l._1, l._2 + r._2))
.reduce((l, r) => (l._1, l._2 + r._2))
// get a stream of word counts
.mergeSubstreams
//#word-count
@@ -47,21 +47,21 @@ class RecipeReduceByKey extends RecipeSpec {
//#reduce-by-key-general
def reduceByKey[In, K, Out](
maximumGroupSize: Int,
groupKey: (In) ⇒ K,
map: (In) ⇒ Out)(reduce: (Out, Out) ⇒ Out): Flow[In, (K, Out), NotUsed] = {
groupKey: (In) => K,
map: (In) => Out)(reduce: (Out, Out) => Out): Flow[In, (K, Out), NotUsed] = {
Flow[In]
.groupBy[K](maximumGroupSize, groupKey)
.map(e ⇒ groupKey(e) -> map(e))
.reduce((l, r) ⇒ l._1 -> reduce(l._2, r._2))
.map(e => groupKey(e) -> map(e))
.reduce((l, r) => l._1 -> reduce(l._2, r._2))
.mergeSubstreams
}
val wordCounts = words.via(
reduceByKey(
MaximumDistinctWords,
groupKey = (word: String) ⇒ word,
map = (word: String) ⇒ 1)((left: Int, right: Int) ⇒ left + right))
groupKey = (word: String) => word,
map = (word: String) => 1)((left: Int, right: Int) => left + right))
//#reduce-by-key-general
Await.result(wordCounts.limit(10).runWith(Sink.seq), 3.seconds).toSet should be(Set(

View file

@@ -19,11 +19,11 @@ class RecipeSimpleDrop extends RecipeSpec {
//#simple-drop
val droppyStream: Flow[Message, Message, NotUsed] =
Flow[Message].conflate((lastMessage, newMessage) ⇒ newMessage)
Flow[Message].conflate((lastMessage, newMessage) => newMessage)
//#simple-drop
val latch = TestLatch(2)
val realDroppyStream =
Flow[Message].conflate((lastMessage, newMessage) ⇒ { latch.countDown(); newMessage })
Flow[Message].conflate((lastMessage, newMessage) => { latch.countDown(); newMessage })
val pub = TestPublisher.probe[Message]()
val sub = TestSubscriber.manualProbe[Message]()

View file

@@ -17,7 +17,7 @@ class RecipeSourceFromFunction extends RecipeSpec {
def builderFunction(): String = UUID.randomUUID.toString
//#source-from-function
val source = Source.repeat(NotUsed).map(_ ⇒ builderFunction())
val source = Source.repeat(NotUsed).map(_ => builderFunction())
//#source-from-function
val f = source.take(2).runWith(Sink.seq)

View file

@@ -25,11 +25,11 @@ class RecipeWorkerPool extends RecipeSpec {
def balancer[In, Out](worker: Flow[In, Out, Any], workerCount: Int): Flow[In, Out, NotUsed] = {
import GraphDSL.Implicits._
Flow.fromGraph(GraphDSL.create() { implicit b ⇒
Flow.fromGraph(GraphDSL.create() { implicit b =>
val balancer = b.add(Balance[In](workerCount, waitForAllDownstreams = true))
val merge = b.add(Merge[Out](workerCount))
for (_ ← 1 to workerCount) {
for (_ <- 1 to workerCount) {
// for each worker, add an edge from the balancer to the worker, then wire
// it to the merge element
balancer ~> worker.async ~> merge

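The `balancer` body above is cut off before its shape is returned; a self-contained sketch of the complete pattern (the closing `FlowShape` line is assumed from the pattern, not copied from the file):

```scala
def balancerSketch[In, Out](worker: Flow[In, Out, Any], workerCount: Int): Flow[In, Out, NotUsed] = {
  import GraphDSL.Implicits._
  Flow.fromGraph(GraphDSL.create() { implicit b =>
    val balance = b.add(Balance[In](workerCount, waitForAllDownstreams = true))
    val merge = b.add(Merge[Out](workerCount))
    for (_ <- 1 to workerCount) {
      // one async worker per balancer outlet, merged back into a single stream
      balance ~> worker.async ~> merge
    }
    // expose the balancer's inlet and the merge's outlet as this Flow's shape
    FlowShape(balance.in, merge.out)
  })
}
```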
View file

@@ -69,7 +69,7 @@ class StreamFileDocSpec extends AkkaSpec(UnboundedMailboxConfig) {
//#file-sink
val text = Source.single("Hello Akka Stream!")
val result: Future[IOResult] = text
.map(t ⇒ ByteString(t))
.map(t => ByteString(t))
.runWith(FileIO.toPath(file))
//#file-sink
}

View file

@@ -30,9 +30,9 @@ class StreamTcpDocSpec extends AkkaSpec {
val binding: Future[ServerBinding] =
Tcp().bind("127.0.0.1", 8888).to(Sink.ignore).run()
binding.map { b ⇒
binding.map { b =>
b.unbind() onComplete {
case _ ⇒ // ...
case _ => // ...
}
}
//#echo-server-simple-bind
@@ -44,7 +44,7 @@ class StreamTcpDocSpec extends AkkaSpec {
val connections: Source[IncomingConnection, Future[ServerBinding]] =
Tcp().bind(host, port)
connections runForeach { connection ⇒
connections runForeach { connection =>
println(s"New connection from: ${connection.remoteAddress}")
val echo = Flow[ByteString]
@@ -71,7 +71,7 @@ class StreamTcpDocSpec extends AkkaSpec {
import akka.stream.scaladsl.Framing
val binding =
//#welcome-banner-chat-server
connections.to(Sink.foreach { connection ⇒
connections.to(Sink.foreach { connection =>
// server logic, parses incoming commands
val commandParser = Flow[String].takeWhile(_ != "BYE").map(_ + "!")
@@ -87,7 +87,7 @@ class StreamTcpDocSpec extends AkkaSpec {
allowTruncation = true))
.map(_.utf8String)
//#welcome-banner-chat-server
.map { command ⇒ serverProbe.ref ! command; command }
.map { command => serverProbe.ref ! command; command }
//#welcome-banner-chat-server
.via(commandParser)
// merge in the initial banner after parser
@@ -107,8 +107,8 @@ class StreamTcpDocSpec extends AkkaSpec {
val input = new AtomicReference("Hello world" :: "What a lovely day" :: Nil)
def readLine(prompt: String): String = {
input.get() match {
case all @ cmd :: tail if input.compareAndSet(all, tail) ⇒ cmd
case _ ⇒ "q"
case all @ cmd :: tail if input.compareAndSet(all, tail) => cmd
case _ => "q"
}
}
@@ -126,7 +126,7 @@ class StreamTcpDocSpec extends AkkaSpec {
val replParser =
Flow[String].takeWhile(_ != "q")
.concat(Source.single("BYE"))
.map(elem ⇒ ByteString(s"$elem\n"))
.map(elem => ByteString(s"$elem\n"))
val repl = Flow[ByteString]
.via(Framing.delimiter(
@@ -134,8 +134,8 @@ class StreamTcpDocSpec extends AkkaSpec {
maximumFrameLength = 256,
allowTruncation = true))
.map(_.utf8String)
.map(text println("Server: " + text))
.map(_ readLine("> "))
.map(text => println("Server: " + text))
.map(_ => readLine("> "))
.via(replParser)
val connected = connection.join(repl).run()

View file

@@ -14,6 +14,6 @@ object Map {
//#map
val source: Source[Int, NotUsed] = Source(1 to 10)
val mapped: Source[String, NotUsed] = source.map(elem ⇒ elem.toString)
val mapped: Source[String, NotUsed] = source.map(elem => elem.toString)
//#map
}

View file

@@ -23,7 +23,7 @@ object SourceOperators {
implicit val materializer: ActorMaterializer = ActorMaterializer()
val source: Source[Int, NotUsed] = Source.fromFuture(Future.successful(10))
val sink: Sink[Int, Future[Done]] = Sink.foreach((i: Int) ⇒ println(i))
val sink: Sink[Int, Future[Done]] = Sink.foreach((i: Int) => println(i))
val done: Future[Done] = source.runWith(sink) //10
//#sourceFromFuture

Some files were not shown because too many files have changed in this diff.