replace unicode arrows

* ⇒, →, ←
* because we don't want to show them in documentation snippets, and it's
  complicated to avoid that when the snippets are located in
  src/test/scala in individual modules
* don't replace object `→` in FSM.scala and PersistentFSM.scala
Patrik Nordwall 2019-02-09 15:25:39 +01:00
parent e4d38f92a4
commit 5c96a5f556
1521 changed files with 18846 additions and 18786 deletions
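For context, the change itself is mechanical: every Unicode arrow used as a Scala lambda, case, or generator arrow in the doc snippets is rewritten to its ASCII spelling. A minimal sketch of that per-line rewrite (a hypothetical helper, not the actual script used for this commit):

    // Hypothetical sketch of the rewrite applied across the snippets.
    // Note: a blind replace like this would also rewrite the `→` object in
    // FSM.scala and PersistentFSM.scala, which this commit deliberately skips.
    def asciiArrows(line: String): String =
      line
        .replace("⇒", "=>") // lambda / case arrow
        .replace("→", "->") // arrow association (tuple) syntax
        .replace("←", "<-") // for-comprehension generator arrow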

View file

@@ -30,9 +30,9 @@ object ActorPublisherDocSpec {
var buf = Vector.empty[Job]
def receive = {
case job: Job if buf.size == MaxBufferSize ⇒
case job: Job if buf.size == MaxBufferSize =>
sender() ! JobDenied
case job: Job ⇒
case job: Job =>
sender() ! JobAccepted
if (buf.isEmpty && totalDemand > 0)
onNext(job)
@@ -40,9 +40,9 @@ object ActorPublisherDocSpec {
buf :+= job
deliverBuf()
}
case Request(_) ⇒
case Request(_) =>
deliverBuf()
case Cancel ⇒
case Cancel =>
context.stop(self)
}
@@ -80,7 +80,7 @@ class ActorPublisherDocSpec extends AkkaSpec {
val jobManagerSource = Source.actorPublisher[JobManager.Job](JobManager.props)
val ref = Flow[JobManager.Job]
.map(_.payload.toUpperCase)
.map { elem ⇒ println(elem); elem }
.map { elem => println(elem); elem }
.to(Sink.ignore)
.runWith(jobManagerSource)

View file

@@ -49,17 +49,17 @@ object ActorSubscriberDocSpec {
}
def receive = {
case OnNext(Msg(id, replyTo)) ⇒
case OnNext(Msg(id, replyTo)) =>
queue += (id -> replyTo)
assert(queue.size <= MaxQueueSize, s"queued too many: ${queue.size}")
router.route(Work(id), self)
case Reply(id) ⇒
case Reply(id) =>
queue(id) ! Done(id)
queue -= id
if (canceled && queue.isEmpty) {
context.stop(self)
}
case OnComplete ⇒
case OnComplete =>
if (queue.isEmpty) {
context.stop(self)
}
@@ -69,7 +69,7 @@ object ActorSubscriberDocSpec {
class Worker extends Actor {
import WorkerPool._
def receive = {
case Work(id) ⇒
case Work(id) =>
// ...
sender() ! Reply(id)
}

View file

@@ -24,8 +24,8 @@ object BidiFlowDocSpec {
//#implementation-details-elided
implicit val order = ByteOrder.LITTLE_ENDIAN
msg match {
case Ping(id) ⇒ ByteString.newBuilder.putByte(1).putInt(id).result()
case Pong(id) ⇒ ByteString.newBuilder.putByte(2).putInt(id).result()
case Ping(id) => ByteString.newBuilder.putByte(1).putInt(id).result()
case Pong(id) => ByteString.newBuilder.putByte(2).putInt(id).result()
}
//#implementation-details-elided
}
@@ -35,15 +35,15 @@ object BidiFlowDocSpec {
implicit val order = ByteOrder.LITTLE_ENDIAN
val it = bytes.iterator
it.getByte match {
case 1 ⇒ Ping(it.getInt)
case 2 ⇒ Pong(it.getInt)
case other ⇒ throw new RuntimeException(s"parse error: expected 1|2 got $other")
case 1 => Ping(it.getInt)
case 2 => Pong(it.getInt)
case other => throw new RuntimeException(s"parse error: expected 1|2 got $other")
}
//#implementation-details-elided
}
//#codec-impl
val codecVerbose = BidiFlow.fromGraph(GraphDSL.create() { b ⇒
val codecVerbose = BidiFlow.fromGraph(GraphDSL.create() { b =>
// construct and add the top flow, going outbound
val outbound = b.add(Flow[Message].map(toBytes))
// construct and add the bottom flow, going inbound
@@ -57,7 +57,7 @@ object BidiFlowDocSpec {
//#codec
//#framing
val framing = BidiFlow.fromGraph(GraphDSL.create() { b ⇒
val framing = BidiFlow.fromGraph(GraphDSL.create() { b =>
implicit val order = ByteOrder.LITTLE_ENDIAN
def addLengthHeader(bytes: ByteString) = {
@@ -134,12 +134,12 @@ object BidiFlowDocSpec {
})
//#framing
val chopUp = BidiFlow.fromGraph(GraphDSL.create() { b ⇒
val chopUp = BidiFlow.fromGraph(GraphDSL.create() { b =>
val f = Flow[ByteString].mapConcat(_.map(ByteString(_)))
BidiShape.fromFlows(b.add(f), b.add(f))
})
val accumulate = BidiFlow.fromGraph(GraphDSL.create() { b ⇒
val accumulate = BidiFlow.fromGraph(GraphDSL.create() { b =>
val f = Flow[ByteString].grouped(1000).map(_.fold(ByteString.empty)(_ ++ _))
BidiShape.fromFlows(b.add(f), b.add(f))
})
@@ -168,7 +168,7 @@ class BidiFlowDocSpec extends AkkaSpec {
val stack = codec.atop(framing)
// test it by plugging it into its own inverse and closing the right end
val pingpong = Flow[Message].collect { case Ping(id) ⇒ Pong(id) }
val pingpong = Flow[Message].collect { case Ping(id) => Pong(id) }
val flow = stack.atop(stack.reversed).join(pingpong)
val result = Source((0 to 9).map(Ping)).via(flow).limit(20).runWith(Sink.seq)
Await.result(result, 1.second) should ===((0 to 9).map(Pong))
@@ -177,14 +177,14 @@ class BidiFlowDocSpec extends AkkaSpec {
"work when chopped up" in {
val stack = codec.atop(framing)
val flow = stack.atop(chopUp).atop(stack.reversed).join(Flow[Message].map { case Ping(id) ⇒ Pong(id) })
val flow = stack.atop(chopUp).atop(stack.reversed).join(Flow[Message].map { case Ping(id) => Pong(id) })
val f = Source((0 to 9).map(Ping)).via(flow).limit(20).runWith(Sink.seq)
Await.result(f, 1.second) should ===((0 to 9).map(Pong))
}
"work when accumulated" in {
val stack = codec.atop(framing)
val flow = stack.atop(accumulate).atop(stack.reversed).join(Flow[Message].map { case Ping(id) ⇒ Pong(id) })
val flow = stack.atop(accumulate).atop(stack.reversed).join(Flow[Message].map { case Ping(id) => Pong(id) })
val f = Source((0 to 9).map(Ping)).via(flow).limit(20).runWith(Sink.seq)
Await.result(f, 1.second) should ===((0 to 9).map(Pong))
}

View file

@@ -169,7 +169,7 @@ class CompositionDocSpec extends AkkaSpec {
"closed graph" in {
//#embed-closed
val closed1 = Source.single(0).to(Sink.foreach(println))
val closed2 = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder ⇒
val closed2 = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
val embeddedClosed: ClosedShape = builder.add(closed1)
//
embeddedClosed
@@ -192,7 +192,7 @@ class CompositionDocSpec extends AkkaSpec {
//#mat-combine-2
// Materializes to NotUsed (orange)
val flow2: Flow[Int, ByteString, NotUsed] = Flow[Int].map { i ⇒ ByteString(i.toString) }
val flow2: Flow[Int, ByteString, NotUsed] = Flow[Int].map { i => ByteString(i.toString) }
// Materializes to Future[OutgoingConnection] (yellow)
val flow3: Flow[ByteString, ByteString, Future[OutgoingConnection]] =

View file

@@ -27,10 +27,10 @@ class FlowDocSpec extends AkkaSpec with CompileOnlySpec {
"source is immutable" in {
//#source-immutable
val source = Source(1 to 10)
source.map(_ ⇒ 0) // has no effect on source, since it's immutable
source.map(_ => 0) // has no effect on source, since it's immutable
source.runWith(Sink.fold(0)(_ + _)) // 55
val zeroes = source.map(_ ⇒ 0) // returns new Source[Int], with `map()` appended
val zeroes = source.map(_ => 0) // returns new Source[Int], with `map()` appended
zeroes.runWith(Sink.fold(0)(_ + _)) // 0
//#source-immutable
}
@@ -81,12 +81,12 @@ class FlowDocSpec extends AkkaSpec with CompileOnlySpec {
import scala.concurrent.duration._
case object Tick
val timer = Source.tick(initialDelay = 1.second, interval = 1.seconds, tick = () ⇒ Tick)
val timer = Source.tick(initialDelay = 1.second, interval = 1.seconds, tick = () => Tick)
val timerCancel: Cancellable = Sink.ignore.runWith(timer)
timerCancel.cancel()
val timerMap = timer.map(tick ⇒ "tick")
val timerMap = timer.map(tick => "tick")
// materialize the flow and retrieve the timers Cancellable
val timerCancellable = Sink.ignore.runWith(timerMap)
timerCancellable.cancel()
@@ -152,7 +152,7 @@ class FlowDocSpec extends AkkaSpec with CompileOnlySpec {
"various ways of transforming materialized values" in {
import scala.concurrent.duration._
val throttler = Flow.fromGraph(GraphDSL.create(Source.tick(1.second, 1.second, "test")) { implicit builder ⇒ tickSource ⇒
val throttler = Flow.fromGraph(GraphDSL.create(Source.tick(1.second, 1.second, "test")) { implicit builder => tickSource =>
import GraphDSL.Implicits._
val zip = builder.add(ZipWith[String, Int, Int](Keep.right))
tickSource ~> zip.in0
@@ -200,7 +200,7 @@ class FlowDocSpec extends AkkaSpec with CompileOnlySpec {
// doubly nested pair, but we want to flatten it out
val r11: RunnableGraph[(Promise[Option[Int]], Cancellable, Future[Int])] =
r9.mapMaterializedValue {
case ((promise, cancellable), future) ⇒
case ((promise, cancellable), future) =>
(promise, cancellable, future)
}
@@ -214,7 +214,7 @@ class FlowDocSpec extends AkkaSpec with CompileOnlySpec {
// The result of r11 can be also achieved by using the Graph API
val r12: RunnableGraph[(Promise[Option[Int]], Cancellable, Future[Int])] =
RunnableGraph.fromGraph(GraphDSL.create(source, flow, sink)((_, _, _)) { implicit builder ⇒ (src, f, dst) ⇒
RunnableGraph.fromGraph(GraphDSL.create(source, flow, sink)((_, _, _)) { implicit builder => (src, f, dst) =>
import GraphDSL.Implicits._
src ~> f ~> dst
ClosedShape
@@ -263,12 +263,12 @@ object FlowDocSpec {
Source.maybe
.runWith(Sink.onComplete {
case Success(done) ⇒ println(s"Completed: $done")
case Failure(ex) ⇒ println(s"Failed: ${ex.getMessage}")
case Success(done) => println(s"Completed: $done")
case Failure(ex) => println(s"Failed: ${ex.getMessage}")
})
def receive = {
case "boom"
case "boom" =>
context.stop(self) // will also terminate the stream
}
}
@@ -279,12 +279,12 @@ object FlowDocSpec {
Source.maybe
.runWith(Sink.onComplete {
case Success(done) ⇒ println(s"Completed: $done")
case Failure(ex) ⇒ println(s"Failed: ${ex.getMessage}")
case Success(done) => println(s"Completed: $done")
case Failure(ex) => println(s"Failed: ${ex.getMessage}")
})
def receive = {
case "boom"
case "boom" =>
context.stop(self) // will NOT terminate the stream (it's bound to the system!)
}
}

View file

@@ -33,8 +33,8 @@ class FlowErrorDocSpec extends AkkaSpec {
"demonstrate resume stream" in {
//#resume
val decider: Supervision.Decider = {
case _: ArithmeticException ⇒ Supervision.Resume
case _ ⇒ Supervision.Stop
case _: ArithmeticException => Supervision.Resume
case _ => Supervision.Stop
}
implicit val materializer = ActorMaterializer(
ActorMaterializerSettings(system).withSupervisionStrategy(decider))
@@ -51,11 +51,11 @@ class FlowErrorDocSpec extends AkkaSpec {
//#resume-section
implicit val materializer = ActorMaterializer()
val decider: Supervision.Decider = {
case _: ArithmeticException ⇒ Supervision.Resume
case _ ⇒ Supervision.Stop
case _: ArithmeticException => Supervision.Resume
case _ => Supervision.Stop
}
val flow = Flow[Int]
.filter(100 / _ < 50).map(elem ⇒ 100 / (5 - elem))
.filter(100 / _ < 50).map(elem => 100 / (5 - elem))
.withAttributes(ActorAttributes.supervisionStrategy(decider))
val source = Source(0 to 5).via(flow)
@@ -71,11 +71,11 @@ class FlowErrorDocSpec extends AkkaSpec {
//#restart-section
implicit val materializer = ActorMaterializer()
val decider: Supervision.Decider = {
case _: IllegalArgumentException ⇒ Supervision.Restart
case _ ⇒ Supervision.Stop
case _: IllegalArgumentException => Supervision.Restart
case _ => Supervision.Stop
}
val flow = Flow[Int]
.scan(0) { (acc, elem) ⇒
.scan(0) { (acc, elem) =>
if (elem < 0) throw new IllegalArgumentException("negative not allowed")
else acc + elem
}
@@ -93,11 +93,11 @@ class FlowErrorDocSpec extends AkkaSpec {
"demonstrate recover" in {
implicit val materializer = ActorMaterializer()
//#recover
Source(0 to 6).map(n ⇒
Source(0 to 6).map(n =>
if (n < 5) n.toString
else throw new RuntimeException("Boom!")
).recover {
case _: RuntimeException "stream truncated"
case _: RuntimeException => "stream truncated"
}.runForeach(println)
//#recover
@@ -119,11 +119,11 @@ stream truncated
//#recoverWithRetries
val planB = Source(List("five", "six", "seven", "eight"))
Source(0 to 10).map(n ⇒
Source(0 to 10).map(n =>
if (n < 5) n.toString
else throw new RuntimeException("Boom!")
).recoverWithRetries(attempts = 1, {
case _: RuntimeException ⇒ planB
case _: RuntimeException => planB
}).runForeach(println)
//#recoverWithRetries

View file

@@ -41,9 +41,9 @@ class FlowParallelismDocSpec extends AkkaSpec {
"Demonstrate parallel processing" in {
//#parallelism
val fryingPan: Flow[ScoopOfBatter, Pancake, NotUsed] =
Flow[ScoopOfBatter].map { batter ⇒ Pancake() }
Flow[ScoopOfBatter].map { batter => Pancake() }
val pancakeChef: Flow[ScoopOfBatter, Pancake, NotUsed] = Flow.fromGraph(GraphDSL.create() { implicit builder ⇒
val pancakeChef: Flow[ScoopOfBatter, Pancake, NotUsed] = Flow.fromGraph(GraphDSL.create() { implicit builder =>
val dispatchBatter = builder.add(Balance[ScoopOfBatter](2))
val mergePancakes = builder.add(Merge[Pancake](2))
@@ -64,7 +64,7 @@ class FlowParallelismDocSpec extends AkkaSpec {
"Demonstrate parallelized pipelines" in {
//#parallel-pipeline
val pancakeChef: Flow[ScoopOfBatter, Pancake, NotUsed] =
Flow.fromGraph(GraphDSL.create() { implicit builder ⇒
Flow.fromGraph(GraphDSL.create() { implicit builder =>
val dispatchBatter = builder.add(Balance[ScoopOfBatter](2))
val mergePancakes = builder.add(Merge[Pancake](2))
@@ -82,7 +82,7 @@ class FlowParallelismDocSpec extends AkkaSpec {
"Demonstrate pipelined parallel processing" in {
//#pipelined-parallel
val pancakeChefs1: Flow[ScoopOfBatter, HalfCookedPancake, NotUsed] =
Flow.fromGraph(GraphDSL.create() { implicit builder ⇒
Flow.fromGraph(GraphDSL.create() { implicit builder =>
val dispatchBatter = builder.add(Balance[ScoopOfBatter](2))
val mergeHalfPancakes = builder.add(Merge[HalfCookedPancake](2))
@@ -95,7 +95,7 @@ class FlowParallelismDocSpec extends AkkaSpec {
})
val pancakeChefs2: Flow[HalfCookedPancake, Pancake, NotUsed] =
Flow.fromGraph(GraphDSL.create() { implicit builder ⇒
Flow.fromGraph(GraphDSL.create() { implicit builder =>
val dispatchHalfPancakes = builder.add(Balance[HalfCookedPancake](2))
val mergePancakes = builder.add(Merge[Pancake](2))

View file

@@ -27,7 +27,7 @@ class FlowStreamRefsDocSpec extends AkkaSpec with CompileOnlySpec {
implicit val mat = ActorMaterializer()(context)
def receive = {
case RequestLogs(streamId) ⇒
case RequestLogs(streamId) =>
// obtain the source you want to offer:
val source: Source[String, NotUsed] = streamLogs(streamId)
@@ -74,7 +74,7 @@ class FlowStreamRefsDocSpec extends AkkaSpec with CompileOnlySpec {
implicit val mat = ActorMaterializer()(context)
def receive = {
case PrepareUpload(nodeId) ⇒
case PrepareUpload(nodeId) =>
// obtain the source you want to offer:
val sink: Sink[String, NotUsed] = logsSinkFor(nodeId)

View file

@@ -13,7 +13,7 @@ class GraphCyclesSpec extends AkkaSpec {
implicit val materializer = ActorMaterializer()
"Cycle demonstration" must {
val source = Source.fromIterator(() ⇒ Iterator.from(0))
val source = Source.fromIterator(() => Iterator.from(0))
"include a deadlocked cycle" in {

View file

@@ -47,7 +47,7 @@ class GraphDSLDocSpec extends AkkaSpec {
"flow connection errors" in {
intercept[IllegalStateException] {
//#simple-graph
RunnableGraph.fromGraph(GraphDSL.create() { implicit builder ⇒
RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
import GraphDSL.Implicits._
val source1 = Source(1 to 10)
val source2 = Source(1 to 10)
@@ -125,7 +125,7 @@ class GraphDSLDocSpec extends AkkaSpec {
worker: Flow[In, Out, Any],
workerCount: Int): Graph[PriorityWorkerPoolShape[In, Out], NotUsed] = {
GraphDSL.create() { implicit b ⇒
GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val priorityMerge = b.add(MergePreferred[In](1))
@@ -137,7 +137,7 @@ class GraphDSLDocSpec extends AkkaSpec {
// Wire up each of the outputs of the balancer to a worker flow
// then merge them back
for (i ← 0 until workerCount)
for (i <- 0 until workerCount)
balance.out(i) ~> worker ~> resultsMerge.in(i)
// We now expose the input ports of the priorityMerge and the output
@@ -160,7 +160,7 @@ class GraphDSLDocSpec extends AkkaSpec {
val worker1 = Flow[String].map("step 1 " + _)
val worker2 = Flow[String].map("step 2 " + _)
RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒
RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val priorityPool1 = b.add(PriorityWorkerPool(worker1, 4))
@@ -195,7 +195,7 @@ class GraphDSLDocSpec extends AkkaSpec {
"access to materialized value" in {
//#graph-dsl-matvalue
import GraphDSL.Implicits._
val foldFlow: Flow[Int, Int, Future[Int]] = Flow.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) { implicit builder ⇒ fold ⇒
val foldFlow: Flow[Int, Int, Future[Int]] = Flow.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) { implicit builder => fold =>
FlowShape(fold.in, builder.materializedValue.mapAsync(4)(identity).outlet)
})
//#graph-dsl-matvalue
@@ -205,7 +205,7 @@ class GraphDSLDocSpec extends AkkaSpec {
//#graph-dsl-matvalue-cycle
import GraphDSL.Implicits._
// This cannot produce any value:
val cyclicFold: Source[Int, Future[Int]] = Source.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) { implicit builder ⇒ fold ⇒
val cyclicFold: Source[Int, Future[Int]] = Source.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) { implicit builder => fold =>
// - Fold cannot complete until its upstream mapAsync completes
// - mapAsync cannot complete until the materialized Future produced by
// fold completes

View file

@@ -121,7 +121,7 @@ class GraphStageDocSpec extends AkkaSpec {
}
//#one-to-one
class Map[A, B](f: A ⇒ B) extends GraphStage[FlowShape[A, B]] {
class Map[A, B](f: A => B) extends GraphStage[FlowShape[A, B]] {
val in = Inlet[A]("Map.in")
val out = Outlet[B]("Map.out")
@@ -151,13 +151,13 @@ class GraphStageDocSpec extends AkkaSpec {
val result =
Source(Vector("one", "two", "three"))
.via(stringLength)
.runFold(Seq.empty[Int])((elem, acc) ⇒ elem :+ acc)
.runFold(Seq.empty[Int])((elem, acc) => elem :+ acc)
Await.result(result, 3.seconds) should ===(Seq(3, 3, 5))
}
//#many-to-one
class Filter[A](p: A ⇒ Boolean) extends GraphStage[FlowShape[A, A]] {
class Filter[A](p: A => Boolean) extends GraphStage[FlowShape[A, A]] {
val in = Inlet[A]("Filter.in")
val out = Outlet[A]("Filter.out")
@@ -190,7 +190,7 @@ class GraphStageDocSpec extends AkkaSpec {
val result =
Source(Vector(1, 2, 3, 4, 5, 6))
.via(evenFilter)
.runFold(Seq.empty[Int])((elem, acc) ⇒ elem :+ acc)
.runFold(Seq.empty[Int])((elem, acc) => elem :+ acc)
Await.result(result, 3.seconds) should ===(Seq(2, 4, 6))
}
@@ -243,7 +243,7 @@ class GraphStageDocSpec extends AkkaSpec {
val result =
Source(Vector(1, 2, 3))
.via(duplicator)
.runFold(Seq.empty[Int])((elem, acc) ⇒ elem :+ acc)
.runFold(Seq.empty[Int])((elem, acc) => elem :+ acc)
Await.result(result, 3.seconds) should ===(Seq(1, 1, 2, 2, 3, 3))
}
@@ -283,14 +283,14 @@ class GraphStageDocSpec extends AkkaSpec {
val result =
Source(Vector(1, 2, 3))
.via(duplicator)
.runFold(Seq.empty[Int])((elem, acc) ⇒ elem :+ acc)
.runFold(Seq.empty[Int])((elem, acc) => elem :+ acc)
Await.result(result, 3.seconds) should ===(Seq(1, 1, 2, 2, 3, 3))
}
"Demonstrate chaining of graph stages" in {
val sink = Sink.fold[List[Int], Int](List.empty[Int])((acc, n) ⇒ acc :+ n)
val sink = Sink.fold[List[Int], Int](List.empty[Int])((acc, n) => acc :+ n)
//#graph-operator-chain
val resultFuture = Source(1 to 5)
@@ -320,7 +320,7 @@ class GraphStageDocSpec extends AkkaSpec {
new GraphStageLogic(shape) {
override def preStart(): Unit = {
val callback = getAsyncCallback[Unit] { (_) ⇒
val callback = getAsyncCallback[Unit] { (_) =>
completeStage()
}
switch.foreach(callback.invoke)
@@ -407,7 +407,7 @@ class GraphStageDocSpec extends AkkaSpec {
Source(Vector(1, 2, 3))
.via(new TimedGate[Int](2.second))
.takeWithin(250.millis)
.runFold(Seq.empty[Int])((elem, acc) ⇒ elem :+ acc)
.runFold(Seq.empty[Int])((elem, acc) => elem :+ acc)
Await.result(result, 3.seconds) should ===(Seq(1))
}
@@ -532,7 +532,7 @@ class GraphStageDocSpec extends AkkaSpec {
// tests:
val result1 = Source(Vector(1, 2, 3))
.via(new TwoBuffer)
.runFold(Vector.empty[Int])((acc, n) ⇒ acc :+ n)
.runFold(Vector.empty[Int])((acc, n) => acc :+ n)
Await.result(result1, 3.seconds) should ===(Vector(1, 2, 3))

View file

@@ -61,8 +61,8 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
val fromProducer: Source[String, NotUsed] = runnableGraph.run()
// Print out messages from the producer in two independent consumers
fromProducer.runForeach(msg println("consumer1: " + msg))
fromProducer.runForeach(msg println("consumer2: " + msg))
fromProducer.runForeach(msg => println("consumer1: " + msg))
fromProducer.runForeach(msg => println("consumer2: " + msg))
//#broadcast-hub
}
@@ -110,7 +110,7 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
//#partition-hub
// A simple producer that publishes a new "message-" every second
val producer = Source.tick(1.second, 1.second, "message")
.zipWith(Source(1 to 100))((a, b) ⇒ s"$a-$b")
.zipWith(Source(1 to 100))((a, b) => s"$a-$b")
// Attach a PartitionHub Sink to the producer. This will materialize to a
// corresponding Source.
@@ -118,7 +118,7 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
// value to the left is used)
val runnableGraph: RunnableGraph[Source[String, NotUsed]] =
producer.toMat(PartitionHub.sink(
(size, elem) ⇒ math.abs(elem.hashCode % size),
(size, elem) => math.abs(elem.hashCode % size),
startAfterNrOfConsumers = 2, bufferSize = 256))(Keep.right)
// By running/materializing the producer, we get back a Source, which
@@ -126,8 +126,8 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
val fromProducer: Source[String, NotUsed] = runnableGraph.run()
// Print out messages from the producer in two independent consumers
fromProducer.runForeach(msg println("consumer1: " + msg))
fromProducer.runForeach(msg println("consumer2: " + msg))
fromProducer.runForeach(msg => println("consumer1: " + msg))
fromProducer.runForeach(msg => println("consumer2: " + msg))
//#partition-hub
}
@@ -135,14 +135,14 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
//#partition-hub-stateful
// A simple producer that publishes a new "message-" every second
val producer = Source.tick(1.second, 1.second, "message")
.zipWith(Source(1 to 100))((a, b) ⇒ s"$a-$b")
.zipWith(Source(1 to 100))((a, b) => s"$a-$b")
// New instance of the partitioner function and its state is created
// for each materialization of the PartitionHub.
def roundRobin(): (PartitionHub.ConsumerInfo, String) ⇒ Long = {
def roundRobin(): (PartitionHub.ConsumerInfo, String) => Long = {
var i = -1L
(info, elem) ⇒ {
(info, elem) => {
i += 1
info.consumerIdByIdx((i % info.size).toInt)
}
@@ -154,7 +154,7 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
// value to the left is used)
val runnableGraph: RunnableGraph[Source[String, NotUsed]] =
producer.toMat(PartitionHub.statefulSink(
() ⇒ roundRobin(),
() => roundRobin(),
startAfterNrOfConsumers = 2, bufferSize = 256))(Keep.right)
// By running/materializing the producer, we get back a Source, which
@@ -162,8 +162,8 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
val fromProducer: Source[String, NotUsed] = runnableGraph.run()
// Print out messages from the producer in two independent consumers
fromProducer.runForeach(msg println("consumer1: " + msg))
fromProducer.runForeach(msg println("consumer2: " + msg))
fromProducer.runForeach(msg => println("consumer1: " + msg))
fromProducer.runForeach(msg => println("consumer2: " + msg))
//#partition-hub-stateful
}
@@ -175,14 +175,14 @@ class HubsDocSpec extends AkkaSpec with CompileOnlySpec {
// Note that this is a moving target since the elements are consumed concurrently.
val runnableGraph: RunnableGraph[Source[Int, NotUsed]] =
producer.toMat(PartitionHub.statefulSink(
() ⇒ (info, elem) ⇒ info.consumerIds.minBy(id ⇒ info.queueSize(id)),
() => (info, elem) => info.consumerIds.minBy(id => info.queueSize(id)),
startAfterNrOfConsumers = 2, bufferSize = 16))(Keep.right)
val fromProducer: Source[Int, NotUsed] = runnableGraph.run()
fromProducer.runForeach(msg println("consumer1: " + msg))
fromProducer.runForeach(msg => println("consumer1: " + msg))
fromProducer.throttle(10, 100.millis)
.runForeach(msg println("consumer2: " + msg))
.runForeach(msg => println("consumer2: " + msg))
//#partition-hub-fastest
}

View file

@@ -92,7 +92,7 @@ object IntegrationDocSpec {
class DatabaseService(probe: ActorRef) extends Actor {
override def receive = {
case Save(tweet: Tweet) ⇒
case Save(tweet: Tweet) =>
probe ! tweet.author.handle
sender() ! SaveDone
}
@@ -123,7 +123,7 @@ object IntegrationDocSpec {
//#ask-actor
class Translator extends Actor {
def receive = {
case word: String ⇒
case word: String =>
// ... process message
val reply = word.toUpperCase
sender() ! reply // reply to the ask
@@ -169,14 +169,14 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
//#email-addresses-mapAsync
val emailAddresses: Source[String, NotUsed] =
authors
.mapAsync(4)(author ⇒ addressSystem.lookupEmail(author.handle))
.collect { case Some(emailAddress) ⇒ emailAddress }
.mapAsync(4)(author => addressSystem.lookupEmail(author.handle))
.collect { case Some(emailAddress) => emailAddress }
//#email-addresses-mapAsync
//#send-emails
val sendEmails: RunnableGraph[NotUsed] =
emailAddresses
.mapAsync(4)(address ⇒ {
.mapAsync(4)(address => {
emailServer.send(
Email(to = address, title = "Akka", body = "I like your tweet"))
})
@@ -205,7 +205,7 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
// sent from stream to actor to indicate start, end or failure of stream:
val InitMessage = AckingReceiver.StreamInitialized
val OnCompleteMessage = AckingReceiver.StreamCompleted
val onErrorMessage = (ex: Throwable) ⇒ AckingReceiver.StreamFailure(ex)
val onErrorMessage = (ex: Throwable) => AckingReceiver.StreamFailure(ex)
val probe = TestProbe()
val receiver = system.actorOf(
@@ -242,20 +242,20 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
import AckingReceiver._
def receive: Receive = {
case StreamInitialized ⇒
case StreamInitialized =>
log.info("Stream initialized!")
probe ! "Stream initialized!"
sender() ! Ack // ack to allow the stream to proceed sending more elements
case el: String ⇒
case el: String =>
log.info("Received element: {}", el)
probe ! el
sender() ! Ack // ack to allow the stream to proceed sending more elements
case StreamCompleted ⇒
case StreamCompleted =>
log.info("Stream completed!")
probe ! "Stream completed!"
case StreamFailure(ex) ⇒
case StreamFailure(ex) =>
log.error(ex, "Stream failed!")
}
}
@@ -272,7 +272,7 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
val emailAddresses: Source[String, NotUsed] =
authors.via(
Flow[Author].mapAsync(4)(author ⇒ addressSystem.lookupEmail(author.handle))
Flow[Author].mapAsync(4)(author => addressSystem.lookupEmail(author.handle))
.withAttributes(supervisionStrategy(resumingDecider)))
//#email-addresses-mapAsync-supervision
}
@@ -288,12 +288,12 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
val emailAddresses: Source[String, NotUsed] =
authors
.mapAsyncUnordered(4)(author ⇒ addressSystem.lookupEmail(author.handle))
.collect { case Some(emailAddress) ⇒ emailAddress }
.mapAsyncUnordered(4)(author => addressSystem.lookupEmail(author.handle))
.collect { case Some(emailAddress) => emailAddress }
val sendEmails: RunnableGraph[NotUsed] =
emailAddresses
.mapAsyncUnordered(4)(address ⇒ {
.mapAsyncUnordered(4)(address => {
emailServer.send(
Email(to = address, title = "Akka", body = "I like your tweet"))
})
@@ -320,15 +320,15 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
val authors = tweets.filter(_.hashtags.contains(akkaTag)).map(_.author)
val phoneNumbers =
authors.mapAsync(4)(author ⇒ addressSystem.lookupPhoneNumber(author.handle))
.collect { case Some(phoneNo) ⇒ phoneNo }
authors.mapAsync(4)(author => addressSystem.lookupPhoneNumber(author.handle))
.collect { case Some(phoneNo) => phoneNo }
//#blocking-mapAsync
val blockingExecutionContext = system.dispatchers.lookup("blocking-dispatcher")
val sendTextMessages: RunnableGraph[NotUsed] =
phoneNumbers
.mapAsync(4)(phoneNo ⇒ {
.mapAsync(4)(phoneNo => {
Future {
smsServer.send(
TextMessage(to = phoneNo, body = "I like your tweet"))
@@ -357,12 +357,12 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
val authors = tweets.filter(_.hashtags.contains(akkaTag)).map(_.author)
val phoneNumbers =
authors.mapAsync(4)(author ⇒ addressSystem.lookupPhoneNumber(author.handle))
.collect { case Some(phoneNo) ⇒ phoneNo }
authors.mapAsync(4)(author => addressSystem.lookupPhoneNumber(author.handle))
.collect { case Some(phoneNo) => phoneNo }
//#blocking-map
val send = Flow[String]
.map { phoneNo ⇒
.map { phoneNo =>
smsServer.send(TextMessage(to = phoneNo, body = "I like your tweet"))
}
.withAttributes(ActorAttributes.dispatcher("blocking-dispatcher"))
@@ -393,7 +393,7 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
implicit val timeout = Timeout(3.seconds)
val saveTweets: RunnableGraph[NotUsed] =
akkaTweets
.mapAsync(4)(tweet ⇒ database ? Save(tweet))
.mapAsync(4)(tweet => database ? Save(tweet))
.to(Sink.ignore)
//#save-tweets
@@ -423,9 +423,9 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
ActorMaterializerSettings(system).withInputBuffer(initialSize = 4, maxSize = 4))
Source(List("a", "B", "C", "D", "e", "F", "g", "H", "i", "J"))
.map(elem ⇒ { println(s"before: $elem"); elem })
.map(elem => { println(s"before: $elem"); elem })
.mapAsync(4)(service.convert)
.runForeach(elem ⇒ println(s"after: $elem"))
.runForeach(elem => println(s"after: $elem"))
//#sometimes-slow-mapAsync
probe.expectMsg("after: A")
@@ -455,9 +455,9 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
ActorMaterializerSettings(system).withInputBuffer(initialSize = 4, maxSize = 4))
Source(List("a", "B", "C", "D", "e", "F", "g", "H", "i", "J"))
.map(elem ⇒ { println(s"before: $elem"); elem })
.map(elem => { println(s"before: $elem"); elem })
.mapAsyncUnordered(4)(service.convert)
.runForeach(elem ⇒ println(s"after: $elem"))
.runForeach(elem => println(s"after: $elem"))
//#sometimes-slow-mapAsyncUnordered
probe.receiveN(10).toSet should be(Set(
@@ -481,19 +481,19 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
val queue = Source
.queue[Int](bufferSize, OverflowStrategy.backpressure)
.throttle(elementsToProcess, 3.second)
.map(x ⇒ x * x)
.toMat(Sink.foreach(x ⇒ println(s"completed $x")))(Keep.left)
.map(x => x * x)
.toMat(Sink.foreach(x => println(s"completed $x")))(Keep.left)
.run()
val source = Source(1 to 10)
implicit val ec = system.dispatcher
source.mapAsync(1)(x ⇒ {
source.mapAsync(1)(x => {
queue.offer(x).map {
case QueueOfferResult.Enqueued ⇒ println(s"enqueued $x")
case QueueOfferResult.Dropped ⇒ println(s"dropped $x")
case QueueOfferResult.Failure(ex) ⇒ println(s"Offer failed ${ex.getMessage}")
case QueueOfferResult.QueueClosed ⇒ println("Source Queue closed")
case QueueOfferResult.Enqueued => println(s"enqueued $x")
case QueueOfferResult.Dropped => println(s"dropped $x")
case QueueOfferResult.Failure(ex) => println(s"Offer failed ${ex.getMessage}")
case QueueOfferResult.QueueClosed => println("Source Queue closed")
}
}).runWith(Sink.ignore)
//#source-queue
@@ -505,8 +505,8 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
val ref = Source
.actorRef[Int](bufferSize, OverflowStrategy.fail) // note: backpressure is not supported
.map(x ⇒ x * x)
.toMat(Sink.foreach(x ⇒ println(s"completed $x")))(Keep.left)
.map(x => x * x)
.toMat(Sink.foreach(x => println(s"completed $x")))(Keep.left)
.run()
ref ! 1

View file

@@ -16,7 +16,7 @@ class MigrationsScala extends AkkaSpec {
Flow[Int].expand(Iterator.continually(_))
//#expand-continually
//#expand-state
Flow[Int].expand(i ⇒ {
Flow[Int].expand(i => {
var state = 0
Iterator.continually({
state += 1

View file

@@ -43,15 +43,15 @@ class QuickStartDocSpec extends WordSpec with BeforeAndAfterAll with ScalaFuture
//#create-source
//#run-source
source.runForeach(i ⇒ println(i))(materializer)
source.runForeach(i => println(i))(materializer)
//#run-source
//#transform-source
val factorials = source.scan(BigInt(1))((acc, next) ⇒ acc * next)
val factorials = source.scan(BigInt(1))((acc, next) => acc * next)
val result: Future[IOResult] =
factorials
.map(num ⇒ ByteString(s"$num\n"))
.map(num => ByteString(s"$num\n"))
.runWith(FileIO.toPath(Paths.get("factorials.txt")))
//#transform-source
@@ -61,7 +61,7 @@ class QuickStartDocSpec extends WordSpec with BeforeAndAfterAll with ScalaFuture
//#add-streams
factorials
.zipWith(Source(0 to 100))((num, idx) ⇒ s"$idx! = $num")
.zipWith(Source(0 to 100))((num, idx) => s"$idx! = $num")
.throttle(1, 1.second)
//#add-streams
.take(3)
@@ -70,10 +70,10 @@ class QuickStartDocSpec extends WordSpec with BeforeAndAfterAll with ScalaFuture
//#add-streams
//#run-source-and-terminate
val done: Future[Done] = source.runForeach(i ⇒ println(i))(materializer)
val done: Future[Done] = source.runForeach(i => println(i))(materializer)
implicit val ec = system.dispatcher
done.onComplete(_ ⇒ system.terminate())
done.onComplete(_ => system.terminate())
//#run-source-and-terminate
done.futureValue
@@ -82,7 +82,7 @@ class QuickStartDocSpec extends WordSpec with BeforeAndAfterAll with ScalaFuture
//#transform-sink
def lineSink(filename: String): Sink[String, Future[IOResult]] =
Flow[String]
.map(s ⇒ ByteString(s + "\n"))
.map(s => ByteString(s + "\n"))
.toMat(FileIO.toPath(Paths.get(filename)))(Keep.right)
//#transform-sink

View file

@@ -24,15 +24,15 @@ class RateTransformationDocSpec extends AkkaSpec {
//#conflate-summarize
val statsFlow = Flow[Double]
.conflateWithSeed(immutable.Seq(_))(_ :+ _)
.map { s ⇒
.map { s =>
val μ = s.sum / s.size
val se = s.map(x ⇒ pow(x - μ, 2))
val se = s.map(x => pow(x - μ, 2))
val σ = sqrt(se.sum / se.size)
(σ, μ, s.size)
}
//#conflate-summarize
val fut = Source.fromIterator(() ⇒ Iterator.continually(Random.nextGaussian))
val fut = Source.fromIterator(() => Iterator.continually(Random.nextGaussian))
.via(statsFlow)
.grouped(10)
.runWith(Sink.head)
@@ -45,8 +45,8 @@ class RateTransformationDocSpec extends AkkaSpec {
val p = 0.01
val sampleFlow = Flow[Double]
.conflateWithSeed(immutable.Seq(_)) {
case (acc, elem) if Random.nextDouble < p ⇒ acc :+ elem
case (acc, _) ⇒ acc
case (acc, elem) if Random.nextDouble < p => acc :+ elem
case (acc, _) => acc
}
.mapConcat(identity)
//#conflate-sample
@@ -97,11 +97,11 @@ class RateTransformationDocSpec extends AkkaSpec {
"extrapolate should track drift" in {
//#extrapolate-drift
val driftFlow = Flow[Double].map(_ -> 0)
.extrapolate[(Double, Int)] { case (i, _) ⇒ Iterator.from(1).map(i -> _) }
.extrapolate[(Double, Int)] { case (i, _) => Iterator.from(1).map(i -> _) }
//#extrapolate-drift
val latch = TestLatch(2)
val realDriftFlow = Flow[Double].map(d ⇒ { latch.countDown(); d -> 0; })
.extrapolate[(Double, Int)] { case (d, _) ⇒ latch.countDown(); Iterator.from(1).map(d -> _) }
val realDriftFlow = Flow[Double].map(d => { latch.countDown(); d -> 0; })
.extrapolate[(Double, Int)] { case (d, _) => latch.countDown(); Iterator.from(1).map(d -> _) }
val (pub, sub) = TestSource.probe[Double]
.via(realDriftFlow)
@@ -123,11 +123,11 @@ class RateTransformationDocSpec extends AkkaSpec {
"expand should track drift" in {
//#expand-drift
val driftFlow = Flow[Double]
.expand(i ⇒ Iterator.from(0).map(i -> _))
.expand(i => Iterator.from(0).map(i -> _))
//#expand-drift
val latch = TestLatch(2)
val realDriftFlow = Flow[Double]
.expand(d ⇒ { latch.countDown(); Iterator.from(0).map(d -> _) })
.expand(d => { latch.countDown(); Iterator.from(0).map(d -> _) })
val (pub, sub) = TestSource.probe[Double]
.via(realDriftFlow)

View file

@@ -142,7 +142,7 @@ class ReactiveStreamsDocSpec extends AkkaSpec {
// An example Processor factory
def createProcessor: Processor[Int, Int] = Flow[Int].toProcessor.run()
val flow: Flow[Int, Int, NotUsed] = Flow.fromProcessor(() ⇒ createProcessor)
val flow: Flow[Int, Int, NotUsed] = Flow.fromProcessor(() => createProcessor)
//#use-processor
}

View file

@@ -40,7 +40,7 @@ class RestartDocSpec extends AkkaSpec with CompileOnlySpec {
maxBackoff = 30.seconds,
randomFactor = 0.2, // adds 20% "noise" to vary the intervals slightly
maxRestarts = 20 // limits the amount of restarts to 20
) { () ⇒
) { () =>
// Create a source from a future of a source
Source.fromFutureSource {
// Make a single request with akka-http
@@ -56,7 +56,7 @@ class RestartDocSpec extends AkkaSpec with CompileOnlySpec {
//#with-kill-switch
val killSwitch = restartSource
.viaMat(KillSwitches.single)(Keep.right)
.toMat(Sink.foreach(event ⇒ println(s"Got event: $event")))(Keep.left)
.toMat(Sink.foreach(event => println(s"Got event: $event")))(Keep.left)
.run()
doSomethingElse()

View file

@@ -16,9 +16,9 @@ class StreamBuffersRateSpec extends AkkaSpec {
def println(s: Any) = ()
//#pipelining
Source(1 to 3)
.map { i println(s"A: $i"); i }.async
.map { i println(s"B: $i"); i }.async
.map { i println(s"C: $i"); i }.async
.map { i => println(s"A: $i"); i }.async
.map { i => println(s"B: $i"); i }.async
.map { i => println(s"C: $i"); i }.async
.runWith(Sink.ignore)
//#pipelining
}
@@ -44,16 +44,16 @@ class StreamBuffersRateSpec extends AkkaSpec {
import scala.concurrent.duration._
case class Tick()
RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒
RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
// this is the asynchronous stage in this graph
val zipper = b.add(ZipWith[Tick, Int, Int]((tick, count) ⇒ count).async)
val zipper = b.add(ZipWith[Tick, Int, Int]((tick, count) => count).async)
Source.tick(initialDelay = 3.second, interval = 3.second, Tick()) ~> zipper.in0
Source.tick(initialDelay = 1.second, interval = 1.second, "message!")
.conflateWithSeed(seed = (_) ⇒ 1)((count, _) ⇒ count + 1) ~> zipper.in1
.conflateWithSeed(seed = (_) => 1)((count, _) => count + 1) ~> zipper.in1
zipper.out ~> Sink.foreach(println)
ClosedShape

View file

@@ -20,7 +20,7 @@ class StreamPartialGraphDSLDocSpec extends AkkaSpec {
"build with open ports" in {
//#simple-partial-graph-dsl
val pickMaxOfThree = GraphDSL.create() { implicit b ⇒
val pickMaxOfThree = GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val zip1 = b.add(ZipWith[Int, Int, Int](math.max _))
@@ -32,7 +32,7 @@ class StreamPartialGraphDSLDocSpec extends AkkaSpec {
val resultSink = Sink.head[Int]
val g = RunnableGraph.fromGraph(GraphDSL.create(resultSink) { implicit b ⇒ sink ⇒
val g = RunnableGraph.fromGraph(GraphDSL.create(resultSink) { implicit b => sink =>
import GraphDSL.Implicits._
// importing the partial graph will return its shape (inlets & outlets)
@@ -52,12 +52,12 @@ class StreamPartialGraphDSLDocSpec extends AkkaSpec {
"build source from partial graph" in {
//#source-from-partial-graph-dsl
val pairs = Source.fromGraph(GraphDSL.create() { implicit b ⇒
val pairs = Source.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
// prepare graph elements
val zip = b.add(Zip[Int, Int]())
def ints = Source.fromIterator(() ⇒ Iterator.from(1))
def ints = Source.fromIterator(() => Iterator.from(1))
// connect the graph
ints.filter(_ % 2 != 0) ~> zip.in0
@@ -75,7 +75,7 @@ class StreamPartialGraphDSLDocSpec extends AkkaSpec {
"build flow from partial graph" in {
//#flow-from-partial-graph-dsl
val pairUpWithToString =
Flow.fromGraph(GraphDSL.create() { implicit b ⇒
Flow.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
// prepare graph elements
@@ -117,7 +117,7 @@ class StreamPartialGraphDSLDocSpec extends AkkaSpec {
val actorRef: ActorRef = testActor
//#sink-combine
val sendRmotely = Sink.actorRef(actorRef, "Done")
val localProcessing = Sink.foreach[Int](_ ⇒ /* do something useful */ ())
val localProcessing = Sink.foreach[Int](_ => /* do something useful */ ())
val sink = Sink.combine(sendRmotely, localProcessing)(Broadcast[Int](_))

View file

@@ -138,7 +138,7 @@ class StreamTestKitDocSpec extends AkkaSpec {
"test source and a sink" in {
import system.dispatcher
//#test-source-and-sink
val flowUnderTest = Flow[Int].mapAsyncUnordered(2) { sleep ⇒
val flowUnderTest = Flow[Int].mapAsyncUnordered(2) { sleep =>
pattern.after(10.millis * sleep, using = system.scheduler)(Future.successful(sleep))
}

View file

@@ -57,7 +57,7 @@ class SubstreamDocSpec extends AkkaSpec {
val charCount = Source(text.toList)
.splitAfter { _ == '\n' }
.filter(_ != '\n')
.map(_ ⇒ 1)
.map(_ => 1)
.reduce(_ + _)
.to(Sink.foreach(println))
.run()
@@ -67,13 +67,13 @@ class SubstreamDocSpec extends AkkaSpec {
"generate substreams by flatMapConcat and flatMapMerge" in {
//#flatMapConcat
Source(1 to 2)
.flatMapConcat(i ⇒ Source(List.fill(3)(i)))
.flatMapConcat(i => Source(List.fill(3)(i)))
.runWith(Sink.ignore)
//#flatMapConcat
//#flatMapMerge
Source(1 to 2)
.flatMapMerge(2, i ⇒ Source(List.fill(3)(i)))
.flatMapMerge(2, i => Source(List.fill(3)(i)))
.runWith(Sink.ignore)
//#flatMapMerge
}

View file

@@ -32,7 +32,7 @@ object TwitterStreamQuickstartDocSpec {
final case class Tweet(author: Author, timestamp: Long, body: String) {
def hashtags: Set[Hashtag] = body.split(" ").collect {
case t if t.startsWith("#") Hashtag(t.replaceAll("[^#\\w]", ""))
case t if t.startsWith("#") => Hashtag(t.replaceAll("[^#\\w]", ""))
}.toSet
}
@@ -100,7 +100,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
trait Example3 {
//#authors-collect
val authors: Source[Author, NotUsed] =
tweets.collect { case t if t.hashtags.contains(akkaTag) ⇒ t.author }
tweets.collect { case t if t.hashtags.contains(akkaTag) => t.author }
//#authors-collect
}
@@ -185,8 +185,8 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
//#backpressure-by-readline
val completion: Future[Done] =
Source(1 to 10)
.map(i { println(s"map => $i"); i })
.runForeach { i readLine(s"Element = $i; continue reading? [press enter]\n") }
.map(i => { println(s"map => $i"); i })
.runForeach { i => readLine(s"Element = $i; continue reading? [press enter]\n") }
Await.ready(completion, 1.minute)
//#backpressure-by-readline
@@ -195,7 +195,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
"count elements on finite stream" in {
//#tweets-fold-count
val count: Flow[Tweet, Int, NotUsed] = Flow[Tweet].map(_ ⇒ 1)
val count: Flow[Tweet, Int, NotUsed] = Flow[Tweet].map(_ => 1)
val sumSink: Sink[Int, Future[Int]] = Sink.fold[Int, Int](0)(_ + _)
@@ -206,12 +206,12 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
val sum: Future[Int] = counterGraph.run()
sum.foreach(c ⇒ println(s"Total tweets processed: $c"))
sum.foreach(c => println(s"Total tweets processed: $c"))
//#tweets-fold-count
new AnyRef {
//#tweets-fold-count-oneline
val sum: Future[Int] = tweets.map(t ⇒ 1).runWith(sumSink)
val sum: Future[Int] = tweets.map(t => 1).runWith(sumSink)
//#tweets-fold-count-oneline
}
}
@@ -224,7 +224,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
val counterRunnableGraph: RunnableGraph[Future[Int]] =
tweetsInMinuteFromNow
.filter(_.hashtags contains akkaTag)
.map(t ⇒ 1)
.map(t => 1)
.toMat(sumSink)(Keep.right)
// materialize the stream once in the morning
@@ -236,7 +236,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
val sum: Future[Int] = counterRunnableGraph.run()
sum.map { c ⇒ println(s"Total tweets processed: $c") }
sum.map { c => println(s"Total tweets processed: $c") }
}
}

View file

@@ -19,9 +19,9 @@ class RecipeAdhocSource extends RecipeSpec {
//#adhoc-source
def adhocSource[T](source: Source[T, _], timeout: FiniteDuration, maxRetries: Int): Source[T, _] =
Source.lazily(
() ⇒ source.backpressureTimeout(timeout).recoverWithRetries(maxRetries, {
case t: TimeoutException ⇒
Source.lazily(() ⇒ source.backpressureTimeout(timeout)).mapMaterializedValue(_ ⇒ NotUsed)
() => source.backpressureTimeout(timeout).recoverWithRetries(maxRetries, {
case t: TimeoutException =>
Source.lazily(() => source.backpressureTimeout(timeout)).mapMaterializedValue(_ => NotUsed)
})
)
//#adhoc-source
@@ -29,7 +29,7 @@ class RecipeAdhocSource extends RecipeSpec {
"Recipe for adhoc source" must {
"not start the source if there is no demand" taggedAs TimingTest in {
val isStarted = new AtomicBoolean()
adhocSource(Source.empty.mapMaterializedValue(_ ⇒ isStarted.set(true)), 200.milliseconds, 3)
adhocSource(Source.empty.mapMaterializedValue(_ => isStarted.set(true)), 200.milliseconds, 3)
.runWith(TestSink.probe[Int])
Thread.sleep(300)
isStarted.get() should be(false)
@@ -44,7 +44,7 @@ class RecipeAdhocSource extends RecipeSpec {
"shut down the source when the next demand times out" taggedAs TimingTest in {
val shutdown = Promise[Done]()
val sink = adhocSource(
Source.repeat("a").watchTermination() { (_, term)
Source.repeat("a").watchTermination() { (_, term) =>
shutdown.completeWith(term)
}, 200.milliseconds, 3)
.runWith(TestSink.probe[String])
@@ -57,7 +57,7 @@ class RecipeAdhocSource extends RecipeSpec {
"not shut down the source when there are still demands" taggedAs TimingTest in {
val shutdown = Promise[Done]()
val sink = adhocSource(
Source.repeat("a").watchTermination() { (_, term)
Source.repeat("a").watchTermination() { (_, term) =>
shutdown.completeWith(term)
}, 200.milliseconds, 3)
.runWith(TestSink.probe[String])
@@ -81,10 +81,10 @@ class RecipeAdhocSource extends RecipeSpec {
val startedCount = new AtomicInteger(0)
val source = Source
.empty.mapMaterializedValue(_ ⇒ startedCount.incrementAndGet())
.empty.mapMaterializedValue(_ => startedCount.incrementAndGet())
.concat(Source.repeat("a"))
val sink = adhocSource(source.watchTermination() { (_, term) ⇒
val sink = adhocSource(source.watchTermination() { (_, term) =>
shutdown.completeWith(term)
}, 200.milliseconds, 3)
.runWith(TestSink.probe[String])
@@ -100,10 +100,10 @@ class RecipeAdhocSource extends RecipeSpec {
val startedCount = new AtomicInteger(0)
val source = Source
.empty.mapMaterializedValue(_ ⇒ startedCount.incrementAndGet())
.empty.mapMaterializedValue(_ => startedCount.incrementAndGet())
.concat(Source.repeat("a"))
val sink = adhocSource(source.watchTermination() { (_, term) ⇒
val sink = adhocSource(source.watchTermination() { (_, term) =>
shutdown.completeWith(term)
}, 200.milliseconds, 3)
.runWith(TestSink.probe[String])

View file

@@ -24,7 +24,7 @@ class RecipeDroppyBroadcast extends RecipeSpec {
val mySink3 = Sink.fromSubscriber(sub3)
//#droppy-bcast
val graph = RunnableGraph.fromGraph(GraphDSL.create(mySink1, mySink2, mySink3)((_, _, _)) { implicit b ⇒ (sink1, sink2, sink3) ⇒
val graph = RunnableGraph.fromGraph(GraphDSL.create(mySink1, mySink2, mySink3)((_, _, _)) { implicit b => (sink1, sink2, sink3) =>
import GraphDSL.Implicits._
val bcast = b.add(Broadcast[Int](3))
@@ -40,7 +40,7 @@ class RecipeDroppyBroadcast extends RecipeSpec {
graph.run()
sub3.request(100)
for (i ← 1 to 100) {
for (i <- 1 to 100) {
pub.sendNext(i)
sub3.expectNext(i)
}
@@ -50,7 +50,7 @@ class RecipeDroppyBroadcast extends RecipeSpec {
sub1.expectSubscription().request(10)
sub2.expectSubscription().request(10)
for (i ← 91 to 100) {
for (i <- 91 to 100) {
sub1.expectNext(i)
sub2.expectNext(i)
}

View file

@@ -49,19 +49,19 @@ class RecipeGlobalRateLimit extends RecipeSpec {
override def receive: Receive = open
val open: Receive = {
case ReplenishTokens ⇒
case ReplenishTokens =>
permitTokens = math.min(permitTokens + tokenRefreshAmount, maxAvailableTokens)
case WantToPass ⇒
case WantToPass =>
permitTokens -= 1
sender() ! MayPass
if (permitTokens == 0) context.become(closed)
}
val closed: Receive = {
case ReplenishTokens ⇒
case ReplenishTokens =>
permitTokens = math.min(permitTokens + tokenRefreshAmount, maxAvailableTokens)
releaseWaiting()
case WantToPass ⇒
case WantToPass =>
waitQueue = waitQueue.enqueue(sender())
}
@@ -86,11 +86,11 @@ class RecipeGlobalRateLimit extends RecipeSpec {
def limitGlobal[T](limiter: ActorRef, maxAllowedWait: FiniteDuration): Flow[T, T, NotUsed] = {
import akka.pattern.ask
import akka.util.Timeout
Flow[T].mapAsync(4)((element: T) ⇒ {
Flow[T].mapAsync(4)((element: T) => {
import system.dispatcher
implicit val triggerTimeout = Timeout(maxAllowedWait)
val limiterTriggerFuture = limiter ? Limiter.WantToPass
limiterTriggerFuture.map((_) ⇒ element)
limiterTriggerFuture.map((_) => element)
})
}
@@ -99,12 +99,12 @@ class RecipeGlobalRateLimit extends RecipeSpec {
// Use a large period and emulate the timer by hand instead
val limiter = system.actorOf(Limiter.props(2, 100.days, 1), "limiter")
val source1 = Source.fromIterator(() Iterator.continually("E1")).via(limitGlobal(limiter, 2.seconds.dilated))
val source2 = Source.fromIterator(() Iterator.continually("E2")).via(limitGlobal(limiter, 2.seconds.dilated))
val source1 = Source.fromIterator(() => Iterator.continually("E1")).via(limitGlobal(limiter, 2.seconds.dilated))
val source2 = Source.fromIterator(() => Iterator.continually("E2")).via(limitGlobal(limiter, 2.seconds.dilated))
val probe = TestSubscriber.manualProbe[String]()
RunnableGraph.fromGraph(GraphDSL.create() { implicit b ⇒
RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val merge = b.add(Merge[String](2))
source1 ~> merge ~> Sink.fromSubscriber(probe)
@@ -123,7 +123,7 @@ class RecipeGlobalRateLimit extends RecipeSpec {
probe.expectNoMsg(500.millis)
var resultSet = Set.empty[String]
for (_ ← 1 to 100) {
for (_ <- 1 to 100) {
limiter ! Limiter.ReplenishTokens
resultSet += probe.expectNext()
}

View file

@@ -18,7 +18,7 @@ class RecipeKeepAlive extends RecipeSpec {
//#inject-keepalive
import scala.concurrent.duration._
val injectKeepAlive: Flow[ByteString, ByteString, NotUsed] =
Flow[ByteString].keepAlive(1.second, () ⇒ keepaliveMessage)
Flow[ByteString].keepAlive(1.second, () => keepaliveMessage)
//#inject-keepalive
// No need to test, this is a built-in stage with proper tests

View file

@@ -20,7 +20,7 @@ class RecipeLoggingElements extends RecipeSpec {
val mySource = Source(List("1", "2", "3"))
//#println-debug
val loggedSource = mySource.map { elem ⇒ println(elem); elem }
val loggedSource = mySource.map { elem => println(elem); elem }
//#println-debug
loggedSource.runWith(Sink.ignore)

View file

@@ -23,12 +23,12 @@ class RecipeManualTrigger extends RecipeSpec {
val sink = Sink.fromSubscriber(sub)
//#manually-triggered-stream
val graph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder ⇒
val graph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
import GraphDSL.Implicits._
val zip = builder.add(Zip[Message, Trigger]())
elements ~> zip.in0
triggerSource ~> zip.in1
zip.out ~> Flow[(Message, Trigger)].map { case (msg, trigger) ⇒ msg } ~> sink
zip.out ~> Flow[(Message, Trigger)].map { case (msg, trigger) => msg } ~> sink
ClosedShape
})
//#manually-triggered-stream
@@ -62,9 +62,9 @@ class RecipeManualTrigger extends RecipeSpec {
val sink = Sink.fromSubscriber(sub)
//#manually-triggered-stream-zipwith
val graph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder ⇒
val graph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
import GraphDSL.Implicits._
val zip = builder.add(ZipWith((msg: Message, trigger: Trigger) ⇒ msg))
val zip = builder.add(ZipWith((msg: Message, trigger: Trigger) => msg))
elements ~> zip.in0
triggerSource ~> zip.in1

View file

@@ -25,13 +25,13 @@ class RecipeMissedTicks extends RecipeSpec {
//#missed-ticks
val missedTicks: Flow[Tick, Int, NotUsed] =
Flow[Tick].conflateWithSeed(seed = (_) ⇒ 0)(
(missedTicks, tick) ⇒ missedTicks + 1)
Flow[Tick].conflateWithSeed(seed = (_) => 0)(
(missedTicks, tick) => missedTicks + 1)
//#missed-ticks
val latch = TestLatch(3)
val realMissedTicks: Flow[Tick, Int, NotUsed] =
Flow[Tick].conflateWithSeed(seed = (_) ⇒ 0)(
(missedTicks, tick) ⇒ { latch.countDown(); missedTicks + 1 })
Flow[Tick].conflateWithSeed(seed = (_) => 0)(
(missedTicks, tick) => { latch.countDown(); missedTicks + 1 })
tickStream.via(realMissedTicks).to(sink).run()

View file

@@ -20,15 +20,15 @@ class RecipeMultiGroupBy extends RecipeSpec {
case class Topic(name: String)
val elems = Source(List("1: a", "1: b", "all: c", "all: d", "1: e"))
val extractTopics = { msg: Message ⇒
val extractTopics = { msg: Message =>
if (msg.startsWith("1")) List(Topic("1"))
else List(Topic("1"), Topic("2"))
}
//#multi-groupby
val topicMapper: (Message) ⇒ immutable.Seq[Topic] = extractTopics
val topicMapper: (Message) => immutable.Seq[Topic] = extractTopics
val messageAndTopic: Source[(Message, Topic), NotUsed] = elems.mapConcat { msg: Message ⇒
val messageAndTopic: Source[(Message, Topic), NotUsed] = elems.mapConcat { msg: Message =>
val topicsForMessage = topicMapper(msg)
// Create a (Msg, Topic) pair for each of the topics
// the message belongs to
@@ -37,7 +37,7 @@ class RecipeMultiGroupBy extends RecipeSpec {
val multiGroups = messageAndTopic
.groupBy(2, _._2).map {
case (msg, topic) ⇒
case (msg, topic) =>
// do what needs to be done
//#multi-groupby
(msg, topic)
@@ -48,7 +48,7 @@ class RecipeMultiGroupBy extends RecipeSpec {
val result = multiGroups
.grouped(10)
.mergeSubstreams
.map(g ⇒ g.head._2.name + g.map(_._1).mkString("[", ", ", "]"))
.map(g => g.head._2.name + g.map(_._1).mkString("[", ", ", "]"))
.limit(10)
.runWith(Sink.seq)

View file

@@ -26,7 +26,7 @@ class RecipeReduceByKey extends RecipeSpec {
//transform each element to pair with number of words in it
.map(_ -> 1)
// add counting logic to the streams
.reduce((l, r) ⇒ (l._1, l._2 + r._2))
.reduce((l, r) => (l._1, l._2 + r._2))
// get a stream of word counts
.mergeSubstreams
//#word-count
@@ -47,21 +47,21 @@ class RecipeReduceByKey extends RecipeSpec {
//#reduce-by-key-general
def reduceByKey[In, K, Out](
maximumGroupSize: Int,
groupKey: (In) ⇒ K,
map: (In) ⇒ Out)(reduce: (Out, Out) ⇒ Out): Flow[In, (K, Out), NotUsed] = {
groupKey: (In) => K,
map: (In) => Out)(reduce: (Out, Out) => Out): Flow[In, (K, Out), NotUsed] = {
Flow[In]
.groupBy[K](maximumGroupSize, groupKey)
.map(e ⇒ groupKey(e) -> map(e))
.reduce((l, r) ⇒ l._1 -> reduce(l._2, r._2))
.map(e => groupKey(e) -> map(e))
.reduce((l, r) => l._1 -> reduce(l._2, r._2))
.mergeSubstreams
}
val wordCounts = words.via(
reduceByKey(
MaximumDistinctWords,
groupKey = (word: String) ⇒ word,
map = (word: String) ⇒ 1)((left: Int, right: Int) ⇒ left + right))
groupKey = (word: String) => word,
map = (word: String) => 1)((left: Int, right: Int) => left + right))
//#reduce-by-key-general
Await.result(wordCounts.limit(10).runWith(Sink.seq), 3.seconds).toSet should be(Set(

View file

@@ -19,11 +19,11 @@ class RecipeSimpleDrop extends RecipeSpec {
//#simple-drop
val droppyStream: Flow[Message, Message, NotUsed] =
Flow[Message].conflate((lastMessage, newMessage) ⇒ newMessage)
Flow[Message].conflate((lastMessage, newMessage) => newMessage)
//#simple-drop
val latch = TestLatch(2)
val realDroppyStream =
Flow[Message].conflate((lastMessage, newMessage) ⇒ { latch.countDown(); newMessage })
Flow[Message].conflate((lastMessage, newMessage) => { latch.countDown(); newMessage })
val pub = TestPublisher.probe[Message]()
val sub = TestSubscriber.manualProbe[Message]()

View file

@@ -17,7 +17,7 @@ class RecipeSourceFromFunction extends RecipeSpec {
def builderFunction(): String = UUID.randomUUID.toString
//#source-from-function
val source = Source.repeat(NotUsed).map(_ ⇒ builderFunction())
val source = Source.repeat(NotUsed).map(_ => builderFunction())
//#source-from-function
val f = source.take(2).runWith(Sink.seq)

View file

@@ -25,11 +25,11 @@ class RecipeWorkerPool extends RecipeSpec {
def balancer[In, Out](worker: Flow[In, Out, Any], workerCount: Int): Flow[In, Out, NotUsed] = {
import GraphDSL.Implicits._
Flow.fromGraph(GraphDSL.create() { implicit b ⇒
Flow.fromGraph(GraphDSL.create() { implicit b =>
val balancer = b.add(Balance[In](workerCount, waitForAllDownstreams = true))
val merge = b.add(Merge[Out](workerCount))
for (_ ← 1 to workerCount) {
for (_ <- 1 to workerCount) {
// for each worker, add an edge from the balancer to the worker, then wire
// it to the merge element
balancer ~> worker.async ~> merge

View file

@@ -69,7 +69,7 @@ class StreamFileDocSpec extends AkkaSpec(UnboundedMailboxConfig) {
//#file-sink
val text = Source.single("Hello Akka Stream!")
val result: Future[IOResult] = text
.map(t ⇒ ByteString(t))
.map(t => ByteString(t))
.runWith(FileIO.toPath(file))
//#file-sink
}

View file

@@ -30,9 +30,9 @@ class StreamTcpDocSpec extends AkkaSpec {
val binding: Future[ServerBinding] =
Tcp().bind("127.0.0.1", 8888).to(Sink.ignore).run()
binding.map { b ⇒
binding.map { b =>
b.unbind() onComplete {
case _ ⇒ // ...
case _ => // ...
}
}
//#echo-server-simple-bind
@@ -44,7 +44,7 @@ class StreamTcpDocSpec extends AkkaSpec {
val connections: Source[IncomingConnection, Future[ServerBinding]] =
Tcp().bind(host, port)
connections runForeach { connection ⇒
connections runForeach { connection =>
println(s"New connection from: ${connection.remoteAddress}")
val echo = Flow[ByteString]
@@ -71,7 +71,7 @@ class StreamTcpDocSpec extends AkkaSpec {
import akka.stream.scaladsl.Framing
val binding =
//#welcome-banner-chat-server
connections.to(Sink.foreach { connection ⇒
connections.to(Sink.foreach { connection =>
// server logic, parses incoming commands
val commandParser = Flow[String].takeWhile(_ != "BYE").map(_ + "!")
@@ -87,7 +87,7 @@ class StreamTcpDocSpec extends AkkaSpec {
allowTruncation = true))
.map(_.utf8String)
//#welcome-banner-chat-server
.map { command ⇒ serverProbe.ref ! command; command }
.map { command => serverProbe.ref ! command; command }
//#welcome-banner-chat-server
.via(commandParser)
// merge in the initial banner after parser
@@ -107,8 +107,8 @@ class StreamTcpDocSpec extends AkkaSpec {
val input = new AtomicReference("Hello world" :: "What a lovely day" :: Nil)
def readLine(prompt: String): String = {
input.get() match {
case all @ cmd :: tail if input.compareAndSet(all, tail) ⇒ cmd
case _ ⇒ "q"
case all @ cmd :: tail if input.compareAndSet(all, tail) => cmd
case _ => "q"
}
}
@@ -126,7 +126,7 @@ class StreamTcpDocSpec extends AkkaSpec {
val replParser =
Flow[String].takeWhile(_ != "q")
.concat(Source.single("BYE"))
.map(elem ⇒ ByteString(s"$elem\n"))
.map(elem => ByteString(s"$elem\n"))
val repl = Flow[ByteString]
.via(Framing.delimiter(
@@ -134,8 +134,8 @@ class StreamTcpDocSpec extends AkkaSpec {
maximumFrameLength = 256,
allowTruncation = true))
.map(_.utf8String)
.map(text println("Server: " + text))
.map(_ readLine("> "))
.map(text => println("Server: " + text))
.map(_ => readLine("> "))
.via(replParser)
val connected = connection.join(repl).run()

View file

@@ -14,6 +14,6 @@ object Map {
//#map
val source: Source[Int, NotUsed] = Source(1 to 10)
val mapped: Source[String, NotUsed] = source.map(elem ⇒ elem.toString)
val mapped: Source[String, NotUsed] = source.map(elem => elem.toString)
//#map
}

View file

@@ -23,7 +23,7 @@ object SourceOperators {
implicit val materializer: ActorMaterializer = ActorMaterializer()
val source: Source[Int, NotUsed] = Source.fromFuture(Future.successful(10))
val sink: Sink[Int, Future[Done]] = Sink.foreach((i: Int) ⇒ println(i))
val sink: Sink[Int, Future[Done]] = Sink.foreach((i: Int) => println(i))
val done: Future[Done] = source.runWith(sink) //10
//#sourceFromFuture

View file

@@ -30,9 +30,9 @@ object SourceOrFlow {
//#conflate
import scala.concurrent.duration._
Source.cycle(() ⇒ List(1, 10, 100, 1000).iterator)
Source.cycle(() => List(1, 10, 100, 1000).iterator)
.throttle(10, per = 1.second) // faster upstream
.conflate((acc, el) ⇒ acc + el) // acc: Int, el: Int
.conflate((acc, el) => acc + el) // acc: Int, el: Int
.throttle(1, per = 1.second) // slow downstream
//#conflate
}
@@ -45,9 +45,9 @@ object SourceOrFlow {
def sum(other: Summed) = Summed(this.i + other.i)
}
Source.cycle(() ⇒ List(1, 10, 100, 1000).iterator)
Source.cycle(() => List(1, 10, 100, 1000).iterator)
.throttle(10, per = 1.second) // faster upstream
.conflateWithSeed(el ⇒ Summed(el))((acc, el) ⇒ acc sum Summed(el)) // (Summed, Int) => Summed
.conflateWithSeed(el => Summed(el))((acc, el) => acc sum Summed(el)) // (Summed, Int) => Summed
.throttle(1, per = 1.second) // slow downstream
//#conflateWithSeed
}
@@ -61,7 +61,7 @@ object SourceOrFlow {
//#scan
val source = Source(1 to 5)
source.scan(0)((acc, x) ⇒ acc + x).runForeach(println)
source.scan(0)((acc, x) => acc + x).runForeach(println)
// 0 (= 0)
// 1 (= 0 + 1)
// 3 (= 0 + 1 + 2)