!str #16902: Unify stream internal representation

also =str #16912: Fix StreamTcpSpec flakiness
Endre Sándor Varga 2015-01-28 14:19:50 +01:00
parent cac9c9f2fb
commit 8d77fa8b29
230 changed files with 7814 additions and 9596 deletions

View file

@@ -7,8 +7,7 @@ import scala.annotation.tailrec
import akka.actor.Props
import akka.stream.ActorFlowMaterializer
import akka.stream.actor.ActorPublisher
import akka.stream.scaladsl.Sink
import akka.stream.scaladsl.Source
import akka.stream.scaladsl.{ Flow, Sink, Source }
import akka.stream.testkit.AkkaSpec
object ActorPublisherDocSpec {
@@ -78,13 +77,12 @@ class ActorPublisherDocSpec extends AkkaSpec {
//#actor-publisher-usage
val jobManagerSource = Source[JobManager.Job](JobManager.props)
val materializedMap = jobManagerSource
val ref = Flow[JobManager.Job]
.map(_.payload.toUpperCase)
.map { elem => println(elem); elem }
.to(Sink.ignore)
.run()
.runWith(jobManagerSource)
val ref = materializedMap.get(jobManagerSource)
ref ! JobManager.Job("a")
ref ! JobManager.Job("b")
ref ! JobManager.Job("c")
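Taken together, the new usage in this hunk reads as below; a minimal sketch assuming the post-#16902 API as shown above (JobManager.Job and JobManager.props come from this spec's companion object, and the ActorSystem name is illustrative). The key change: runWith(source) now hands back the source's materialized value directly, so no MaterializedMap lookup is needed.

import akka.actor.{ ActorRef, ActorSystem }
import akka.stream.ActorFlowMaterializer
import akka.stream.scaladsl.{ Flow, Sink, Source }

implicit val system = ActorSystem("actor-publisher-docs") // illustrative name
implicit val mat = ActorFlowMaterializer()

val jobManagerSource = Source[JobManager.Job](JobManager.props)
// runWith(source) materializes the stream and returns the source's
// materialized value: the ActorRef of the publisher actor
val ref: ActorRef = Flow[JobManager.Job]
  .map(_.payload.toUpperCase)
  .to(Sink.ignore)
  .runWith(jobManagerSource)
ref ! JobManager.Job("a")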

View file

@@ -3,37 +3,43 @@
*/
package docs.stream
import akka.stream.ActorFlowMaterializer
import akka.stream._
import akka.stream.scaladsl._
import akka.stream.testkit.AkkaSpec
import scala.collection.immutable.IndexedSeq
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.control.NoStackTrace
object FlexiDocSpec {
//#fleximerge-zip-states
//#fleximerge-zip-readall
import akka.stream.FanInShape._
class ZipPorts[A, B](_init: Init[(A, B)] = Name("Zip"))
extends FanInShape[(A, B)](_init) {
val left = newInlet[A]("left")
val right = newInlet[B]("right")
protected override def construct(i: Init[(A, B)]) = new ZipPorts(i)
}
//#fleximerge-zip-readall
//#fleximerge-zip-states
}
class FlexiDocSpec extends AkkaSpec {
import FlexiDocSpec._
implicit val ec = system.dispatcher
implicit val mat = ActorFlowMaterializer()
"implement zip using readall" in {
//#fleximerge-zip-readall
class Zip[A, B] extends FlexiMerge[(A, B)] {
class Zip[A, B] extends FlexiMerge[(A, B), ZipPorts[A, B]](
new ZipPorts, OperationAttributes.name("Zip1State")) {
import FlexiMerge._
val left = createInputPort[A]()
val right = createInputPort[B]()
def createMergeLogic = new MergeLogic[(A, B)] {
override def inputHandles(inputCount: Int) = {
require(inputCount == 2, s"Zip must have two connected inputs, was $inputCount")
Vector(left, right)
}
override def initialState: State[_] =
State[ReadAllInputs](ReadAll(left, right)) { (ctx, _, inputs) =>
val a: A = inputs(left)
val b: B = inputs(right)
override def createMergeLogic(p: PortT) = new MergeLogic[(A, B)] {
override def initialState =
State(ReadAll(p.left, p.right)) { (ctx, _, inputs) =>
val a = inputs(p.left)
val b = inputs(p.right)
ctx.emit((a, b))
SameState
}
@@ -44,49 +50,40 @@ class FlexiDocSpec extends AkkaSpec {
//#fleximerge-zip-readall
//format: OFF
val res =
//#fleximerge-zip-connecting
val head = Sink.head[(Int, String)]
//#fleximerge-zip-connecting
FlowGraph.closed(Sink.head[(Int, String)]) { implicit b =>
o =>
import FlowGraph.Implicits._
val map =
//#fleximerge-zip-connecting
FlowGraph { implicit b =>
import FlowGraphImplicits._
val zip = Zip[Int, String]
val zip = b.add(new Zip[Int, String])
Source.single(1) ~> zip.left
Source.single("A") ~> zip.right
zip.out ~> head
Source.single("1") ~> zip.right
zip.out ~> o.inlet
}
//#fleximerge-zip-connecting
.run()
.run()
//format: ON
Await.result(map.get(head), remaining) should equal((1, "A"))
Await.result(res, 300.millis) should equal((1, "1"))
}
"implement zip using two states" in {
//#fleximerge-zip-states
class Zip[A, B] extends FlexiMerge[(A, B)] {
class Zip[A, B] extends FlexiMerge[(A, B), ZipPorts[A, B]](
new ZipPorts, OperationAttributes.name("Zip2State")) {
import FlexiMerge._
val left = createInputPort[A]()
val right = createInputPort[B]()
def createMergeLogic = new MergeLogic[(A, B)] {
override def createMergeLogic(p: PortT) = new MergeLogic[(A, B)] {
var lastInA: A = _
override def inputHandles(inputCount: Int) = {
require(inputCount == 2, s"Zip must have two connected inputs, was $inputCount")
Vector(left, right)
}
val readA: State[A] = State[A](Read(left)) { (ctx, input, element) =>
val readA: State[A] = State[A](Read(p.left)) { (ctx, input, element) =>
lastInA = element
readB
}
val readB: State[B] = State[B](Read(right)) { (ctx, input, element) =>
val readB: State[B] = State[B](Read(p.right)) { (ctx, input, element) =>
ctx.emit((lastInA, element))
readA
}
@@ -98,37 +95,37 @@ class FlexiDocSpec extends AkkaSpec {
}
//#fleximerge-zip-states
val head = Sink.head[(Int, String)]
val map = FlowGraph { implicit b =>
import akka.stream.scaladsl.FlowGraphImplicits._
val res = FlowGraph.closed(Sink.head[(Int, String)]) { implicit b =>
o =>
import FlowGraph.Implicits._
val zip = new Zip[Int, String]
val zip = b.add(new Zip[Int, String])
Source(1 to 2) ~> zip.left
Source(List("A", "B")) ~> zip.right
zip.out ~> head
Source(1 to 2) ~> zip.left
Source((1 to 2).map(_.toString)) ~> zip.right
zip.out ~> o.inlet
}.run()
Await.result(map.get(head), remaining) should equal((1, "A"))
Await.result(res, 300.millis) should equal((1, "1"))
}
"fleximerge completion handling" in {
import FanInShape._
//#fleximerge-completion
class ImportantWithBackups[A] extends FlexiMerge[A] {
class ImportantWithBackupShape[A](_init: Init[A] = Name("Zip"))
extends FanInShape[A](_init) {
val important = newInlet[A]("important")
val replica1 = newInlet[A]("replica1")
val replica2 = newInlet[A]("replica2")
protected override def construct(i: Init[A]) =
new ImportantWithBackupShape(i)
}
class ImportantWithBackups[A] extends FlexiMerge[A, ImportantWithBackupShape[A]](
new ImportantWithBackupShape, OperationAttributes.name("ImportantWithBackups")) {
import FlexiMerge._
val important = createInputPort[A]()
val replica1 = createInputPort[A]()
val replica2 = createInputPort[A]()
def createMergeLogic = new MergeLogic[A] {
val inputs = Vector(important, replica1, replica2)
override def inputHandles(inputCount: Int) = {
require(inputCount == 3, s"Must connect 3 inputs, connected only $inputCount")
inputs
}
override def createMergeLogic(p: PortT) = new MergeLogic[A] {
import p.important
override def initialCompletionHandling =
CompletionHandling(
onUpstreamFinish = (ctx, input) => input match {
@@ -159,18 +156,19 @@ class FlexiDocSpec extends AkkaSpec {
SameState
})
override def initialState = State[A](ReadAny(inputs)) {
(ctx, input, element) =>
ctx.emit(element)
SameState
}
override def initialState =
State[A](ReadAny(p.important, p.replica1, p.replica2)) {
(ctx, input, element) =>
ctx.emit(element)
SameState
}
}
}
//#fleximerge-completion
FlowGraph { implicit b =>
import FlowGraphImplicits._
val importantWithBackups = new ImportantWithBackups[Int]
FlowGraph.closed() { implicit b =>
import FlowGraph.Implicits._
val importantWithBackups = b.add(new ImportantWithBackups[Int])
Source.single(1) ~> importantWithBackups.important
Source.single(2) ~> importantWithBackups.replica1
Source.failed[Int](new Exception("Boom!") with NoStackTrace) ~> importantWithBackups.replica2
@@ -179,22 +177,22 @@ class FlexiDocSpec extends AkkaSpec {
}
"flexi preferring merge" in {
import FanInShape._
//#flexi-preferring-merge
class PreferringMerge extends FlexiMerge[Int] {
class PreferringMergeShape[A](_init: Init[A] = Name("PreferringMerge"))
extends FanInShape[A](_init) {
val preferred = newInlet[A]("preferred")
val secondary1 = newInlet[A]("secondary1")
val secondary2 = newInlet[A]("secondary2")
protected override def construct(i: Init[A]) = new PreferringMergeShape(i)
}
class PreferringMerge extends FlexiMerge[Int, PreferringMergeShape[Int]](
new PreferringMergeShape, OperationAttributes.name("PreferringMerge")) {
import akka.stream.scaladsl.FlexiMerge._
val preferred = createInputPort[Int]()
val secondary1 = createInputPort[Int]()
val secondary2 = createInputPort[Int]()
def createMergeLogic = new MergeLogic[Int] {
override def inputHandles(inputCount: Int) = {
require(inputCount == 3, s"PreferringMerge must have 3 connected inputs, was $inputCount")
Vector(preferred, secondary1, secondary2)
}
override def createMergeLogic(p: PortT) = new MergeLogic[Int] {
override def initialState =
State[Int](ReadPreferred(preferred)(secondary1, secondary2)) {
State[Int](ReadPreferred(p.preferred, p.secondary1, p.secondary2)) {
(ctx, input, element) =>
ctx.emit(element)
SameState
@@ -204,61 +202,28 @@ class FlexiDocSpec extends AkkaSpec {
//#flexi-preferring-merge
}
"flexi read conditions" in {
class X extends FlexiMerge[Int] {
import FlexiMerge._
override def createMergeLogic(): MergeLogic[Int] = new MergeLogic[Int] {
//#read-conditions
val first = createInputPort[Int]()
val second = createInputPort[Int]()
val third = createInputPort[Int]()
//#read-conditions
//#read-conditions
val onlyFirst = Read(first)
val firstOrThird = ReadAny(first, third)
val firstAndSecond = ReadAll(first, second)
val firstAndThird = ReadAll(first, third)
val mostlyFirst = ReadPreferred(first)(second, third)
//#read-conditions
override def inputHandles(inputCount: Int): IndexedSeq[InputHandle] = Vector()
override def initialState: State[_] = State[ReadAllInputs](firstAndSecond) {
(ctx, input, inputs) =>
val in1: Int = inputs(first)
SameState
}
}
}
}
"flexi route" in {
//#flexiroute-unzip
class Unzip[A, B] extends FlexiRoute[(A, B)] {
import FanOutShape._
class UnzipShape[A, B](_init: Init[(A, B)] = Name[(A, B)]("Unzip"))
extends FanOutShape[(A, B)](_init) {
val outA = newOutlet[A]("outA")
val outB = newOutlet[B]("outB")
protected override def construct(i: Init[(A, B)]) = new UnzipShape(i)
}
class Unzip[A, B] extends FlexiRoute[(A, B), UnzipShape[A, B]](
new UnzipShape, OperationAttributes.name("Unzip")) {
import FlexiRoute._
val outA = createOutputPort[A]()
val outB = createOutputPort[B]()
override def createRouteLogic() = new RouteLogic[(A, B)] {
override def outputHandles(outputCount: Int) = {
require(outputCount == 2, s"Unzip must have two connected outputs, was $outputCount")
Vector(outA, outB)
}
override def initialState = State[Any](DemandFromAll(outA, outB)) {
(ctx, _, element) =>
val (a, b) = element
ctx.emit(outA, a)
ctx.emit(outB, b)
SameState
}
override def createRouteLogic(p: PortT) = new RouteLogic[(A, B)] {
override def initialState =
State[Any](DemandFromAll(p.outA, p.outB)) {
(ctx, _, element) =>
val (a, b) = element
ctx.emit(p.outA)(a)
ctx.emit(p.outB)(b)
SameState
}
override def initialCompletionHandling = eagerClose
}
@@ -267,20 +232,20 @@ class FlexiDocSpec extends AkkaSpec {
}
"flexi route completion handling" in {
import FanOutShape._
//#flexiroute-completion
class ImportantRoute[A] extends FlexiRoute[A] {
class ImportantRouteShape[A](_init: Init[A] = Name[A]("ImportantRoute")) extends FanOutShape[A](_init) {
val important = newOutlet[A]("important")
val additional1 = newOutlet[A]("additional1")
val additional2 = newOutlet[A]("additional2")
protected override def construct(i: Init[A]) = new ImportantRouteShape(i)
}
class ImportantRoute[A] extends FlexiRoute[A, ImportantRouteShape[A]](
new ImportantRouteShape, OperationAttributes.name("ImportantRoute")) {
import FlexiRoute._
val important = createOutputPort[A]()
val additional1 = createOutputPort[A]()
val additional2 = createOutputPort[A]()
override def createRouteLogic() = new RouteLogic[A] {
val outputs = Vector(important, additional1, additional2)
override def outputHandles(outputCount: Int) = {
require(outputCount == 3, s"Must have three connected outputs, was $outputCount")
outputs
}
override def createRouteLogic(p: PortT) = new RouteLogic[A] {
import p.important
private val select = (p.important | p.additional1 | p.additional2)
override def initialCompletionHandling =
CompletionHandling(
@@ -297,18 +262,19 @@ class FlexiDocSpec extends AkkaSpec {
SameState
})
override def initialState = State[A](DemandFromAny(outputs)) {
(ctx, output, element) =>
ctx.emit(output, element)
SameState
}
override def initialState =
State(DemandFromAny(p.important, p.additional1, p.additional2)) {
(ctx, output, element) =>
ctx.emit(select(output))(element)
SameState
}
}
}
//#flexiroute-completion
FlowGraph { implicit b =>
import FlowGraphImplicits._
val route = new ImportantRoute[Int]
FlowGraph.closed() { implicit b =>
import FlowGraph.Implicits._
val route = b.add(new ImportantRoute[Int])
Source.single(1) ~> route.in
route.important ~> Sink.ignore
route.additional1 ~> Sink.ignore
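The pattern repeated throughout this file: the input/output ports move off the FlexiMerge/FlexiRoute subclass into a dedicated FanInShape/FanOutShape, and createMergeLogic/createRouteLogic now receives those ports as PortT. A consolidated sketch of the final Zip form, assuming exactly the API that appears in the hunks above:

import akka.stream._
import akka.stream.FanInShape._
import akka.stream.scaladsl._

// the shape owns the inlets and knows how to copy itself
class ZipPorts[A, B](_init: Init[(A, B)] = Name("Zip"))
  extends FanInShape[(A, B)](_init) {
  val left = newInlet[A]("left")
  val right = newInlet[B]("right")
  protected override def construct(i: Init[(A, B)]) = new ZipPorts(i)
}

class Zip[A, B] extends FlexiMerge[(A, B), ZipPorts[A, B]](
  new ZipPorts, OperationAttributes.name("Zip")) {
  import FlexiMerge._
  // the logic is handed the ports, so no inputHandles bookkeeping remains
  override def createMergeLogic(p: PortT) = new MergeLogic[(A, B)] {
    override def initialState =
      State(ReadAll(p.left, p.right)) { (ctx, _, inputs) =>
        ctx.emit((inputs(p.left), inputs(p.right)))
        SameState
      }
  }
}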

View file

@@ -36,13 +36,10 @@ class FlowDocSpec extends AkkaSpec {
val sink = Sink.fold[Int, Int](0)(_ + _)
// connect the Source to the Sink, obtaining a RunnableFlow
val runnable: RunnableFlow = source.to(sink)
val runnable: RunnableFlow[Future[Int]] = source.toMat(sink)(Keep.right)
// materialize the flow
val materialized: MaterializedMap = runnable.run()
// get the materialized value of the FoldSink
val sum: Future[Int] = materialized.get(sink)
// materialize the flow and get the value of the FoldSink
val sum: Future[Int] = runnable.run()
//#materialization-in-steps
}
@@ -61,17 +58,20 @@ class FlowDocSpec extends AkkaSpec {
//#stream-reuse
// connect the Source to the Sink, obtaining a RunnableFlow
val sink = Sink.fold[Int, Int](0)(_ + _)
val runnable: RunnableFlow = Source(1 to 10).to(sink)
val runnable: RunnableFlow[Future[Int]] =
Source(1 to 10).toMat(sink)(Keep.right)
// get the materialized value of the FoldSink
val sum1: Future[Int] = runnable.run().get(sink)
val sum2: Future[Int] = runnable.run().get(sink)
val sum1: Future[Int] = runnable.run()
val sum2: Future[Int] = runnable.run()
// sum1 and sum2 are different Futures!
//#stream-reuse
}
"compound source cannot be used as key" in {
// FIXME #16902 This example is now turned around
// The WRONG case has been switched
//#compound-source-is-not-keyed-runWith
import scala.concurrent.duration._
case object Tick
@@ -82,14 +82,14 @@ class FlowDocSpec extends AkkaSpec {
timerCancel.cancel()
val timerMap = timer.map(tick => "tick")
val _ = Sink.ignore.runWith(timerMap) // WRONG: returned type is not the timers Cancellable!
// materialize the flow and retrieve the timer's Cancellable
val timerCancellable = Sink.ignore.runWith(timerMap)
timerCancellable.cancel()
//#compound-source-is-not-keyed-runWith
//#compound-source-is-not-keyed-run
// retain the materialized map, in order to retrieve the timer's Cancellable
val materialized = timerMap.to(Sink.ignore).run()
val timerCancellable = materialized.get(timer)
timerCancellable.cancel()
val timerCancellable2 = timerMap.to(Sink.ignore).run()
timerCancellable2.cancel()
//#compound-source-is-not-keyed-run
}
@@ -133,7 +133,7 @@ class FlowDocSpec extends AkkaSpec {
source.to(Sink.foreach(println(_)))
// Starting from a Sink
val sink: Sink[Int] = Flow[Int].map(_ * 2).to(Sink.foreach(println(_)))
val sink: Sink[Int, Unit] = Flow[Int].map(_ * 2).to(Sink.foreach(println(_)))
Source(1 to 6).to(sink)
//#flow-connecting
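A consolidated sketch of the materialized-value change driving this file's diff, under the same API assumptions (illustrative ActorSystem name): toMat with Keep.right selects the sink's materialized value, and run() returns it directly instead of a MaterializedMap.

import akka.actor.ActorSystem
import akka.stream.ActorFlowMaterializer
import akka.stream.scaladsl.{ Keep, RunnableFlow, Sink, Source }
import scala.concurrent.Future

implicit val system = ActorSystem("flow-docs") // illustrative name
implicit val mat = ActorFlowMaterializer()

val sink = Sink.fold[Int, Int](0)(_ + _)
val runnable: RunnableFlow[Future[Int]] =
  Source(1 to 10).toMat(sink)(Keep.right)

// each run() materializes the blueprint anew, yielding distinct Futures
val sum1: Future[Int] = runnable.run()
val sum2: Future[Int] = runnable.run()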

View file

@@ -75,7 +75,7 @@ class FlowErrorDocSpec extends AkkaSpec {
else acc + elem
}
}
val result = source.grouped(1000).runWith(Sink.head)
val result = source.grouped(1000).runWith(Sink.head())
// the negative element causes the scan stage to be restarted,
// i.e. start from 0 again
// result here will be a Future completed with Success(Vector(0, 1, 0, 5, 12))

View file

@@ -7,8 +7,6 @@ import akka.stream.ActorFlowMaterializer
import akka.stream.scaladsl.Broadcast
import akka.stream.scaladsl.Flow
import akka.stream.scaladsl.FlowGraph
import akka.stream.scaladsl.FlowGraphImplicits
import akka.stream.scaladsl.MaterializedMap
import akka.stream.scaladsl.Merge
import akka.stream.scaladsl.Sink
import akka.stream.scaladsl.Source
@@ -27,44 +25,46 @@ class FlowGraphDocSpec extends AkkaSpec {
"build simple graph" in {
//format: OFF
//#simple-flow-graph
val g = FlowGraph { implicit b =>
import FlowGraphImplicits._
val g = FlowGraph.closed() { implicit b =>
import FlowGraph.Implicits._
val in = Source(1 to 10)
val out = Sink.ignore
val bcast = Broadcast[Int]
val merge = Merge[Int]
val bcast = b.add(Broadcast[Int](2))
val merge = b.add(Merge[Int](2))
val f1, f2, f3, f4 = Flow[Int].map(_ + 10)
in ~> f1 ~> bcast ~> f2 ~> merge ~> f3 ~> out
bcast ~> f4 ~> merge
in ~> f1 ~> bcast.in
bcast.out(0) ~> f2 ~> merge.in(0)
bcast.out(1) ~> f4 ~> merge.in(1)
merge.out ~> f3 ~> out
}
//#simple-flow-graph
//format: ON
//#simple-graph-run
val map: MaterializedMap = g.run()
g.run()
//#simple-graph-run
}
"build simple graph without implicits" in {
//#simple-flow-graph-no-implicits
val g = FlowGraph { b =>
val g = FlowGraph.closed() { b =>
val in = Source(1 to 10)
val out = Sink.ignore
val broadcast = Broadcast[Int]
val merge = Merge[Int]
val broadcast = b.add(Broadcast[Int](2))
val merge = b.add(Merge[Int](2))
val f1 = Flow[Int].map(_ + 10)
val f3 = Flow[Int].map(_.toString)
val f2 = Flow[Int].map(_ + 20)
b.addEdge(in, broadcast)
.addEdge(broadcast, f1, merge)
.addEdge(broadcast, f2, merge)
.addEdge(merge, f3, out)
b.addEdge(b.add(in), broadcast.in)
b.addEdge(broadcast.out(0), f1, merge.in(0))
b.addEdge(broadcast.out(1), f2, merge.in(1))
b.addEdge(merge.out, f3, b.add(out))
}
//#simple-flow-graph-no-implicits
@@ -74,19 +74,19 @@ class FlowGraphDocSpec extends AkkaSpec {
"flow connection errors" in {
intercept[IllegalArgumentException] {
//#simple-graph
FlowGraph { implicit b =>
import FlowGraphImplicits._
FlowGraph.closed() { implicit b =>
import FlowGraph.Implicits._
val source1 = Source(1 to 10)
val source2 = Source(1 to 10)
val zip = Zip[Int, Int]
val zip = b.add(Zip[Int, Int]())
source1 ~> zip.left
source2 ~> zip.right
source1 ~> zip.in0
source2 ~> zip.in1
// unconnected zip.out (!) => "must have at least 1 outgoing edge"
}
//#simple-graph
}.getMessage should include("must have at least 1 outgoing edge")
}.getMessage should include("unconnected ports: Zip.out")
}
"reusing a flow in a graph" in {
@@ -101,19 +101,20 @@ class FlowGraphDocSpec extends AkkaSpec {
// format: OFF
val g =
//#flow-graph-reusing-a-flow
FlowGraph { implicit b =>
import FlowGraphImplicits._
val broadcast = Broadcast[Int]
Source.single(1) ~> broadcast
FlowGraph.closed(topHeadSink, bottomHeadSink)((_, _)) { implicit b =>
(topHS, bottomHS) =>
import FlowGraph.Implicits._
val broadcast = b.add(Broadcast[Int](2))
Source.single(1) ~> broadcast.in
broadcast ~> sharedDoubler ~> topHeadSink
broadcast ~> sharedDoubler ~> bottomHeadSink
broadcast.out(0) ~> sharedDoubler ~> topHS.inlet
broadcast.out(1) ~> sharedDoubler ~> bottomHS.inlet
}
//#flow-graph-reusing-a-flow
// format: ON
val map = g.run()
Await.result(map.get(topHeadSink), 300.millis) shouldEqual 2
Await.result(map.get(bottomHeadSink), 300.millis) shouldEqual 2
val (topFuture, bottomFuture) = g.run()
Await.result(topFuture, 300.millis) shouldEqual 2
Await.result(bottomFuture, 300.millis) shouldEqual 2
}
}
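The reusing-a-flow hunk also introduces the combined-materialization form of FlowGraph.closed. A self-contained sketch under the same API assumptions: closed(s1, s2)(combine) makes both sinks' materialized values available, here combined into a tuple.

import akka.actor.ActorSystem
import akka.stream.ActorFlowMaterializer
import akka.stream.scaladsl._
import scala.concurrent.{ Await, Future }
import scala.concurrent.duration._

implicit val system = ActorSystem("graph-docs") // illustrative name
implicit val mat = ActorFlowMaterializer()

val topHeadSink = Sink.head[Int]
val bottomHeadSink = Sink.head[Int]
val sharedDoubler = Flow[Int].map(_ * 2)

val g = FlowGraph.closed(topHeadSink, bottomHeadSink)((_, _)) { implicit b =>
  (topHS, bottomHS) =>
    import FlowGraph.Implicits._
    val broadcast = b.add(Broadcast[Int](2))
    Source.single(1) ~> broadcast.in
    broadcast.out(0) ~> sharedDoubler ~> topHS.inlet
    broadcast.out(1) ~> sharedDoubler ~> bottomHS.inlet
}
val (topFuture, bottomFuture) = g.run()
Await.result(topFuture, 300.millis) // 2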

View file

@@ -1,13 +1,13 @@
package docs.stream
import akka.stream.ActorFlowMaterializer
import akka.stream.scaladsl.{ RunnableFlow, Sink, Source, Flow }
import akka.stream.scaladsl.{ RunnableFlow, Sink, Source, Flow, Keep }
import akka.stream.stage.PushPullStage
import akka.stream.testkit.AkkaSpec
import org.scalatest.concurrent.{ ScalaFutures, Futures }
import scala.collection.immutable
import scala.concurrent.Await
import scala.concurrent.{ Future, Await }
import scala.concurrent.duration._
class FlowStagesSpec extends AkkaSpec with ScalaFutures {
@@ -75,17 +75,17 @@ class FlowStagesSpec extends AkkaSpec with ScalaFutures {
//#one-to-many
val keyedSink = Sink.head[immutable.Seq[Int]]
val sink = Flow[Int].grouped(10).to(keyedSink)
val sink = Flow[Int].grouped(10).toMat(keyedSink)(Keep.right)
//#stage-chain
val runnable: RunnableFlow = Source(1 to 10)
val resultFuture = Source(1 to 10)
.transform(() => new Filter(_ % 2 == 0))
.transform(() => new Duplicator())
.transform(() => new Map(_ / 2))
.to(sink)
.runWith(sink)
//#stage-chain
Await.result(runnable.run().get(keyedSink), 3.seconds) should be(Seq(1, 1, 2, 2, 3, 3, 4, 4, 5, 5))
Await.result(resultFuture, 3.seconds) should be(Seq(1, 1, 2, 2, 3, 3, 4, 4, 5, 5))
}

View file

@@ -13,89 +13,90 @@ class GraphCyclesSpec extends AkkaSpec {
"include a deadlocked cycle" in {
// format: OFF
//#deadlocked
// WARNING! The graph below deadlocks!
FlowGraph { implicit b =>
import FlowGraphImplicits._
b.allowCycles()
FlowGraph.closed() { implicit b =>
import FlowGraph.Implicits._
val merge = Merge[Int]
val bcast = Broadcast[Int]
val merge = b.add(Merge[Int](2))
val bcast = b.add(Broadcast[Int](2))
source ~> merge ~> Flow[Int].map { (s) => println(s); s } ~> bcast ~> Sink.ignore
bcast ~> merge
source ~> merge ~> Flow[Int].map { s => println(s); s } ~> bcast ~> Sink.ignore()
merge <~ bcast
}
//#deadlocked
// format: ON
}
"include an unfair cycle" in {
// format: OFF
//#unfair
// WARNING! The graph below stops consuming from "source" after a few steps
FlowGraph { implicit b =>
import FlowGraphImplicits._
b.allowCycles()
FlowGraph.closed() { implicit b =>
import FlowGraph.Implicits._
val merge = MergePreferred[Int]
val bcast = Broadcast[Int]
val merge = b.add(MergePreferred[Int](1))
val bcast = b.add(Broadcast[Int](2))
source ~> merge ~> Flow[Int].map { (s) => println(s); s } ~> bcast ~> Sink.ignore
bcast ~> merge.preferred
source ~> merge ~> Flow[Int].map { s => println(s); s } ~> bcast ~> Sink.ignore()
merge.preferred <~ bcast
}
//#unfair
// format: ON
}
"include a dropping cycle" in {
// format: OFF
//#dropping
FlowGraph { implicit b =>
import FlowGraphImplicits._
b.allowCycles()
FlowGraph.closed() { implicit b =>
import FlowGraph.Implicits._
val merge = Merge[Int]
val bcast = Broadcast[Int]
val merge = b.add(Merge[Int](2))
val bcast = b.add(Broadcast[Int](2))
source ~> merge ~> Flow[Int].map { (s) => println(s); s } ~> bcast ~> Sink.ignore
bcast ~> Flow[Int].buffer(10, OverflowStrategy.dropHead) ~> merge
source ~> merge ~> Flow[Int].map { s => println(s); s } ~> bcast ~> Sink.ignore()
merge <~ Flow[Int].buffer(10, OverflowStrategy.dropHead) <~ bcast
}
//#dropping
// format: ON
}
"include a dead zipping cycle" in {
// format: OFF
//#zipping-dead
// WARNING! The graph below never processes any elements
FlowGraph { implicit b =>
import FlowGraphImplicits._
b.allowCycles()
FlowGraph.closed() { implicit b =>
import FlowGraph.Implicits._
val zip = ZipWith[Int, Int, Int]((left, right) => right)
val bcast = Broadcast[Int]
val zip = b.add(ZipWith[Int, Int, Int]((left, right) => right))
val bcast = b.add(Broadcast[Int](2))
source ~> zip.left ~> Flow[Int].map { (s) => println(s); s } ~> bcast ~> Sink.ignore
bcast ~> zip.right
source ~> zip.in0
zip.out.map { s => println(s); s } ~> bcast ~> Sink.ignore()
zip.in1 <~ bcast
}
//#zipping-dead
// format: ON
}
"include a live zipping cycle" in {
// format: OFF
//#zipping-live
FlowGraph { implicit b =>
import FlowGraphImplicits._
b.allowCycles()
FlowGraph.closed() { implicit b =>
import FlowGraph.Implicits._
val zip = ZipWith[Int, Int, Int]((left, right) => left)
val bcast = Broadcast[Int]
val concat = Concat[Int]
source ~> zip.left ~> Flow[Int].map { (s) => println(s); s } ~> bcast ~> Sink.ignore
bcast ~> concat.second ~> zip.right
Source.single(0) ~> concat.first
val zip = b.add(ZipWith((left: Int, right: Int) => left))
val bcast = b.add(Broadcast[Int](2))
val concat = b.add(Concat[Int]())
source ~> zip.in0
zip.out.map { s => println(s); s } ~> bcast ~> Sink.ignore()
zip.in1 <~ concat <~ bcast
concat <~ Source.single(0)
}
//#zipping-live
// format: ON
}
}
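All five cycle examples migrate the same way: junctions get explicit fan counts via b.add, and back-edges are written with the new reverse arrow. A sketch of the first (still deliberately deadlocking) cycle under the same API assumptions, with source standing in for the spec's hidden source definition:

import akka.actor.ActorSystem
import akka.stream.ActorFlowMaterializer
import akka.stream.scaladsl._

implicit val system = ActorSystem("cycles-docs") // illustrative name
implicit val mat = ActorFlowMaterializer()

val source = Source(1 to 100) // stand-in for the spec's source

// WARNING! As in the original example, this graph deadlocks when run.
FlowGraph.closed() { implicit b =>
  import FlowGraph.Implicits._
  val merge = b.add(Merge[Int](2))
  val bcast = b.add(Broadcast[Int](2))
  source ~> merge ~> Flow[Int].map { s => println(s); s } ~> bcast ~> Sink.ignore()
  merge <~ bcast // the back-edge, previously spelled bcast ~> merge
}.run()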

View file

@@ -133,21 +133,21 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
val emailServer = new EmailServer(probe.ref)
//#tweet-authors
val authors: Source[Author] =
val authors: Source[Author, Unit] =
tweets
.filter(_.hashtags.contains(akka))
.map(_.author)
//#tweet-authors
//#email-addresses-mapAsync
val emailAddresses: Source[String] =
val emailAddresses: Source[String, Unit] =
authors
.mapAsync(author => addressSystem.lookupEmail(author.handle))
.collect { case Some(emailAddress) => emailAddress }
//#email-addresses-mapAsync
//#send-emails
val sendEmails: RunnableFlow =
val sendEmails: RunnableFlow[Unit] =
emailAddresses
.mapAsync { address =>
emailServer.send(
@@ -169,14 +169,14 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
"lookup email with mapAsync and supervision" in {
val addressSystem = new AddressSystem2
val authors: Source[Author] =
val authors: Source[Author, Unit] =
tweets.filter(_.hashtags.contains(akka)).map(_.author)
//#email-addresses-mapAsync-supervision
import OperationAttributes.supervisionStrategy
import Supervision.resumingDecider
val emailAddresses: Source[String] =
val emailAddresses: Source[String, Unit] =
authors.section(supervisionStrategy(resumingDecider)) {
_.mapAsync(author => addressSystem.lookupEmail(author.handle))
}
@@ -189,15 +189,15 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
val emailServer = new EmailServer(probe.ref)
//#external-service-mapAsyncUnordered
val authors: Source[Author] =
val authors: Source[Author, Unit] =
tweets.filter(_.hashtags.contains(akka)).map(_.author)
val emailAddresses: Source[String] =
val emailAddresses: Source[String, Unit] =
authors
.mapAsyncUnordered(author => addressSystem.lookupEmail(author.handle))
.collect { case Some(emailAddress) => emailAddress }
val sendEmails: RunnableFlow =
val sendEmails: RunnableFlow[Unit] =
emailAddresses
.mapAsyncUnordered { address =>
emailServer.send(
@@ -232,7 +232,7 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
//#blocking-mapAsync
val blockingExecutionContext = system.dispatchers.lookup("blocking-dispatcher")
val sendTextMessages: RunnableFlow =
val sendTextMessages: RunnableFlow[Unit] =
phoneNumbers
.mapAsync { phoneNo =>
Future {
@@ -267,7 +267,7 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
.collect { case Some(phoneNo) => phoneNo }
//#blocking-map
val sendTextMessages: RunnableFlow =
val sendTextMessages: RunnableFlow[Unit] =
phoneNumbers
.section(OperationAttributes.dispatcher("blocking-dispatcher")) {
_.map { phoneNo =>
@@ -294,10 +294,10 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
val database = system.actorOf(Props(classOf[DatabaseService], probe.ref), "db")
//#save-tweets
val akkaTweets: Source[Tweet] = tweets.filter(_.hashtags.contains(akka))
val akkaTweets: Source[Tweet, Unit] = tweets.filter(_.hashtags.contains(akka))
implicit val timeout = Timeout(3.seconds)
val saveTweets: RunnableFlow =
val saveTweets: RunnableFlow[Unit] =
akkaTweets
.mapAsync(tweet => database ? Save(tweet))
.to(Sink.ignore)
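The only change in this file is the new second type parameter on Source, Flow and RunnableFlow: the type of the materialized value (Unit for plain transformed sources). A small sketch under the same API assumptions:

import akka.actor.ActorSystem
import akka.stream.ActorFlowMaterializer
import akka.stream.scaladsl.{ Keep, RunnableFlow, Sink, Source }
import scala.concurrent.Future

implicit val system = ActorSystem("integration-docs") // illustrative name
implicit val mat = ActorFlowMaterializer()

// a plain transformed source materializes nothing useful, hence Unit
val numbers: Source[Int, Unit] = Source(1 to 10).map(_ * 2)

// keeping the fold sink's value makes the materialized type Future[Int]
val counting: RunnableFlow[Future[Int]] =
  numbers.toMat(Sink.fold[Int, Int](0)(_ + _))(Keep.right)
val total: Future[Int] = counting.run()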

View file

@@ -43,7 +43,7 @@ class ReactiveStreamsDocSpec extends AkkaSpec {
val impl = new Fixture {
override def tweets: Publisher[Tweet] =
TwitterStreamQuickstartDocSpec.tweets.runWith(Sink.publisher)
TwitterStreamQuickstartDocSpec.tweets.runWith(Sink.publisher())
override def storage = SubscriberProbe[Author]
@@ -95,7 +95,7 @@ class ReactiveStreamsDocSpec extends AkkaSpec {
//#source-publisher
val authorPublisher: Publisher[Author] =
Source(tweets).via(authors).runWith(Sink.publisher)
Source(tweets).via(authors).runWith(Sink.publisher())
authorPublisher.subscribe(storage)
//#source-publisher

View file

@@ -43,15 +43,15 @@ class StreamBuffersRateSpec extends AkkaSpec {
import scala.concurrent.duration._
case class Tick()
FlowGraph { implicit b =>
import FlowGraphImplicits._
FlowGraph.closed() { implicit b =>
import FlowGraph.Implicits._
val zipper = ZipWith[Tick, Int, Int]((tick, count) => count)
val zipper = b.add(ZipWith[Tick, Int, Int]((tick, count) => count))
Source(initialDelay = 3.second, interval = 3.second, Tick()) ~> zipper.in0
Source(initialDelay = 1.second, interval = 1.second, "message!")
.conflate(seed = (_) => 1)((count, _) => count + 1) ~> zipper.right
Source(initialDelay = 3.second, interval = 3.second, Tick()) ~> zipper.left
.conflate(seed = (_) => 1)((count, _) => count + 1) ~> zipper.in1
zipper.out ~> Sink.foreach(println)
}
@@ -60,10 +60,10 @@ class StreamBuffersRateSpec extends AkkaSpec {
"explcit buffers" in {
trait Job
def inboundJobsConnector(): Source[Job] = Source.empty()
def inboundJobsConnector(): Source[Job, Unit] = Source.empty()
//#explicit-buffers-backpressure
// Getting a stream of jobs from an imaginary external system as a Source
val jobs: Source[Job] = inboundJobsConnector()
val jobs: Source[Job, Unit] = inboundJobsConnector()
jobs.buffer(1000, OverflowStrategy.backpressure)
//#explicit-buffers-backpressure
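For reference, the explicit-buffer pattern above in one self-contained piece (same API assumptions; inboundJobsConnector is the spec's placeholder):

import akka.stream.OverflowStrategy
import akka.stream.scaladsl.Source

trait Job
def inboundJobsConnector(): Source[Job, Unit] = Source.empty()

// a bounded buffer that backpressures the producer once 1000 jobs queue up
val jobs: Source[Job, Unit] =
  inboundJobsConnector().buffer(1000, OverflowStrategy.backpressure)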

View file

@@ -3,20 +3,11 @@
*/
package docs.stream
import akka.stream.ActorFlowMaterializer
import akka.stream.scaladsl.Broadcast
import akka.stream.scaladsl.Flow
import akka.stream.scaladsl.FlowGraph
import akka.stream.scaladsl.FlowGraphImplicits
import akka.stream.scaladsl.PartialFlowGraph
import akka.stream.scaladsl.Sink
import akka.stream.scaladsl.Source
import akka.stream.scaladsl.UndefinedSink
import akka.stream.scaladsl.UndefinedSource
import akka.stream.scaladsl.Zip
import akka.stream.scaladsl.ZipWith
import akka.stream.scaladsl._
import akka.stream._
import akka.stream.testkit.AkkaSpec
import scala.collection.immutable
import scala.concurrent.Await
import scala.concurrent.Future
import scala.concurrent.duration._
@@ -28,83 +19,55 @@ class StreamPartialFlowGraphDocSpec extends AkkaSpec {
implicit val mat = ActorFlowMaterializer()
"build with open ports" in {
// format: OFF
//#simple-partial-flow-graph
// defined outside as they will be used by different FlowGraphs
// 1) first by the PartialFlowGraph to mark its open input and output ports
// 2) then by the assembling FlowGraph which will attach real sinks and sources to them
val in1 = UndefinedSource[Int]
val in2 = UndefinedSource[Int]
val in3 = UndefinedSource[Int]
val out = UndefinedSink[Int]
val pickMaxOfThree = FlowGraph.partial() { implicit b =>
import FlowGraph.Implicits._
val pickMaxOfThree: PartialFlowGraph = PartialFlowGraph { implicit b =>
import FlowGraphImplicits._
val zip1 = b.add(ZipWith[Int, Int, Int](math.max _))
val zip2 = b.add(ZipWith[Int, Int, Int](math.max _))
zip1.out ~> zip2.in0
val zip1 = ZipWith[Int, Int, Int](math.max _)
val zip2 = ZipWith[Int, Int, Int](math.max _)
in1 ~> zip1.left
in2 ~> zip1.right
zip1.out ~> zip2.left
in3 ~> zip2.right
zip2.out ~> out
UniformFanInShape(zip2.out, zip1.in0, zip1.in1, zip2.in1)
}
//#simple-partial-flow-graph
// format: ON
//#simple-partial-flow-graph
val resultSink = Sink.head[Int]
val g = FlowGraph { b =>
// import the partial flow graph explicitly
b.importPartialFlowGraph(pickMaxOfThree)
val g = FlowGraph.closed(resultSink) { implicit b =>
sink =>
import FlowGraph.Implicits._
b.attachSource(in1, Source.single(1))
b.attachSource(in2, Source.single(2))
b.attachSource(in3, Source.single(3))
b.attachSink(out, resultSink)
// importing the partial graph will return its shape (inlets & outlets)
val pm3 = b.add(pickMaxOfThree)
Source.single(1) ~> pm3.in(0)
Source.single(2) ~> pm3.in(1)
Source.single(3) ~> pm3.in(2)
pm3.out ~> sink.inlet
}
val materialized = g.run()
val max: Future[Int] = materialized.get(resultSink)
val max: Future[Int] = g.run()
Await.result(max, 300.millis) should equal(3)
//#simple-partial-flow-graph
val g2 =
//#simple-partial-flow-graph-import-shorthand
FlowGraph(pickMaxOfThree) { b =>
b.attachSource(in1, Source.single(1))
b.attachSource(in2, Source.single(2))
b.attachSource(in3, Source.single(3))
b.attachSink(out, resultSink)
}
//#simple-partial-flow-graph-import-shorthand
val materialized2 = g.run()
val max2: Future[Int] = materialized2.get(resultSink)
Await.result(max2, 300.millis) should equal(3)
}
"build source from partial flow graph" in {
//#source-from-partial-flow-graph
val pairs: Source[(Int, Int)] = Source() { implicit b =>
import FlowGraphImplicits._
val pairs = Source() { implicit b =>
import FlowGraph.Implicits._
// prepare graph elements
val undefinedSink = UndefinedSink[(Int, Int)]
val zip = Zip[Int, Int]
val zip = b.add(Zip[Int, Int]())
def ints = Source(() => Iterator.from(1))
// connect the graph
ints ~> Flow[Int].filter(_ % 2 != 0) ~> zip.left
ints ~> Flow[Int].filter(_ % 2 == 0) ~> zip.right
zip.out ~> undefinedSink
ints ~> Flow[Int].filter(_ % 2 != 0) ~> zip.in0
ints ~> Flow[Int].filter(_ % 2 == 0) ~> zip.in1
// expose undefined sink
undefinedSink
// expose port
zip.out
}
val firstPair: Future[(Int, Int)] = pairs.runWith(Sink.head)
val firstPair: Future[(Int, Int)] = pairs.runWith(Sink.head())
//#source-from-partial-flow-graph
Await.result(firstPair, 300.millis) should equal(1 -> 2)
}
@@ -112,23 +75,18 @@ class StreamPartialFlowGraphDocSpec extends AkkaSpec {
"build flow from partial flow graph" in {
//#flow-from-partial-flow-graph
val pairUpWithToString = Flow() { implicit b =>
import FlowGraphImplicits._
import FlowGraph.Implicits._
// prepare graph elements
val undefinedSource = UndefinedSource[Int]
val undefinedSink = UndefinedSink[(Int, String)]
val broadcast = Broadcast[Int]
val zip = Zip[Int, String]
val broadcast = b.add(Broadcast[Int](2))
val zip = b.add(Zip[Int, String]())
// connect the graph
undefinedSource ~> broadcast
broadcast ~> Flow[Int].map(identity) ~> zip.left
broadcast ~> Flow[Int].map(_.toString) ~> zip.right
zip.out ~> undefinedSink
broadcast.out(0) ~> Flow[Int].map(identity) ~> zip.in0
broadcast.out(1) ~> Flow[Int].map(_.toString) ~> zip.in1
// expose undefined ports
(undefinedSource, undefinedSink)
// expose ports
(broadcast.in, zip.out)
}
//#flow-from-partial-flow-graph
@@ -136,7 +94,7 @@ class StreamPartialFlowGraphDocSpec extends AkkaSpec {
// format: OFF
val (_, matSink: Future[(Int, String)]) =
//#flow-from-partial-flow-graph
pairUpWithToString.runWith(Source(List(1)), Sink.head)
pairUpWithToString.runWith(Source(List(1)), Sink.head())
//#flow-from-partial-flow-graph
// format: ON
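A consolidated sketch of the new partial-graph API used above, under the same assumptions: FlowGraph.partial returns a graph whose shape lists the still-open ports, and b.add imports it into a closed graph.

import akka.actor.ActorSystem
import akka.stream._
import akka.stream.scaladsl._
import scala.concurrent.{ Await, Future }
import scala.concurrent.duration._

implicit val system = ActorSystem("partial-docs") // illustrative name
implicit val mat = ActorFlowMaterializer()

val pickMaxOfThree = FlowGraph.partial() { implicit b =>
  import FlowGraph.Implicits._
  val zip1 = b.add(ZipWith[Int, Int, Int](math.max _))
  val zip2 = b.add(ZipWith[Int, Int, Int](math.max _))
  zip1.out ~> zip2.in0
  // package the three open inlets and the open outlet as the shape
  UniformFanInShape(zip2.out, zip1.in0, zip1.in1, zip2.in1)
}

val g = FlowGraph.closed(Sink.head[Int]) { implicit b =>
  sink =>
    import FlowGraph.Implicits._
    val pm3 = b.add(pickMaxOfThree) // importing the graph returns its shape
    Source.single(1) ~> pm3.in(0)
    Source.single(2) ~> pm3.in(1)
    Source.single(3) ~> pm3.in(2)
    pm3.out ~> sink.inlet
}
Await.result(g.run(), 300.millis) // 3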

View file

@@ -6,20 +6,16 @@ package docs.stream
import java.net.InetSocketAddress
import java.util.concurrent.atomic.AtomicReference
import akka.actor.ActorSystem
import akka.stream.ActorFlowMaterializer
import akka.stream.scaladsl.Concat
import akka.stream.scaladsl.Flow
import akka.stream.scaladsl.FlowGraphImplicits
import akka.stream.scaladsl.Source
import akka.stream.scaladsl.StreamTcp
import akka.stream.scaladsl.StreamTcp._
import akka.stream.scaladsl.UndefinedSink
import akka.stream.scaladsl.UndefinedSource
import akka.stream._
import akka.stream.scaladsl._
import akka.stream.stage.{ PushStage, Directive, Context }
import akka.stream.testkit.AkkaSpec
import akka.testkit.TestProbe
import akka.util.ByteString
import cookbook.RecipeParseLines
import StreamTcp._
import scala.concurrent.Future
class StreamTcpDocSpec extends AkkaSpec {
@@ -34,11 +30,9 @@ class StreamTcpDocSpec extends AkkaSpec {
"simple server connection" ignore {
//#echo-server-simple-bind
val localhost = new InetSocketAddress("127.0.0.1", 8888)
val binding = StreamTcp().bind(localhost)
//#echo-server-simple-bind
//#echo-server-simple-handle
val connections: Source[IncomingConnection] = binding.connections
val connections: Source[IncomingConnection, Future[ServerBinding]] = StreamTcp().bind(localhost)
//#echo-server-simple-bind
connections runForeach { connection =>
println(s"New connection from: ${connection.remoteAddress}")
@@ -53,19 +47,35 @@ class StreamTcpDocSpec extends AkkaSpec {
//#echo-server-simple-handle
}
"actually working client-server CLI app" in {
"simple repl client" ignore {
val sys: ActorSystem = ???
//#repl-client
val connection: Flow[ByteString, ByteString, Future[OutgoingConnection]] = StreamTcp().outgoingConnection(localhost)
val repl = Flow[ByteString]
.transform(() => RecipeParseLines.parseLines("\n", maximumLineBytes = 256))
.map(text => println("Server: " + text))
.map(_ => readLine("> "))
.map {
case "q" =>
sys.shutdown(); ByteString("BYE")
case text => ByteString(s"$text")
}
connection.join(repl)
//#repl-client
}
"initial server banner echo server" ignore {
val connections = StreamTcp().bind(localhost)
val serverProbe = TestProbe()
val binding = StreamTcp().bind(localhost)
//#welcome-banner-chat-server
binding.connections runForeach { connection =>
connections runForeach { connection =>
val serverLogic = Flow() { implicit b =>
import FlowGraphImplicits._
// to be filled in by StreamTCP
val in = UndefinedSource[ByteString]
val out = UndefinedSink[ByteString]
import FlowGraph.Implicits._
// server logic, parses incoming commands
val commandParser = new PushStage[String, String] {
@@ -81,23 +91,22 @@ class StreamTcpDocSpec extends AkkaSpec {
val welcomeMsg = s"Welcome to: $localAddress, you are: $remoteAddress!\n"
val welcome = Source.single(ByteString(welcomeMsg))
val echo = Flow[ByteString]
val echo = b.add(Flow[ByteString]
.transform(() => RecipeParseLines.parseLines("\n", maximumLineBytes = 256))
//#welcome-banner-chat-server
.map { command ⇒ serverProbe.ref ! command; command }
//#welcome-banner-chat-server
.transform(() ⇒ commandParser)
.map(_ + "\n")
.map(ByteString(_))
.map(ByteString(_)))
val concat = Concat[ByteString]
val concat = b.add(Concat[ByteString]())
// first we emit the welcome message,
welcome ~> concat.first
welcome ~> concat.in(0)
// then we continue using the echo-logic Flow
in ~> echo ~> concat.second
echo.outlet ~> concat.in(1)
concat.out ~> out
(in, out)
(echo.inlet, concat.out)
}
connection.handleWith(serverLogic)
@@ -114,7 +123,7 @@
}
//#repl-client
val connection: OutgoingConnection = StreamTcp().outgoingConnection(localhost)
val connection = StreamTcp().outgoingConnection(localhost)
val replParser = new PushStage[String, ByteString] {
override def onPush(elem: String, ctx: Context[ByteString]): Directive = {
@@ -131,7 +140,7 @@
.map(_ => readLine("> "))
.transform(() ⇒ replParser)
connection.handleWith(repl)
connection.join(repl)
//#repl-client
serverProbe.expectMsg("Hello world")
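A sketch of the new client side in one piece, assuming the API shown in this file: outgoingConnection is now a Flow that materializes a Future[OutgoingConnection], and join closes the loop between the connection and the REPL logic (the spec's line parsing and test probe are omitted here).

import java.net.InetSocketAddress
import akka.actor.ActorSystem
import akka.stream.ActorFlowMaterializer
import akka.stream.scaladsl._
import akka.stream.scaladsl.StreamTcp._
import akka.util.ByteString
import scala.concurrent.Future

implicit val system = ActorSystem("tcp-docs") // illustrative name
implicit val mat = ActorFlowMaterializer()

val localhost = new InetSocketAddress("127.0.0.1", 8888)
val connection: Flow[ByteString, ByteString, Future[OutgoingConnection]] =
  StreamTcp().outgoingConnection(localhost)

val repl = Flow[ByteString]
  .map(bytes => println("Server: " + bytes.utf8String))
  .map(_ => readLine("> "))
  .map(text => ByteString(s"$text\n"))

connection.join(repl) // replaces the old connection.handleWith(repl)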

View file

@@ -8,14 +8,7 @@ package docs.stream
import akka.actor.ActorSystem
import akka.stream.ActorFlowMaterializer
import akka.stream.OverflowStrategy
import akka.stream.scaladsl.Broadcast
import akka.stream.scaladsl.Flow
import akka.stream.scaladsl.FlowGraph
import akka.stream.scaladsl.FlowGraphImplicits
import akka.stream.scaladsl.MaterializedMap
import akka.stream.scaladsl.RunnableFlow
import akka.stream.scaladsl.Sink
import akka.stream.scaladsl.Source
import akka.stream.scaladsl._
import scala.concurrent.Await
import scala.concurrent.Future
@@ -57,9 +50,12 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
implicit val executionContext = system.dispatcher
// Disable println
def println(s: Any): Unit = ()
trait Example0 {
//#tweet-source
val tweets: Source[Tweet]
val tweets: Source[Tweet, Unit]
//#tweet-source
}
@@ -74,7 +70,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
"filter and map" in {
//#authors-filter-map
val authors: Source[Author] =
val authors: Source[Author, Unit] =
tweets
.filter(_.hashtags.contains(akka))
.map(_.author)
@@ -82,7 +78,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
trait Example3 {
//#authors-collect
val authors: Source[Author] =
val authors: Source[Author, Unit] =
tweets.collect { case t if t.hashtags.contains(akka) => t.author }
//#authors-collect
}
@@ -98,29 +94,30 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
"mapConcat hashtags" in {
//#hashtags-mapConcat
val hashtags: Source[Hashtag] = tweets.mapConcat(_.hashtags.toList)
val hashtags: Source[Hashtag, Unit] = tweets.mapConcat(_.hashtags.toList)
//#hashtags-mapConcat
}
trait HiddenDefinitions {
//#flow-graph-broadcast
val writeAuthors: Sink[Author] = ???
val writeHashtags: Sink[Hashtag] = ???
val writeAuthors: Sink[Author, Unit] = ???
val writeHashtags: Sink[Hashtag, Unit] = ???
//#flow-graph-broadcast
}
"simple broadcast" in {
val writeAuthors: Sink[Author] = Sink.ignore
val writeHashtags: Sink[Hashtag] = Sink.ignore
val writeAuthors: Sink[Author, Unit] = Sink.ignore
val writeHashtags: Sink[Hashtag, Unit] = Sink.ignore
// format: OFF
//#flow-graph-broadcast
val g = FlowGraph { implicit builder =>
import FlowGraphImplicits._
val g = FlowGraph.closed() { implicit b =>
import FlowGraph.Implicits._
val b = Broadcast[Tweet]
tweets ~> b ~> Flow[Tweet].map(_.author) ~> writeAuthors
b ~> Flow[Tweet].mapConcat(_.hashtags.toList) ~> writeHashtags
val bcast = b.add(Broadcast[Tweet](2))
tweets ~> bcast.in
bcast.out(0) ~> Flow[Tweet].map(_.author) ~> writeAuthors
bcast.out(1) ~> Flow[Tweet].mapConcat(_.hashtags.toList) ~> writeHashtags
}
g.run()
//#flow-graph-broadcast
@@ -160,10 +157,9 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
//#tweets-fold-count
val sumSink = Sink.fold[Int, Int](0)(_ + _)
val counter: RunnableFlow = tweets.map(t => 1).to(sumSink)
val map: MaterializedMap = counter.run()
val counter: RunnableFlow[Future[Int]] = tweets.map(t => 1).toMat(sumSink)(Keep.right)
val sum: Future[Int] = map.get(sumSink)
val sum: Future[Int] = counter.run()
sum.foreach(c => println(s"Total tweets processed: $c"))
//#tweets-fold-count
@@ -180,26 +176,20 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
//#tweets-runnable-flow-materialized-twice
val sumSink = Sink.fold[Int, Int](0)(_ + _)
val counterRunnableFlow: RunnableFlow =
val counterRunnableFlow: RunnableFlow[Future[Int]] =
tweetsInMinuteFromNow
.filter(_.hashtags contains akka)
.map(t => 1)
.to(sumSink)
.toMat(sumSink)(Keep.right)
// materialize the stream once in the morning
val morningMaterialized = counterRunnableFlow.run()
// and once in the evening, reusing the
val eveningMaterialized = counterRunnableFlow.run()
val morningTweetsCount: Future[Int] = counterRunnableFlow.run()
// and once in the evening, reusing the flow
val eveningTweetsCount: Future[Int] = counterRunnableFlow.run()
// the sumSink materialized two different futures
// we use it as key to get the materialized value out of the materialized map
val morningTweetsCount: Future[Int] = morningMaterialized.get(sumSink)
val eveningTweetsCount: Future[Int] = eveningMaterialized.get(sumSink)
//#tweets-runnable-flow-materialized-twice
val map: MaterializedMap = counterRunnableFlow.run()
val sum: Future[Int] = map.get(sumSink)
val sum: Future[Int] = counterRunnableFlow.run()
sum.map { c => println(s"Total tweets processed: $c") }
}

View file

@@ -41,7 +41,7 @@ class RecipeByteStrings extends RecipeSpec {
val chunksStream = rawBytes.transform(() => new Chunker(ChunkLimit))
//#bytestring-chunker
val chunksFuture = chunksStream.grouped(10).runWith(Sink.head)
val chunksFuture = chunksStream.grouped(10).runWith(Sink.head())
val chunks = Await.result(chunksFuture, 3.seconds)
@@ -70,11 +70,11 @@
val bytes1 = Source(List(ByteString(1, 2), ByteString(3), ByteString(4, 5, 6), ByteString(7, 8, 9)))
val bytes2 = Source(List(ByteString(1, 2), ByteString(3), ByteString(4, 5, 6), ByteString(7, 8, 9, 10)))
Await.result(bytes1.via(limiter).grouped(10).runWith(Sink.head), 3.seconds)
Await.result(bytes1.via(limiter).grouped(10).runWith(Sink.head()), 3.seconds)
.fold(ByteString())(_ ++ _) should be(ByteString(1, 2, 3, 4, 5, 6, 7, 8, 9))
an[IllegalStateException] must be thrownBy {
Await.result(bytes2.via(limiter).grouped(10).runWith(Sink.head), 3.seconds)
Await.result(bytes2.via(limiter).grouped(10).runWith(Sink.head()), 3.seconds)
}
}
@@ -83,10 +83,10 @@
val data = Source(List(ByteString(1, 2), ByteString(3), ByteString(4, 5, 6), ByteString(7, 8, 9)))
//#compacting-bytestrings
val compacted: Source[ByteString] = data.map(_.compact)
val compacted: Source[ByteString, Unit] = data.map(_.compact)
//#compacting-bytestrings
Await.result(compacted.grouped(10).runWith(Sink.head), 3.seconds).forall(_.isCompact) should be(true)
Await.result(compacted.grouped(10).runWith(Sink.head()), 3.seconds).forall(_.isCompact) should be(true)
}
}

View file

@@ -41,10 +41,10 @@
}
}
val digest: Source[ByteString] = data.transform(() => digestCalculator("SHA-256"))
val digest: Source[ByteString, Unit] = data.transform(() => digestCalculator("SHA-256"))
//#calculating-digest
Await.result(digest.runWith(Sink.head), 3.seconds) should be(
Await.result(digest.runWith(Sink.head()), 3.seconds) should be(
ByteString(
0x24, 0x8d, 0x6a, 0x61,
0xd2, 0x06, 0x38, 0xb8,

View file

@@ -16,30 +16,26 @@ class RecipeDroppyBroadcast extends RecipeSpec {
val sub1 = SubscriberProbe[Int]()
val sub2 = SubscriberProbe[Int]()
val futureSink = Sink.head[Seq[Int]]
val mySink1 = Sink(sub1)
val mySink2 = Sink(sub2)
val futureSink = Sink.head[Seq[Int]]
val mySink3 = Flow[Int].grouped(200).to(futureSink)
val mySink3 = Flow[Int].grouped(200).toMat(futureSink)(Keep.right)
//#droppy-bcast
// Makes a sink drop elements if too slow
def droppySink[T](sink: Sink[T], bufferSize: Int): Sink[T] = {
Flow[T].buffer(bufferSize, OverflowStrategy.dropHead).to(sink)
}
val graph = FlowGraph.closed(mySink1, mySink2, mySink3)((_, _, _)) { implicit b =>
(sink1, sink2, sink3) =>
import FlowGraph.Implicits._
import FlowGraphImplicits._
val graph = FlowGraph { implicit builder =>
val bcast = Broadcast[Int]
val bcast = b.add(Broadcast[Int](3))
myElements ~> bcast
myElements ~> bcast
bcast ~> droppySink(mySink1, 10)
bcast ~> droppySink(mySink2, 10)
bcast ~> droppySink(mySink3, 10)
bcast.buffer(10, OverflowStrategy.dropHead) ~> sink1
bcast.buffer(10, OverflowStrategy.dropHead) ~> sink2
bcast.buffer(10, OverflowStrategy.dropHead) ~> sink3
}
//#droppy-bcast
Await.result(graph.run().get(futureSink), 3.seconds).sum should be(5050)
Await.result(graph.run()._3, 3.seconds).sum should be(5050)
sub1.expectSubscription().request(10)
sub2.expectSubscription().request(10)
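The droppy broadcast now combines all three sinks' materialized values into a tuple and puts the dropping buffer directly on each broadcast output. A sketch with labeled stand-ins (the spec wires subscriber probes instead), under the same API assumptions:

import akka.actor.ActorSystem
import akka.stream.{ ActorFlowMaterializer, OverflowStrategy }
import akka.stream.scaladsl._
import scala.concurrent.Future

implicit val system = ActorSystem("droppy-docs") // illustrative name
implicit val mat = ActorFlowMaterializer()

val myElements = Source(1 to 100) // stand-in source
val mySink1 = Sink.ignore         // stand-ins for the probe sinks
val mySink2 = Sink.ignore
val mySink3 = Flow[Int].grouped(200).toMat(Sink.head[Seq[Int]])(Keep.right)

val graph = FlowGraph.closed(mySink1, mySink2, mySink3)((_, _, _)) { implicit b =>
  (sink1, sink2, sink3) =>
    import FlowGraph.Implicits._
    val bcast = b.add(Broadcast[Int](3))
    myElements ~> bcast
    bcast.buffer(10, OverflowStrategy.dropHead) ~> sink1
    bcast.buffer(10, OverflowStrategy.dropHead) ~> sink2
    bcast.buffer(10, OverflowStrategy.dropHead) ~> sink3
}
val grouped: Future[Seq[Int]] = graph.run()._3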

View file

@@ -15,11 +15,11 @@ class RecipeFlattenSeq extends RecipeSpec {
val someDataSource = Source(List(List("1"), List("2"), List("3", "4", "5"), List("6", "7")))
//#flattening-seqs
val myData: Source[List[Message]] = someDataSource
val flattened: Source[Message] = myData.mapConcat(identity)
val myData: Source[List[Message], Unit] = someDataSource
val flattened: Source[Message, Unit] = myData.mapConcat(identity)
//#flattening-seqs
Await.result(flattened.grouped(8).runWith(Sink.head), 3.seconds) should be(List("1", "2", "3", "4", "5", "6", "7"))
Await.result(flattened.grouped(8).runWith(Sink.head()), 3.seconds) should be(List("1", "2", "3", "4", "5", "6", "7"))
}

View file

@@ -76,7 +76,7 @@ class RecipeGlobalRateLimit extends RecipeSpec {
"work" in {
//#global-limiter-flow
def limitGlobal[T](limiter: ActorRef, maxAllowedWait: FiniteDuration): Flow[T, T] = {
def limitGlobal[T](limiter: ActorRef, maxAllowedWait: FiniteDuration): Flow[T, T, Unit] = {
import akka.pattern.ask
import akka.util.Timeout
Flow[T].mapAsync { (element: T) =>
@@ -97,9 +97,9 @@ class RecipeGlobalRateLimit extends RecipeSpec {
val probe = SubscriberProbe[String]()
FlowGraph { implicit b =>
import FlowGraphImplicits._
val merge = Merge[String]
FlowGraph.closed() { implicit b =>
import FlowGraph.Implicits._
val merge = b.add(Merge[String](2))
source1 ~> merge ~> Sink(probe)
source2 ~> merge
}.run()

View file

@@ -23,17 +23,16 @@ class RecipeKeepAlive extends RecipeSpec {
val sink = Sink(sub)
//#inject-keepalive
val keepAliveStream: Source[ByteString] = ticks
val keepAliveStream: Source[ByteString, Unit] = ticks
.conflate(seed = (tick) => keepaliveMessage)((msg, newTick) => msg)
import FlowGraphImplicits._
val graph = FlowGraph { implicit builder =>
val unfairMerge = MergePreferred[ByteString]
val graph = FlowGraph.closed() { implicit builder =>
import FlowGraph.Implicits._
val unfairMerge = builder.add(MergePreferred[ByteString](1))
dataStream ~> unfairMerge.preferred // If data is available then no keepalive is injected
keepAliveStream ~> unfairMerge
unfairMerge ~> sink
dataStream ~> unfairMerge.preferred
// If data is available then no keepalive is injected
keepAliveStream ~> unfairMerge ~> sink
}
//#inject-keepalive

View file

@@ -18,11 +18,11 @@ class RecipeManualTrigger extends RecipeSpec {
val sink = Sink(sub)
//#manually-triggered-stream
import FlowGraphImplicits._
val graph = FlowGraph { implicit builder =>
val zip = Zip[Message, Trigger]
elements ~> zip.left
triggerSource ~> zip.right
val graph = FlowGraph.closed() { implicit builder =>
import FlowGraph.Implicits._
val zip = builder.add(Zip[Message, Trigger]())
elements ~> zip.in0
triggerSource ~> zip.in1
zip.out ~> Flow[(Message, Trigger)].map { case (msg, trigger) => msg } ~> sink
}
//#manually-triggered-stream
@@ -57,13 +57,12 @@ class RecipeManualTrigger extends RecipeSpec {
val sink = Sink(sub)
//#manually-triggered-stream-zipwith
import FlowGraphImplicits._
val graph = FlowGraph { implicit builder =>
val zip = ZipWith[Message, Trigger, Message](
(msg: Message, trigger: Trigger) => msg)
val graph = FlowGraph.closed() { implicit builder =>
import FlowGraph.Implicits._
val zip = builder.add(ZipWith((msg: Message, trigger: Trigger) => msg))
elements ~> zip.left
triggerSource ~> zip.right
elements ~> zip.in0
triggerSource ~> zip.in1
zip.out ~> sink
}
//#manually-triggered-stream-zipwith

View file

@@ -20,7 +20,7 @@ class RecipeMissedTicks extends RecipeSpec {
//#missed-ticks
// tickStream is a Source[Tick]
val missedTicks: Source[Int] =
val missedTicks: Source[Int, Unit] =
tickStream.conflate(seed = (_) => 0)(
(missedTicks, tick) => missedTicks + 1)
//#missed-ticks

View file

@@ -15,7 +15,7 @@ class RecipeMultiGroupBy extends RecipeSpec {
case class Topic(name: String)
val elems = Source(List("1: a", "1: b", "all: c", "all: d", "1: e"))
val topicMapper: (Message) => immutable.Seq[Topic] = { msg =>
val topicMapper = { msg: Message =>
if (msg.startsWith("1")) List(Topic("1"))
else List(Topic("1"), Topic("2"))
}
@@ -28,14 +28,14 @@ }
}
//#multi-groupby
val messageAndTopic: Source[(Message, Topic)] = elems.mapConcat { msg: Message =>
val messageAndTopic: Source[(Message, Topic), Unit] = elems.mapConcat { msg: Message =>
val topicsForMessage = topicMapper(msg)
// Create a (Msg, Topic) pair for each of the topics
// the message belongs to
topicsForMessage.map(msg -> _)
}
val multiGroups: Source[(Topic, Source[String])] = messageAndTopic.groupBy(_._2).map {
val multiGroups: Source[(Topic, Source[String, Unit]), Unit] = messageAndTopic.groupBy(_._2).map {
case (topic, topicStream) =>
// chopping off the topic from the (Message, Topic) pairs
(topic, topicStream.map(_._1))
@@ -43,8 +43,8 @@ class RecipeMultiGroupBy extends RecipeSpec {
//#multi-groupby
val result = multiGroups.map {
case (topic, topicMessages) => topicMessages.grouped(10).map(topic.name + _.mkString("[", ", ", "]")).runWith(Sink.head)
}.mapAsync(identity).grouped(10).runWith(Sink.head)
case (topic, topicMessages) => topicMessages.grouped(10).map(topic.name + _.mkString("[", ", ", "]")).runWith(Sink.head())
}.mapAsync(identity).grouped(10).runWith(Sink.head())
Await.result(result, 3.seconds).toSet should be(Set(
"1[1: a, 1: b, all: c, all: d, 1: e]",

View file

@@ -24,7 +24,7 @@ class RecipeParseLines extends RecipeSpec {
val linesStream = rawData.transform(() => parseLines("\r\n", 100))
Await.result(linesStream.grouped(10).runWith(Sink.head), 3.seconds) should be(List(
Await.result(linesStream.grouped(10).runWith(Sink.head()), 3.seconds) should be(List(
"Hello World\r!",
"Hello Akka!",
"Hello Streams!",

View file

@@ -18,10 +18,10 @@ class RecipeReduceByKey extends RecipeSpec {
//#word-count
// split the words into separate streams first
val wordStreams: Source[(String, Source[String])] = words.groupBy(identity)
val wordStreams: Source[(String, Source[String, Unit]), Unit] = words.groupBy(identity)
// add counting logic to the streams
val countedWords: Source[Future[(String, Int)]] = wordStreams.map {
val countedWords: Source[Future[(String, Int)], Unit] = wordStreams.map {
case (word, wordStream) =>
wordStream.runFold((word, 0)) {
case ((w, count), _) => (w, count + 1)
@@ -29,13 +29,13 @@ }
}
// get a stream of word counts
val counts: Source[(String, Int)] =
val counts: Source[(String, Int), Unit] =
countedWords
.buffer(MaximumDistinctWords, OverflowStrategy.fail)
.mapAsync(identity)
//#word-count
Await.result(counts.grouped(10).runWith(Sink.head), 3.seconds).toSet should be(Set(
Await.result(counts.grouped(10).runWith(Sink.head()), 3.seconds).toSet should be(Set(
("hello", 2),
("world", 1),
("and", 1),
@@ -52,7 +52,7 @@ class RecipeReduceByKey extends RecipeSpec {
def reduceByKey[In, K, Out](
maximumGroupSize: Int,
groupKey: (In) => K,
foldZero: (K) => Out)(fold: (Out, In) => Out): Flow[In, (K, Out)] = {
foldZero: (K) => Out)(fold: (Out, In) => Out): Flow[In, (K, Out), Unit] = {
val groupStreams = Flow[In].groupBy(groupKey)
val reducedValues = groupStreams.map {
@@ -72,7 +72,7 @@ class RecipeReduceByKey extends RecipeSpec {
//#reduce-by-key-general
Await.result(wordCounts.grouped(10).runWith(Sink.head), 3.seconds).toSet should be(Set(
Await.result(wordCounts.grouped(10).runWith(Sink.head()), 3.seconds).toSet should be(Set(
("hello", 2),
("world", 1),
("and", 1),

View file

@@ -13,7 +13,7 @@ class RecipeSimpleDrop extends RecipeSpec {
"work" in {
//#simple-drop
val droppyStream: Flow[Message, Message] =
val droppyStream: Flow[Message, Message, Unit] =
Flow[Message].conflate(seed = identity)((lastMessage, newMessage) => newMessage)
//#simple-drop

View file

@@ -16,7 +16,7 @@ class RecipeToStrict extends RecipeSpec {
//#draining-to-seq
val strict: Future[immutable.Seq[Message]] =
myData.grouped(MaxAllowedSeqSize).runWith(Sink.head)
myData.grouped(MaxAllowedSeqSize).runWith(Sink.head())
//#draining-to-seq
Await.result(strict, 3.seconds) should be(List("1", "2", "3"))

View file

@@ -17,18 +17,12 @@ class RecipeWorkerPool extends RecipeSpec {
val worker = Flow[String].map(_ + " done")
//#worker-pool
def balancer[In, Out](worker: Flow[In, Out], workerCount: Int): Flow[In, Out] = {
import FlowGraphImplicits._
def balancer[In, Out](worker: Flow[In, Out, Unit], workerCount: Int): Flow[In, Out, Unit] = {
import FlowGraph.Implicits._
Flow[In, Out]() { implicit graphBuilder =>
val jobsIn = UndefinedSource[In]
val resultsOut = UndefinedSink[Out]
val balancer = Balance[In](waitForAllDownstreams = true)
val merge = Merge[Out]
jobsIn ~> balancer // Jobs are fed into the balancer
merge ~> resultsOut // the merged results are sent out
Flow() { implicit b =>
val balancer = b.add(Balance[In](workerCount, waitForAllDownstreams = true))
val merge = b.add(Merge[Out](workerCount))
for (_ <- 1 to workerCount) {
// for each worker, add an edge from the balancer to the worker, then wire
@@ -36,14 +30,14 @@ class RecipeWorkerPool extends RecipeSpec {
balancer ~> worker ~> merge
}
(jobsIn, resultsOut)
(balancer.in, merge.out)
}
}
val processedJobs: Source[Result] = myJobs.via(balancer(worker, 3))
val processedJobs: Source[Result, Unit] = myJobs.via(balancer(worker, 3))
//#worker-pool
Await.result(processedJobs.grouped(10).runWith(Sink.head), 3.seconds).toSet should be(Set(
Await.result(processedJobs.grouped(10).runWith(Sink.head()), 3.seconds).toSet should be(Set(
"1 done", "2 done", "3 done", "4 done", "5 done"))
}
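Consolidated, the new balancer reads as below under the same API assumptions (myJobs replaced by an inline source; the local Balance junction is renamed to avoid shadowing the method): a partial Flow is built by returning its single open inlet and outlet from the graph block.

import akka.actor.ActorSystem
import akka.stream.ActorFlowMaterializer
import akka.stream.scaladsl._

implicit val system = ActorSystem("pool-docs") // illustrative name
implicit val mat = ActorFlowMaterializer()

def balancer[In, Out](worker: Flow[In, Out, Unit], workerCount: Int): Flow[In, Out, Unit] = {
  import FlowGraph.Implicits._
  Flow() { implicit b =>
    val balance = b.add(Balance[In](workerCount, waitForAllDownstreams = true))
    val merge = b.add(Merge[Out](workerCount))
    for (_ <- 1 to workerCount) {
      // wire each worker between the balancer and the merge
      balance ~> worker ~> merge
    }
    // exposing (inlet, outlet) turns the open graph into a Flow
    (balance.in, merge.out)
  }
}

val worker = Flow[String].map(_ + " done")
val processedJobs: Source[String, Unit] =
  Source(List("1", "2", "3")).via(balancer(worker, 3))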