2014-12-22 12:43:01 +01:00
|
|
|
package docs.stream
|
|
|
|
|
|
2015-01-27 18:29:20 +01:00
|
|
|
import akka.stream.{ OverflowStrategy, ActorFlowMaterializerSettings, ActorFlowMaterializer }
|
2014-12-22 12:43:01 +01:00
|
|
|
import akka.stream.scaladsl._
|
|
|
|
|
import akka.stream.testkit.AkkaSpec
|
2015-04-10 16:49:49 +02:00
|
|
|
import akka.stream.OperationAttributes
|
2014-12-22 12:43:01 +01:00
|
|
|
|
|
|
|
|
class StreamBuffersRateSpec extends AkkaSpec {

  // Materializer used by every stream in this spec (default buffer settings).
  implicit val mat = ActorFlowMaterializer()

  "Demonstrate pipelining" in {
    // Shadow Predef.println so the doc snippet below stays silent when run as a test;
    // the snippet text itself (between the //# markers) is included verbatim in the docs.
    def println(s: Any) = ()
    //#pipelining
    Source(1 to 3)
      .map { i => println(s"A: $i"); i }
      .map { i => println(s"B: $i"); i }
      .map { i => println(s"C: $i"); i }
      .runWith(Sink.ignore)
    //#pipelining
  }

  "Demonstrate buffer sizes" in {
    //#materializer-buffer
    val materializer = ActorFlowMaterializer(
      ActorFlowMaterializerSettings(system)
        .withInputBuffer(
          initialSize = 64,
          maxSize = 64))
    //#materializer-buffer

    //#section-buffer
    val section = Flow[Int].map(_ * 2)
      .withAttributes(OperationAttributes.inputBuffer(initial = 1, max = 1))
    val flow = section.via(Flow[Int].map(_ / 2)) // the buffer size of this map is the default
    //#section-buffer
  }

  "buffering abstraction leak" in {
    //#buffering-abstraction-leak
    import scala.concurrent.duration._
    case class Tick()

    FlowGraph.closed() { implicit b =>
      import FlowGraph.Implicits._

      // Zip each 3-second tick with the conflated message count; only the count
      // is emitted downstream (the tick merely paces the emission).
      val zipper = b.add(ZipWith[Tick, Int, Int]((tick, count) => count))

      Source(initialDelay = 3.second, interval = 3.second, Tick()) ~> zipper.in0

      // Conflate counts messages that arrive while downstream (the zip) is not
      // ready — this is the internal-buffering behavior the snippet demonstrates.
      Source(initialDelay = 1.second, interval = 1.second, "message!")
        .conflate(seed = (_) => 1)((count, _) => count + 1) ~> zipper.in1

      zipper.out ~> Sink.foreach(println)
    }
    //#buffering-abstraction-leak
  }

  // NOTE(review): spec name fixed from "explcit buffers" (typo).
  "explicit buffers" in {
    trait Job
    def inboundJobsConnector(): Source[Job, Unit] = Source.empty

    //#explicit-buffers-backpressure
    // Getting a stream of jobs from an imaginary external system as a Source
    val jobs: Source[Job, Unit] = inboundJobsConnector()
    jobs.buffer(1000, OverflowStrategy.backpressure)
    //#explicit-buffers-backpressure

    //#explicit-buffers-droptail
    jobs.buffer(1000, OverflowStrategy.dropTail)
    //#explicit-buffers-droptail

    //#explicit-buffers-dropnew
    jobs.buffer(1000, OverflowStrategy.dropNew)
    //#explicit-buffers-dropnew

    //#explicit-buffers-drophead
    jobs.buffer(1000, OverflowStrategy.dropHead)
    //#explicit-buffers-drophead

    //#explicit-buffers-dropbuffer
    jobs.buffer(1000, OverflowStrategy.dropBuffer)
    //#explicit-buffers-dropbuffer

    //#explicit-buffers-fail
    jobs.buffer(1000, OverflowStrategy.fail)
    //#explicit-buffers-fail
  }

}
|