also fix FlowInterleaveSpec

Also-by: Johan Andrén <johan@markatta.com>
Also-by: Roland Kuhn <rk@rkuhn.info>
Also-by: Martynas Mickevičius <mmartynas@gmail.com>
Endre Sándor Varga 2016-01-20 10:00:37 +02:00 committed by Martynas Mickevičius
parent ef77b56e66
commit 60497f6561
195 changed files with 1110 additions and 857 deletions
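
Most of this diff is one mechanical migration: akka.NotUsed replaces Unit as the materialized value of sources, flows and graphs that materialize nothing, and Sink.ignore / runForeach now materialize Future[Done] instead of Future[Unit]. A minimal sketch of the post-change types (assumes an akka-stream 2.4.2-era classpath; the object and value names are illustrative, not from this commit):

import akka.{ Done, NotUsed }
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Flow, Sink, Source }
import scala.concurrent.Future

object NotUsedMigrationSketch extends App {
  implicit val system = ActorSystem("sketch")
  implicit val materializer = ActorMaterializer()

  // previously Source[Int, Unit] and Flow[Int, Int, Unit]
  val numbers: Source[Int, NotUsed] = Source(1 to 10)
  val doubled: Flow[Int, Int, NotUsed] = Flow[Int].map(_ * 2)

  // previously Future[Unit]; Sink.ignore now materializes Future[Done]
  val done: Future[Done] = numbers.via(doubled).runWith(Sink.ignore)

  import system.dispatcher
  done.onComplete(_ => system.terminate())
}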


@@ -5,6 +5,7 @@ package docs.http.scaladsl.server
 import java.io.File
+import akka.Done
 import akka.actor.ActorRef
 import akka.http.scaladsl.model.Multipart.FormData.BodyPart
 import akka.stream.io.{ Framing }
@@ -73,18 +74,18 @@ class FileUploadExamplesSpec extends RoutingSpec {
 val csvUploads =
 path("metadata" / LongNumber) { id =>
 entity(as[Multipart.FormData]) { formData =>
-val done = formData.parts.mapAsync(1) {
+val done: Future[Done] = formData.parts.mapAsync(1) {
 case b: BodyPart if b.filename.exists(_.endsWith(".csv")) =>
 b.entity.dataBytes
 .via(splitLines)
 .map(_.utf8String.split(",").toVector)
 .runForeach(csv =>
 metadataActor ! MetadataActor.Entry(id, csv))
-case _ => Future.successful(Unit)
+case _ => Future.successful(Done)
 }.runWith(Sink.ignore)
 // when processing has finished, create a response for the user
-onSuccess(done) {
+onSuccess(done) { _ =>
 complete {
 "ok!"
 }
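
The `Future.successful(Unit)` branch removed above was a real bug, not just a style issue: in value position, `Unit` denotes the companion object, so the expression has type Future[Unit.type] rather than Future[Unit]. Returning akka.Done gives both mapAsync branches the same explicit element type. A two-line illustration (plain Scala plus akka.Done, nothing else assumed):

import akka.Done
import scala.concurrent.Future

val accidental = Future.successful(Unit)   // Future[Unit.type]: the Unit companion object, not ()
val intended: Future[Done] = Future.successful(Done)  // the corrected pattern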


@@ -92,5 +92,4 @@ class FormFieldDirectivesExamplesSpec extends RoutingSpec {
 }
 }
 }


@@ -3,6 +3,7 @@
 */
 package docs.persistence.query
+import akka.NotUsed
 import akka.persistence.journal.{ EventAdapter, EventSeq }
 import akka.testkit.AkkaSpec
 import akka.persistence.query.PersistenceQuery
@@ -56,10 +57,10 @@ class LeveldbPersistenceQueryDocSpec(config: String) extends AkkaSpec(config) {
 val queries = PersistenceQuery(system).readJournalFor[LeveldbReadJournal](
 LeveldbReadJournal.Identifier)
-val src: Source[EventEnvelope, Unit] =
+val src: Source[EventEnvelope, NotUsed] =
 queries.eventsByPersistenceId("some-persistence-id", 0L, Long.MaxValue)
-val events: Source[Any, Unit] = src.map(_.event)
+val events: Source[Any, NotUsed] = src.map(_.event)
 //#EventsByPersistenceId
 }
@@ -69,7 +70,7 @@ class LeveldbPersistenceQueryDocSpec(config: String) extends AkkaSpec(config) {
 val queries = PersistenceQuery(system).readJournalFor[LeveldbReadJournal](
 LeveldbReadJournal.Identifier)
-val src: Source[String, Unit] = queries.allPersistenceIds()
+val src: Source[String, NotUsed] = queries.allPersistenceIds()
 //#AllPersistenceIds
 }
@@ -79,7 +80,7 @@ class LeveldbPersistenceQueryDocSpec(config: String) extends AkkaSpec(config) {
 val queries = PersistenceQuery(system).readJournalFor[LeveldbReadJournal](
 LeveldbReadJournal.Identifier)
-val src: Source[EventEnvelope, Unit] =
+val src: Source[EventEnvelope, NotUsed] =
 queries.eventsByTag(tag = "green", offset = 0L)
 //#EventsByTag
 }


@@ -4,6 +4,7 @@
 package docs.persistence.query
+import akka.NotUsed
 import akka.actor._
 import akka.persistence.{ Recovery, PersistentActor }
 import akka.persistence.query._
@@ -53,25 +54,25 @@ object PersistenceQueryDocSpec {
 config.getDuration("refresh-interval", MILLISECONDS).millis
 override def eventsByTag(
-tag: String, offset: Long = 0L): Source[EventEnvelope, Unit] = {
+tag: String, offset: Long = 0L): Source[EventEnvelope, NotUsed] = {
 val props = MyEventsByTagPublisher.props(tag, offset, refreshInterval)
 Source.actorPublisher[EventEnvelope](props)
-.mapMaterializedValue(_ => ())
+.mapMaterializedValue(_ => NotUsed)
 }
 override def eventsByPersistenceId(
 persistenceId: String, fromSequenceNr: Long = 0L,
-toSequenceNr: Long = Long.MaxValue): Source[EventEnvelope, Unit] = {
+toSequenceNr: Long = Long.MaxValue): Source[EventEnvelope, NotUsed] = {
 // implement in a similar way as eventsByTag
 ???
 }
-override def allPersistenceIds(): Source[String, Unit] = {
+override def allPersistenceIds(): Source[String, NotUsed] = {
 // implement in a similar way as eventsByTag
 ???
 }
-override def currentPersistenceIds(): Source[String, Unit] = {
+override def currentPersistenceIds(): Source[String, NotUsed] = {
 // implement in a similar way as eventsByTag
 ???
 }
@@ -95,19 +96,19 @@ object PersistenceQueryDocSpec {
 with akka.persistence.query.javadsl.CurrentPersistenceIdsQuery {
 override def eventsByTag(
-tag: String, offset: Long = 0L): javadsl.Source[EventEnvelope, Unit] =
+tag: String, offset: Long = 0L): javadsl.Source[EventEnvelope, NotUsed] =
 scaladslReadJournal.eventsByTag(tag, offset).asJava
 override def eventsByPersistenceId(
 persistenceId: String, fromSequenceNr: Long = 0L,
-toSequenceNr: Long = Long.MaxValue): javadsl.Source[EventEnvelope, Unit] =
+toSequenceNr: Long = Long.MaxValue): javadsl.Source[EventEnvelope, NotUsed] =
 scaladslReadJournal.eventsByPersistenceId(
 persistenceId, fromSequenceNr, toSequenceNr).asJava
-override def allPersistenceIds(): javadsl.Source[String, Unit] =
+override def allPersistenceIds(): javadsl.Source[String, NotUsed] =
 scaladslReadJournal.allPersistenceIds().asJava
-override def currentPersistenceIds(): javadsl.Source[String, Unit] =
+override def currentPersistenceIds(): javadsl.Source[String, NotUsed] =
 scaladslReadJournal.currentPersistenceIds().asJava
 // possibility to add more plugin specific queries
@@ -201,7 +202,7 @@ class PersistenceQueryDocSpec(s: String) extends AkkaSpec(s) {
 "akka.persistence.query.my-read-journal")
 // issue query to journal
-val source: Source[EventEnvelope, Unit] =
+val source: Source[EventEnvelope, NotUsed] =
 readJournal.eventsByPersistenceId("user-1337", 0, Long.MaxValue)
 // materialize stream, consuming events
@@ -220,7 +221,7 @@ class PersistenceQueryDocSpec(s: String) extends AkkaSpec(s) {
 //#events-by-tag
 // assuming journal is able to work with numeric offsets we can:
-val blueThings: Source[EventEnvelope, Unit] =
+val blueThings: Source[EventEnvelope, NotUsed] =
 readJournal.eventsByTag("blue")
 // find top 10 blue things:
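
Worth noting in the read-journal hunks above: `mapMaterializedValue(_ => NotUsed)` is how a custom journal implementation normalizes whatever its underlying publisher stage materializes. The same trick discards any unwanted materialized value; a small sketch using the stock Source.tick (which materializes a Cancellable) instead of the commit's MyEventsByTagPublisher:

import akka.NotUsed
import akka.actor.Cancellable
import akka.stream.scaladsl.Source
import scala.concurrent.duration._

val ticks: Source[String, Cancellable] = Source.tick(1.second, 1.second, "tick")

// hide the Cancellable behind the uniform NotUsed materialized value
val normalized: Source[String, NotUsed] = ticks.mapMaterializedValue(_ => NotUsed)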


@@ -3,6 +3,7 @@
 */
 package docs.stream
+import akka.NotUsed
 import akka.stream._
 import akka.stream.scaladsl.Tcp.OutgoingConnection
 import akka.stream.scaladsl._
@@ -181,7 +182,7 @@ class CompositionDocSpec extends AkkaSpec {
 val source: Source[Int, Promise[Option[Int]]] = Source.maybe[Int]
 // Materializes to NotUsed (black)
-val flow1: Flow[Int, Int, Unit] = Flow[Int].take(100)
+val flow1: Flow[Int, Int, NotUsed] = Flow[Int].take(100)
 // Materializes to Promise[Int] (red)
 val nestedSource: Source[Int, Promise[Option[Int]]] =
@@ -190,7 +191,7 @@ class CompositionDocSpec extends AkkaSpec {
 //#mat-combine-2
 // Materializes to NotUsed (orange)
-val flow2: Flow[Int, ByteString, Unit] = Flow[Int].map { i => ByteString(i.toString) }
+val flow2: Flow[Int, ByteString, NotUsed] = Flow[Int].map { i => ByteString(i.toString) }
 // Materializes to Future[OutgoingConnection] (yellow)
 val flow3: Flow[ByteString, ByteString, Future[OutgoingConnection]] =


@@ -3,6 +3,7 @@
 */
 package docs.stream
+import akka.NotUsed
 import akka.actor.Cancellable
 import akka.stream.{ ClosedShape, FlowShape }
 import akka.stream.scaladsl._
@@ -134,11 +135,11 @@ class FlowDocSpec extends AkkaSpec {
 source.to(Sink.foreach(println(_)))
 // Starting from a Sink
-val sink: Sink[Int, Unit] = Flow[Int].map(_ * 2).to(Sink.foreach(println(_)))
+val sink: Sink[Int, NotUsed] = Flow[Int].map(_ * 2).to(Sink.foreach(println(_)))
 Source(1 to 6).to(sink)
 // Broadcast to a sink inline
-val otherSink: Sink[Int, Unit] =
+val otherSink: Sink[Int, NotUsed] =
 Flow[Int].alsoTo(Sink.foreach(println(_))).to(Sink.ignore)
 Source(1 to 6).to(otherSink)


@@ -3,6 +3,7 @@
 */
 package docs.stream
+import akka.NotUsed
 import akka.stream._
 import akka.stream.scaladsl._
 import akka.stream.testkit.AkkaSpec
@@ -21,7 +22,7 @@ class FlowGraphDocSpec extends AkkaSpec {
 "build simple graph" in {
 //format: OFF
 //#simple-flow-graph
-val g = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder: GraphDSL.Builder[Unit] =>
+val g = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder: GraphDSL.Builder[NotUsed] =>
 import GraphDSL.Implicits._
 val in = Source(1 to 10)
 val out = Sink.ignore
@@ -131,7 +132,7 @@ class FlowGraphDocSpec extends AkkaSpec {
 object PriorityWorkerPool {
 def apply[In, Out](
 worker: Flow[In, Out, Any],
-workerCount: Int): Graph[PriorityWorkerPoolShape[In, Out], Unit] = {
+workerCount: Int): Graph[PriorityWorkerPoolShape[In, Out], NotUsed] = {
 GraphDSL.create() { implicit b =>
 import GraphDSL.Implicits._
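
Since a no-argument GraphDSL.create() now yields a graph that materializes NotUsed, the implicit builder is typed GraphDSL.Builder[NotUsed]. A minimal closed-graph sketch against the 2.4.x scaladsl (the value names are illustrative, not from the commit):

import akka.NotUsed
import akka.stream.ClosedShape
import akka.stream.scaladsl.{ Flow, GraphDSL, RunnableGraph, Sink, Source }

val g: RunnableGraph[NotUsed] = RunnableGraph.fromGraph(
  GraphDSL.create() { implicit builder: GraphDSL.Builder[NotUsed] =>
    import GraphDSL.Implicits._
    // wire a trivial linear graph and close it
    Source(1 to 10) ~> Flow[Int].map(_ * 2) ~> Sink.ignore
    ClosedShape
  })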


@@ -1,5 +1,6 @@
 package docs.stream
+import akka.NotUsed
 import akka.stream.FlowShape
 import akka.stream.scaladsl.{ GraphDSL, Merge, Balance, Source, Flow }
 import akka.stream.testkit.AkkaSpec
@@ -15,11 +16,11 @@ class FlowParallelismDocSpec extends AkkaSpec {
 //format: OFF
 //#pipelining
 // Takes a scoop of batter and creates a pancake with one side cooked
-val fryingPan1: Flow[ScoopOfBatter, HalfCookedPancake, Unit] =
+val fryingPan1: Flow[ScoopOfBatter, HalfCookedPancake, NotUsed] =
 Flow[ScoopOfBatter].map { batter => HalfCookedPancake() }
 // Finishes a half-cooked pancake
-val fryingPan2: Flow[HalfCookedPancake, Pancake, Unit] =
+val fryingPan2: Flow[HalfCookedPancake, Pancake, NotUsed] =
 Flow[HalfCookedPancake].map { halfCooked => Pancake() }
 //#pipelining
 //format: ON
@@ -28,17 +29,17 @@ class FlowParallelismDocSpec extends AkkaSpec {
 //#pipelining
 // With the two frying pans we can fully cook pancakes
-val pancakeChef: Flow[ScoopOfBatter, Pancake, Unit] =
+val pancakeChef: Flow[ScoopOfBatter, Pancake, NotUsed] =
 Flow[ScoopOfBatter].via(fryingPan1).via(fryingPan2)
 //#pipelining
 }
 "Demonstrate parallel processing" in {
 //#parallelism
-val fryingPan: Flow[ScoopOfBatter, Pancake, Unit] =
+val fryingPan: Flow[ScoopOfBatter, Pancake, NotUsed] =
 Flow[ScoopOfBatter].map { batter => Pancake() }
-val pancakeChef: Flow[ScoopOfBatter, Pancake, Unit] = Flow.fromGraph(GraphDSL.create() { implicit builder =>
+val pancakeChef: Flow[ScoopOfBatter, Pancake, NotUsed] = Flow.fromGraph(GraphDSL.create() { implicit builder =>
 val dispatchBatter = builder.add(Balance[ScoopOfBatter](2))
 val mergePancakes = builder.add(Merge[Pancake](2))
@@ -58,7 +59,7 @@ class FlowParallelismDocSpec extends AkkaSpec {
 "Demonstrate parallelized pipelines" in {
 //#parallel-pipeline
-val pancakeChef: Flow[ScoopOfBatter, Pancake, Unit] =
+val pancakeChef: Flow[ScoopOfBatter, Pancake, NotUsed] =
 Flow.fromGraph(GraphDSL.create() { implicit builder =>
 val dispatchBatter = builder.add(Balance[ScoopOfBatter](2))
@@ -76,7 +77,7 @@ class FlowParallelismDocSpec extends AkkaSpec {
 "Demonstrate pipelined parallel processing" in {
 //#pipelined-parallel
-val pancakeChefs1: Flow[ScoopOfBatter, HalfCookedPancake, Unit] =
+val pancakeChefs1: Flow[ScoopOfBatter, HalfCookedPancake, NotUsed] =
 Flow.fromGraph(GraphDSL.create() { implicit builder =>
 val dispatchBatter = builder.add(Balance[ScoopOfBatter](2))
 val mergeHalfPancakes = builder.add(Merge[HalfCookedPancake](2))
@@ -89,7 +90,7 @@ class FlowParallelismDocSpec extends AkkaSpec {
 FlowShape(dispatchBatter.in, mergeHalfPancakes.out)
 })
-val pancakeChefs2: Flow[HalfCookedPancake, Pancake, Unit] =
+val pancakeChefs2: Flow[HalfCookedPancake, Pancake, NotUsed] =
 Flow.fromGraph(GraphDSL.create() { implicit builder =>
 val dispatchHalfPancakes = builder.add(Balance[HalfCookedPancake](2))
 val mergePancakes = builder.add(Merge[Pancake](2))
@@ -102,7 +103,7 @@ class FlowParallelismDocSpec extends AkkaSpec {
 FlowShape(dispatchHalfPancakes.in, mergePancakes.out)
 })
-val kitchen: Flow[ScoopOfBatter, Pancake, Unit] = pancakeChefs1.via(pancakeChefs2)
+val kitchen: Flow[ScoopOfBatter, Pancake, NotUsed] = pancakeChefs1.via(pancakeChefs2)
 //#pipelined-parallel
 }


@@ -3,6 +3,7 @@
 */
 package docs.stream
+import akka.NotUsed
 import akka.stream.scaladsl.{ Keep, Sink, Flow, Source }
 import akka.stream.stage._
 import akka.stream._
@@ -68,10 +69,10 @@ class GraphStageDocSpec extends AkkaSpec {
 //#simple-source-usage
 // A GraphStage is a proper Graph, just like what GraphDSL.create would return
-val sourceGraph: Graph[SourceShape[Int], Unit] = new NumbersSource
+val sourceGraph: Graph[SourceShape[Int], NotUsed] = new NumbersSource
 // Create a Source from the Graph to access the DSL
-val mySource: Source[Int, Unit] = Source.fromGraph(new NumbersSource)
+val mySource: Source[Int, NotUsed] = Source.fromGraph(new NumbersSource)
 // Returns 55
 val result1: Future[Int] = mySource.take(10).runFold(0)(_ + _)


@@ -3,6 +3,8 @@
 */
 package docs.stream
+import akka.NotUsed
 import scala.concurrent.duration._
 import akka.stream.testkit.AkkaSpec
 import akka.stream.scaladsl._
@@ -132,21 +134,21 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
 val emailServer = new EmailServer(probe.ref)
 //#tweet-authors
-val authors: Source[Author, Unit] =
+val authors: Source[Author, NotUsed] =
 tweets
 .filter(_.hashtags.contains(akka))
 .map(_.author)
 //#tweet-authors
 //#email-addresses-mapAsync
-val emailAddresses: Source[String, Unit] =
+val emailAddresses: Source[String, NotUsed] =
 authors
 .mapAsync(4)(author => addressSystem.lookupEmail(author.handle))
 .collect { case Some(emailAddress) => emailAddress }
 //#email-addresses-mapAsync
 //#send-emails
-val sendEmails: RunnableGraph[Unit] =
+val sendEmails: RunnableGraph[NotUsed] =
 emailAddresses
 .mapAsync(4)(address => {
 emailServer.send(
@@ -168,14 +170,14 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
 "lookup email with mapAsync and supervision" in {
 val addressSystem = new AddressSystem2
-val authors: Source[Author, Unit] =
+val authors: Source[Author, NotUsed] =
 tweets.filter(_.hashtags.contains(akka)).map(_.author)
 //#email-addresses-mapAsync-supervision
 import ActorAttributes.supervisionStrategy
 import Supervision.resumingDecider
-val emailAddresses: Source[String, Unit] =
+val emailAddresses: Source[String, NotUsed] =
 authors.via(
 Flow[Author].mapAsync(4)(author => addressSystem.lookupEmail(author.handle))
 .withAttributes(supervisionStrategy(resumingDecider)))
@@ -188,15 +190,15 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
 val emailServer = new EmailServer(probe.ref)
 //#external-service-mapAsyncUnordered
-val authors: Source[Author, Unit] =
+val authors: Source[Author, NotUsed] =
 tweets.filter(_.hashtags.contains(akka)).map(_.author)
-val emailAddresses: Source[String, Unit] =
+val emailAddresses: Source[String, NotUsed] =
 authors
 .mapAsyncUnordered(4)(author => addressSystem.lookupEmail(author.handle))
 .collect { case Some(emailAddress) => emailAddress }
-val sendEmails: RunnableGraph[Unit] =
+val sendEmails: RunnableGraph[NotUsed] =
 emailAddresses
 .mapAsyncUnordered(4)(address => {
 emailServer.send(
@@ -231,7 +233,7 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
 //#blocking-mapAsync
 val blockingExecutionContext = system.dispatchers.lookup("blocking-dispatcher")
-val sendTextMessages: RunnableGraph[Unit] =
+val sendTextMessages: RunnableGraph[NotUsed] =
 phoneNumbers
 .mapAsync(4)(phoneNo => {
 Future {
@@ -271,7 +273,7 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
 smsServer.send(TextMessage(to = phoneNo, body = "I like your tweet"))
 }
 .withAttributes(ActorAttributes.dispatcher("blocking-dispatcher"))
-val sendTextMessages: RunnableGraph[Unit] =
+val sendTextMessages: RunnableGraph[NotUsed] =
 phoneNumbers.via(send).to(Sink.ignore)
 sendTextMessages.run()
@@ -291,10 +293,10 @@ class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
 val database = system.actorOf(Props(classOf[DatabaseService], probe.ref), "db")
 //#save-tweets
-val akkaTweets: Source[Tweet, Unit] = tweets.filter(_.hashtags.contains(akka))
+val akkaTweets: Source[Tweet, NotUsed] = tweets.filter(_.hashtags.contains(akka))
 implicit val timeout = Timeout(3.seconds)
-val saveTweets: RunnableGraph[Unit] =
+val saveTweets: RunnableGraph[NotUsed] =
 akkaTweets
 .mapAsync(4)(tweet => database ? Save(tweet))
 .to(Sink.ignore)


@@ -3,8 +3,9 @@
 */
 package docs.stream
+import akka.NotUsed
 import akka.stream.ActorMaterializer
-import akka.stream.scaladsl.{ RunnableGraph, Flow, Sink, Source }
+import akka.stream.scaladsl.{ Flow, Sink, Source }
 import akka.stream.testkit._
 import org.reactivestreams.Processor
@@ -139,7 +140,7 @@ class ReactiveStreamsDocSpec extends AkkaSpec {
 // An example Processor factory
 def createProcessor: Processor[Int, Int] = Flow[Int].toProcessor.run()
-val flow: Flow[Int, Int, Unit] = Flow.fromProcessor(() => createProcessor)
+val flow: Flow[Int, Int, NotUsed] = Flow.fromProcessor(() => createProcessor)
 //#use-processor
 }


@@ -1,5 +1,6 @@
 package docs.stream
+import akka.NotUsed
 import akka.stream._
 import akka.stream.scaladsl._
 import akka.stream.testkit.AkkaSpec
@@ -57,10 +58,10 @@ class StreamBuffersRateSpec extends AkkaSpec {
 "explicit buffers" in {
 trait Job
-def inboundJobsConnector(): Source[Job, Unit] = Source.empty
+def inboundJobsConnector(): Source[Job, NotUsed] = Source.empty
 //#explicit-buffers-backpressure
 // Getting a stream of jobs from an imaginary external system as a Source
-val jobs: Source[Job, Unit] = inboundJobsConnector()
+val jobs: Source[Job, NotUsed] = inboundJobsConnector()
 jobs.buffer(1000, OverflowStrategy.backpressure)
 //#explicit-buffers-backpressure


@@ -5,6 +5,7 @@ package docs.stream
 //#imports
+import akka.{ Done, NotUsed }
 import akka.actor.ActorSystem
 import akka.stream.{ ClosedShape, ActorMaterializer, OverflowStrategy }
 import akka.stream.scaladsl._
@@ -73,7 +74,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
 //#first-sample
 //#authors-filter-map
-val authors: Source[Author, Unit] =
+val authors: Source[Author, NotUsed] =
 tweets
 .filter(_.hashtags.contains(akka))
 .map(_.author)
@@ -82,7 +83,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
 trait Example3 {
 //#authors-collect
-val authors: Source[Author, Unit] =
+val authors: Source[Author, NotUsed] =
 tweets.collect { case t if t.hashtags.contains(akka) => t.author }
 //#authors-collect
 }
@@ -101,7 +102,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
 "mapConcat hashtags" in {
 //#hashtags-mapConcat
-val hashtags: Source[Hashtag, Unit] = tweets.mapConcat(_.hashtags.toList)
+val hashtags: Source[Hashtag, NotUsed] = tweets.mapConcat(_.hashtags.toList)
 //#hashtags-mapConcat
 }
@@ -113,8 +114,8 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
 }
 "simple broadcast" in {
-val writeAuthors: Sink[Author, Future[Unit]] = Sink.ignore
-val writeHashtags: Sink[Hashtag, Future[Unit]] = Sink.ignore
+val writeAuthors: Sink[Author, Future[Done]] = Sink.ignore
+val writeHashtags: Sink[Hashtag, Future[Done]] = Sink.ignore
 // format: OFF
 //#flow-graph-broadcast
@@ -151,7 +152,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
 import scala.concurrent.duration._
 //#backpressure-by-readline
-val completion: Future[Unit] =
+val completion: Future[Done] =
 Source(1 to 10)
 .map(i => { println(s"map => $i"); i })
 .runForeach { i => readLine(s"Element = $i; continue reading? [press enter]\n") }
@@ -163,7 +164,7 @@ class TwitterStreamQuickstartDocSpec extends AkkaSpec {
 "count elements on finite stream" in {
 //#tweets-fold-count
-val count: Flow[Tweet, Int, Unit] = Flow[Tweet].map(_ => 1)
+val count: Flow[Tweet, Int, NotUsed] = Flow[Tweet].map(_ => 1)
 val sumSink: Sink[Int, Future[Int]] = Sink.fold[Int, Int](0)(_ + _)


@@ -1,5 +1,6 @@
 package docs.stream.cookbook
+import akka.NotUsed
 import akka.stream.scaladsl.{ Flow, Sink, Source }
 import akka.util.ByteString
@@ -89,7 +90,7 @@ class RecipeByteStrings extends RecipeSpec {
 val data = Source(List(ByteString(1, 2), ByteString(3), ByteString(4, 5, 6), ByteString(7, 8, 9)))
 //#compacting-bytestrings
-val compacted: Source[ByteString, Unit] = data.map(_.compact)
+val compacted: Source[ByteString, NotUsed] = data.map(_.compact)
 //#compacting-bytestrings
 Await.result(compacted.grouped(10).runWith(Sink.head), 3.seconds).forall(_.isCompact) should be(true)


@@ -2,6 +2,7 @@ package docs.stream.cookbook
 import java.security.MessageDigest
+import akka.NotUsed
 import akka.stream.scaladsl.{ Sink, Source }
 import akka.util.ByteString
@@ -41,7 +42,7 @@ class RecipeDigest extends RecipeSpec {
 }
 }
-val digest: Source[ByteString, Unit] = data.transform(() => digestCalculator("SHA-256"))
+val digest: Source[ByteString, NotUsed] = data.transform(() => digestCalculator("SHA-256"))
 //#calculating-digest
 Await.result(digest.runWith(Sink.head), 3.seconds) should be(


@@ -1,5 +1,6 @@
 package docs.stream.cookbook
+import akka.NotUsed
 import akka.stream.scaladsl.{ Sink, Source }
 import scala.collection.immutable
@@ -15,8 +16,8 @@ class RecipeFlattenSeq extends RecipeSpec {
 val someDataSource = Source(List(List("1"), List("2"), List("3", "4", "5"), List("6", "7")))
 //#flattening-seqs
-val myData: Source[List[Message], Unit] = someDataSource
-val flattened: Source[Message, Unit] = myData.mapConcat(identity)
+val myData: Source[List[Message], NotUsed] = someDataSource
+val flattened: Source[Message, NotUsed] = myData.mapConcat(identity)
 //#flattening-seqs
 Await.result(flattened.grouped(8).runWith(Sink.head), 3.seconds) should be(List("1", "2", "3", "4", "5", "6", "7"))


@@ -1,5 +1,6 @@
 package docs.stream.cookbook
+import akka.NotUsed
 import akka.actor.{ Props, ActorRef, Actor }
 import akka.actor.Actor.Receive
 import akka.stream.ClosedShape
@@ -78,7 +79,7 @@ class RecipeGlobalRateLimit extends RecipeSpec {
 "work" in {
 //#global-limiter-flow
-def limitGlobal[T](limiter: ActorRef, maxAllowedWait: FiniteDuration): Flow[T, T, Unit] = {
+def limitGlobal[T](limiter: ActorRef, maxAllowedWait: FiniteDuration): Flow[T, T, NotUsed] = {
 import akka.pattern.ask
 import akka.util.Timeout
 Flow[T].mapAsync(4)((element: T) => {


@@ -1,5 +1,6 @@
 package docs.stream.cookbook
+import akka.NotUsed
 import akka.stream.ClosedShape
 import akka.stream.scaladsl._
 import akka.stream.testkit._
@@ -14,7 +15,7 @@ class RecipeKeepAlive extends RecipeSpec {
 //#inject-keepalive
 import scala.concurrent.duration._
-val injectKeepAlive: Flow[ByteString, ByteString, Unit] =
+val injectKeepAlive: Flow[ByteString, ByteString, NotUsed] =
 Flow[ByteString].keepAlive(1.second, () => keepaliveMessage)
 //#inject-keepalive


@@ -1,5 +1,6 @@
 package docs.stream.cookbook
+import akka.NotUsed
 import akka.stream.scaladsl._
 import akka.stream.testkit._
 import scala.concurrent.duration._
@@ -19,12 +20,12 @@ class RecipeMissedTicks extends RecipeSpec {
 val sink = Sink.fromSubscriber(sub)
 //#missed-ticks
-val missedTicks: Flow[Tick, Int, Unit] =
+val missedTicks: Flow[Tick, Int, NotUsed] =
 Flow[Tick].conflate(seed = (_) => 0)(
 (missedTicks, tick) => missedTicks + 1)
 //#missed-ticks
 val latch = TestLatch(3)
-val realMissedTicks: Flow[Tick, Int, Unit] =
+val realMissedTicks: Flow[Tick, Int, NotUsed] =
 Flow[Tick].conflate(seed = (_) => 0)(
 (missedTicks, tick) => { latch.countDown(); missedTicks + 1 })


@@ -1,5 +1,6 @@
 package docs.stream.cookbook
+import akka.NotUsed
 import akka.stream.scaladsl.{ Sink, Source }
 import scala.collection.immutable
@@ -23,7 +24,7 @@ class RecipeMultiGroupBy extends RecipeSpec {
 //#multi-groupby
 val topicMapper: (Message) => immutable.Seq[Topic] = extractTopics
-val messageAndTopic: Source[(Message, Topic), Unit] = elems.mapConcat { msg: Message =>
+val messageAndTopic: Source[(Message, Topic), NotUsed] = elems.mapConcat { msg: Message =>
 val topicsForMessage = topicMapper(msg)
 // Create a (Msg, Topic) pair for each of the topics
 // the message belongs to


@@ -1,5 +1,6 @@
 package docs.stream.cookbook
+import akka.NotUsed
 import akka.stream.{ Graph, FlowShape, Inlet, Outlet, Attributes, OverflowStrategy }
 import akka.stream.scaladsl._
 import scala.concurrent.{ Await, Future }
@@ -17,7 +18,7 @@ class RecipeReduceByKey extends RecipeSpec {
 def words = Source(List("hello", "world", "and", "hello", "universe", "akka") ++ List.fill(1000)("rocks!"))
 //#word-count
-val counts: Source[(String, Int), Unit] = words
+val counts: Source[(String, Int), NotUsed] = words
 // split the words into separate streams first
 .groupBy(MaximumDistinctWords, identity)
 // add counting logic to the streams
@@ -45,7 +46,7 @@ class RecipeReduceByKey extends RecipeSpec {
 def reduceByKey[In, K, Out](
 maximumGroupSize: Int,
 groupKey: (In) => K,
-foldZero: (K) => Out)(fold: (Out, In) => Out): Flow[In, (K, Out), Unit] = {
+foldZero: (K) => Out)(fold: (Out, In) => Out): Flow[In, (K, Out), NotUsed] = {
 Flow[In]
 .groupBy(maximumGroupSize, groupKey)


@@ -1,5 +1,6 @@
 package docs.stream.cookbook
+import akka.NotUsed
 import akka.stream.scaladsl.{ Flow, Sink, Source }
 import akka.stream.testkit._
 import scala.concurrent.duration._
@@ -13,7 +14,7 @@ class RecipeSimpleDrop extends RecipeSpec {
 "work" in {
 //#simple-drop
-val droppyStream: Flow[Message, Message, Unit] =
+val droppyStream: Flow[Message, Message, NotUsed] =
 Flow[Message].conflate(seed = identity)((lastMessage, newMessage) => newMessage)
 //#simple-drop
 val latch = TestLatch(2)


@@ -1,5 +1,6 @@
 package docs.stream.cookbook
+import akka.NotUsed
 import akka.stream.FlowShape
 import akka.stream.scaladsl._
 import akka.testkit.TestProbe
@@ -18,7 +19,7 @@ class RecipeWorkerPool extends RecipeSpec {
 val worker = Flow[String].map(_ + " done")
 //#worker-pool
-def balancer[In, Out](worker: Flow[In, Out, Any], workerCount: Int): Flow[In, Out, Unit] = {
+def balancer[In, Out](worker: Flow[In, Out, Any], workerCount: Int): Flow[In, Out, NotUsed] = {
 import GraphDSL.Implicits._
 Flow.fromGraph(GraphDSL.create() { implicit b =>
@@ -35,7 +36,7 @@ class RecipeWorkerPool extends RecipeSpec {
 })
 }
-val processedJobs: Source[Result, Unit] = myJobs.via(balancer(worker, 3))
+val processedJobs: Source[Result, NotUsed] = myJobs.via(balancer(worker, 3))
 //#worker-pool
 Await.result(processedJobs.grouped(10).runWith(Sink.head), 3.seconds).toSet should be(Set(