!str #16992 Rework Source and Sink name parameter

* Remove the name parameter (no overloads); naming is performed using `.withAttributes` or the new
  convenience `.named`. These add the OperationAttribute.Name and also change the name of the
  shape's Inlet and Outlet.

* Remove the Source/Sink parameter list for 0-parameter methods;
  this allows usage of `Sink.head` instead of `Sink.head()` (see the sketch below).
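
A minimal sketch of the resulting API, using only identifiers that appear in the diffs below (the exact signature of `.named` is assumed from the description above; nothing is run, so no materializer is needed):

```scala
import scala.concurrent.Future
import akka.stream.scaladsl.{ Sink, Source }

// 0-parameter factories no longer take an empty parameter list
val sink: Sink[Int, Future[Int]] = Sink.head[Int] // was: Sink.head[Int]()

// Naming goes through the new .named convenience (or .withAttributes),
// which sets the name attribute and renames the shape's Inlet and Outlet
val numbers: Source[Int, Unit] = Source(1 to 10).named("numbers")
```
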
Patrik Nordwall 2015-03-05 12:21:17 +01:00
parent 53e3dcad06
commit 3dc4e6d077
75 changed files with 336 additions and 364 deletions

@@ -152,14 +152,14 @@ class FlowDocSpec extends AkkaSpec {
 //#flow-mat-combine
 // An empty source that can be shut down explicitly from the outside
-val source: Source[Int, Promise[Unit]] = Source.lazyEmpty[Int]()
+val source: Source[Int, Promise[Unit]] = Source.lazyEmpty[Int]
 // A flow that internally throttles elements to 1/second, and returns a Cancellable
 // which can be used to shut down the stream
 val flow: Flow[Int, Int, Cancellable] = throttler
 // A sink that returns the first element of a stream in the returned Future
-val sink: Sink[Int, Future[Int]] = Sink.head[Int]()
+val sink: Sink[Int, Future[Int]] = Sink.head[Int]
 // By default, the materialized value of the leftmost stage is preserved
 val r1: RunnableFlow[Promise[Unit]] = source.via(flow).to(sink)
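
The comment above notes that the leftmost stage's materialized value is kept by default; a minimal sketch of overriding that default, assuming the `toMat`/`Keep` combinators of this version of the API and the `source`, `flow` and `sink` values from the hunk above:

```scala
// Sketch (assumes toMat/Keep from this API version):
// keep the sink's Future[Int] instead of the source's Promise[Unit]
val r2: RunnableFlow[Future[Int]] =
  source.via(flow).toMat(sink)(Keep.right)
```
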

@@ -75,7 +75,7 @@ class FlowErrorDocSpec extends AkkaSpec {
 else acc + elem
 }
 }
-val result = source.grouped(1000).runWith(Sink.head())
+val result = source.grouped(1000).runWith(Sink.head)
 // the negative element cause the scan stage to be restarted,
 // i.e. start from 0 again
 // result here will be a Future completed with Success(Vector(0, 1, 0, 5, 12))

@@ -22,7 +22,7 @@ class GraphCyclesSpec extends AkkaSpec {
 val merge = b.add(Merge[Int](2))
 val bcast = b.add(Broadcast[Int](2))
-source ~> merge ~> Flow[Int].map { s => println(s); s } ~> bcast ~> Sink.ignore()
+source ~> merge ~> Flow[Int].map { s => println(s); s } ~> bcast ~> Sink.ignore
 merge <~ bcast
 }
 //#deadlocked
@@ -39,7 +39,7 @@ class GraphCyclesSpec extends AkkaSpec {
 val merge = b.add(MergePreferred[Int](1))
 val bcast = b.add(Broadcast[Int](2))
-source ~> merge ~> Flow[Int].map { s => println(s); s } ~> bcast ~> Sink.ignore()
+source ~> merge ~> Flow[Int].map { s => println(s); s } ~> bcast ~> Sink.ignore
 merge.preferred <~ bcast
 }
 //#unfair
@@ -55,7 +55,7 @@ class GraphCyclesSpec extends AkkaSpec {
 val merge = b.add(Merge[Int](2))
 val bcast = b.add(Broadcast[Int](2))
-source ~> merge ~> Flow[Int].map { s => println(s); s } ~> bcast ~> Sink.ignore()
+source ~> merge ~> Flow[Int].map { s => println(s); s } ~> bcast ~> Sink.ignore
 merge <~ Flow[Int].buffer(10, OverflowStrategy.dropHead) <~ bcast
 }
 //#dropping
@@ -73,7 +73,7 @@ class GraphCyclesSpec extends AkkaSpec {
 val bcast = b.add(Broadcast[Int](2))
 source ~> zip.in0
-zip.out.map { s => println(s); s } ~> bcast ~> Sink.ignore()
+zip.out.map { s => println(s); s } ~> bcast ~> Sink.ignore
 zip.in1 <~ bcast
 }
 //#zipping-dead
@@ -92,7 +92,7 @@ class GraphCyclesSpec extends AkkaSpec {
 val start = Source.single(0)
 source ~> zip.in0
-zip.out.map { s => println(s); s } ~> bcast ~> Sink.ignore()
+zip.out.map { s => println(s); s } ~> bcast ~> Sink.ignore
 zip.in1 <~ concat <~ start
 concat <~ bcast
 }

@@ -43,7 +43,7 @@ class ReactiveStreamsDocSpec extends AkkaSpec {
 val impl = new Fixture {
 override def tweets: Publisher[Tweet] =
-TwitterStreamQuickstartDocSpec.tweets.runWith(Sink.publisher())
+TwitterStreamQuickstartDocSpec.tweets.runWith(Sink.publisher)
 override def storage = SubscriberProbe[Author]
@@ -95,7 +95,7 @@ class ReactiveStreamsDocSpec extends AkkaSpec {
 //#source-publisher
 val authorPublisher: Publisher[Author] =
-Source(tweets).via(authors).runWith(Sink.publisher())
+Source(tweets).via(authors).runWith(Sink.publisher)
 authorPublisher.subscribe(storage)
 //#source-publisher

@@ -60,7 +60,7 @@ class StreamBuffersRateSpec extends AkkaSpec {
 "explcit buffers" in {
 trait Job
-def inboundJobsConnector(): Source[Job, Unit] = Source.empty()
+def inboundJobsConnector(): Source[Job, Unit] = Source.empty
 //#explicit-buffers-backpressure
 // Getting a stream of jobs from an imaginary external system as a Source
 val jobs: Source[Job, Unit] = inboundJobsConnector()
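
For context, `inboundJobsConnector()` above feeds the explicit-buffer example; a minimal sketch of what such a buffer typically looks like, assuming the `buffer` combinator and `OverflowStrategy.backpressure` from this version of the API:

```scala
// Sketch: hold up to 1000 jobs and backpressure the external system when full
val buffered: Source[Job, Unit] =
  inboundJobsConnector().buffer(1000, OverflowStrategy.backpressure)
```
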

@@ -67,7 +67,7 @@ class StreamPartialFlowGraphDocSpec extends AkkaSpec {
 zip.out
 }
-val firstPair: Future[(Int, Int)] = pairs.runWith(Sink.head())
+val firstPair: Future[(Int, Int)] = pairs.runWith(Sink.head)
 //#source-from-partial-flow-graph
 Await.result(firstPair, 300.millis) should equal(1 -> 2)
 }
@@ -94,7 +94,7 @@ class StreamPartialFlowGraphDocSpec extends AkkaSpec {
 // format: OFF
 val (_, matSink: Future[(Int, String)]) =
 //#flow-from-partial-flow-graph
-pairUpWithToString.runWith(Source(List(1)), Sink.head())
+pairUpWithToString.runWith(Source(List(1)), Sink.head)
 //#flow-from-partial-flow-graph
 // format: ON

@@ -41,7 +41,7 @@ class RecipeByteStrings extends RecipeSpec {
 val chunksStream = rawBytes.transform(() => new Chunker(ChunkLimit))
 //#bytestring-chunker
-val chunksFuture = chunksStream.grouped(10).runWith(Sink.head())
+val chunksFuture = chunksStream.grouped(10).runWith(Sink.head)
 val chunks = Await.result(chunksFuture, 3.seconds)
@@ -70,11 +70,11 @@ class RecipeByteStrings extends RecipeSpec {
 val bytes1 = Source(List(ByteString(1, 2), ByteString(3), ByteString(4, 5, 6), ByteString(7, 8, 9)))
 val bytes2 = Source(List(ByteString(1, 2), ByteString(3), ByteString(4, 5, 6), ByteString(7, 8, 9, 10)))
-Await.result(bytes1.via(limiter).grouped(10).runWith(Sink.head()), 3.seconds)
+Await.result(bytes1.via(limiter).grouped(10).runWith(Sink.head), 3.seconds)
 .fold(ByteString())(_ ++ _) should be(ByteString(1, 2, 3, 4, 5, 6, 7, 8, 9))
 an[IllegalStateException] must be thrownBy {
-Await.result(bytes2.via(limiter).grouped(10).runWith(Sink.head()), 3.seconds)
+Await.result(bytes2.via(limiter).grouped(10).runWith(Sink.head), 3.seconds)
 }
 }
@@ -86,7 +86,7 @@ class RecipeByteStrings extends RecipeSpec {
 val compacted: Source[ByteString, Unit] = data.map(_.compact)
 //#compacting-bytestrings
-Await.result(compacted.grouped(10).runWith(Sink.head()), 3.seconds).forall(_.isCompact) should be(true)
+Await.result(compacted.grouped(10).runWith(Sink.head), 3.seconds).forall(_.isCompact) should be(true)
 }
 }

@@ -44,7 +44,7 @@ class RecipeDigest extends RecipeSpec {
 val digest: Source[ByteString, Unit] = data.transform(() => digestCalculator("SHA-256"))
 //#calculating-digest
-Await.result(digest.runWith(Sink.head()), 3.seconds) should be(
+Await.result(digest.runWith(Sink.head), 3.seconds) should be(
 ByteString(
 0x24, 0x8d, 0x6a, 0x61,
 0xd2, 0x06, 0x38, 0xb8,

@@ -19,7 +19,7 @@ class RecipeFlattenSeq extends RecipeSpec {
 val flattened: Source[Message, Unit] = myData.mapConcat(identity)
 //#flattening-seqs
-Await.result(flattened.grouped(8).runWith(Sink.head()), 3.seconds) should be(List("1", "2", "3", "4", "5", "6", "7"))
+Await.result(flattened.grouped(8).runWith(Sink.head), 3.seconds) should be(List("1", "2", "3", "4", "5", "6", "7"))
 }

@@ -43,8 +43,8 @@ class RecipeMultiGroupBy extends RecipeSpec {
 //#multi-groupby
 val result = multiGroups.map {
-case (topic, topicMessages) => topicMessages.grouped(10).map(topic.name + _.mkString("[", ", ", "]")).runWith(Sink.head())
-}.mapAsync(identity).grouped(10).runWith(Sink.head())
+case (topic, topicMessages) => topicMessages.grouped(10).map(topic.name + _.mkString("[", ", ", "]")).runWith(Sink.head)
+}.mapAsync(identity).grouped(10).runWith(Sink.head)
 Await.result(result, 3.seconds).toSet should be(Set(
 "1[1: a, 1: b, all: c, all: d, 1: e]",

@@ -24,7 +24,7 @@ class RecipeParseLines extends RecipeSpec {
 val linesStream = rawData.transform(() => parseLines("\r\n", 100))
-Await.result(linesStream.grouped(10).runWith(Sink.head()), 3.seconds) should be(List(
+Await.result(linesStream.grouped(10).runWith(Sink.head), 3.seconds) should be(List(
 "Hello World\r!",
 "Hello Akka!",
 "Hello Streams!",

@@ -35,7 +35,7 @@ class RecipeReduceByKey extends RecipeSpec {
 .mapAsync(identity)
 //#word-count
-Await.result(counts.grouped(10).runWith(Sink.head()), 3.seconds).toSet should be(Set(
+Await.result(counts.grouped(10).runWith(Sink.head), 3.seconds).toSet should be(Set(
 ("hello", 2),
 ("world", 1),
 ("and", 1),
@@ -72,7 +72,7 @@ class RecipeReduceByKey extends RecipeSpec {
 //#reduce-by-key-general
-Await.result(wordCounts.grouped(10).runWith(Sink.head()), 3.seconds).toSet should be(Set(
+Await.result(wordCounts.grouped(10).runWith(Sink.head), 3.seconds).toSet should be(Set(
 ("hello", 2),
 ("world", 1),
 ("and", 1),

@@ -16,7 +16,7 @@ class RecipeToStrict extends RecipeSpec {
 //#draining-to-seq
 val strict: Future[immutable.Seq[Message]] =
-myData.grouped(MaxAllowedSeqSize).runWith(Sink.head())
+myData.grouped(MaxAllowedSeqSize).runWith(Sink.head)
 //#draining-to-seq
 Await.result(strict, 3.seconds) should be(List("1", "2", "3"))

@@ -37,7 +37,7 @@ class RecipeWorkerPool extends RecipeSpec {
 val processedJobs: Source[Result, Unit] = myJobs.via(balancer(worker, 3))
 //#worker-pool
-Await.result(processedJobs.grouped(10).runWith(Sink.head()), 3.seconds).toSet should be(Set(
+Await.result(processedJobs.grouped(10).runWith(Sink.head), 3.seconds).toSet should be(Set(
 "1 done", "2 done", "3 done", "4 done", "5 done"))
 }