=str #19293 fix issues in Sink.seq + minor doc fixes + use Sink.seq and limit in tests where appropriate

* Sink.seq (Scala DSL) now returns immutable.Seq rather than Seq
* Sink.seq will not silently truncate when the number of incoming elements is > Int.MAX_VALUE
* minor doc fixes
* replace grouped(n) / Sink.head with limit(n) / Sink.seq in various tests
* fix inconsistent indentation in RequestParserSpec
lolski 2016-02-12 01:36:21 +08:00
parent f042204d8b
commit 21381d5710
46 changed files with 330 additions and 217 deletions
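For context, a minimal Scala sketch of the pattern this commit moves to (an editorial illustration, not code from this commit; the object name and the MaxAllowedSeqSize constant are made up for the example):

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }
import scala.collection.immutable
import scala.concurrent.{ Await, Future }
import scala.concurrent.duration._

object DrainToSeqSketch extends App {
  implicit val system = ActorSystem("DrainToSeqSketch")
  implicit val mat = ActorMaterializer()

  val myData = Source(List("1", "2", "3"))
  val MaxAllowedSeqSize = 100

  // Old pattern: group the stream and keep only the first group; anything beyond the
  // first MaxAllowedSeqSize elements is silently dropped.
  val before: Future[Seq[String]] =
    myData.grouped(MaxAllowedSeqSize).runWith(Sink.head)

  // New pattern: Sink.seq materializes an immutable.Seq, and limit fails the stream
  // instead of silently truncating when more than MaxAllowedSeqSize elements arrive.
  val after: Future[immutable.Seq[String]] =
    myData.limit(MaxAllowedSeqSize).runWith(Sink.seq)

  println(Await.result(after, 3.seconds)) // Vector(1, 2, 3)
  system.terminate()
}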


@@ -156,7 +156,7 @@ public class FlowStagesDocTest {
@Test
public void demonstrateVariousPushPullStages() throws Exception {
final Sink<Integer, CompletionStage<List<Integer>>> sink =
Flow.of(Integer.class).grouped(10).toMat(Sink.head(), Keep.right());
Flow.of(Integer.class).limit(10).toMat(Sink.seq(), Keep.right());
//#stage-chain
final RunnableGraph<CompletionStage<List<Integer>>> runnable =


@@ -95,7 +95,7 @@ public class RecipeByteStrings extends RecipeTest {
rawBytes.transform(() -> new Chunker(CHUNK_LIMIT));
//#bytestring-chunker2
CompletionStage<List<ByteString>> chunksFuture = chunksStream.grouped(10).runWith(Sink.head(), mat);
CompletionStage<List<ByteString>> chunksFuture = chunksStream.limit(10).runWith(Sink.seq(), mat);
List<ByteString> chunks = chunksFuture.toCompletableFuture().get(3, TimeUnit.SECONDS);
@@ -157,7 +157,7 @@ public class RecipeByteStrings extends RecipeTest {
ByteString.fromArray(new byte[] { 4, 5, 6 }),
ByteString.fromArray(new byte[] { 7, 8, 9, 10 })));
List<ByteString> got = bytes1.via(limiter).grouped(10).runWith(Sink.head(), mat).toCompletableFuture().get(3, TimeUnit.SECONDS);
List<ByteString> got = bytes1.via(limiter).limit(10).runWith(Sink.seq(), mat).toCompletableFuture().get(3, TimeUnit.SECONDS);
ByteString acc = ByteString.empty();
for (ByteString b : got) {
acc = acc.concat(b);
@@ -166,7 +166,7 @@ public class RecipeByteStrings extends RecipeTest {
boolean thrown = false;
try {
bytes2.via(limiter).grouped(10).runWith(Sink.head(), mat).toCompletableFuture().get(3, TimeUnit.SECONDS);
bytes2.via(limiter).limit(10).runWith(Sink.seq(), mat).toCompletableFuture().get(3, TimeUnit.SECONDS);
} catch (IllegalStateException ex) {
thrown = true;
}
@@ -190,7 +190,7 @@ public class RecipeByteStrings extends RecipeTest {
Source<ByteString, NotUsed> compacted = rawBytes.map(bs -> bs.compact());
//#compacting-bytestrings
List<ByteString> got = compacted.grouped(10).runWith(Sink.head(), mat).toCompletableFuture().get(3, TimeUnit.SECONDS);
List<ByteString> got = compacted.limit(10).runWith(Sink.seq(), mat).toCompletableFuture().get(3, TimeUnit.SECONDS);
for (ByteString byteString : got) {
assertTrue(byteString.isCompact());


@@ -48,7 +48,7 @@ public class RecipeFlattenList extends RecipeTest {
Source<Message, NotUsed> flattened = myData.mapConcat(i -> i);
//#flattening-lists
List<Message> got = flattened.grouped(10).runWith(Sink.head(), mat).toCompletableFuture().get(1, TimeUnit.SECONDS);
List<Message> got = flattened.limit(10).runWith(Sink.seq(), mat).toCompletableFuture().get(1, TimeUnit.SECONDS);
assertEquals(got.get(0), new Message("1"));
assertEquals(got.get(1), new Message("2"));
assertEquals(got.get(2), new Message("3"));


@@ -50,8 +50,7 @@ public class RecipeParseLines extends RecipeTest {
.via(Framing.delimiter(ByteString.fromString("\r\n"), 100, true))
.map(b -> b.utf8String());
//#parse-lines
lines.grouped(10).runWith(Sink.head(), mat).toCompletableFuture().get(1, TimeUnit.SECONDS);
lines.limit(10).runWith(Sink.seq(), mat).toCompletableFuture().get(1, TimeUnit.SECONDS);
}
}


@@ -0,0 +1,92 @@
/**
* Copyright (C) 2015-2016 Typesafe <http://typesafe.com/>
*/
package docs.stream.javadsl.cookbook;
import akka.NotUsed;
import akka.actor.ActorSystem;
import akka.stream.ActorMaterializer;
import akka.stream.Materializer;
import akka.stream.javadsl.Sink;
import akka.stream.javadsl.Source;
import akka.testkit.JavaTestKit;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.TimeUnit;
public class RecipeSeq extends RecipeTest {
static ActorSystem system;
@BeforeClass
public static void setup() {
system = ActorSystem.create("RecipeLoggingElements");
}
@AfterClass
public static void tearDown() {
JavaTestKit.shutdownActorSystem(system);
system = null;
}
final Materializer mat = ActorMaterializer.create(system);
@Test
public void drainSourceToList() throws Exception {
new JavaTestKit(system) {
{
//#draining-to-list-unsafe
final Source<String, NotUsed> myData = Source.from(Arrays.asList("1", "2", "3"));
final int MAX_ALLOWED_SIZE = 100;
final CompletionStage<List<String>> strings = myData.runWith(Sink.seq(), mat); // dangerous!
//#draining-to-list-unsafe
strings.toCompletableFuture().get(3, TimeUnit.SECONDS);
}
};
}
@Test
public void drainSourceToListWithLimit() throws Exception {
new JavaTestKit(system) {
{
//#draining-to-list-safe
final Source<String, NotUsed> myData = Source.from(Arrays.asList("1", "2", "3"));
final int MAX_ALLOWED_SIZE = 100;
// OK. Future will fail with a `StreamLimitReachedException`
// if the number of incoming elements is larger than max
final CompletionStage<List<String>> strings =
myData.limit(MAX_ALLOWED_SIZE).runWith(Sink.seq(), mat);
//#draining-to-list-safe
strings.toCompletableFuture().get(1, TimeUnit.SECONDS);
}
};
}
@Test
public void drainSourceToListWithTake() throws Exception {
new JavaTestKit(system) {
{
final Source<String, NotUsed> myData = Source.from(Arrays.asList("1", "2", "3"));
final int MAX_ALLOWED_SIZE = 100;
//#draining-to-list-safe
// OK. Collect at most MAX_ALLOWED_SIZE elements, then cancel the upstream
final CompletionStage<List<String>> strings =
myData.take(MAX_ALLOWED_SIZE).runWith(Sink.seq(), mat);
//#draining-to-list-safe
strings.toCompletableFuture().get(1, TimeUnit.SECONDS);
}
};
}
}


@@ -1,58 +0,0 @@
/**
* Copyright (C) 2015-2016 Typesafe <http://typesafe.com/>
*/
package docs.stream.javadsl.cookbook;
import akka.NotUsed;
import akka.actor.ActorSystem;
import akka.stream.ActorMaterializer;
import akka.stream.Materializer;
import akka.stream.javadsl.Sink;
import akka.stream.javadsl.Source;
import akka.testkit.JavaTestKit;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.TimeUnit;
public class RecipeToStrict extends RecipeTest {
static ActorSystem system;
@BeforeClass
public static void setup() {
system = ActorSystem.create("RecipeLoggingElements");
}
@AfterClass
public static void tearDown() {
JavaTestKit.shutdownActorSystem(system);
system = null;
}
final Materializer mat = ActorMaterializer.create(system);
@Test
public void workWithPrintln() throws Exception {
new JavaTestKit(system) {
{
final Source<String, NotUsed> myData = Source.from(Arrays.asList("1", "2", "3"));
final int MAX_ALLOWED_SIZE = 100;
//#draining-to-list
final CompletionStage<List<String>> strings = myData
.grouped(MAX_ALLOWED_SIZE).runWith(Sink.head(), mat);
//#draining-to-list
strings.toCompletableFuture().get(3, TimeUnit.SECONDS);
}
};
}
}


@@ -73,7 +73,8 @@ public class RecipeWorkerPool extends RecipeTest {
Source<Message, NotUsed> processedJobs = data.via(balancer);
//#worker-pool2
CompletionStage<List<String>> future = processedJobs.map(m -> m.msg).grouped(10).runWith(Sink.head(), mat);
FiniteDuration timeout = FiniteDuration.create(200, TimeUnit.MILLISECONDS);
CompletionStage<List<String>> future = processedJobs.map(m -> m.msg).limit(10).runWith(Sink.seq(), mat);
List<String> got = future.toCompletableFuture().get(1, TimeUnit.SECONDS);
assertTrue(got.contains("1 done"));
assertTrue(got.contains("2 done"));


@@ -54,16 +54,20 @@ collection itself, so we can just call ``mapConcat(l -> l)``.
Draining a stream to a strict collection
----------------------------------------
**Situation:** A finite sequence of elements is given as a stream, but a Scala collection is needed instead.
**Situation:** A possibly unbounded sequence of elements is given as a stream, which needs to be collected into a strict collection while ensuring boundedness.
In this recipe we will use the ``grouped`` stream operation that groups incoming elements into a stream of limited
size collections (it can be seen as the almost opposite version of the "Flattening a stream of sequences" recipe
we showed before). By using a ``grouped(MAX_ALLOWED_SIZE)`` we create a stream of groups
with maximum size of ``MaxAllowedSeqSize`` and then we take the first element of this stream by attaching a ``Sink.head()``. What we get is a
:class:`CompletionStage` containing a sequence with all the elements of the original up to ``MAX_ALLOWED_SIZE`` size (further
elements are dropped).
A common situation when working with streams is one where we need to collect incoming elements into a strict collection.
This operation is supported via ``Sink.seq`` which materializes into a ``CompletionStage<List<T>>``.
.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeToStrict.java#draining-to-list
The ``limit`` or ``take`` operators should always be used in conjunction with ``Sink.seq`` in order to guarantee stream boundedness and so prevent the program from running out of memory.
For example, the following is best avoided:
.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeSeq.java#draining-to-list-unsafe
Rather, use ``limit`` or ``take`` to ensure that the resulting ``List`` will contain only up to ``MAX_ALLOWED_SIZE`` elements:
.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeSeq.java#draining-to-list-safe
Calculating the digest of a ByteString stream
---------------------------------------------


@@ -46,7 +46,6 @@ class HttpClientExampleSpec extends WordSpec with Matchers {
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
// construct a pool client flow with context type `Int`
val poolClientFlow = Http().cachedHostConnectionPool[Int]("akka.io")
val responseFuture: Future[(Try[HttpResponse], Int)] =


@@ -152,7 +152,7 @@ class BidiFlowDocSpec extends AkkaSpec with ConversionCheckedTripleEquals {
// test it by plugging it into its own inverse and closing the right end
val pingpong = Flow[Message].collect { case Ping(id) => Pong(id) }
val flow = stack.atop(stack.reversed).join(pingpong)
val result = Source((0 to 9).map(Ping)).via(flow).grouped(20).runWith(Sink.head)
val result = Source((0 to 9).map(Ping)).via(flow).limit(20).runWith(Sink.seq)
Await.result(result, 1.second) should ===((0 to 9).map(Pong))
//#compose
}
@@ -160,14 +160,14 @@ class BidiFlowDocSpec extends AkkaSpec with ConversionCheckedTripleEquals {
"work when chopped up" in {
val stack = codec.atop(framing)
val flow = stack.atop(chopUp).atop(stack.reversed).join(Flow[Message].map { case Ping(id) => Pong(id) })
val f = Source((0 to 9).map(Ping)).via(flow).grouped(20).runWith(Sink.head)
val f = Source((0 to 9).map(Ping)).via(flow).limit(20).runWith(Sink.seq)
Await.result(f, 1.second) should ===((0 to 9).map(Pong))
}
"work when accumulated" in {
val stack = codec.atop(framing)
val flow = stack.atop(accumulate).atop(stack.reversed).join(Flow[Message].map { case Ping(id) => Pong(id) })
val f = Source((0 to 9).map(Ping)).via(flow).grouped(20).runWith(Sink.head)
val f = Source((0 to 9).map(Ping)).via(flow).limit(20).runWith(Sink.seq)
Await.result(f, 1.second) should ===((0 to 9).map(Pong))
}


@@ -80,7 +80,7 @@ class FlowErrorDocSpec extends AkkaSpec {
}
.withAttributes(ActorAttributes.supervisionStrategy(decider))
val source = Source(List(1, 3, -1, 5, 7)).via(flow)
val result = source.grouped(1000).runWith(Sink.head)
val result = source.limit(1000).runWith(Sink.seq)
// the negative element causes the scan stage to be restarted,
// i.e. start from 0 again
// result here will be a Future completed with Success(Vector(0, 1, 4, 0, 5, 12))


@@ -48,7 +48,7 @@ class RecipeByteStrings extends RecipeSpec {
val chunksStream = rawBytes.transform(() => new Chunker(ChunkLimit))
//#bytestring-chunker
val chunksFuture = chunksStream.grouped(10).runWith(Sink.head)
val chunksFuture = chunksStream.limit(10).runWith(Sink.seq)
val chunks = Await.result(chunksFuture, 3.seconds)
@@ -77,11 +77,11 @@ class RecipeByteStrings extends RecipeSpec {
val bytes1 = Source(List(ByteString(1, 2), ByteString(3), ByteString(4, 5, 6), ByteString(7, 8, 9)))
val bytes2 = Source(List(ByteString(1, 2), ByteString(3), ByteString(4, 5, 6), ByteString(7, 8, 9, 10)))
Await.result(bytes1.via(limiter).grouped(10).runWith(Sink.head), 3.seconds)
Await.result(bytes1.via(limiter).limit(10).runWith(Sink.seq), 3.seconds)
.fold(ByteString())(_ ++ _) should be(ByteString(1, 2, 3, 4, 5, 6, 7, 8, 9))
an[IllegalStateException] must be thrownBy {
Await.result(bytes2.via(limiter).grouped(10).runWith(Sink.head), 3.seconds)
Await.result(bytes2.via(limiter).limit(10).runWith(Sink.seq), 3.seconds)
}
}
@@ -93,7 +93,7 @@ class RecipeByteStrings extends RecipeSpec {
val compacted: Source[ByteString, NotUsed] = data.map(_.compact)
//#compacting-bytestrings
Await.result(compacted.grouped(10).runWith(Sink.head), 3.seconds).forall(_.isCompact) should be(true)
Await.result(compacted.limit(10).runWith(Sink.seq), 3.seconds).forall(_.isCompact) should be(true)
}
}


@@ -20,7 +20,7 @@ class RecipeFlattenSeq extends RecipeSpec {
val flattened: Source[Message, NotUsed] = myData.mapConcat(identity)
//#flattening-seqs
Await.result(flattened.grouped(8).runWith(Sink.head), 3.seconds) should be(List("1", "2", "3", "4", "5", "6", "7"))
Await.result(flattened.limit(8).runWith(Sink.seq), 3.seconds) should be(List("1", "2", "3", "4", "5", "6", "7"))
}


@@ -45,8 +45,8 @@ class RecipeMultiGroupBy extends RecipeSpec {
.grouped(10)
.mergeSubstreams
.map(g => g.head._2.name + g.map(_._1).mkString("[", ", ", "]"))
.grouped(10)
.runWith(Sink.head)
.limit(10)
.runWith(Sink.seq)
Await.result(result, 3.seconds).toSet should be(Set(
"1[1: a, 1: b, all: c, all: d, 1: e]",


@@ -27,7 +27,7 @@ class RecipeParseLines extends RecipeSpec {
.map(_.utf8String)
//#parse-lines
Await.result(linesStream.grouped(10).runWith(Sink.head), 3.seconds) should be(List(
Await.result(linesStream.limit(10).runWith(Sink.seq), 3.seconds) should be(List(
"Hello World\r!",
"Hello Akka!",
"Hello Streams!",


@@ -29,7 +29,7 @@ class RecipeReduceByKey extends RecipeSpec {
.mergeSubstreams
//#word-count
Await.result(counts.grouped(10).runWith(Sink.head), 3.seconds).toSet should be(Set(
Await.result(counts.limit(10).runWith(Sink.seq), 3.seconds).toSet should be(Set(
("hello", 2),
("world", 1),
("and", 1),
@@ -61,7 +61,7 @@ class RecipeReduceByKey extends RecipeSpec {
map = (word: String) => 1)((left: Int, right: Int) => left + right))
//#reduce-by-key-general
Await.result(wordCounts.grouped(10).runWith(Sink.head), 3.seconds).toSet should be(Set(
Await.result(wordCounts.limit(10).runWith(Sink.seq), 3.seconds).toSet should be(Set(
("hello", 2),
("world", 1),
("and", 1),


@@ -1,27 +0,0 @@
package docs.stream.cookbook
import akka.stream.scaladsl.{ Sink, Source }
import scala.collection.immutable
import scala.concurrent.{ Await, Future }
import scala.concurrent.duration._
class RecipeToStrict extends RecipeSpec {
"Recipe for draining a stream into a strict collection" must {
"work" in {
val myData = Source(List("1", "2", "3"))
val MaxAllowedSeqSize = 100
//#draining-to-seq
val strict: Future[immutable.Seq[Message]] =
myData.grouped(MaxAllowedSeqSize).runWith(Sink.head)
//#draining-to-seq
Await.result(strict, 3.seconds) should be(List("1", "2", "3"))
}
}
}


@@ -39,7 +39,7 @@ class RecipeWorkerPool extends RecipeSpec {
val processedJobs: Source[Result, NotUsed] = myJobs.via(balancer(worker, 3))
//#worker-pool
Await.result(processedJobs.grouped(10).runWith(Sink.head), 3.seconds).toSet should be(Set(
Await.result(processedJobs.limit(10).runWith(Sink.seq), 3.seconds).toSet should be(Set(
"1 done", "2 done", "3 done", "4 done", "5 done"))
}


@@ -54,16 +54,20 @@ collection itself, so we can just call ``mapConcat(identity)``.
Draining a stream to a strict collection
----------------------------------------
**Situation:** A finite sequence of elements is given as a stream, but a Scala collection is needed instead.
**Situation:** A possibly unbounded sequence of elements is given as a stream, which needs to be collected into a Scala collection while ensuring boundedness.
In this recipe we will use the ``grouped`` stream operation that groups incoming elements into a stream of limited
size collections (it can be seen as the almost opposite version of the "Flattening a stream of sequences" recipe
we showed before). By using a ``grouped(MaxAllowedSeqSize)`` we create a stream of groups
with maximum size of ``MaxAllowedSeqSize`` and then we take the first element of this stream by attaching a ``Sink.head``. What we get is a
:class:`Future` containing a sequence with all the elements of the original up to ``MaxAllowedSeqSize`` size (further
elements are dropped).
A common situation when working with streams is one where we need to collect incoming elements into a Scala collection.
This operation is supported via ``Sink.seq`` which materializes into a ``Future[Seq[T]]``.
.. includecode:: ../code/docs/stream/cookbook/RecipeToStrict.scala#draining-to-seq
The ``limit`` or ``take`` operators should always be used in conjunction with ``Sink.seq`` in order to guarantee stream boundedness and so prevent the program from running out of memory.
For example, the following is best avoided:
.. includecode:: code/docs/stream/cookbook/RecipeSeq.scala#draining-to-seq-unsafe
Rather, use ``limit`` or ``take`` to ensure that the resulting ``Seq`` will contain only up to ``max`` elements:
.. includecode:: code/docs/stream/cookbook/RecipeSeq.scala#draining-to-seq-safe
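As an editorial complement to the recipe above (this sketch is not the RecipeSeq.scala referenced by the includecode directives; the bound of 2 is chosen only to trigger the failure), the difference between take and limit when the bound is exceeded:

import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.control.NonFatal

object LimitVersusTakeSketch extends App {
  implicit val system = ActorSystem("LimitVersusTakeSketch")
  implicit val mat = ActorMaterializer()

  val myData = Source(List("1", "2", "3"))

  // take(2) truncates: the stream completes and the Seq holds only the first two elements.
  println(Await.result(myData.take(2).runWith(Sink.seq), 3.seconds)) // Vector(1, 2)

  // limit(2) refuses to truncate: the materialized Future fails with a
  // StreamLimitReachedException as soon as a third element arrives.
  try Await.result(myData.limit(2).runWith(Sink.seq), 3.seconds)
  catch { case NonFatal(ex) => println(s"failed as expected: $ex") }

  system.terminate()
}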
Calculating the digest of a ByteString stream
---------------------------------------------