Merge pull request #19786 from akka/wip-docs-RK

add statefulMapConcat to overview and fix links
This commit is contained in:
Roland Kuhn 2016-02-15 18:39:30 +01:00
commit 815dc2aa70
13 changed files with 529 additions and 462 deletions

View file

@ -42,11 +42,10 @@ public class RecipeSeq extends RecipeTest {
public void drainSourceToList() throws Exception {
new JavaTestKit(system) {
{
final Source<String, NotUsed> mySource = Source.from(Arrays.asList("1", "2", "3"));
//#draining-to-list-unsafe
final Source<String, NotUsed> myData = Source.from(Arrays.asList("1", "2", "3"));
final int MAX_ALLOWED_SIZE = 100;
final CompletionStage<List<String>> strings = myData.runWith(Sink.seq(), mat); // dangerous!
// Dangerous: might produce a collection with 2 billion elements!
final CompletionStage<List<String>> strings = mySource.runWith(Sink.seq(), mat);
//#draining-to-list-unsafe
strings.toCompletableFuture().get(3, TimeUnit.SECONDS);
@ -58,14 +57,14 @@ public class RecipeSeq extends RecipeTest {
public void drainSourceToListWithLimit() throws Exception {
new JavaTestKit(system) {
{
final Source<String, NotUsed> mySource = Source.from(Arrays.asList("1", "2", "3"));
//#draining-to-list-safe
final Source<String, NotUsed> myData = Source.from(Arrays.asList("1", "2", "3"));
final int MAX_ALLOWED_SIZE = 100;
// OK. Future will fail with a `StreamLimitReachedException`
// if the number of incoming elements is larger than max
final CompletionStage<List<String>> strings =
myData.limit(MAX_ALLOWED_SIZE).runWith(Sink.seq(), mat);
mySource.limit(MAX_ALLOWED_SIZE).runWith(Sink.seq(), mat);
//#draining-to-list-safe
strings.toCompletableFuture().get(1, TimeUnit.SECONDS);
@ -76,13 +75,14 @@ public class RecipeSeq extends RecipeTest {
public void drainSourceToListWithTake() throws Exception {
new JavaTestKit(system) {
{
final Source<String, NotUsed> myData = Source.from(Arrays.asList("1", "2", "3"));
final Source<String, NotUsed> mySource = Source.from(Arrays.asList("1", "2", "3"));
final int MAX_ALLOWED_SIZE = 100;
//#draining-to-list-safe
// OK. Collect up until max-th elements only, then cancel upstream
final CompletionStage<List<String>> strings =
myData.take(MAX_ALLOWED_SIZE).runWith(Sink.seq(), mat);
mySource.take(MAX_ALLOWED_SIZE).runWith(Sink.seq(), mat);
//#draining-to-list-safe
strings.toCompletableFuture().get(1, TimeUnit.SECONDS);

File diff suppressed because it is too large Load diff

View file

@ -16,7 +16,7 @@ This part also serves as supplementary material for the main body of documentati
open while reading the manual and look for examples demonstrating various streaming concepts
as they appear in the main body of documentation.
If you need a quick reference of the available processing stages used in the recipes see :ref:`stages-overview`.
If you need a quick reference of the available processing stages used in the recipes see :ref:`stages-overview_java`.
Working with Flows
==================
@ -63,11 +63,11 @@ The function ``limit`` or ``take`` should always be used in conjunction in order
For example, this is best avoided:
.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeSeq.java#draining-to-list-unsafe
.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeSeq.java#draining-to-list-unsafe
Rather, use ``limit`` or ``take`` to ensure that the resulting ``List`` will contain only up to ``MAX_ALLOWED_SIZE`` elements:
.. includecode:: ../../../akka-samples/akka-docs-java-lambda/src/test/java/docs/stream/cookbook/RecipeSeq.java#draining-to-list-safe
.. includecode:: ../code/docs/stream/javadsl/cookbook/RecipeSeq.java#draining-to-list-safe
Calculating the digest of a ByteString stream
---------------------------------------------

View file

@ -38,7 +38,7 @@ Processing Stage
The common name for all building blocks that build up a Graph.
Examples of a processing stage would be operations like ``map()``, ``filter()``, stages added by ``transform()`` like
:class:`PushStage`, :class:`PushPullStage`, :class:`StatefulStage` and graph junctions like ``Merge`` or ``Broadcast``.
For the full list of built-in processing stages see :ref:`stages-overview`
For the full list of built-in processing stages see :ref:`stages-overview_java`
When we talk about *asynchronous, non-blocking backpressure* we mean that the processing stages available in Akka
Streams will not use blocking calls but asynchronous message passing to exchange messages between each other, and they

View file

@ -24,7 +24,7 @@ Graphs are built from simple Flows which serve as the linear connections within
which serve as fan-in and fan-out points for Flows. Thanks to the junctions having meaningful types based on their behaviour
and making them explicit elements these elements should be rather straightforward to use.
Akka Streams currently provide these junctions (for a detailed list see :ref:`stages-overview`):
Akka Streams currently provide these junctions (for a detailed list see :ref:`stages-overview_java`):
* **Fan-out**

View file

@ -79,7 +79,7 @@ and for best results we recommend the following approach:
* The bottom-up learners may feel more at home rummaging through the
:ref:`stream-cookbook-java`.
* For a complete overview of the built-in processing stages you can look at the
table in :ref:`stages-overview`
table in :ref:`stages-overview_java`
* The other sections can be read sequentially or as needed during the previous
steps, each digging deeper into specific topics.

View file

@ -0,0 +1,45 @@
/**
* Copyright (C) 2016 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream.cookbook
import akka.stream.scaladsl._
import scala.concurrent.Future
import org.scalatest.concurrent.ScalaFutures
import scala.concurrent.duration._
// Cookbook recipe spec showing how to drain a stream into a strict sequence:
// first the unsafe, unbounded way, then the two bounded (safe) variants.
// NOTE(review): the `//#draining-to-seq-*` marker comments delimit snippets
// included into the docs via `includecode` — do not remove or rename them.
class RecipeSeq extends RecipeSpec with ScalaFutures {

  // ScalaFutures patience: allow `futureValue` up to 3 seconds to complete.
  implicit val patience = PatienceConfig(3.seconds)

  "Draining to a strict sequence" must {

    "not be done unsafely" in {
      // Small, known source so the test itself is bounded.
      val mySource = Source(1 to 3).map(_.toString)
      //#draining-to-seq-unsafe
      // Dangerous: might produce a collection with 2 billion elements!
      val f: Future[Seq[String]] = mySource.runWith(Sink.seq)
      //#draining-to-seq-unsafe
      f.futureValue should ===(Seq("1", "2", "3"))
    }

    "be done safely" in {
      val mySource = Source(1 to 3).map(_.toString)
      //#draining-to-seq-safe
      val MAX_ALLOWED_SIZE = 100
      // OK. Future will fail with a `StreamLimitReachedException`
      // if the number of incoming elements is larger than max
      val limited: Future[Seq[String]] =
        mySource.limit(MAX_ALLOWED_SIZE).runWith(Sink.seq)
      // OK. Collect up until max-th elements only, then cancel upstream
      val ignoreOverflow: Future[Seq[String]] =
        mySource.take(MAX_ALLOWED_SIZE).runWith(Sink.seq)
      //#draining-to-seq-safe
      // Both bounded variants must still yield the full (small) input.
      limited.futureValue should ===(Seq("1", "2", "3"))
      ignoreOverflow.futureValue should ===(Seq("1", "2", "3"))
    }
  }
}

File diff suppressed because it is too large Load diff

View file

@ -16,7 +16,7 @@ This part also serves as supplementary material for the main body of documentati
open while reading the manual and look for examples demonstrating various streaming concepts
as they appear in the main body of documentation.
If you need a quick reference of the available processing stages used in the recipes see :ref:`stages-overview`.
If you need a quick reference of the available processing stages used in the recipes see :ref:`stages-overview_scala`.
Working with Flows
==================
@ -63,11 +63,11 @@ The function ``limit`` or ``take`` should always be used in conjunction in order
For example, this is best avoided:
.. includecode:: code/docs/stream/cookbook/RecipeSeq.scala#draining-to-seq-unsafe
.. includecode:: ../code/docs/stream/cookbook/RecipeSeq.scala#draining-to-seq-unsafe
Rather, use ``limit`` or ``take`` to ensure that the resulting ``Seq`` will contain only up to ``max`` elements:
.. includecode:: code/docs/stream/cookbook/RecipeSeq.scala#draining-to-seq-safe
.. includecode:: ../code/docs/stream/cookbook/RecipeSeq.scala#draining-to-seq-safe
Calculating the digest of a ByteString stream
---------------------------------------------

View file

@ -38,7 +38,7 @@ Processing Stage
The common name for all building blocks that build up a Graph.
Examples of a processing stage would be operations like ``map()``, ``filter()``, stages added by ``transform()`` like
:class:`PushStage`, :class:`PushPullStage`, :class:`StatefulStage` and graph junctions like ``Merge`` or ``Broadcast``.
For the full list of built-in processing stages see :ref:`stages-overview`
For the full list of built-in processing stages see :ref:`stages-overview_scala`
When we talk about *asynchronous, non-blocking backpressure* we mean that the processing stages available in Akka
Streams will not use blocking calls but asynchronous message passing to exchange messages between each other, and they

View file

@ -24,7 +24,7 @@ Graphs are built from simple Flows which serve as the linear connections within
which serve as fan-in and fan-out points for Flows. Thanks to the junctions having meaningful types based on their behaviour
and making them explicit elements these elements should be rather straightforward to use.
Akka Streams currently provide these junctions (for a detailed list see :ref:`stages-overview`):
Akka Streams currently provide these junctions (for a detailed list see :ref:`stages-overview_scala`):
* **Fan-out**

View file

@ -79,7 +79,7 @@ and for best results we recommend the following approach:
* The bottom-up learners may feel more at home rummaging through the
:ref:`stream-cookbook-scala`.
* For a complete overview of the built-in processing stages you can look at the
table in :ref:`stages-overview`
table in :ref:`stages-overview_scala`
* The other sections can be read sequentially or as needed during the previous
steps, each digging deeper into specific topics.

View file

@ -362,12 +362,12 @@ object StreamLayout {
}
final case class CompositeModule(
override val subModules: Set[Module],
override val shape: Shape,
override val downstreams: Map[OutPort, InPort],
override val upstreams: Map[InPort, OutPort],
override val materializedValueComputation: MaterializedValueNode,
override val attributes: Attributes) extends Module {
override val subModules: Set[Module],
override val shape: Shape,
override val downstreams: Map[OutPort, InPort],
override val upstreams: Map[InPort, OutPort],
override val materializedValueComputation: MaterializedValueNode,
override val attributes: Attributes) extends Module {
override def replaceShape(s: Shape): Module = {
shape.requireSamePortsAs(s)
@ -392,13 +392,13 @@ object StreamLayout {
}
final case class FusedModule(
override val subModules: Set[Module],
override val shape: Shape,
override val downstreams: Map[OutPort, InPort],
override val upstreams: Map[InPort, OutPort],
override val materializedValueComputation: MaterializedValueNode,
override val attributes: Attributes,
info: Fusing.StructuralInfo) extends Module {
override val subModules: Set[Module],
override val shape: Shape,
override val downstreams: Map[OutPort, InPort],
override val upstreams: Map[InPort, OutPort],
override val materializedValueComputation: MaterializedValueNode,
override val attributes: Attributes,
info: Fusing.StructuralInfo) extends Module {
override def isFused: Boolean = true