/*
 * Copyright (C) 2016-2020 Lightbend Inc. <https://www.lightbend.com>
 */

import sbt._
import sbt.Keys._

import scala.util.control.NonFatal

/**
 * Generate the "index" pages of stream operators.
 */
object StreamOperatorsIndexGenerator extends AutoPlugin {

  override val projectSettings: Seq[Setting[_]] = inConfig(Compile)(
    Seq(
      resourceGenerators +=
        generateAlphabeticalIndex(sourceDirectory, _ / "paradox" / "stream" / "operators" / "index.md")))
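
  // Categories shown on the generated index page, in display order. Each category
  // also has an introduction read from categories/<category-id>.md (see below).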
  val categories = Seq(
    "Source operators",
    "Sink operators",
    "Additional Sink and Source converters",
    "File IO Sinks and Sources",
    "Simple operators",
    "Flow operators composed of Sinks and Sources",
    "Asynchronous operators",
    "Timer driven operators",
    "Backpressure aware operators",
    "Nesting and flattening operators",
    "Time aware operators",
    "Fan-in operators",
    "Fan-out operators",
    "Watching status operators",
    "Actor interop operators",
    "Compression operators",
    "Error handling")
  def categoryId(name: String): String = name.toLowerCase.replace(' ', '-')
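
  // Methods on Source/Flow that do not (yet) get an operator page: plumbing and
  // materialization helpers, plus the *Graph variants listed below.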
  val pendingSourceOrFlow = Seq(
    "to",
    "toMat",
    "via",
    "viaMat",
    "async",
    "upcast",
    "shape",
    "run",
    "runWith",
    "traversalBuilder",
    "runFold",
    "runFoldAsync",
    "runForeach",
    "runReduce",
    "named",
    "throttleEven",
    "actorPublisher",
    "addAttributes",
    "mapMaterializedValue",
    // *Graph:
    "concatGraph",
    "prependGraph",
    "mergeSortedGraph",
    "fromGraph",
    "interleaveGraph",
    "zipGraph",
    "mergeGraph",
    "wireTapGraph",
    "alsoToGraph",
    "orElseGraph",
    "divertToGraph",
    "zipWithGraph")

  // FIXME document these methods as well
  val pendingTestCases = Map(
    "Source" -> (pendingSourceOrFlow ++ Seq("preMaterialize")),
    "Flow" -> (pendingSourceOrFlow ++ Seq(
      "lazyInit",
      "fromProcessorMat",
      "toProcessor",
      "fromProcessor",
      "of",
      "join",
      "joinMat",
      "fromFunction")),
    "Sink" -> Seq(
      "lazyInit",
      "contramap",
      "named",
      "addAttributes",
      "async",
      "mapMaterializedValue",
      "runWith",
      "shape",
      "traversalBuilder",
      "fromGraph",
      "actorSubscriber",
      "foldAsync",
      "newOnCompleteStage"))
  val ignore =
    Set("equals", "hashCode", "notify", "notifyAll", "wait", "toString", "getClass") ++
    Set("productArity", "canEqual", "productPrefix", "copy", "productIterator", "productElement") ++
    Set(
      "create",
      "apply",
      "ops",
      "appendJava",
      "andThen",
      "andThenMat",
      "isIdentity",
      "withAttributes",
      "transformMaterializing") ++
    Set("asScala", "asJava", "deprecatedAndThen", "deprecatedAndThenMat") ++
    Set("++", "onPush", "onPull", "actorRefWithAck")

  def isPending(element: String, opName: String) =
    pendingTestCases.get(element).exists(_.contains(opName))
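
  // Placeholder "element" for standalone graph stages (Partition, Broadcast, ...)
  // that are not methods on Source/Flow/Sink; renders as an empty cell in the tables.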
  val noElement = " "
  def generateAlphabeticalIndex(dir: SettingKey[File], locate: File => File) = Def.task[Seq[File]] {
    val file = locate(dir.value)
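
    // Collect (element, operator) pairs: every `def` in the listed scaladsl/javadsl
    // sources that is neither pending nor ignored, with any trailing `Mat` dropped.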
    val defs =
      List(
        "akka-stream/src/main/scala/akka/stream/scaladsl/Source.scala",
        "akka-stream/src/main/scala/akka/stream/javadsl/Source.scala",
        // "akka-stream/src/main/scala/akka/stream/scaladsl/SubSource.scala",
        // "akka-stream/src/main/scala/akka/stream/javadsl/SubSource.scala",
        "akka-stream/src/main/scala/akka/stream/scaladsl/Flow.scala",
        "akka-stream/src/main/scala/akka/stream/javadsl/Flow.scala",
        // "akka-stream/src/main/scala/akka/stream/scaladsl/SubFlow.scala",
        // "akka-stream/src/main/scala/akka/stream/javadsl/SubFlow.scala",
        // "akka-stream/src/main/scala/akka/stream/scaladsl/RunnableFlow.scala",
        // "akka-stream/src/main/scala/akka/stream/javadsl/RunnableFlow.scala",
        "akka-stream/src/main/scala/akka/stream/scaladsl/Sink.scala",
        "akka-stream/src/main/scala/akka/stream/javadsl/Sink.scala",
        "akka-stream/src/main/scala/akka/stream/scaladsl/StreamConverters.scala",
        "akka-stream/src/main/scala/akka/stream/javadsl/StreamConverters.scala",
        "akka-stream/src/main/scala/akka/stream/scaladsl/FileIO.scala",
        "akka-stream/src/main/scala/akka/stream/javadsl/FileIO.scala",
        "akka-stream/src/main/scala/akka/stream/scaladsl/RestartSource.scala",
        "akka-stream/src/main/scala/akka/stream/javadsl/RestartSource.scala",
        "akka-stream/src/main/scala/akka/stream/scaladsl/RestartFlow.scala",
        "akka-stream/src/main/scala/akka/stream/javadsl/RestartFlow.scala",
        "akka-stream/src/main/scala/akka/stream/scaladsl/RestartSink.scala",
        "akka-stream/src/main/scala/akka/stream/javadsl/RestartSink.scala",
        "akka-stream/src/main/scala/akka/stream/scaladsl/RetryFlow.scala",
        "akka-stream/src/main/scala/akka/stream/javadsl/RetryFlow.scala",
        "akka-stream/src/main/scala/akka/stream/scaladsl/Compression.scala",
        "akka-stream/src/main/scala/akka/stream/javadsl/Compression.scala",
        // akka-stream-typed
        "akka-stream-typed/src/main/scala/akka/stream/typed/javadsl/ActorSource.scala",
        "akka-stream-typed/src/main/scala/akka/stream/typed/scaladsl/ActorSource.scala",
        "akka-stream-typed/src/main/scala/akka/stream/typed/javadsl/ActorFlow.scala",
        "akka-stream-typed/src/main/scala/akka/stream/typed/scaladsl/ActorFlow.scala",
        "akka-stream-typed/src/main/scala/akka/stream/typed/scaladsl/ActorSink.scala",
        "akka-stream-typed/src/main/scala/akka/stream/typed/javadsl/ActorSink.scala").flatMap { f =>
        val slashesNr = f.count(_ == '/')
        val element = f.split("/")(slashesNr).split("\\.")(0)
        IO.read(new File(f))
          .split("\n")
          .map(_.trim)
          .filter(_.startsWith("def "))
          .map(_.drop(4).takeWhile(c => c != '[' && c != '(' && c != ':'))
          .filter(op => !isPending(element, op))
          .filter(op => !ignore.contains(op))
          .map(_.replaceAll("Mat$", ""))
          .map(method => (element, method))
      } ++ List(
        (noElement, "Partition"),
(noElement, "MergeSequence"),
|
2019-05-31 12:56:25 +02:00
|
|
|
(noElement, "Broadcast"),
|
|
|
|
|
(noElement, "Balance"),
|
|
|
|
|
(noElement, "Unzip"),
|
2019-11-21 14:29:20 +01:00
|
|
|
(noElement, "UnzipWith"))
    val sourceAndFlow =
      defs.collect { case ("Source", method) => method }.intersect(defs.collect { case ("Flow", method) => method })
    val groupedDefs =
      defs.map {
        case (element @ ("Source" | "Flow"), method) if sourceAndFlow.contains(method) =>
          ("Source/Flow", method, s"Source-or-Flow/$method.md")
        case (`noElement`, method) =>
          (noElement, method, s"$method.md")
        case (element, method) =>
          (element, method, s"$element/$method.md")
      }.distinct
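
    // One markdown table per category, rows sorted by operator name, each row
    // linking to the operator page with its short description.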
    val tablePerCategory = groupedDefs
      .map {
        case (element, method, md) =>
          val (description, category) = getDetails(file.getParentFile / md)
          category -> (element, method, md, description)
      }
      .groupBy(_._1)
      .mapValues(
        lines =>
          "| |Operator|Description|\n" ++ // TODO mini images here too
          "|--|--|--|\n" ++
          lines
            .map(_._2)
            .sortBy(_._2)
            .map {
              case (element, method, md, description) =>
                s"""|$element|<a name="${method.toLowerCase}"></a>@ref[${methodToShow(method)}]($md)|$description|"""
            }
            .mkString("\n"))
    val tables = categories
      .map { category =>
        s"## $category\n\n" ++
        IO.read(dir.value / "categories" / (categoryId(category) + ".md")) ++ "\n\n" ++
        tablePerCategory(category)
      }
      .mkString("\n\n")
    val content =
      "<!-- DO NOT EDIT DIRECTLY: This file is generated by `project/StreamOperatorsIndexGenerator`. See CONTRIBUTING.md for details. -->\n" +
      "# Operators\n\n" +
      tables +
      "\n\n@@@ index\n\n" +
      groupedDefs
        .sortBy { case (_, method, _) => method.toLowerCase }
        .map { case (_, method, md) => s"* [$method]($md)" }
        .mkString("\n") + "\n\n@@@\n"
    if (!file.exists || IO.read(file) != content) IO.write(file, content)
    Seq(file)
  }
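
  // `from` is `apply` in the Scala DSL, so the index shows both names.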
  def methodToShow(method: String): String = method match {
    case "from" => "@scala[apply]@java[from]"
    case other  => other
  }
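
  // Extracts (short description, category name) from an operator page.
  // Minimal sketch of the expected layout (illustrative, not a real page):
  //
  //   # operatorName
  //   <blank>
  //   One-line description of the operator.
  //   <blank>
  //   @ref[Category name](index.md#category-id)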
  def getDetails(file: File): (String, String) =
    try {
      val contents = IO.read(file)
      val lines = contents.split("\\r?\\n")
      require(
        lines.size >= 5,
        s"There must be at least 5 lines in $file, including the title, description, category link and an empty line between each two of them")
      // This forces the short description to be on a single line. We could make this smarter,
      // but 'forcing' the short description to be really short seems nice as well.
      val description = lines(2)
        .replaceAll("ref:?\\[(.*?)\\]\\(", "ref[$1](" + file.getAbsolutePath.replaceFirst(".*/([^/]+/).*", "$1"))
      require(!description.isEmpty, s"description in $file must be a non-empty, single-line description on the 3rd line")
      val categoryLink = lines(4)
      require(
        categoryLink.startsWith("@ref"),
        s"""category link in $file should start with @ref, but saw \"$categoryLink\"""")
      val categoryName = categoryLink.drop(5).takeWhile(_ != ']')
      val categoryLinkId = categoryLink.dropWhile(_ != '#').drop(1).takeWhile(_ != ')')
      require(categories.contains(categoryName), s"category $categoryName in $file should be known")
      require(
        categoryLinkId == categoryId(categoryName),
        s"category id $categoryLinkId in $file should be ${categoryId(categoryName)}")
      (description, categoryName)
    } catch {
      case NonFatal(ex) =>
        throw new RuntimeException(s"Unable to extract details from $file", ex)
    }
}