/**
 * Copyright (C) 2014 Typesafe Inc. <http://www.typesafe.com>
 */
package docs.stream

//#imports
import java.util.Date

import akka.actor.ActorSystem
import akka.stream.FlowMaterializer
import akka.stream.OverflowStrategy
import akka.stream.scaladsl.Broadcast
import akka.stream.scaladsl.Flow
import akka.stream.scaladsl.FlowGraph
import akka.stream.scaladsl.FlowGraphImplicits
import akka.stream.scaladsl.MaterializedMap
import akka.stream.scaladsl.RunnableFlow
import akka.stream.scaladsl.Sink
import akka.stream.scaladsl.Source

import scala.concurrent.Await
import scala.concurrent.Future
//#imports

import akka.stream.testkit.AkkaSpec

object TwitterStreamQuickstartDocSpec {
  //#model
  final case class Author(handle: String)

  final case class Hashtag(name: String)

  final case class Tweet(author: Author, timestamp: Long, body: String) {
    def hashtags: Set[Hashtag] =
      body.split(" ").collect { case t if t.startsWith("#") => Hashtag(t) }.toSet
  }

  val akka = Hashtag("#akka")
  //#model

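  // a fixed list of sample tweets used as the Source in the examples below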
  val tweets = Source(
    Tweet(Author("rolandkuhn"), (new Date).getTime, "#akka rocks!") ::
      Tweet(Author("patriknw"), (new Date).getTime, "#akka !") ::
      Tweet(Author("bantonsson"), (new Date).getTime, "#akka !") ::
      Tweet(Author("drewhk"), (new Date).getTime, "#akka !") ::
      Tweet(Author("ktosopl"), (new Date).getTime, "#akka on the rocks!") ::
      Tweet(Author("mmartynas"), (new Date).getTime, "wow #akka !") ::
      Tweet(Author("akkateam"), (new Date).getTime, "#akka rocks!") ::
      Tweet(Author("bananaman"), (new Date).getTime, "#bananas rock!") ::
      Tweet(Author("appleman"), (new Date).getTime, "#apples rock!") ::
      Tweet(Author("drama"), (new Date).getTime, "we compared #apples to #oranges!") ::
      Nil)
}

class TwitterStreamQuickstartDocSpec extends AkkaSpec {
  import TwitterStreamQuickstartDocSpec._

  implicit val executionContext = system.dispatcher

  trait Example0 {
    //#tweet-source
    val tweets: Source[Tweet]
    //#tweet-source
  }

  trait Example1 {
    //#materializer-setup
    implicit val system = ActorSystem("reactive-tweets")
    implicit val mat = FlowMaterializer()
    //#materializer-setup
  }
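
  // the tests below run on the spec's own ActorSystem (provided by AkkaSpec),
  // so only a FlowMaterializer needs to be created here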
  implicit val mat = FlowMaterializer()

"filter and map" in {
|
|
|
|
|
//#authors-filter-map
|
|
|
|
|
val authors: Source[Author] =
|
|
|
|
|
tweets
|
2014-12-25 11:33:42 +01:00
|
|
|
.filter(_.hashtags.contains(akka))
|
2014-12-11 14:57:48 +01:00
|
|
|
.map(_.author)
|
|
|
|
|
//#authors-filter-map

    trait Example3 {
      //#authors-collect
      val authors: Source[Author] =
        tweets.collect { case t if t.hashtags.contains(akka) => t.author }
      //#authors-collect
    }

    //#authors-foreachsink-println
    authors.runWith(Sink.foreach(println))
    //#authors-foreachsink-println

    //#authors-foreach-println
    authors.foreach(println)
    //#authors-foreach-println
  }

  "mapConcat hashtags" in {
    //#hashtags-mapConcat
    val hashtags: Source[Hashtag] = tweets.mapConcat(_.hashtags.toList)
    //#hashtags-mapConcat
  }

  trait HiddenDefinitions {
    //#flow-graph-broadcast
    val writeAuthors: Sink[Author] = ???
    val writeHashtags: Sink[Hashtag] = ???
    //#flow-graph-broadcast
  }

  "simple broadcast" in {
    val writeAuthors: Sink[Author] = Sink.ignore
    val writeHashtags: Sink[Hashtag] = Sink.ignore

    // format: OFF
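    // the Broadcast junction sends every tweet to both branches:
    // one extracts the author, the other the hashtags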
    //#flow-graph-broadcast
    val g = FlowGraph { implicit builder =>
      import FlowGraphImplicits._

      val b = Broadcast[Tweet]
      tweets ~> b ~> Flow[Tweet].map(_.author)                ~> writeAuthors
                b ~> Flow[Tweet].mapConcat(_.hashtags.toList) ~> writeHashtags
    }

    g.run()
    //#flow-graph-broadcast
    // format: ON
  }

  "slowProcessing" in {
    def slowComputation(t: Tweet): Long = {
      Thread.sleep(500) // act as if performing some heavy computation
      42
    }
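
    // buffer(10, OverflowStrategy.dropHead) keeps at most 10 elements and drops
    // the oldest buffered tweet when full, so the slow consumer never stalls upstream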
    //#tweets-slow-consumption-dropHead
    tweets
      .buffer(10, OverflowStrategy.dropHead)
      .map(slowComputation)
      .runWith(Sink.ignore)
    //#tweets-slow-consumption-dropHead
  }

  "backpressure by readline" in {
    trait X {
      import scala.concurrent.duration._
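
      // the blocking readLine slows the sink down; backpressure keeps the Source
      // from running more than the internal buffers ahead of the consumer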
      //#backpressure-by-readline
      val completion: Future[Unit] =
        Source(1 to 10)
          .map(i => { println(s"map => $i"); i })
          .foreach { i => readLine(s"Element = $i; continue reading? [press enter]\n") }

      Await.ready(completion, 1.minute)
      //#backpressure-by-readline
    }
  }

  "count elements on finite stream" in {
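    // Sink.fold materializes a Future[Int]: mapping every tweet to 1 and folding
    // with + yields the total number of tweets once the stream completes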
    //#tweets-fold-count
    val sumSink = Sink.fold[Int, Int](0)(_ + _)

    val counter: RunnableFlow = tweets.map(t => 1).to(sumSink)

    val map: MaterializedMap = counter.run()

    val sum: Future[Int] = map.get(sumSink)

    sum.foreach(c => println(s"Total tweets processed: $c"))
    //#tweets-fold-count

    new AnyRef {
      //#tweets-fold-count-oneline
      val sum: Future[Int] = tweets.map(t => 1).runWith(sumSink)
      //#tweets-fold-count-oneline
    }
  }

  "materialize multiple times" in {
    val tweetsInMinuteFromNow = tweets // not really in a minute, just acting as if

    //#tweets-runnable-flow-materialized-twice
    val sumSink = Sink.fold[Int, Int](0)(_ + _)
    val counterRunnableFlow: RunnableFlow =
      tweetsInMinuteFromNow
        .filter(_.hashtags contains akka)
        .map(t => 1)
        .to(sumSink)

    // materialize the stream once in the morning
    val morningMaterialized = counterRunnableFlow.run()
    // and once in the evening, reusing the same flow description
    val eveningMaterialized = counterRunnableFlow.run()

    // the sumSink was materialized into two different futures;
    // we use it as a key to read the value out of each materialized map
    val morningTweetsCount: Future[Int] = morningMaterialized.get(sumSink)
    val eveningTweetsCount: Future[Int] = eveningMaterialized.get(sumSink)
    //#tweets-runnable-flow-materialized-twice

    val map: MaterializedMap = counterRunnableFlow.run()

    val sum: Future[Int] = map.get(sumSink)

    sum.map { c => println(s"Total tweets processed: $c") }
  }
}