+doc #19429 initial merge of docs-dev and docs

Konrad Malawski 2016-01-13 16:25:24 +01:00
parent be0c8af4c0
commit 5a18d43435
501 changed files with 9876 additions and 3681 deletions


@@ -21,15 +21,21 @@ import akka.util._
import scala.concurrent.duration._
import scala.concurrent.Await
//#my-actor
class MyActor extends Actor {
val log = Logging(context.system, this)
def receive = {
case "test" => log.info("received test")
case _ => log.info("received unknown message")
object x {
//#my-actor
class MyActor extends Actor {
val log = Logging(context.system, this)
def receive = {
case "test" => log.info("received test")
case _ => log.info("received unknown message")
}
}
//#my-actor
}
//#my-actor
import x._
final case class DoIt(msg: ImmutableMessage)
final case class Message(s: String)
@@ -54,6 +60,7 @@ class DemoActorWrapper extends Actor {
object DemoActor {
/**
* Create Props for an actor of this type.
*
* @param magicNumber The magic number to be passed to this actor's constructor.
* @return a Props for creating this actor, which can then be further configured
* (e.g. calling `.withDispatcher()` on it)
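The body of `props` is not visible in this hunk; for reference, a minimal sketch of the factory pattern the Scaladoc above describes (the DemoActor constructor and message handling below are assumptions for illustration, not part of the diff):

import akka.actor.{ Actor, Props }

object DemoActor {
  /** Create Props for an actor of this type, as documented above. */
  def props(magicNumber: Int): Props = Props(new DemoActor(magicNumber))
}

class DemoActor(magicNumber: Int) extends Actor {
  def receive = {
    case x: Int => sender() ! (x + magicNumber) // assumed behaviour, for illustration only
  }
}

// usage, e.g. with a custom dispatcher as the Scaladoc mentions:
// context.actorOf(DemoActor.props(42).withDispatcher("my-dispatcher"), "demo")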
@@ -257,7 +264,10 @@ final case class Give(thing: Any)
//#receive-orElse
class ActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) {
class ActorDocSpec extends AkkaSpec("""
akka.loglevel = INFO
akka.loggers = []
""") {
"import context" in {
new AnyRef {


@@ -0,0 +1,108 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl
import akka.actor.{ ActorLogging, ActorSystem }
import akka.util.ByteString
import org.scalatest.{ Matchers, WordSpec }
class HttpClientExampleSpec extends WordSpec with Matchers {
"outgoing-connection-example" in {
pending // compile-time only test
//#outgoing-connection-example
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.stream.ActorMaterializer
import akka.stream.scaladsl._
import scala.concurrent.Future
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
val connectionFlow: Flow[HttpRequest, HttpResponse, Future[Http.OutgoingConnection]] =
Http().outgoingConnection("akka.io")
val responseFuture: Future[HttpResponse] =
Source.single(HttpRequest(uri = "/"))
.via(connectionFlow)
.runWith(Sink.head)
//#outgoing-connection-example
}
"host-level-example" in {
pending // compile-time only test
//#host-level-example
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.stream.ActorMaterializer
import akka.stream.scaladsl._
import scala.concurrent.Future
import scala.util.Try
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
// construct a pool client flow with context type `Int`
val poolClientFlow = Http().cachedHostConnectionPool[Int]("akka.io")
val responseFuture: Future[(Try[HttpResponse], Int)] =
Source.single(HttpRequest(uri = "/") -> 42)
.via(poolClientFlow)
.runWith(Sink.head)
//#host-level-example
}
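A hedged follow-up sketch (not part of the commit, reusing `poolClientFlow` and the imports from the host-level example above): the `Int` context attached to each request is handed back together with the matching response, so it can carry a correlation id. The URIs and ids below are made up.

// assumes poolClientFlow and the imports from the host-level example above
val requests = Source(List(
  HttpRequest(uri = "/orders/1") -> 1,
  HttpRequest(uri = "/orders/2") -> 2))

requests
  .via(poolClientFlow) // the pooled flow defined above
  .runForeach { case (tryResponse, orderId) =>
    println(s"request for order $orderId completed with: $tryResponse")
  }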
"single-request-example" in {
pending // compile-time only test
//#single-request-example
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.stream.ActorMaterializer
import scala.concurrent.Future
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
val responseFuture: Future[HttpResponse] =
Http().singleRequest(HttpRequest(uri = "http://akka.io"))
//#single-request-example
}
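A small, hedged follow-up (an assumption, not part of the commit): a stand-alone snippet like the one above leaves the ActorSystem running; one way to release it once the response arrives is shown below.

// assumes responseFuture and system from the single-request example above
import system.dispatcher // ExecutionContext for the transformation below

responseFuture
  .andThen { case _ => system.shutdown() } // shut the system down once the request has completed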
"single-request-in-actor-example" in {
pending // compile-time only test
//#single-request-in-actor-example
import akka.actor.Actor
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.stream.scaladsl.ImplicitMaterializer
class Myself extends Actor
with ImplicitMaterializer
with ActorLogging {
import akka.pattern.pipe
import context.dispatcher
val http = Http(context.system)
override def preStart() = {
http.singleRequest(HttpRequest(uri = "http://akka.io"))
.pipeTo(self)
}
def receive = {
case HttpResponse(StatusCodes.OK, headers, entity, _) =>
log.info("Got response, body: " + entity.dataBytes.runFold(ByteString(""))(_ ++ _))
case HttpResponse(code, _, _, _) =>
log.info("Request failed, response code: " + code)
}
}
//#single-request-in-actor-example
}
}


@@ -0,0 +1,398 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl
import akka.actor.{ ActorRef, ActorSystem }
import akka.event.LoggingAdapter
import akka.http.scaladsl.Http
import akka.http.scaladsl.Http.ServerBinding
import akka.http.scaladsl.model._
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Flow, Sink }
import akka.stream.stage.{ Context, PushStage }
import akka.testkit.TestActors
import org.scalatest.{ Matchers, WordSpec }
import scala.language.postfixOps
import scala.concurrent.{ ExecutionContext, Future }
class HttpServerExampleSpec extends WordSpec with Matchers {
// never actually called
val log: LoggingAdapter = null
def compileOnlySpec(body: => Unit) = ()
"binding-example" in compileOnlySpec {
import akka.http.scaladsl.Http
import akka.stream.ActorMaterializer
import akka.stream.scaladsl._
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
implicit val ec = system.dispatcher
val serverSource: Source[Http.IncomingConnection, Future[Http.ServerBinding]] =
Http().bind(interface = "localhost", port = 8080)
val bindingFuture: Future[Http.ServerBinding] =
serverSource.to(Sink.foreach { connection => // foreach materializes the source
println("Accepted new connection from " + connection.remoteAddress)
// ... and then actually handle the connection
}).run()
}
"binding-failure-high-level-example" in compileOnlySpec {
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Directives._
import akka.stream.ActorMaterializer
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
implicit val ec = system.dispatcher
val handler = get {
complete("Hello world!")
}
// let's say the OS won't allow us to bind to 80.
val (host, port) = ("localhost", 80)
val bindingFuture: Future[ServerBinding] =
Http().bindAndHandle(handler, host, port)
bindingFuture onFailure {
case ex: Exception =>
log.error(ex, "Failed to bind to {}:{}!", host, port)
}
}
// mock values:
val handleConnections: Sink[Http.IncomingConnection, Future[Http.ServerBinding]] =
Sink.ignore.mapMaterializedValue(_ => Future.failed(new Exception("")))
"binding-failure-handling" in compileOnlySpec {
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
implicit val ec = system.dispatcher
// let's say the OS won't allow us to bind to 80.
val (host, port) = ("localhost", 80)
val serverSource = Http().bind(host, port)
val bindingFuture: Future[ServerBinding] = serverSource
.to(handleConnections) // Sink[Http.IncomingConnection, _]
.run()
bindingFuture onFailure {
case ex: Exception =>
log.error(ex, "Failed to bind to {}:{}!", host, port)
}
}
object MyExampleMonitoringActor {
def props = TestActors.echoActorProps
}
"incoming-connections-source-failure-handling" in compileOnlySpec {
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
implicit val ec = system.dispatcher
import Http._
val (host, port) = ("localhost", 8080)
val serverSource = Http().bind(host, port)
val failureMonitor: ActorRef = system.actorOf(MyExampleMonitoringActor.props)
val reactToTopLevelFailures = Flow[IncomingConnection]
.transform { () =>
new PushStage[IncomingConnection, IncomingConnection] {
override def onPush(elem: IncomingConnection, ctx: Context[IncomingConnection]) =
ctx.push(elem)
override def onUpstreamFailure(cause: Throwable, ctx: Context[IncomingConnection]) = {
failureMonitor ! cause
super.onUpstreamFailure(cause, ctx)
}
}
}
serverSource
.via(reactToTopLevelFailures)
.to(handleConnections) // Sink[Http.IncomingConnection, _]
.run()
}
"connection-stream-failure-handling" in compileOnlySpec {
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
implicit val ec = system.dispatcher
val (host, port) = ("localhost", 8080)
val serverSource = Http().bind(host, port)
val reactToConnectionFailure = Flow[HttpRequest]
.transform { () =>
new PushStage[HttpRequest, HttpRequest] {
override def onPush(elem: HttpRequest, ctx: Context[HttpRequest]) =
ctx.push(elem)
override def onUpstreamFailure(cause: Throwable, ctx: Context[HttpRequest]) = {
// handle the failure somehow
super.onUpstreamFailure(cause, ctx)
}
}
}
val httpEcho = Flow[HttpRequest]
.via(reactToConnectionFailure)
.map { request =>
// simple text "echo" response:
HttpResponse(entity = HttpEntity(ContentTypes.`text/plain(UTF-8)`, request.entity.dataBytes))
}
serverSource
.runForeach { con =>
con.handleWith(httpEcho)
}
}
"full-server-example" in compileOnlySpec {
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.HttpMethods._
import akka.http.scaladsl.model._
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
val serverSource = Http().bind(interface = "localhost", port = 8080)
val requestHandler: HttpRequest => HttpResponse = {
case HttpRequest(GET, Uri.Path("/"), _, _, _) =>
HttpResponse(entity = HttpEntity(ContentTypes.`text/html(UTF-8)`,
"<html><body>Hello world!</body></html>"))
case HttpRequest(GET, Uri.Path("/ping"), _, _, _) =>
HttpResponse(entity = "PONG!")
case HttpRequest(GET, Uri.Path("/crash"), _, _, _) =>
sys.error("BOOM!")
case _: HttpRequest =>
HttpResponse(404, entity = "Unknown resource!")
}
val bindingFuture: Future[Http.ServerBinding] =
serverSource.to(Sink.foreach { connection =>
println("Accepted new connection from " + connection.remoteAddress)
connection handleWithSyncHandler requestHandler
// this is equivalent to
// connection handleWith { Flow[HttpRequest] map requestHandler }
}).run()
}
"low-level-server-example" in compileOnlySpec {
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.HttpMethods._
import akka.http.scaladsl.model._
import akka.stream.ActorMaterializer
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
val requestHandler: HttpRequest => HttpResponse = {
case HttpRequest(GET, Uri.Path("/"), _, _, _) =>
HttpResponse(entity = HttpEntity(ContentTypes.`text/html(UTF-8)`,
"<html><body>Hello world!</body></html>"))
case HttpRequest(GET, Uri.Path("/ping"), _, _, _) =>
HttpResponse(entity = "PONG!")
case HttpRequest(GET, Uri.Path("/crash"), _, _, _) =>
sys.error("BOOM!")
case _: HttpRequest =>
HttpResponse(404, entity = "Unknown resource!")
}
Http().bindAndHandleSync(requestHandler, "localhost", 8080)
}
// format: OFF
"high-level-server-example" in compileOnlySpec {
import akka.http.scaladsl.Http
import akka.http.scaladsl.marshallers.xml.ScalaXmlSupport._
import akka.http.scaladsl.server.Directives._
import akka.stream.ActorMaterializer
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
val route =
get {
pathSingleSlash {
complete {
<html>
<body>Hello world!</body>
</html>
}
} ~
path("ping") {
complete("PONG!")
} ~
path("crash") {
sys.error("BOOM!")
}
}
// `route` will be implicitly converted to `Flow` using `RouteResult.route2HandlerFlow`
Http().bindAndHandle(route, "localhost", 8080)
}
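For illustration only (a sketch, not in the commit): the implicit conversion mentioned in the comment above can also be applied explicitly via Route.handlerFlow, which makes the request-to-response Flow visible as a value before binding it.

// assumes route, Http() and the implicits from the high-level server example above
import akka.http.scaladsl.server.Route

val handlerFlow = Route.handlerFlow(route) // explicit form of the implicit conversion
Http().bindAndHandle(handlerFlow, "localhost", 8081) // port 8081 chosen arbitrarily for the sketch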
"minimal-routing-example" in compileOnlySpec {
import akka.http.scaladsl.Http
import akka.http.scaladsl.marshallers.xml.ScalaXmlSupport._
import akka.http.scaladsl.server.Directives._
import akka.stream.ActorMaterializer
object Main extends App {
implicit val system = ActorSystem("my-system")
implicit val materializer = ActorMaterializer()
implicit val ec = system.dispatcher
val route =
path("hello") {
get {
complete {
<h1>Say hello to akka-http</h1>
}
}
}
val bindingFuture = Http().bindAndHandle(route, "localhost", 8080)
println(s"Server online at http://localhost:8080/\nPress RETURN to stop...")
Console.readLine() // for the future transformations
bindingFuture
.flatMap(_.unbind()) // trigger unbinding from the port
.onComplete(_ => system.shutdown()) // and shutdown when done
}
}
"long-routing-example" in compileOnlySpec {
//#long-routing-example
import akka.actor.ActorRef
import akka.http.scaladsl.coding.Deflate
import akka.http.scaladsl.marshalling.ToResponseMarshaller
import akka.http.scaladsl.model.StatusCodes.MovedPermanently
import akka.http.scaladsl.server.Directives._
// TODO: these explicit imports are only needed in complex cases, like below; Also, not needed on Scala 2.11
import akka.http.scaladsl.server.directives.ParameterDirectives.ParamMagnet
import akka.http.scaladsl.server.directives.FormFieldDirectives.FieldMagnet
import akka.http.scaladsl.unmarshalling.FromRequestUnmarshaller
import akka.pattern.ask
import akka.util.Timeout
// types used by the API routes
type Money = Double // only for demo purposes, don't try this at home!
type TransactionResult = String
case class User(name: String)
case class Order(email: String, amount: Money)
case class Update(order: Order)
case class OrderItem(i: Int, os: Option[String], s: String)
// marshalling would usually be derived automatically using libraries
implicit val orderUM: FromRequestUnmarshaller[Order] = ???
implicit val orderM: ToResponseMarshaller[Order] = ???
implicit val orderSeqM: ToResponseMarshaller[Seq[Order]] = ???
implicit val timeout: Timeout = ??? // for actor asks
implicit val ec: ExecutionContext = ???
implicit val mat: ActorMaterializer = ???
implicit val sys: ActorSystem = ???
// backend entry points
def myAuthenticator: Authenticator[User] = ???
def retrieveOrdersFromDB: Seq[Order] = ???
def myDbActor: ActorRef = ???
def processOrderRequest(id: Int, complete: Order => Unit): Unit = ???
val route = {
path("orders") {
authenticateBasic(realm = "admin area", myAuthenticator) { user =>
get {
encodeResponseWith(Deflate) {
complete {
// marshal custom object with in-scope marshaller
retrieveOrdersFromDB
}
}
} ~
post {
// decompress gzipped or deflated requests if required
decodeRequest {
// unmarshal with in-scope unmarshaller
entity(as[Order]) { order =>
complete {
// ... write order to DB
"Order received"
}
}
}
}
}
} ~
// extract URI path element as Int
pathPrefix("order" / IntNumber) { orderId =>
pathEnd {
(put | parameter('method ! "put")) {
// form extraction from multipart or www-url-encoded forms
formFields(('email, 'total.as[Money])).as(Order) { order =>
complete {
// complete with serialized Future result
(myDbActor ? Update(order)).mapTo[TransactionResult]
}
}
} ~
get {
// debugging helper
logRequest("GET-ORDER") {
// use in-scope marshaller to create completer function
completeWith(instanceOf[Order]) { completer =>
// custom
processOrderRequest(orderId, completer)
}
}
}
} ~
path("items") {
get {
// parameters to case class extraction
parameters(('size.as[Int], 'color ?, 'dangerous ? "no"))
.as(OrderItem) { orderItem =>
// ... route using case class instance created from
// required and optional query parameters
complete("") // hide
}
}
}
} ~
pathPrefix("documentation") {
// optionally compresses the response with Gzip or Deflate
// if the client accepts compressed responses
encodeResponse {
// serve up static content from a JAR resource
getFromResourceDirectory("docs")
}
} ~
path("oldApi" / Rest) { pathRest =>
redirect("http://oldapi.example.com/" + pathRest, MovedPermanently)
}
}
}
}
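The routing example above stubs its (un)marshallers with ??? and notes that they "would usually be derived automatically using libraries". A hedged sketch of what that typically looks like with spray-json (the same SprayJsonSupport approach used in the spray-json example later in this commit); the format definitions below are assumptions, not part of the diff:

// Order, OrderItem and Money as defined in the routing example above
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import spray.json.DefaultJsonProtocol._

// one JsonFormat per case class; SprayJsonSupport then provides the
// FromRequestUnmarshaller / ToResponseMarshaller instances required above
implicit val orderFormat = jsonFormat2(Order)
implicit val orderItemFormat = jsonFormat3(OrderItem)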


@@ -0,0 +1,38 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl
import akka.stream.testkit.AkkaSpec
class MarshalSpec extends AkkaSpec {
"use marshal" in {
import scala.concurrent.Await
import scala.concurrent.duration._
import akka.http.scaladsl.marshalling.Marshal
import akka.http.scaladsl.model._
import system.dispatcher // ExecutionContext
val string = "Yeah"
val entityFuture = Marshal(string).to[MessageEntity]
val entity = Await.result(entityFuture, 1.second) // don't block in non-test code!
entity.contentType shouldEqual ContentTypes.`text/plain(UTF-8)`
val errorMsg = "Easy, pal!"
val responseFuture = Marshal(420 -> errorMsg).to[HttpResponse]
val response = Await.result(responseFuture, 1.second) // don't block in non-test code!
response.status shouldEqual StatusCodes.EnhanceYourCalm
response.entity.contentType shouldEqual ContentTypes.`text/plain(UTF-8)`
val request = HttpRequest(headers = List(headers.Accept(MediaTypes.`application/json`)))
val responseText = "Plaintext"
val respFuture = Marshal(responseText).toResponseFor(request) // with content negotiation!
a[Marshal.UnacceptableResponseContentTypeException] should be thrownBy {
Await.result(respFuture, 1.second) // client requested JSON, we only have text/plain!
}
}
}


@@ -0,0 +1,91 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl
//#import-model
import akka.http.scaladsl.model._
//#import-model
import akka.stream.testkit.AkkaSpec
import akka.util.ByteString
import akka.http.scaladsl.model.headers.BasicHttpCredentials
class ModelSpec extends AkkaSpec {
"construct request" in {
//#construct-request
import HttpMethods._
// construct a simple GET request to `homeUri`
val homeUri = Uri("/abc")
HttpRequest(GET, uri = homeUri)
// construct simple GET request to "/index" (implicit string to Uri conversion)
HttpRequest(GET, uri = "/index")
// construct simple POST request containing entity
val data = ByteString("abc")
HttpRequest(POST, uri = "/receive", entity = data)
// customize every detail of HTTP request
import HttpProtocols._
import MediaTypes._
import HttpCharsets._
val userData = ByteString("abc")
val authorization = headers.Authorization(BasicHttpCredentials("user", "pass"))
HttpRequest(
PUT,
uri = "/user",
entity = HttpEntity(`text/plain` withCharset `UTF-8`, userData),
headers = List(authorization),
protocol = `HTTP/1.0`)
//#construct-request
}
"construct response" in {
//#construct-response
import StatusCodes._
// simple OK response without data created using the integer status code
HttpResponse(200)
// 404 response created using the named StatusCode constant
HttpResponse(NotFound)
// 404 response with a body explaining the error
HttpResponse(404, entity = "Unfortunately, the resource couldn't be found.")
// A redirecting response containing an extra header
val locationHeader = headers.Location("http://example.com/other")
HttpResponse(Found, headers = List(locationHeader))
//#construct-response
}
"deal with headers" in {
//#headers
import akka.http.scaladsl.model.headers._
// create a ``Location`` header
val loc = Location("http://example.com/other")
// create an ``Authorization`` header with HTTP Basic authentication data
val auth = Authorization(BasicHttpCredentials("joe", "josepp"))
// custom type
case class User(name: String, pass: String)
// a method that extracts basic HTTP credentials from a request
def credentialsOfRequest(req: HttpRequest): Option[User] =
for {
Authorization(BasicHttpCredentials(user, pass)) <- req.header[Authorization]
} yield User(user, pass)
//#headers
credentialsOfRequest(HttpRequest(headers = List(auth))) should be(Some(User("joe", "josepp")))
credentialsOfRequest(HttpRequest()) should be(None)
credentialsOfRequest(HttpRequest(headers = List(Authorization(GenericHttpCredentials("Other", Map.empty[String, String]))))) should be(None)
}
}


@@ -0,0 +1,50 @@
/*
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import akka.http.scaladsl.server.Directives
import org.scalatest.{ Matchers, WordSpec }
class SprayJsonExampleSpec extends WordSpec with Matchers {
"spray-json example" in {
//#example
import spray.json._
// domain model
final case class Item(name: String, id: Long)
final case class Order(items: List[Item])
// collect your json format instances into a support trait:
trait JsonSupport extends SprayJsonSupport with DefaultJsonProtocol {
implicit val itemFormat = jsonFormat2(Item)
implicit val orderFormat = jsonFormat1(Order) // contains List[Item]
}
// use it wherever json (un)marshalling is needed
class MyJsonService extends Directives with JsonSupport {
// format: OFF
val route =
get {
pathSingleSlash {
complete {
Item("thing", 42) // will render as JSON
}
}
} ~
post {
entity(as[Order]) { order => // will unmarshal JSON to Order
val itemsCount = order.items.size
val itemNames = order.items.map(_.name).mkString(", ")
complete(s"Ordered $itemsCount items: $itemNames")
}
}
// format: ON
//#
}
}
}
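A brief, hedged usage sketch (not part of the commit): once a MyJsonService as defined above is instantiated, its route is bound exactly like the routes in the server examples earlier in this commit; the system name, host and port below are illustrative.

// assumes MyJsonService from the example above is visible
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.stream.ActorMaterializer

implicit val system = ActorSystem("json-example")
implicit val materializer = ActorMaterializer()

Http().bindAndHandle(new MyJsonService().route, "localhost", 8080)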


@@ -0,0 +1,29 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl
import akka.stream.{ Materializer, ActorMaterializer }
import akka.stream.testkit.AkkaSpec
class UnmarshalSpec extends AkkaSpec {
"use unmarshal" in {
import akka.http.scaladsl.unmarshalling.Unmarshal
import system.dispatcher // ExecutionContext
implicit val materializer: Materializer = ActorMaterializer()
import scala.concurrent.Await
import scala.concurrent.duration._
val intFuture = Unmarshal("42").to[Int]
val int = Await.result(intFuture, 1.second) // don't block in non-test code!
int shouldEqual 42
val boolFuture = Unmarshal("off").to[Boolean]
val bool = Await.result(boolFuture, 1.second) // don't block in non-test code!
bool shouldBe false
}
}


@@ -0,0 +1,84 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server
/*
import org.scalatest.Inside
import akka.http.scaladsl.server._
class CaseClassExtractionExamplesSpec extends RoutingSpec with Inside {
// FIXME: investigate why it doesn't work without this import
import akka.http.scaladsl.server.directives.ParameterDirectives.ParamMagnet
// format: OFF
"example-1" in {
case class Color(red: Int, green: Int, blue: Int)
val route =
path("color") {
parameters('red.as[Int], 'green.as[Int], 'blue.as[Int]) { (red, green, blue) =>
val color = Color(red, green, blue)
// ... route working with the `color` instance
null // hide
}
}
Get("/color?red=1&green=2&blue=3") ~> route ~> check { responseAs[String] shouldEqual "Color(1,2,3)" } // hide
}
"example-2" in {
case class Color(red: Int, green: Int, blue: Int)
val route =
path("color") {
parameters('red.as[Int], 'green.as[Int], 'blue.as[Int]).as(Color) { color =>
// ... route working with the `color` instance
null // hide
}
}
Get("/color?red=1&green=2&blue=3") ~> route ~> check { responseAs[String] shouldEqual "Color(1,2,3)" } // hide
}
"example-3" in {
case class Color(name: String, red: Int, green: Int, blue: Int)
val route =
(path("color" / Segment) & parameters('r.as[Int], 'g.as[Int], 'b.as[Int]))
.as(Color) { color =>
// ... route working with the `color` instance
null // hide
}
Get("/color/abc?r=1&g=2&b=3") ~> route ~> check { responseAs[String] shouldEqual "Color(abc,1,2,3)" } // hide
}
//# example-4
case class Color(name: String, red: Int, green: Int, blue: Int) {
require(!name.isEmpty, "color name must not be empty")
require(0 <= red && red <= 255, "red color component must be between 0 and 255")
require(0 <= green && green <= 255, "green color component must be between 0 and 255")
require(0 <= blue && blue <= 255, "blue color component must be between 0 and 255")
}
//#
"example 4 test" in {
val route =
(path("color" / Segment) &
parameters('r.as[Int], 'g.as[Int], 'b.as[Int])).as(Color) { color =>
doSomethingWith(color) // route working with the Color instance
}
Get("/color/abc?r=1&g=2&b=3") ~> route ~> check {
responseAs[String] shouldEqual "Color(abc,1,2,3)"
}
Get("/color/abc?r=1&g=2&b=345") ~> route ~> check {
inside(rejection) {
case ValidationRejection("requirement failed: blue color component must be between 0 and 255", _) =>
}
}
}
def doSomethingWith(x: Any) = complete(x.toString)
}*/


@@ -0,0 +1,109 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server
import akka.http.scaladsl.server._
import Directives._
import akka.http.scaladsl.testkit.ScalatestRouteTest
import org.scalatest._
class DirectiveExamplesSpec extends RoutingSpec {
// format: OFF
"example-1" in {
val route: Route =
path("order" / IntNumber) { id =>
get {
complete {
"Received GET request for order " + id
}
} ~
put {
complete {
"Received PUT request for order " + id
}
}
}
verify(route) // hide
}
"example-2" in {
def innerRoute(id: Int): Route =
get {
complete {
"Received GET request for order " + id
}
} ~
put {
complete {
"Received PUT request for order " + id
}
}
val route: Route = path("order" / IntNumber) { id => innerRoute(id) }
verify(route) // hide
}
"example-3" in {
val route =
path("order" / IntNumber) { id =>
(get | put) { ctx =>
ctx.complete(s"Received ${ctx.request.method.name} request for order $id")
}
}
verify(route) // hide
}
"example-4" in {
val route =
path("order" / IntNumber) { id =>
(get | put) {
extractMethod { m =>
complete(s"Received ${m.name} request for order $id")
}
}
}
verify(route) // hide
}
"example-5" in {
val getOrPut = get | put
val route =
path("order" / IntNumber) { id =>
getOrPut {
extractMethod { m =>
complete(s"Received ${m.name} request for order $id")
}
}
}
verify(route) // hide
}
"example-6" in {
val getOrPut = get | put
val route =
(path("order" / IntNumber) & getOrPut & extractMethod) { (id, m) =>
complete(s"Received ${m.name} request for order $id")
}
verify(route) // hide
}
"example-7" in {
val orderGetOrPutWithMethod =
path("order" / IntNumber) & (get | put) & extractMethod
val route =
orderGetOrPutWithMethod { (id, m) =>
complete(s"Received ${m.name} request for order $id")
}
verify(route) // hide
}
def verify(route: Route) = {
Get("/order/42") ~> route ~> check { responseAs[String] shouldEqual "Received GET request for order 42" }
Put("/order/42") ~> route ~> check { responseAs[String] shouldEqual "Received PUT request for order 42" }
Get("/") ~> route ~> check { handled shouldEqual false }
}
}


@@ -0,0 +1,95 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server
// format: OFF
object MyExplicitExceptionHandler {
//#explicit-handler-example
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.http.scaladsl.server._
import StatusCodes._
import Directives._
val myExceptionHandler = ExceptionHandler {
case _: ArithmeticException =>
extractUri { uri =>
println(s"Request to $uri could not be handled normally")
complete(HttpResponse(InternalServerError, entity = "Bad numbers, bad result!!!"))
}
}
object MyApp extends App {
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
val route: Route =
handleExceptions(myExceptionHandler) {
// ... some route structure
null // hide
}
Http().bindAndHandle(route, "localhost", 8080)
}
//#
}
object MyImplicitExceptionHandler {
//#implicit-handler-example
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.http.scaladsl.server._
import StatusCodes._
import Directives._
implicit def myExceptionHandler: ExceptionHandler =
ExceptionHandler {
case _: ArithmeticException =>
extractUri { uri =>
println(s"Request to $uri could not be handled normally")
complete(HttpResponse(InternalServerError, entity = "Bad numbers, bad result!!!"))
}
}
object MyApp extends App {
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
val route: Route =
// ... some route structure
null // hide
Http().bindAndHandle(route, "localhost", 8080)
}
//#
}
class ExceptionHandlerExamplesSpec extends RoutingSpec {
"test explicit example" in {
// tests:
Get() ~> handleExceptions(MyExplicitExceptionHandler.myExceptionHandler) {
_.complete((1 / 0).toString)
} ~> check {
responseAs[String] === "Bad numbers, bad result!!!"
}
}
"test implicit example" in {
import akka.http.scaladsl.server._
import MyImplicitExceptionHandler.myExceptionHandler
// tests:
Get() ~> Route.seal(ctx => ctx.complete((1 / 0).toString)) ~> check {
responseAs[String] === "Bad numbers, bad result!!!"
}
}
}


@@ -0,0 +1,96 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server
import java.io.File
import akka.actor.ActorRef
import akka.http.scaladsl.model.Multipart.FormData.BodyPart
import akka.stream.io.{ Framing }
import akka.stream.scaladsl._
import akka.http.scaladsl.model.Multipart
import akka.util.ByteString
import scala.concurrent.duration._
import scala.concurrent.Future
class FileUploadExamplesSpec extends RoutingSpec {
case class Video(file: File, title: String, author: String)
object db {
def create(video: Video): Future[Unit] = Future.successful(Unit)
}
"simple-upload" in {
val uploadVideo =
path("video") {
entity(as[Multipart.FormData]) { formData =>
// collect all parts of the multipart as they arrive, into a map
val allPartsF: Future[Map[String, Any]] = formData.parts.mapAsync[(String, Any)](1) {
case b: BodyPart if b.name == "file" =>
// stream into a file as the chunks of it arrive and return a future
// of the file where it got stored
val file = File.createTempFile("upload", "tmp")
b.entity.dataBytes.runWith(FileIO.toFile(file)).map(_ =>
(b.name -> file))
case b: BodyPart =>
// collect form field values
b.toStrict(2.seconds).map(strict =>
(b.name -> strict.entity.data.utf8String))
}.runFold(Map.empty[String, Any])((map, tuple) => map + tuple)
val done = allPartsF.map { allParts =>
// You would have some better validation/unmarshalling here
db.create(Video(
file = allParts("file").asInstanceOf[File],
title = allParts("title").asInstanceOf[String],
author = allParts("author").asInstanceOf[String]))
}
// when processing has finished, create a response for the user
onSuccess(allPartsF) { allParts =>
complete {
"ok!"
}
}
}
}
}
object MetadataActor {
case class Entry(id: Long, values: Seq[String])
}
val metadataActor: ActorRef = system.deadLetters
"stream-csv-upload" in {
val splitLines = Framing.delimiter(ByteString("\n"), 256)
val csvUploads =
path("metadata" / LongNumber) { id =>
entity(as[Multipart.FormData]) { formData =>
val done = formData.parts.mapAsync(1) {
case b: BodyPart if b.filename.exists(_.endsWith(".csv")) =>
b.entity.dataBytes
.via(splitLines)
.map(_.utf8String.split(",").toVector)
.runForeach(csv =>
metadataActor ! MetadataActor.Entry(id, csv))
case _ => Future.successful(Unit)
}.runWith(Sink.ignore)
// when processing has finished, create a response for the user
onSuccess(done) {
complete {
"ok!"
}
}
}
}
}
}


@@ -0,0 +1,62 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server
// format: OFF
//# source-quote
import org.scalatest.{ Matchers, WordSpec }
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.http.scaladsl.server._
import Directives._
class FullTestKitExampleSpec extends WordSpec with Matchers with ScalatestRouteTest {
val smallRoute =
get {
pathSingleSlash {
complete {
"Captain on the bridge!"
}
} ~
path("ping") {
complete("PONG!")
}
}
"The service" should {
"return a greeting for GET requests to the root path" in {
// tests:
Get() ~> smallRoute ~> check {
responseAs[String] shouldEqual "Captain on the bridge!"
}
}
"return a 'PONG!' response for GET requests to /ping" in {
// tests:
Get("/ping") ~> smallRoute ~> check {
responseAs[String] shouldEqual "PONG!"
}
}
"leave GET requests to other paths unhandled" in {
// tests:
Get("/kermit") ~> smallRoute ~> check {
handled shouldBe false
}
}
"return a MethodNotAllowed error for PUT requests to the root path" in {
// tests:
Put() ~> Route.seal(smallRoute) ~> check {
status === StatusCodes.MethodNotAllowed
responseAs[String] shouldEqual "HTTP method not allowed, supported methods: GET"
}
}
}
}
//#


@@ -0,0 +1,79 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server
// format: OFF
object MyRejectionHandler {
//#custom-handler-example
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.http.scaladsl.server._
import StatusCodes._
import Directives._
implicit def myRejectionHandler =
RejectionHandler.newBuilder()
.handle { case MissingCookieRejection(cookieName) =>
complete(HttpResponse(BadRequest, entity = "No cookies, no service!!!"))
}
.handle { case AuthorizationFailedRejection =>
complete((Forbidden, "You're out of your depth!"))
}
.handle { case ValidationRejection(msg, _) =>
complete((InternalServerError, "That wasn't valid! " + msg))
}
.handleAll[MethodRejection] { methodRejections =>
val names = methodRejections.map(_.supported.name)
complete((MethodNotAllowed, s"Can't do that! Supported: ${names mkString " or "}!"))
}
.handleNotFound { complete((NotFound, "Not here!")) }
.result()
object MyApp extends App {
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
val route: Route =
// ... some route structure
null // hide
Http().bindAndHandle(route, "localhost", 8080)
}
//#
}
class RejectionHandlerExamplesSpec extends RoutingSpec {
import MyRejectionHandler._
"example-1" in {
import akka.http.scaladsl.coding.Gzip
val route =
path("order") {
get {
complete("Received GET")
} ~
post {
decodeRequestWith(Gzip) {
complete("Received compressed POST")
}
}
}
}
"test custom handler example" in {
import akka.http.scaladsl.server._
val route = Route.seal(reject(MissingCookieRejection("abc")))
// tests:
Get() ~> route ~> check {
responseAs[String] === "No cookies, no service!!!"
}
}
}


@@ -0,0 +1,11 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server
import akka.http.scaladsl.server.Directives
import akka.http.scaladsl.testkit.ScalatestRouteTest
import org.scalatest.{ Matchers, WordSpec }
abstract class RoutingSpec extends WordSpec with Matchers with Directives with ScalatestRouteTest


@@ -0,0 +1,109 @@
/*
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server
import akka.http.scaladsl.model.ws.BinaryMessage
import akka.stream.scaladsl.Sink
import org.scalatest.{ Matchers, WordSpec }
class WebsocketExampleSpec extends WordSpec with Matchers {
"core-example" in {
pending // compile-time only test
//#websocket-example-using-core
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Source, Flow }
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.ws.UpgradeToWebsocket
import akka.http.scaladsl.model.ws.{ TextMessage, Message }
import akka.http.scaladsl.model.{ HttpResponse, Uri, HttpRequest }
import akka.http.scaladsl.model.HttpMethods._
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
//#websocket-handler
// The Greeter WebSocket Service expects a "name" per message and
// returns a greeting message for that name
val greeterWebsocketService =
Flow[Message]
.mapConcat {
// we match but don't actually consume the text message here,
// rather we simply stream it back as the tail of the response
// this means we might start sending the response even before the
// end of the incoming message has been received
case tm: TextMessage => TextMessage(Source.single("Hello ") ++ tm.textStream) :: Nil
case bm: BinaryMessage =>
// ignore binary messages but drain content to avoid the stream being clogged
bm.dataStream.runWith(Sink.ignore)
Nil
}
//#websocket-handler
//#websocket-request-handling
val requestHandler: HttpRequest => HttpResponse = {
case req @ HttpRequest(GET, Uri.Path("/greeter"), _, _, _) =>
req.header[UpgradeToWebsocket] match {
case Some(upgrade) => upgrade.handleMessages(greeterWebsocketService)
case None => HttpResponse(400, entity = "Not a valid websocket request!")
}
case _: HttpRequest => HttpResponse(404, entity = "Unknown resource!")
}
//#websocket-request-handling
val bindingFuture =
Http().bindAndHandleSync(requestHandler, interface = "localhost", port = 8080)
println(s"Server online at http://localhost:8080/\nPress RETURN to stop...")
Console.readLine()
import system.dispatcher // for the future transformations
bindingFuture
.flatMap(_.unbind()) // trigger unbinding from the port
.onComplete(_ => system.shutdown()) // and shutdown when done
}
"routing-example" in {
pending // compile-time only test
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Source, Flow }
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.ws.{ TextMessage, Message }
import akka.http.scaladsl.server.Directives
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
import Directives._
// The Greeter WebSocket Service expects a "name" per message and
// returns a greeting message for that name
val greeterWebsocketService =
Flow[Message]
.collect {
case tm: TextMessage => TextMessage(Source.single("Hello ") ++ tm.textStream)
// ignore binary messages
}
//#websocket-routing
val route =
path("greeter") {
get {
handleWebsocketMessages(greeterWebsocketService)
}
}
//#websocket-routing
val bindingFuture = Http().bindAndHandle(route, "localhost", 8080)
println(s"Server online at http://localhost:8080/\nPress RETURN to stop...")
Console.readLine()
import system.dispatcher // for the future transformations
bindingFuture
.flatMap(_.unbind()) // trigger unbinding from the port
.onComplete(_ => system.shutdown()) // and shutdown when done
}
}


@@ -0,0 +1,803 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import java.io.File
import akka.actor.ActorSystem
import akka.event.Logging
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.headers.{ Server, RawHeader }
import akka.http.scaladsl.server.RouteResult.{ Complete, Rejected }
import akka.http.scaladsl.server._
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ FileIO, Sink, Source }
import akka.util.ByteString
import docs.http.scaladsl.server.RoutingSpec
import scala.concurrent.Future
import scala.util.control.NonFatal
class BasicDirectivesExamplesSpec extends RoutingSpec {
"0extract" in {
//#0extract
val uriLength = extract(_.request.uri.toString.length)
val route =
uriLength { len =>
complete(s"The length of the request URI is $len")
}
// tests:
Get("/abcdef") ~> route ~> check {
responseAs[String] shouldEqual "The length of the request URI is 25"
}
//#
}
"0extractLog" in {
//#0extractLog
val route =
extractLog { log =>
log.debug("I'm logging things in much detail..!")
complete("It's amazing!")
}
// tests:
Get("/abcdef") ~> route ~> check {
responseAs[String] shouldEqual "It's amazing!"
}
//#
}
"withMaterializer-0" in {
//#withMaterializer-0
val special = ActorMaterializer(namePrefix = Some("special"))
def sample() =
path("sample") {
extractMaterializer { mat =>
complete {
// explicitly use the materializer:
Source.single(s"Materialized by ${mat.##}!")
.runWith(Sink.head)(mat)
}
}
}
val route =
pathPrefix("special") {
withMaterializer(special) {
sample() // `special` materializer will be used
}
} ~ sample() // default materializer will be used
// tests:
Get("/sample") ~> route ~> check {
responseAs[String] shouldEqual s"Materialized by ${materializer.##}!"
}
Get("/special/sample") ~> route ~> check {
responseAs[String] shouldEqual s"Materialized by ${special.##}!"
}
//#
}
"extractMaterializer-0" in {
//#extractMaterializer-0
val route =
path("sample") {
extractMaterializer { materializer =>
complete {
// explicitly use the `materializer`:
Source.single(s"Materialized by ${materializer.##}!")
.runWith(Sink.head)(materializer)
}
}
} // default materializer will be used
// tests:
Get("/sample") ~> route ~> check {
responseAs[String] shouldEqual s"Materialized by ${materializer.##}!"
}
//#
}
"withExecutionContext-0" in compileOnlySpec {
//#withExecutionContext-0
val special = system.dispatchers.lookup("special")
def sample() =
path("sample") {
extractExecutionContext { implicit ec =>
complete {
Future(s"Run on ${ec.##}!") // uses the `ec` ExecutionContext
}
}
}
val route =
pathPrefix("special") {
withExecutionContext(special) {
sample() // `special` execution context will be used
}
} ~ sample() // default execution context will be used
// tests:
Get("/sample") ~> route ~> check {
responseAs[String] shouldEqual s"Run on ${system.dispatcher.##}!"
}
Get("/special/sample") ~> route ~> check {
responseAs[String] shouldEqual s"Run on ${special.##}!"
}
//#
}
"extractExecutionContext-0" in compileOnlySpec {
//#extractExecutionContext-0
def sample() =
path("sample") {
extractExecutionContext { implicit ec =>
complete {
Future(s"Run on ${ec.##}!") // uses the `ec` ExecutionContext
}
}
}
val route =
pathPrefix("special") {
sample() // default execution context will be used
}
// tests:
Get("/sample") ~> route ~> check {
responseAs[String] shouldEqual s"Run on ${system.dispatcher.##}!"
}
//#
}
"0withLog" in {
//#0withLog
val special = Logging(system, "SpecialRoutes")
def sample() =
path("sample") {
extractLog { implicit log =>
complete {
val msg = s"Logging using $log!"
log.debug(msg)
msg
}
}
}
val route =
pathPrefix("special") {
withLog(special) {
sample() // `special` logging adapter will be used
}
} ~ sample() // default logging adapter will be used
// tests:
Get("/sample") ~> route ~> check {
responseAs[String] shouldEqual s"Logging using ${system.log}!"
}
Get("/special/sample") ~> route ~> check {
responseAs[String] shouldEqual s"Logging using $special!"
}
//#
}
"withSettings-0" in compileOnlySpec {
//#withSettings-0
val special = RoutingSettings(system).copy(fileIODispatcher = "special-io-dispatcher")
def sample() =
path("sample") {
complete {
// internally uses the configured fileIODispatcher:
val source = FileIO.fromFile(new File("example.json"))
HttpResponse(entity = HttpEntity(ContentTypes.`application/json`, source))
}
}
val route =
get {
pathPrefix("special") {
withSettings(special) {
sample() // `special` file-io-dispatcher will be used to read the file
}
} ~ sample() // default file-io-dispatcher will be used to read the file
}
// tests:
Post("/special/sample") ~> route ~> check {
responseAs[String] shouldEqual s"{}"
}
Get("/sample") ~> route ~> check {
responseAs[String] shouldEqual "{}"
}
//#
}
"textract" in {
//#textract
val pathAndQuery = textract { ctx =>
val uri = ctx.request.uri
(uri.path, uri.query())
}
val route =
pathAndQuery { (p, query) =>
complete(s"The path is $p and the query is $query")
}
// tests:
Get("/abcdef?ghi=12") ~> route ~> check {
responseAs[String] shouldEqual "The path is /abcdef and the query is ghi=12"
}
//#
}
"tprovide" in {
//#tprovide
def provideStringAndLength(value: String) = tprovide((value, value.length))
val route =
provideStringAndLength("test") { (value, len) =>
complete(s"Value is $value and its length is $len")
}
// tests:
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "Value is test and its length is 4"
}
//#
}
"0mapResponse" in {
//#0mapResponse
def overwriteResultStatus(response: HttpResponse): HttpResponse =
response.copy(status = StatusCodes.BadGateway)
val route = mapResponse(overwriteResultStatus)(complete("abc"))
// tests:
Get("/abcdef?ghi=12") ~> route ~> check {
status shouldEqual StatusCodes.BadGateway
}
//#
}
"1mapResponse-advanced-json" in {
//#1mapResponse-advanced
trait ApiRoutes {
protected def system: ActorSystem
private val log = Logging(system, "ApiRoutes")
private val NullJsonEntity = HttpEntity(ContentTypes.`application/json`, "{}")
private def nonSuccessToEmptyJsonEntity(response: HttpResponse): HttpResponse =
response.status match {
case code if code.isSuccess => response
case code =>
log.warning("Dropping response entity since response status code was: {}", code)
response.copy(entity = NullJsonEntity)
}
/** Wrapper for all of our JSON API routes */
def apiRoute(innerRoutes: Route): Route =
mapResponse(nonSuccessToEmptyJsonEntity)(innerRoutes)
}
//#
import StatusCodes._
val __system = system
val routes = new ApiRoutes {
override protected def system = __system
}
import routes.apiRoute
//#1mapResponse-advanced
val route: Route =
apiRoute {
get {
complete(InternalServerError)
}
}
// tests:
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "{}"
}
//#
}
"mapRouteResult" in {
//#mapRouteResult
// this directive is a joke, don't do that :-)
val makeEverythingOk = mapRouteResult { r =>
r match {
case Complete(response) =>
// "Everything is OK!"
Complete(response.copy(status = 200))
case _ => r
}
}
val route =
makeEverythingOk {
// will actually render as 200 OK (!)
complete(StatusCodes.Accepted)
}
// tests:
Get("/") ~> route ~> check {
status shouldEqual StatusCodes.OK
}
//#
}
"mapRouteResultFuture" in {
//#mapRouteResultFuture
val tryRecoverAddServer = mapRouteResultFuture { fr =>
fr recover {
case ex: IllegalArgumentException =>
Complete(HttpResponse(StatusCodes.InternalServerError))
} map {
case Complete(res) => Complete(res.addHeader(Server("MyServer 1.0")))
case rest => rest
}
}
val route =
tryRecoverAddServer {
complete("Hello world!")
}
// tests:
Get("/") ~> route ~> check {
status shouldEqual StatusCodes.OK
header[Server] shouldEqual Some(Server("MyServer 1.0"))
}
//#
}
"mapResponseEntity" in {
//#mapResponseEntity
def prefixEntity(entity: ResponseEntity): ResponseEntity = entity match {
case HttpEntity.Strict(contentType, data) =>
HttpEntity.Strict(contentType, ByteString("test") ++ data)
case _ => throw new IllegalStateException("Unexpected entity type")
}
val prefixWithTest: Directive0 = mapResponseEntity(prefixEntity)
val route = prefixWithTest(complete("abc"))
// tests:
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "testabc"
}
//#
}
"mapResponseHeaders" in {
//#mapResponseHeaders
// adds all request headers to the response
val echoRequestHeaders = extract(_.request.headers).flatMap(respondWithHeaders)
val removeIdHeader = mapResponseHeaders(_.filterNot(_.lowercaseName == "id"))
val route =
removeIdHeader {
echoRequestHeaders {
complete("test")
}
}
// tests:
Get("/") ~> RawHeader("id", "12345") ~> RawHeader("id2", "67890") ~> route ~> check {
header("id") shouldEqual None
header("id2").get.value shouldEqual "67890"
}
//#
}
"mapInnerRoute" in {
//#mapInnerRoute
val completeWithInnerException =
mapInnerRoute { route =>
ctx =>
try {
route(ctx)
} catch {
case NonFatal(e) => ctx.complete(s"Got ${e.getClass.getSimpleName} '${e.getMessage}'")
}
}
val route =
completeWithInnerException {
complete(throw new IllegalArgumentException("BLIP! BLOP! Everything broke"))
}
// tests:
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "Got IllegalArgumentException 'BLIP! BLOP! Everything broke'"
}
//#
}
"mapRejections" in {
//#mapRejections
// ignore any rejections and replace them by AuthorizationFailedRejection
val replaceByAuthorizationFailed = mapRejections(_ => List(AuthorizationFailedRejection))
val route =
replaceByAuthorizationFailed {
path("abc")(complete("abc"))
}
// tests:
Get("/") ~> route ~> check {
rejection shouldEqual AuthorizationFailedRejection
}
Get("/abc") ~> route ~> check {
status shouldEqual StatusCodes.OK
}
//#
}
"recoverRejections" in {
//#recoverRejections
val authRejectionsToNothingToSeeHere = recoverRejections { rejections =>
if (rejections.exists(_.isInstanceOf[AuthenticationFailedRejection]))
Complete(HttpResponse(entity = "Nothing to see here, move along."))
else if (rejections == Nil) // see "Empty Rejections" for more details
Complete(HttpResponse(StatusCodes.NotFound, entity = "Literally nothing to see here."))
else
Rejected(rejections)
}
val neverAuth: Authenticator[String] = creds => None
val alwaysAuth: Authenticator[String] = creds => Some("id")
val route =
authRejectionsToNothingToSeeHere {
pathPrefix("auth") {
path("never") {
authenticateBasic("my-realm", neverAuth) { user =>
complete("Welcome to the bat-cave!")
}
} ~
path("always") {
authenticateBasic("my-realm", alwaysAuth) { user =>
complete("Welcome to the secret place!")
}
}
}
}
// tests:
Get("/auth/never") ~> route ~> check {
status shouldEqual StatusCodes.OK
responseAs[String] shouldEqual "Nothing to see here, move along."
}
Get("/auth/always") ~> route ~> check {
status shouldEqual StatusCodes.OK
responseAs[String] shouldEqual "Welcome to the secret place!"
}
Get("/auth/does_not_exist") ~> route ~> check {
status shouldEqual StatusCodes.NotFound
responseAs[String] shouldEqual "Literally nothing to see here."
}
//#
}
"recoverRejectionsWith" in {
//#recoverRejectionsWith
val authRejectionsToNothingToSeeHere = recoverRejectionsWith { rejections =>
Future {
// imagine checking rejections takes a longer time:
if (rejections.exists(_.isInstanceOf[AuthenticationFailedRejection]))
Complete(HttpResponse(entity = "Nothing to see here, move along."))
else
Rejected(rejections)
}
}
val neverAuth: Authenticator[String] = creds => None
val route =
authRejectionsToNothingToSeeHere {
pathPrefix("auth") {
path("never") {
authenticateBasic("my-realm", neverAuth) { user =>
complete("Welcome to the bat-cave!")
}
}
}
}
// tests:
Get("/auth/never") ~> route ~> check {
status shouldEqual StatusCodes.OK
responseAs[String] shouldEqual "Nothing to see here, move along."
}
//#
}
"0mapRequest" in {
//#0mapRequest
def transformToPostRequest(req: HttpRequest): HttpRequest = req.copy(method = HttpMethods.POST)
val route =
mapRequest(transformToPostRequest) {
extractRequest { req =>
complete(s"The request method was ${req.method.name}")
}
}
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "The request method was POST"
}
//#
}
"mapRequestContext" in {
//#mapRequestContext
val replaceRequest =
mapRequestContext(_.withRequest(HttpRequest(HttpMethods.POST)))
val route =
replaceRequest {
extractRequest { req =>
complete(req.method.value)
}
}
// tests:
Get("/abc/def/ghi") ~> route ~> check {
responseAs[String] shouldEqual "POST"
}
//#
}
"0mapRouteResult" in {
//#0mapRouteResult
val rejectAll = // not particularly useful directive
mapRouteResult {
case _ => Rejected(List(AuthorizationFailedRejection))
}
val route =
rejectAll {
complete("abc")
}
// tests:
Get("/") ~> route ~> check {
rejections.nonEmpty shouldEqual true
}
//#
}
"mapRouteResultPF" in {
//#mapRouteResultPF
case object MyCustomRejection extends Rejection
val rejectRejections = // not particularly useful directive
mapRouteResultPF {
case Rejected(_) => Rejected(List(AuthorizationFailedRejection))
}
val route =
rejectRejections {
reject(MyCustomRejection)
}
// tests:
Get("/") ~> route ~> check {
rejection shouldEqual AuthorizationFailedRejection
}
//#
}
"mapRouteResultWithPF-0" in {
//#mapRouteResultWithPF-0
case object MyCustomRejection extends Rejection
val rejectRejections = // not particularly useful directive
mapRouteResultWithPF {
case Rejected(_) => Future(Rejected(List(AuthorizationFailedRejection)))
}
val route =
rejectRejections {
reject(MyCustomRejection)
}
// tests:
Get("/") ~> route ~> check {
rejection shouldEqual AuthorizationFailedRejection
}
//#
}
"mapRouteResultWith-0" in {
//#mapRouteResultWith-0
case object MyCustomRejection extends Rejection
val rejectRejections = // not particularly useful directive
mapRouteResultWith { res =>
res match {
case Rejected(_) => Future(Rejected(List(AuthorizationFailedRejection)))
case _ => Future(res)
}
}
val route =
rejectRejections {
reject(MyCustomRejection)
}
// tests:
Get("/") ~> route ~> check {
rejection shouldEqual AuthorizationFailedRejection
}
//#
}
"pass" in {
//#pass
val route = pass(complete("abc"))
// tests:
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "abc"
}
//#
}
"0provide" in {
//#0provide
def providePrefixedString(value: String): Directive1[String] = provide("prefix:" + value)
val route =
providePrefixedString("test") { value =>
complete(value)
}
// tests:
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "prefix:test"
}
//#
}
"cancelRejections-filter-example" in {
//#cancelRejections-filter-example
def isMethodRejection: Rejection => Boolean = {
case MethodRejection(_) => true
case _ => false
}
val route =
cancelRejections(isMethodRejection) {
post {
complete("Result")
}
}
// tests:
Get("/") ~> route ~> check {
rejections shouldEqual Nil
handled shouldEqual false
}
//#
}
"cancelRejection-example" in {
//#cancelRejection-example
val route =
cancelRejection(MethodRejection(HttpMethods.POST)) {
post {
complete("Result")
}
}
// tests:
Get("/") ~> route ~> check {
rejections shouldEqual Nil
handled shouldEqual false
}
//#
}
"extractRequest-example" in {
//#extractRequest-example
val route =
extractRequest { request =>
complete(s"Request method is ${request.method.name} and content-type is ${request.entity.contentType}")
}
// tests:
Post("/", "text") ~> route ~> check {
responseAs[String] shouldEqual "Request method is POST and content-type is text/plain; charset=UTF-8"
}
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "Request method is GET and content-type is none/none"
}
//#
}
"extractSettings-examples" in {
//#extractSettings-examples
val route =
extractSettings { settings: RoutingSettings =>
complete(s"RoutingSettings.renderVanityFooter = ${settings.renderVanityFooter}")
}
// tests:
Get("/") ~> route ~> check {
responseAs[String] shouldEqual s"RoutingSettings.renderVanityFooter = true"
}
//#
}
"mapSettings-examples" in {
//#mapSettings-examples
val tunedSettings = mapSettings { settings =>
settings.copy(fileGetConditional = false)
}
val route =
tunedSettings {
extractSettings { settings: RoutingSettings =>
complete(s"RoutingSettings.fileGetConditional = ${settings.fileGetConditional}")
}
}
// tests:
Get("/") ~> route ~> check {
responseAs[String] shouldEqual s"RoutingSettings.fileGetConditional = false"
}
//#
}
"extractRequestContext-example" in {
//#extractRequestContext-example
val route =
extractRequestContext { ctx =>
ctx.log.debug("Using access to additional context availablethings, like the logger.")
val request = ctx.request
complete(s"Request method is ${request.method.name} and content-type is ${request.entity.contentType}")
}
// tests:
Post("/", "text") ~> route ~> check {
responseAs[String] shouldEqual "Request method is POST and content-type is text/plain; charset=UTF-8"
}
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "Request method is GET and content-type is none/none"
}
//#
}
"extractUri-example" in {
//#extractUri-example
val route =
extractUri { uri =>
complete(s"Full URI: $uri")
}
// tests:
Get("/") ~> route ~> check {
// tests are executed with the host assumed to be "example.com"
responseAs[String] shouldEqual "Full URI: http://example.com/"
}
Get("/test") ~> route ~> check {
responseAs[String] shouldEqual "Full URI: http://example.com/test"
}
//#
}
"mapUnmatchedPath-example" in {
//#mapUnmatchedPath-example
def ignore456(path: Uri.Path) = path match {
case s @ Uri.Path.Segment(head, tail) if head.startsWith("456") =>
val newHead = head.drop(3)
if (newHead.isEmpty) tail
else s.copy(head = head.drop(3))
case _ => path
}
val ignoring456 = mapUnmatchedPath(ignore456)
val route =
pathPrefix("123") {
ignoring456 {
path("abc") {
complete(s"Content")
}
}
}
// tests:
Get("/123/abc") ~> route ~> check {
responseAs[String] shouldEqual "Content"
}
Get("/123456/abc") ~> route ~> check {
responseAs[String] shouldEqual "Content"
}
//#
}
"extractUnmatchedPath-example" in {
//#extractUnmatchedPath-example
val route =
pathPrefix("abc") {
extractUnmatchedPath { remaining =>
complete(s"Unmatched: '$remaining'")
}
}
// tests:
Get("/abc") ~> route ~> check {
responseAs[String] shouldEqual "Unmatched: ''"
}
Get("/abc/456") ~> route ~> check {
responseAs[String] shouldEqual "Unmatched: '/456'"
}
//#
}
private def compileOnlySpec(block: => Unit) = pending
}


@@ -0,0 +1,126 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.coding._
import docs.http.scaladsl.server.RoutingSpec
import akka.http.scaladsl.model.{ HttpResponse, StatusCodes }
import akka.http.scaladsl.model.headers.{ HttpEncodings, HttpEncoding, `Accept-Encoding`, `Content-Encoding` }
import akka.http.scaladsl.model.headers.HttpEncodings._
import akka.http.scaladsl.server._
import akka.util.ByteString
import org.scalatest.matchers.Matcher
class CodingDirectivesExamplesSpec extends RoutingSpec {
"responseEncodingAccepted" in {
val route = responseEncodingAccepted(gzip) { complete("content") }
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "content"
}
Get("/") ~> `Accept-Encoding`(deflate) ~> route ~> check {
rejection shouldEqual UnacceptedResponseEncodingRejection(gzip)
}
}
"encodeResponse" in {
val route = encodeResponse { complete("content") }
// tests:
Get("/") ~> route ~> check {
response should haveContentEncoding(identity)
}
Get("/") ~> `Accept-Encoding`(gzip, deflate) ~> route ~> check {
response should haveContentEncoding(gzip)
}
Get("/") ~> `Accept-Encoding`(deflate) ~> route ~> check {
response should haveContentEncoding(deflate)
}
Get("/") ~> `Accept-Encoding`(identity) ~> route ~> check {
response should haveContentEncoding(identity)
}
}
"encodeResponseWith" in {
val route = encodeResponseWith(Gzip) { complete("content") }
// tests:
Get("/") ~> route ~> check {
response should haveContentEncoding(gzip)
}
Get("/") ~> `Accept-Encoding`(gzip, deflate) ~> route ~> check {
response should haveContentEncoding(gzip)
}
Get("/") ~> `Accept-Encoding`(deflate) ~> route ~> check {
rejection shouldEqual UnacceptedResponseEncodingRejection(gzip)
}
Get("/") ~> `Accept-Encoding`(identity) ~> route ~> check {
rejection shouldEqual UnacceptedResponseEncodingRejection(gzip)
}
}
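  // request entities pre-compressed for the decodeRequest* examples below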
val helloGzipped = compress("Hello", Gzip)
val helloDeflated = compress("Hello", Deflate)
"decodeRequest" in {
val route =
decodeRequest {
entity(as[String]) { content: String =>
complete(s"Request content: '$content'")
}
}
// tests:
Post("/", helloGzipped) ~> `Content-Encoding`(gzip) ~> route ~> check {
responseAs[String] shouldEqual "Request content: 'Hello'"
}
Post("/", helloDeflated) ~> `Content-Encoding`(deflate) ~> route ~> check {
responseAs[String] shouldEqual "Request content: 'Hello'"
}
Post("/", "hello uncompressed") ~> `Content-Encoding`(identity) ~> route ~> check {
responseAs[String] shouldEqual "Request content: 'hello uncompressed'"
}
}
"decodeRequestWith-0" in {
val route =
decodeRequestWith(Gzip) {
entity(as[String]) { content: String =>
complete(s"Request content: '$content'")
}
}
// tests:
Post("/", helloGzipped) ~> `Content-Encoding`(gzip) ~> route ~> check {
responseAs[String] shouldEqual "Request content: 'Hello'"
}
Post("/", helloDeflated) ~> `Content-Encoding`(deflate) ~> route ~> check {
rejection shouldEqual UnsupportedRequestEncodingRejection(gzip)
}
Post("/", "hello") ~> `Content-Encoding`(identity) ~> route ~> check {
rejection shouldEqual UnsupportedRequestEncodingRejection(gzip)
}
}
"decodeRequestWith-1" in {
val route =
decodeRequestWith(Gzip, NoCoding) {
entity(as[String]) { content: String =>
complete(s"Request content: '$content'")
}
}
// tests:
Post("/", helloGzipped) ~> `Content-Encoding`(gzip) ~> route ~> check {
responseAs[String] shouldEqual "Request content: 'Hello'"
}
Post("/", helloDeflated) ~> `Content-Encoding`(deflate) ~> route ~> check {
rejections shouldEqual List(UnsupportedRequestEncodingRejection(gzip), UnsupportedRequestEncodingRejection(identity))
}
Post("/", "hello uncompressed") ~> `Content-Encoding`(identity) ~> route ~> check {
responseAs[String] shouldEqual "Request content: 'hello uncompressed'"
}
}
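  // test helpers: a matcher for the response's Content-Encoding header and a small compression utility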
def haveContentEncoding(encoding: HttpEncoding): Matcher[HttpResponse] =
be(encoding) compose { (_: HttpResponse).header[`Content-Encoding`].map(_.encodings.head).getOrElse(HttpEncodings.identity) }
def compress(input: String, encoder: Encoder): ByteString = encoder.encode(ByteString(input))
}

View file

@ -0,0 +1,70 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.server._
import akka.http.scaladsl.model.headers.{ HttpCookie, Cookie, `Set-Cookie` }
import docs.http.scaladsl.server.RoutingSpec
import akka.http.scaladsl.model.DateTime
class CookieDirectivesExamplesSpec extends RoutingSpec {
"cookie" in {
val route =
cookie("userName") { nameCookie =>
complete(s"The logged in user is '${nameCookie.value}'")
}
// tests:
Get("/") ~> Cookie("userName" -> "paul") ~> route ~> check {
responseAs[String] shouldEqual "The logged in user is 'paul'"
}
// missing cookie
Get("/") ~> route ~> check {
rejection shouldEqual MissingCookieRejection("userName")
}
Get("/") ~> Route.seal(route) ~> check {
responseAs[String] shouldEqual "Request is missing required cookie 'userName'"
}
}
"optionalCookie" in {
val route =
optionalCookie("userName") {
case Some(nameCookie) => complete(s"The logged in user is '${nameCookie.value}'")
case None => complete("No user logged in")
}
// tests:
Get("/") ~> Cookie("userName" -> "paul") ~> route ~> check {
responseAs[String] shouldEqual "The logged in user is 'paul'"
}
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "No user logged in"
}
}
"deleteCookie" in {
val route =
deleteCookie("userName") {
complete("The user was logged out")
}
// tests:
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "The user was logged out"
header[`Set-Cookie`] shouldEqual Some(`Set-Cookie`(HttpCookie("userName", value = "deleted", expires = Some(DateTime.MinValue))))
}
}
"setCookie" in {
val route =
setCookie(HttpCookie("userName", value = "paul")) {
complete("The user was logged in")
}
// tests:
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "The user was logged in"
header[`Set-Cookie`] shouldEqual Some(`Set-Cookie`(HttpCookie("userName", value = "paul")))
}
}
}

View file

@ -0,0 +1,72 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.server.{ Directive1, Directive }
import docs.http.scaladsl.server.RoutingSpec
class CustomDirectivesExamplesSpec extends RoutingSpec {
"labeling" in {
val getOrPut = get | put
// tests:
val route = getOrPut { complete("ok") }
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "ok"
}
Put("/") ~> route ~> check {
responseAs[String] shouldEqual "ok"
}
}
"map-0" in {
val textParam: Directive1[String] =
parameter("text".as[String])
val lengthDirective: Directive1[Int] =
textParam.map(text => text.length)
// tests:
Get("/?text=abcdefg") ~> lengthDirective(x => complete(x.toString)) ~> check {
responseAs[String] === "7"
}
}
"tmap-1" in {
val twoIntParameters: Directive[(Int, Int)] =
parameters(("a".as[Int], "b".as[Int]))
val myDirective: Directive1[String] =
twoIntParameters.tmap {
case (a, b) => (a + b).toString
}
// tests:
Get("/?a=2&b=5") ~> myDirective(x => complete(x)) ~> check {
responseAs[String] === "7"
}
}
"flatMap-0" in {
val intParameter: Directive1[Int] = parameter("a".as[Int])
val myDirective: Directive1[Int] =
intParameter.flatMap {
case a if a > 0 => provide(2 * a)
case _ => reject
}
// tests:
Get("/?a=21") ~> myDirective(i => complete(i.toString)) ~> check {
responseAs[String] === "42"
}
Get("/?a=-18") ~> myDirective(i => complete(i.toString)) ~> check {
handled === false
}
}
}

View file

@ -0,0 +1,97 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.event.Logging
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse }
import akka.http.scaladsl.server.directives.{ DebuggingDirectives, LogEntry, LoggingMagnet }
import docs.http.scaladsl.server.RoutingSpec
class DebuggingDirectivesExamplesSpec extends RoutingSpec {
"logRequest-0" in {
// different possibilities of using logRequest
// The first alternatives use an implicitly available LoggingContext for logging
// marks with "get-user", log with debug level, HttpRequest.toString
DebuggingDirectives.logRequest("get-user")
// marks with "get-user", log with info level, HttpRequest.toString
DebuggingDirectives.logRequest(("get-user", Logging.InfoLevel))
// logs just the request method at debug level
def requestMethod(req: HttpRequest): String = req.method.toString
DebuggingDirectives.logRequest(requestMethod _)
// logs just the request method at info level
def requestMethodAsInfo(req: HttpRequest): LogEntry = LogEntry(req.method.toString, Logging.InfoLevel)
DebuggingDirectives.logRequest(requestMethodAsInfo _)
// This one doesn't use the implicit LoggingContext but uses `println` for logging
def printRequestMethod(req: HttpRequest): Unit = println(req.method)
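    // `_ =>` ignores the LoggingAdapter that LoggingMagnet passes in, since we log via println instead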
val logRequestPrintln = DebuggingDirectives.logRequest(LoggingMagnet(_ => printRequestMethod))
// tests:
Get("/") ~> logRequestPrintln(complete("logged")) ~> check {
responseAs[String] shouldEqual "logged"
}
}
"logRequestResult" in {
    // different possibilities of using logRequestResult
// The first alternatives use an implicitly available LoggingContext for logging
// marks with "get-user", log with debug level, HttpRequest.toString, HttpResponse.toString
DebuggingDirectives.logRequestResult("get-user")
// marks with "get-user", log with info level, HttpRequest.toString, HttpResponse.toString
DebuggingDirectives.logRequestResult(("get-user", Logging.InfoLevel))
// logs just the request method and response status at info level
def requestMethodAndResponseStatusAsInfo(req: HttpRequest): Any => Option[LogEntry] = {
case res: HttpResponse => Some(LogEntry(req.method + ":" + res.status, Logging.InfoLevel))
case _ => None // other kind of responses
}
DebuggingDirectives.logRequestResult(requestMethodAndResponseStatusAsInfo _)
// This one doesn't use the implicit LoggingContext but uses `println` for logging
def printRequestMethodAndResponseStatus(req: HttpRequest)(res: Any): Unit =
println(requestMethodAndResponseStatusAsInfo(req)(res).map(_.obj.toString).getOrElse(""))
val logRequestResultPrintln = DebuggingDirectives.logRequestResult(LoggingMagnet(_ => printRequestMethodAndResponseStatus))
// tests:
Get("/") ~> logRequestResultPrintln(complete("logged")) ~> check {
responseAs[String] shouldEqual "logged"
}
}
"logResult" in {
    // different possibilities of using logResult
// The first alternatives use an implicitly available LoggingContext for logging
// marks with "get-user", log with debug level, HttpResponse.toString
DebuggingDirectives.logResult("get-user")
// marks with "get-user", log with info level, HttpResponse.toString
DebuggingDirectives.logResult(("get-user", Logging.InfoLevel))
// logs just the response status at debug level
def responseStatus(res: Any): String = res match {
case x: HttpResponse => x.status.toString
case _ => "unknown response part"
}
DebuggingDirectives.logResult(responseStatus _)
// logs just the response status at info level
def responseStatusAsInfo(res: Any): LogEntry = LogEntry(responseStatus(res), Logging.InfoLevel)
DebuggingDirectives.logResult(responseStatusAsInfo _)
// This one doesn't use the implicit LoggingContext but uses `println` for logging
def printResponseStatus(res: Any): Unit = println(responseStatus(res))
val logResultPrintln = DebuggingDirectives.logResult(LoggingMagnet(_ => printResponseStatus))
// tests:
Get("/") ~> logResultPrintln(complete("logged")) ~> check {
responseAs[String] shouldEqual "logged"
}
}
}

View file

@ -0,0 +1,62 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server._
import docs.http.scaladsl.server.RoutingSpec
class ExecutionDirectivesExamplesSpec extends RoutingSpec {
"handleExceptions" in {
val divByZeroHandler = ExceptionHandler {
case _: ArithmeticException => complete((StatusCodes.BadRequest, "You've got your arithmetic wrong, fool!"))
}
val route =
path("divide" / IntNumber / IntNumber) { (a, b) =>
handleExceptions(divByZeroHandler) {
complete(s"The result is ${a / b}")
}
}
// tests:
Get("/divide/10/5") ~> route ~> check {
responseAs[String] shouldEqual "The result is 2"
}
Get("/divide/10/0") ~> route ~> check {
status shouldEqual StatusCodes.BadRequest
responseAs[String] shouldEqual "You've got your arithmetic wrong, fool!"
}
}
"handleRejections" in {
val totallyMissingHandler = RejectionHandler.newBuilder()
.handleNotFound { complete((StatusCodes.NotFound, "Oh man, what you are looking for is long gone.")) }
.handle { case ValidationRejection(msg, _) => complete((StatusCodes.InternalServerError, msg)) }
.result()
val route =
pathPrefix("handled") {
handleRejections(totallyMissingHandler) {
path("existing")(complete("This path exists")) ~
path("boom")(reject(new ValidationRejection("This didn't work.")))
}
}
// tests:
Get("/handled/existing") ~> route ~> check {
responseAs[String] shouldEqual "This path exists"
}
Get("/missing") ~> Route.seal(route) /* applies default handler */ ~> check {
status shouldEqual StatusCodes.NotFound
responseAs[String] shouldEqual "The requested resource could not be found."
}
Get("/handled/missing") ~> route ~> check {
status shouldEqual StatusCodes.NotFound
responseAs[String] shouldEqual "Oh man, what you are looking for is long gone."
}
Get("/handled/boom") ~> route ~> check {
status shouldEqual StatusCodes.InternalServerError
responseAs[String] shouldEqual "This didn't work."
}
}
}

View file

@ -0,0 +1,106 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.marshalling.ToEntityMarshaller
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.directives.DirectoryListing
import akka.http.scaladsl.server.directives.FileAndResourceDirectives.DirectoryRenderer
import docs.http.scaladsl.server.RoutingSpec
class FileAndResourceDirectivesExamplesSpec extends RoutingSpec {
"getFromFile-examples" in compileOnlySpec {
import akka.http.scaladsl.server.directives._
import ContentTypeResolver.Default
val route =
path("logs" / Segment) { name =>
getFromFile(".log") // uses implicit ContentTypeResolver
}
// tests:
Get("/logs/example") ~> route ~> check {
responseAs[String] shouldEqual "example file contents"
}
}
"getFromResource-examples" in compileOnlySpec {
import akka.http.scaladsl.server.directives._
import ContentTypeResolver.Default
val route =
path("logs" / Segment) { name =>
getFromResource(".log") // uses implicit ContentTypeResolver
}
// tests:
Get("/logs/example") ~> route ~> check {
responseAs[String] shouldEqual "example file contents"
}
}
"listDirectoryContents-examples" in compileOnlySpec {
val route =
path("tmp") {
listDirectoryContents("/tmp")
} ~
path("custom") {
val renderer = new DirectoryRenderer {
override def marshaller(renderVanityFooter: Boolean): ToEntityMarshaller[DirectoryListing] = ???
}
listDirectoryContents("/tmp")(renderer)
}
// tests:
Get("/logs/example") ~> route ~> check {
responseAs[String] shouldEqual "example file contents"
}
}
"getFromBrowseableDirectory-examples" in compileOnlySpec {
val route =
path("tmp") {
getFromBrowseableDirectory("/tmp")
}
// tests:
Get("/tmp") ~> route ~> check {
status shouldEqual StatusCodes.OK
}
}
"getFromBrowseableDirectories-examples" in compileOnlySpec {
val route =
path("tmp") {
getFromBrowseableDirectories("/main", "/backups")
}
// tests:
Get("/tmp") ~> route ~> check {
status shouldEqual StatusCodes.OK
}
}
"getFromDirectory-examples" in compileOnlySpec {
val route =
path("tmp") {
getFromDirectory("/tmp")
}
// tests:
Get("/tmp/example") ~> route ~> check {
responseAs[String] shouldEqual "example file contents"
}
}
"getFromResourceDirectory-examples" in compileOnlySpec {
val route =
path("examples") {
getFromResourceDirectory("/examples")
}
// tests:
Get("/examples/example-1") ~> route ~> check {
responseAs[String] shouldEqual "example file contents"
}
}
private def compileOnlySpec(block: => Unit) = pending
}

View file

@ -0,0 +1,77 @@
/**
* Copyright (C) 2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.model._
import akka.stream.io.Framing
import akka.util.ByteString
import docs.http.scaladsl.server.RoutingSpec
import scala.concurrent.Future
class FileUploadDirectivesExamplesSpec extends RoutingSpec {
override def testConfigSource = "akka.actor.default-mailbox.mailbox-type = \"akka.dispatch.UnboundedMailbox\""
"uploadedFile" in {
val route =
uploadedFile("csv") {
case (metadata, file) =>
// do something with the file and file metadata ...
file.delete()
complete(StatusCodes.OK)
}
// tests:
val multipartForm =
Multipart.FormData(
Multipart.FormData.BodyPart.Strict(
"csv",
HttpEntity(ContentTypes.`text/plain(UTF-8)`, "1,5,7\n11,13,17"),
Map("filename" -> "data.csv")))
Post("/", multipartForm) ~> route ~> check {
status shouldEqual StatusCodes.OK
}
}
"fileUpload" in {
// adding integers as a service ;)
val route =
extractRequestContext { ctx =>
implicit val materializer = ctx.materializer
implicit val ec = ctx.executionContext
fileUpload("csv") {
case (metadata, byteSource) =>
val sumF: Future[Int] =
// sum the numbers as they arrive so that we can
// accept any size of file
byteSource.via(Framing.delimiter(ByteString("\n"), 1024))
.mapConcat(_.utf8String.split(",").toVector)
.map(_.toInt)
.runFold(0) { (acc, n) => acc + n }
onSuccess(sumF) { sum => complete(s"Sum: $sum") }
}
}
// tests:
val multipartForm =
Multipart.FormData(Multipart.FormData.BodyPart.Strict(
"csv",
HttpEntity(ContentTypes.`text/plain(UTF-8)`, "2,3,5\n7,11,13,17,23\n29,31,37\n"),
Map("filename" -> "primes.csv")))
Post("/", multipartForm) ~> route ~> check {
status shouldEqual StatusCodes.OK
responseAs[String] shouldEqual "Sum: 178"
}
}
}

View file

@ -0,0 +1,48 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.model._
import docs.http.scaladsl.server.RoutingSpec
class FormFieldDirectivesExamplesSpec extends RoutingSpec {
"formFields" in {
val route =
formFields('color, 'age.as[Int]) { (color, age) =>
complete(s"The color is '$color' and the age ten years ago was ${age - 10}")
}
// tests:
Post("/", FormData("color" -> "blue", "age" -> "68")) ~> route ~> check {
responseAs[String] shouldEqual "The color is 'blue' and the age ten years ago was 58"
}
Get("/") ~> Route.seal(route) ~> check {
status shouldEqual StatusCodes.BadRequest
responseAs[String] shouldEqual "Request is missing required form field 'color'"
}
}
"formField" in {
val route =
formField('color) { color =>
complete(s"The color is '$color'")
} ~
formField('id.as[Int]) { id =>
complete(s"The id is '$id'")
}
// tests:
Post("/", FormData("color" -> "blue")) ~> route ~> check {
responseAs[String] shouldEqual "The color is 'blue'"
}
Get("/") ~> Route.seal(route) ~> check {
status shouldEqual StatusCodes.BadRequest
responseAs[String] shouldEqual "Request is missing required form field 'color'"
}
}
}

View file

@ -0,0 +1,103 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import java.util.concurrent.TimeUnit
import docs.http.scaladsl.server.RoutingSpec
import scala.concurrent.Future
import scala.util.{ Success, Failure }
import akka.http.scaladsl.server.ExceptionHandler
import akka.actor.{ Actor, Props }
import akka.util.Timeout
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.Route
import StatusCodes._
// format: OFF
class FutureDirectivesExamplesSpec extends RoutingSpec {
object TestException extends Throwable
implicit val myExceptionHandler =
ExceptionHandler {
case TestException => ctx =>
ctx.complete((InternalServerError, "Unsuccessful future!"))
}
implicit val responseTimeout = Timeout(2, TimeUnit.SECONDS)
"onComplete" in {
def divide(a: Int, b: Int): Future[Int] = Future {
a / b
}
val route =
path("divide" / IntNumber / IntNumber) { (a, b) =>
onComplete(divide(a, b)) {
case Success(value) => complete(s"The result was $value")
case Failure(ex) => complete((InternalServerError, s"An error occurred: ${ex.getMessage}"))
}
}
// tests:
Get("/divide/10/2") ~> route ~> check {
responseAs[String] shouldEqual "The result was 5"
}
Get("/divide/10/0") ~> Route.seal(route) ~> check {
status shouldEqual InternalServerError
responseAs[String] shouldEqual "An error occurred: / by zero"
}
}
"onSuccess" in {
val route =
path("success") {
onSuccess(Future { "Ok" }) { extraction =>
complete(extraction)
}
} ~
path("failure") {
onSuccess(Future.failed[String](TestException)) { extraction =>
complete(extraction)
}
}
// tests:
Get("/success") ~> route ~> check {
responseAs[String] shouldEqual "Ok"
}
Get("/failure") ~> Route.seal(route) ~> check {
status shouldEqual InternalServerError
responseAs[String] shouldEqual "Unsuccessful future!"
}
}
"completeOrRecoverWith" in {
val route =
path("success") {
completeOrRecoverWith(Future { "Ok" }) { extraction =>
failWith(extraction) // not executed.
}
} ~
path("failure") {
completeOrRecoverWith(Future.failed[String](TestException)) { extraction =>
failWith(extraction)
}
}
// tests:
Get("/success") ~> route ~> check {
responseAs[String] shouldEqual "Ok"
}
Get("/failure") ~> Route.seal(route) ~> check {
status shouldEqual InternalServerError
responseAs[String] shouldEqual "Unsuccessful future!"
}
}
}

View file

@ -0,0 +1,188 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.MissingHeaderRejection
import akka.http.scaladsl.server.Route
import docs.http.scaladsl.server.RoutingSpec
import headers._
import StatusCodes._
import org.scalatest.Inside
class HeaderDirectivesExamplesSpec extends RoutingSpec with Inside {
"headerValueByName-0" in {
val route =
headerValueByName("X-User-Id") { userId =>
complete(s"The user is $userId")
}
// tests:
Get("/") ~> RawHeader("X-User-Id", "Joe42") ~> route ~> check {
responseAs[String] shouldEqual "The user is Joe42"
}
Get("/") ~> Route.seal(route) ~> check {
status shouldEqual BadRequest
responseAs[String] shouldEqual "Request is missing required HTTP header 'X-User-Id'"
}
}
"headerValue-0" in {
def extractHostPort: HttpHeader => Option[Int] = {
case h: `Host` => Some(h.port)
case x => None
}
val route =
headerValue(extractHostPort) { port =>
complete(s"The port was $port")
}
// tests:
Get("/") ~> Host("example.com", 5043) ~> route ~> check {
responseAs[String] shouldEqual "The port was 5043"
}
Get("/") ~> Route.seal(route) ~> check {
status shouldEqual NotFound
responseAs[String] shouldEqual "The requested resource could not be found."
}
}
"optionalHeaderValue-0" in {
def extractHostPort: HttpHeader => Option[Int] = {
case h: `Host` => Some(h.port)
case x => None
}
val route =
optionalHeaderValue(extractHostPort) {
case Some(port) => complete(s"The port was $port")
case None => complete(s"The port was not provided explicitly")
} ~ // can also be written as:
optionalHeaderValue(extractHostPort) { port =>
complete {
port match {
case Some(p) => s"The port was $p"
case _ => "The port was not provided explicitly"
}
}
}
// tests:
Get("/") ~> Host("example.com", 5043) ~> route ~> check {
responseAs[String] shouldEqual "The port was 5043"
}
Get("/") ~> Route.seal(route) ~> check {
responseAs[String] shouldEqual "The port was not provided explicitly"
}
}
"optionalHeaderValueByName-0" in {
val route =
optionalHeaderValueByName("X-User-Id") {
case Some(userId) => complete(s"The user is $userId")
case None => complete(s"No user was provided")
} ~ // can also be written as:
optionalHeaderValueByName("port") { port =>
complete {
port match {
case Some(p) => s"The user is $p"
case _ => "No user was provided"
}
}
}
// tests:
Get("/") ~> RawHeader("X-User-Id", "Joe42") ~> route ~> check {
responseAs[String] shouldEqual "The user is Joe42"
}
Get("/") ~> Route.seal(route) ~> check {
responseAs[String] shouldEqual "No user was provided"
}
}
"headerValuePF-0" in {
def extractHostPort: PartialFunction[HttpHeader, Int] = {
case h: `Host` => h.port
}
val route =
headerValuePF(extractHostPort) { port =>
complete(s"The port was $port")
}
// tests:
Get("/") ~> Host("example.com", 5043) ~> route ~> check {
responseAs[String] shouldEqual "The port was 5043"
}
Get("/") ~> Route.seal(route) ~> check {
status shouldEqual NotFound
responseAs[String] shouldEqual "The requested resource could not be found."
}
}
"optionalHeaderValuePF-0" in {
def extractHostPort: PartialFunction[HttpHeader, Int] = {
case h: `Host` => h.port
}
val route =
optionalHeaderValuePF(extractHostPort) {
case Some(port) => complete(s"The port was $port")
case None => complete(s"The port was not provided explicitly")
} ~ // can also be written as:
optionalHeaderValuePF(extractHostPort) { port =>
complete {
port match {
case Some(p) => s"The port was $p"
case _ => "The port was not provided explicitly"
}
}
}
// tests:
Get("/") ~> Host("example.com", 5043) ~> route ~> check {
responseAs[String] shouldEqual "The port was 5043"
}
Get("/") ~> Route.seal(route) ~> check {
responseAs[String] shouldEqual "The port was not provided explicitly"
}
}
"headerValueByType-0" in {
val route =
      headerValueByType[Origin]() { origin =>
complete(s"The first origin was ${origin.origins.head}")
}
val originHeader = Origin(HttpOrigin("http://localhost:8080"))
// tests:
// extract a header if the type is matching
Get("abc") ~> originHeader ~> route ~> check {
responseAs[String] shouldEqual "The first origin was http://localhost:8080"
}
// reject a request if no header of the given type is present
Get("abc") ~> route ~> check {
      inside(rejection) { case MissingHeaderRejection("Origin") => }
}
}
"optionalHeaderValueByType-0" in {
val route =
optionalHeaderValueByType[Origin]() {
        case Some(origin) => complete(s"The first origin was ${origin.origins.head}")
        case None         => complete("No Origin header found.")
}
val originHeader = Origin(HttpOrigin("http://localhost:8080"))
// tests:
// extract Some(header) if the type is matching
Get("abc") ~> originHeader ~> route ~> check {
responseAs[String] shouldEqual "The first origin was http://localhost:8080"
}
// extract None if no header of the given type is present
Get("abc") ~> route ~> check {
responseAs[String] shouldEqual "No Origin header found."
}
}
}

View file

@ -0,0 +1,92 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.model._
import docs.http.scaladsl.server.RoutingSpec
import headers._
import StatusCodes._
class HostDirectivesExamplesSpec extends RoutingSpec {
"extractHost" in {
val route =
extractHost { hn =>
complete(s"Hostname: $hn")
}
// tests:
Get() ~> Host("company.com", 9090) ~> route ~> check {
status shouldEqual OK
responseAs[String] shouldEqual "Hostname: company.com"
}
}
"list-of-hosts" in {
val route =
host("api.company.com", "rest.company.com") {
complete("Ok")
}
// tests:
Get() ~> Host("rest.company.com") ~> route ~> check {
status shouldEqual OK
responseAs[String] shouldEqual "Ok"
}
Get() ~> Host("notallowed.company.com") ~> route ~> check {
handled shouldBe false
}
}
"predicate" in {
val shortOnly: String => Boolean = (hostname) => hostname.length < 10
val route =
host(shortOnly) {
complete("Ok")
}
// tests:
Get() ~> Host("short.com") ~> route ~> check {
status shouldEqual OK
responseAs[String] shouldEqual "Ok"
}
Get() ~> Host("verylonghostname.com") ~> route ~> check {
handled shouldBe false
}
}
"using-regex" in {
val route =
host("api|rest".r) { prefix =>
complete(s"Extracted prefix: $prefix")
} ~
host("public.(my|your)company.com".r) { captured =>
complete(s"You came through $captured company")
}
// tests:
Get() ~> Host("api.company.com") ~> route ~> check {
status shouldEqual OK
responseAs[String] shouldEqual "Extracted prefix: api"
}
Get() ~> Host("public.mycompany.com") ~> route ~> check {
status shouldEqual OK
responseAs[String] shouldEqual "You came through my company"
}
}
"failing-regex" in {
an[IllegalArgumentException] should be thrownBy {
host("server-([0-9]).company.(com|net|org)".r) { target =>
complete("Will never complete :'(")
}
}
}
}

View file

@ -0,0 +1,86 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import docs.http.scaladsl.server.RoutingSpec
import akka.http.scaladsl.model.MediaTypes.`application/json`
import akka.http.scaladsl.model._
import spray.json.DefaultJsonProtocol
//# person-case-class
case class Person(name: String, favoriteNumber: Int)
//# person-json-support
object PersonJsonSupport extends DefaultJsonProtocol with SprayJsonSupport {
  implicit val personFormat = jsonFormat2(Person)
}
//#
class MarshallingDirectivesExamplesSpec extends RoutingSpec {
"example-entity-with-json" in {
import PersonJsonSupport._
val route = post {
entity(as[Person]) { person =>
complete(s"Person: ${person.name} - favorite number: ${person.favoriteNumber}")
}
}
// tests:
Post("/", HttpEntity(`application/json`, """{ "name": "Jane", "favoriteNumber" : 42 }""")) ~>
route ~> check {
responseAs[String] shouldEqual "Person: Jane - favorite number: 42"
}
}
"example-completeWith-with-json" in {
import PersonJsonSupport._
val findPerson = (f: Person => Unit) => {
//... some processing logic...
//complete the request
f(Person("Jane", 42))
}
val route = get {
completeWith(instanceOf[Person]) { completionFunction => findPerson(completionFunction) }
}
// tests:
Get("/") ~> route ~> check {
mediaType shouldEqual `application/json`
responseAs[String] should include(""""name": "Jane"""")
responseAs[String] should include(""""favoriteNumber": 42""")
}
}
"example-handleWith-with-json" in {
import PersonJsonSupport._
val updatePerson = (person: Person) => {
//... some processing logic...
//return the person
person
}
val route = post {
handleWith(updatePerson)
}
// tests:
Post("/", HttpEntity(`application/json`, """{ "name": "Jane", "favoriteNumber" : 42 }""")) ~>
route ~> check {
mediaType shouldEqual `application/json`
responseAs[String] should include(""""name": "Jane"""")
responseAs[String] should include(""""favoriteNumber": 42""")
}
}
}

View file

@ -0,0 +1,136 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.Route
import docs.http.scaladsl.server.RoutingSpec
class MethodDirectivesExamplesSpec extends RoutingSpec {
"delete-method" in {
val route = delete { complete("This is a DELETE request.") }
// tests:
Delete("/") ~> route ~> check {
responseAs[String] shouldEqual "This is a DELETE request."
}
}
"get-method" in {
val route = get { complete("This is a GET request.") }
// tests:
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "This is a GET request."
}
}
"head-method" in {
val route = head { complete("This is a HEAD request.") }
// tests:
Head("/") ~> route ~> check {
responseAs[String] shouldEqual "This is a HEAD request."
}
}
"options-method" in {
val route = options { complete("This is an OPTIONS request.") }
// tests:
Options("/") ~> route ~> check {
responseAs[String] shouldEqual "This is an OPTIONS request."
}
}
"patch-method" in {
val route = patch { complete("This is a PATCH request.") }
// tests:
Patch("/", "patch content") ~> route ~> check {
responseAs[String] shouldEqual "This is a PATCH request."
}
}
"post-method" in {
val route = post { complete("This is a POST request.") }
// tests:
Post("/", "post content") ~> route ~> check {
responseAs[String] shouldEqual "This is a POST request."
}
}
"put-method" in {
val route = put { complete("This is a PUT request.") }
// tests:
Put("/", "put content") ~> route ~> check {
responseAs[String] shouldEqual "This is a PUT request."
}
}
"method-example" in {
val route = method(HttpMethods.PUT) { complete("This is a PUT request.") }
// tests:
Put("/", "put content") ~> route ~> check {
responseAs[String] shouldEqual "This is a PUT request."
}
Get("/") ~> Route.seal(route) ~> check {
status shouldEqual StatusCodes.MethodNotAllowed
responseAs[String] shouldEqual "HTTP method not allowed, supported methods: PUT"
}
}
"extractMethod-example" in {
val route =
get {
complete("This is a GET request.")
} ~
extractMethod { method =>
complete(s"This ${method.name} request, clearly is not a GET!")
}
// tests:
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "This is a GET request."
}
Put("/") ~> route ~> check {
responseAs[String] shouldEqual "This PUT request, clearly is not a GET!"
}
Head("/") ~> route ~> check {
responseAs[String] shouldEqual "This HEAD request, clearly is not a GET!"
}
}
"overrideMethodWithParameter-0" in {
val route =
overrideMethodWithParameter("method") {
get {
complete("This looks like a GET request.")
} ~
post {
complete("This looks like a POST request.")
}
}
// tests:
Get("/?method=POST") ~> route ~> check {
responseAs[String] shouldEqual "This looks like a POST request."
}
Post("/?method=get") ~> route ~> check {
responseAs[String] shouldEqual "This looks like a GET request."
}
Get("/?method=hallo") ~> route ~> check {
status shouldEqual StatusCodes.NotImplemented
}
}
}

View file

@ -0,0 +1,96 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.model._
import akka.http.scaladsl.server._
import headers._
import docs.http.scaladsl.server.RoutingSpec
class MiscDirectivesExamplesSpec extends RoutingSpec {
"extractClientIP-example" in {
val route = extractClientIP { ip =>
complete("Client's ip is " + ip.toOption.map(_.getHostAddress).getOrElse("unknown"))
}
// tests:
Get("/").withHeaders(`Remote-Address`(RemoteAddress("192.168.3.12"))) ~> route ~> check {
responseAs[String] shouldEqual "Client's ip is 192.168.3.12"
}
}
"rejectEmptyResponse-example" in {
val route = rejectEmptyResponse {
path("even" / IntNumber) { i =>
complete {
// returns Some(evenNumberDescription) or None
Option(i).filter(_ % 2 == 0).map { num =>
s"Number $num is even."
}
}
}
}
// tests:
Get("/even/23") ~> Route.seal(route) ~> check {
status shouldEqual StatusCodes.NotFound
}
Get("/even/28") ~> route ~> check {
responseAs[String] shouldEqual "Number 28 is even."
}
}
"requestEntityEmptyPresent-example" in {
val route =
requestEntityEmpty {
complete("request entity empty")
} ~
requestEntityPresent {
complete("request entity present")
}
// tests:
Post("/", "text") ~> Route.seal(route) ~> check {
responseAs[String] shouldEqual "request entity present"
}
Post("/") ~> route ~> check {
responseAs[String] shouldEqual "request entity empty"
}
}
"selectPreferredLanguage-example" in {
val request = Get() ~> `Accept-Language`(
Language("en-US"),
Language("en") withQValue 0.7f,
LanguageRange.`*` withQValue 0.1f,
Language("de") withQValue 0.5f)
request ~> {
selectPreferredLanguage("en", "en-US") { lang
complete(lang.toString)
}
} ~> check { responseAs[String] shouldEqual "en-US" }
request ~> {
selectPreferredLanguage("de-DE", "hu") { lang
complete(lang.toString)
}
} ~> check { responseAs[String] shouldEqual "de-DE" }
}
"validate-example" in {
val route =
extractUri { uri =>
validate(uri.path.toString.size < 5, s"Path too long: '${uri.path.toString}'") {
complete(s"Full URI: $uri")
}
}
// tests:
Get("/234") ~> route ~> check {
responseAs[String] shouldEqual "Full URI: http://example.com/234"
}
Get("/abcdefghijkl") ~> route ~> check {
rejection shouldEqual ValidationRejection("Path too long: '/abcdefghijkl'", None)
}
}
}

View file

@ -0,0 +1,212 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.unmarshalling.PredefinedFromStringUnmarshallers
import docs.http.scaladsl.server.RoutingSpec
class ParameterDirectivesExamplesSpec extends RoutingSpec with PredefinedFromStringUnmarshallers {
"example-1" in {
val route =
parameter('color) { color =>
complete(s"The color is '$color'")
}
// tests:
Get("/?color=blue") ~> route ~> check {
responseAs[String] shouldEqual "The color is 'blue'"
}
Get("/") ~> Route.seal(route) ~> check {
status shouldEqual StatusCodes.NotFound
responseAs[String] shouldEqual "Request is missing required query parameter 'color'"
}
}
"required-1" in {
val route =
parameters('color, 'backgroundColor) { (color, backgroundColor) =>
complete(s"The color is '$color' and the background is '$backgroundColor'")
}
// tests:
Get("/?color=blue&backgroundColor=red") ~> route ~> check {
responseAs[String] shouldEqual "The color is 'blue' and the background is 'red'"
}
Get("/?color=blue") ~> Route.seal(route) ~> check {
status shouldEqual StatusCodes.NotFound
responseAs[String] shouldEqual "Request is missing required query parameter 'backgroundColor'"
}
}
"optional" in {
val route =
parameters('color, 'backgroundColor.?) { (color, backgroundColor) =>
val backgroundStr = backgroundColor.getOrElse("<undefined>")
complete(s"The color is '$color' and the background is '$backgroundStr'")
}
// tests:
Get("/?color=blue&backgroundColor=red") ~> route ~> check {
responseAs[String] shouldEqual "The color is 'blue' and the background is 'red'"
}
Get("/?color=blue") ~> route ~> check {
responseAs[String] shouldEqual "The color is 'blue' and the background is '<undefined>'"
}
}
"optional-with-default" in {
val route =
parameters('color, 'backgroundColor ? "white") { (color, backgroundColor) =>
complete(s"The color is '$color' and the background is '$backgroundColor'")
}
// tests:
Get("/?color=blue&backgroundColor=red") ~> route ~> check {
responseAs[String] shouldEqual "The color is 'blue' and the background is 'red'"
}
Get("/?color=blue") ~> route ~> check {
responseAs[String] shouldEqual "The color is 'blue' and the background is 'white'"
}
}
"required-value" in {
val route =
parameters('color, 'action ! "true") { (color) =>
complete(s"The color is '$color'.")
}
// tests:
Get("/?color=blue&action=true") ~> route ~> check {
responseAs[String] shouldEqual "The color is 'blue'."
}
Get("/?color=blue&action=false") ~> Route.seal(route) ~> check {
status shouldEqual StatusCodes.NotFound
responseAs[String] shouldEqual "The requested resource could not be found."
}
}
"mapped-value" in {
val route =
parameters('color, 'count.as[Int]) { (color, count) =>
complete(s"The color is '$color' and you have $count of it.")
}
// tests:
Get("/?color=blue&count=42") ~> route ~> check {
responseAs[String] shouldEqual "The color is 'blue' and you have 42 of it."
}
Get("/?color=blue&count=blub") ~> Route.seal(route) ~> check {
status shouldEqual StatusCodes.BadRequest
responseAs[String] shouldEqual "The query parameter 'count' was malformed:\n'blub' is not a valid 32-bit signed integer value"
}
}
"repeated" in {
val route =
parameters('color, 'city.*) { (color, cities) =>
cities.toList match {
case Nil => complete(s"The color is '$color' and there are no cities.")
case city :: Nil => complete(s"The color is '$color' and the city is $city.")
case multiple => complete(s"The color is '$color' and the cities are ${multiple.mkString(", ")}.")
}
}
// tests:
Get("/?color=blue") ~> route ~> check {
responseAs[String] === "The color is 'blue' and there are no cities."
}
Get("/?color=blue&city=Chicago") ~> Route.seal(route) ~> check {
responseAs[String] === "The color is 'blue' and the city is Chicago."
}
Get("/?color=blue&city=Chicago&city=Boston") ~> Route.seal(route) ~> check {
responseAs[String] === "The color is 'blue' and the cities are Chicago, Boston."
}
}
"mapped-repeated" in {
val route =
parameters('color, 'distance.as[Int].*) { (color, cities) =>
cities.toList match {
case Nil => complete(s"The color is '$color' and there are no distances.")
case distance :: Nil => complete(s"The color is '$color' and the distance is $distance.")
case multiple => complete(s"The color is '$color' and the distances are ${multiple.mkString(", ")}.")
}
}
// tests:
Get("/?color=blue") ~> route ~> check {
responseAs[String] === "The color is 'blue' and there are no distances."
}
Get("/?color=blue&distance=5") ~> Route.seal(route) ~> check {
responseAs[String] === "The color is 'blue' and the distance is 5."
}
Get("/?color=blue&distance=5&distance=14") ~> Route.seal(route) ~> check {
responseAs[String] === "The color is 'blue' and the distances are 5, 14."
}
}
"parameterMap" in {
val route =
parameterMap { params =>
def paramString(param: (String, String)): String = s"""${param._1} = '${param._2}'"""
complete(s"The parameters are ${params.map(paramString).mkString(", ")}")
}
// tests:
Get("/?color=blue&count=42") ~> route ~> check {
responseAs[String] shouldEqual "The parameters are color = 'blue', count = '42'"
}
Get("/?x=1&x=2") ~> route ~> check {
responseAs[String] shouldEqual "The parameters are x = '2'"
}
}
"parameterMultiMap" in {
val route =
parameterMultiMap { params =>
complete(s"There are parameters ${params.map(x => x._1 + " -> " + x._2.size).mkString(", ")}")
}
// tests:
Get("/?color=blue&count=42") ~> route ~> check {
responseAs[String] shouldEqual "There are parameters color -> 1, count -> 1"
}
Get("/?x=23&x=42") ~> route ~> check {
responseAs[String] shouldEqual "There are parameters x -> 2"
}
}
"parameterSeq" in {
val route =
parameterSeq { params =>
def paramString(param: (String, String)): String = s"""${param._1} = '${param._2}'"""
complete(s"The parameters are ${params.map(paramString).mkString(", ")}")
}
// tests:
Get("/?color=blue&count=42") ~> route ~> check {
responseAs[String] shouldEqual "The parameters are color = 'blue', count = '42'"
}
Get("/?x=1&x=2") ~> route ~> check {
responseAs[String] shouldEqual "The parameters are x = '1', x = '2'"
}
}
"csv" in {
val route =
parameter("names".as(CsvSeq[String])) { names =>
complete(s"The parameters are ${names.mkString(", ")}")
}
// tests:
Get("/?names=") ~> route ~> check {
responseAs[String] shouldEqual "The parameters are "
}
Get("/?names=Caplin") ~> route ~> check {
responseAs[String] shouldEqual "The parameters are Caplin"
}
Get("/?names=Caplin,John") ~> route ~> check {
responseAs[String] shouldEqual "The parameters are Caplin, John"
}
}
}

View file

@ -0,0 +1,380 @@
/*
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.server._
import docs.http.scaladsl.server.RoutingSpec
class PathDirectivesExamplesSpec extends RoutingSpec {
//# path-matcher
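  // matches e.g. /foo/bar/X42/edit (extracting Some(42)) or /foo/bar/X/create (extracting None)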
val matcher: PathMatcher1[Option[Int]] =
"foo" / "bar" / "X" ~ IntNumber.? / ("edit" | "create")
//#
//# path-dsl
// matches /foo/
path("foo"./)
// matches e.g. /foo/123 and extracts "123" as a String
path("foo" / """\d+""".r)
// matches e.g. /foo/bar123 and extracts "123" as a String
path("foo" / """bar(\d+)""".r)
// similar to `path(Segments)`
path(Segment.repeat(10, separator = Slash))
// matches e.g. /i42 or /hCAFE and extracts an Int
path("i" ~ IntNumber | "h" ~ HexIntNumber)
// identical to path("foo" ~ (PathEnd | Slash))
path("foo" ~ Slash.?)
// matches /red or /green or /blue and extracts 1, 2 or 3 respectively
path(Map("red" -> 1, "green" -> 2, "blue" -> 3))
// matches anything starting with "/foo" except for /foobar
pathPrefix("foo" ~ !"bar")
//#
//# pathPrefixTest-, rawPathPrefix-, rawPathPrefixTest-, pathSuffix-, pathSuffixTest-
val completeWithUnmatchedPath =
extractUnmatchedPath { p =>
complete(p.toString)
}
//#
"path-example" in {
val route =
path("foo") {
complete("/foo")
} ~
path("foo" / "bar") {
complete("/foo/bar")
} ~
pathPrefix("ball") {
pathEnd {
complete("/ball")
} ~
path(IntNumber) { int =>
complete(if (int % 2 == 0) "even ball" else "odd ball")
}
}
// tests:
Get("/") ~> route ~> check {
handled shouldEqual false
}
Get("/foo") ~> route ~> check {
responseAs[String] shouldEqual "/foo"
}
Get("/foo/bar") ~> route ~> check {
responseAs[String] shouldEqual "/foo/bar"
}
Get("/ball/1337") ~> route ~> check {
responseAs[String] shouldEqual "odd ball"
}
}
"pathEnd-" in {
val route =
pathPrefix("foo") {
pathEnd {
complete("/foo")
} ~
path("bar") {
complete("/foo/bar")
}
}
// tests:
Get("/foo") ~> route ~> check {
responseAs[String] shouldEqual "/foo"
}
Get("/foo/") ~> route ~> check {
handled shouldEqual false
}
Get("/foo/bar") ~> route ~> check {
responseAs[String] shouldEqual "/foo/bar"
}
}
"pathEndOrSingleSlash-" in {
val route =
pathPrefix("foo") {
pathEndOrSingleSlash {
complete("/foo")
} ~
path("bar") {
complete("/foo/bar")
}
}
// tests:
Get("/foo") ~> route ~> check {
responseAs[String] shouldEqual "/foo"
}
Get("/foo/") ~> route ~> check {
responseAs[String] shouldEqual "/foo"
}
Get("/foo/bar") ~> route ~> check {
responseAs[String] shouldEqual "/foo/bar"
}
}
"pathPrefix-" in {
val route =
pathPrefix("ball") {
pathEnd {
complete("/ball")
} ~
path(IntNumber) { int =>
complete(if (int % 2 == 0) "even ball" else "odd ball")
}
}
// tests:
Get("/") ~> route ~> check {
handled shouldEqual false
}
Get("/ball") ~> route ~> check {
responseAs[String] shouldEqual "/ball"
}
Get("/ball/1337") ~> route ~> check {
responseAs[String] shouldEqual "odd ball"
}
}
"pathPrefixTest-" in {
val route =
pathPrefixTest("foo" | "bar") {
pathPrefix("foo") { completeWithUnmatchedPath } ~
pathPrefix("bar") { completeWithUnmatchedPath }
}
// tests:
Get("/foo/doo") ~> route ~> check {
responseAs[String] shouldEqual "/doo"
}
Get("/bar/yes") ~> route ~> check {
responseAs[String] shouldEqual "/yes"
}
}
"pathSingleSlash-" in {
val route =
pathSingleSlash {
complete("root")
} ~
pathPrefix("ball") {
pathSingleSlash {
complete("/ball/")
} ~
path(IntNumber) { int =>
complete(if (int % 2 == 0) "even ball" else "odd ball")
}
}
// tests:
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "root"
}
Get("/ball") ~> route ~> check {
handled shouldEqual false
}
Get("/ball/") ~> route ~> check {
responseAs[String] shouldEqual "/ball/"
}
Get("/ball/1337") ~> route ~> check {
responseAs[String] shouldEqual "odd ball"
}
}
"pathSuffix-" in {
val route =
pathPrefix("start") {
pathSuffix("end") {
completeWithUnmatchedPath
} ~
pathSuffix("foo" / "bar" ~ "baz") {
completeWithUnmatchedPath
}
}
// tests:
Get("/start/middle/end") ~> route ~> check {
responseAs[String] shouldEqual "/middle/"
}
Get("/start/something/barbaz/foo") ~> route ~> check {
responseAs[String] shouldEqual "/something/"
}
}
"pathSuffixTest-" in {
val route =
pathSuffixTest(Slash) {
complete("slashed")
} ~
complete("unslashed")
// tests:
Get("/foo/") ~> route ~> check {
responseAs[String] shouldEqual "slashed"
}
Get("/foo") ~> route ~> check {
responseAs[String] shouldEqual "unslashed"
}
}
"rawPathPrefix-" in {
val route =
pathPrefix("foo") {
rawPathPrefix("bar") { completeWithUnmatchedPath } ~
rawPathPrefix("doo") { completeWithUnmatchedPath }
}
// tests:
Get("/foobar/baz") ~> route ~> check {
responseAs[String] shouldEqual "/baz"
}
Get("/foodoo/baz") ~> route ~> check {
responseAs[String] shouldEqual "/baz"
}
}
"rawPathPrefixTest-" in {
val route =
pathPrefix("foo") {
rawPathPrefixTest("bar") {
completeWithUnmatchedPath
}
}
// tests:
Get("/foobar") ~> route ~> check {
responseAs[String] shouldEqual "bar"
}
Get("/foobaz") ~> route ~> check {
handled shouldEqual false
}
}
"redirectToTrailingSlashIfMissing-0" in {
import akka.http.scaladsl.model.StatusCodes
val route =
redirectToTrailingSlashIfMissing(StatusCodes.MovedPermanently) {
path("foo"./) {
// We require the explicit trailing slash in the path
complete("OK")
} ~
path("bad-1") {
// MISTAKE!
        // Missing `/` in the path causes this route to never match,
// because it is inside a `redirectToTrailingSlashIfMissing`
???
} ~
path("bad-2/") {
// MISTAKE!
        // The `/` should be an explicit path element separator and not part of the path segment itself,
        // so it should be: "bad-2" /
???
}
}
// tests:
// Redirected:
Get("/foo") ~> route ~> check {
status shouldEqual StatusCodes.MovedPermanently
      // results in a nice human-readable message,
// in case the redirect can't be followed automatically:
responseAs[String] shouldEqual {
"This and all future requests should be directed to " +
"<a href=\"http://example.com/foo/\">this URI</a>."
}
}
// Properly handled:
Get("/foo/") ~> route ~> check {
status shouldEqual StatusCodes.OK
responseAs[String] shouldEqual "OK"
}
// MISTAKE! will never match - reason explained in routes
Get("/bad-1/") ~> route ~> check {
handled shouldEqual false
}
// MISTAKE! will never match - reason explained in routes
Get("/bad-2/") ~> route ~> check {
handled shouldEqual false
}
}
"redirectToNoTrailingSlashIfPresent-0" in {
import akka.http.scaladsl.model.StatusCodes
val route =
redirectToNoTrailingSlashIfPresent(StatusCodes.MovedPermanently) {
path("foo") {
        // We require the path without a trailing slash
complete("OK")
} ~
path("bad"./) {
// MISTAKE!
        // Inside a `redirectToNoTrailingSlashIfPresent` directive the matched path
        // will never contain a trailing slash,
        // so this path can never match.
//
// It should be `path("bad")` instead.
???
}
}
// tests:
// Redirected:
Get("/foo/") ~> route ~> check {
status shouldEqual StatusCodes.MovedPermanently
      // results in a nice human-readable message,
// in case the redirect can't be followed automatically:
responseAs[String] shouldEqual {
"This and all future requests should be directed to " +
"<a href=\"http://example.com/foo\">this URI</a>."
}
}
// Properly handled:
Get("/foo") ~> route ~> check {
status shouldEqual StatusCodes.OK
responseAs[String] shouldEqual "OK"
}
// MISTAKE! will never match - reason explained in routes
Get("/bad") ~> route ~> check {
handled shouldEqual false
}
}
}

View file

@ -0,0 +1,58 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.model._
import com.typesafe.config.{ ConfigFactory, Config }
import akka.util.ByteString
import headers._
import scala.concurrent.Await
import scala.concurrent.duration._
import docs.http.scaladsl.server.RoutingSpec
class RangeDirectivesExamplesSpec extends RoutingSpec {
override def testConfig: Config =
ConfigFactory.parseString("akka.http.routing.range-coalescing-threshold=2").withFallback(super.testConfig)
"withRangeSupport" in {
val route =
withRangeSupport {
complete("ABCDEFGH")
}
Get() ~> addHeader(Range(ByteRange(3, 4))) ~> route ~> check {
headers should contain(`Content-Range`(ContentRange(3, 4, 8)))
status shouldEqual StatusCodes.PartialContent
responseAs[String] shouldEqual "DE"
}
// we set "akka.http.routing.range-coalescing-threshold = 2"
// above to make sure we get two BodyParts
Get() ~> addHeader(Range(ByteRange(0, 1), ByteRange(1, 2), ByteRange(6, 7))) ~> route ~> check {
headers.collectFirst { case `Content-Range`(_, _) => true } shouldBe None
val responseF = responseAs[Multipart.ByteRanges].parts
.runFold[List[Multipart.ByteRanges.BodyPart]](Nil)((acc, curr) => curr :: acc)
val response = Await.result(responseF, 3.seconds).reverse
response should have length 2
val part1 = response(0)
      part1.contentRange shouldEqual ContentRange(0, 2, 8)
part1.entity should matchPattern {
case HttpEntity.Strict(_, bytes) if bytes.utf8String == "ABC" =>
}
val part2 = response(1)
      part2.contentRange shouldEqual ContentRange(6, 7, 8)
part2.entity should matchPattern {
case HttpEntity.Strict(_, bytes) if bytes.utf8String == "GH" =>
}
}
}
}

View file

@ -0,0 +1,83 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.model.headers._
import docs.http.scaladsl.server.RoutingSpec
class RespondWithDirectivesExamplesSpec extends RoutingSpec {
"respondWithHeader-0" in {
val route =
path("foo") {
respondWithHeader(RawHeader("Funky-Muppet", "gonzo")) {
complete("beep")
}
}
// tests:
Get("/foo") ~> route ~> check {
header("Funky-Muppet") shouldEqual Some(RawHeader("Funky-Muppet", "gonzo"))
responseAs[String] shouldEqual "beep"
}
}
"respondWithDefaultHeader-0" in {
// custom headers
val blippy = RawHeader("X-Fish-Name", "Blippy")
val elTonno = RawHeader("X-Fish-Name", "El Tonno")
// format: OFF
// by default always include the Blippy header,
// unless a more specific X-Fish-Name is given by the inner route
val route =
respondWithDefaultHeader(blippy) { // blippy
respondWithHeader(elTonno) { // / el tonno
path("el-tonno") { // | /
complete("¡Ay blippy!") // | |- el tonno
} ~ // | |
path("los-tonnos") { // | |
complete("¡Ay ay blippy!") // | |- el tonno
} // | |
} ~ // | x
complete("Blip!") // |- blippy
} // x
// format: ON
// tests:
Get("/") ~> route ~> check {
header("X-Fish-Name") shouldEqual Some(RawHeader("X-Fish-Name", "Blippy"))
responseAs[String] shouldEqual "Blip!"
}
Get("/el-tonno") ~> route ~> check {
header("X-Fish-Name") shouldEqual Some(RawHeader("X-Fish-Name", "El Tonno"))
responseAs[String] shouldEqual "¡Ay blippy!"
}
Get("/los-tonnos") ~> route ~> check {
header("X-Fish-Name") shouldEqual Some(RawHeader("X-Fish-Name", "El Tonno"))
responseAs[String] shouldEqual "¡Ay ay blippy!"
}
}
// format: ON
"respondWithHeaders-0" in {
val route =
path("foo") {
respondWithHeaders(RawHeader("Funky-Muppet", "gonzo"), Origin(HttpOrigin("http://akka.io"))) {
complete("beep")
}
}
// tests:
Get("/foo") ~> route ~> check {
header("Funky-Muppet") shouldEqual Some(RawHeader("Funky-Muppet", "gonzo"))
header[Origin] shouldEqual Some(Origin(HttpOrigin("http://akka.io")))
responseAs[String] shouldEqual "beep"
}
}
}

View file

@ -0,0 +1,100 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.{ Route, ValidationRejection }
import akka.testkit.EventFilter
import docs.http.scaladsl.server.RoutingSpec
class RouteDirectivesExamplesSpec extends RoutingSpec {
"complete-examples" in {
val route =
path("a") {
complete(HttpResponse(entity = "foo"))
} ~
path("b") {
complete((StatusCodes.Created, "bar"))
} ~
(path("c") & complete("baz")) // `&` also works with `complete` as the 2nd argument
// tests:
Get("/a") ~> route ~> check {
status shouldEqual StatusCodes.OK
responseAs[String] shouldEqual "foo"
}
Get("/b") ~> route ~> check {
status shouldEqual StatusCodes.Created
responseAs[String] shouldEqual "bar"
}
Get("/c") ~> route ~> check {
status shouldEqual StatusCodes.OK
responseAs[String] shouldEqual "baz"
}
}
"reject-examples" in {
val route =
path("a") {
reject // don't handle here, continue on
} ~
path("a") {
complete("foo")
} ~
path("b") {
// trigger a ValidationRejection explicitly
// rather than through the `validate` directive
reject(ValidationRejection("Restricted!"))
}
// tests:
Get("/a") ~> route ~> check {
responseAs[String] shouldEqual "foo"
}
Get("/b") ~> route ~> check {
rejection shouldEqual ValidationRejection("Restricted!")
}
}
"redirect-examples" in {
val route =
pathPrefix("foo") {
pathSingleSlash {
complete("yes")
} ~
pathEnd {
redirect("/foo/", StatusCodes.PermanentRedirect)
}
}
// tests:
Get("/foo/") ~> route ~> check {
responseAs[String] shouldEqual "yes"
}
Get("/foo") ~> route ~> check {
status shouldEqual StatusCodes.PermanentRedirect
responseAs[String] shouldEqual """The request, and all future requests should be repeated using <a href="/foo/">this URI</a>."""
}
}
"failwith-examples" in EventFilter[RuntimeException](start = "Error during processing of request", occurrences = 1).intercept {
val route =
path("foo") {
failWith(new RuntimeException("Oops."))
}
// tests:
Get("/foo") ~> Route.seal(route) ~> check {
status shouldEqual StatusCodes.InternalServerError
responseAs[String] shouldEqual "There was an internal server error."
}
}
}

View file

@ -0,0 +1,46 @@
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import docs.http.scaladsl.server.RoutingSpec
class SchemeDirectivesExamplesSpec extends RoutingSpec {
"example-1" in {
val route =
extractScheme { scheme =>
complete(s"The scheme is '${scheme}'")
}
// tests:
Get("https://www.example.com/") ~> route ~> check {
responseAs[String] shouldEqual "The scheme is 'https'"
}
}
"example-2" in {
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.headers.Location
import StatusCodes.MovedPermanently
val route =
scheme("http") {
        extract(_.request.uri) { uri =>
redirect(uri.copy(scheme = "https"), MovedPermanently)
}
} ~
scheme("https") {
complete(s"Safe and secure!")
}
// tests:
Get("http://www.example.com/hello") ~> route ~> check {
status shouldEqual MovedPermanently
header[Location] shouldEqual Some(Location(Uri("https://www.example.com/hello")))
}
Get("https://www.example.com/hello") ~> route ~> check {
responseAs[String] shouldEqual "Safe and secure!"
}
}
}

View file

@ -0,0 +1,286 @@
/*
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.headers._
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.server.directives.Credentials
import scala.concurrent.Future
import docs.http.scaladsl.server.RoutingSpec
class SecurityDirectivesExamplesSpec extends RoutingSpec {
"authenticateBasic-0" in {
def myUserPassAuthenticator(credentials: Credentials): Option[String] =
credentials match {
case p @ Credentials.Provided(id) if p.verify("p4ssw0rd") => Some(id)
case _ => None
}
val route =
Route.seal {
path("secured") {
authenticateBasic(realm = "secure site", myUserPassAuthenticator) { userName =>
complete(s"The user is '$userName'")
}
}
}
// tests:
Get("/secured") ~> route ~> check {
status shouldEqual StatusCodes.Unauthorized
responseAs[String] shouldEqual "The resource requires authentication, which was not supplied with the request"
header[`WWW-Authenticate`].get.challenges.head shouldEqual HttpChallenge("Basic", "secure site")
}
val validCredentials = BasicHttpCredentials("John", "p4ssw0rd")
Get("/secured") ~> addCredentials(validCredentials) ~> // adds Authorization header
route ~> check {
responseAs[String] shouldEqual "The user is 'John'"
}
val invalidCredentials = BasicHttpCredentials("Peter", "pan")
Get("/secured") ~>
addCredentials(invalidCredentials) ~> // adds Authorization header
route ~> check {
status shouldEqual StatusCodes.Unauthorized
responseAs[String] shouldEqual "The supplied authentication is invalid"
header[`WWW-Authenticate`].get.challenges.head shouldEqual HttpChallenge("Basic", "secure site")
}
}
"authenticateBasicPF-0" in {
val myUserPassAuthenticator: AuthenticatorPF[String] = {
case p @ Credentials.Provided(id) if p.verify("p4ssw0rd") => id
case p @ Credentials.Provided(id) if p.verify("p4ssw0rd-special") => s"$id-admin"
}
val route =
Route.seal {
path("secured") {
authenticateBasicPF(realm = "secure site", myUserPassAuthenticator) { userName =>
complete(s"The user is '$userName'")
}
}
}
// tests:
Get("/secured") ~> route ~> check {
status shouldEqual StatusCodes.Unauthorized
responseAs[String] shouldEqual "The resource requires authentication, which was not supplied with the request"
header[`WWW-Authenticate`].get.challenges.head shouldEqual HttpChallenge("Basic", "secure site")
}
val validCredentials = BasicHttpCredentials("John", "p4ssw0rd")
Get("/secured") ~> addCredentials(validCredentials) ~> // adds Authorization header
route ~> check {
responseAs[String] shouldEqual "The user is 'John'"
}
val validAdminCredentials = BasicHttpCredentials("John", "p4ssw0rd-special")
Get("/secured") ~> addCredentials(validAdminCredentials) ~> // adds Authorization header
route ~> check {
responseAs[String] shouldEqual "The user is 'John-admin'"
}
val invalidCredentials = BasicHttpCredentials("Peter", "pan")
Get("/secured") ~>
addCredentials(invalidCredentials) ~> // adds Authorization header
route ~> check {
status shouldEqual StatusCodes.Unauthorized
responseAs[String] shouldEqual "The supplied authentication is invalid"
header[`WWW-Authenticate`].get.challenges.head shouldEqual HttpChallenge("Basic", "secure site")
}
}
"authenticateBasicPFAsync-0" in {
case class User(id: String)
def fetchUser(id: String): Future[User] = {
// some fancy logic to obtain a User
Future.successful(User(id))
}
val myUserPassAuthenticator: AsyncAuthenticatorPF[User] = {
case p @ Credentials.Provided(id) if p.verify("p4ssw0rd") =>
fetchUser(id)
}
val route =
Route.seal {
path("secured") {
authenticateBasicPFAsync(realm = "secure site", myUserPassAuthenticator) { user =>
complete(s"The user is '${user.id}'")
}
}
}
// tests:
Get("/secured") ~> route ~> check {
status shouldEqual StatusCodes.Unauthorized
responseAs[String] shouldEqual "The resource requires authentication, which was not supplied with the request"
header[`WWW-Authenticate`].get.challenges.head shouldEqual HttpChallenge("Basic", "secure site")
}
val validCredentials = BasicHttpCredentials("John", "p4ssw0rd")
Get("/secured") ~> addCredentials(validCredentials) ~> // adds Authorization header
route ~> check {
responseAs[String] shouldEqual "The user is 'John'"
}
val invalidCredentials = BasicHttpCredentials("Peter", "pan")
Get("/secured") ~>
addCredentials(invalidCredentials) ~> // adds Authorization header
route ~> check {
status shouldEqual StatusCodes.Unauthorized
responseAs[String] shouldEqual "The supplied authentication is invalid"
header[`WWW-Authenticate`].get.challenges.head shouldEqual HttpChallenge("Basic", "secure site")
}
}
"authenticateBasicAsync-0" in {
def myUserPassAuthenticator(credentials: Credentials): Future[Option[String]] =
credentials match {
case p @ Credentials.Provided(id) =>
Future {
// potentially expensive verification, hence performed inside a Future
if (p.verify("p4ssw0rd")) Some(id)
else None
}
case _ => Future.successful(None)
}
val route =
Route.seal {
path("secured") {
authenticateBasicAsync(realm = "secure site", myUserPassAuthenticator) { userName =>
complete(s"The user is '$userName'")
}
}
}
// tests:
Get("/secured") ~> route ~> check {
status shouldEqual StatusCodes.Unauthorized
responseAs[String] shouldEqual "The resource requires authentication, which was not supplied with the request"
header[`WWW-Authenticate`].get.challenges.head shouldEqual HttpChallenge("Basic", "secure site")
}
val validCredentials = BasicHttpCredentials("John", "p4ssw0rd")
Get("/secured") ~> addCredentials(validCredentials) ~> // adds Authorization header
route ~> check {
responseAs[String] shouldEqual "The user is 'John'"
}
val invalidCredentials = BasicHttpCredentials("Peter", "pan")
Get("/secured") ~>
addCredentials(invalidCredentials) ~> // adds Authorization header
route ~> check {
status shouldEqual StatusCodes.Unauthorized
responseAs[String] shouldEqual "The supplied authentication is invalid"
header[`WWW-Authenticate`].get.challenges.head shouldEqual HttpChallenge("Basic", "secure site")
}
}
"authenticateOrRejectWithChallenge-0" in {
val challenge = HttpChallenge("MyAuth", "MyRealm")
// your custom authentication logic:
def auth(creds: HttpCredentials): Boolean = true
def myUserPassAuthenticator(credentials: Option[HttpCredentials]): Future[AuthenticationResult[String]] =
Future {
credentials match {
case Some(creds) if auth(creds) => Right("some-user-name-from-creds")
case _ => Left(challenge)
}
}
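// Returning Right(user) authenticates the request; returning Left(challenge)
// rejects it, and the challenge is rendered into the WWW-Authenticate header
// of the 401 response (verified in the test below).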
val route =
Route.seal {
path("secured") {
authenticateOrRejectWithChallenge(myUserPassAuthenticator _) { userName =>
complete("Authenticated!")
}
}
}
// tests:
Get("/secured") ~> route ~> check {
status shouldEqual StatusCodes.Unauthorized
responseAs[String] shouldEqual "The resource requires authentication, which was not supplied with the request"
header[`WWW-Authenticate`].get.challenges.head shouldEqual HttpChallenge("MyAuth", "MyRealm")
}
val validCredentials = BasicHttpCredentials("John", "p4ssw0rd")
Get("/secured") ~> addCredentials(validCredentials) ~> // adds Authorization header
route ~> check {
status shouldEqual StatusCodes.OK
responseAs[String] shouldEqual "Authenticated!"
}
}
"0authorize" in {
case class User(name: String)
// authenticate the user:
def myUserPassAuthenticator(credentials: Credentials): Option[User] =
credentials match {
case Credentials.Provided(id) => Some(User(id))
case _ => None
}
// check if user is authorized to perform admin actions:
val admins = Set("Peter")
def hasAdminPermissions(user: User): Boolean =
admins.contains(user.name)
val route =
Route.seal {
authenticateBasic(realm = "secure site", myUserPassAuthenticator) { user =>
path("peters-lair") {
authorize(hasAdminPermissions(user)) {
complete(s"'${user.name}' visited Peter's lair")
}
}
}
}
// tests:
val johnsCred = BasicHttpCredentials("John", "p4ssw0rd")
Get("/peters-lair") ~> addCredentials(johnsCred) ~> // adds Authorization header
route ~> check {
status shouldEqual StatusCodes.Forbidden
responseAs[String] shouldEqual "The supplied authentication is not authorized to access this resource"
}
val petersCred = BasicHttpCredentials("Peter", "pan")
Get("/peters-lair") ~> addCredentials(petersCred) ~> // adds Authorization header
route ~> check {
responseAs[String] shouldEqual "'Peter' visited Peter's lair"
}
}
"0extractCredentials" in {
val route =
extractCredentials { creds =>
complete {
creds match {
case Some(c) => "Credentials: " + c
case _ => "No credentials"
}
}
}
// tests:
val johnsCred = BasicHttpCredentials("John", "p4ssw0rd")
Get("/") ~> addCredentials(johnsCred) ~> // adds Authorization header
route ~> check {
responseAs[String] shouldEqual "Credentials: Basic Sm9objpwNHNzdzByZA=="
}
Get("/") ~> route ~> check {
responseAs[String] shouldEqual "No credentials"
}
}
}

View file

@@ -0,0 +1,105 @@
/*
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.http.scaladsl.server.directives
import scala.concurrent.duration._
import akka.util.ByteString
import akka.stream.OverflowStrategy
import akka.stream.scaladsl.{ Sink, Source, Flow }
import docs.http.scaladsl.server.RoutingSpec
import akka.http.scaladsl.model.ws.{ TextMessage, Message, BinaryMessage }
import akka.http.scaladsl.testkit.WSProbe
class WebsocketDirectivesExamplesSpec extends RoutingSpec {
"greeter-service" in {
def greeter: Flow[Message, Message, Any] =
Flow[Message].mapConcat {
case tm: TextMessage =>
TextMessage(Source.single("Hello ") ++ tm.textStream ++ Source.single("!")) :: Nil
case bm: BinaryMessage =>
// ignore binary messages but drain content to avoid the stream being clogged
bm.dataStream.runWith(Sink.ignore)
Nil
}
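// handleWebsocketMessages upgrades matching requests to a Websocket
// connection and runs the given flow to handle the messages of that connection.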
val websocketRoute =
path("greeter") {
handleWebsocketMessages(greeter)
}
// tests:
// create a testing probe representing the client-side
val wsClient = WSProbe()
// WS creates a Websocket request for testing
WS("/greeter", wsClient.flow) ~> websocketRoute ~>
check {
// check response for WS Upgrade headers
isWebsocketUpgrade shouldEqual true
// manually run a WS conversation
wsClient.sendMessage("Peter")
wsClient.expectMessage("Hello Peter!")
wsClient.sendMessage(BinaryMessage(ByteString("abcdef")))
wsClient.expectNoMessage(100.millis)
wsClient.sendMessage("John")
wsClient.expectMessage("Hello John!")
wsClient.sendCompletion()
wsClient.expectCompletion()
}
}
"handle-multiple-protocols" in {
def greeterService: Flow[Message, Message, Any] =
Flow[Message].mapConcat {
case tm: TextMessage =>
TextMessage(Source.single("Hello ") ++ tm.textStream ++ Source.single("!")) :: Nil
case bm: BinaryMessage =>
// ignore binary messages but drain content to avoid the stream being clogged
bm.dataStream.runWith(Sink.ignore)
Nil
}
def echoService: Flow[Message, Message, Any] =
Flow[Message]
// needed because a noop flow hasn't any buffer that would start processing in tests
.buffer(1, OverflowStrategy.backpressure)
def websocketMultipleProtocolRoute =
path("services") {
handleWebsocketMessagesForProtocol(greeterService, "greeter") ~
handleWebsocketMessagesForProtocol(echoService, "echo")
}
// tests:
val wsClient = WSProbe()
// WS creates a Websocket request for testing
WS("/services", wsClient.flow, List("other", "echo")) ~>
websocketMultipleProtocolRoute ~>
check {
expectWebsocketUpgradeWithProtocol { protocol =>
protocol shouldEqual "echo"
wsClient.sendMessage("Peter")
wsClient.expectMessage("Peter")
wsClient.sendMessage(BinaryMessage(ByteString("abcdef")))
wsClient.expectMessage(ByteString("abcdef"))
wsClient.sendMessage("John")
wsClient.expectMessage("John")
wsClient.sendCompletion()
wsClient.expectCompletion()
}
}
}
}

View file

@@ -0,0 +1,96 @@
/**
* Copyright (C) 2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream
import scala.annotation.tailrec
import akka.actor.Props
import akka.stream.ActorMaterializer
import akka.stream.actor.ActorPublisher
import akka.stream.scaladsl.{ Flow, Sink, Source }
import akka.stream.testkit.AkkaSpec
object ActorPublisherDocSpec {
//#job-manager
object JobManager {
def props: Props = Props[JobManager]
final case class Job(payload: String)
case object JobAccepted
case object JobDenied
}
class JobManager extends ActorPublisher[JobManager.Job] {
import akka.stream.actor.ActorPublisherMessage._
import JobManager._
val MaxBufferSize = 100
var buf = Vector.empty[Job]
def receive = {
case job: Job if buf.size == MaxBufferSize =>
sender() ! JobDenied
case job: Job =>
sender() ! JobAccepted
if (buf.isEmpty && totalDemand > 0)
onNext(job)
else {
buf :+= job
deliverBuf()
}
case Request(_) =>
deliverBuf()
case Cancel =>
context.stop(self)
}
@tailrec final def deliverBuf(): Unit =
if (totalDemand > 0) {
/*
* totalDemand is a Long and could be larger than
* what buf.splitAt can accept
*/
if (totalDemand <= Int.MaxValue) {
val (use, keep) = buf.splitAt(totalDemand.toInt)
buf = keep
use foreach onNext
} else {
val (use, keep) = buf.splitAt(Int.MaxValue)
buf = keep
use foreach onNext
deliverBuf()
}
}
}
//#job-manager
}
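// Usage: Source.actorPublisher materializes the JobManager as the ActorRef
// backing the source. Jobs sent to that ref are emitted downstream, buffered
// while there is no demand and delivered via deliverBuf() once the subscriber
// signals Request.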
class ActorPublisherDocSpec extends AkkaSpec {
import ActorPublisherDocSpec._
implicit val materializer = ActorMaterializer()
"illustrate usage of ActorPublisher" in {
def println(s: String): Unit =
testActor ! s
//#actor-publisher-usage
val jobManagerSource = Source.actorPublisher[JobManager.Job](JobManager.props)
val ref = Flow[JobManager.Job]
.map(_.payload.toUpperCase)
.map { elem => println(elem); elem }
.to(Sink.ignore)
.runWith(jobManagerSource)
ref ! JobManager.Job("a")
ref ! JobManager.Job("b")
ref ! JobManager.Job("c")
//#actor-publisher-usage
expectMsg("A")
expectMsg("B")
expectMsg("C")
}
}

View file

@@ -0,0 +1,89 @@
/**
* Copyright (C) 2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream
import akka.actor.Actor
import akka.actor.ActorRef
import akka.actor.Props
import akka.routing.ActorRefRoutee
import akka.routing.RoundRobinRoutingLogic
import akka.routing.Router
import akka.stream.ActorMaterializer
import akka.stream.actor.ActorSubscriber
import akka.stream.actor.ActorSubscriberMessage
import akka.stream.actor.MaxInFlightRequestStrategy
import akka.stream.scaladsl.Sink
import akka.stream.scaladsl.Source
import akka.stream.testkit.AkkaSpec
object ActorSubscriberDocSpec {
//#worker-pool
object WorkerPool {
case class Msg(id: Int, replyTo: ActorRef)
case class Work(id: Int)
case class Reply(id: Int)
case class Done(id: Int)
def props: Props = Props(new WorkerPool)
}
class WorkerPool extends ActorSubscriber {
import WorkerPool._
import ActorSubscriberMessage._
val MaxQueueSize = 10
var queue = Map.empty[Int, ActorRef]
val router = {
val routees = Vector.fill(3) {
ActorRefRoutee(context.actorOf(Props[Worker]))
}
Router(RoundRobinRoutingLogic(), routees)
}
override val requestStrategy = new MaxInFlightRequestStrategy(max = MaxQueueSize) {
override def inFlightInternally: Int = queue.size
}
def receive = {
case OnNext(Msg(id, replyTo)) =>
queue += (id -> replyTo)
assert(queue.size <= MaxQueueSize, s"queued too many: ${queue.size}")
router.route(Work(id), self)
case Reply(id) =>
queue(id) ! Done(id)
queue -= id
}
}
class Worker extends Actor {
import WorkerPool._
def receive = {
case Work(id) =>
// ...
sender() ! Reply(id)
}
}
//#worker-pool
}
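// The WorkerPool actor is attached as a Sink via Sink.actorSubscriber;
// elements arrive as OnNext messages, and MaxInFlightRequestStrategy limits
// the number of queued-but-unfinished jobs to MaxQueueSize by adjusting the
// demand signalled upstream.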
class ActorSubscriberDocSpec extends AkkaSpec {
import ActorSubscriberDocSpec._
implicit val materializer = ActorMaterializer()
"illustrate usage of ActorSubscriber" in {
val replyTo = testActor
//#actor-subscriber-usage
val N = 117
Source(1 to N).map(WorkerPool.Msg(_, replyTo))
.runWith(Sink.actorSubscriber(WorkerPool.props))
//#actor-subscriber-usage
receiveN(N).toSet should be((1 to N).map(WorkerPool.Done).toSet)
}
}

View file

@@ -0,0 +1,176 @@
/**
* Copyright (C) 2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream
import akka.stream.testkit.AkkaSpec
import akka.stream.scaladsl._
import akka.stream._
import akka.util.ByteString
import java.nio.ByteOrder
import akka.stream.stage._
import scala.annotation.tailrec
import scala.concurrent.duration._
import scala.concurrent.Await
import org.scalactic.ConversionCheckedTripleEquals
object BidiFlowDocSpec {
//#codec
trait Message
case class Ping(id: Int) extends Message
case class Pong(id: Int) extends Message
//#codec-impl
def toBytes(msg: Message): ByteString = {
//#implementation-details-elided
implicit val order = ByteOrder.LITTLE_ENDIAN
msg match {
case Ping(id) => ByteString.newBuilder.putByte(1).putInt(id).result()
case Pong(id) => ByteString.newBuilder.putByte(2).putInt(id).result()
}
//#implementation-details-elided
}
def fromBytes(bytes: ByteString): Message = {
//#implementation-details-elided
implicit val order = ByteOrder.LITTLE_ENDIAN
val it = bytes.iterator
it.getByte match {
case 1 => Ping(it.getInt)
case 2 => Pong(it.getInt)
case other => throw new RuntimeException(s"parse error: expected 1|2 got $other")
}
//#implementation-details-elided
}
//#codec-impl
val codecVerbose = BidiFlow.fromGraph(GraphDSL.create() { b =>
// construct and add the top flow, going outbound
val outbound = b.add(Flow[Message].map(toBytes))
// construct and add the bottom flow, going inbound
val inbound = b.add(Flow[ByteString].map(fromBytes))
// fuse them together into a BidiShape
BidiShape.fromFlows(outbound, inbound)
})
// this is the same as the above
val codec = BidiFlow.fromFunctions(toBytes _, fromBytes _)
//#codec
//#framing
val framing = BidiFlow.fromGraph(GraphDSL.create() { b =>
implicit val order = ByteOrder.LITTLE_ENDIAN
def addLengthHeader(bytes: ByteString) = {
val len = bytes.length
ByteString.newBuilder.putInt(len).append(bytes).result()
}
class FrameParser extends PushPullStage[ByteString, ByteString] {
// this holds the received but not yet parsed bytes
var stash = ByteString.empty
// this holds the current message length or -1 if at a boundary
var needed = -1
override def onPush(bytes: ByteString, ctx: Context[ByteString]) = {
stash ++= bytes
run(ctx)
}
override def onPull(ctx: Context[ByteString]) = run(ctx)
override def onUpstreamFinish(ctx: Context[ByteString]) =
if (stash.isEmpty) ctx.finish()
else ctx.absorbTermination() // we still have bytes to emit
private def run(ctx: Context[ByteString]): SyncDirective =
if (needed == -1) {
// are we at a boundary? then figure out next length
if (stash.length < 4) pullOrFinish(ctx)
else {
needed = stash.iterator.getInt
stash = stash.drop(4)
run(ctx) // cycle back to possibly already emit the next chunk
}
} else if (stash.length < needed) {
// we are in the middle of a message, need more bytes
pullOrFinish(ctx)
} else {
// we have enough to emit at least one message, so do it
val emit = stash.take(needed)
stash = stash.drop(needed)
needed = -1
ctx.push(emit)
}
/*
* After having called absorbTermination() we cannot pull any more, so if we need
* more data we will just have to give up.
*/
private def pullOrFinish(ctx: Context[ByteString]) =
if (ctx.isFinishing) ctx.finish()
else ctx.pull()
}
val outbound = b.add(Flow[ByteString].map(addLengthHeader))
val inbound = b.add(Flow[ByteString].transform(() => new FrameParser))
BidiShape.fromFlows(outbound, inbound)
})
//#framing
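// Test helpers: chopUp splits the byte stream into single-byte chunks and
// accumulate re-groups chunks, exercising the FrameParser against fragmented
// as well as coalesced input.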
val chopUp = BidiFlow.fromGraph(GraphDSL.create() { b =>
val f = Flow[ByteString].mapConcat(_.map(ByteString(_)))
BidiShape.fromFlows(b.add(f), b.add(f))
})
val accumulate = BidiFlow.fromGraph(GraphDSL.create() { b =>
val f = Flow[ByteString].grouped(1000).map(_.fold(ByteString.empty)(_ ++ _))
BidiShape.fromFlows(b.add(f), b.add(f))
})
}
class BidiFlowDocSpec extends AkkaSpec with ConversionCheckedTripleEquals {
import BidiFlowDocSpec._
implicit val materializer = ActorMaterializer()
"A BidiFlow" must {
"compose" in {
//#compose
/* construct protocol stack
* +------------------------------------+
* | stack |
* | |
* | +-------+ +---------+ |
* ~> O~~o | ~> | o~~O ~>
* Message | | codec | ByteString | framing | | ByteString
* <~ O~~o | <~ | o~~O <~
* | +-------+ +---------+ |
* +------------------------------------+
*/
val stack = codec.atop(framing)
// test it by plugging it into its own inverse and closing the right end
val pingpong = Flow[Message].collect { case Ping(id) => Pong(id) }
val flow = stack.atop(stack.reversed).join(pingpong)
val result = Source((0 to 9).map(Ping)).via(flow).grouped(20).runWith(Sink.head)
Await.result(result, 1.second) should ===((0 to 9).map(Pong))
//#compose
}
"work when chopped up" in {
val stack = codec.atop(framing)
val flow = stack.atop(chopUp).atop(stack.reversed).join(Flow[Message].map { case Ping(id) => Pong(id) })
val f = Source((0 to 9).map(Ping)).via(flow).grouped(20).runWith(Sink.head)
Await.result(f, 1.second) should ===((0 to 9).map(Pong))
}
"work when accumulated" in {
val stack = codec.atop(framing)
val flow = stack.atop(accumulate).atop(stack.reversed).join(Flow[Message].map { case Ping(id) => Pong(id) })
val f = Source((0 to 9).map(Ping)).via(flow).grouped(20).runWith(Sink.head)
Await.result(f, 1.second) should ===((0 to 9).map(Pong))
}
}
}

View file

@@ -0,0 +1,249 @@
/**
* Copyright (C) 2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream
import akka.stream._
import akka.stream.scaladsl.Tcp.OutgoingConnection
import akka.stream.scaladsl._
import akka.stream.testkit.AkkaSpec
import akka.util.ByteString
import scala.concurrent.{ Future, Promise }
class CompositionDocSpec extends AkkaSpec {
implicit val ec = system.dispatcher
implicit val materializer = ActorMaterializer()
"nonnested flow" in {
//#non-nested-flow
Source.single(0)
.map(_ + 1)
.filter(_ != 0)
.map(_ - 2)
.to(Sink.fold(0)(_ + _))
// ... where is the nesting?
//#non-nested-flow
}
"nested flow" in {
//#nested-flow
val nestedSource =
Source.single(0) // An atomic source
.map(_ + 1) // an atomic processing stage
.named("nestedSource") // wraps up the current Source and gives it a name
val nestedFlow =
Flow[Int].filter(_ != 0) // an atomic processing stage
.map(_ - 2) // another atomic processing stage
.named("nestedFlow") // wraps up the Flow, and gives it a name
val nestedSink =
nestedFlow.to(Sink.fold(0)(_ + _)) // wire an atomic sink to the nestedFlow
.named("nestedSink") // wrap it up
// Create a RunnableGraph
val runnableGraph = nestedSource.to(nestedSink)
//#nested-flow
}
"reusing components" in {
val nestedSource =
Source.single(0) // An atomic source
.map(_ + 1) // an atomic processing stage
.named("nestedSource") // wraps up the current Source and gives it a name
val nestedFlow =
Flow[Int].filter(_ != 0) // an atomic processing stage
.map(_ - 2) // another atomic processing stage
.named("nestedFlow") // wraps up the Flow, and gives it a name
val nestedSink =
nestedFlow.to(Sink.fold(0)(_ + _)) // wire an atomic sink to the nestedFlow
.named("nestedSink") // wrap it up
//#reuse
// Create a RunnableGraph from our components
val runnableGraph = nestedSource.to(nestedSink)
// Usage is uniform, no matter if modules are composite or atomic
val runnableGraph2 = Source.single(0).to(Sink.fold(0)(_ + _))
//#reuse
}
"complex graph" in {
// format: OFF
//#complex-graph
import GraphDSL.Implicits._
RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
val A: Outlet[Int] = builder.add(Source.single(0)).out
val B: UniformFanOutShape[Int, Int] = builder.add(Broadcast[Int](2))
val C: UniformFanInShape[Int, Int] = builder.add(Merge[Int](2))
val D: FlowShape[Int, Int] = builder.add(Flow[Int].map(_ + 1))
val E: UniformFanOutShape[Int, Int] = builder.add(Balance[Int](2))
val F: UniformFanInShape[Int, Int] = builder.add(Merge[Int](2))
val G: Inlet[Any] = builder.add(Sink.foreach(println)).in
C <~ F
A ~> B ~> C ~> F
B ~> D ~> E ~> F
E ~> G
ClosedShape
})
//#complex-graph
//#complex-graph-alt
import GraphDSL.Implicits._
RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
val B = builder.add(Broadcast[Int](2))
val C = builder.add(Merge[Int](2))
val E = builder.add(Balance[Int](2))
val F = builder.add(Merge[Int](2))
Source.single(0) ~> B.in; B.out(0) ~> C.in(1); C.out ~> F.in(0)
C.in(0) <~ F.out
B.out(1).map(_ + 1) ~> E.in; E.out(0) ~> F.in(1)
E.out(1) ~> Sink.foreach(println)
ClosedShape
})
//#complex-graph-alt
// format: ON
}
"partial graph" in {
// format: OFF
//#partial-graph
import GraphDSL.Implicits._
val partial = GraphDSL.create() { implicit builder =>
val B = builder.add(Broadcast[Int](2))
val C = builder.add(Merge[Int](2))
val E = builder.add(Balance[Int](2))
val F = builder.add(Merge[Int](2))
C <~ F
B ~> C ~> F
B ~> Flow[Int].map(_ + 1) ~> E ~> F
FlowShape(B.in, E.out(1))
}.named("partial")
//#partial-graph
// format: ON
//#partial-use
Source.single(0).via(partial).to(Sink.ignore)
//#partial-use
// format: OFF
//#partial-flow-dsl
// Convert the partial graph of FlowShape to a Flow to get
// access to the fluid DSL (for example to be able to call .filter())
val flow = Flow.fromGraph(partial)
// Simple way to create a graph backed Source
val source = Source.fromGraph( GraphDSL.create() { implicit builder =>
val merge = builder.add(Merge[Int](2))
Source.single(0) ~> merge
Source(List(2, 3, 4)) ~> merge
// Exposing exactly one output port
SourceShape(merge.out)
})
// Building a Sink with a nested Flow, using the fluid DSL
val sink = {
val nestedFlow = Flow[Int].map(_ * 2).drop(10).named("nestedFlow")
nestedFlow.to(Sink.head)
}
// Putting all together
val closed = source.via(flow.filter(_ > 1)).to(sink)
//#partial-flow-dsl
// format: ON
}
"closed graph" in {
//#embed-closed
val closed1 = Source.single(0).to(Sink.foreach(println))
val closed2 = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
val embeddedClosed: ClosedShape = builder.add(closed1)
//
embeddedClosed
})
//#embed-closed
}
"materialized values" in {
//#mat-combine-1
// Materializes to Promise[Option[Int]] (red)
val source: Source[Int, Promise[Option[Int]]] = Source.maybe[Int]
// Materializes to Unit (black)
val flow1: Flow[Int, Int, Unit] = Flow[Int].take(100)
// Materializes to Promise[Int] (red)
val nestedSource: Source[Int, Promise[Option[Int]]] =
source.viaMat(flow1)(Keep.left).named("nestedSource")
//#mat-combine-1
//#mat-combine-2
// Materializes to Unit (orange)
val flow2: Flow[Int, ByteString, Unit] = Flow[Int].map { i => ByteString(i.toString) }
// Materializes to Future[OutgoingConnection] (yellow)
val flow3: Flow[ByteString, ByteString, Future[OutgoingConnection]] =
Tcp().outgoingConnection("localhost", 8080)
// Materializes to Future[OutgoingConnection] (yellow)
val nestedFlow: Flow[Int, ByteString, Future[OutgoingConnection]] =
flow2.viaMat(flow3)(Keep.right).named("nestedFlow")
//#mat-combine-2
//#mat-combine-3
// Materializes to Future[String] (green)
val sink: Sink[ByteString, Future[String]] = Sink.fold("")(_ + _.utf8String)
// Materializes to (Future[OutgoingConnection], Future[String]) (blue)
val nestedSink: Sink[Int, (Future[OutgoingConnection], Future[String])] =
nestedFlow.toMat(sink)(Keep.both)
//#mat-combine-3
//#mat-combine-4
case class MyClass(private val p: Promise[Option[Int]], conn: OutgoingConnection) {
def close() = p.trySuccess(None)
}
def f(p: Promise[Option[Int]],
rest: (Future[OutgoingConnection], Future[String])): Future[MyClass] = {
val connFuture = rest._1
connFuture.map(MyClass(p, _))
}
// Materializes to Future[MyClass] (purple)
val runnableGraph: RunnableGraph[Future[MyClass]] =
nestedSource.toMat(nestedSink)(f)
//#mat-combine-4
}
"attributes" in {
//#attributes-inheritance
import Attributes._
val nestedSource =
Source.single(0)
.map(_ + 1)
.named("nestedSource") // Wrap, no inputBuffer set
val nestedFlow =
Flow[Int].filter(_ != 0)
.via(Flow[Int].map(_ - 2).withAttributes(inputBuffer(4, 4))) // override
.named("nestedFlow") // Wrap, no inputBuffer set
val nestedSink =
nestedFlow.to(Sink.fold(0)(_ + _)) // wire an atomic sink to the nestedFlow
.withAttributes(name("nestedSink") and inputBuffer(3, 3)) // override
//#attributes-inheritance
}
}

View file

@@ -0,0 +1,248 @@
/**
* Copyright (C) 2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream
import akka.actor.Cancellable
import akka.stream.{ ClosedShape, FlowShape }
import akka.stream.scaladsl._
import akka.stream.testkit.AkkaSpec
import scala.concurrent.{ Promise, Future }
class FlowDocSpec extends AkkaSpec {
implicit val ec = system.dispatcher
//#imports
import akka.stream.ActorMaterializer
//#imports
implicit val materializer = ActorMaterializer()
"source is immutable" in {
//#source-immutable
val source = Source(1 to 10)
source.map(_ => 0) // has no effect on source, since it's immutable
source.runWith(Sink.fold(0)(_ + _)) // 55
val zeroes = source.map(_ => 0) // returns new Source[Int], with `map()` appended
zeroes.runWith(Sink.fold(0)(_ + _)) // 0
//#source-immutable
}
"materialization in steps" in {
//#materialization-in-steps
val source = Source(1 to 10)
val sink = Sink.fold[Int, Int](0)(_ + _)
// connect the Source to the Sink, obtaining a RunnableGraph
val runnable: RunnableGraph[Future[Int]] = source.toMat(sink)(Keep.right)
// materialize the flow and get the value of the FoldSink
val sum: Future[Int] = runnable.run()
//#materialization-in-steps
}
"materialization runWith" in {
//#materialization-runWith
val source = Source(1 to 10)
val sink = Sink.fold[Int, Int](0)(_ + _)
// materialize the flow, getting the Sinks materialized value
val sum: Future[Int] = source.runWith(sink)
//#materialization-runWith
}
"materialization is unique" in {
//#stream-reuse
// connect the Source to the Sink, obtaining a RunnableGraph
val sink = Sink.fold[Int, Int](0)(_ + _)
val runnable: RunnableGraph[Future[Int]] =
Source(1 to 10).toMat(sink)(Keep.right)
// get the materialized value of the FoldSink
val sum1: Future[Int] = runnable.run()
val sum2: Future[Int] = runnable.run()
// sum1 and sum2 are different Futures!
//#stream-reuse
}
"compound source cannot be used as key" in {
// FIXME #16902 This example is now turned around
// The WRONG case has been switched
//#compound-source-is-not-keyed-runWith
import scala.concurrent.duration._
case object Tick
val timer = Source.tick(initialDelay = 1.second, interval = 1.seconds, tick = () => Tick)
val timerCancel: Cancellable = Sink.ignore.runWith(timer)
timerCancel.cancel()
val timerMap = timer.map(tick => "tick")
// materialize the flow and retrieve the timers Cancellable
val timerCancellable = Sink.ignore.runWith(timerMap)
timerCancellable.cancel()
//#compound-source-is-not-keyed-runWith
//#compound-source-is-not-keyed-run
val timerCancellable2 = timerMap.to(Sink.ignore).run()
timerCancellable2.cancel()
//#compound-source-is-not-keyed-run
}
"creating sources, sinks" in {
//#source-sink
// Create a source from an Iterable
Source(List(1, 2, 3))
// Create a source from a Future
Source.fromFuture(Future.successful("Hello Streams!"))
// Create a source from a single element
Source.single("only one element")
// an empty source
Source.empty
// Sink that folds over the stream and returns a Future
// of the final result as its materialized value
Sink.fold[Int, Int](0)(_ + _)
// Sink that returns a Future as its materialized value,
// containing the first element of the stream
Sink.head
// A Sink that consumes a stream without doing anything with the elements
Sink.ignore
// A Sink that executes a side-effecting call for every element of the stream
Sink.foreach[String](println(_))
//#source-sink
}
"various ways of connecting source, sink, flow" in {
//#flow-connecting
// Explicitly creating and wiring up a Source, Sink and Flow
Source(1 to 6).via(Flow[Int].map(_ * 2)).to(Sink.foreach(println(_)))
// Starting from a Source
val source = Source(1 to 6).map(_ * 2)
source.to(Sink.foreach(println(_)))
// Starting from a Sink
val sink: Sink[Int, Unit] = Flow[Int].map(_ * 2).to(Sink.foreach(println(_)))
Source(1 to 6).to(sink)
// Broadcast to a sink inline
val otherSink: Sink[Int, Unit] =
Flow[Int].alsoTo(Sink.foreach(println(_))).to(Sink.ignore)
Source(1 to 6).to(otherSink)
//#flow-connecting
}
"various ways of transforming materialized values" in {
import scala.concurrent.duration._
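// Helper flow: each element is zipped with a once-per-second tick (ZipWith
// with Keep.right emits the element and drops the tick), limiting throughput
// to one element per second. The Cancellable of the embedded tick source
// becomes the materialized value of this Flow.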
val throttler = Flow.fromGraph(GraphDSL.create(Source.tick(1.second, 1.second, "test")) { implicit builder =>
tickSource =>
import GraphDSL.Implicits._
val zip = builder.add(ZipWith[String, Int, Int](Keep.right))
tickSource ~> zip.in0
FlowShape(zip.in1, zip.out)
})
//#flow-mat-combine
// A source that can be signalled explicitly from the outside
val source: Source[Int, Promise[Option[Int]]] = Source.maybe[Int]
// A flow that internally throttles elements to 1/second, and returns a Cancellable
// which can be used to shut down the stream
val flow: Flow[Int, Int, Cancellable] = throttler
// A sink that returns the first element of a stream in the returned Future
val sink: Sink[Int, Future[Int]] = Sink.head[Int]
// By default, the materialized value of the leftmost stage is preserved
val r1: RunnableGraph[Promise[Option[Int]]] = source.via(flow).to(sink)
// Simple selection of materialized values by using Keep.right
val r2: RunnableGraph[Cancellable] = source.viaMat(flow)(Keep.right).to(sink)
val r3: RunnableGraph[Future[Int]] = source.via(flow).toMat(sink)(Keep.right)
// Using runWith will always give the materialized values of the stages added
// by runWith() itself
val r4: Future[Int] = source.via(flow).runWith(sink)
val r5: Promise[Option[Int]] = flow.to(sink).runWith(source)
val r6: (Promise[Option[Int]], Future[Int]) = flow.runWith(source, sink)
// Using more complex combinations
val r7: RunnableGraph[(Promise[Option[Int]], Cancellable)] =
source.viaMat(flow)(Keep.both).to(sink)
val r8: RunnableGraph[(Promise[Option[Int]], Future[Int])] =
source.via(flow).toMat(sink)(Keep.both)
val r9: RunnableGraph[((Promise[Option[Int]], Cancellable), Future[Int])] =
source.viaMat(flow)(Keep.both).toMat(sink)(Keep.both)
val r10: RunnableGraph[(Cancellable, Future[Int])] =
source.viaMat(flow)(Keep.right).toMat(sink)(Keep.both)
// It is also possible to map over the materialized values. In r9 we had a
// doubly nested pair, but we want to flatten it out
val r11: RunnableGraph[(Promise[Option[Int]], Cancellable, Future[Int])] =
r9.mapMaterializedValue {
case ((promise, cancellable), future) =>
(promise, cancellable, future)
}
// Now we can use pattern matching to get the resulting materialized values
val (promise, cancellable, future) = r11.run()
// Type inference works as expected
promise.success(None)
cancellable.cancel()
future.map(_ + 3)
// The result of r11 can be also achieved by using the Graph API
val r12: RunnableGraph[(Promise[Option[Int]], Cancellable, Future[Int])] =
RunnableGraph.fromGraph(GraphDSL.create(source, flow, sink)((_, _, _)) { implicit builder =>
(src, f, dst) =>
import GraphDSL.Implicits._
src ~> f ~> dst
ClosedShape
})
//#flow-mat-combine
}
"explicit fusing" in {
//#explicit-fusing
import akka.stream.Fusing
val flow = Flow[Int].map(_ * 2).filter(_ > 500)
val fused = Fusing.aggressive(flow)
Source.fromIterator { () => Iterator from 0 }
.via(fused)
.take(1000)
//#explicit-fusing
}
"defining asynchronous boundaries" in {
//#flow-async
import akka.stream.Attributes.asyncBoundary
Source(List(1, 2, 3))
.map(_ + 1)
.withAttributes(asyncBoundary)
.map(_ * 2)
.to(Sink.ignore)
//#flow-async
}
}

View file

@@ -0,0 +1,92 @@
/**
* Copyright (C) 2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream
import scala.concurrent.Await
import akka.stream.ActorMaterializer
import akka.stream.ActorMaterializerSettings
import akka.stream.Supervision
import akka.stream.scaladsl._
import akka.stream.testkit.AkkaSpec
import akka.stream.Attributes
import akka.stream.ActorAttributes
import scala.concurrent.duration._
class FlowErrorDocSpec extends AkkaSpec {
"demonstrate fail stream" in {
//#stop
implicit val materializer = ActorMaterializer()
val source = Source(0 to 5).map(100 / _)
val result = source.runWith(Sink.fold(0)(_ + _))
// division by zero will fail the stream and the
// result here will be a Future completed with Failure(ArithmeticException)
//#stop
intercept[ArithmeticException] {
Await.result(result, 3.seconds)
}
}
"demonstrate resume stream" in {
//#resume
val decider: Supervision.Decider = {
case _: ArithmeticException => Supervision.Resume
case _ => Supervision.Stop
}
implicit val materializer = ActorMaterializer(
ActorMaterializerSettings(system).withSupervisionStrategy(decider))
val source = Source(0 to 5).map(100 / _)
val result = source.runWith(Sink.fold(0)(_ + _))
// the element causing division by zero will be dropped
// result here will be a Future completed with Success(228)
//#resume
Await.result(result, 3.seconds) should be(228)
}
"demonstrate resume section" in {
//#resume-section
implicit val materializer = ActorMaterializer()
val decider: Supervision.Decider = {
case _: ArithmeticException => Supervision.Resume
case _ => Supervision.Stop
}
val flow = Flow[Int]
.filter(100 / _ < 50).map(elem => 100 / (5 - elem))
.withAttributes(ActorAttributes.supervisionStrategy(decider))
val source = Source(0 to 5).via(flow)
val result = source.runWith(Sink.fold(0)(_ + _))
// the elements causing division by zero will be dropped
// result here will be a Future completed with Success(150)
//#resume-section
Await.result(result, 3.seconds) should be(150)
}
"demonstrate restart section" in {
//#restart-section
implicit val materializer = ActorMaterializer()
val decider: Supervision.Decider = {
case _: IllegalArgumentException => Supervision.Restart
case _ => Supervision.Stop
}
val flow = Flow[Int]
.scan(0) { (acc, elem) =>
if (elem < 0) throw new IllegalArgumentException("negative not allowed")
else acc + elem
}
.withAttributes(ActorAttributes.supervisionStrategy(decider))
val source = Source(List(1, 3, -1, 5, 7)).via(flow)
val result = source.grouped(1000).runWith(Sink.head)
// the negative element causes the scan stage to be restarted,
// i.e. start from 0 again
// result here will be a Future completed with Success(Vector(0, 1, 4, 0, 5, 12))
//#restart-section
Await.result(result, 3.seconds) should be(Vector(0, 1, 4, 0, 5, 12))
}
}

View file

@@ -0,0 +1,233 @@
/**
* Copyright (C) 2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream
import akka.stream._
import akka.stream.scaladsl._
import akka.stream.testkit.AkkaSpec
import scala.collection.immutable
import scala.concurrent.{ Future, Await }
import scala.concurrent.duration._
import akka.stream.Attributes
class FlowGraphDocSpec extends AkkaSpec {
implicit val ec = system.dispatcher
implicit val materializer = ActorMaterializer()
"build simple graph" in {
//format: OFF
//#simple-flow-graph
val g = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder: GraphDSL.Builder[Unit] =>
import GraphDSL.Implicits._
val in = Source(1 to 10)
val out = Sink.ignore
val bcast = builder.add(Broadcast[Int](2))
val merge = builder.add(Merge[Int](2))
val f1, f2, f3, f4 = Flow[Int].map(_ + 10)
in ~> f1 ~> bcast ~> f2 ~> merge ~> f3 ~> out
bcast ~> f4 ~> merge
ClosedShape
})
//#simple-flow-graph
//format: ON
//#simple-graph-run
g.run()
//#simple-graph-run
}
"flow connection errors" in {
intercept[IllegalArgumentException] {
//#simple-graph
RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
import GraphDSL.Implicits._
val source1 = Source(1 to 10)
val source2 = Source(1 to 10)
val zip = builder.add(Zip[Int, Int]())
source1 ~> zip.in0
source2 ~> zip.in1
// unconnected zip.out (!) => "must have at least 1 outgoing edge"
ClosedShape
})
//#simple-graph
}.getMessage should include("ZipWith2.out")
}
"reusing a flow in a graph" in {
//#flow-graph-reusing-a-flow
val topHeadSink = Sink.head[Int]
val bottomHeadSink = Sink.head[Int]
val sharedDoubler = Flow[Int].map(_ * 2)
//#flow-graph-reusing-a-flow
// format: OFF
val g =
//#flow-graph-reusing-a-flow
RunnableGraph.fromGraph(GraphDSL.create(topHeadSink, bottomHeadSink)((_, _)) { implicit builder =>
(topHS, bottomHS) =>
import GraphDSL.Implicits._
val broadcast = builder.add(Broadcast[Int](2))
Source.single(1) ~> broadcast.in
broadcast.out(0) ~> sharedDoubler ~> topHS.in
broadcast.out(1) ~> sharedDoubler ~> bottomHS.in
ClosedShape
})
//#flow-graph-reusing-a-flow
// format: ON
val (topFuture, bottomFuture) = g.run()
Await.result(topFuture, 300.millis) shouldEqual 2
Await.result(bottomFuture, 300.millis) shouldEqual 2
}
"building a reusable component" in {
//#flow-graph-components-shape
// A shape represents the input and output ports of a reusable
// processing module
case class PriorityWorkerPoolShape[In, Out](
jobsIn: Inlet[In],
priorityJobsIn: Inlet[In],
resultsOut: Outlet[Out]) extends Shape {
// It is important to provide the list of all input and output
// ports with a stable order. Duplicates are not allowed.
override val inlets: immutable.Seq[Inlet[_]] =
jobsIn :: priorityJobsIn :: Nil
override val outlets: immutable.Seq[Outlet[_]] =
resultsOut :: Nil
// A Shape must be able to create a copy of itself. Basically
// it means a new instance with copies of the ports
override def deepCopy() = PriorityWorkerPoolShape(
jobsIn.carbonCopy(),
priorityJobsIn.carbonCopy(),
resultsOut.carbonCopy())
// A Shape must also be able to create itself from existing ports
override def copyFromPorts(
inlets: immutable.Seq[Inlet[_]],
outlets: immutable.Seq[Outlet[_]]) = {
assert(inlets.size == this.inlets.size)
assert(outlets.size == this.outlets.size)
// This is why order matters when overriding inlets and outlets.
PriorityWorkerPoolShape[In, Out](inlets(0).as[In], inlets(1).as[In], outlets(0).as[Out])
}
}
//#flow-graph-components-shape
//#flow-graph-components-create
object PriorityWorkerPool {
def apply[In, Out](
worker: Flow[In, Out, Any],
workerCount: Int): Graph[PriorityWorkerPoolShape[In, Out], Unit] = {
GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val priorityMerge = b.add(MergePreferred[In](1))
val balance = b.add(Balance[In](workerCount))
val resultsMerge = b.add(Merge[Out](workerCount))
// After merging priority and ordinary jobs, we feed them to the balancer
priorityMerge ~> balance
// Wire up each of the outputs of the balancer to a worker flow
// then merge them back
for (i <- 0 until workerCount)
balance.out(i) ~> worker ~> resultsMerge.in(i)
// We now expose the input ports of the priorityMerge and the output
// of the resultsMerge as our PriorityWorkerPool ports
// -- all neatly wrapped in our domain specific Shape
PriorityWorkerPoolShape(
jobsIn = priorityMerge.in(0),
priorityJobsIn = priorityMerge.preferred,
resultsOut = resultsMerge.out)
}
}
}
//#flow-graph-components-create
def println(s: Any): Unit = ()
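// Usage: two pools are chained so that the results of the first pool feed the
// ordinary-jobs port of the second, while each pool also has its own priority input.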
//#flow-graph-components-use
val worker1 = Flow[String].map("step 1 " + _)
val worker2 = Flow[String].map("step 2 " + _)
RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val priorityPool1 = b.add(PriorityWorkerPool(worker1, 4))
val priorityPool2 = b.add(PriorityWorkerPool(worker2, 2))
Source(1 to 100).map("job: " + _) ~> priorityPool1.jobsIn
Source(1 to 100).map("priority job: " + _) ~> priorityPool1.priorityJobsIn
priorityPool1.resultsOut ~> priorityPool2.jobsIn
Source(1 to 100).map("one-step, priority " + _) ~> priorityPool2.priorityJobsIn
priorityPool2.resultsOut ~> Sink.foreach(println)
ClosedShape
}).run()
//#flow-graph-components-use
//#flow-graph-components-shape2
import FanInShape.Name
import FanInShape.Init
class PriorityWorkerPoolShape2[In, Out](_init: Init[Out] = Name("PriorityWorkerPool"))
extends FanInShape[Out](_init) {
protected override def construct(i: Init[Out]) = new PriorityWorkerPoolShape2(i)
val jobsIn = newInlet[In]("jobsIn")
val priorityJobsIn = newInlet[In]("priorityJobsIn")
// Outlet[Out] with name "out" is automatically created
}
//#flow-graph-components-shape2
}
"access to materialized value" in {
//#flow-graph-matvalue
import GraphDSL.Implicits._
val foldFlow: Flow[Int, Int, Future[Int]] = Flow.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) {
implicit builder =>
fold =>
FlowShape(fold.in, builder.materializedValue.mapAsync(4)(identity).outlet)
})
//#flow-graph-matvalue
Await.result(Source(1 to 10).via(foldFlow).runWith(Sink.head), 3.seconds) should ===(55)
//#flow-graph-matvalue-cycle
import GraphDSL.Implicits._
// This cannot produce any value:
val cyclicFold: Source[Int, Future[Int]] = Source.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) {
implicit builder =>
fold =>
// - Fold cannot complete until its upstream mapAsync completes
// - mapAsync cannot complete until the materialized Future produced by
// fold completes
// As a result this Source will never emit anything, and its materialized
// Future will never complete
builder.materializedValue.mapAsync(4)(identity) ~> fold
SourceShape(builder.materializedValue.mapAsync(4)(identity).outlet)
})
//#flow-graph-matvalue-cycle
}
}

View file

@@ -0,0 +1,110 @@
package docs.stream
import akka.stream.FlowShape
import akka.stream.scaladsl.{ GraphDSL, Merge, Balance, Source, Flow }
import akka.stream.testkit.AkkaSpec
class FlowParallelismDocSpec extends AkkaSpec {
import GraphDSL.Implicits._
case class ScoopOfBatter()
case class HalfCookedPancake()
case class Pancake()
//format: OFF
//#pipelining
// Takes a scoop of batter and creates a pancake with one side cooked
val fryingPan1: Flow[ScoopOfBatter, HalfCookedPancake, Unit] =
Flow[ScoopOfBatter].map { batter => HalfCookedPancake() }
// Finishes a half-cooked pancake
val fryingPan2: Flow[HalfCookedPancake, Pancake, Unit] =
Flow[HalfCookedPancake].map { halfCooked => Pancake() }
//#pipelining
//format: ON
"Demonstrate pipelining" in {
//#pipelining
// With the two frying pans we can fully cook pancakes
val pancakeChef: Flow[ScoopOfBatter, Pancake, Unit] =
Flow[ScoopOfBatter].via(fryingPan1).via(fryingPan2)
//#pipelining
}
"Demonstrate parallel processing" in {
//#parallelism
val fryingPan: Flow[ScoopOfBatter, Pancake, Unit] =
Flow[ScoopOfBatter].map { batter => Pancake() }
val pancakeChef: Flow[ScoopOfBatter, Pancake, Unit] = Flow.fromGraph(GraphDSL.create() { implicit builder =>
val dispatchBatter = builder.add(Balance[ScoopOfBatter](2))
val mergePancakes = builder.add(Merge[Pancake](2))
// Using two frying pans in parallel, both fully cooking a pancake from the batter.
// We always put the next scoop of batter to the first frying pan that becomes available.
dispatchBatter.out(0) ~> fryingPan ~> mergePancakes.in(0)
// Notice that we used the "fryingPan" flow without importing it via builder.add().
// Flows used this way are auto-imported, which in this case means that the two
// uses of "fryingPan" actually refer to two different stages in the graph.
dispatchBatter.out(1) ~> fryingPan ~> mergePancakes.in(1)
FlowShape(dispatchBatter.in, mergePancakes.out)
})
//#parallelism
}
"Demonstrate parallelized pipelines" in {
//#parallel-pipeline
val pancakeChef: Flow[ScoopOfBatter, Pancake, Unit] =
Flow.fromGraph(GraphDSL.create() { implicit builder =>
val dispatchBatter = builder.add(Balance[ScoopOfBatter](2))
val mergePancakes = builder.add(Merge[Pancake](2))
// Using two pipelines, having two frying pans each, in total using
// four frying pans
dispatchBatter.out(0) ~> fryingPan1 ~> fryingPan2 ~> mergePancakes.in(0)
dispatchBatter.out(1) ~> fryingPan1 ~> fryingPan2 ~> mergePancakes.in(1)
FlowShape(dispatchBatter.in, mergePancakes.out)
})
//#parallel-pipeline
}
"Demonstrate pipelined parallel processing" in {
//#pipelined-parallel
val pancakeChefs1: Flow[ScoopOfBatter, HalfCookedPancake, Unit] =
Flow.fromGraph(GraphDSL.create() { implicit builder =>
val dispatchBatter = builder.add(Balance[ScoopOfBatter](2))
val mergeHalfPancakes = builder.add(Merge[HalfCookedPancake](2))
// Two chefs work with one frying pan for each, half-frying the pancakes then putting
// them into a common pool
dispatchBatter.out(0) ~> fryingPan1 ~> mergeHalfPancakes.in(0)
dispatchBatter.out(1) ~> fryingPan1 ~> mergeHalfPancakes.in(1)
FlowShape(dispatchBatter.in, mergeHalfPancakes.out)
})
val pancakeChefs2: Flow[HalfCookedPancake, Pancake, Unit] =
Flow.fromGraph(GraphDSL.create() { implicit builder =>
val dispatchHalfPancakes = builder.add(Balance[HalfCookedPancake](2))
val mergePancakes = builder.add(Merge[Pancake](2))
// Two chefs work with one frying pan for each, finishing the pancakes then putting
// them into a common pool
dispatchHalfPancakes.out(0) ~> fryingPan2 ~> mergePancakes.in(0)
dispatchHalfPancakes.out(1) ~> fryingPan2 ~> mergePancakes.in(1)
FlowShape(dispatchHalfPancakes.in, mergePancakes.out)
})
val kitchen: Flow[ScoopOfBatter, Pancake, Unit] = pancakeChefs1.via(pancakeChefs2)
//#pipelined-parallel
}
}

View file

@@ -0,0 +1,192 @@
package docs.stream
import akka.stream._
import akka.stream.scaladsl.{ Sink, Source, Flow, Keep }
import akka.stream.testkit.AkkaSpec
import org.scalatest.concurrent.ScalaFutures
import scala.collection.immutable
import scala.concurrent.Await
import scala.concurrent.duration._
class FlowStagesSpec extends AkkaSpec with ScalaFutures {
//#import-stage
import akka.stream.stage._
//#import-stage
implicit val materializer = ActorMaterializer()
"stages demo" must {
"demonstrate various PushPullStages" in {
//#one-to-one
class Map[A, B](f: A => B) extends PushPullStage[A, B] {
override def onPush(elem: A, ctx: Context[B]): SyncDirective =
ctx.push(f(elem))
override def onPull(ctx: Context[B]): SyncDirective =
ctx.pull()
}
//#one-to-one
//#many-to-one
class Filter[A](p: A => Boolean) extends PushPullStage[A, A] {
override def onPush(elem: A, ctx: Context[A]): SyncDirective =
if (p(elem)) ctx.push(elem)
else ctx.pull()
override def onPull(ctx: Context[A]): SyncDirective =
ctx.pull()
}
//#many-to-one
//#one-to-many
class Duplicator[A]() extends PushPullStage[A, A] {
private var lastElem: A = _
private var oneLeft = false
override def onPush(elem: A, ctx: Context[A]): SyncDirective = {
lastElem = elem
oneLeft = true
ctx.push(elem)
}
override def onPull(ctx: Context[A]): SyncDirective =
if (!ctx.isFinishing) {
// the main pulling logic is below, as demonstrated in the accompanying illustration
if (oneLeft) {
oneLeft = false
ctx.push(lastElem)
} else
ctx.pull()
} else {
// If we need to emit a final element after the upstream
// finished
if (oneLeft) ctx.pushAndFinish(lastElem)
else ctx.finish()
}
override def onUpstreamFinish(ctx: Context[A]): TerminationDirective =
ctx.absorbTermination()
}
//#one-to-many
val keyedSink = Sink.head[immutable.Seq[Int]]
val sink = Flow[Int].grouped(10).toMat(keyedSink)(Keep.right)
//#stage-chain
val resultFuture = Source(1 to 10)
.transform(() => new Filter(_ % 2 == 0))
.transform(() => new Duplicator())
.transform(() => new Map(_ / 2))
.runWith(sink)
//#stage-chain
Await.result(resultFuture, 3.seconds) should be(Seq(1, 1, 2, 2, 3, 3, 4, 4, 5, 5))
}
"demonstrate various PushStages" in {
import akka.stream.stage._
//#pushstage
class Map[A, B](f: A => B) extends PushStage[A, B] {
override def onPush(elem: A, ctx: Context[B]): SyncDirective =
ctx.push(f(elem))
}
class Filter[A](p: A => Boolean) extends PushStage[A, A] {
override def onPush(elem: A, ctx: Context[A]): SyncDirective =
if (p(elem)) ctx.push(elem)
else ctx.pull()
}
//#pushstage
}
"demonstrate GraphStage" in {
//#doubler-stateful
class Duplicator[A] extends GraphStage[FlowShape[A, A]] {
val in = Inlet[A]("Duplicator.in")
val out = Outlet[A]("Duplicator.out")
val shape: FlowShape[A, A] = FlowShape(in, out)
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
new GraphStageLogic(shape) {
setHandler(in, new InHandler {
override def onPush(): Unit = {
val elem = grab(in)
emitMultiple(out, List(elem, elem))
}
})
setHandler(out, new OutHandler {
override def onPull(): Unit = pull(in)
})
}
}
//#doubler-stateful
val duplicator = Flow.fromGraph(new Duplicator[Int])
val fold = Source(1 to 2).via(duplicator).runFold("")(_ + _)
whenReady(fold) { s =>
s should be("1122")
}
}
"demonstrate DetachedStage" in {
//#detached
class Buffer2[T]() extends DetachedStage[T, T] {
private var buf = Vector.empty[T]
private var capacity = 2
private def isFull = capacity == 0
private def isEmpty = capacity == 2
private def dequeue(): T = {
capacity += 1
val next = buf.head
buf = buf.tail
next
}
private def enqueue(elem: T) = {
capacity -= 1
buf = buf :+ elem
}
override def onPull(ctx: DetachedContext[T]): DownstreamDirective = {
if (isEmpty) {
if (ctx.isFinishing) ctx.finish() // No more elements will arrive
else ctx.holdDownstream() // waiting until new elements
} else {
val next = dequeue()
if (ctx.isHoldingUpstream) ctx.pushAndPull(next) // release upstream
else ctx.push(next)
}
}
override def onPush(elem: T, ctx: DetachedContext[T]): UpstreamDirective = {
enqueue(elem)
if (isFull) ctx.holdUpstream() // Queue is now full, wait until new empty slot
else {
if (ctx.isHoldingDownstream) ctx.pushAndPull(dequeue()) // Release downstream
else ctx.pull()
}
}
override def onUpstreamFinish(ctx: DetachedContext[T]): TerminationDirective = {
if (!isEmpty) ctx.absorbTermination() // still need to flush from buffer
else ctx.finish() // already empty, finishing
}
}
//#detached
}
}
}

View file

@@ -0,0 +1,110 @@
package docs.stream
import akka.stream.{ ClosedShape, OverflowStrategy, ActorMaterializer }
import akka.stream.scaladsl._
import akka.stream.testkit.AkkaSpec
class GraphCyclesSpec extends AkkaSpec {
implicit val materializer = ActorMaterializer()
"Cycle demonstration" must {
val source = Source.fromIterator(() => Iterator.from(0))
"include a deadlocked cycle" in {
// format: OFF
//#deadlocked
// WARNING! The graph below deadlocks!
RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val merge = b.add(Merge[Int](2))
val bcast = b.add(Broadcast[Int](2))
source ~> merge ~> Flow[Int].map { s => println(s); s } ~> bcast ~> Sink.ignore
merge <~ bcast
ClosedShape
})
//#deadlocked
// format: ON
}
"include an unfair cycle" in {
// format: OFF
//#unfair
// WARNING! The graph below stops consuming from "source" after a few steps
RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val merge = b.add(MergePreferred[Int](1))
val bcast = b.add(Broadcast[Int](2))
source ~> merge ~> Flow[Int].map { s => println(s); s } ~> bcast ~> Sink.ignore
merge.preferred <~ bcast
ClosedShape
})
//#unfair
// format: ON
}
"include a dropping cycle" in {
// format: OFF
//#dropping
RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val merge = b.add(Merge[Int](2))
val bcast = b.add(Broadcast[Int](2))
source ~> merge ~> Flow[Int].map { s => println(s); s } ~> bcast ~> Sink.ignore
merge <~ Flow[Int].buffer(10, OverflowStrategy.dropHead) <~ bcast
ClosedShape
})
//#dropping
// format: ON
}
"include a dead zipping cycle" in {
// format: OFF
//#zipping-dead
// WARNING! The graph below never processes any elements
RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val zip = b.add(ZipWith[Int, Int, Int]((left, right) => right))
val bcast = b.add(Broadcast[Int](2))
source ~> zip.in0
zip.out.map { s => println(s); s } ~> bcast ~> Sink.ignore
zip.in1 <~ bcast
ClosedShape
})
//#zipping-dead
// format: ON
}
"include a live zipping cycle" in {
// format: OFF
//#zipping-live
RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val zip = b.add(ZipWith((left: Int, right: Int) => left))
val bcast = b.add(Broadcast[Int](2))
val concat = b.add(Concat[Int]())
val start = Source.single(0)
source ~> zip.in0
zip.out.map { s => println(s); s } ~> bcast ~> Sink.ignore
zip.in1 <~ concat <~ start
concat <~ bcast
ClosedShape
})
//#zipping-live
// format: ON
}
}
}

View file

@@ -0,0 +1,508 @@
/**
* Copyright (C) 2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream
import akka.stream.scaladsl.{ Keep, Sink, Flow, Source }
import akka.stream.stage._
import akka.stream._
import akka.stream.testkit.{ TestPublisher, TestSubscriber, AkkaSpec }
import scala.collection.mutable
import scala.concurrent.{ Promise, Await, Future }
import scala.concurrent.duration._
import scala.collection.immutable.Iterable
class GraphStageDocSpec extends AkkaSpec {
implicit val materializer = ActorMaterializer()
"Demonstrate creation of GraphStage boilerplate" in {
//#boilerplate-example
import akka.stream.SourceShape
import akka.stream.stage.GraphStage
class NumbersSource extends GraphStage[SourceShape[Int]] {
// Define the (sole) output port of this stage
val out: Outlet[Int] = Outlet("NumbersSource")
// Define the shape of this stage, which is SourceShape with the port we defined above
override val shape: SourceShape[Int] = SourceShape(out)
// This is where the actual (possibly stateful) logic will live
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = ???
}
//#boilerplate-example
}
"Demonstrate creation of GraphStage Source" in {
//#custom-source-example
import akka.stream.SourceShape
import akka.stream.Graph
import akka.stream.stage.GraphStage
import akka.stream.stage.OutHandler
class NumbersSource extends GraphStage[SourceShape[Int]] {
val out: Outlet[Int] = Outlet("NumbersSource")
override val shape: SourceShape[Int] = SourceShape(out)
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
new GraphStageLogic(shape) {
// All state MUST be inside the GraphStageLogic,
// never inside the enclosing GraphStage.
// This state is safe to access and modify from all the
// callbacks that are provided by GraphStageLogic and the
// registered handlers.
private var counter = 1
setHandler(out, new OutHandler {
override def onPull(): Unit = {
push(out, counter)
counter += 1
}
})
}
}
//#custom-source-example
//#simple-source-usage
// A GraphStage is a proper Graph, just like what GraphDSL.create would return
val sourceGraph: Graph[SourceShape[Int], Unit] = new NumbersSource
// Create a Source from the Graph to access the DSL
val mySource: Source[Int, Unit] = Source.fromGraph(new NumbersSource)
// Returns 55
val result1: Future[Int] = mySource.take(10).runFold(0)(_ + _)
// The source is reusable. This returns 5050
val result2: Future[Int] = mySource.take(100).runFold(0)(_ + _)
//#simple-source-usage
Await.result(result1, 3.seconds) should ===(55)
Await.result(result2, 3.seconds) should ===(5050)
}
//#one-to-one
class Map[A, B](f: A => B) extends GraphStage[FlowShape[A, B]] {
val in = Inlet[A]("Map.in")
val out = Outlet[B]("Map.out")
override val shape = FlowShape.of(in, out)
override def createLogic(attr: Attributes): GraphStageLogic =
new GraphStageLogic(shape) {
setHandler(in, new InHandler {
override def onPush(): Unit = {
push(out, f(grab(in)))
}
})
setHandler(out, new OutHandler {
override def onPull(): Unit = {
pull(in)
}
})
}
}
//#one-to-one
"Demonstrate a one to one element GraphStage" in {
// tests:
val stringLength = Flow.fromGraph(new Map[String, Int](_.length))
val result =
Source(Vector("one", "two", "three"))
.via(stringLength)
.runFold(Seq.empty[Int])((acc, elem) => acc :+ elem)
Await.result(result, 3.seconds) should ===(Seq(3, 3, 5))
}
//#many-to-one
class Filter[A](p: A => Boolean) extends GraphStage[FlowShape[A, A]] {
val in = Inlet[A]("Filter.in")
val out = Outlet[A]("Filter.out")
val shape = FlowShape.of(in, out)
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
new GraphStageLogic(shape) {
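// many-to-one stage: elements failing the predicate are dropped by pulling
// upstream again, so several upstream elements may be consumed per push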
setHandler(in, new InHandler {
override def onPush(): Unit = {
val elem = grab(in)
if (p(elem)) push(out, elem)
else pull(in)
}
})
setHandler(out, new OutHandler {
override def onPull(): Unit = {
pull(in)
}
})
}
}
//#many-to-one
"Demonstrate a many to one element GraphStage" in {
// tests:
val evenFilter = Flow.fromGraph(new Filter[Int](_ % 2 == 0))
val result =
Source(Vector(1, 2, 3, 4, 5, 6))
.via(evenFilter)
.runFold(Seq.empty[Int])((acc, elem) => acc :+ elem)
Await.result(result, 3.seconds) should ===(Seq(2, 4, 6))
}
//#one-to-many
class Duplicator[A] extends GraphStage[FlowShape[A, A]] {
val in = Inlet[A]("Duplicator.in")
val out = Outlet[A]("Duplicator.out")
val shape = FlowShape.of(in, out)
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
new GraphStageLogic(shape) {
// Again: note that all mutable state
// MUST be inside the GraphStageLogic
var lastElem: Option[A] = None
setHandler(in, new InHandler {
override def onPush(): Unit = {
val elem = grab(in)
lastElem = Some(elem)
push(out, elem)
}
override def onUpstreamFinish(): Unit = {
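// a duplicate of the last element may still be owed to downstream,
// so emit it before completing the stage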
if (lastElem.isDefined) emit(out, lastElem.get)
complete(out)
}
})
setHandler(out, new OutHandler {
override def onPull(): Unit = {
if (lastElem.isDefined) {
push(out, lastElem.get)
lastElem = None
} else {
pull(in)
}
}
})
}
}
//#one-to-many
"Demonstrate a one to many element GraphStage" in {
// tests:
val duplicator = Flow.fromGraph(new Duplicator[Int])
val result =
Source(Vector(1, 2, 3))
.via(duplicator)
.runFold(Seq.empty[Int])((acc, elem) => acc :+ elem)
Await.result(result, 3.seconds) should ===(Seq(1, 1, 2, 2, 3, 3))
}
"Demonstrate a simpler one to many stage" in {
//#simpler-one-to-many
class Duplicator[A] extends GraphStage[FlowShape[A, A]] {
val in = Inlet[A]("Duplicator.in")
val out = Outlet[A]("Duplicator.out")
val shape = FlowShape.of(in, out)
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
new GraphStageLogic(shape) {
setHandler(in, new InHandler {
override def onPush(): Unit = {
val elem = grab(in)
// this will temporarily suspend this handler until the two elems
// are emitted and then reinstate it
emitMultiple(out, Iterable(elem, elem))
}
})
setHandler(out, new OutHandler {
override def onPull(): Unit = {
pull(in)
}
})
}
}
//#simpler-one-to-many
// tests:
val duplicator = Flow.fromGraph(new Duplicator[Int])
val result =
Source(Vector(1, 2, 3))
.via(duplicator)
.runFold(Seq.empty[Int])((acc, elem) => acc :+ elem)
Await.result(result, 3.seconds) should ===(Seq(1, 1, 2, 2, 3, 3))
}
"Demonstrate chaining of graph stages" in {
val sink = Sink.fold[List[Int], Int](List.empty[Int])((acc, n) => acc :+ n)
//#graph-stage-chain
val resultFuture = Source(1 to 5)
.via(new Filter(_ % 2 == 0))
.via(new Duplicator())
.via(new Map(_ / 2))
.runWith(sink)
//#graph-stage-chain
Await.result(resultFuture, 3.seconds) should ===(List(1, 1, 2, 2))
}
"Demonstrate an asynchronous side channel" in {
import system.dispatcher
//#async-side-channel
// will close upstream when the future completes
class KillSwitch[A](switch: Future[Unit]) extends GraphStage[FlowShape[A, A]] {
val in = Inlet[A]("KillSwitch.in")
val out = Outlet[A]("KillSwitch.out")
val shape = FlowShape.of(in, out)
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
new GraphStageLogic(shape) {
override def preStart(): Unit = {
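// getAsyncCallback returns a callback that is safe to invoke from any
// thread; its handler runs inside the stage, so completeStage() is legal here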
val callback = getAsyncCallback[Unit] { (_) =>
completeStage()
}
switch.foreach(callback.invoke)
}
setHandler(in, new InHandler {
override def onPush(): Unit = { push(out, grab(in)) }
})
setHandler(out, new OutHandler {
override def onPull(): Unit = { pull(in) }
})
}
}
//#async-side-channel
// tests:
val switch = Promise[Unit]()
val duplicator = Flow.fromGraph(new KillSwitch[Int](switch.future))
// TODO this is probably racy, is there a way to make sure it happens after?
val valueAfterKill = switch.future.flatMap(_ => Future(4))
val result =
Source(Vector(1, 2, 3)).concat(Source.fromFuture(valueAfterKill))
.via(duplicator)
.runFold(Seq.empty[Int])((acc, elem) => acc :+ elem)
switch.success(())
Await.result(result, 3.seconds) should ===(Seq(1, 2, 3))
}
"Demonstrate a graph stage with a timer" in {
//#timed
// each time an event is pushed through it will trigger a period of silence
class TimedGate[A](silencePeriod: FiniteDuration) extends GraphStage[FlowShape[A, A]] {
val in = Inlet[A]("TimedGate.in")
val out = Outlet[A]("TimedGate.out")
val shape = FlowShape.of(in, out)
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
new TimerGraphStageLogic(shape) {
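// TimerGraphStageLogic adds scheduleOnce and the onTimer callback,
// used here to end the silence period started by each passed element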
var open = false
setHandler(in, new InHandler {
override def onPush(): Unit = {
val elem = grab(in)
if (open) pull(in)
else {
push(out, elem)
open = true
scheduleOnce(None, silencePeriod)
}
}
})
setHandler(out, new OutHandler {
override def onPull(): Unit = { pull(in) }
})
override protected def onTimer(timerKey: Any): Unit = {
open = false
}
}
}
//#timed
// tests:
val result =
Source(Vector(1, 2, 3))
.via(new TimedGate[Int](2.second))
.takeWithin(250.millis)
.runFold(Seq.empty[Int])((acc, elem) => acc :+ elem)
Await.result(result, 3.seconds) should ===(Seq(1))
}
"Demonstrate a custom materialized value" in {
//#materialized
class FirstValue[A] extends GraphStageWithMaterializedValue[FlowShape[A, A], Future[A]] {
val in = Inlet[A]("FirstValue.in")
val out = Outlet[A]("FirstValue.out")
val shape = FlowShape.of(in, out)
override def createLogicAndMaterializedValue(inheritedAttributes: Attributes): (GraphStageLogic, Future[A]) = {
val promise = Promise[A]()
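// the first element completes this promise; its future is handed out
// as the materialized value of the stage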
val logic = new GraphStageLogic(shape) {
setHandler(in, new InHandler {
override def onPush(): Unit = {
val elem = grab(in)
promise.success(elem)
push(out, elem)
// replace handler with one just forwarding
setHandler(in, new InHandler {
override def onPush(): Unit = {
push(out, grab(in))
}
})
}
})
setHandler(out, new OutHandler {
override def onPull(): Unit = {
pull(in)
}
})
}
(logic, promise.future)
}
}
//#materialized
// tests:
val flow = Source(Vector(1, 2, 3))
.viaMat(new FirstValue)(Keep.right)
.to(Sink.ignore)
val result: Future[Int] = flow.run()
Await.result(result, 3.seconds) should ===(1)
}
"Demonstrate a detached graph stage" in {
//#detached
class TwoBuffer[A] extends GraphStage[FlowShape[A, A]] {
val in = Inlet[A]("TwoBuffer.in")
val out = Outlet[A]("TwoBuffer.out")
val shape = FlowShape.of(in, out)
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
new GraphStageLogic(shape) {
val buffer = mutable.Queue[A]()
def bufferFull = buffer.size == 2
var downstreamWaiting = false
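// the two-element buffer lets this stage pull upstream independently of
// downstream demand, decoupling the two rates (hence "detached")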
override def preStart(): Unit = {
// a detached stage needs to start upstream demand
// itself as it is not triggered by downstream demand
pull(in)
}
setHandler(in, new InHandler {
override def onPush(): Unit = {
val elem = grab(in)
buffer.enqueue(elem)
if (downstreamWaiting) {
downstreamWaiting = false
val bufferedElem = buffer.dequeue()
push(out, bufferedElem)
}
if (!bufferFull) {
pull(in)
}
}
override def onUpstreamFinish(): Unit = {
if (buffer.nonEmpty) {
// emit the rest if possible
emitMultiple(out, buffer.toIterator)
}
completeStage()
}
})
setHandler(out, new OutHandler {
override def onPull(): Unit = {
if (buffer.isEmpty) {
downstreamWaiting = true
} else {
val elem = buffer.dequeue()
push(out, elem)
}
if (!bufferFull && !hasBeenPulled(in)) {
pull(in)
}
}
})
}
}
//#detached
// tests:
val result1 = Source(Vector(1, 2, 3))
.via(new TwoBuffer)
.runFold(Vector.empty[Int])((acc, n) => acc :+ n)
Await.result(result1, 3.seconds) should ===(Vector(1, 2, 3))
val subscriber = TestSubscriber.manualProbe[Int]()
val publisher = TestPublisher.probe[Int]()
val flow2 =
Source.fromPublisher(publisher)
.via(new TwoBuffer)
.to(Sink.fromSubscriber(subscriber))
val result2 = flow2.run()
val sub = subscriber.expectSubscription()
// this happens even though the subscriber has not signalled any demand
publisher.sendNext(1)
publisher.sendNext(2)
sub.cancel()
}
}

View file

@ -0,0 +1,379 @@
/**
* Copyright (C) 2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream
import scala.concurrent.duration._
import akka.stream.testkit.AkkaSpec
import akka.stream.scaladsl._
import akka.stream.ActorMaterializer
import scala.concurrent.Future
import akka.testkit.TestProbe
import akka.actor.ActorRef
import com.typesafe.config.ConfigFactory
import akka.actor.Actor
import akka.actor.Props
import akka.pattern.ask
import akka.util.Timeout
import akka.stream.Attributes
import akka.stream.ActorAttributes
import scala.concurrent.ExecutionContext
import akka.stream.ActorMaterializerSettings
import java.util.concurrent.atomic.AtomicInteger
import akka.stream.Supervision
import akka.stream.scaladsl.Flow
object IntegrationDocSpec {
import TwitterStreamQuickstartDocSpec._
val config = ConfigFactory.parseString("""
#//#blocking-dispatcher-config
blocking-dispatcher {
executor = "thread-pool-executor"
thread-pool-executor {
core-pool-size-min = 10
core-pool-size-max = 10
}
}
#//#blocking-dispatcher-config
akka.actor.default-mailbox.mailbox-type = akka.dispatch.UnboundedMailbox
""")
class AddressSystem {
//#email-address-lookup
def lookupEmail(handle: String): Future[Option[String]] =
//#email-address-lookup
Future.successful(Some(handle + "@somewhere.com"))
//#phone-lookup
def lookupPhoneNumber(handle: String): Future[Option[String]] =
//#phone-lookup
Future.successful(Some(handle.hashCode.toString))
}
class AddressSystem2 {
//#email-address-lookup2
def lookupEmail(handle: String): Future[String] =
//#email-address-lookup2
Future.successful(handle + "@somewhere.com")
}
final case class Email(to: String, title: String, body: String)
final case class TextMessage(to: String, body: String)
class EmailServer(probe: ActorRef) {
//#email-server-send
def send(email: Email): Future[Unit] = {
// ...
//#email-server-send
probe ! email.to
Future.successful(())
//#email-server-send
}
//#email-server-send
}
class SmsServer(probe: ActorRef) {
//#sms-server-send
def send(text: TextMessage): Unit = {
// ...
//#sms-server-send
probe ! text.to
//#sms-server-send
}
//#sms-server-send
}
final case class Save(tweet: Tweet)
final case object SaveDone
class DatabaseService(probe: ActorRef) extends Actor {
override def receive = {
case Save(tweet: Tweet) =>
probe ! tweet.author.handle
sender() ! SaveDone
}
}
//#sometimes-slow-service
class SometimesSlowService(implicit ec: ExecutionContext) {
//#sometimes-slow-service
def println(s: String): Unit = ()
//#sometimes-slow-service
private val runningCount = new AtomicInteger
def convert(s: String): Future[String] = {
println(s"running: $s (${runningCount.incrementAndGet()})")
Future {
if (s.nonEmpty && s.head.isLower)
Thread.sleep(500)
else
Thread.sleep(20)
println(s"completed: $s (${runningCount.decrementAndGet()})")
s.toUpperCase
}
}
}
//#sometimes-slow-service
}
class IntegrationDocSpec extends AkkaSpec(IntegrationDocSpec.config) {
import TwitterStreamQuickstartDocSpec._
import IntegrationDocSpec._
implicit val materializer = ActorMaterializer()
"calling external service with mapAsync" in {
val probe = TestProbe()
val addressSystem = new AddressSystem
val emailServer = new EmailServer(probe.ref)
//#tweet-authors
val authors: Source[Author, Unit] =
tweets
.filter(_.hashtags.contains(akka))
.map(_.author)
//#tweet-authors
//#email-addresses-mapAsync
val emailAddresses: Source[String, Unit] =
authors
.mapAsync(4)(author => addressSystem.lookupEmail(author.handle))
.collect { case Some(emailAddress) => emailAddress }
//#email-addresses-mapAsync
//#send-emails
val sendEmails: RunnableGraph[Unit] =
emailAddresses
.mapAsync(4)(address => {
emailServer.send(
Email(to = address, title = "Akka", body = "I like your tweet"))
})
.to(Sink.ignore)
sendEmails.run()
//#send-emails
probe.expectMsg("rolandkuhn@somewhere.com")
probe.expectMsg("patriknw@somewhere.com")
probe.expectMsg("bantonsson@somewhere.com")
probe.expectMsg("drewhk@somewhere.com")
probe.expectMsg("ktosopl@somewhere.com")
probe.expectMsg("mmartynas@somewhere.com")
probe.expectMsg("akkateam@somewhere.com")
}
"lookup email with mapAsync and supervision" in {
val addressSystem = new AddressSystem2
val authors: Source[Author, Unit] =
tweets.filter(_.hashtags.contains(akka)).map(_.author)
//#email-addresses-mapAsync-supervision
import ActorAttributes.supervisionStrategy
import Supervision.resumingDecider
val emailAddresses: Source[String, Unit] =
authors.via(
Flow[Author].mapAsync(4)(author => addressSystem.lookupEmail(author.handle))
.withAttributes(supervisionStrategy(resumingDecider)))
//#email-addresses-mapAsync-supervision
}
"calling external service with mapAsyncUnordered" in {
val probe = TestProbe()
val addressSystem = new AddressSystem
val emailServer = new EmailServer(probe.ref)
//#external-service-mapAsyncUnordered
val authors: Source[Author, Unit] =
tweets.filter(_.hashtags.contains(akka)).map(_.author)
val emailAddresses: Source[String, Unit] =
authors
.mapAsyncUnordered(4)(author => addressSystem.lookupEmail(author.handle))
.collect { case Some(emailAddress) => emailAddress }
val sendEmails: RunnableGraph[Unit] =
emailAddresses
.mapAsyncUnordered(4)(address => {
emailServer.send(
Email(to = address, title = "Akka", body = "I like your tweet"))
})
.to(Sink.ignore)
sendEmails.run()
//#external-service-mapAsyncUnordered
probe.receiveN(7).toSet should be(Set(
"rolandkuhn@somewhere.com",
"patriknw@somewhere.com",
"bantonsson@somewhere.com",
"drewhk@somewhere.com",
"ktosopl@somewhere.com",
"mmartynas@somewhere.com",
"akkateam@somewhere.com"))
}
"careful managed blocking with mapAsync" in {
val probe = TestProbe()
val addressSystem = new AddressSystem
val smsServer = new SmsServer(probe.ref)
val authors = tweets.filter(_.hashtags.contains(akka)).map(_.author)
val phoneNumbers =
authors.mapAsync(4)(author => addressSystem.lookupPhoneNumber(author.handle))
.collect { case Some(phoneNo) => phoneNo }
//#blocking-mapAsync
val blockingExecutionContext = system.dispatchers.lookup("blocking-dispatcher")
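// running the blocking send on a dedicated dispatcher keeps the
// default dispatcher free for non-blocking stages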
val sendTextMessages: RunnableGraph[Unit] =
phoneNumbers
.mapAsync(4)(phoneNo => {
Future {
smsServer.send(
TextMessage(to = phoneNo, body = "I like your tweet"))
}(blockingExecutionContext)
})
.to(Sink.ignore)
sendTextMessages.run()
//#blocking-mapAsync
probe.receiveN(7).toSet should be(Set(
"rolandkuhn".hashCode.toString,
"patriknw".hashCode.toString,
"bantonsson".hashCode.toString,
"drewhk".hashCode.toString,
"ktosopl".hashCode.toString,
"mmartynas".hashCode.toString,
"akkateam".hashCode.toString))
}
"careful managed blocking with map" in {
val probe = TestProbe()
val addressSystem = new AddressSystem
val smsServer = new SmsServer(probe.ref)
val authors = tweets.filter(_.hashtags.contains(akka)).map(_.author)
val phoneNumbers =
authors.mapAsync(4)(author => addressSystem.lookupPhoneNumber(author.handle))
.collect { case Some(phoneNo) => phoneNo }
//#blocking-map
val send = Flow[String]
.map { phoneNo =>
smsServer.send(TextMessage(to = phoneNo, body = "I like your tweet"))
}
.withAttributes(ActorAttributes.dispatcher("blocking-dispatcher"))
val sendTextMessages: RunnableGraph[Unit] =
phoneNumbers.via(send).to(Sink.ignore)
sendTextMessages.run()
//#blocking-map
probe.expectMsg("rolandkuhn".hashCode.toString)
probe.expectMsg("patriknw".hashCode.toString)
probe.expectMsg("bantonsson".hashCode.toString)
probe.expectMsg("drewhk".hashCode.toString)
probe.expectMsg("ktosopl".hashCode.toString)
probe.expectMsg("mmartynas".hashCode.toString)
probe.expectMsg("akkateam".hashCode.toString)
}
"calling actor service with mapAsync" in {
val probe = TestProbe()
val database = system.actorOf(Props(classOf[DatabaseService], probe.ref), "db")
//#save-tweets
val akkaTweets: Source[Tweet, Unit] = tweets.filter(_.hashtags.contains(akka))
implicit val timeout = Timeout(3.seconds)
val saveTweets: RunnableGraph[Unit] =
akkaTweets
.mapAsync(4)(tweet => database ? Save(tweet))
.to(Sink.ignore)
//#save-tweets
saveTweets.run()
probe.expectMsg("rolandkuhn")
probe.expectMsg("patriknw")
probe.expectMsg("bantonsson")
probe.expectMsg("drewhk")
probe.expectMsg("ktosopl")
probe.expectMsg("mmartynas")
probe.expectMsg("akkateam")
}
"illustrate ordering and parallelism of mapAsync" in {
val probe = TestProbe()
def println(s: String): Unit = {
if (s.startsWith("after:"))
probe.ref ! s
}
//#sometimes-slow-mapAsync
implicit val blockingExecutionContext = system.dispatchers.lookup("blocking-dispatcher")
val service = new SometimesSlowService
implicit val materializer = ActorMaterializer(
ActorMaterializerSettings(system).withInputBuffer(initialSize = 4, maxSize = 4))
Source(List("a", "B", "C", "D", "e", "F", "g", "H", "i", "J"))
.map(elem => { println(s"before: $elem"); elem })
.mapAsync(4)(service.convert)
.runForeach(elem => println(s"after: $elem"))
//#sometimes-slow-mapAsync
probe.expectMsg("after: A")
probe.expectMsg("after: B")
probe.expectMsg("after: C")
probe.expectMsg("after: D")
probe.expectMsg("after: E")
probe.expectMsg("after: F")
probe.expectMsg("after: G")
probe.expectMsg("after: H")
probe.expectMsg("after: I")
probe.expectMsg("after: J")
}
"illustrate ordering and parallelism of mapAsyncUnordered" in {
val probe = TestProbe()
def println(s: String): Unit = {
if (s.startsWith("after:"))
probe.ref ! s
}
//#sometimes-slow-mapAsyncUnordered
implicit val blockingExecutionContext = system.dispatchers.lookup("blocking-dispatcher")
val service = new SometimesSlowService
implicit val materializer = ActorMaterializer(
ActorMaterializerSettings(system).withInputBuffer(initialSize = 4, maxSize = 4))
Source(List("a", "B", "C", "D", "e", "F", "g", "H", "i", "J"))
.map(elem => { println(s"before: $elem"); elem })
.mapAsyncUnordered(4)(service.convert)
.runForeach(elem => println(s"after: $elem"))
//#sometimes-slow-mapAsyncUnordered
probe.receiveN(10).toSet should be(Set(
"after: A",
"after: B",
"after: C",
"after: D",
"after: E",
"after: F",
"after: G",
"after: H",
"after: I",
"after: J"))
}
}

View file

@ -0,0 +1,284 @@
/**
* Copyright (C) 2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream
import java.io.File
import _root_.akka.http.scaladsl.model.Uri
import _root_.akka.stream._
import _root_.akka.stream.scaladsl._
import _root_.akka.stream.stage.{GraphStage, GraphStageLogic, InHandler, OutHandler}
import _root_.akka.stream.testkit.{AkkaSpec, TestPublisher, TestSubscriber}
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.util.{Failure, Random, Success, Try}
class MigrationsScala extends AkkaSpec {
"Examples in migration guide" must {
"compile" in {
val flow1 = Flow[Int]
val flow2 = Flow[Int]
def inlet: Inlet[Int] = ???
def outlet: Outlet[Int] = ???
def inlet1: Inlet[Int] = ???
def outlet1: Outlet[Int] = ???
def inlet2: Inlet[Int] = ???
def outlet2: Outlet[Int] = ???
lazy val dontExecuteMe = {
//#flow-wrap
val graphSource: Graph[SourceShape[Int], Unit] = ???
val source: Source[Int, Unit] = Source.fromGraph(graphSource)
val graphSink: Graph[SinkShape[Int], Unit] = ???
val sink: Sink[Int, Unit] = Sink.fromGraph(graphSink)
val graphFlow: Graph[FlowShape[Int, Int], Unit] = ???
val flow: Flow[Int, Int, Unit] = Flow.fromGraph(graphFlow)
Flow.fromSinkAndSource(Sink.head[Int], Source.single(0))
//#flow-wrap
//#bidiflow-wrap
val bidiGraph: Graph[BidiShape[Int, Int, Int, Int], Unit] = ???
val bidi: BidiFlow[Int, Int, Int, Int, Unit] = BidiFlow.fromGraph(bidiGraph)
BidiFlow.fromFlows(flow1, flow2)
BidiFlow.fromFunctions((x: Int) => x + 1, (y: Int) => y * 3)
//#bidiflow-wrap
//#graph-create
// Replaces GraphDSL.closed()
GraphDSL.create() { builder =>
//...
ClosedShape
}
// Replaces GraphDSL.partial()
GraphDSL.create() { builder =>
//...
FlowShape(inlet, outlet)
}
//#graph-create
//#graph-create-2
Source.fromGraph(
GraphDSL.create() { builder =>
//...
SourceShape(outlet)
})
Sink.fromGraph(
GraphDSL.create() { builder =>
//...
SinkShape(inlet)
})
Flow.fromGraph(
GraphDSL.create() { builder =>
//...
FlowShape(inlet, outlet)
})
BidiFlow.fromGraph(
GraphDSL.create() { builder =>
//...
BidiShape(inlet1, outlet1, inlet2, outlet2)
})
//#graph-create-2
//#graph-edges
RunnableGraph.fromGraph(
GraphDSL.create() { implicit builder =>
import GraphDSL.Implicits._
outlet ~> inlet
outlet ~> flow ~> inlet
//...
ClosedShape
})
//#graph-edges
val promise = Promise[Unit]()
//#source-creators
val src: Source[Int, Promise[Option[Int]]] = Source.maybe[Int]
//...
// This finishes the stream without emitting anything, just like Source.lazyEmpty did
promise.trySuccess(Some(()))
val ticks = Source.tick(1.second, 3.seconds, "tick")
val pubSource = Source.fromPublisher(TestPublisher.manualProbe[Int]())
val itSource = Source.fromIterator(() => Iterator.continually(Random.nextGaussian))
val futSource = Source.fromFuture(Future.successful(42))
val subSource = Source.asSubscriber
//#source-creators
//#sink-creators
val subSink = Sink.fromSubscriber(TestSubscriber.manualProbe[Int]())
//#sink-creators
//#sink-as-publisher
val pubSink = Sink.asPublisher(fanout = false)
val pubSinkFanout = Sink.asPublisher(fanout = true)
//#sink-as-publisher
//#flatMapConcat
Flow[Source[Int, Any]].flatMapConcat(identity)
//#flatMapConcat
//#group-flatten
Flow[Int]
.groupBy(2, _ % 2) // the first parameter sets the maximum number of substreams
.map(_ + 3)
.concatSubstreams
//#group-flatten
val MaxDistinctWords = 1000
//#group-fold
Flow[String]
.groupBy(MaxDistinctWords, identity)
.fold(("", 0))((pair, word) => (word, pair._2 + 1))
.mergeSubstreams
//#group-fold
//#port-async
class MapAsyncOne[In, Out](f: In => Future[Out])(implicit ec: ExecutionContext)
extends GraphStage[FlowShape[In, Out]] {
val in: Inlet[In] = Inlet("MapAsyncOne.in")
val out: Outlet[Out] = Outlet("MapAsyncOne.out")
override val shape: FlowShape[In, Out] = FlowShape(in, out)
// The actual logic is encapsulated in a GraphStageLogic now
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic =
new GraphStageLogic(shape) {
// All of the state *must* be encapsulated in the GraphStageLogic,
// not in the GraphStage
private var elemInFlight: Out = _
val callback = getAsyncCallback(onAsyncInput)
var holdingUpstream = false
// All upstream related events now are handled in an InHandler instance
setHandler(in, new InHandler {
// No context or element parameter for onPush
override def onPush(): Unit = {
// The element is not passed as an argument but needs to be dequeued explicitly
val elem = grab(in)
val future = f(elem)
future.onComplete(callback.invoke)
// ctx.holdUpstream is no longer needed, but we need to track the state
holdingUpstream = true
}
// No context parameter
override def onUpstreamFinish(): Unit = {
if (holdingUpstream) absorbTermination()
else completeStage() // ctx.finish turns into completeStage()
}
})
setHandler(out, new OutHandler {
override def onPull(): Unit = {
if (elemInFlight != null) {
val e = elemInFlight
elemInFlight = null.asInstanceOf[Out]
pushIt(e)
} // holdDownstream is no longer needed
}
})
// absorbTermination turns into the code below.
// This emulates the behavior of the AsyncStage stage.
private def absorbTermination(): Unit =
if (isAvailable(shape.out)) getHandler(out).onPull()
// The line below emulates the behavior of the AsyncStage holdingDownstream
private def holdingDownstream(): Boolean =
!(isClosed(in) || hasBeenPulled(in))
// Any method can be used as a callback, we chose the previous name for
// easier comparison with the original code
private def onAsyncInput(input: Try[Out]) =
input match {
case Failure(ex) => failStage(ex)
case Success(e) if holdingDownstream() => pushIt(e)
case Success(e) =>
elemInFlight = e
// ctx.ignore is no longer needed
}
private def pushIt(elem: Out): Unit = {
// ctx.isFinishing turns into isClosed(in)
if (isClosed(in)) {
// pushAndFinish is now two actions
push(out, elem)
completeStage()
} else {
// pushAndPull is now two actions
push(out, elem)
pull(in)
holdingUpstream = false
}
}
}
}
//#port-async
val uri: Uri = ???
//#raw-query
val queryPart: Option[String] = uri.rawQueryString
//#raw-query
//#query-param
val param: Option[String] = uri.query().get("a")
//#query-param
//#file-source-sink
val fileSrc = FileIO.fromFile(new File("."))
val otherFileSrc = FileIO.fromFile(new File("."), 1024)
val someFileSink = FileIO.toFile(new File("."))
//#file-source-sink
class SomeInputStream extends java.io.InputStream { override def read(): Int = 0 }
class SomeOutputStream extends java.io.OutputStream { override def write(b: Int): Unit = () }
//#input-output-stream-source-sink
val inputStreamSrc = StreamConverters.fromInputStream(() => new SomeInputStream())
val otherInputStreamSrc = StreamConverters.fromInputStream(() => new SomeInputStream())
val someOutputStreamSink = StreamConverters.fromOutputStream(() => new SomeOutputStream())
//#input-output-stream-source-sink
//#output-input-stream-source-sink
val timeout: FiniteDuration = 0.seconds
val outputStreamSrc = StreamConverters.asOutputStream()
val otherOutputStreamSrc = StreamConverters.asOutputStream(timeout)
val someInputStreamSink = StreamConverters.asInputStream()
val someOtherInputStreamSink = StreamConverters.asInputStream(timeout)
//#output-input-stream-source-sink
}
}
}
}

View file

@ -0,0 +1,111 @@
/**
* Copyright (C) 2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream
import akka.stream._
import akka.stream.scaladsl._
import akka.stream.testkit._
import akka.stream.testkit.scaladsl._
import scala.util.Random
import scala.math._
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.collection.immutable
import akka.testkit.TestLatch
class RateTransformationDocSpec extends AkkaSpec {
type Seq[+A] = immutable.Seq[A]
val Seq = immutable.Seq
implicit val materializer = ActorMaterializer()
"conflate should summarize" in {
//#conflate-summarize
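// conflate aggregates incoming values into the Seq while downstream is
// backpressuring, so each emitted batch summarizes everything seen since
// the previous emit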
val statsFlow = Flow[Double]
.conflate(Seq(_))(_ :+ _)
.map { s =>
val μ = s.sum / s.size
val se = s.map(x => pow(x - μ, 2))
val σ = sqrt(se.sum / se.size)
(σ, μ, s.size)
}
//#conflate-summarize
val fut = Source.fromIterator(() => Iterator.continually(Random.nextGaussian))
.via(statsFlow)
.grouped(10)
.runWith(Sink.head)
Await.result(fut, 100.millis)
}
"conflate should sample" in {
//#conflate-sample
val p = 0.01
val sampleFlow = Flow[Double]
.conflate(Seq(_)) {
case (acc, elem) if Random.nextDouble < p => acc :+ elem
case (acc, _) => acc
}
.mapConcat(identity)
//#conflate-sample
val fut = Source(1 to 1000)
.map(_.toDouble)
.via(sampleFlow)
.runWith(Sink.fold(Seq.empty[Double])(_ :+ _))
val count = Await.result(fut, 1000.millis).size
}
"expand should repeat last" in {
//#expand-last
val lastFlow = Flow[Double]
.expand(identity)(s => (s, s))
//#expand-last
val (probe, fut) = TestSource.probe[Double]
.via(lastFlow)
.grouped(10)
.toMat(Sink.head)(Keep.both)
.run()
probe.sendNext(1.0)
val expanded = Await.result(fut, 100.millis)
expanded.size shouldBe 10
expanded.sum shouldBe 10
}
"expand should track drift" in {
//#expand-drift
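// expand re-emits the last element whenever downstream pulls faster than
// upstream delivers, incrementing the drift counter on every repetition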
val driftFlow = Flow[Double]
.expand((_, 0)) {
case (lastElement, drift) => ((lastElement, drift), (lastElement, drift + 1))
}
//#expand-drift
val latch = TestLatch(2)
val realDriftFlow = Flow[Double]
.expand(d => { latch.countDown(); (d, 0) }) {
case (lastElement, drift) => ((lastElement, drift), (lastElement, drift + 1))
}
val (pub, sub) = TestSource.probe[Double]
.via(realDriftFlow)
.toMat(TestSink.probe[(Double, Int)])(Keep.both)
.run()
sub.request(1)
pub.sendNext(1.0)
sub.expectNext((1.0, 0))
sub.requestNext((1.0, 1))
sub.requestNext((1.0, 2))
pub.sendNext(2.0)
Await.ready(latch, 1.second)
sub.requestNext((2.0, 0))
}
}

View file

@ -0,0 +1,147 @@
/**
* Copyright (C) 2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ RunnableGraph, Flow, Sink, Source }
import akka.stream.testkit._
import org.reactivestreams.Processor
class ReactiveStreamsDocSpec extends AkkaSpec {
import TwitterStreamQuickstartDocSpec._
implicit val materializer = ActorMaterializer()
//#imports
import org.reactivestreams.Publisher
import org.reactivestreams.Subscriber
//#imports
trait Fixture {
//#authors
val authors = Flow[Tweet]
.filter(_.hashtags.contains(akka))
.map(_.author)
//#authors
//#tweets-publisher
def tweets: Publisher[Tweet]
//#tweets-publisher
//#author-storage-subscriber
def storage: Subscriber[Author]
//#author-storage-subscriber
//#author-alert-subscriber
def alert: Subscriber[Author]
//#author-alert-subscriber
}
val impl = new Fixture {
override def tweets: Publisher[Tweet] =
TwitterStreamQuickstartDocSpec.tweets.runWith(Sink.asPublisher(false))
override def storage = TestSubscriber.manualProbe[Author]
override def alert = TestSubscriber.manualProbe[Author]
}
def assertResult(storage: TestSubscriber.ManualProbe[Author]): Unit = {
val sub = storage.expectSubscription()
sub.request(10)
storage.expectNext(Author("rolandkuhn"))
storage.expectNext(Author("patriknw"))
storage.expectNext(Author("bantonsson"))
storage.expectNext(Author("drewhk"))
storage.expectNext(Author("ktosopl"))
storage.expectNext(Author("mmartynas"))
storage.expectNext(Author("akkateam"))
storage.expectComplete()
}
"reactive streams publisher via flow to subscriber" in {
import impl._
val storage = impl.storage
//#connect-all
Source.fromPublisher(tweets).via(authors).to(Sink.fromSubscriber(storage)).run()
//#connect-all
assertResult(storage)
}
"flow as publisher and subscriber" in {
import impl._
val storage = impl.storage
//#flow-publisher-subscriber
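// toProcessor materializes the flow as a Reactive Streams Processor,
// which can then be wired to any compliant Publisher and Subscriber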
val processor: Processor[Tweet, Author] = authors.toProcessor.run()
tweets.subscribe(processor)
processor.subscribe(storage)
//#flow-publisher-subscriber
assertResult(storage)
}
"source as publisher" in {
import impl._
val storage = impl.storage
//#source-publisher
val authorPublisher: Publisher[Author] =
Source.fromPublisher(tweets).via(authors).runWith(Sink.asPublisher(fanout = false))
authorPublisher.subscribe(storage)
//#source-publisher
assertResult(storage)
}
"source as fanoutPublisher" in {
import impl._
val storage = impl.storage
val alert = impl.alert
//#source-fanoutPublisher
val authorPublisher: Publisher[Author] =
Source.fromPublisher(tweets).via(authors)
.runWith(Sink.asPublisher(fanout = true))
authorPublisher.subscribe(storage)
authorPublisher.subscribe(alert)
//#source-fanoutPublisher
// this relies on fanoutPublisher buffer size > number of authors
assertResult(storage)
assertResult(alert)
}
"sink as subscriber" in {
import impl._
val storage = impl.storage
//#sink-subscriber
val tweetSubscriber: Subscriber[Tweet] =
authors.to(Sink.fromSubscriber(storage)).runWith(Source.asSubscriber[Tweet])
tweets.subscribe(tweetSubscriber)
//#sink-subscriber
assertResult(storage)
}
"use a processor" in {
//#use-processor
// An example Processor factory
def createProcessor: Processor[Int, Int] = Flow[Int].toProcessor.run()
val flow: Flow[Int, Int, Unit] = Flow.fromProcessor(() => createProcessor)
//#use-processor
}
}

View file

@ -0,0 +1,89 @@
package docs.stream
import akka.stream._
import akka.stream.scaladsl._
import akka.stream.testkit.AkkaSpec
class StreamBuffersRateSpec extends AkkaSpec {
implicit val materializer = ActorMaterializer()
"Demonstrate pipelining" in {
def println(s: Any) = ()
//#pipelining
Source(1 to 3)
.map { i => println(s"A: $i"); i }
.map { i => println(s"B: $i"); i }
.map { i => println(s"C: $i"); i }
.runWith(Sink.ignore)
//#pipelining
}
"Demonstrate buffer sizes" in {
//#materializer-buffer
val materializer = ActorMaterializer(
ActorMaterializerSettings(system)
.withInputBuffer(
initialSize = 64,
maxSize = 64))
//#materializer-buffer
//#section-buffer
val section = Flow[Int].map(_ * 2)
.withAttributes(Attributes.inputBuffer(initial = 1, max = 1))
val flow = section.via(Flow[Int].map(_ / 2)) // the buffer size of this map is the default
//#section-buffer
}
"buffering abstraction leak" in {
//#buffering-abstraction-leak
import scala.concurrent.duration._
case class Tick()
RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val zipper = b.add(ZipWith[Tick, Int, Int]((tick, count) => count))
Source.tick(initialDelay = 3.second, interval = 3.second, Tick()) ~> zipper.in0
Source.tick(initialDelay = 1.second, interval = 1.second, "message!")
.conflate(seed = (_) => 1)((count, _) => count + 1) ~> zipper.in1
zipper.out ~> Sink.foreach(println)
ClosedShape
})
//#buffering-abstraction-leak
}
"explcit buffers" in {
trait Job
def inboundJobsConnector(): Source[Job, Unit] = Source.empty
//#explicit-buffers-backpressure
// Getting a stream of jobs from an imaginary external system as a Source
val jobs: Source[Job, Unit] = inboundJobsConnector()
jobs.buffer(1000, OverflowStrategy.backpressure)
//#explicit-buffers-backpressure
//#explicit-buffers-droptail
jobs.buffer(1000, OverflowStrategy.dropTail)
//#explicit-buffers-droptail
//#explicit-buffers-dropnew
jobs.buffer(1000, OverflowStrategy.dropNew)
//#explicit-buffers-dropnew
//#explicit-buffers-drophead
jobs.buffer(1000, OverflowStrategy.dropHead)
//#explicit-buffers-drophead
//#explicit-buffers-dropbuffer
jobs.buffer(1000, OverflowStrategy.dropBuffer)
//#explicit-buffers-dropbuffer
//#explicit-buffers-fail
jobs.buffer(1000, OverflowStrategy.fail)
//#explicit-buffers-fail
}
}

View file

@ -0,0 +1,130 @@
/**
* Copyright (C) 2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream
import akka.actor.ActorRef
import akka.stream._
import akka.stream.scaladsl._
import akka.stream.testkit.AkkaSpec
import scala.concurrent.{ Await, Future }
import scala.concurrent.duration._
class StreamPartialFlowGraphDocSpec extends AkkaSpec {
implicit val ec = system.dispatcher
implicit val materializer = ActorMaterializer()
"build with open ports" in {
//#simple-partial-flow-graph
val pickMaxOfThree = GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
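// chaining two binary ZipWith(max) stages picks the maximum of three inputs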
val zip1 = b.add(ZipWith[Int, Int, Int](math.max _))
val zip2 = b.add(ZipWith[Int, Int, Int](math.max _))
zip1.out ~> zip2.in0
UniformFanInShape(zip2.out, zip1.in0, zip1.in1, zip2.in1)
}
val resultSink = Sink.head[Int]
val g = RunnableGraph.fromGraph(GraphDSL.create(resultSink) { implicit b =>
sink =>
import GraphDSL.Implicits._
// importing the partial graph will return its shape (inlets & outlets)
val pm3 = b.add(pickMaxOfThree)
Source.single(1) ~> pm3.in(0)
Source.single(2) ~> pm3.in(1)
Source.single(3) ~> pm3.in(2)
pm3.out ~> sink.in
ClosedShape
})
val max: Future[Int] = g.run()
Await.result(max, 300.millis) should equal(3)
//#simple-partial-flow-graph
}
"build source from partial flow graph" in {
//#source-from-partial-flow-graph
val pairs = Source.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
// prepare graph elements
val zip = b.add(Zip[Int, Int]())
def ints = Source.fromIterator(() => Iterator.from(1))
// connect the graph
ints.filter(_ % 2 != 0) ~> zip.in0
ints.filter(_ % 2 == 0) ~> zip.in1
// expose port
SourceShape(zip.out)
})
val firstPair: Future[(Int, Int)] = pairs.runWith(Sink.head)
//#source-from-partial-flow-graph
Await.result(firstPair, 300.millis) should equal(1 -> 2)
}
"build flow from partial flow graph" in {
//#flow-from-partial-flow-graph
val pairUpWithToString =
Flow.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
// prepare graph elements
val broadcast = b.add(Broadcast[Int](2))
val zip = b.add(Zip[Int, String]())
// connect the graph
broadcast.out(0).map(identity) ~> zip.in0
broadcast.out(1).map(_.toString) ~> zip.in1
// expose ports
FlowShape(broadcast.in, zip.out)
})
//#flow-from-partial-flow-graph
// format: OFF
val (_, matSink: Future[(Int, String)]) =
//#flow-from-partial-flow-graph
pairUpWithToString.runWith(Source(List(1)), Sink.head)
//#flow-from-partial-flow-graph
// format: ON
Await.result(matSink, 300.millis) should equal(1 -> "1")
}
"combine sources with simplified API" in {
//#source-combine
val sourceOne = Source(List(1))
val sourceTwo = Source(List(2))
val merged = Source.combine(sourceOne, sourceTwo)(Merge(_))
val mergedResult: Future[Int] = merged.runWith(Sink.fold(0)(_ + _))
//#source-combine
Await.result(mergedResult, 300.millis) should equal(3)
}
"combine sinks with simplified API" in {
val actorRef: ActorRef = testActor
//#sink-combine
val sendRemotely = Sink.actorRef(actorRef, "Done")
val localProcessing = Sink.foreach[Int](_ => /* do something useful */ ())
val sink = Sink.combine(sendRemotely, localProcessing)(Broadcast[Int](_))
Source(List(0, 1, 2)).runWith(sink)
//#sink-combine
expectMsg(0)
expectMsg(1)
expectMsg(2)
}
}

View file

@ -0,0 +1,161 @@
/**
* Copyright (C) 2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream
import akka.stream._
import akka.stream.scaladsl._
import akka.stream.testkit._
import akka.stream.testkit.scaladsl._
import scala.util._
import scala.concurrent.duration._
import scala.concurrent._
import akka.testkit.TestProbe
import akka.pattern
class StreamTestKitDocSpec extends AkkaSpec {
implicit val materializer = ActorMaterializer()
"strict collection" in {
//#strict-collection
val sinkUnderTest = Flow[Int].map(_ * 2).toMat(Sink.fold(0)(_ + _))(Keep.right)
val future = Source(1 to 4).runWith(sinkUnderTest)
val result = Await.result(future, 100.millis)
assert(result == 20)
//#strict-collection
}
"grouped part of infinite stream" in {
//#grouped-infinite
import system.dispatcher
import akka.pattern.pipe
val sourceUnderTest = Source.repeat(1).map(_ * 2)
val future = sourceUnderTest.grouped(10).runWith(Sink.head)
val result = Await.result(future, 100.millis)
assert(result == Seq.fill(10)(2))
//#grouped-infinite
}
"folded stream" in {
//#folded-stream
val flowUnderTest = Flow[Int].takeWhile(_ < 5)
val future = Source(1 to 10).via(flowUnderTest).runWith(Sink.fold(Seq.empty[Int])(_ :+ _))
val result = Await.result(future, 100.millis)
assert(result == (1 to 4))
//#folded-stream
}
"pipe to test probe" in {
//#pipeto-testprobe
import system.dispatcher
import akka.pattern.pipe
val sourceUnderTest = Source(1 to 4).grouped(2)
val probe = TestProbe()
sourceUnderTest.grouped(2).runWith(Sink.head).pipeTo(probe.ref)
probe.expectMsg(100.millis, Seq(Seq(1, 2), Seq(3, 4)))
//#pipeto-testprobe
}
"sink actor ref" in {
//#sink-actorref
case object Tick
val sourceUnderTest = Source.tick(0.seconds, 200.millis, Tick)
val probe = TestProbe()
val cancellable = sourceUnderTest.to(Sink.actorRef(probe.ref, "completed")).run()
probe.expectMsg(1.second, Tick)
probe.expectNoMsg(100.millis)
probe.expectMsg(200.millis, Tick)
cancellable.cancel()
probe.expectMsg(200.millis, "completed")
//#sink-actorref
}
"source actor ref" in {
//#source-actorref
val sinkUnderTest = Flow[Int].map(_.toString).toMat(Sink.fold("")(_ + _))(Keep.right)
val (ref, future) = Source.actorRef(8, OverflowStrategy.fail)
.toMat(sinkUnderTest)(Keep.both).run()
ref ! 1
ref ! 2
ref ! 3
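// a Status.Success message completes the actor-backed source,
// allowing the fold to finish and the future to complete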
ref ! akka.actor.Status.Success("done")
val result = Await.result(future, 100.millis)
assert(result == "123")
//#source-actorref
}
"test sink probe" in {
//#test-sink-probe
val sourceUnderTest = Source(1 to 4).filter(_ % 2 == 0).map(_ * 2)
sourceUnderTest
.runWith(TestSink.probe[Int])
.request(2)
.expectNext(4, 8)
.expectComplete()
//#test-sink-probe
}
"test source probe" in {
//#test-source-probe
val sinkUnderTest = Sink.cancelled
TestSource.probe[Int]
.toMat(sinkUnderTest)(Keep.left)
.run()
.expectCancellation()
//#test-source-probe
}
"injecting failure" in {
//#injecting-failure
val sinkUnderTest = Sink.head[Int]
val (probe, future) = TestSource.probe[Int]
.toMat(sinkUnderTest)(Keep.both)
.run()
probe.sendError(new Exception("boom"))
Await.ready(future, 100.millis)
val Failure(exception) = future.value.get
assert(exception.getMessage == "boom")
//#injecting-failure
}
"test source and a sink" in {
import system.dispatcher
//#test-source-and-sink
val flowUnderTest = Flow[Int].mapAsyncUnordered(2) { sleep =>
pattern.after(10.millis * sleep, using = system.scheduler)(Future.successful(sleep))
}
val (pub, sub) = TestSource.probe[Int]
.via(flowUnderTest)
.toMat(TestSink.probe[Int])(Keep.both)
.run()
sub.request(n = 3)
pub.sendNext(3)
pub.sendNext(2)
pub.sendNext(1)
sub.expectNextUnordered(1, 2, 3)
pub.sendError(new Exception("Power surge in the linear subroutine C-47!"))
val ex = sub.expectError()
assert(ex.getMessage.contains("C-47"))
//#test-source-and-sink
}
}

View file

@ -0,0 +1,210 @@
/**
* Copyright (C) 2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream
//#imports
import akka.actor.ActorSystem
import akka.stream.{ ClosedShape, ActorMaterializer, OverflowStrategy }
import akka.stream.scaladsl._
import scala.concurrent.Await
import scala.concurrent.Future
//#imports
import akka.stream.testkit.AkkaSpec
object TwitterStreamQuickstartDocSpec {
//#model
final case class Author(handle: String)
final case class Hashtag(name: String)
final case class Tweet(author: Author, timestamp: Long, body: String) {
def hashtags: Set[Hashtag] =
body.split(" ").collect { case t if t.startsWith("#") => Hashtag(t) }.toSet
}
val akka = Hashtag("#akka")
//#model
val tweets = Source(
Tweet(Author("rolandkuhn"), System.currentTimeMillis, "#akka rocks!") ::
Tweet(Author("patriknw"), System.currentTimeMillis, "#akka !") ::
Tweet(Author("bantonsson"), System.currentTimeMillis, "#akka !") ::
Tweet(Author("drewhk"), System.currentTimeMillis, "#akka !") ::
Tweet(Author("ktosopl"), System.currentTimeMillis, "#akka on the rocks!") ::
Tweet(Author("mmartynas"), System.currentTimeMillis, "wow #akka !") ::
Tweet(Author("akkateam"), System.currentTimeMillis, "#akka rocks!") ::
Tweet(Author("bananaman"), System.currentTimeMillis, "#bananas rock!") ::
Tweet(Author("appleman"), System.currentTimeMillis, "#apples rock!") ::
Tweet(Author("drama"), System.currentTimeMillis, "we compared #apples to #oranges!") ::
Nil)
}
class TwitterStreamQuickstartDocSpec extends AkkaSpec {
import TwitterStreamQuickstartDocSpec._
implicit val executionContext = system.dispatcher
// Disable println
def println(s: Any): Unit = ()
trait Example0 {
//#tweet-source
val tweets: Source[Tweet, Unit]
//#tweet-source
}
trait Example1 {
//#first-sample
//#materializer-setup
implicit val system = ActorSystem("reactive-tweets")
implicit val materializer = ActorMaterializer()
//#materializer-setup
//#first-sample
}
implicit val materializer = ActorMaterializer()
"filter and map" in {
//#first-sample
//#authors-filter-map
val authors: Source[Author, Unit] =
tweets
.filter(_.hashtags.contains(akka))
.map(_.author)
//#first-sample
//#authors-filter-map
trait Example3 {
//#authors-collect
val authors: Source[Author, Unit] =
tweets.collect { case t if t.hashtags.contains(akka) => t.author }
//#authors-collect
}
//#first-sample
//#authors-foreachsink-println
authors.runWith(Sink.foreach(println))
//#authors-foreachsink-println
//#first-sample
//#authors-foreach-println
authors.runForeach(println)
//#authors-foreach-println
}
"mapConcat hashtags" in {
//#hashtags-mapConcat
val hashtags: Source[Hashtag, Unit] = tweets.mapConcat(_.hashtags.toList)
//#hashtags-mapConcat
}
trait HiddenDefinitions {
//#flow-graph-broadcast
val writeAuthors: Sink[Author, Unit] = ???
val writeHashtags: Sink[Hashtag, Unit] = ???
//#flow-graph-broadcast
}
"simple broadcast" in {
val writeAuthors: Sink[Author, Future[Unit]] = Sink.ignore
val writeHashtags: Sink[Hashtag, Future[Unit]] = Sink.ignore
// format: OFF
//#flow-graph-broadcast
val g = RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val bcast = b.add(Broadcast[Tweet](2))
tweets ~> bcast.in
bcast.out(0) ~> Flow[Tweet].map(_.author) ~> writeAuthors
bcast.out(1) ~> Flow[Tweet].mapConcat(_.hashtags.toList) ~> writeHashtags
ClosedShape
})
g.run()
//#flow-graph-broadcast
// format: ON
}
"slowProcessing" in {
def slowComputation(t: Tweet): Long = {
Thread.sleep(500) // act as if performing some heavy computation
42
}
//#tweets-slow-consumption-dropHead
tweets
.buffer(10, OverflowStrategy.dropHead)
.map(slowComputation)
.runWith(Sink.ignore)
//#tweets-slow-consumption-dropHead
}
"backpressure by readline" in {
trait X {
import scala.concurrent.duration._
//#backpressure-by-readline
val completion: Future[Unit] =
Source(1 to 10)
.map(i => { println(s"map => $i"); i })
.runForeach { i => readLine(s"Element = $i; continue reading? [press enter]\n") }
Await.ready(completion, 1.minute)
//#backpressure-by-readline
}
}
"count elements on finite stream" in {
//#tweets-fold-count
val count: Flow[Tweet, Int, Unit] = Flow[Tweet].map(_ => 1)
val sumSink: Sink[Int, Future[Int]] = Sink.fold[Int, Int](0)(_ + _)
val counterGraph: RunnableGraph[Future[Int]] =
tweets
.via(count)
.toMat(sumSink)(Keep.right)
val sum: Future[Int] = counterGraph.run()
sum.foreach(c => println(s"Total tweets processed: $c"))
//#tweets-fold-count
new AnyRef {
//#tweets-fold-count-oneline
val sum: Future[Int] = tweets.map(t => 1).runWith(sumSink)
//#tweets-fold-count-oneline
}
}
"materialize multiple times" in {
val tweetsInMinuteFromNow = tweets // not really tweets from a minute from now, just acting as if
//#tweets-runnable-flow-materialized-twice
val sumSink = Sink.fold[Int, Int](0)(_ + _)
val counterRunnableGraph: RunnableGraph[Future[Int]] =
tweetsInMinuteFromNow
.filter(_.hashtags contains akka)
.map(t => 1)
.toMat(sumSink)(Keep.right)
// materialize the stream once in the morning
val morningTweetsCount: Future[Int] = counterRunnableGraph.run()
// and once in the evening, reusing the flow
val eveningTweetsCount: Future[Int] = counterRunnableGraph.run()
//#tweets-runnable-flow-materialized-twice
val sum: Future[Int] = counterRunnableGraph.run()
sum.map { c => println(s"Total tweets processed: $c") }
}
}

View file

@ -0,0 +1,100 @@
package docs.stream.cookbook
import akka.stream.scaladsl.{ Flow, Sink, Source }
import akka.util.ByteString
import scala.concurrent.Await
import scala.concurrent.duration._
class RecipeByteStrings extends RecipeSpec {
"Recipes for bytestring streams" must {
"have a working chunker" in {
val rawBytes = Source(List(ByteString(1, 2), ByteString(3), ByteString(4, 5, 6), ByteString(7, 8, 9)))
val ChunkLimit = 2
//#bytestring-chunker
import akka.stream.stage._
class Chunker(val chunkSize: Int) extends PushPullStage[ByteString, ByteString] {
private var buffer = ByteString.empty
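// bytes are accumulated here and re-emitted in chunkSize-sized pieces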
override def onPush(elem: ByteString, ctx: Context[ByteString]): SyncDirective = {
buffer ++= elem
emitChunkOrPull(ctx)
}
override def onPull(ctx: Context[ByteString]): SyncDirective = emitChunkOrPull(ctx)
override def onUpstreamFinish(ctx: Context[ByteString]): TerminationDirective =
if (buffer.nonEmpty) ctx.absorbTermination()
else ctx.finish()
private def emitChunkOrPull(ctx: Context[ByteString]): SyncDirective = {
if (buffer.isEmpty) {
if (ctx.isFinishing) ctx.finish()
else ctx.pull()
} else {
val (emit, nextBuffer) = buffer.splitAt(chunkSize)
buffer = nextBuffer
ctx.push(emit)
}
}
}
val chunksStream = rawBytes.transform(() => new Chunker(ChunkLimit))
//#bytestring-chunker
val chunksFuture = chunksStream.grouped(10).runWith(Sink.head)
val chunks = Await.result(chunksFuture, 3.seconds)
chunks.forall(_.size <= 2) should be(true)
chunks.fold(ByteString())(_ ++ _) should be(ByteString(1, 2, 3, 4, 5, 6, 7, 8, 9))
}
"have a working bytes limiter" in {
val SizeLimit = 9
//#bytes-limiter
import akka.stream.stage._
class ByteLimiter(val maximumBytes: Long) extends PushStage[ByteString, ByteString] {
private var count = 0
override def onPush(chunk: ByteString, ctx: Context[ByteString]): SyncDirective = {
count += chunk.size
if (count > maximumBytes) ctx.fail(new IllegalStateException("Too many bytes"))
else ctx.push(chunk)
}
}
val limiter = Flow[ByteString].transform(() => new ByteLimiter(SizeLimit))
//#bytes-limiter
val bytes1 = Source(List(ByteString(1, 2), ByteString(3), ByteString(4, 5, 6), ByteString(7, 8, 9)))
val bytes2 = Source(List(ByteString(1, 2), ByteString(3), ByteString(4, 5, 6), ByteString(7, 8, 9, 10)))
Await.result(bytes1.via(limiter).grouped(10).runWith(Sink.head), 3.seconds)
.fold(ByteString())(_ ++ _) should be(ByteString(1, 2, 3, 4, 5, 6, 7, 8, 9))
an[IllegalStateException] must be thrownBy {
Await.result(bytes2.via(limiter).grouped(10).runWith(Sink.head), 3.seconds)
}
}
"demonstrate compacting" in {
val data = Source(List(ByteString(1, 2), ByteString(3), ByteString(4, 5, 6), ByteString(7, 8, 9)))
//#compacting-bytestrings
val compacted: Source[ByteString, Unit] = data.map(_.compact)
//#compacting-bytestrings
Await.result(compacted.grouped(10).runWith(Sink.head), 3.seconds).forall(_.isCompact) should be(true)
}
}
}

View file

@ -0,0 +1,90 @@
package docs.stream.cookbook
import akka.stream.{ ActorMaterializerSettings, ActorMaterializer }
import akka.stream.scaladsl._
import akka.stream.testkit._
import scala.collection.immutable
import scala.concurrent.Await
import scala.concurrent.duration._
class RecipeCollectingMetrics extends RecipeSpec {
import HoldOps._
implicit val m2 = ActorMaterializer(ActorMaterializerSettings(system).withInputBuffer(1, 1))
"Recipe for periodically collecting metrics" must {
"work" in {
// type Tick = Unit
//
// val loadPub = TestPublisher.manualProbe[Int]()
// val tickPub = TestPublisher.manualProbe[Tick]()
// val reportTicks = Source.fromPublisher(tickPub)
// val loadUpdates = Source.fromPublisher(loadPub)
// val futureSink = Sink.head[immutable.Seq[String]]
// val sink = Flow[String].grouped(10).to(futureSink)
//
// //#periodic-metrics-collection
// val currentLoad = loadUpdates.transform(() => new HoldWithWait)
//
// val graph = GraphDSL { implicit builder =>
// import FlowGraphImplicits._
// val collector = ZipWith[Int, Tick, String](
// (load: Int, tick: Tick) => s"current load is $load")
//
// currentLoad ~> collector.left
// reportTicks ~> collector.right
//
// collector.out ~> sink
// }
// //#periodic-metrics-collection
//
// val reports = graph.run().get(futureSink)
// val manualLoad = new StreamTestKit.AutoPublisher(loadPub)
// val manualTick = new StreamTestKit.AutoPublisher(tickPub)
//
// // Prefetch elimination
// manualTick.sendNext(())
//
// manualLoad.sendNext(53)
// manualLoad.sendNext(61)
// manualTick.sendNext(())
//
// manualLoad.sendNext(44)
// manualLoad.sendNext(54)
// manualLoad.sendNext(78)
// Thread.sleep(500)
//
// manualTick.sendNext(())
//
// manualTick.sendComplete()
//
// Await.result(reports, 3.seconds) should be(List("current load is 53", "current load is 61", "current load is 78"))
// Periodically collect values of metrics expressed as stream of updates
// ---------------------------------------------------------------------
//
// **Situation:** Given performance counters expressed as a stream of updates we want to gather a periodic report of these.
// We do not want to backpressure the counter updates but always take the last value instead. Whenever we don't have a new counter
// value we want to repeat the last value.
//
// This recipe uses the :class:`HoldWithWait` recipe introduced previously. We use this element to gather updates from
// the counter stream and store the final value, and also repeat this final value if no update is received between
// metrics collection rounds.
//
// To finish the recipe, we simply use :class:`ZipWith` to trigger reading the latest value from the ``currentLoad``
// stream whenever a new ``Tick`` arrives on the stream of ticks, ``reportTicks``.
//
// .. includecode:: ../code/docs/stream/cookbook/RecipeCollectingMetrics.scala#periodic-metrics-collection
//
// .. warning::
// In order for this recipe to work the buffer size for the :class:`ZipWith` must be set to 1. The reason for this is
// explained in the "Buffering" section of the documentation.
// FIXME: This recipe only works with a buffer size of 0, which is only available if graph fusing is implemented
pending
}
}
}

View file

@ -0,0 +1,62 @@
package docs.stream.cookbook
import java.security.MessageDigest
import akka.stream.scaladsl.{ Sink, Source }
import akka.util.ByteString
import scala.concurrent.Await
import scala.concurrent.duration._
class RecipeDigest extends RecipeSpec {
"Recipe for calculating digest" must {
"work" in {
val data = Source(List(
ByteString("abcdbcdecdef"),
ByteString("defgefghfghighijhijkijkljklmklmnlmnomnopnopq")))
//#calculating-digest
import akka.stream.stage._
def digestCalculator(algorithm: String) = new PushPullStage[ByteString, ByteString] {
val digest = MessageDigest.getInstance(algorithm)
override def onPush(chunk: ByteString, ctx: Context[ByteString]): SyncDirective = {
digest.update(chunk.toArray)
ctx.pull()
}
override def onPull(ctx: Context[ByteString]): SyncDirective = {
if (ctx.isFinishing) ctx.pushAndFinish(ByteString(digest.digest()))
else ctx.pull()
}
override def onUpstreamFinish(ctx: Context[ByteString]): TerminationDirective = {
// If the stream is finished, we need to emit the last element in the onPull block.
// It is not allowed to directly emit elements from a termination block
// (onUpstreamFinish or onUpstreamFailure)
ctx.absorbTermination()
}
}
val digest: Source[ByteString, Unit] = data.transform(() => digestCalculator("SHA-256"))
//#calculating-digest
Await.result(digest.runWith(Sink.head), 3.seconds) should be(
ByteString(
0x24, 0x8d, 0x6a, 0x61,
0xd2, 0x06, 0x38, 0xb8,
0xe5, 0xc0, 0x26, 0x93,
0x0c, 0x3e, 0x60, 0x39,
0xa3, 0x3c, 0xe4, 0x59,
0x64, 0xff, 0x21, 0x67,
0xf6, 0xec, 0xed, 0xd4,
0x19, 0xdb, 0x06, 0xc1))
}
}
}

View file

@ -0,0 +1,65 @@
package docs.stream.cookbook
import akka.stream.{ ClosedShape, OverflowStrategy }
import akka.stream.scaladsl._
import akka.stream.testkit._
import scala.collection.immutable
import scala.concurrent.Await
import scala.concurrent.duration._
class RecipeDroppyBroadcast extends RecipeSpec {
"Recipe for a droppy broadcast" must {
"work" in {
val pub = TestPublisher.probe[Int]()
val myElements = Source.fromPublisher(pub)
val sub1 = TestSubscriber.manualProbe[Int]()
val sub2 = TestSubscriber.manualProbe[Int]()
val sub3 = TestSubscriber.probe[Int]()
val futureSink = Sink.head[Seq[Int]]
val mySink1 = Sink.fromSubscriber(sub1)
val mySink2 = Sink.fromSubscriber(sub2)
val mySink3 = Sink.fromSubscriber(sub3)
//#droppy-bcast
val graph = RunnableGraph.fromGraph(GraphDSL.create(mySink1, mySink2, mySink3)((_, _, _)) { implicit b =>
(sink1, sink2, sink3) =>
import GraphDSL.Implicits._
val bcast = b.add(Broadcast[Int](3))
myElements ~> bcast
bcast.buffer(10, OverflowStrategy.dropHead) ~> sink1
bcast.buffer(10, OverflowStrategy.dropHead) ~> sink2
bcast.buffer(10, OverflowStrategy.dropHead) ~> sink3
ClosedShape
})
//#droppy-bcast
graph.run()
sub3.request(100)
for (i <- 1 to 100) {
pub.sendNext(i)
sub3.expectNext(i)
}
pub.sendComplete()
sub1.expectSubscription().request(10)
sub2.expectSubscription().request(10)
for (i <- 91 to 100) {
sub1.expectNext(i)
sub2.expectNext(i)
}
sub1.expectComplete()
sub2.expectComplete()
}
}
}

View file

@ -0,0 +1,28 @@
package docs.stream.cookbook
import akka.stream.scaladsl.{ Sink, Source }
import scala.collection.immutable
import scala.concurrent.Await
import scala.concurrent.duration._
class RecipeFlattenSeq extends RecipeSpec {
"Recipe for flatteing a stream of seqs" must {
"work" in {
val someDataSource = Source(List(List("1"), List("2"), List("3", "4", "5"), List("6", "7")))
//#flattening-seqs
val myData: Source[List[Message], Unit] = someDataSource
val flattened: Source[Message, Unit] = myData.mapConcat(identity)
//#flattening-seqs
Await.result(flattened.grouped(8).runWith(Sink.head), 3.seconds) should be(List("1", "2", "3", "4", "5", "6", "7"))
}
}
}

View file

@ -0,0 +1,135 @@
package docs.stream.cookbook
import akka.actor.{ Props, ActorRef, Actor }
import akka.actor.Actor.Receive
import akka.stream.ClosedShape
import akka.stream.scaladsl._
import akka.stream.testkit._
import scala.collection.immutable
import scala.concurrent.duration._
class RecipeGlobalRateLimit extends RecipeSpec {
"Global rate limiting recipe" must {
//#global-limiter-actor
object Limiter {
case object WantToPass
case object MayPass
case object ReplenishTokens
def props(maxAvailableTokens: Int, tokenRefreshPeriod: FiniteDuration,
tokenRefreshAmount: Int): Props =
Props(new Limiter(maxAvailableTokens, tokenRefreshPeriod, tokenRefreshAmount))
}
class Limiter(
val maxAvailableTokens: Int,
val tokenRefreshPeriod: FiniteDuration,
val tokenRefreshAmount: Int) extends Actor {
import Limiter._
import context.dispatcher
import akka.actor.Status
private var waitQueue = immutable.Queue.empty[ActorRef]
private var permitTokens = maxAvailableTokens
private val replenishTimer = system.scheduler.schedule(
initialDelay = tokenRefreshPeriod,
interval = tokenRefreshPeriod,
receiver = self,
ReplenishTokens)
override def receive: Receive = open
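// open: hand out a token to every requester right away; switch to `closed`
// once the token pool is exhausted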
val open: Receive = {
case ReplenishTokens =>
permitTokens = math.min(permitTokens + tokenRefreshAmount, maxAvailableTokens)
case WantToPass =>
permitTokens -= 1
sender() ! MayPass
if (permitTokens == 0) context.become(closed)
}
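// closed: queue requesters until the next ReplenishTokens tick frees tokens again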
val closed: Receive = {
case ReplenishTokens =>
permitTokens = math.min(permitTokens + tokenRefreshAmount, maxAvailableTokens)
releaseWaiting()
case WantToPass =>
waitQueue = waitQueue.enqueue(sender())
}
private def releaseWaiting(): Unit = {
val (toBeReleased, remainingQueue) = waitQueue.splitAt(permitTokens)
waitQueue = remainingQueue
permitTokens -= toBeReleased.size
toBeReleased foreach (_ ! MayPass)
if (permitTokens > 0) context.become(open)
}
override def postStop(): Unit = {
replenishTimer.cancel()
waitQueue foreach (_ ! Status.Failure(new IllegalStateException("limiter stopped")))
}
}
//#global-limiter-actor
"work" in {
//#global-limiter-flow
def limitGlobal[T](limiter: ActorRef, maxAllowedWait: FiniteDuration): Flow[T, T, Unit] = {
import akka.pattern.ask
import akka.util.Timeout
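// ask the limiter for permission before passing each element on;
// mapAsync(4) keeps at most four permission requests in flight per materialization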
Flow[T].mapAsync(4)((element: T) => {
import system.dispatcher
implicit val triggerTimeout = Timeout(maxAllowedWait)
val limiterTriggerFuture = limiter ? Limiter.WantToPass
limiterTriggerFuture.map((_) => element)
})
}
//#global-limiter-flow
// Use a large period and emulate the timer by hand instead
val limiter = system.actorOf(Limiter.props(2, 100.days, 1), "limiter")
val source1 = Source.fromIterator(() => Iterator.continually("E1")).via(limitGlobal(limiter, 2.seconds))
val source2 = Source.fromIterator(() => Iterator.continually("E2")).via(limitGlobal(limiter, 2.seconds))
val probe = TestSubscriber.manualProbe[String]()
RunnableGraph.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val merge = b.add(Merge[String](2))
source1 ~> merge ~> Sink.fromSubscriber(probe)
source2 ~> merge
ClosedShape
}).run()
probe.expectSubscription().request(1000)
probe.expectNext() should startWith("E")
probe.expectNext() should startWith("E")
probe.expectNoMsg(500.millis)
limiter ! Limiter.ReplenishTokens
probe.expectNext() should startWith("E")
probe.expectNoMsg(500.millis)
var resultSet = Set.empty[String]
for (_ <- 1 to 100) {
limiter ! Limiter.ReplenishTokens
resultSet += probe.expectNext()
}
resultSet.contains("E1") should be(true)
resultSet.contains("E2") should be(true)
probe.expectError()
}
}
}

View file

@@ -0,0 +1,115 @@
package docs.stream.cookbook
import akka.stream.scaladsl.{ Sink, Source }
import akka.stream.testkit._
import scala.concurrent.duration._
object HoldOps {
//#hold-version-1
import akka.stream.stage._
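// answers every downstream pull immediately with the most recent element seen,
// or with `initial` before the first element arrives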
class HoldWithInitial[T](initial: T) extends DetachedStage[T, T] {
private var currentValue: T = initial
override def onPush(elem: T, ctx: DetachedContext[T]): UpstreamDirective = {
currentValue = elem
ctx.pull()
}
override def onPull(ctx: DetachedContext[T]): DownstreamDirective = {
ctx.push(currentValue)
}
}
//#hold-version-1
//#hold-version-2
import akka.stream.stage._
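// like HoldWithInitial, but holds the first downstream pull until at least one
// element has arrived, so no artificial initial value is ever emitted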
class HoldWithWait[T] extends DetachedStage[T, T] {
private var currentValue: T = _
private var waitingFirstValue = true
override def onPush(elem: T, ctx: DetachedContext[T]): UpstreamDirective = {
currentValue = elem
waitingFirstValue = false
if (ctx.isHoldingDownstream) ctx.pushAndPull(currentValue)
else ctx.pull()
}
override def onPull(ctx: DetachedContext[T]): DownstreamDirective = {
if (waitingFirstValue) ctx.holdDownstream()
else ctx.push(currentValue)
}
}
//#hold-version-2
}
class RecipeHold extends RecipeSpec {
import HoldOps._
"Recipe for creating a holding element" must {
"work for version 1" in {
val pub = TestPublisher.probe[Int]()
val sub = TestSubscriber.manualProbe[Int]()
val source = Source.fromPublisher(pub)
val sink = Sink.fromSubscriber(sub)
source.transform(() => new HoldWithInitial(0)).to(sink).run()
val subscription = sub.expectSubscription()
sub.expectNoMsg(100.millis)
subscription.request(1)
sub.expectNext(0)
subscription.request(1)
sub.expectNext(0)
pub.sendNext(1)
pub.sendNext(2)
subscription.request(2)
sub.expectNext(2)
sub.expectNext(2)
pub.sendComplete()
subscription.request(1)
sub.expectComplete()
}
"work for version 2" in {
val pub = TestPublisher.probe[Int]()
val sub = TestSubscriber.manualProbe[Int]()
val source = Source.fromPublisher(pub)
val sink = Sink.fromSubscriber(sub)
source.transform(() => new HoldWithWait).to(sink).run()
val subscription = sub.expectSubscription()
sub.expectNoMsg(100.millis)
subscription.request(1)
sub.expectNoMsg(100.millis)
pub.sendNext(1)
sub.expectNext(1)
pub.sendNext(2)
pub.sendNext(3)
subscription.request(2)
sub.expectNext(3)
sub.expectNext(3)
pub.sendComplete()
subscription.request(1)
sub.expectComplete()
}
}
}

View file

@@ -0,0 +1,25 @@
package docs.stream.cookbook
import akka.stream.ClosedShape
import akka.stream.scaladsl._
import akka.stream.testkit._
import akka.util.ByteString
class RecipeKeepAlive extends RecipeSpec {
"Recipe for injecting keepalive messages" must {
"work" in {
val keepaliveMessage = ByteString(11)
//#inject-keepalive
import scala.concurrent.duration._
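// injects keepaliveMessage whenever upstream has produced nothing for 1 second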
val injectKeepAlive: Flow[ByteString, ByteString, Unit] =
Flow[ByteString].keepAlive(1.second, () => keepaliveMessage)
//#inject-keepalive
// No need to test, this is a built-in stage with proper tests
}
}
}

View file

@@ -0,0 +1,50 @@
package docs.stream.cookbook
import akka.event.Logging
import akka.stream.Attributes
import akka.stream.scaladsl.{ Sink, Source }
import akka.testkit.{ EventFilter, TestProbe }
class RecipeLoggingElements extends RecipeSpec {
"Simple logging recipe" must {
"work with println" in {
val printProbe = TestProbe()
def println(s: String): Unit = printProbe.ref ! s
val mySource = Source(List("1", "2", "3"))
//#println-debug
val loggedSource = mySource.map { elem => println(elem); elem }
//#println-debug
loggedSource.runWith(Sink.ignore)
printProbe.expectMsgAllOf("1", "2", "3")
}
"use log()" in {
val mySource = Source(List("1", "2", "3"))
def analyse(s: String) = s
//#log-custom
// customise log levels
mySource.log("before-map")
.withAttributes(Attributes.logLevels(onElement = Logging.WarningLevel))
.map(analyse)
// or provide custom logging adapter
implicit val adapter = Logging(system, "customLogger")
mySource.log("custom")
//#log-custom
val loggedSource = mySource.log("custom")
EventFilter.debug(start = "[custom] Element: ").intercept {
loggedSource.runWith(Sink.ignore)
}
}
}
}

View file

@@ -0,0 +1,93 @@
package docs.stream.cookbook
import akka.stream.ClosedShape
import akka.stream.scaladsl._
import akka.stream.testkit._
import scala.concurrent.duration._
class RecipeManualTrigger extends RecipeSpec {
"Recipe for triggering a stream manually" must {
"work" in {
val elements = Source(List("1", "2", "3", "4"))
val pub = TestPublisher.probe[Trigger]()
val sub = TestSubscriber.manualProbe[Message]()
val triggerSource = Source.fromPublisher(pub)
val sink = Sink.fromSubscriber(sub)
//#manually-triggered-stream
val graph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
import GraphDSL.Implicits._
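// Zip emits only when both inputs have an element available, so each message
// is released exactly when a trigger arrives on the side channel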
val zip = builder.add(Zip[Message, Trigger]())
elements ~> zip.in0
triggerSource ~> zip.in1
zip.out ~> Flow[(Message, Trigger)].map { case (msg, trigger) => msg } ~> sink
ClosedShape
})
//#manually-triggered-stream
graph.run()
sub.expectSubscription().request(1000)
sub.expectNoMsg(100.millis)
pub.sendNext(())
sub.expectNext("1")
sub.expectNoMsg(100.millis)
pub.sendNext(())
pub.sendNext(())
sub.expectNext("2")
sub.expectNext("3")
sub.expectNoMsg(100.millis)
pub.sendNext(())
sub.expectNext("4")
sub.expectComplete()
}
"work with ZipWith" in {
val elements = Source(List("1", "2", "3", "4"))
val pub = TestPublisher.probe[Trigger]()
val sub = TestSubscriber.manualProbe[Message]()
val triggerSource = Source.fromPublisher(pub)
val sink = Sink.fromSubscriber(sub)
//#manually-triggered-stream-zipwith
val graph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
import GraphDSL.Implicits._
val zip = builder.add(ZipWith((msg: Message, trigger: Trigger) => msg))
elements ~> zip.in0
triggerSource ~> zip.in1
zip.out ~> sink
ClosedShape
})
//#manually-triggered-stream-zipwith
graph.run()
sub.expectSubscription().request(1000)
sub.expectNoMsg(100.millis)
pub.sendNext(())
sub.expectNext("1")
sub.expectNoMsg(100.millis)
pub.sendNext(())
pub.sendNext(())
sub.expectNext("2")
sub.expectNext("3")
sub.expectNoMsg(100.millis)
pub.sendNext(())
sub.expectNext("4")
sub.expectComplete()
}
}
}

View file

@@ -0,0 +1,57 @@
package docs.stream.cookbook
import akka.stream.scaladsl._
import akka.stream.testkit._
import scala.concurrent.duration._
import akka.testkit.TestLatch
import scala.concurrent.Await
class RecipeMissedTicks extends RecipeSpec {
"Recipe for collecting missed ticks" must {
"work" in {
type Tick = Unit
val pub = TestPublisher.probe[Tick]()
val sub = TestSubscriber.manualProbe[Int]()
val tickStream = Source.fromPublisher(pub)
val sink = Sink.fromSubscriber(sub)
//#missed-ticks
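// seed maps the first unconsumed tick to 0; every further tick arriving before
// downstream demands again increments the count of missed ticks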
val missedTicks: Flow[Tick, Int, Unit] =
Flow[Tick].conflate(seed = (_) => 0)(
(missedTicks, tick) => missedTicks + 1)
//#missed-ticks
val latch = TestLatch(3)
val realMissedTicks: Flow[Tick, Int, Unit] =
Flow[Tick].conflate(seed = (_) => 0)(
(missedTicks, tick) => { latch.countDown(); missedTicks + 1 })
tickStream.via(realMissedTicks).to(sink).run()
pub.sendNext(())
pub.sendNext(())
pub.sendNext(())
pub.sendNext(())
val subscription = sub.expectSubscription()
Await.ready(latch, 1.second)
subscription.request(1)
sub.expectNext(3)
subscription.request(1)
sub.expectNoMsg(100.millis)
pub.sendNext(())
sub.expectNext(0)
pub.sendComplete()
subscription.request(1)
sub.expectComplete()
}
}
}

View file

@@ -0,0 +1,58 @@
package docs.stream.cookbook
import akka.stream.scaladsl.{ Sink, Source }
import scala.collection.immutable
import scala.concurrent.Await
import scala.concurrent.duration._
class RecipeMultiGroupBy extends RecipeSpec {
"Recipe for multi-groupBy" must {
"work" in {
case class Topic(name: String)
val elems = Source(List("1: a", "1: b", "all: c", "all: d", "1: e"))
val extractTopics = { msg: Message =>
if (msg.startsWith("1")) List(Topic("1"))
else List(Topic("1"), Topic("2"))
}
//#multi-groupby
val topicMapper: (Message) => immutable.Seq[Topic] = extractTopics
val messageAndTopic: Source[(Message, Topic), Unit] = elems.mapConcat { msg: Message =>
val topicsForMessage = topicMapper(msg)
// Create a (Msg, Topic) pair for each of the topics
// the message belongs to
topicsForMessage.map(msg -> _)
}
val multiGroups = messageAndTopic
.groupBy(2, _._2).map {
case (msg, topic) =>
// do what needs to be done
//#multi-groupby
(msg, topic)
//#multi-groupby
}
//#multi-groupby
val result = multiGroups
.grouped(10)
.mergeSubstreams
.map(g => g.head._2.name + g.map(_._1).mkString("[", ", ", "]"))
.grouped(10)
.runWith(Sink.head)
Await.result(result, 3.seconds).toSet should be(Set(
"1[1: a, 1: b, all: c, all: d, 1: e]",
"2[all: c, all: d]"))
}
}
}

View file

@@ -0,0 +1,39 @@
package docs.stream.cookbook
import akka.stream.scaladsl.Sink
import akka.stream.scaladsl.Source
import akka.util.ByteString
import scala.annotation.tailrec
import scala.concurrent.Await
import scala.concurrent.duration._
class RecipeParseLines extends RecipeSpec {
"Recipe for parsing line from bytes" must {
"work" in {
val rawData = Source(List(
ByteString("Hello World"),
ByteString("\r"),
ByteString("!\r"),
ByteString("\nHello Akka!\r\nHello Streams!"),
ByteString("\r\n\r\n")))
//#parse-lines
import akka.stream.io.Framing
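// Framing.delimiter re-chunks arbitrary ByteString fragments into complete
// "\r\n"-delimited lines; allowTruncation emits a trailing line without a
// delimiter instead of failing the stream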
val linesStream = rawData.via(Framing.delimiter(
ByteString("\r\n"), maximumFrameLength = 100, allowTruncation = true))
.map(_.utf8String)
//#parse-lines
Await.result(linesStream.grouped(10).runWith(Sink.head), 3.seconds) should be(List(
"Hello World\r!",
"Hello Akka!",
"Hello Streams!",
""))
}
}
}

View file

@@ -0,0 +1,81 @@
package docs.stream.cookbook
import akka.stream.{ Graph, FlowShape, Inlet, Outlet, Attributes, OverflowStrategy }
import akka.stream.scaladsl._
import scala.concurrent.{ Await, Future }
import scala.concurrent.duration._
import akka.stream.stage.{ GraphStage, GraphStageLogic }
class RecipeReduceByKey extends RecipeSpec {
"Reduce by key recipe" must {
val MaximumDistinctWords = 1000
"work with simple word count" in {
def words = Source(List("hello", "world", "and", "hello", "universe", "akka") ++ List.fill(1000)("rocks!"))
//#word-count
val counts: Source[(String, Int), Unit] = words
// split the words into separate streams first
.groupBy(MaximumDistinctWords, identity)
// add counting logic to the streams
.fold(("", 0)) {
case ((_, count), word) => (word, count + 1)
}
// get a stream of word counts
.mergeSubstreams
//#word-count
Await.result(counts.grouped(10).runWith(Sink.head), 3.seconds).toSet should be(Set(
("hello", 2),
("world", 1),
("and", 1),
("universe", 1),
("akka", 1),
("rocks!", 1000)))
}
"work generalized" in {
def words = Source(List("hello", "world", "and", "hello", "universe", "akka") ++ List.fill(1000)("rocks!"))
//#reduce-by-key-general
def reduceByKey[In, K, Out](
maximumGroupSize: Int,
groupKey: (In) => K,
foldZero: (K) => Out)(fold: (Out, In) => Out): Flow[In, (K, Out), Unit] = {
Flow[In]
.groupBy(maximumGroupSize, groupKey)
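// fold each substream into an Option of (key, accumulated value);
// the first element of a substream establishes the key and its fold zero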
.fold(Option.empty[(K, Out)]) {
case (None, elem) =>
val key = groupKey(elem)
Some((key, fold(foldZero(key), elem)))
case (Some((key, out)), elem) =>
Some((key, fold(out, elem)))
}
.map(_.get)
.mergeSubstreams
}
val wordCounts = words.via(reduceByKey(
MaximumDistinctWords,
groupKey = (word: String) => word,
foldZero = (key: String) => 0)(fold = (count: Int, elem: String) => count + 1))
//#reduce-by-key-general
Await.result(wordCounts.grouped(10).runWith(Sink.head), 3.seconds).toSet should be(Set(
("hello", 2),
("world", 1),
("and", 1),
("universe", 1),
("akka", 1),
("rocks!", 1000)))
}
}
}

View file

@@ -0,0 +1,49 @@
package docs.stream.cookbook
import akka.stream.scaladsl.{ Flow, Sink, Source }
import akka.stream.testkit._
import scala.concurrent.duration._
import akka.testkit.TestLatch
import scala.concurrent.Await
class RecipeSimpleDrop extends RecipeSpec {
"Recipe for simply dropping elements for a faster stream" must {
"work" in {
//#simple-drop
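// conflate keeps only the newest message while downstream is busy, dropping
// everything a slow consumer could not keep up with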
val droppyStream: Flow[Message, Message, Unit] =
Flow[Message].conflate(seed = identity)((lastMessage, newMessage) => newMessage)
//#simple-drop
val latch = TestLatch(2)
val realDroppyStream =
Flow[Message].conflate(seed = identity)((lastMessage, newMessage) => { latch.countDown(); newMessage })
val pub = TestPublisher.probe[Message]()
val sub = TestSubscriber.manualProbe[Message]()
val messageSource = Source.fromPublisher(pub)
val sink = Sink.fromSubscriber(sub)
messageSource.via(realDroppyStream).to(sink).run()
val subscription = sub.expectSubscription()
sub.expectNoMsg(100.millis)
pub.sendNext("1")
pub.sendNext("2")
pub.sendNext("3")
Await.ready(latch, 1.second)
subscription.request(1)
sub.expectNext("3")
pub.sendComplete()
subscription.request(1)
sub.expectComplete()
}
}
}

View file

@@ -0,0 +1,13 @@
package docs.stream.cookbook
import akka.stream.ActorMaterializer
import akka.stream.testkit.AkkaSpec
trait RecipeSpec extends AkkaSpec {
implicit val m = ActorMaterializer()
type Message = String
type Trigger = Unit
type Job = String
}

View file

@@ -0,0 +1,27 @@
package docs.stream.cookbook
import akka.stream.scaladsl.{ Sink, Source }
import scala.collection.immutable
import scala.concurrent.{ Await, Future }
import scala.concurrent.duration._
class RecipeToStrict extends RecipeSpec {
"Recipe for draining a stream into a strict collection" must {
"work" in {
val myData = Source(List("1", "2", "3"))
val MaxAllowedSeqSize = 100
//#draining-to-seq
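// grouped(MaxAllowedSeqSize) collects up to that many elements into one Seq,
// and Sink.head materializes it as a Future of that strict collection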
val strict: Future[immutable.Seq[Message]] =
myData.grouped(MaxAllowedSeqSize).runWith(Sink.head)
//#draining-to-seq
Await.result(strict, 3.seconds) should be(List("1", "2", "3"))
}
}
}

View file

@@ -0,0 +1,48 @@
package docs.stream.cookbook
import akka.stream.FlowShape
import akka.stream.scaladsl._
import akka.testkit.TestProbe
import scala.concurrent.Await
import scala.concurrent.duration._
class RecipeWorkerPool extends RecipeSpec {
"Recipe for a pool of workers" must {
"work" in {
val myJobs = Source(List("1", "2", "3", "4", "5"))
type Result = String
val worker = Flow[String].map(_ + " done")
//#worker-pool
def balancer[In, Out](worker: Flow[In, Out, Any], workerCount: Int): Flow[In, Out, Unit] = {
import GraphDSL.Implicits._
Flow.fromGraph(GraphDSL.create() { implicit b =>
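// Balance routes each element to the first worker that signals demand;
// waitForAllDownstreams delays the first emit until every worker has subscribed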
val balancer = b.add(Balance[In](workerCount, waitForAllDownstreams = true))
val merge = b.add(Merge[Out](workerCount))
for (_ <- 1 to workerCount) {
// for each worker, add an edge from the balancer to the worker, then wire
// it to the merge element
balancer ~> worker ~> merge
}
FlowShape(balancer.in, merge.out)
})
}
val processedJobs: Source[Result, Unit] = myJobs.via(balancer(worker, 3))
//#worker-pool
Await.result(processedJobs.grouped(10).runWith(Sink.head), 3.seconds).toSet should be(Set(
"1 done", "2 done", "3 done", "4 done", "5 done"))
}
}
}

View file

@@ -0,0 +1,60 @@
/*
* Copyright (C) 2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream.io
import java.io.File
import akka.stream._
import akka.stream.scaladsl.{ FileIO, Sink, Source }
import akka.stream.testkit.Utils._
import akka.stream.testkit._
import akka.util.ByteString
import scala.concurrent.Future
class StreamFileDocSpec extends AkkaSpec(UnboundedMailboxConfig) {
implicit val ec = system.dispatcher
implicit val materializer = ActorMaterializer()
// silence sysout
def println(s: String) = ()
val file = File.createTempFile(getClass.getName, ".tmp")
override def afterTermination() = file.delete()
{
//#file-source
import akka.stream.io._
//#file-source
Thread.sleep(0) // needs a statement here for valid syntax and to avoid "unused" warnings
}
{
//#file-source
val file = new File("example.csv")
//#file-source
}
"read data from a file" in {
//#file-source
def handle(b: ByteString): Unit //#file-source
= ()
//#file-source
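// the file source materializes a Future[Long] that completes with the number
// of bytes read once the stream finishes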
val foreach: Future[Long] = FileIO.fromFile(file)
.to(Sink.ignore)
.run()
//#file-source
}
"configure dispatcher in code" in {
//#custom-dispatcher-code
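// run the file IO stage on a dedicated dispatcher so blocking reads
// do not starve the default dispatcher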
FileIO.fromFile(file)
.withAttributes(ActorAttributes.dispatcher("custom-blocking-io-dispatcher"))
//#custom-dispatcher-code
}
}

View file

@@ -0,0 +1,165 @@
/**
* Copyright (C) 2014 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.stream.io
import java.util.concurrent.atomic.AtomicReference
import akka.stream._
import akka.stream.scaladsl.Tcp._
import akka.stream.scaladsl._
import akka.stream.stage.{ Context, PushStage, SyncDirective }
import akka.stream.testkit.AkkaSpec
import akka.testkit.TestProbe
import akka.util.ByteString
import docs.utils.TestUtils
import scala.concurrent.Future
class StreamTcpDocSpec extends AkkaSpec {
implicit val ec = system.dispatcher
implicit val materializer = ActorMaterializer()
// silence sysout
def println(s: String) = ()
"simple server connection" in {
{
//#echo-server-simple-bind
val binding: Future[ServerBinding] =
Tcp().bind("127.0.0.1", 8888).to(Sink.ignore).run()
binding.map { b =>
b.unbind() onComplete {
case _ => // ...
}
}
//#echo-server-simple-bind
}
{
val (host, port) = TestUtils.temporaryServerHostnameAndPort()
//#echo-server-simple-handle
import akka.stream.io.Framing
val connections: Source[IncomingConnection, Future[ServerBinding]] =
Tcp().bind(host, port)
connections runForeach { connection =>
println(s"New connection from: ${connection.remoteAddress}")
val echo = Flow[ByteString]
.via(Framing.delimiter(
ByteString("\n"),
maximumFrameLength = 256,
allowTruncation = true))
.map(_.utf8String)
.map(_ + "!!!\n")
.map(ByteString(_))
connection.handleWith(echo)
}
//#echo-server-simple-handle
}
}
"initial server banner echo server" in {
val localhost = TestUtils.temporaryServerAddress()
val connections = Tcp().bind(localhost.getHostName, localhost.getPort) // TODO getHostString in Java7
val serverProbe = TestProbe()
import akka.stream.io.Framing
//#welcome-banner-chat-server
connections runForeach { connection =>
val serverLogic = Flow.fromGraph(GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
// server logic, parses incoming commands
val commandParser = new PushStage[String, String] {
override def onPush(elem: String, ctx: Context[String]): SyncDirective = {
elem match {
case "BYE" ctx.finish()
case _ ctx.push(elem + "!")
}
}
}
import connection._
val welcomeMsg = s"Welcome to: $localAddress, you are: $remoteAddress!\n"
val welcome = Source.single(ByteString(welcomeMsg))
val echo = b.add(Flow[ByteString]
.via(Framing.delimiter(
ByteString("\n"),
maximumFrameLength = 256,
allowTruncation = true))
.map(_.utf8String)
//#welcome-banner-chat-server
.map { command => serverProbe.ref ! command; command }
//#welcome-banner-chat-server
.transform(() => commandParser)
.map(_ + "\n")
.map(ByteString(_)))
val concat = b.add(Concat[ByteString]())
// first we emit the welcome message,
welcome ~> concat.in(0)
// then we continue using the echo-logic Flow
echo.outlet ~> concat.in(1)
FlowShape(echo.in, concat.out)
})
connection.handleWith(serverLogic)
}
//#welcome-banner-chat-server
import akka.stream.io.Framing
val input = new AtomicReference("Hello world" :: "What a lovely day" :: Nil)
def readLine(prompt: String): String = {
input.get() match {
case all @ cmd :: tail if input.compareAndSet(all, tail) => cmd
case _ => "q"
}
}
{
//#repl-client
val connection = Tcp().outgoingConnection("127.0.0.1", 8888)
//#repl-client
}
{
val connection = Tcp().outgoingConnection(localhost)
//#repl-client
val replParser = new PushStage[String, ByteString] {
override def onPush(elem: String, ctx: Context[ByteString]): SyncDirective = {
elem match {
case "q" ctx.pushAndFinish(ByteString("BYE\n"))
case _ ctx.push(ByteString(s"$elem\n"))
}
}
}
val repl = Flow[ByteString]
.via(Framing.delimiter(
ByteString("\n"),
maximumFrameLength = 256,
allowTruncation = true))
.map(_.utf8String)
.map(text => println("Server: " + text))
.map(_ => readLine("> "))
.transform(() => replParser)
connection.join(repl).run()
}
//#repl-client
serverProbe.expectMsg("Hello world")
serverProbe.expectMsg("What a lovely day")
serverProbe.expectMsg("BYE")
}
}

View file

@@ -0,0 +1,24 @@
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package docs.utils
import java.net.InetSocketAddress
import java.nio.channels.ServerSocketChannel
object TestUtils {
def temporaryServerAddress(interface: String = "127.0.0.1"): InetSocketAddress = {
val serverSocket = ServerSocketChannel.open()
try {
serverSocket.socket.bind(new InetSocketAddress(interface, 0))
val port = serverSocket.socket.getLocalPort
new InetSocketAddress(interface, port)
} finally serverSocket.close()
}
def temporaryServerHostnameAndPort(interface: String = "127.0.0.1"): (String, Int) = {
val socketAddress = temporaryServerAddress(interface)
socketAddress.getHostName -> socketAddress.getPort // TODO getHostString in Java7
}
}