Update to a working version of Scalariform
parent cae070bd93
commit c66ce62d63
616 changed files with 5966 additions and 5436 deletions
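The sbt build change that drives these rewrites is not shown in this excerpt. As a rough, hypothetical illustration only (the key and preference names come from the sbt-scalariform plugin and Scalariform's preference API in general, not from this commit), a formatting setup of this general shape yields the kind of output seen in the hunks below: ASCII => arrows instead of ⇒, and long argument lists broken after the opening parenthesis.

// Hypothetical build.sbt fragment, for illustration only (not part of this diff)
import com.typesafe.sbt.SbtScalariform.ScalariformKeys
import scalariform.formatter.preferences._

ScalariformKeys.preferences := ScalariformKeys.preferences.value
  .setPreference(RewriteArrowSymbols, false)           // keep => rather than ⇒
  .setPreference(AlignSingleLineCaseStatements, false)
  .setPreference(DanglingCloseParenthesis, Preserve)   // do not force ) onto its own line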
@@ -22,7 +22,8 @@ class DangerousActor extends Actor with ActorLogging {
import context.dispatcher
val breaker =
new CircuitBreaker(context.system.scheduler,
new CircuitBreaker(
context.system.scheduler,
maxFailures = 5,
callTimeout = 10.seconds,
resetTimeout = 1.minute).onOpen(notifyMeOnOpen())

@@ -134,10 +134,11 @@ class CounterService extends Actor {
// Restart the storage child when StorageException is thrown.
// After 3 restarts within 5 seconds it will be stopped.
override val supervisorStrategy = OneForOneStrategy(maxNrOfRetries = 3,
override val supervisorStrategy = OneForOneStrategy(
maxNrOfRetries = 3,
withinTimeRange = 5 seconds) {
case _: Storage.StorageException => Restart
}
case _: Storage.StorageException => Restart
}
val key = self.path.name
var storage: Option[ActorRef] = None
@@ -102,7 +102,8 @@ object FaultHandlingDocSpec {
class FaultHandlingDocSpec(_system: ActorSystem) extends TestKit(_system)
with ImplicitSender with FlatSpecLike with Matchers with BeforeAndAfterAll {
def this() = this(ActorSystem("FaultHandlingDocSpec",
def this() = this(ActorSystem(
"FaultHandlingDocSpec",
ConfigFactory.parseString("""
akka {
loggers = ["akka.testkit.TestEventListener"]

@@ -53,7 +53,8 @@ class SchedulerDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) {
//This will schedule to send the Tick-message
//to the tickActor after 0ms repeating every 50ms
val cancellable =
system.scheduler.schedule(0 milliseconds,
system.scheduler.schedule(
0 milliseconds,
50 milliseconds,
tickActor,
Tick)

@@ -121,7 +121,8 @@ class TypedActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) {
//#typed-actor-create1
//#typed-actor-create2
val otherSquarer: Squarer =
TypedActor(system).typedActorOf(TypedProps(classOf[Squarer],
TypedActor(system).typedActorOf(TypedProps(
classOf[Squarer],
new SquarerImpl("foo")), "name")
//#typed-actor-create2
@@ -21,7 +21,7 @@ object IntroSpec {
final case class Greet(whom: String, replyTo: ActorRef[Greeted])
final case class Greeted(whom: String)
val greeter = Static[Greet] { msg ⇒
val greeter = Static[Greet] { msg =>
println(s"Hello ${msg.whom}!")
msg.replyTo ! Greeted(msg.whom)
}

@@ -51,17 +51,17 @@ object IntroSpec {
//#chatroom-behavior
val behavior: Behavior[GetSession] =
ContextAware[Command] { ctx ⇒
ContextAware[Command] { ctx =>
var sessions = List.empty[ActorRef[SessionEvent]]
Static {
case GetSession(screenName, client) ⇒
case GetSession(screenName, client) =>
sessions ::= client
val wrapper = ctx.spawnAdapter {
p: PostMessage ⇒ PostSessionMessage(screenName, p.message)
p: PostMessage => PostSessionMessage(screenName, p.message)
}
client ! SessionGranted(wrapper)
case PostSessionMessage(screenName, message) ⇒
case PostSessionMessage(screenName, message) =>
val mp = MessagePosted(screenName, message)
sessions foreach (_ ! mp)
}

@@ -98,13 +98,13 @@ class IntroSpec extends TypedSpec {
val gabbler: Behavior[SessionEvent] =
Total {
case SessionDenied(reason) ⇒
case SessionDenied(reason) =>
println(s"cannot start chat room session: $reason")
Stopped
case SessionGranted(handle) ⇒
case SessionGranted(handle) =>
handle ! PostMessage("Hello World!")
Same
case MessagePosted(screenName, message) ⇒
case MessagePosted(screenName, message) =>
println(s"message has been posted by '$screenName': $message")
Stopped
}

@@ -113,13 +113,13 @@ class IntroSpec extends TypedSpec {
//#chatroom-main
val main: Behavior[Unit] =
Full {
case Sig(ctx, PreStart) ⇒
case Sig(ctx, PreStart) =>
val chatRoom = ctx.spawn(Props(ChatRoom.behavior), "chatroom")
val gabblerRef = ctx.spawn(Props(gabbler), "gabbler")
ctx.watch(gabblerRef)
chatRoom ! GetSession("ol’ Gabbler", gabblerRef)
Same
case Sig(_, Terminated(ref)) ⇒
case Sig(_, Terminated(ref)) =>
Stopped
}
@@ -93,13 +93,15 @@ object Introduction {
val camel = CamelExtension(system)
val actorRef = system.actorOf(Props[MyEndpoint])
// get a future reference to the activation of the endpoint of the Consumer Actor
val activationFuture = camel.activationFutureFor(actorRef)(timeout = 10 seconds,
val activationFuture = camel.activationFutureFor(actorRef)(
timeout = 10 seconds,
executor = system.dispatcher)
//#CamelActivation
//#CamelDeactivation
system.stop(actorRef)
// get a future reference to the deactivation of the endpoint of the Consumer Actor
val deactivationFuture = camel.deactivationFutureFor(actorRef)(timeout = 10 seconds,
val deactivationFuture = camel.deactivationFutureFor(actorRef)(
timeout = 10 seconds,
executor = system.dispatcher)
//#CamelDeactivation
}
@@ -8,7 +8,7 @@ import akka.cluster.ddata.GSet
//#twophaseset
case class TwoPhaseSet(
adds: GSet[String] = GSet.empty,
adds: GSet[String] = GSet.empty,
removals: GSet[String] = GSet.empty)
extends ReplicatedData {
type T = TwoPhaseSet

@@ -22,8 +22,8 @@ class TwoPhaseSetSerializer(val system: ExtendedActorSystem)
override def identifier = 99999
override def toBinary(obj: AnyRef): Array[Byte] = obj match {
case m: TwoPhaseSet ⇒ twoPhaseSetToProto(m).toByteArray
case _ ⇒ throw new IllegalArgumentException(
case m: TwoPhaseSet => twoPhaseSetToProto(m).toByteArray
case _ => throw new IllegalArgumentException(
s"Can't serialize object of type ${obj.getClass}")
}

@@ -62,8 +62,8 @@ class TwoPhaseSetSerializerWithCompression(system: ExtendedActorSystem)
extends TwoPhaseSetSerializer(system) {
//#compression
override def toBinary(obj: AnyRef): Array[Byte] = obj match {
case m: TwoPhaseSet ⇒ compress(twoPhaseSetToProto(m))
case _ ⇒ throw new IllegalArgumentException(
case m: TwoPhaseSet => compress(twoPhaseSetToProto(m))
case _ => throw new IllegalArgumentException(
s"Can't serialize object of type ${obj.getClass}")
}

@@ -22,8 +22,8 @@ class TwoPhaseSetSerializer2(val system: ExtendedActorSystem)
val replicatedDataSerializer = new ReplicatedDataSerializer(system)
override def toBinary(obj: AnyRef): Array[Byte] = obj match {
case m: TwoPhaseSet ⇒ twoPhaseSetToProto(m).toByteArray
case _ ⇒ throw new IllegalArgumentException(
case m: TwoPhaseSet => twoPhaseSetToProto(m).toByteArray
case _ => throw new IllegalArgumentException(
s"Can't serialize object of type ${obj.getClass}")
}

@@ -52,8 +52,9 @@ class MyUnboundedMailbox extends MailboxType
}
// The create method is called to create the MessageQueue
final override def create(owner: Option[ActorRef],
system: Option[ActorSystem]): MessageQueue =
final override def create(
owner: Option[ActorRef],
system: Option[ActorSystem]): MessageQueue =
new MyMessageQueue()
}
//#mailbox-implementation-example
@@ -22,7 +22,8 @@ import akka.testkit.AkkaSpec
class SettingsImpl(config: Config) extends Extension {
val DbUri: String = config.getString("myapp.db.uri")
val CircuitBreakerTimeout: Duration =
Duration(config.getMilliseconds("myapp.circuit-breaker.timeout"),
Duration(
config.getMilliseconds("myapp.circuit-breaker.timeout"),
TimeUnit.MILLISECONDS)
}
//#extension
@@ -178,7 +178,8 @@ class HttpServerExampleSpec extends WordSpec with Matchers
val requestHandler: HttpRequest => HttpResponse = {
case HttpRequest(GET, Uri.Path("/"), _, _, _) =>
HttpResponse(entity = HttpEntity(ContentTypes.`text/html(UTF-8)`,
HttpResponse(entity = HttpEntity(
ContentTypes.`text/html(UTF-8)`,
"<html><body>Hello world!</body></html>"))
case HttpRequest(GET, Uri.Path("/ping"), _, _, _) =>

@@ -218,7 +219,8 @@ class HttpServerExampleSpec extends WordSpec with Matchers
val requestHandler: HttpRequest => HttpResponse = {
case HttpRequest(GET, Uri.Path("/"), _, _, _) =>
HttpResponse(entity = HttpEntity(ContentTypes.`text/html(UTF-8)`,
HttpResponse(entity = HttpEntity(
ContentTypes.`text/html(UTF-8)`,
"<html><body>Hello world!</body></html>"))
case HttpRequest(GET, Uri.Path("/ping"), _, _, _) =>

@@ -236,7 +238,7 @@ class HttpServerExampleSpec extends WordSpec with Matchers
StdIn.readLine() // let it run until user presses return
bindingFuture
.flatMap(_.unbind()) // trigger unbinding from the port
.onComplete(_ ⇒ system.terminate()) // and shutdown when done
.onComplete(_ => system.terminate()) // and shutdown when done
}
}

@@ -278,7 +280,7 @@ class HttpServerExampleSpec extends WordSpec with Matchers
StdIn.readLine() // let it run until user presses return
bindingFuture
.flatMap(_.unbind()) // trigger unbinding from the port
.onComplete(_ ⇒ system.terminate()) // and shutdown when done
.onComplete(_ => system.terminate()) // and shutdown when done
}
}
}

@@ -310,7 +312,7 @@ class HttpServerExampleSpec extends WordSpec with Matchers
StdIn.readLine() // let it run until user presses return
bindingFuture
.flatMap(_.unbind()) // trigger unbinding from the port
.onComplete(_ ⇒ system.terminate()) // and shutdown when done
.onComplete(_ => system.terminate()) // and shutdown when done
}
}
}

@@ -466,7 +468,7 @@ class HttpServerExampleSpec extends WordSpec with Matchers
StdIn.readLine() // let it run until user presses return
bindingFuture
.flatMap(_.unbind()) // trigger unbinding from the port
.onComplete(_ ⇒ system.terminate()) // and shutdown when done
.onComplete(_ => system.terminate()) // and shutdown when done
}
}
//#stream-random-numbers

@@ -533,7 +535,7 @@ class HttpServerExampleSpec extends WordSpec with Matchers
StdIn.readLine() // let it run until user presses return
bindingFuture
.flatMap(_.unbind()) // trigger unbinding from the port
.onComplete(_ ⇒ system.terminate()) // and shutdown when done
.onComplete(_ => system.terminate()) // and shutdown when done
}
}
@@ -22,13 +22,13 @@ object MyRejectionHandler {
.handle { case MissingCookieRejection(cookieName) =>
complete(HttpResponse(BadRequest, entity = "No cookies, no service!!!"))
}
.handle { case AuthorizationFailedRejection ⇒
.handle { case AuthorizationFailedRejection =>
complete((Forbidden, "You're out of your depth!"))
}
.handle { case ValidationRejection(msg, _) ⇒
.handle { case ValidationRejection(msg, _) =>
complete((InternalServerError, "That wasn't valid! " + msg))
}
.handleAll[MethodRejection] { methodRejections ⇒
.handleAll[MethodRejection] { methodRejections =>
val names = methodRejections.map(_.supported.name)
complete((MethodNotAllowed, s"Can't do that! Supported: ${names mkString " or "}!"))
}
@@ -34,7 +34,7 @@ class WebSocketExampleSpec extends WordSpec with Matchers {
// rather we simply stream it back as the tail of the response
// this means we might start sending the response even before the
// end of the incoming message has been received
case tm: TextMessage ⇒ TextMessage(Source.single("Hello ") ++ tm.textStream) :: Nil
case tm: TextMessage => TextMessage(Source.single("Hello ") ++ tm.textStream) :: Nil
case bm: BinaryMessage =>
// ignore binary messages but drain content to avoid the stream being clogged
bm.dataStream.runWith(Sink.ignore)

@@ -43,13 +43,13 @@ class WebSocketExampleSpec extends WordSpec with Matchers {
//#websocket-handler
//#websocket-request-handling
val requestHandler: HttpRequest ⇒ HttpResponse = {
case req @ HttpRequest(GET, Uri.Path("/greeter"), _, _, _) ⇒
val requestHandler: HttpRequest => HttpResponse = {
case req @ HttpRequest(GET, Uri.Path("/greeter"), _, _, _) =>
req.header[UpgradeToWebSocket] match {
case Some(upgrade) ⇒ upgrade.handleMessages(greeterWebSocketService)
case None ⇒ HttpResponse(400, entity = "Not a valid websocket request!")
case Some(upgrade) => upgrade.handleMessages(greeterWebSocketService)
case None => HttpResponse(400, entity = "Not a valid websocket request!")
}
case _: HttpRequest ⇒ HttpResponse(404, entity = "Unknown resource!")
case _: HttpRequest => HttpResponse(404, entity = "Unknown resource!")
}
//#websocket-request-handling

@@ -62,7 +62,7 @@ class WebSocketExampleSpec extends WordSpec with Matchers {
import system.dispatcher // for the future transformations
bindingFuture
.flatMap(_.unbind()) // trigger unbinding from the port
.onComplete(_ ⇒ system.terminate()) // and shutdown when done
.onComplete(_ => system.terminate()) // and shutdown when done
}
"routing-example" in {
pending // compile-time only test

@@ -83,7 +83,7 @@ class WebSocketExampleSpec extends WordSpec with Matchers {
val greeterWebSocketService =
Flow[Message]
.collect {
case tm: TextMessage ⇒ TextMessage(Source.single("Hello ") ++ tm.textStream)
case tm: TextMessage => TextMessage(Source.single("Hello ") ++ tm.textStream)
// ignore binary messages
}

@@ -104,6 +104,6 @@ class WebSocketExampleSpec extends WordSpec with Matchers {
import system.dispatcher // for the future transformations
bindingFuture
.flatMap(_.unbind()) // trigger unbinding from the port
.onComplete(_ ⇒ system.terminate()) // and shutdown when done
.onComplete(_ => system.terminate()) // and shutdown when done
}
}
@@ -267,14 +267,14 @@ class BasicDirectivesExamplesSpec extends RoutingSpec {
private def nonSuccessToEmptyJsonEntity(response: HttpResponse): HttpResponse =
response.status match {
case code if code.isSuccess ⇒ response
case code ⇒
case code if code.isSuccess => response
case code =>
log.warning("Dropping response entity since response status code was: {}", code)
response.copy(entity = NullJsonEntity)
}
/** Wrapper for all of our JSON API routes */
def apiRoute(innerRoutes: ⇒ Route): Route =
def apiRoute(innerRoutes: => Route): Route =
mapResponse(nonSuccessToEmptyJsonEntity)(innerRoutes)
}
//#

@@ -388,13 +388,12 @@ class BasicDirectivesExamplesSpec extends RoutingSpec {
"mapInnerRoute" in {
//#mapInnerRoute
val completeWithInnerException =
mapInnerRoute { route =>
ctx =>
try {
route(ctx)
} catch {
case NonFatal(e) => ctx.complete(s"Got ${e.getClass.getSimpleName} '${e.getMessage}'")
}
mapInnerRoute { route => ctx =>
try {
route(ctx)
} catch {
case NonFatal(e) => ctx.complete(s"Got ${e.getClass.getSimpleName} '${e.getMessage}'")
}
}
val route =

@@ -801,4 +800,4 @@ class BasicDirectivesExamplesSpec extends RoutingSpec {
//#
}
}
}
@@ -147,7 +147,7 @@ class HeaderDirectivesExamplesSpec extends RoutingSpec with Inside {
}
"headerValueByType-0" in {
val route =
headerValueByType[Origin]() { origin ⇒
headerValueByType[Origin]() { origin =>
complete(s"The first origin was ${origin.origins.head}")
}

@@ -161,14 +161,14 @@ class HeaderDirectivesExamplesSpec extends RoutingSpec with Inside {
// reject a request if no header of the given type is present
Get("abc") ~> route ~> check {
inside(rejection) { case MissingHeaderRejection("Origin") ⇒ }
inside(rejection) { case MissingHeaderRejection("Origin") => }
}
}
"optionalHeaderValueByType-0" in {
val route =
optionalHeaderValueByType[Origin]() {
case Some(origin) ⇒ complete(s"The first origin was ${origin.origins.head}")
case None ⇒ complete("No Origin header found.")
case Some(origin) => complete(s"The first origin was ${origin.origins.head}")
case None => complete("No Origin header found.")
}
val originHeader = Origin(HttpOrigin("http://localhost:8080"))
@@ -67,13 +67,13 @@ class MiscDirectivesExamplesSpec extends RoutingSpec {
Language("de") withQValue 0.5f)
request ~> {
selectPreferredLanguage("en", "en-US") { lang ⇒
selectPreferredLanguage("en", "en-US") { lang =>
complete(lang.toString)
}
} ~> check { responseAs[String] shouldEqual "en-US" }
request ~> {
selectPreferredLanguage("de-DE", "hu") { lang ⇒
selectPreferredLanguage("de-DE", "hu") { lang =>
complete(lang.toString)
}
} ~> check { responseAs[String] shouldEqual "de-DE" }

@@ -25,7 +25,7 @@ class SchemeDirectivesExamplesSpec extends RoutingSpec {
val route =
scheme("http") {
extract(_.request.uri) { uri ⇒
extract(_.request.uri) { uri =>
redirect(uri.copy(scheme = "https"), MovedPermanently)
}
} ~
@@ -39,7 +39,8 @@ class TimeoutDirectivesExamplesSpec extends RoutingSpec with CompileOnlySpec {
"allow mapping the response while setting the timeout" in compileOnlySpec {
//#withRequestTimeout-with-handler
val timeoutResponse = HttpResponse(StatusCodes.EnhanceYourCalm,
val timeoutResponse = HttpResponse(
StatusCodes.EnhanceYourCalm,
entity = "Unable to serve response within time limit, please enchance your calm.")
val route =

@@ -57,7 +58,8 @@ class TimeoutDirectivesExamplesSpec extends RoutingSpec with CompileOnlySpec {
pending // compile only spec since requires actuall Http server to be run
//#withRequestTimeoutResponse
val timeoutResponse = HttpResponse(StatusCodes.EnhanceYourCalm,
val timeoutResponse = HttpResponse(
StatusCodes.EnhanceYourCalm,
entity = "Unable to serve response within time limit, please enchance your calm.")
val route =
@@ -19,9 +19,9 @@ class WebSocketDirectivesExamplesSpec extends RoutingSpec {
"greeter-service" in {
def greeter: Flow[Message, Message, Any] =
Flow[Message].mapConcat {
case tm: TextMessage ⇒
case tm: TextMessage =>
TextMessage(Source.single("Hello ") ++ tm.textStream ++ Source.single("!")) :: Nil
case bm: BinaryMessage ⇒
case bm: BinaryMessage =>
// ignore binary messages but drain content to avoid the stream being clogged
bm.dataStream.runWith(Sink.ignore)
Nil

@@ -59,9 +59,9 @@ class WebSocketDirectivesExamplesSpec extends RoutingSpec {
"handle-multiple-protocols" in {
def greeterService: Flow[Message, Message, Any] =
Flow[Message].mapConcat {
case tm: TextMessage ⇒
case tm: TextMessage =>
TextMessage(Source.single("Hello ") ++ tm.textStream ++ Source.single("!")) :: Nil
case bm: BinaryMessage ⇒
case bm: BinaryMessage =>
// ignore binary messages but drain content to avoid the stream being clogged
bm.dataStream.runWith(Sink.ignore)
Nil

@@ -85,7 +85,7 @@ class WebSocketDirectivesExamplesSpec extends RoutingSpec {
WS("/services", wsClient.flow, List("other", "echo")) ~>
websocketMultipleProtocolRoute ~>
check {
expectWebSocketUpgradeWithProtocol { protocol ⇒
expectWebSocketUpgradeWithProtocol { protocol =>
protocol shouldEqual "echo"
wsClient.sendMessage("Peter")
@@ -24,7 +24,7 @@ class BackoffSupervisorDocSpec {
minBackoff = 3.seconds,
maxBackoff = 30.seconds,
randomFactor = 0.2 // adds 20% "noise" to vary the intervals slightly
))
))
system.actorOf(supervisor, name = "echoSupervisor")
//#backoff-stop

@@ -44,7 +44,7 @@ class BackoffSupervisorDocSpec {
minBackoff = 3.seconds,
maxBackoff = 30.seconds,
randomFactor = 0.2 // adds 20% "noise" to vary the intervals slightly
))
))
system.actorOf(supervisor, name = "echoSupervisor")
//#backoff-fail

@@ -59,14 +59,14 @@ class BackoffSupervisorDocSpec {
//#backoff-custom-stop
val supervisor = BackoffSupervisor.props(
Backoff.onStop(
childProps,
childName = "myEcho",
minBackoff = 3.seconds,
maxBackoff = 30.seconds,
randomFactor = 0.2 // adds 20% "noise" to vary the intervals slightly
).withManualReset // the child must send BackoffSupervisor.Reset to its parent
.withDefaultStoppingStrategy // Stop at any Exception thrown
)
childProps,
childName = "myEcho",
minBackoff = 3.seconds,
maxBackoff = 30.seconds,
randomFactor = 0.2 // adds 20% "noise" to vary the intervals slightly
).withManualReset // the child must send BackoffSupervisor.Reset to its parent
.withDefaultStoppingStrategy // Stop at any Exception thrown
)
//#backoff-custom-stop
system.actorOf(supervisor, name = "echoSupervisor")

@@ -86,11 +86,11 @@ class BackoffSupervisorDocSpec {
minBackoff = 3.seconds,
maxBackoff = 30.seconds,
randomFactor = 0.2 // adds 20% "noise" to vary the intervals slightly
).withAutoReset(10.seconds) // the child must send BackoffSupervisor.Reset to its parent
).withAutoReset(10.seconds) // the child must send BackoffSupervisor.Reset to its parent
.withSupervisorStrategy(
OneForOneStrategy() {
case _: MyException ⇒ SupervisorStrategy.Restart
case _ ⇒ SupervisorStrategy.Escalate
case _: MyException => SupervisorStrategy.Restart
case _ => SupervisorStrategy.Escalate
}))
//#backoff-custom-fail
@@ -88,12 +88,14 @@ class SchedulerPatternSpec extends AkkaSpec {
}
"send periodic ticks from the constructor" taggedAs TimingTest in {
testSchedule(system.actorOf(Props(classOf[ScheduleInConstructor], testActor)),
testSchedule(
system.actorOf(Props(classOf[ScheduleInConstructor], testActor)),
3000 millis, 2000 millis)
}
"send ticks from the preStart and receive" taggedAs TimingTest in {
testSchedule(system.actorOf(Props(classOf[ScheduleInConstructor], testActor)),
testSchedule(
system.actorOf(Props(classOf[ScheduleInConstructor], testActor)),
3000 millis, 2500 millis)
}
}
@@ -326,13 +326,13 @@ object PersistenceDocSpec {
override def receiveCommand: Receive = {
case c: String =>
sender() ! c
persistAsync(c + "-outer-1") { outer ⇒
persistAsync(c + "-outer-1") { outer =>
sender() ! outer
persistAsync(c + "-inner-1") { inner ⇒ sender() ! inner }
persistAsync(c + "-inner-1") { inner => sender() ! inner }
}
persistAsync(c + "-outer-2") { outer ⇒
persistAsync(c + "-outer-2") { outer =>
sender() ! outer
persistAsync(c + "-inner-2") { inner ⇒ sender() ! inner }
persistAsync(c + "-inner-2") { inner => sender() ! inner }
}
}
//#nested-persistAsync-persistAsync
@@ -149,17 +149,19 @@ class MyJournal extends AsyncWriteJournal {
def asyncDeleteMessagesTo(persistenceId: String, toSequenceNr: Long): Future[Unit] = ???
def asyncReplayMessages(persistenceId: String, fromSequenceNr: Long,
toSequenceNr: Long, max: Long)(
replayCallback: (PersistentRepr) => Unit): Future[Unit] = ???
def asyncReadHighestSequenceNr(persistenceId: String,
fromSequenceNr: Long): Future[Long] = ???
replayCallback: (PersistentRepr) => Unit): Future[Unit] = ???
def asyncReadHighestSequenceNr(
persistenceId: String,
fromSequenceNr: Long): Future[Long] = ???
// optionally override:
override def receivePluginInternal: Receive = super.receivePluginInternal
}
class MySnapshotStore extends SnapshotStore {
def loadAsync(persistenceId: String,
criteria: SnapshotSelectionCriteria): Future[Option[SelectedSnapshot]] = ???
def loadAsync(
persistenceId: String,
criteria: SnapshotSelectionCriteria): Future[Option[SelectedSnapshot]] = ???
def saveAsync(metadata: SnapshotMetadata, snapshot: Any): Future[Unit] = ???
def deleteAsync(metadata: SnapshotMetadata): Future[Unit] = ???
def deleteAsync(persistenceId: String, criteria: SnapshotSelectionCriteria): Future[Unit] = ???
@@ -247,7 +247,8 @@ class UserEventsAdapter extends EventAdapter {
case UserDetailsChanged(null, address) => EventSeq(UserAddressChanged(address))
case UserDetailsChanged(name, null) => EventSeq(UserNameChanged(name))
case UserDetailsChanged(name, address) =>
EventSeq(UserNameChanged(name),
EventSeq(
UserNameChanged(name),
UserAddressChanged(address))
case event: V2 => EventSeq(event)
}

@@ -267,7 +268,7 @@ class RemovedEventsAwareSerializer extends SerializerWithStringManifest {
val SkipEventManifestsEvents = Set(
"docs.persistence.CustomerBlinked" // ...
)
)
override def manifest(o: AnyRef): String = o.getClass.getName

@@ -22,13 +22,13 @@ object LeveldbPersistenceQueryDocSpec {
class MyTaggingEventAdapter extends WriteEventAdapter {
val colors = Set("green", "black", "blue")
override def toJournal(event: Any): Any = event match {
case s: String ⇒
var tags = colors.foldLeft(Set.empty[String]) { (acc, c) ⇒
case s: String =>
var tags = colors.foldLeft(Set.empty[String]) { (acc, c) =>
if (s.contains(c)) acc + c else acc
}
if (tags.isEmpty) event
else Tagged(event, tags)
case _ ⇒ event
case _ => event
}
override def manifest(event: Any): String = ""
@@ -39,11 +39,11 @@ class MyEventsByTagPublisher(tag: String, offset: Long, refreshInterval: FiniteD
}
def receive = {
case _: Request | Continue ⇒
case _: Request | Continue =>
query()
deliverBuf()
case Cancel ⇒
case Cancel =>
context.stop(self)
}

@@ -79,12 +79,12 @@ class MyEventsByTagPublisher(tag: String, offset: Long, refreshInterval: FiniteD
val serialization = SerializationExtension(context.system)
buf = result.map {
case (id, bytes) ⇒
case (id, bytes) =>
val p = serialization.deserialize(bytes, classOf[PersistentRepr]).get
EventEnvelope(offset = id, p.persistenceId, p.sequenceNr, p.payload)
}
} catch {
case e: Exception ⇒
case e: Exception =>
onErrorThenStop(e)
}
}

@@ -101,4 +101,4 @@ class MyEventsByTagPublisher(tag: String, offset: Long, refreshInterval: FiniteD
}
}
}
//#events-by-tag-publisher
//#events-by-tag-publisher

@@ -57,7 +57,7 @@ object PersistenceQueryDocSpec {
tag: String, offset: Long = 0L): Source[EventEnvelope, NotUsed] = {
val props = MyEventsByTagPublisher.props(tag, offset, refreshInterval)
Source.actorPublisher[EventEnvelope](props)
.mapMaterializedValue(_ ⇒ NotUsed)
.mapMaterializedValue(_ => NotUsed)
}
override def eventsByPersistenceId(
@@ -124,7 +124,8 @@ class CustomRouterDocSpec extends AkkaSpec(CustomRouterDocSpec.config) with Impl
val paths = for (n <- 1 to 10) yield ("/user/s" + n)
val redundancy1: ActorRef =
system.actorOf(RedundancyGroup(paths, nbrCopies = 3).props(),
system.actorOf(
RedundancyGroup(paths, nbrCopies = 3).props(),
name = "redundancy1")
redundancy1 ! "important"
//#usage-1

@@ -132,7 +133,8 @@ class CustomRouterDocSpec extends AkkaSpec(CustomRouterDocSpec.config) with Impl
for (_ <- 1 to 3) expectMsg("important")
//#usage-2
val redundancy2: ActorRef = system.actorOf(FromConfig.props(),
val redundancy2: ActorRef = system.actorOf(
FromConfig.props(),
name = "redundancy2")
redundancy2 ! "very important"
//#usage-2

@@ -415,7 +415,8 @@ router-dispatcher {}
//#scatter-gather-group-2
val router20: ActorRef =
context.actorOf(ScatterGatherFirstCompletedGroup(paths,
context.actorOf(ScatterGatherFirstCompletedGroup(
paths,
within = 10.seconds).props(), "router20")
//#scatter-gather-group-2

@@ -437,7 +438,8 @@ router-dispatcher {}
//#tail-chopping-group-2
val router24: ActorRef =
context.actorOf(TailChoppingGroup(paths,
context.actorOf(TailChoppingGroup(
paths,
within = 10.seconds, interval = 20.millis).props(), "router24")
//#tail-chopping-group-2

@@ -448,7 +450,8 @@ router-dispatcher {}
//#consistent-hashing-pool-2
val router26: ActorRef =
context.actorOf(ConsistentHashingPool(5).props(Props[Worker]),
context.actorOf(
ConsistentHashingPool(5).props(Props[Worker]),
"router26")
//#consistent-hashing-pool-2

@@ -470,7 +473,8 @@ router-dispatcher {}
//#resize-pool-2
val resizer = DefaultResizer(lowerBound = 2, upperBound = 15)
val router30: ActorRef =
context.actorOf(RoundRobinPool(5, Some(resizer)).props(Props[Worker]),
context.actorOf(
RoundRobinPool(5, Some(resizer)).props(Props[Worker]),
"router30")
//#resize-pool-2
@@ -38,8 +38,9 @@ package docs.serialization {
// "fromBinary" deserializes the given array,
// using the type hint (if any, see "includeManifest" above)
def fromBinary(bytes: Array[Byte],
clazz: Option[Class[_]]): AnyRef = {
def fromBinary(
bytes: Array[Byte],
clazz: Option[Class[_]]): AnyRef = {
// Put your code that deserializes here
//#...
null

@@ -216,8 +216,9 @@ class CompositionDocSpec extends AkkaSpec {
def close() = p.trySuccess(None)
}
def f(p: Promise[Option[Int]],
rest: (Future[OutgoingConnection], Future[String])): Future[MyClass] = {
def f(
p: Promise[Option[Int]],
rest: (Future[OutgoingConnection], Future[String])): Future[MyClass] = {
val connFuture = rest._1
connFuture.map(MyClass(p, _))
@@ -149,12 +149,11 @@ class FlowDocSpec extends AkkaSpec {
"various ways of transforming materialized values" in {
import scala.concurrent.duration._
val throttler = Flow.fromGraph(GraphDSL.create(Source.tick(1.second, 1.second, "test")) { implicit builder =>
tickSource =>
import GraphDSL.Implicits._
val zip = builder.add(ZipWith[String, Int, Int](Keep.right))
tickSource ~> zip.in0
FlowShape(zip.in1, zip.out)
val throttler = Flow.fromGraph(GraphDSL.create(Source.tick(1.second, 1.second, "test")) { implicit builder => tickSource =>
import GraphDSL.Implicits._
val zip = builder.add(ZipWith[String, Int, Int](Keep.right))
tickSource ~> zip.in0
FlowShape(zip.in1, zip.out)
})
//#flow-mat-combine

@@ -212,11 +211,10 @@ class FlowDocSpec extends AkkaSpec {
// The result of r11 can be also achieved by using the Graph API
val r12: RunnableGraph[(Promise[Option[Int]], Cancellable, Future[Int])] =
RunnableGraph.fromGraph(GraphDSL.create(source, flow, sink)((_, _, _)) { implicit builder =>
(src, f, dst) =>
import GraphDSL.Implicits._
src ~> f ~> dst
ClosedShape
RunnableGraph.fromGraph(GraphDSL.create(source, flow, sink)((_, _, _)) { implicit builder => (src, f, dst) =>
import GraphDSL.Implicits._
src ~> f ~> dst
ClosedShape
})
//#flow-mat-combine
@@ -97,9 +97,9 @@ class GraphDSLDocSpec extends AkkaSpec {
// A shape represents the input and output ports of a reusable
// processing module
case class PriorityWorkerPoolShape[In, Out](
jobsIn: Inlet[In],
jobsIn: Inlet[In],
priorityJobsIn: Inlet[In],
resultsOut: Outlet[Out]) extends Shape {
resultsOut: Outlet[Out]) extends Shape {
// It is important to provide the list of all input and output
// ports with a stable order. Duplicates are not allowed.

@@ -117,7 +117,7 @@ class GraphDSLDocSpec extends AkkaSpec {
// A Shape must also be able to create itself from existing ports
override def copyFromPorts(
inlets: immutable.Seq[Inlet[_]],
inlets: immutable.Seq[Inlet[_]],
outlets: immutable.Seq[Outlet[_]]) = {
assert(inlets.size == this.inlets.size)
assert(outlets.size == this.outlets.size)

@@ -130,10 +130,10 @@ class GraphDSLDocSpec extends AkkaSpec {
//#graph-dsl-components-create
object PriorityWorkerPool {
def apply[In, Out](
worker: Flow[In, Out, Any],
worker: Flow[In, Out, Any],
workerCount: Int): Graph[PriorityWorkerPoolShape[In, Out], NotUsed] = {
GraphDSL.create() { implicit b ⇒
GraphDSL.create() { implicit b =>
import GraphDSL.Implicits._
val priorityMerge = b.add(MergePreferred[In](1))

@@ -203,10 +203,8 @@ class GraphDSLDocSpec extends AkkaSpec {
"access to materialized value" in {
//#graph-dsl-matvalue
import GraphDSL.Implicits._
val foldFlow: Flow[Int, Int, Future[Int]] = Flow.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) {
implicit builder ⇒
fold ⇒
FlowShape(fold.in, builder.materializedValue.mapAsync(4)(identity).outlet)
val foldFlow: Flow[Int, Int, Future[Int]] = Flow.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) { implicit builder => fold =>
FlowShape(fold.in, builder.materializedValue.mapAsync(4)(identity).outlet)
})
//#graph-dsl-matvalue

@@ -215,16 +213,14 @@ class GraphDSLDocSpec extends AkkaSpec {
//#graph-dsl-matvalue-cycle
import GraphDSL.Implicits._
// This cannot produce any value:
val cyclicFold: Source[Int, Future[Int]] = Source.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) {
implicit builder =>
fold =>
// - Fold cannot complete until its upstream mapAsync completes
// - mapAsync cannot complete until the materialized Future produced by
// fold completes
// As a result this Source will never emit anything, and its materialited
// Future will never complete
builder.materializedValue.mapAsync(4)(identity) ~> fold
SourceShape(builder.materializedValue.mapAsync(4)(identity).outlet)
val cyclicFold: Source[Int, Future[Int]] = Source.fromGraph(GraphDSL.create(Sink.fold[Int, Int](0)(_ + _)) { implicit builder => fold =>
// - Fold cannot complete until its upstream mapAsync completes
// - mapAsync cannot complete until the materialized Future produced by
// fold completes
// As a result this Source will never emit anything, and its materialited
// Future will never complete
builder.materializedValue.mapAsync(4)(identity) ~> fold
SourceShape(builder.materializedValue.mapAsync(4)(identity).outlet)
})
//#graph-dsl-matvalue-cycle
}
@@ -31,18 +31,17 @@ class StreamPartialGraphDSLDocSpec extends AkkaSpec {
val resultSink = Sink.head[Int]
val g = RunnableGraph.fromGraph(GraphDSL.create(resultSink) { implicit b =>
sink =>
import GraphDSL.Implicits._
val g = RunnableGraph.fromGraph(GraphDSL.create(resultSink) { implicit b => sink =>
import GraphDSL.Implicits._
// importing the partial graph will return its shape (inlets & outlets)
val pm3 = b.add(pickMaxOfThree)
// importing the partial graph will return its shape (inlets & outlets)
val pm3 = b.add(pickMaxOfThree)
Source.single(1) ~> pm3.in(0)
Source.single(2) ~> pm3.in(1)
Source.single(3) ~> pm3.in(2)
pm3.out ~> sink.in
ClosedShape
Source.single(1) ~> pm3.in(0)
Source.single(2) ~> pm3.in(1)
Source.single(3) ~> pm3.in(2)
pm3.out ~> sink.in
ClosedShape
})
val max: Future[Int] = g.run()
@@ -23,17 +23,16 @@ class RecipeDroppyBroadcast extends RecipeSpec {
val mySink3 = Sink.fromSubscriber(sub3)
//#droppy-bcast
val graph = RunnableGraph.fromGraph(GraphDSL.create(mySink1, mySink2, mySink3)((_, _, _)) { implicit b =>
(sink1, sink2, sink3) =>
import GraphDSL.Implicits._
val graph = RunnableGraph.fromGraph(GraphDSL.create(mySink1, mySink2, mySink3)((_, _, _)) { implicit b => (sink1, sink2, sink3) =>
import GraphDSL.Implicits._
val bcast = b.add(Broadcast[Int](3))
myElements ~> bcast
val bcast = b.add(Broadcast[Int](3))
myElements ~> bcast
bcast.buffer(10, OverflowStrategy.dropHead) ~> sink1
bcast.buffer(10, OverflowStrategy.dropHead) ~> sink2
bcast.buffer(10, OverflowStrategy.dropHead) ~> sink3
ClosedShape
bcast.buffer(10, OverflowStrategy.dropHead) ~> sink1
bcast.buffer(10, OverflowStrategy.dropHead) ~> sink2
bcast.buffer(10, OverflowStrategy.dropHead) ~> sink3
ClosedShape
})
//#droppy-bcast
@@ -45,8 +45,8 @@ class RecipeReduceByKey extends RecipeSpec {
//#reduce-by-key-general
def reduceByKey[In, K, Out](
maximumGroupSize: Int,
groupKey: (In) => K,
map: (In) => Out)(reduce: (Out, Out) => Out): Flow[In, (K, Out), NotUsed] = {
groupKey: (In) => K,
map: (In) => Out)(reduce: (Out, Out) => Out): Flow[In, (K, Out), NotUsed] = {
Flow[In]
.groupBy[K](maximumGroupSize, groupKey)

@@ -56,7 +56,8 @@ class RecipeReduceByKey extends RecipeSpec {
}
val wordCounts = words.via(
reduceByKey(MaximumDistinctWords,
reduceByKey(
MaximumDistinctWords,
groupKey = (word: String) => word,
map = (word: String) => 1)((left: Int, right: Int) => left + right))
//#reduce-by-key-general
@@ -85,7 +85,7 @@ class StreamTcpDocSpec extends AkkaSpec {
allowTruncation = true))
.map(_.utf8String)
//#welcome-banner-chat-server
.map { command ⇒ serverProbe.ref ! command; command }
.map { command => serverProbe.ref ! command; command }
//#welcome-banner-chat-server
.via(commandParser)
// merge in the initial banner after parser

@@ -102,8 +102,8 @@ class StreamTcpDocSpec extends AkkaSpec {
val input = new AtomicReference("Hello world" :: "What a lovely day" :: Nil)
def readLine(prompt: String): String = {
input.get() match {
case all @ cmd :: tail if input.compareAndSet(all, tail) ⇒ cmd
case _ ⇒ "q"
case all @ cmd :: tail if input.compareAndSet(all, tail) => cmd
case _ => "q"
}
}
@@ -26,7 +26,8 @@ import scala.collection.immutable
* a Test to show some TestKit examples
*/
class TestKitUsageSpec
extends TestKit(ActorSystem("TestKitUsageSpec",
extends TestKit(ActorSystem(
"TestKitUsageSpec",
ConfigFactory.parseString(TestKitUsageSpec.config)))
with DefaultTimeout with ImplicitSender
with WordSpecLike with Matchers with BeforeAndAfterAll {