dotty phase 2: scalafix ExplicitNonNullaryApply (#28949)
* scalafix ExplicitNonNullaryApply prepare
  + Temporarily use com.sandinh:sbt-scalafix because of scalacenter/scalafix#1098
  + Add the ExplicitNonNullaryApply rule to .scalafix.conf
  + Manually fix a NonNullaryApply case in DeathWatchSpec that causes `fixall` to fail, because the ExplicitNonNullaryApply rule incorrectly rewrites `context unbecome` to `context unbecome()` instead of `context.unbecome()`
* scalafix ExplicitNonNullaryApply fix, by enabling only the ExplicitNonNullaryApply rule in .scalafix.conf and then running:
  ```
  % sbt -Dakka.build.scalaVersion=2.13.1
  > fixall
  ```
* scalafmtAll
* Revert to ch.epfl.scala:sbt-scalafix

Co-authored-by: Bùi Việt Thành <thanhbv@sandinh.net>
parent 4ba835d328
commit ea7205eaf7
266 changed files with 929 additions and 919 deletions
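Most of the diff below is mechanical: the ExplicitNonNullaryApply rule rewrites every call to a method declared with an empty parameter list so that the parentheses are explicit, which is what dotty (Scala 3) expects. A minimal, hypothetical sketch of the before/after shape of that rewrite (the `Worker` class is invented for illustration and is not part of this commit):

```scala
// Illustrative only: the kind of call site the scalafix rule rewrites.
class Worker {
  def run(): Unit = println("running") // declared with an empty parameter list
}

object CallSite {
  val w = new Worker
  // Before the rewrite this read `w.run` (auto-application of the empty
  // parameter list); the rule inserts the missing parentheses:
  w.run()
}
```

The same pattern accounts for the `Props[TestActor]` → `Props[TestActor]()`, `stay` → `stay()`, and `expectNoMessage` → `expectNoMessage()` changes throughout the hunks below.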
@@ -2,6 +2,7 @@
 rules = [
 RemoveUnused
 ExplicitResultTypes
+"github:ohze/scalafix-rules/ExplicitNonNullaryApply"
 "github:ohze/scalafix-rules/ConstructorProcedureSyntax"
 "github:ohze/scalafix-rules/FinalObject"
 "github:ohze/scalafix-rules/Any2StringAdd"
@@ -90,7 +90,7 @@ import org.slf4j.LoggerFactory
 override def scheduler: Scheduler = throw new UnsupportedOperationException("no scheduler")
-private val terminationPromise = Promise[Done]
+private val terminationPromise = Promise[Done]()
 override def terminate(): Unit = terminationPromise.trySuccess(Done)
 override def whenTerminated: Future[Done] = terminationPromise.future
 override def getWhenTerminated: CompletionStage[Done] = FutureConverters.toJava(whenTerminated)

@@ -128,7 +128,7 @@ private[akka] final class BehaviorTestKitImpl[T](_path: ActorPath, _initialBehav
 } catch handleException
 }
-override def runOne(): Unit = run(selfInbox.receiveMessage())
+override def runOne(): Unit = run(selfInbox().receiveMessage())
 override def signal(signal: Signal): Unit = {
 try {

@@ -147,7 +147,7 @@ private[akka] final class FunctionRef[-T](override val path: ActorPath, send: (T
 new FunctionRef[U](p, (message, _) => {
 val m = f(message);
 if (m != null) {
-selfInbox.ref ! m; i.selfInbox.ref ! message
+selfInbox.ref ! m; i.selfInbox().ref ! message
 }
 })
 }
@@ -75,7 +75,7 @@ abstract class BehaviorTestKit[T] {
 /**
 * The self reference of the actor living inside this testkit.
 */
-def getRef(): ActorRef[T] = selfInbox.getRef()
+def getRef(): ActorRef[T] = selfInbox().getRef()
 /**
 * Requests all the effects. The effects are consumed, subsequent calls will only

@@ -72,7 +72,7 @@ trait BehaviorTestKit[T] {
 /**
 * The self reference of the actor living inside this testkit.
 */
-def ref: ActorRef[T] = selfInbox.ref
+def ref: ActorRef[T] = selfInbox().ref
 /**
 * Requests all the effects. The effects are consumed, subsequent calls will only

@@ -148,7 +148,7 @@ class TestProbeSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike with
 val probe = createTestProbe[EventT]()
 eventsT(10).forall { e =>
 probe.ref ! e
-probe.receiveMessage == e
+probe.receiveMessage() == e
 } should ===(true)
 probe.expectNoMessage()
@@ -47,22 +47,22 @@ class ActorConfigurationVerificationSpec
 "An Actor configured with a BalancingDispatcher" must {
 "fail verification with a ConfigurationException if also configured with a RoundRobinPool" in {
 intercept[ConfigurationException] {
-system.actorOf(RoundRobinPool(2).withDispatcher("balancing-dispatcher").props(Props[TestActor]))
+system.actorOf(RoundRobinPool(2).withDispatcher("balancing-dispatcher").props(Props[TestActor]()))
 }
 }
 "fail verification with a ConfigurationException if also configured with a BroadcastPool" in {
 intercept[ConfigurationException] {
-system.actorOf(BroadcastPool(2).withDispatcher("balancing-dispatcher").props(Props[TestActor]))
+system.actorOf(BroadcastPool(2).withDispatcher("balancing-dispatcher").props(Props[TestActor]()))
 }
 }
 "fail verification with a ConfigurationException if also configured with a RandomPool" in {
 intercept[ConfigurationException] {
-system.actorOf(RandomPool(2).withDispatcher("balancing-dispatcher").props(Props[TestActor]))
+system.actorOf(RandomPool(2).withDispatcher("balancing-dispatcher").props(Props[TestActor]()))
 }
 }
 "fail verification with a ConfigurationException if also configured with a SmallestMailboxPool" in {
 intercept[ConfigurationException] {
-system.actorOf(SmallestMailboxPool(2).withDispatcher("balancing-dispatcher").props(Props[TestActor]))
+system.actorOf(SmallestMailboxPool(2).withDispatcher("balancing-dispatcher").props(Props[TestActor]()))
 }
 }
 "fail verification with a ConfigurationException if also configured with a ScatterGatherFirstCompletedPool" in {

@@ -70,33 +70,33 @@ class ActorConfigurationVerificationSpec
 system.actorOf(
 ScatterGatherFirstCompletedPool(nrOfInstances = 2, within = 2 seconds)
 .withDispatcher("balancing-dispatcher")
-.props(Props[TestActor]))
+.props(Props[TestActor]()))
 }
 }
 "not fail verification with a ConfigurationException also not configured with a Router" in {
-system.actorOf(Props[TestActor].withDispatcher("balancing-dispatcher"))
+system.actorOf(Props[TestActor]().withDispatcher("balancing-dispatcher"))
 }
 }
 "An Actor configured with a non-balancing dispatcher" must {
 "not fail verification with a ConfigurationException if also configured with a Router" in {
-system.actorOf(RoundRobinPool(2).props(Props[TestActor].withDispatcher("pinned-dispatcher")))
+system.actorOf(RoundRobinPool(2).props(Props[TestActor]().withDispatcher("pinned-dispatcher")))
 }
 "fail verification if the dispatcher cannot be found" in {
 intercept[ConfigurationException] {
-system.actorOf(Props[TestActor].withDispatcher("does not exist"))
+system.actorOf(Props[TestActor]().withDispatcher("does not exist"))
 }
 }
 "fail verification if the dispatcher cannot be found for the head of a router" in {
 intercept[ConfigurationException] {
-system.actorOf(RoundRobinPool(1, routerDispatcher = "does not exist").props(Props[TestActor]))
+system.actorOf(RoundRobinPool(1, routerDispatcher = "does not exist").props(Props[TestActor]()))
 }
 }
 "fail verification if the dispatcher cannot be found for the routees of a router" in {
 intercept[ConfigurationException] {
-system.actorOf(RoundRobinPool(1).props(Props[TestActor].withDispatcher("does not exist")))
+system.actorOf(RoundRobinPool(1).props(Props[TestActor]().withDispatcher("does not exist")))
 }
 }
 }
@@ -218,9 +218,9 @@ class ActorCreationPerfSpec
 "Actor creation with actorOf" must {
-registerTests("Props[EmptyActor] with new Props", () => Props[EmptyActor])
+registerTests("Props[EmptyActor] with new Props", () => Props[EmptyActor]())
-val props1 = Props[EmptyActor]
+val props1 = Props[EmptyActor]()
 registerTests("Props[EmptyActor] with same Props", () => props1)
 registerTests("Props(new EmptyActor) new", () => { Props(new EmptyActor) })
@@ -242,22 +242,22 @@ class ActorMailboxSpec(conf: Config) extends AkkaSpec(conf) with DefaultTimeout
 "An Actor" must {
 "get an unbounded message queue by default" in {
-checkMailboxQueue(Props[QueueReportingActor], "default-default", UnboundedMailboxTypes)
+checkMailboxQueue(Props[QueueReportingActor](), "default-default", UnboundedMailboxTypes)
 }
 "get an unbounded deque message queue when it is only configured on the props" in {
 checkMailboxQueue(
-Props[QueueReportingActor].withMailbox("akka.actor.mailbox.unbounded-deque-based"),
+Props[QueueReportingActor]().withMailbox("akka.actor.mailbox.unbounded-deque-based"),
 "default-override-from-props",
 UnboundedDeqMailboxTypes)
 }
 "get an bounded message queue when it's only configured with RequiresMailbox" in {
-checkMailboxQueue(Props[BoundedQueueReportingActor], "default-override-from-trait", BoundedMailboxTypes)
+checkMailboxQueue(Props[BoundedQueueReportingActor](), "default-override-from-trait", BoundedMailboxTypes)
 }
 "get an unbounded deque message queue when it's only mixed with Stash" in {
-checkMailboxQueue(Props[StashQueueReportingActor], "default-override-from-stash", UnboundedDeqMailboxTypes)
+checkMailboxQueue(Props[StashQueueReportingActor](), "default-override-from-stash", UnboundedDeqMailboxTypes)
 checkMailboxQueue(Props(new StashQueueReportingActor), "default-override-from-stash2", UnboundedDeqMailboxTypes)
 checkMailboxQueue(
 Props(classOf[StashQueueReportingActorWithParams], 17, "hello"),

@@ -270,99 +270,99 @@ class ActorMailboxSpec(conf: Config) extends AkkaSpec(conf) with DefaultTimeout
 }
 "get a bounded message queue when it's configured as mailbox" in {
-checkMailboxQueue(Props[QueueReportingActor], "default-bounded", BoundedMailboxTypes)
+checkMailboxQueue(Props[QueueReportingActor](), "default-bounded", BoundedMailboxTypes)
 }
 "get an unbounded deque message queue when it's configured as mailbox" in {
-checkMailboxQueue(Props[QueueReportingActor], "default-unbounded-deque", UnboundedDeqMailboxTypes)
+checkMailboxQueue(Props[QueueReportingActor](), "default-unbounded-deque", UnboundedDeqMailboxTypes)
 }
 "get a bounded control aware message queue when it's configured as mailbox" in {
-checkMailboxQueue(Props[QueueReportingActor], "default-bounded-control-aware", BoundedControlAwareMailboxTypes)
+checkMailboxQueue(Props[QueueReportingActor](), "default-bounded-control-aware", BoundedControlAwareMailboxTypes)
 }
 "get an unbounded control aware message queue when it's configured as mailbox" in {
 checkMailboxQueue(
-Props[QueueReportingActor],
+Props[QueueReportingActor](),
 "default-unbounded-control-aware",
 UnboundedControlAwareMailboxTypes)
 }
 "get an bounded control aware message queue when it's only configured with RequiresMailbox" in {
 checkMailboxQueue(
-Props[BoundedControlAwareQueueReportingActor],
+Props[BoundedControlAwareQueueReportingActor](),
 "default-override-from-trait-bounded-control-aware",
 BoundedControlAwareMailboxTypes)
 }
 "get an unbounded control aware message queue when it's only configured with RequiresMailbox" in {
 checkMailboxQueue(
-Props[UnboundedControlAwareQueueReportingActor],
+Props[UnboundedControlAwareQueueReportingActor](),
 "default-override-from-trait-unbounded-control-aware",
 UnboundedControlAwareMailboxTypes)
 }
 "fail to create actor when an unbounded dequeu message queue is configured as mailbox overriding RequestMailbox" in {
 intercept[ConfigurationException](
-system.actorOf(Props[BoundedQueueReportingActor], "default-unbounded-deque-override-trait"))
+system.actorOf(Props[BoundedQueueReportingActor](), "default-unbounded-deque-override-trait"))
 }
 "get an unbounded message queue when defined in dispatcher" in {
-checkMailboxQueue(Props[QueueReportingActor], "unbounded-default", UnboundedMailboxTypes)
+checkMailboxQueue(Props[QueueReportingActor](), "unbounded-default", UnboundedMailboxTypes)
 }
 "fail to create actor when an unbounded message queue is defined in dispatcher overriding RequestMailbox" in {
 intercept[ConfigurationException](
-system.actorOf(Props[BoundedQueueReportingActor], "unbounded-default-override-trait"))
+system.actorOf(Props[BoundedQueueReportingActor](), "unbounded-default-override-trait"))
 }
 "get a bounded message queue when it's configured as mailbox overriding unbounded in dispatcher" in {
-checkMailboxQueue(Props[QueueReportingActor], "unbounded-bounded", BoundedMailboxTypes)
+checkMailboxQueue(Props[QueueReportingActor](), "unbounded-bounded", BoundedMailboxTypes)
 }
 "get a bounded message queue when defined in dispatcher" in {
-checkMailboxQueue(Props[QueueReportingActor], "bounded-default", BoundedMailboxTypes)
+checkMailboxQueue(Props[QueueReportingActor](), "bounded-default", BoundedMailboxTypes)
 }
 "get a bounded message queue with 0 push timeout when defined in dispatcher" in {
 val q = checkMailboxQueue(
-Props[QueueReportingActor],
+Props[QueueReportingActor](),
 "default-bounded-mailbox-with-zero-pushtimeout",
 BoundedMailboxTypes)
 q.asInstanceOf[BoundedMessageQueueSemantics].pushTimeOut should ===(Duration.Zero)
 }
 "get an unbounded message queue when it's configured as mailbox overriding bounded in dispatcher" in {
-checkMailboxQueue(Props[QueueReportingActor], "bounded-unbounded", UnboundedMailboxTypes)
+checkMailboxQueue(Props[QueueReportingActor](), "bounded-unbounded", UnboundedMailboxTypes)
 }
 "get an unbounded message queue overriding configuration on the props" in {
 checkMailboxQueue(
-Props[QueueReportingActor].withMailbox("akka.actor.mailbox.unbounded-deque-based"),
+Props[QueueReportingActor]().withMailbox("akka.actor.mailbox.unbounded-deque-based"),
 "bounded-unbounded-override-props",
 UnboundedMailboxTypes)
 }
 "get a bounded deque-based message queue if configured and required" in {
 checkMailboxQueue(
-Props[StashQueueReportingActor],
+Props[StashQueueReportingActor](),
 "bounded-deque-requirements-configured",
 BoundedDeqMailboxTypes)
 }
 "fail with a unbounded deque-based message queue if configured and required" in {
 intercept[ConfigurationException](
-system.actorOf(Props[StashQueueReportingActor], "bounded-deque-require-unbounded-configured"))
+system.actorOf(Props[StashQueueReportingActor](), "bounded-deque-require-unbounded-configured"))
 }
 "fail with a bounded deque-based message queue if not configured" in {
 intercept[ConfigurationException](
-system.actorOf(Props[StashQueueReportingActor], "bounded-deque-require-unbounded-unconfigured"))
+system.actorOf(Props[StashQueueReportingActor](), "bounded-deque-require-unbounded-unconfigured"))
 }
 "get a bounded deque-based message queue if configured and required with Props" in {
 checkMailboxQueue(
-Props[StashQueueReportingActor]
+Props[StashQueueReportingActor]()
 .withDispatcher("requiring-bounded-dispatcher")
 .withMailbox("akka.actor.mailbox.bounded-deque-based"),
 "bounded-deque-requirements-configured-props",
@@ -372,7 +372,7 @@ class ActorMailboxSpec(conf: Config) extends AkkaSpec(conf) with DefaultTimeout
 "fail with a unbounded deque-based message queue if configured and required with Props" in {
 intercept[ConfigurationException](
 system.actorOf(
-Props[StashQueueReportingActor]
+Props[StashQueueReportingActor]()
 .withDispatcher("requiring-bounded-dispatcher")
 .withMailbox("akka.actor.mailbox.unbounded-deque-based"),
 "bounded-deque-require-unbounded-configured-props"))

@@ -381,13 +381,13 @@ class ActorMailboxSpec(conf: Config) extends AkkaSpec(conf) with DefaultTimeout
 "fail with a bounded deque-based message queue if not configured with Props" in {
 intercept[ConfigurationException](
 system.actorOf(
-Props[StashQueueReportingActor].withDispatcher("requiring-bounded-dispatcher"),
+Props[StashQueueReportingActor]().withDispatcher("requiring-bounded-dispatcher"),
 "bounded-deque-require-unbounded-unconfigured-props"))
 }
 "get a bounded deque-based message queue if configured and required with Props (dispatcher)" in {
 checkMailboxQueue(
-Props[StashQueueReportingActor].withDispatcher("requiring-bounded-dispatcher"),
+Props[StashQueueReportingActor]().withDispatcher("requiring-bounded-dispatcher"),
 "bounded-deque-requirements-configured-props-disp",
 BoundedDeqMailboxTypes)
 }

@@ -395,20 +395,20 @@ class ActorMailboxSpec(conf: Config) extends AkkaSpec(conf) with DefaultTimeout
 "fail with a unbounded deque-based message queue if configured and required with Props (dispatcher)" in {
 intercept[ConfigurationException](
 system.actorOf(
-Props[StashQueueReportingActor].withDispatcher("requiring-bounded-dispatcher"),
+Props[StashQueueReportingActor]().withDispatcher("requiring-bounded-dispatcher"),
 "bounded-deque-require-unbounded-configured-props-disp"))
 }
 "fail with a bounded deque-based message queue if not configured with Props (dispatcher)" in {
 intercept[ConfigurationException](
 system.actorOf(
-Props[StashQueueReportingActor].withDispatcher("requiring-bounded-dispatcher"),
+Props[StashQueueReportingActor]().withDispatcher("requiring-bounded-dispatcher"),
 "bounded-deque-require-unbounded-unconfigured-props-disp"))
 }
 "get a bounded deque-based message queue if configured and required with Props (mailbox)" in {
 checkMailboxQueue(
-Props[StashQueueReportingActor].withMailbox("akka.actor.mailbox.bounded-deque-based"),
+Props[StashQueueReportingActor]().withMailbox("akka.actor.mailbox.bounded-deque-based"),
 "bounded-deque-requirements-configured-props-mail",
 BoundedDeqMailboxTypes)
 }

@@ -416,32 +416,32 @@ class ActorMailboxSpec(conf: Config) extends AkkaSpec(conf) with DefaultTimeout
 "fail with a unbounded deque-based message queue if configured and required with Props (mailbox)" in {
 intercept[ConfigurationException](
 system.actorOf(
-Props[StashQueueReportingActor].withMailbox("akka.actor.mailbox.unbounded-deque-based"),
+Props[StashQueueReportingActor]().withMailbox("akka.actor.mailbox.unbounded-deque-based"),
 "bounded-deque-require-unbounded-configured-props-mail"))
 }
 "fail with a bounded deque-based message queue if not configured with Props (mailbox)" in {
 intercept[ConfigurationException](
-system.actorOf(Props[StashQueueReportingActor], "bounded-deque-require-unbounded-unconfigured-props-mail"))
+system.actorOf(Props[StashQueueReportingActor](), "bounded-deque-require-unbounded-unconfigured-props-mail"))
 }
 "get an unbounded message queue with a balancing dispatcher" in {
 checkMailboxQueue(
-Props[QueueReportingActor].withDispatcher("balancing-dispatcher"),
+Props[QueueReportingActor]().withDispatcher("balancing-dispatcher"),
 "unbounded-balancing",
 UnboundedMailboxTypes)
 }
 "get a bounded message queue with a balancing bounded dispatcher" in {
 checkMailboxQueue(
-Props[QueueReportingActor].withDispatcher("balancing-bounded-dispatcher"),
+Props[QueueReportingActor]().withDispatcher("balancing-bounded-dispatcher"),
 "bounded-balancing",
 BoundedMailboxTypes)
 }
 "get a bounded message queue with a requiring balancing bounded dispatcher" in {
 checkMailboxQueue(
-Props[QueueReportingActor].withDispatcher("requiring-balancing-bounded-dispatcher"),
+Props[QueueReportingActor]().withDispatcher("requiring-balancing-bounded-dispatcher"),
 "requiring-bounded-balancing",
 BoundedMailboxTypes)
 }
@@ -25,11 +25,11 @@ object ActorRefSpec {
 def receive = {
 case "complexRequest" => {
 replyTo = sender()
-val worker = context.actorOf(Props[WorkerActor])
+val worker = context.actorOf(Props[WorkerActor]())
 worker ! "work"
 }
 case "complexRequest2" =>
-val worker = context.actorOf(Props[WorkerActor])
+val worker = context.actorOf(Props[WorkerActor]())
 worker ! ReplyTo(sender())
 case "workDone" => replyTo ! "complexReply"
 case "simpleRequest" => sender() ! "simpleReply"

@@ -278,7 +278,7 @@ class ActorRefSpec extends AkkaSpec("""
 }
 "be serializable using Java Serialization on local node" in {
-val a = system.actorOf(Props[InnerActor])
+val a = system.actorOf(Props[InnerActor]())
 val esys = system.asInstanceOf[ExtendedActorSystem]
 import java.io._

@@ -309,7 +309,7 @@ class ActorRefSpec extends AkkaSpec("""
 }
 "throw an exception on deserialize if no system in scope" in {
-val a = system.actorOf(Props[InnerActor])
+val a = system.actorOf(Props[InnerActor]())
 import java.io._

@@ -337,7 +337,7 @@ class ActorRefSpec extends AkkaSpec("""
 val out = new ObjectOutputStream(baos)
 val sysImpl = system.asInstanceOf[ActorSystemImpl]
-val ref = system.actorOf(Props[ReplyActor], "non-existing")
+val ref = system.actorOf(Props[ReplyActor](), "non-existing")
 val serialized = SerializedActorRef(ref)
 out.writeObject(serialized)

@@ -381,7 +381,7 @@ class ActorRefSpec extends AkkaSpec("""
 "support reply via sender" in {
 val latch = new TestLatch(4)
-val serverRef = system.actorOf(Props[ReplyActor])
+val serverRef = system.actorOf(Props[ReplyActor]())
 val clientRef = system.actorOf(Props(new SenderActor(serverRef, latch)))
 clientRef ! "complex"

@@ -391,7 +391,7 @@ class ActorRefSpec extends AkkaSpec("""
 Await.ready(latch, timeout.duration)
-latch.reset
+latch.reset()
 clientRef ! "complex2"
 clientRef ! "simple"

@@ -19,7 +19,7 @@ object ActorSelectionSpec {
 final case class GetSender(to: ActorRef) extends Query
 final case class Forward(path: String, msg: Any) extends Query
-val p = Props[Node]
+val p = Props[Node]()
 class Node extends Actor {
 def receive = {
@@ -31,7 +31,7 @@ object ActorSystemSpec {
 case n: Int =>
 master = sender()
 terminaters = Set() ++ (for (_ <- 1 to n) yield {
-val man = context.watch(context.system.actorOf(Props[Terminater]))
+val man = context.watch(context.system.actorOf(Props[Terminater]()))
 man ! "run"
 man
 })

@@ -142,7 +142,7 @@ class ActorSystemSpec extends AkkaSpec(ActorSystemSpec.config) with ImplicitSend
 ActorSystem("LogDeadLetters", ConfigFactory.parseString("akka.loglevel=INFO").withFallback(AkkaSpec.testConf))
 try {
 val probe = TestProbe()(sys)
-val a = sys.actorOf(Props[ActorSystemSpec.Terminater])
+val a = sys.actorOf(Props[ActorSystemSpec.Terminater]())
 probe.watch(a)
 a.tell("run", probe.ref)
 probe.expectTerminated(a)

@@ -166,7 +166,7 @@ class ActorSystemSpec extends AkkaSpec(ActorSystemSpec.config) with ImplicitSend
 ActorSystem("LogDeadLetters", ConfigFactory.parseString("akka.loglevel=INFO").withFallback(AkkaSpec.testConf))
 try {
 val probe = TestProbe()(sys)
-val a = sys.actorOf(Props[ActorSystemSpec.Terminater])
+val a = sys.actorOf(Props[ActorSystemSpec.Terminater]())
 probe.watch(a)
 a.tell("run", probe.ref)
 probe.expectTerminated(a)

@@ -264,7 +264,7 @@ class ActorSystemSpec extends AkkaSpec(ActorSystemSpec.config) with ImplicitSend
 "reliably create waves of actors" in {
 import system.dispatcher
 implicit val timeout: Timeout = Timeout((20 seconds).dilated)
-val waves = for (_ <- 1 to 3) yield system.actorOf(Props[ActorSystemSpec.Waves]) ? 50000
+val waves = for (_ <- 1 to 3) yield system.actorOf(Props[ActorSystemSpec.Waves]()) ? 50000
 Await.result(Future.sequence(waves), timeout.duration + 5.seconds) should ===(Vector("done", "done", "done"))
 }

@@ -281,7 +281,7 @@ class ActorSystemSpec extends AkkaSpec(ActorSystemSpec.config) with ImplicitSend
 var created = Vector.empty[ActorRef]
 while (!system.whenTerminated.isCompleted) {
 try {
-val t = system.actorOf(Props[ActorSystemSpec.Terminater])
+val t = system.actorOf(Props[ActorSystemSpec.Terminater]())
 failing should not be true // because once failing => always failing (it’s due to shutdown)
 created :+= t
 if (created.size % 1000 == 0) Thread.sleep(50) // in case of unfair thread scheduling
@@ -135,22 +135,22 @@ class ActorWithBoundedStashSpec
 "An Actor with Stash" must {
 "end up in DeadLetters in case of a capacity violation when configured via dispatcher" in {
-val stasher = system.actorOf(Props[StashingActor].withDispatcher(dispatcherId1))
+val stasher = system.actorOf(Props[StashingActor]().withDispatcher(dispatcherId1))
 testDeadLetters(stasher)
 }
 "end up in DeadLetters in case of a capacity violation when configured via mailbox" in {
-val stasher = system.actorOf(Props[StashingActor].withMailbox(mailboxId1))
+val stasher = system.actorOf(Props[StashingActor]().withMailbox(mailboxId1))
 testDeadLetters(stasher)
 }
 "throw a StashOverflowException in case of a stash capacity violation when configured via dispatcher" in {
-val stasher = system.actorOf(Props[StashingActorWithOverflow].withDispatcher(dispatcherId2))
+val stasher = system.actorOf(Props[StashingActorWithOverflow]().withDispatcher(dispatcherId2))
 testStashOverflowException(stasher)
 }
 "throw a StashOverflowException in case of a stash capacity violation when configured via mailbox" in {
-val stasher = system.actorOf(Props[StashingActorWithOverflow].withMailbox(mailboxId2))
+val stasher = system.actorOf(Props[StashingActorWithOverflow]().withMailbox(mailboxId2))
 testStashOverflowException(stasher)
 }
 }

@@ -24,7 +24,7 @@ object ActorWithStashSpec {
 def greeted: Receive = {
 case "bye" =>
 state.s = "bye"
-state.finished.await
+state.finished.await()
 case _ => // do nothing
 }

@@ -63,7 +63,7 @@ object ActorWithStashSpec {
 context.unbecome()
 case _ => stash()
 }
-case "done" => state.finished.await
+case "done" => state.finished.await()
 case _ => stash()
 }
 }

@@ -73,7 +73,7 @@ object ActorWithStashSpec {
 }
 class TerminatedMessageStashingActor(probe: ActorRef) extends Actor with Stash {
-val watched = context.watch(context.actorOf(Props[WatchedActor]))
+val watched = context.watch(context.actorOf(Props[WatchedActor]()))
 var stashed = false
 context.stop(watched)

@@ -109,7 +109,7 @@ class ActorWithStashSpec extends AkkaSpec with DefaultTimeout with BeforeAndAfte
 system.eventStream.publish(Mute(EventFilter[Exception]("Crashing...")))
 }
-override def beforeEach() = state.finished.reset
+override def beforeEach() = state.finished.reset()
 "An Actor with Stash" must {

@@ -117,12 +117,12 @@ class ActorWithStashSpec extends AkkaSpec with DefaultTimeout with BeforeAndAfte
 val stasher = system.actorOf(Props(new StashingActor))
 stasher ! "bye"
 stasher ! "hello"
-state.finished.await
+state.finished.await()
 state.s should ===("bye")
 }
 "support protocols" in {
-val protoActor = system.actorOf(Props[ActorWithProtocol])
+val protoActor = system.actorOf(Props[ActorWithProtocol]())
 protoActor ! "open"
 protoActor ! "write"
 protoActor ! "open"

@@ -130,12 +130,12 @@ class ActorWithStashSpec extends AkkaSpec with DefaultTimeout with BeforeAndAfte
 protoActor ! "write"
 protoActor ! "close"
 protoActor ! "done"
-state.finished.await
+state.finished.await()
 }
 "throw an IllegalStateException if the same messages is stashed twice" in {
 state.expectedException = new TestLatch
-val stasher = system.actorOf(Props[StashingTwiceActor])
+val stasher = system.actorOf(Props[StashingTwiceActor]())
 stasher ! "hello"
 stasher ! "hello"
 Await.ready(state.expectedException, 10 seconds)
@@ -60,7 +60,7 @@ class ConsistencySpec extends AkkaSpec(ConsistencySpec.config) {
 "The Akka actor model implementation" must {
 "provide memory consistency" in {
 val noOfActors = threads + 1
-val props = Props[ConsistencyCheckingActor].withDispatcher("consistency-dispatcher")
+val props = Props[ConsistencyCheckingActor]().withDispatcher("consistency-dispatcher")
 val actors = Vector.fill(noOfActors)(system.actorOf(props))
 for (i <- 0L until 10000L) {

@@ -44,7 +44,7 @@ object DeathWatchSpec {
 context.become {
 case Terminated(`currentKid`) =>
 testActor ! "GREEN"
-context unbecome
+context.unbecome()
 }
 }
 }
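The `- context unbecome` / `+ context.unbecome()` change just above is the case the commit message calls out as needing a manual fix before `fixall` could run: with postfix notation the rule would have produced `context unbecome()` rather than the intended member call. A condensed, hypothetical sketch of that shape (not the actual DeathWatchSpec actor):

```scala
import akka.actor.Actor

class Watcher extends Actor {
  def receive: Receive = {
    case "start" =>
      context.become {
        case "done" =>
          // Originally written postfix as `context unbecome`; the scalafix rule
          // would rewrite that to `context unbecome()` instead of the explicit
          // member call, so the line was fixed by hand to the form below.
          context.unbecome()
      }
  }
}
```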
@@ -217,7 +217,7 @@ trait DeathWatchSpec { this: AkkaSpec with ImplicitSender with DefaultTimeout =>
 .sendSystemMessage(DeathWatchNotification(subject, existenceConfirmed = true, addressTerminated = false))
 // the testActor is not watching subject and will not receive a Terminated msg
-expectNoMessage
+expectNoMessage()
 }
 "discard Terminated when unwatched between sysmsg and processing" in {
@@ -43,17 +43,17 @@ object FSMActorSpec {
 case Event(digit: Char, CodeState(soFar, code)) => {
 soFar + digit match {
 case incomplete if incomplete.length < code.length =>
-stay.using(CodeState(incomplete, code))
+stay().using(CodeState(incomplete, code))
 case codeTry if (codeTry == code) => {
 doUnlock()
 goto(Open).using(CodeState("", code)).forMax(timeout)
 }
 case _ => {
-stay.using(CodeState("", code))
+stay().using(CodeState("", code))
 }
 }
 }
-case Event("hello", _) => stay.replying("world")
+case Event("hello", _) => stay().replying("world")
 case Event("bye", _) => stop(FSM.Shutdown)
 }

@@ -67,13 +67,13 @@ object FSMActorSpec {
 whenUnhandled {
 case Event(msg, _) => {
 log.warning("unhandled event " + msg + " in state " + stateName + " with data " + stateData)
-unhandledLatch.open
+unhandledLatch.open()
-stay
+stay()
 }
 }
 onTransition {
-case Locked -> Open => transitionLatch.open
+case Locked -> Open => transitionLatch.open()
 }
 // verify that old-style does still compile

@@ -119,8 +119,8 @@ class FSMActorSpec extends AkkaSpec(Map("akka.actor.debug.fsm" -> true)) with Im
 val transitionTester = system.actorOf(Props(new Actor {
 def receive = {
-case Transition(_, _, _) => transitionCallBackLatch.open
+case Transition(_, _, _) => transitionCallBackLatch.open()
-case CurrentState(_, s: LockState) if s eq Locked => initialStateLatch.open // SI-5900 workaround
+case CurrentState(_, s: LockState) if s eq Locked => initialStateLatch.open() // SI-5900 workaround
 }
 }))

@@ -147,7 +147,7 @@ class FSMActorSpec extends AkkaSpec(Map("akka.actor.debug.fsm" -> true)) with Im
 val tester = system.actorOf(Props(new Actor {
 def receive = {
 case Hello => lock ! "hello"
-case "world" => answerLatch.open
+case "world" => answerLatch.open()
 case Bye => lock ! "bye"
 }
 }))

@@ -183,7 +183,7 @@ class FSMActorSpec extends AkkaSpec(Map("akka.actor.debug.fsm" -> true)) with Im
 * It is necessary here because of the path-dependent type fsm.StopEvent.
 */
 lazy val fsm = new Actor with FSM[Int, Null] {
-override def preStart = { started.countDown }
+override def preStart = { started.countDown() }
 startWith(1, null)
 when(1) { FSM.NullFunction }
 onTermination {

@@ -269,7 +269,7 @@ class FSMActorSpec extends AkkaSpec(Map("akka.actor.debug.fsm" -> true)) with Im
 when(2) {
 case Event("stop", _) =>
 cancelTimer("t")
-stop
+stop()
 }
 onTermination {
 case StopEvent(r, _, _) => testActor ! r

@@ -307,8 +307,8 @@ class FSMActorSpec extends AkkaSpec(Map("akka.actor.debug.fsm" -> true)) with Im
 override def logDepth = 3
 startWith(1, 0)
 when(1) {
-case Event("count", c) => stay.using(c + 1)
+case Event("count", c) => stay().using(c + 1)
-case Event("log", _) => stay.replying(getLog)
+case Event("log", _) => stay().replying(getLog)
 }
 })
 fsmref ! "log"

@@ -327,12 +327,12 @@ class FSMActorSpec extends AkkaSpec(Map("akka.actor.debug.fsm" -> true)) with Im
 val fsmref = system.actorOf(Props(new Actor with FSM[Int, Int] {
 startWith(0, 0)
 when(0)(transform {
-case Event("go", _) => stay
+case Event("go", _) => stay()
 }.using {
 case _ => goto(1)
 })
 when(1) {
-case _ => stay
+case _ => stay()
 }
 }))
 fsmref ! SubscribeTransitionCallBack(testActor)
@@ -44,7 +44,7 @@ class FSMTimingSpec extends AkkaSpec with ImplicitSender {
 }
 "cancel a StateTimeout when actor is stopped" taggedAs TimingTest in {
-val stoppingActor = system.actorOf(Props[StoppingActor])
+val stoppingActor = system.actorOf(Props[StoppingActor]())
 system.eventStream.subscribe(testActor, classOf[DeadLetter])
 stoppingActor ! TestStoppingActorStateTimeout
 within(400 millis) {

@@ -56,7 +56,7 @@ class FSMTimingSpec extends AkkaSpec with ImplicitSender {
 // the timeout in state TestStateTimeout is 800 ms, then it will change to Initial
 within(400 millis) {
 fsm ! TestStateTimeoutOverride
-expectNoMessage
+expectNoMessage()
 }
 within(1 second) {
 fsm ! Cancel

@@ -72,7 +72,7 @@ class FSMTimingSpec extends AkkaSpec with ImplicitSender {
 expectMsg(Tick)
 expectMsg(Transition(fsm, TestSingleTimer, Initial))
 }
-expectNoMessage
+expectNoMessage()
 }
 }

@@ -86,7 +86,7 @@ class FSMTimingSpec extends AkkaSpec with ImplicitSender {
 expectMsg(Tock)
 expectMsg(Transition(fsm, TestSingleTimerResubmit, Initial))
 }
-expectNoMessage
+expectNoMessage()
 }
 }

@@ -232,10 +232,10 @@ object FSMTimingSpec {
 cancelTimer("hallo")
 sender() ! Tick
 startSingleTimer("hallo", Tock, 500.millis.dilated)
-stay
+stay()
 case Event(Tock, _) =>
 tester ! Tock
-stay
+stay()
 case Event(Cancel, _) =>
 cancelTimer("hallo")
 goto(Initial)

@@ -247,7 +247,7 @@ object FSMTimingSpec {
 cancelTimer("tester")
 goto(Initial)
 } else {
-stay.using(remaining - 1)
+stay().using(remaining - 1)
 }
 }
 when(TestCancelStateTimerInNamedTimerMessage) {

@@ -256,7 +256,7 @@ object FSMTimingSpec {
 suspend(self)
 startSingleTimer("named", Tock, 1.millis.dilated)
 TestKit.awaitCond(context.asInstanceOf[ActorCell].mailbox.hasMessages, 1.second.dilated)
-stay.forMax(1.millis.dilated).replying(Tick)
+stay().forMax(1.millis.dilated).replying(Tick)
 case Event(Tock, _) =>
 goto(TestCancelStateTimerInNamedTimerMessage2)
 }

@@ -271,9 +271,9 @@ object FSMTimingSpec {
 whenUnhandled {
 case Event(Tick, _) =>
 tester ! Unhandled(Tick)
-stay
+stay()
 }
-stay
+stay()
 case Event(Cancel, _) =>
 whenUnhandled(NullFunction)
 goto(Initial)

@@ -286,7 +286,7 @@ object FSMTimingSpec {
 when(Initial, 200 millis) {
 case Event(TestStoppingActorStateTimeout, _) =>
 context.stop(self)
-stay
+stay()
 }
 }
@@ -35,7 +35,7 @@ object FSMTransitionSpec {
 case Event("tick", _) => goto(0)
 }
 whenUnhandled {
-case Event("reply", _) => stay.replying("reply")
+case Event("reply", _) => stay().replying("reply")
 }
 initialize()
 override def preRestart(reason: Throwable, msg: Option[Any]): Unit = { target ! "restarted" }

@@ -28,7 +28,7 @@ object FunctionRefSpec {
 }
 class SupSuper extends Actor {
-val s = context.actorOf(Props[Super], "super")
+val s = context.actorOf(Props[Super](), "super")
 def receive = {
 case msg => s ! msg
 }

@@ -86,12 +86,12 @@ class FunctionRefSpec extends AkkaSpec("""
 "A FunctionRef" when {
 "created by a toplevel actor" must {
-val s = system.actorOf(Props[Super], "super")
+val s = system.actorOf(Props[Super](), "super")
 commonTests(s)
 }
 "created by a non-toplevel actor" must {
-val s = system.actorOf(Props[SupSuper], "supsuper")
+val s = system.actorOf(Props[SupSuper](), "supsuper")
 commonTests(s)
 }
@@ -66,7 +66,7 @@ class ReceiveTimeoutSpec extends AkkaSpec() {
 context.setReceiveTimeout(500 milliseconds)
 def receive = {
-case ReceiveTimeout => timeoutLatch.open
+case ReceiveTimeout => timeoutLatch.open()
 }
 }))

@@ -82,7 +82,7 @@ class ReceiveTimeoutSpec extends AkkaSpec() {
 def receive = {
 case Tick => ()
-case ReceiveTimeout => timeoutLatch.open
+case ReceiveTimeout => timeoutLatch.open()
 }
 }))

@@ -103,7 +103,7 @@ class ReceiveTimeoutSpec extends AkkaSpec() {
 case Tick => ()
 case ReceiveTimeout =>
 count.incrementAndGet
-timeoutLatch.open
+timeoutLatch.open()
 context.setReceiveTimeout(Duration.Undefined)
 }
 }))

@@ -120,7 +120,7 @@ class ReceiveTimeoutSpec extends AkkaSpec() {
 val timeoutActor = system.actorOf(Props(new Actor {
 def receive = {
-case ReceiveTimeout => timeoutLatch.open
+case ReceiveTimeout => timeoutLatch.open()
 }
 }))

@@ -135,7 +135,7 @@ class ReceiveTimeoutSpec extends AkkaSpec() {
 context.setReceiveTimeout(1 second)
 def receive = {
-case ReceiveTimeout => timeoutLatch.open
+case ReceiveTimeout => timeoutLatch.open()
 case TransparentTick =>
 }
 }))

@@ -179,7 +179,7 @@ class ReceiveTimeoutSpec extends AkkaSpec() {
 context.setReceiveTimeout(1 second)
 def receive: Receive = {
 case ReceiveTimeout =>
-timeoutLatch.open
+timeoutLatch.open()
 case TransparentTick =>
 count.incrementAndGet()
 }

@@ -198,7 +198,7 @@ class ReceiveTimeoutSpec extends AkkaSpec() {
 val timeoutActor = system.actorOf(Props(new Actor {
 def receive = {
 case TransparentTick => context.setReceiveTimeout(500 milliseconds)
-case ReceiveTimeout => timeoutLatch.open
+case ReceiveTimeout => timeoutLatch.open()
 }
 }))

@@ -216,7 +216,7 @@ class ReceiveTimeoutSpec extends AkkaSpec() {
 def receive = {
 case TransparentTick => context.setReceiveTimeout(Duration.Inf)
-case ReceiveTimeout => timeoutLatch.open
+case ReceiveTimeout => timeoutLatch.open()
 }
 }))

@@ -235,7 +235,7 @@ class ReceiveTimeoutSpec extends AkkaSpec() {
 def receive: Receive = {
 case TransparentTick => context.setReceiveTimeout(Duration.Undefined)
-case ReceiveTimeout => timeoutLatch.open
+case ReceiveTimeout => timeoutLatch.open()
 }
 }))
@ -116,7 +116,7 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout {
|
||||||
|
|
||||||
def receive = {
|
def receive = {
|
||||||
case Ping =>
|
case Ping =>
|
||||||
if (!pingLatch.isOpen) pingLatch.open else secondPingLatch.open
|
if (!pingLatch.isOpen) pingLatch.open() else secondPingLatch.open()
|
||||||
case Crash => throw new Exception("Crashing...")
|
case Crash => throw new Exception("Crashing...")
|
||||||
}
|
}
|
||||||
override def postRestart(reason: Throwable) = {
|
override def postRestart(reason: Throwable) = {
|
||||||
|
|
|
||||||
|
|
@ -330,7 +330,7 @@ trait SchedulerSpec extends BeforeAndAfterEach with DefaultTimeout with Implicit
|
||||||
case Crash => throw new Exception("CRASH")
|
case Crash => throw new Exception("CRASH")
|
||||||
}
|
}
|
||||||
|
|
||||||
override def postRestart(reason: Throwable) = restartLatch.open
|
override def postRestart(reason: Throwable) = restartLatch.open()
|
||||||
})
|
})
|
||||||
val actor = Await.result((supervisor ? props).mapTo[ActorRef], timeout.duration)
|
val actor = Await.result((supervisor ? props).mapTo[ActorRef], timeout.duration)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -53,7 +53,7 @@ object SupervisorHierarchySpec {
|
||||||
class Resumer extends Actor {
|
class Resumer extends Actor {
|
||||||
override def supervisorStrategy = OneForOneStrategy() { case _ => SupervisorStrategy.Resume }
|
override def supervisorStrategy = OneForOneStrategy() { case _ => SupervisorStrategy.Resume }
|
||||||
def receive = {
|
def receive = {
|
||||||
case "spawn" => sender() ! context.actorOf(Props[Resumer])
|
case "spawn" => sender() ! context.actorOf(Props[Resumer]())
|
||||||
case "fail" => throw new Exception("expected")
|
case "fail" => throw new Exception("expected")
|
||||||
case "ping" => sender() ! "pong"
|
case "ping" => sender() ! "pong"
|
||||||
}
|
}
|
||||||
|
|
@ -294,7 +294,7 @@ object SupervisorHierarchySpec {
|
||||||
setFlags(f.directive)
|
setFlags(f.directive)
|
||||||
stateCache.put(self.path, stateCache.get(self.path).copy(failConstr = f.copy()))
|
stateCache.put(self.path, stateCache.get(self.path).copy(failConstr = f.copy()))
|
||||||
throw f
|
throw f
|
||||||
case "ping" => { Thread.sleep((random.nextFloat * 1.03).toLong); sender() ! "pong" }
|
case "ping" => { Thread.sleep((random.nextFloat() * 1.03).toLong); sender() ! "pong" }
|
||||||
case Dump(0) => abort("dump")
|
case Dump(0) => abort("dump")
|
||||||
case Dump(level) => context.children.foreach(_ ! Dump(level - 1))
|
case Dump(level) => context.children.foreach(_ ! Dump(level - 1))
|
||||||
case Terminated(ref) =>
|
case Terminated(ref) =>
|
||||||
|
|
@ -432,7 +432,7 @@ object SupervisorHierarchySpec {
|
||||||
var idleChildren = Vector.empty[ActorRef]
|
var idleChildren = Vector.empty[ActorRef]
|
||||||
var pingChildren = Set.empty[ActorRef]
|
var pingChildren = Set.empty[ActorRef]
|
||||||
|
|
||||||
val nextJob = Iterator.continually(random.nextFloat match {
|
val nextJob = Iterator.continually(random.nextFloat() match {
|
||||||
case x if x >= 0.5 =>
|
case x if x >= 0.5 =>
|
||||||
// ping one child
|
// ping one child
|
||||||
val pick = ((x - 0.5) * 2 * idleChildren.size).toInt
|
val pick = ((x - 0.5) * 2 * idleChildren.size).toInt
|
||||||
|
|
@ -479,7 +479,7 @@ object SupervisorHierarchySpec {
|
||||||
} else {
|
} else {
|
||||||
children :+= ref
|
children :+= ref
|
||||||
if (children.size == size) goto(Stress)
|
if (children.size == size) goto(Stress)
|
||||||
else stay
|
else stay()
|
||||||
}
|
}
|
||||||
case Event(StateTimeout, _) =>
|
case Event(StateTimeout, _) =>
|
||||||
testActor ! "did not get children list"
|
testActor ! "did not get children list"
|
||||||
|
|
@ -497,7 +497,7 @@ object SupervisorHierarchySpec {
|
||||||
|
|
||||||
val workSchedule = 50.millis
|
val workSchedule = 50.millis
|
||||||
|
|
||||||
private def random012: Int = random.nextFloat match {
|
private def random012: Int = random.nextFloat() match {
|
||||||
case x if x > 0.1 => 0
|
case x if x > 0.1 => 0
|
||||||
case x if x > 0.03 => 1
|
case x if x > 0.03 => 1
|
||||||
case _ => 2
|
case _ => 2
|
||||||
|
|
@ -516,9 +516,9 @@ object SupervisorHierarchySpec {
|
||||||
when(Stress) {
|
when(Stress) {
|
||||||
case Event(Work, _) if idleChildren.isEmpty =>
|
case Event(Work, _) if idleChildren.isEmpty =>
|
||||||
context.system.scheduler.scheduleOnce(workSchedule, self, Work)(context.dispatcher)
|
context.system.scheduler.scheduleOnce(workSchedule, self, Work)(context.dispatcher)
|
||||||
stay
|
stay()
|
||||||
case Event(Work, x) if x > 0 =>
|
case Event(Work, x) if x > 0 =>
|
||||||
nextJob.next match {
|
nextJob.next() match {
|
||||||
case Ping(ref) => ref ! "ping"
|
case Ping(ref) => ref ! "ping"
|
||||||
case Fail(ref, dir) =>
|
case Fail(ref, dir) =>
|
||||||
val f = Failure(
|
val f = Failure(
|
||||||
|
|
@ -537,15 +537,15 @@ object SupervisorHierarchySpec {
|
||||||
}
|
}
|
||||||
if (idleChildren.nonEmpty) self ! Work
|
if (idleChildren.nonEmpty) self ! Work
|
||||||
else context.system.scheduler.scheduleOnce(workSchedule, self, Work)(context.dispatcher)
|
else context.system.scheduler.scheduleOnce(workSchedule, self, Work)(context.dispatcher)
|
||||||
stay.using(x - 1)
|
stay().using(x - 1)
|
||||||
case Event(Work, _) => if (pingChildren.isEmpty) goto(LastPing) else goto(Finishing)
|
case Event(Work, _) => if (pingChildren.isEmpty) goto(LastPing) else goto(Finishing)
|
||||||
case Event(Died(path), _) =>
|
case Event(Died(path), _) =>
|
||||||
bury(path)
|
bury(path)
|
||||||
stay
|
stay()
|
||||||
case Event("pong", _) =>
|
case Event("pong", _) =>
|
||||||
pingChildren -= sender()
|
pingChildren -= sender()
|
||||||
idleChildren :+= sender()
|
idleChildren :+= sender()
|
||||||
stay
|
stay()
|
||||||
case Event(StateTimeout, todo) =>
|
case Event(StateTimeout, todo) =>
|
||||||
log.info("dumping state due to StateTimeout")
|
log.info("dumping state due to StateTimeout")
|
||||||
log.info(
|
log.info(
|
||||||
|
|
@ -566,10 +566,10 @@ object SupervisorHierarchySpec {
|
||||||
case Event("pong", _) =>
|
case Event("pong", _) =>
|
||||||
pingChildren -= sender()
|
pingChildren -= sender()
|
||||||
idleChildren :+= sender()
|
idleChildren :+= sender()
|
||||||
if (pingChildren.isEmpty) goto(LastPing) else stay
|
if (pingChildren.isEmpty) goto(LastPing) else stay()
|
||||||
case Event(Died(ref), _) =>
|
case Event(Died(ref), _) =>
|
||||||
bury(ref)
|
bury(ref)
|
||||||
if (pingChildren.isEmpty) goto(LastPing) else stay
|
if (pingChildren.isEmpty) goto(LastPing) else stay()
|
||||||
}
|
}
|
||||||
|
|
||||||
onTransition {
|
onTransition {
|
||||||
|
|
@ -583,10 +583,10 @@ object SupervisorHierarchySpec {
|
||||||
case Event("pong", _) =>
|
case Event("pong", _) =>
|
||||||
pingChildren -= sender()
|
pingChildren -= sender()
|
||||||
idleChildren :+= sender()
|
idleChildren :+= sender()
|
||||||
if (pingChildren.isEmpty) goto(Stopping) else stay
|
if (pingChildren.isEmpty) goto(Stopping) else stay()
|
||||||
case Event(Died(ref), _) =>
|
case Event(Died(ref), _) =>
|
||||||
bury(ref)
|
bury(ref)
|
||||||
if (pingChildren.isEmpty) goto(Stopping) else stay
|
if (pingChildren.isEmpty) goto(Stopping) else stay()
|
||||||
}
|
}
|
||||||
|
|
||||||
onTransition {
|
onTransition {
|
||||||
|
|
@ -596,14 +596,14 @@ object SupervisorHierarchySpec {
|
||||||
}
|
}
|
||||||
|
|
||||||
when(Stopping, stateTimeout = 5.seconds.dilated) {
|
when(Stopping, stateTimeout = 5.seconds.dilated) {
|
||||||
case Event(PongOfDeath, _) => stay
|
case Event(PongOfDeath, _) => stay()
|
||||||
case Event(Terminated(r), _) if r == hierarchy =>
|
case Event(Terminated(r), _) if r == hierarchy =>
|
||||||
@silent
|
@silent
|
||||||
val undead = children.filterNot(_.isTerminated)
|
val undead = children.filterNot(_.isTerminated)
|
||||||
if (undead.nonEmpty) {
|
if (undead.nonEmpty) {
|
||||||
log.info("undead:\n" + undead.mkString("\n"))
|
log.info("undead:\n" + undead.mkString("\n"))
|
||||||
testActor ! "stressTestFailed (" + undead.size + " undead)"
|
testActor ! "stressTestFailed (" + undead.size + " undead)"
|
||||||
stop
|
stop()
|
||||||
} else if (false) {
|
} else if (false) {
|
||||||
/*
|
/*
|
||||||
* This part of the test is normally disabled, because it does not
|
* This part of the test is normally disabled, because it does not
|
||||||
|
|
@ -621,7 +621,7 @@ object SupervisorHierarchySpec {
|
||||||
goto(GC)
|
goto(GC)
|
||||||
} else {
|
} else {
|
||||||
testActor ! "stressTestSuccessful"
|
testActor ! "stressTestSuccessful"
|
||||||
stop
|
stop()
|
||||||
}
|
}
|
||||||
case Event(StateTimeout, _) =>
|
case Event(StateTimeout, _) =>
|
||||||
errors :+= self -> ErrorLog("timeout while Stopping", Vector.empty)
|
errors :+= self -> ErrorLog("timeout while Stopping", Vector.empty)
|
||||||
|
|
@ -630,7 +630,7 @@ object SupervisorHierarchySpec {
|
||||||
printErrors()
|
printErrors()
|
||||||
idleChildren.foreach(println)
|
idleChildren.foreach(println)
|
||||||
testActor ! "timeout in Stopping"
|
testActor ! "timeout in Stopping"
|
||||||
stop
|
stop()
|
||||||
case Event(e: ErrorLog, _) =>
|
case Event(e: ErrorLog, _) =>
|
||||||
errors :+= sender() -> e
|
errors :+= sender() -> e
|
||||||
goto(Failed)
|
goto(Failed)
|
||||||
|
|
@ -642,14 +642,14 @@ object SupervisorHierarchySpec {
|
||||||
if (next.nonEmpty) {
|
if (next.nonEmpty) {
|
||||||
context.system.scheduler.scheduleOnce(workSchedule, self, GCcheck(next))(context.dispatcher)
|
context.system.scheduler.scheduleOnce(workSchedule, self, GCcheck(next))(context.dispatcher)
|
||||||
System.gc()
|
System.gc()
|
||||||
stay
|
stay()
|
||||||
} else {
|
} else {
|
||||||
testActor ! "stressTestSuccessful"
|
testActor ! "stressTestSuccessful"
|
||||||
stop
|
stop()
|
||||||
}
|
}
|
||||||
case Event(StateTimeout, _) =>
|
case Event(StateTimeout, _) =>
|
||||||
testActor ! "timeout in GC"
|
testActor ! "timeout in GC"
|
||||||
stop
|
stop()
|
||||||
}
|
}
|
||||||
|
|
||||||
var errors = Vector.empty[(ActorRef, ErrorLog)]
|
var errors = Vector.empty[(ActorRef, ErrorLog)]
|
||||||
|
|
@ -658,19 +658,19 @@ object SupervisorHierarchySpec {
|
||||||
case Event(e: ErrorLog, _) =>
|
case Event(e: ErrorLog, _) =>
|
||||||
if (!e.msg.startsWith("not resumed") || !ignoreNotResumedLogs)
|
if (!e.msg.startsWith("not resumed") || !ignoreNotResumedLogs)
|
||||||
errors :+= sender() -> e
|
errors :+= sender() -> e
|
||||||
stay
|
stay()
|
||||||
case Event(Terminated(r), _) if r == hierarchy =>
|
case Event(Terminated(r), _) if r == hierarchy =>
|
||||||
printErrors()
|
printErrors()
|
||||||
testActor ! "stressTestFailed"
|
testActor ! "stressTestFailed"
|
||||||
stop
|
stop()
|
||||||
case Event(StateTimeout, _) =>
|
case Event(StateTimeout, _) =>
|
||||||
getErrors(hierarchy, 10)
|
getErrors(hierarchy, 10)
|
||||||
printErrors()
|
printErrors()
|
||||||
testActor ! "timeout in Failed"
|
testActor ! "timeout in Failed"
|
||||||
stop
|
stop()
|
||||||
case Event("pong", _) => stay // don’t care?
|
case Event("pong", _) => stay() // don’t care?
|
||||||
case Event(Work, _) => stay
|
case Event(Work, _) => stay()
|
||||||
case Event(Died(_), _) => stay
|
case Event(Died(_), _) => stay()
|
||||||
}
|
}
|
||||||
|
|
||||||
def getErrors(target: ActorRef, depth: Int): Unit = {
|
def getErrors(target: ActorRef, depth: Int): Unit = {
|
||||||
|
|
@ -716,9 +716,9 @@ object SupervisorHierarchySpec {
|
||||||
activeChildren :+= ref
|
activeChildren :+= ref
|
||||||
children :+= ref
|
children :+= ref
|
||||||
idleChildren :+= ref
|
idleChildren :+= ref
|
||||||
stay
|
stay()
|
||||||
case Event(e: ErrorLog, _) =>
|
case Event(e: ErrorLog, _) =>
|
||||||
if (e.msg.startsWith("not resumed")) stay
|
if (e.msg.startsWith("not resumed")) stay()
|
||||||
else {
|
else {
|
||||||
errors :+= sender() -> e
|
errors :+= sender() -> e
|
||||||
// don’t stop the hierarchy, that is going to happen all by itself and in the right order
|
// don’t stop the hierarchy, that is going to happen all by itself and in the right order
|
||||||
|
|
@ -737,7 +737,7 @@ object SupervisorHierarchySpec {
|
||||||
goto(Failed)
|
goto(Failed)
|
||||||
case Event(msg, _) =>
|
case Event(msg, _) =>
|
||||||
testActor ! ("received unexpected msg: " + msg)
|
testActor ! ("received unexpected msg: " + msg)
|
||||||
stop
|
stop()
|
||||||
}
|
}
|
||||||
|
|
||||||
initialize()
|
initialize()
|
||||||
|
|
@ -801,7 +801,7 @@ class SupervisorHierarchySpec extends AkkaSpec(SupervisorHierarchySpec.config) w
|
||||||
}
|
}
|
||||||
|
|
||||||
"resume children after Resume" taggedAs LongRunningTest in {
|
"resume children after Resume" taggedAs LongRunningTest in {
|
||||||
val boss = system.actorOf(Props[Resumer], "resumer")
|
val boss = system.actorOf(Props[Resumer](), "resumer")
|
||||||
boss ! "spawn"
|
boss ! "spawn"
|
||||||
val middle = expectMsgType[ActorRef]
|
val middle = expectMsgType[ActorRef]
|
||||||
middle ! "spawn"
|
middle ! "spawn"
|
||||||
|
|
@ -824,7 +824,7 @@ class SupervisorHierarchySpec extends AkkaSpec(SupervisorHierarchySpec.config) w
|
||||||
case _ => Await.ready(latch, 4.seconds.dilated); SupervisorStrategy.Resume
|
case _ => Await.ready(latch, 4.seconds.dilated); SupervisorStrategy.Resume
|
||||||
}
|
}
|
||||||
def receive = {
|
def receive = {
|
||||||
case "spawn" => sender() ! context.actorOf(Props[Resumer])
|
case "spawn" => sender() ! context.actorOf(Props[Resumer]())
|
||||||
}
|
}
|
||||||
}), "slowResumer")
|
}), "slowResumer")
|
||||||
slowResumer ! "spawn"
|
slowResumer ! "spawn"
|
||||||
|
|
|
||||||
|
|
@ -28,7 +28,7 @@ class Ticket669Spec extends AkkaSpec with BeforeAndAfterAll with ImplicitSender
|
||||||
filterEvents(EventFilter[Exception]("test", occurrences = 1)) {
|
filterEvents(EventFilter[Exception]("test", occurrences = 1)) {
|
||||||
val supervisor =
|
val supervisor =
|
||||||
system.actorOf(Props(new Supervisor(AllForOneStrategy(5, 10 seconds)(List(classOf[Exception])))))
|
system.actorOf(Props(new Supervisor(AllForOneStrategy(5, 10 seconds)(List(classOf[Exception])))))
|
||||||
val supervised = Await.result((supervisor ? Props[Supervised]).mapTo[ActorRef], timeout.duration)
|
val supervised = Await.result((supervisor ? Props[Supervised]()).mapTo[ActorRef], timeout.duration)
|
||||||
|
|
||||||
supervised.!("test")(testActor)
|
supervised.!("test")(testActor)
|
||||||
expectMsg("failure1")
|
expectMsg("failure1")
|
||||||
|
|
@ -40,7 +40,7 @@ class Ticket669Spec extends AkkaSpec with BeforeAndAfterAll with ImplicitSender
|
||||||
filterEvents(EventFilter[Exception]("test", occurrences = 1)) {
|
filterEvents(EventFilter[Exception]("test", occurrences = 1)) {
|
||||||
val supervisor =
|
val supervisor =
|
||||||
system.actorOf(Props(new Supervisor(AllForOneStrategy(maxNrOfRetries = 0)(List(classOf[Exception])))))
|
system.actorOf(Props(new Supervisor(AllForOneStrategy(maxNrOfRetries = 0)(List(classOf[Exception])))))
|
||||||
val supervised = Await.result((supervisor ? Props[Supervised]).mapTo[ActorRef], timeout.duration)
|
val supervised = Await.result((supervisor ? Props[Supervised]()).mapTo[ActorRef], timeout.duration)
|
||||||
|
|
||||||
supervised.!("test")(testActor)
|
supervised.!("test")(testActor)
|
||||||
expectMsg("failure2")
|
expectMsg("failure2")
|
||||||
|
|
|
||||||
|
|
@ -108,7 +108,7 @@ object TimerSpec {
|
||||||
startTimerWithFixedDelay("T", Tick(bumpCount + 1), interval)
|
startTimerWithFixedDelay("T", Tick(bumpCount + 1), interval)
|
||||||
else
|
else
|
||||||
startSingleTimer("T", Tick(bumpCount + 1), interval)
|
startSingleTimer("T", Tick(bumpCount + 1), interval)
|
||||||
stay.using(bumpCount + 1)
|
stay().using(bumpCount + 1)
|
||||||
}
|
}
|
||||||
|
|
||||||
def autoReceive(): State = {
|
def autoReceive(): State = {
|
||||||
|
|
@ -116,7 +116,7 @@ object TimerSpec {
|
||||||
startTimerWithFixedDelay("A", PoisonPill, interval)
|
startTimerWithFixedDelay("A", PoisonPill, interval)
|
||||||
else
|
else
|
||||||
startSingleTimer("A", PoisonPill, interval)
|
startSingleTimer("A", PoisonPill, interval)
|
||||||
stay
|
stay()
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
|
|
@ -131,7 +131,7 @@ object TimerSpec {
|
||||||
when(TheState) {
|
when(TheState) {
|
||||||
case Event(Tick(n), _) =>
|
case Event(Tick(n), _) =>
|
||||||
monitor ! Tock(n)
|
monitor ! Tock(n)
|
||||||
stay
|
stay()
|
||||||
case Event(Bump, bumpCount) =>
|
case Event(Bump, bumpCount) =>
|
||||||
bump(bumpCount)
|
bump(bumpCount)
|
||||||
case Event(SlowThenBump(latch), bumpCount) =>
|
case Event(SlowThenBump(latch), bumpCount) =>
|
||||||
|
|
@ -141,7 +141,7 @@ object TimerSpec {
|
||||||
stop()
|
stop()
|
||||||
case Event(Cancel, _) =>
|
case Event(Cancel, _) =>
|
||||||
cancelTimer("T")
|
cancelTimer("T")
|
||||||
stay
|
stay()
|
||||||
case Event(Throw(e), _) =>
|
case Event(Throw(e), _) =>
|
||||||
throw e
|
throw e
|
||||||
case Event(SlowThenThrow(latch, e), _) =>
|
case Event(SlowThenThrow(latch, e), _) =>
|
||||||
|
|
|
||||||
|
|
@ -123,11 +123,11 @@ object TypedActorSpec {
|
||||||
|
|
||||||
def pigdog = "Pigdog"
|
def pigdog = "Pigdog"
|
||||||
|
|
||||||
def futurePigdog(): Future[String] = Future.successful(pigdog)
|
def futurePigdog(): Future[String] = Future.successful(pigdog())
|
||||||
|
|
||||||
def futurePigdog(delay: FiniteDuration): Future[String] = {
|
def futurePigdog(delay: FiniteDuration): Future[String] = {
|
||||||
Thread.sleep(delay.toMillis)
|
Thread.sleep(delay.toMillis)
|
||||||
futurePigdog
|
futurePigdog()
|
||||||
}
|
}
|
||||||
|
|
||||||
def futurePigdog(delay: FiniteDuration, numbered: Int): Future[String] = {
|
def futurePigdog(delay: FiniteDuration, numbered: Int): Future[String] = {
|
||||||
|
|
@ -140,16 +140,16 @@ object TypedActorSpec {
|
||||||
foo.futurePigdog(500 millis).map(_.toUpperCase)
|
foo.futurePigdog(500 millis).map(_.toUpperCase)
|
||||||
}
|
}
|
||||||
|
|
||||||
def optionPigdog(): Option[String] = Some(pigdog)
|
def optionPigdog(): Option[String] = Some(pigdog())
|
||||||
|
|
||||||
def optionPigdog(delay: FiniteDuration): Option[String] = {
|
def optionPigdog(delay: FiniteDuration): Option[String] = {
|
||||||
Thread.sleep(delay.toMillis)
|
Thread.sleep(delay.toMillis)
|
||||||
Some(pigdog)
|
Some(pigdog())
|
||||||
}
|
}
|
||||||
|
|
||||||
def joptionPigdog(delay: FiniteDuration): JOption[String] = {
|
def joptionPigdog(delay: FiniteDuration): JOption[String] = {
|
||||||
Thread.sleep(delay.toMillis)
|
Thread.sleep(delay.toMillis)
|
||||||
JOption.some(pigdog)
|
JOption.some(pigdog())
|
||||||
}
|
}
|
||||||
|
|
||||||
var internalNumber = 0
|
var internalNumber = 0
|
||||||
|
|
@ -408,14 +408,14 @@ class TypedActorSpec
|
||||||
t.failingPigdog()
|
t.failingPigdog()
|
||||||
t.read() should ===(1) //Make sure state is not reset after failure
|
t.read() should ===(1) //Make sure state is not reset after failure
|
||||||
|
|
||||||
intercept[IllegalStateException] { Await.result(t.failingFuturePigdog, 2 seconds) }.getMessage should ===(
|
intercept[IllegalStateException] { Await.result(t.failingFuturePigdog(), 2 seconds) }.getMessage should ===(
|
||||||
"expected")
|
"expected")
|
||||||
t.read() should ===(1) //Make sure state is not reset after failure
|
t.read() should ===(1) //Make sure state is not reset after failure
|
||||||
|
|
||||||
intercept[IllegalStateException] { t.failingJOptionPigdog }.getMessage should ===("expected")
|
intercept[IllegalStateException] { t.failingJOptionPigdog() }.getMessage should ===("expected")
|
||||||
t.read() should ===(1) //Make sure state is not reset after failure
|
t.read() should ===(1) //Make sure state is not reset after failure
|
||||||
|
|
||||||
intercept[IllegalStateException] { t.failingOptionPigdog }.getMessage should ===("expected")
|
intercept[IllegalStateException] { t.failingOptionPigdog() }.getMessage should ===("expected")
|
||||||
|
|
||||||
t.read() should ===(1) //Make sure state is not reset after failure
|
t.read() should ===(1) //Make sure state is not reset after failure
|
||||||
|
|
||||||
|
|
@ -466,7 +466,7 @@ class TypedActorSpec
|
||||||
val thais = for (_ <- 1 to 60) yield newFooBar("pooled-dispatcher", 6 seconds)
|
val thais = for (_ <- 1 to 60) yield newFooBar("pooled-dispatcher", 6 seconds)
|
||||||
val iterator = new CyclicIterator(thais)
|
val iterator = new CyclicIterator(thais)
|
||||||
|
|
||||||
val results = for (i <- 1 to 120) yield (i, iterator.next.futurePigdog(200 millis, i))
|
val results = for (i <- 1 to 120) yield (i, iterator.next().futurePigdog(200 millis, i))
|
||||||
|
|
||||||
for ((i, r) <- results) Await.result(r, remaining) should ===("Pigdog" + i)
|
for ((i, r) <- results) Await.result(r, remaining) should ===("Pigdog" + i)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -76,7 +76,7 @@ object UidClashTest {
|
||||||
Stop
|
Stop
|
||||||
case _ => Restart
|
case _ => Restart
|
||||||
}
|
}
|
||||||
val theRestartedOne = context.actorOf(Props[RestartedActor], "theRestartedOne")
|
val theRestartedOne = context.actorOf(Props[RestartedActor](), "theRestartedOne")
|
||||||
|
|
||||||
def receive = {
|
def receive = {
|
||||||
case PleaseRestart => theRestartedOne ! PleaseRestart
|
case PleaseRestart => theRestartedOne ! PleaseRestart
|
||||||
|
|
|
||||||
|
|
@ -257,7 +257,7 @@ abstract class ActorModelSpec(config: String) extends AkkaSpec(config) with Defa
|
||||||
|
|
||||||
import ActorModelSpec._
|
import ActorModelSpec._
|
||||||
|
|
||||||
def newTestActor(dispatcher: String) = system.actorOf(Props[DispatcherActor].withDispatcher(dispatcher))
|
def newTestActor(dispatcher: String) = system.actorOf(Props[DispatcherActor]().withDispatcher(dispatcher))
|
||||||
|
|
||||||
def awaitStarted(ref: ActorRef): Unit = {
|
def awaitStarted(ref: ActorRef): Unit = {
|
||||||
awaitCond(ref match {
|
awaitCond(ref match {
|
||||||
|
|
@ -352,7 +352,7 @@ abstract class ActorModelSpec(config: String) extends AkkaSpec(config) with Defa
|
||||||
val a = newTestActor(dispatcher.id).asInstanceOf[InternalActorRef]
|
val a = newTestActor(dispatcher.id).asInstanceOf[InternalActorRef]
|
||||||
awaitStarted(a)
|
awaitStarted(a)
|
||||||
val done = new CountDownLatch(1)
|
val done = new CountDownLatch(1)
|
||||||
a.suspend
|
a.suspend()
|
||||||
a ! CountDown(done)
|
a ! CountDown(done)
|
||||||
assertNoCountDown(done, 1000, "Should not process messages while suspended")
|
assertNoCountDown(done, 1000, "Should not process messages while suspended")
|
||||||
assertRefDefaultZero(a)(registers = 1, msgsReceived = 1, suspensions = 1)
|
assertRefDefaultZero(a)(registers = 1, msgsReceived = 1, suspensions = 1)
|
||||||
|
|
@ -373,7 +373,7 @@ abstract class ActorModelSpec(config: String) extends AkkaSpec(config) with Defa
|
||||||
|
|
||||||
"handle waves of actors" in {
|
"handle waves of actors" in {
|
||||||
val dispatcher = interceptedDispatcher()
|
val dispatcher = interceptedDispatcher()
|
||||||
val props = Props[DispatcherActor].withDispatcher(dispatcher.id)
|
val props = Props[DispatcherActor]().withDispatcher(dispatcher.id)
|
||||||
|
|
||||||
def flood(num: Int): Unit = {
|
def flood(num: Int): Unit = {
|
||||||
val cachedMessage = CountDownNStop(new CountDownLatch(num))
|
val cachedMessage = CountDownNStop(new CountDownLatch(num))
|
||||||
|
|
@ -417,7 +417,7 @@ abstract class ActorModelSpec(config: String) extends AkkaSpec(config) with Defa
|
||||||
}
|
}
|
||||||
|
|
||||||
System.err.println("Mailbox: " + mq.numberOfMessages + " " + mq.hasMessages)
|
System.err.println("Mailbox: " + mq.numberOfMessages + " " + mq.hasMessages)
|
||||||
Iterator.continually(mq.dequeue).takeWhile(_ ne null).foreach(System.err.println)
|
Iterator.continually(mq.dequeue()).takeWhile(_ ne null).foreach(System.err.println)
|
||||||
case _ =>
|
case _ =>
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -59,14 +59,14 @@ class DispatcherActorSpec extends AkkaSpec(DispatcherActorSpec.config) with Defa
|
||||||
"A Dispatcher and an Actor" must {
|
"A Dispatcher and an Actor" must {
|
||||||
|
|
||||||
"support tell" in {
|
"support tell" in {
|
||||||
val actor = system.actorOf(Props[OneWayTestActor].withDispatcher("test-dispatcher"))
|
val actor = system.actorOf(Props[OneWayTestActor]().withDispatcher("test-dispatcher"))
|
||||||
actor ! "OneWay"
|
actor ! "OneWay"
|
||||||
assert(OneWayTestActor.oneWay.await(1, TimeUnit.SECONDS))
|
assert(OneWayTestActor.oneWay.await(1, TimeUnit.SECONDS))
|
||||||
system.stop(actor)
|
system.stop(actor)
|
||||||
}
|
}
|
||||||
|
|
||||||
"support ask/reply" in {
|
"support ask/reply" in {
|
||||||
val actor = system.actorOf(Props[TestActor].withDispatcher("test-dispatcher"))
|
val actor = system.actorOf(Props[TestActor]().withDispatcher("test-dispatcher"))
|
||||||
assert("World" === Await.result(actor ? "Hello", timeout.duration))
|
assert("World" === Await.result(actor ? "Hello", timeout.duration))
|
||||||
system.stop(actor)
|
system.stop(actor)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -182,11 +182,11 @@ class DispatchersSpec extends AkkaSpec(DispatchersSpec.config) with ImplicitSend
|
||||||
}
|
}
|
||||||
|
|
||||||
"include system name and dispatcher id in thread names for fork-join-executor" in {
|
"include system name and dispatcher id in thread names for fork-join-executor" in {
|
||||||
assertMyDispatcherIsUsed(system.actorOf(Props[ThreadNameEcho].withDispatcher("myapp.mydispatcher")))
|
assertMyDispatcherIsUsed(system.actorOf(Props[ThreadNameEcho]().withDispatcher("myapp.mydispatcher")))
|
||||||
}
|
}
|
||||||
|
|
||||||
"include system name and dispatcher id in thread names for thread-pool-executor" in {
|
"include system name and dispatcher id in thread names for thread-pool-executor" in {
|
||||||
system.actorOf(Props[ThreadNameEcho].withDispatcher("myapp.thread-pool-dispatcher")) ! "what's the name?"
|
system.actorOf(Props[ThreadNameEcho]().withDispatcher("myapp.thread-pool-dispatcher")) ! "what's the name?"
|
||||||
val Expected = R("(DispatchersSpec-myapp.thread-pool-dispatcher-[1-9][0-9]*)")
|
val Expected = R("(DispatchersSpec-myapp.thread-pool-dispatcher-[1-9][0-9]*)")
|
||||||
expectMsgPF() {
|
expectMsgPF() {
|
||||||
case Expected(_) =>
|
case Expected(_) =>
|
||||||
|
|
@ -194,7 +194,7 @@ class DispatchersSpec extends AkkaSpec(DispatchersSpec.config) with ImplicitSend
|
||||||
}
|
}
|
||||||
|
|
||||||
"include system name and dispatcher id in thread names for default-dispatcher" in {
|
"include system name and dispatcher id in thread names for default-dispatcher" in {
|
||||||
system.actorOf(Props[ThreadNameEcho]) ! "what's the name?"
|
system.actorOf(Props[ThreadNameEcho]()) ! "what's the name?"
|
||||||
val Expected = R("(DispatchersSpec-akka.actor.default-dispatcher-[1-9][0-9]*)")
|
val Expected = R("(DispatchersSpec-akka.actor.default-dispatcher-[1-9][0-9]*)")
|
||||||
expectMsgPF() {
|
expectMsgPF() {
|
||||||
case Expected(_) =>
|
case Expected(_) =>
|
||||||
|
|
@ -202,7 +202,7 @@ class DispatchersSpec extends AkkaSpec(DispatchersSpec.config) with ImplicitSend
|
||||||
}
|
}
|
||||||
|
|
||||||
"include system name and dispatcher id in thread names for pinned dispatcher" in {
|
"include system name and dispatcher id in thread names for pinned dispatcher" in {
|
||||||
system.actorOf(Props[ThreadNameEcho].withDispatcher("myapp.my-pinned-dispatcher")) ! "what's the name?"
|
system.actorOf(Props[ThreadNameEcho]().withDispatcher("myapp.my-pinned-dispatcher")) ! "what's the name?"
|
||||||
val Expected = R("(DispatchersSpec-myapp.my-pinned-dispatcher-[1-9][0-9]*)")
|
val Expected = R("(DispatchersSpec-myapp.my-pinned-dispatcher-[1-9][0-9]*)")
|
||||||
expectMsgPF() {
|
expectMsgPF() {
|
||||||
case Expected(_) =>
|
case Expected(_) =>
|
||||||
|
|
@ -210,7 +210,7 @@ class DispatchersSpec extends AkkaSpec(DispatchersSpec.config) with ImplicitSend
|
||||||
}
|
}
|
||||||
|
|
||||||
"include system name and dispatcher id in thread names for balancing dispatcher" in {
|
"include system name and dispatcher id in thread names for balancing dispatcher" in {
|
||||||
system.actorOf(Props[ThreadNameEcho].withDispatcher("myapp.balancing-dispatcher")) ! "what's the name?"
|
system.actorOf(Props[ThreadNameEcho]().withDispatcher("myapp.balancing-dispatcher")) ! "what's the name?"
|
||||||
val Expected = R("(DispatchersSpec-myapp.balancing-dispatcher-[1-9][0-9]*)")
|
val Expected = R("(DispatchersSpec-myapp.balancing-dispatcher-[1-9][0-9]*)")
|
||||||
expectMsgPF() {
|
expectMsgPF() {
|
||||||
case Expected(_) =>
|
case Expected(_) =>
|
||||||
|
|
@ -218,16 +218,16 @@ class DispatchersSpec extends AkkaSpec(DispatchersSpec.config) with ImplicitSend
|
||||||
}
|
}
|
||||||
|
|
||||||
"use dispatcher in deployment config" in {
|
"use dispatcher in deployment config" in {
|
||||||
assertMyDispatcherIsUsed(system.actorOf(Props[ThreadNameEcho], name = "echo1"))
|
assertMyDispatcherIsUsed(system.actorOf(Props[ThreadNameEcho](), name = "echo1"))
|
||||||
}
|
}
|
||||||
|
|
||||||
"use dispatcher in deployment config, trumps code" in {
|
"use dispatcher in deployment config, trumps code" in {
|
||||||
assertMyDispatcherIsUsed(
|
assertMyDispatcherIsUsed(
|
||||||
system.actorOf(Props[ThreadNameEcho].withDispatcher("myapp.my-pinned-dispatcher"), name = "echo2"))
|
system.actorOf(Props[ThreadNameEcho]().withDispatcher("myapp.my-pinned-dispatcher"), name = "echo2"))
|
||||||
}
|
}
|
||||||
|
|
||||||
"use pool-dispatcher router of deployment config" in {
|
"use pool-dispatcher router of deployment config" in {
|
||||||
val pool = system.actorOf(FromConfig.props(Props[ThreadNameEcho]), name = "pool1")
|
val pool = system.actorOf(FromConfig.props(Props[ThreadNameEcho]()), name = "pool1")
|
||||||
pool ! Identify(None)
|
pool ! Identify(None)
|
||||||
val routee = expectMsgType[ActorIdentity].ref.get
|
val routee = expectMsgType[ActorIdentity].ref.get
|
||||||
routee ! "what's the name?"
|
routee ! "what's the name?"
|
||||||
|
|
@ -238,7 +238,7 @@ class DispatchersSpec extends AkkaSpec(DispatchersSpec.config) with ImplicitSend
|
||||||
}
|
}
|
||||||
|
|
||||||
"use balancing-pool router with special routees mailbox of deployment config" in {
|
"use balancing-pool router with special routees mailbox of deployment config" in {
|
||||||
system.actorOf(FromConfig.props(Props[ThreadNameEcho]), name = "balanced") ! "what's the name?"
|
system.actorOf(FromConfig.props(Props[ThreadNameEcho]()), name = "balanced") ! "what's the name?"
|
||||||
val Expected = R("""(DispatchersSpec-BalancingPool-/balanced-[1-9][0-9]*)""")
|
val Expected = R("""(DispatchersSpec-BalancingPool-/balanced-[1-9][0-9]*)""")
|
||||||
expectMsgPF() {
|
expectMsgPF() {
|
||||||
case Expected(_) =>
|
case Expected(_) =>
|
||||||
|
|
|
||||||
|
|
@ -44,7 +44,7 @@ class PinnedActorSpec extends AkkaSpec(PinnedActorSpec.config) with BeforeAndAft
|
||||||
}
|
}
|
||||||
|
|
||||||
"support ask/reply" in {
|
"support ask/reply" in {
|
||||||
val actor = system.actorOf(Props[TestActor].withDispatcher("pinned-dispatcher"))
|
val actor = system.actorOf(Props[TestActor]().withDispatcher("pinned-dispatcher"))
|
||||||
assert("World" === Await.result(actor ? "Hello", timeout.duration))
|
assert("World" === Await.result(actor ? "Hello", timeout.duration))
|
||||||
system.stop(actor)
|
system.stop(actor)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -25,7 +25,7 @@ class DispatchSpec extends AkkaSpec("""
|
||||||
|
|
||||||
"The dispatcher" should {
|
"The dispatcher" should {
|
||||||
"log an appropriate message when akka.actor.serialize-messages triggers a serialization error" in {
|
"log an appropriate message when akka.actor.serialize-messages triggers a serialization error" in {
|
||||||
val actor = system.actorOf(Props[EmptyActor])
|
val actor = system.actorOf(Props[EmptyActor]())
|
||||||
EventFilter[Exception](pattern = ".*NoSerializationVerificationNeeded.*", occurrences = 1).intercept {
|
EventFilter[Exception](pattern = ".*NoSerializationVerificationNeeded.*", occurrences = 1).intercept {
|
||||||
actor ! new UnserializableMessageClass
|
actor ! new UnserializableMessageClass
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -157,7 +157,7 @@ abstract class MailboxSpec extends AkkaSpec with BeforeAndAfterAll with BeforeAn
|
||||||
def createConsumer: Future[Vector[Envelope]] = spawn {
|
def createConsumer: Future[Vector[Envelope]] = spawn {
|
||||||
var r = Vector[Envelope]()
|
var r = Vector[Envelope]()
|
||||||
|
|
||||||
while (producers.exists(_.isCompleted == false) || q.hasMessages) Option(q.dequeue).foreach { message =>
|
while (producers.exists(_.isCompleted == false) || q.hasMessages) Option(q.dequeue()).foreach { message =>
|
||||||
r = r :+ message
|
r = r :+ message
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -85,7 +85,7 @@ class EventStreamSpec extends AkkaSpec(EventStreamSpec.config) {
|
||||||
expectMsg(M(42))
|
expectMsg(M(42))
|
||||||
bus.unsubscribe(testActor)
|
bus.unsubscribe(testActor)
|
||||||
bus.publish(M(13))
|
bus.publish(M(13))
|
||||||
expectNoMessage
|
expectNoMessage()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -159,7 +159,7 @@ class EventStreamSpec extends AkkaSpec(EventStreamSpec.config) {
|
||||||
bus.publish(a)
|
bus.publish(a)
|
||||||
expectMsg(b2)
|
expectMsg(b2)
|
||||||
expectMsg(a)
|
expectMsg(a)
|
||||||
expectNoMessage
|
expectNoMessage()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -235,7 +235,7 @@ class LoggerSpec extends AnyWordSpec with Matchers {
|
||||||
system.eventStream.publish(SetTarget(probe.ref, qualifier = 1))
|
system.eventStream.publish(SetTarget(probe.ref, qualifier = 1))
|
||||||
probe.expectMsg("OK")
|
probe.expectMsg("OK")
|
||||||
|
|
||||||
val ref = system.actorOf(Props[ActorWithMDC])
|
val ref = system.actorOf(Props[ActorWithMDC]())
|
||||||
|
|
||||||
ref ! "Processing new Request"
|
ref ! "Processing new Request"
|
||||||
probe.expectMsgPF(max = 3.seconds) {
|
probe.expectMsgPF(max = 3.seconds) {
|
||||||
|
|
|
||||||
|
|
@ -195,7 +195,7 @@ class LoggingReceiveSpec extends AnyWordSpec with BeforeAndAfterAll {
|
||||||
within(3 seconds) {
|
within(3 seconds) {
|
||||||
val lifecycleGuardian = appLifecycle.asInstanceOf[ActorSystemImpl].guardian
|
val lifecycleGuardian = appLifecycle.asInstanceOf[ActorSystemImpl].guardian
|
||||||
val lname = lifecycleGuardian.path.toString
|
val lname = lifecycleGuardian.path.toString
|
||||||
val supervisor = TestActorRef[TestLogActor](Props[TestLogActor])
|
val supervisor = TestActorRef[TestLogActor](Props[TestLogActor]())
|
||||||
val sname = supervisor.path.toString
|
val sname = supervisor.path.toString
|
||||||
|
|
||||||
fishForMessage(hint = "now supervising") {
|
fishForMessage(hint = "now supervising") {
|
||||||
|
|
@ -203,7 +203,7 @@ class LoggingReceiveSpec extends AnyWordSpec with BeforeAndAfterAll {
|
||||||
case _ => false
|
case _ => false
|
||||||
}
|
}
|
||||||
|
|
||||||
TestActorRef[TestLogActor](Props[TestLogActor], supervisor, "none")
|
TestActorRef[TestLogActor](Props[TestLogActor](), supervisor, "none")
|
||||||
|
|
||||||
fishForMessage(hint = "now supervising") {
|
fishForMessage(hint = "now supervising") {
|
||||||
case Logging.Debug(`sname`, _, msg: String) if msg.startsWith("now supervising") => true
|
case Logging.Debug(`sname`, _, msg: String) if msg.startsWith("now supervising") => true
|
||||||
|
|
@ -217,9 +217,9 @@ class LoggingReceiveSpec extends AnyWordSpec with BeforeAndAfterAll {
|
||||||
new TestKit(appLifecycle) {
|
new TestKit(appLifecycle) {
|
||||||
system.eventStream.subscribe(testActor, classOf[Logging.Debug])
|
system.eventStream.subscribe(testActor, classOf[Logging.Debug])
|
||||||
within(3 seconds) {
|
within(3 seconds) {
|
||||||
val supervisor = TestActorRef[TestLogActor](Props[TestLogActor])
|
val supervisor = TestActorRef[TestLogActor](Props[TestLogActor]())
|
||||||
val sclass = classOf[TestLogActor]
|
val sclass = classOf[TestLogActor]
|
||||||
val actor = TestActorRef[TestLogActor](Props[TestLogActor], supervisor, "none")
|
val actor = TestActorRef[TestLogActor](Props[TestLogActor](), supervisor, "none")
|
||||||
val aname = actor.path.toString
|
val aname = actor.path.toString
|
||||||
|
|
||||||
supervisor.watch(actor)
|
supervisor.watch(actor)
|
||||||
|
|
@ -242,7 +242,7 @@ class LoggingReceiveSpec extends AnyWordSpec with BeforeAndAfterAll {
|
||||||
system.eventStream.subscribe(testActor, classOf[Logging.Debug])
|
system.eventStream.subscribe(testActor, classOf[Logging.Debug])
|
||||||
system.eventStream.subscribe(testActor, classOf[Logging.Error])
|
system.eventStream.subscribe(testActor, classOf[Logging.Error])
|
||||||
within(3 seconds) {
|
within(3 seconds) {
|
||||||
val supervisor = TestActorRef[TestLogActor](Props[TestLogActor])
|
val supervisor = TestActorRef[TestLogActor](Props[TestLogActor]())
|
||||||
val sname = supervisor.path.toString
|
val sname = supervisor.path.toString
|
||||||
val sclass = classOf[TestLogActor]
|
val sclass = classOf[TestLogActor]
|
||||||
|
|
||||||
|
|
@ -251,7 +251,7 @@ class LoggingReceiveSpec extends AnyWordSpec with BeforeAndAfterAll {
|
||||||
case Logging.Debug(_, _, msg: String) if msg.startsWith("now supervising") => 1
|
case Logging.Debug(_, _, msg: String) if msg.startsWith("now supervising") => 1
|
||||||
}
|
}
|
||||||
|
|
||||||
val actor = TestActorRef[TestLogActor](Props[TestLogActor], supervisor, "none")
|
val actor = TestActorRef[TestLogActor](Props[TestLogActor](), supervisor, "none")
|
||||||
val aname = actor.path.toString
|
val aname = actor.path.toString
|
||||||
val aclass = classOf[TestLogActor]
|
val aclass = classOf[TestLogActor]
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -46,7 +46,7 @@ class JavaLoggerSpec extends AkkaSpec(JavaLoggerSpec.config) {
|
||||||
def close(): Unit = {}
|
def close(): Unit = {}
|
||||||
})
|
})
|
||||||
|
|
||||||
val producer = system.actorOf(Props[JavaLoggerSpec.LogProducer], name = "log")
|
val producer = system.actorOf(Props[JavaLoggerSpec.LogProducer](), name = "log")
|
||||||
|
|
||||||
"JavaLogger" must {
|
"JavaLogger" must {
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -161,8 +161,8 @@ class AskSpec extends AkkaSpec {
|
||||||
val echo = system.actorOf(Props(new Actor {
|
val echo = system.actorOf(Props(new Actor {
|
||||||
def receive = {
|
def receive = {
|
||||||
case x =>
|
case x =>
|
||||||
val name = sender.path.name
|
val name = sender().path.name
|
||||||
val parent = sender.path.parent
|
val parent = sender().path.parent
|
||||||
context.actorSelection(parent / ".." / "temp" / name) ! x
|
context.actorSelection(parent / ".." / "temp" / name) ! x
|
||||||
}
|
}
|
||||||
}), "select-echo4")
|
}), "select-echo4")
|
||||||
|
|
@ -182,7 +182,7 @@ class AskSpec extends AkkaSpec {
|
||||||
val echo = system.actorOf(Props(new Actor {
|
val echo = system.actorOf(Props(new Actor {
|
||||||
def receive = {
|
def receive = {
|
||||||
case x =>
|
case x =>
|
||||||
val parent = sender.path.parent
|
val parent = sender().path.parent
|
||||||
context.actorSelection(parent / "missing") ! x
|
context.actorSelection(parent / "missing") ! x
|
||||||
}
|
}
|
||||||
}), "select-echo5")
|
}), "select-echo5")
|
||||||
|
|
|
||||||
|
|
@ -119,7 +119,7 @@ class BackoffOnRestartSupervisorSpec extends AkkaSpec("""
|
||||||
val supervisorChildSelection = system.actorSelection(supervisor.path / "*")
|
val supervisorChildSelection = system.actorSelection(supervisor.path / "*")
|
||||||
supervisorChildSelection.tell("testmsg", probe.ref)
|
supervisorChildSelection.tell("testmsg", probe.ref)
|
||||||
probe.expectMsg("testmsg")
|
probe.expectMsg("testmsg")
|
||||||
probe.expectNoMessage
|
probe.expectNoMessage()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -180,7 +180,7 @@ class CircuitBreakerSpec extends AkkaSpec {
|
||||||
val breaker = shortResetTimeoutCb()
|
val breaker = shortResetTimeoutCb()
|
||||||
intercept[TestException] { breaker().withSyncCircuitBreaker(throwException) }
|
intercept[TestException] { breaker().withSyncCircuitBreaker(throwException) }
|
||||||
checkLatch(breaker.halfOpenLatch)
|
checkLatch(breaker.halfOpenLatch)
|
||||||
breaker.openLatch.reset
|
breaker.openLatch.reset()
|
||||||
intercept[TestException] { breaker().withSyncCircuitBreaker(throwException) }
|
intercept[TestException] { breaker().withSyncCircuitBreaker(throwException) }
|
||||||
checkLatch(breaker.openLatch)
|
checkLatch(breaker.openLatch)
|
||||||
}
|
}
|
||||||
|
|
@ -190,7 +190,7 @@ class CircuitBreakerSpec extends AkkaSpec {
|
||||||
val breaker = shortResetTimeoutCb()
|
val breaker = shortResetTimeoutCb()
|
||||||
intercept[TestException] { breaker().withSyncCircuitBreaker(throwException) }
|
intercept[TestException] { breaker().withSyncCircuitBreaker(throwException) }
|
||||||
checkLatch(breaker.halfOpenLatch)
|
checkLatch(breaker.halfOpenLatch)
|
||||||
breaker.openLatch.reset
|
breaker.openLatch.reset()
|
||||||
breaker().withSyncCircuitBreaker(2, evenNumberIsFailure)
|
breaker().withSyncCircuitBreaker(2, evenNumberIsFailure)
|
||||||
checkLatch(breaker.openLatch)
|
checkLatch(breaker.openLatch)
|
||||||
}
|
}
|
||||||
|
|
@ -200,7 +200,7 @@ class CircuitBreakerSpec extends AkkaSpec {
|
||||||
val breaker = shortResetTimeoutCb()
|
val breaker = shortResetTimeoutCb()
|
||||||
intercept[TestException] { breaker().withSyncCircuitBreaker(throwException) }
|
intercept[TestException] { breaker().withSyncCircuitBreaker(throwException) }
|
||||||
checkLatch(breaker.halfOpenLatch)
|
checkLatch(breaker.halfOpenLatch)
|
||||||
breaker.openLatch.reset
|
breaker.openLatch.reset()
|
||||||
breaker().fail()
|
breaker().fail()
|
||||||
checkLatch(breaker.openLatch)
|
checkLatch(breaker.openLatch)
|
||||||
}
|
}
|
||||||
|
|
@ -451,7 +451,7 @@ class CircuitBreakerSpec extends AkkaSpec {
|
||||||
checkLatch(breaker.halfOpenLatch)
|
checkLatch(breaker.halfOpenLatch)
|
||||||
|
|
||||||
// transit to open again
|
// transit to open again
|
||||||
breaker.openLatch.reset
|
breaker.openLatch.reset()
|
||||||
breaker().withCircuitBreaker(Future(throwException))
|
breaker().withCircuitBreaker(Future(throwException))
|
||||||
checkLatch(breaker.openLatch)
|
checkLatch(breaker.openLatch)
|
||||||
|
|
||||||
|
|
@ -513,7 +513,7 @@ class CircuitBreakerSpec extends AkkaSpec {
|
||||||
val breaker = shortResetTimeoutCb()
|
val breaker = shortResetTimeoutCb()
|
||||||
breaker().withCircuitBreaker(Future(throwException))
|
breaker().withCircuitBreaker(Future(throwException))
|
||||||
checkLatch(breaker.halfOpenLatch)
|
checkLatch(breaker.halfOpenLatch)
|
||||||
breaker.openLatch.reset
|
breaker.openLatch.reset()
|
||||||
intercept[TestException] { Await.result(breaker().withCircuitBreaker(Future(throwException)), awaitTimeout) }
|
intercept[TestException] { Await.result(breaker().withCircuitBreaker(Future(throwException)), awaitTimeout) }
|
||||||
checkLatch(breaker.openLatch)
|
checkLatch(breaker.openLatch)
|
||||||
}
|
}
|
||||||
|
|
@ -523,7 +523,7 @@ class CircuitBreakerSpec extends AkkaSpec {
|
||||||
val breaker = shortResetTimeoutCb()
|
val breaker = shortResetTimeoutCb()
|
||||||
intercept[TestException] { breaker().withSyncCircuitBreaker(throwException) }
|
intercept[TestException] { breaker().withSyncCircuitBreaker(throwException) }
|
||||||
checkLatch(breaker.halfOpenLatch)
|
checkLatch(breaker.halfOpenLatch)
|
||||||
breaker.openLatch.reset
|
breaker.openLatch.reset()
|
||||||
Await.result(breaker().withCircuitBreaker(Future(2), evenNumberIsFailure), awaitTimeout)
|
Await.result(breaker().withCircuitBreaker(Future(2), evenNumberIsFailure), awaitTimeout)
|
||||||
checkLatch(breaker.openLatch)
|
checkLatch(breaker.openLatch)
|
||||||
}
|
}
|
||||||
|
|
@ -534,7 +534,7 @@ class CircuitBreakerSpec extends AkkaSpec {
|
||||||
breaker().withCircuitBreaker(Future(throwException))
|
breaker().withCircuitBreaker(Future(throwException))
|
||||||
checkLatch(breaker.halfOpenLatch)
|
checkLatch(breaker.halfOpenLatch)
|
||||||
|
|
||||||
breaker.openLatch.reset
|
breaker.openLatch.reset()
|
||||||
breaker().withCircuitBreaker(Future(throwException))
|
breaker().withCircuitBreaker(Future(throwException))
|
||||||
checkLatch(breaker.openLatch)
|
checkLatch(breaker.openLatch)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -29,19 +29,19 @@ class PatternSpec extends AkkaSpec {
|
||||||
"pattern.gracefulStop" must {
|
"pattern.gracefulStop" must {
|
||||||
|
|
||||||
"provide Future for stopping an actor" in {
|
"provide Future for stopping an actor" in {
|
||||||
val target = system.actorOf(Props[TargetActor])
|
val target = system.actorOf(Props[TargetActor]())
|
||||||
val result = gracefulStop(target, 5 seconds)
|
val result = gracefulStop(target, 5 seconds)
|
||||||
Await.result(result, 6 seconds) should ===(true)
|
Await.result(result, 6 seconds) should ===(true)
|
||||||
}
|
}
|
||||||
|
|
||||||
"complete Future when actor already terminated" in {
|
"complete Future when actor already terminated" in {
|
||||||
val target = system.actorOf(Props[TargetActor])
|
val target = system.actorOf(Props[TargetActor]())
|
||||||
Await.ready(gracefulStop(target, 5 seconds), 6 seconds)
|
Await.ready(gracefulStop(target, 5 seconds), 6 seconds)
|
||||||
Await.ready(gracefulStop(target, 1 millis), 1 second)
|
Await.ready(gracefulStop(target, 1 millis), 1 second)
|
||||||
}
|
}
|
||||||
|
|
||||||
"complete Future with AskTimeoutException when actor not terminated within timeout" in {
|
"complete Future with AskTimeoutException when actor not terminated within timeout" in {
|
||||||
val target = system.actorOf(Props[TargetActor])
|
val target = system.actorOf(Props[TargetActor]())
|
||||||
val latch = TestLatch()
|
val latch = TestLatch()
|
||||||
target ! ((latch, remainingOrDefault))
|
target ! ((latch, remainingOrDefault))
|
||||||
intercept[AskTimeoutException] { Await.result(gracefulStop(target, 500 millis), remainingOrDefault) }
|
intercept[AskTimeoutException] { Await.result(gracefulStop(target, 500 millis), remainingOrDefault) }
|
||||||
|
|
|
||||||
|
|
@ -59,7 +59,7 @@ class RetrySpec extends AkkaSpec with RetrySupport {
|
||||||
} else Future.successful(5)
|
} else Future.successful(5)
|
||||||
}
|
}
|
||||||
|
|
||||||
val retried = retry(() => attempt, 10, 100 milliseconds)
|
val retried = retry(() => attempt(), 10, 100 milliseconds)
|
||||||
|
|
||||||
within(3 seconds) {
|
within(3 seconds) {
|
||||||
Await.result(retried, remaining) should ===(5)
|
Await.result(retried, remaining) should ===(5)
|
||||||
|
|
@ -76,7 +76,7 @@ class RetrySpec extends AkkaSpec with RetrySupport {
|
||||||
} else Future.successful(5)
|
} else Future.successful(5)
|
||||||
}
|
}
|
||||||
|
|
||||||
val retried = retry(() => attempt, 5, 100 milliseconds)
|
val retried = retry(() => attempt(), 5, 100 milliseconds)
|
||||||
|
|
||||||
within(3 seconds) {
|
within(3 seconds) {
|
||||||
intercept[IllegalStateException] { Await.result(retried, remaining) }.getMessage should ===("6")
|
intercept[IllegalStateException] { Await.result(retried, remaining) }.getMessage should ===("6")
|
||||||
|
|
@ -94,7 +94,7 @@ class RetrySpec extends AkkaSpec with RetrySupport {
|
||||||
} else Future.successful(5)
|
} else Future.successful(5)
|
||||||
}
|
}
|
||||||
|
|
||||||
val retried = retry(() => attempt, 5, attempted => {
|
val retried = retry(() => attempt(), 5, attempted => {
|
||||||
attemptedCount = attempted
|
attemptedCount = attempted
|
||||||
Some(100.milliseconds * attempted)
|
Some(100.milliseconds * attempted)
|
||||||
})
|
})
|
||||||
|
|
@ -114,7 +114,7 @@ class RetrySpec extends AkkaSpec with RetrySupport {
|
||||||
} else Future.successful(1)
|
} else Future.successful(1)
|
||||||
}
|
}
|
||||||
val start = System.currentTimeMillis()
|
val start = System.currentTimeMillis()
|
||||||
val retried = retry(() => attempt, 999)
|
val retried = retry(() => attempt(), 999)
|
||||||
|
|
||||||
within(1 seconds) {
|
within(1 seconds) {
|
||||||
intercept[IllegalStateException] {
|
intercept[IllegalStateException] {
|
||||||
|
|
|
||||||
|
|
@ -118,14 +118,14 @@ class BalancingSpec extends AkkaSpec("""
|
||||||
|
|
||||||
"work with anonymous actor names" in {
|
"work with anonymous actor names" in {
|
||||||
// the dispatcher-id must not contain invalid config key characters (e.g. $a)
|
// the dispatcher-id must not contain invalid config key characters (e.g. $a)
|
||||||
system.actorOf(Props[Parent]) ! 1000
|
system.actorOf(Props[Parent]()) ! 1000
|
||||||
expectMsgType[Int]
|
expectMsgType[Int]
|
||||||
}
|
}
|
||||||
|
|
||||||
"work with encoded actor names" in {
|
"work with encoded actor names" in {
|
||||||
val encName = URLEncoder.encode("abcå6#$€xyz", "utf-8")
|
val encName = URLEncoder.encode("abcå6#$€xyz", "utf-8")
|
||||||
// % is a valid config key character (e.g. %C3%A5)
|
// % is a valid config key character (e.g. %C3%A5)
|
||||||
system.actorOf(Props[Parent], encName) ! 1001
|
system.actorOf(Props[Parent](), encName) ! 1001
|
||||||
expectMsgType[Int]
|
expectMsgType[Int]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -127,26 +127,26 @@ class ConfiguredLocalRoutingSpec
|
||||||
"RouterConfig" must {
|
"RouterConfig" must {
|
||||||
|
|
||||||
"be picked up from Props" in {
|
"be picked up from Props" in {
|
||||||
val actor = system.actorOf(RoundRobinPool(12).props(routeeProps = Props[EchoProps]), "someOther")
|
val actor = system.actorOf(RoundRobinPool(12).props(routeeProps = Props[EchoProps]()), "someOther")
|
||||||
routerConfig(actor) should ===(RoundRobinPool(12))
|
routerConfig(actor) should ===(RoundRobinPool(12))
|
||||||
Await.result(gracefulStop(actor, 3 seconds), 3 seconds)
|
Await.result(gracefulStop(actor, 3 seconds), 3 seconds)
|
||||||
}
|
}
|
||||||
|
|
||||||
"be overridable in config" in {
|
"be overridable in config" in {
|
||||||
val actor = system.actorOf(RoundRobinPool(12).props(routeeProps = Props[EchoProps]), "config")
|
val actor = system.actorOf(RoundRobinPool(12).props(routeeProps = Props[EchoProps]()), "config")
|
||||||
routerConfig(actor) should ===(RandomPool(nrOfInstances = 4, usePoolDispatcher = true))
|
routerConfig(actor) should ===(RandomPool(nrOfInstances = 4, usePoolDispatcher = true))
|
||||||
Await.result(gracefulStop(actor, 3 seconds), 3 seconds)
|
Await.result(gracefulStop(actor, 3 seconds), 3 seconds)
|
||||||
}
|
}
|
||||||
|
|
||||||
"use routees.paths from config" in {
|
"use routees.paths from config" in {
|
||||||
val actor = system.actorOf(RandomPool(12).props(routeeProps = Props[EchoProps]), "paths")
|
val actor = system.actorOf(RandomPool(12).props(routeeProps = Props[EchoProps]()), "paths")
|
||||||
routerConfig(actor) should ===(RandomGroup(List("/user/service1", "/user/service2")))
|
routerConfig(actor) should ===(RandomGroup(List("/user/service1", "/user/service2")))
|
||||||
Await.result(gracefulStop(actor, 3 seconds), 3 seconds)
|
Await.result(gracefulStop(actor, 3 seconds), 3 seconds)
|
||||||
}
|
}
|
||||||
|
|
||||||
"be overridable in explicit deployment" in {
|
"be overridable in explicit deployment" in {
|
||||||
val actor = system.actorOf(
|
val actor = system.actorOf(
|
||||||
FromConfig.props(routeeProps = Props[EchoProps]).withDeploy(Deploy(routerConfig = RoundRobinPool(12))),
|
FromConfig.props(routeeProps = Props[EchoProps]()).withDeploy(Deploy(routerConfig = RoundRobinPool(12))),
|
 "someOther")
 routerConfig(actor) should ===(RoundRobinPool(12))
 Await.result(gracefulStop(actor, 3 seconds), 3 seconds)
@@ -154,7 +154,7 @@ class ConfiguredLocalRoutingSpec

 "be overridable in config even with explicit deployment" in {
 val actor = system.actorOf(
-FromConfig.props(routeeProps = Props[EchoProps]).withDeploy(Deploy(routerConfig = RoundRobinPool(12))),
+FromConfig.props(routeeProps = Props[EchoProps]()).withDeploy(Deploy(routerConfig = RoundRobinPool(12))),
 "config")
 routerConfig(actor) should ===(RandomPool(nrOfInstances = 4, usePoolDispatcher = true))
 Await.result(gracefulStop(actor, 3 seconds), 3 seconds)
@@ -185,7 +185,7 @@ class ConfiguredLocalRoutingSpec
 // we don't really support deployment configuration of system actors, but
 // it's used for the pool of the SimpleDnsManager "/IO-DNS/inet-address"
 val probe = TestProbe()
-val parent = system.asInstanceOf[ExtendedActorSystem].systemActorOf(Props[Parent], "sys-parent")
+val parent = system.asInstanceOf[ExtendedActorSystem].systemActorOf(Props[Parent](), "sys-parent")
 parent.tell((FromConfig.props(echoActorProps), "round"), probe.ref)
 val router = probe.expectMsgType[ActorRef]
 val replies = collectRouteePaths(probe, router, 10)

@@ -59,7 +59,7 @@ class ConsistentHashingRouterSpec
 import ConsistentHashingRouterSpec._
 implicit val ec: ExecutionContextExecutor = system.dispatcher

-val router1 = system.actorOf(FromConfig.props(Props[Echo]), "router1")
+val router1 = system.actorOf(FromConfig.props(Props[Echo]()), "router1")

 "consistent hashing router" must {
 "create routees from configuration" in {
@@ -90,7 +90,7 @@ class ConsistentHashingRouterSpec
 }
 val router2 =
 system.actorOf(
-ConsistentHashingPool(nrOfInstances = 1, hashMapping = hashMapping).props(Props[Echo]),
+ConsistentHashingPool(nrOfInstances = 1, hashMapping = hashMapping).props(Props[Echo]()),
 "router2")

 router2 ! Msg2("a", "A")

@@ -100,7 +100,7 @@ class ResizerSpec extends AkkaSpec(ResizerSpec.config) with DefaultTimeout with
 c1 should ===(2)

 val current =
-Vector(ActorRefRoutee(system.actorOf(Props[TestActor])), ActorRefRoutee(system.actorOf(Props[TestActor])))
+Vector(ActorRefRoutee(system.actorOf(Props[TestActor]())), ActorRefRoutee(system.actorOf(Props[TestActor]())))
 val c2 = resizer.capacity(current)
 c2 should ===(0)
 }
@@ -129,7 +129,7 @@ class ResizerSpec extends AkkaSpec(ResizerSpec.config) with DefaultTimeout with
 val latch = new TestLatch(3)

 val resizer = DefaultResizer(lowerBound = 2, upperBound = 3)
-val router = system.actorOf(RoundRobinPool(nrOfInstances = 0, resizer = Some(resizer)).props(Props[TestActor]))
+val router = system.actorOf(RoundRobinPool(nrOfInstances = 0, resizer = Some(resizer)).props(Props[TestActor]()))

 router ! latch
 router ! latch
@@ -144,7 +144,7 @@ class ResizerSpec extends AkkaSpec(ResizerSpec.config) with DefaultTimeout with
 "be possible to define in configuration" in {
 val latch = new TestLatch(3)

-val router = system.actorOf(FromConfig.props(Props[TestActor]), "router1")
+val router = system.actorOf(FromConfig.props(Props[TestActor]()), "router1")

 router ! latch
 router ! latch
@@ -56,7 +56,7 @@ class RoutingSpec extends AkkaSpec(RoutingSpec.config) with DefaultTimeout with
 "routers in general" must {

 "evict terminated routees" in {
-val router = system.actorOf(RoundRobinPool(2).props(routeeProps = Props[Echo]))
+val router = system.actorOf(RoundRobinPool(2).props(routeeProps = Props[Echo]()))
 router ! ""
 router ! ""
 val c1, c2 = expectMsgType[ActorRef]
@@ -87,7 +87,8 @@ class RoutingSpec extends AkkaSpec(RoutingSpec.config) with DefaultTimeout with
 }
 }
 val router =
-system.actorOf(RoundRobinPool(nrOfInstances = 0, resizer = Some(resizer)).props(routeeProps = Props[TestActor]))
+system.actorOf(
+RoundRobinPool(nrOfInstances = 0, resizer = Some(resizer)).props(routeeProps = Props[TestActor]()))
 watch(router)
 Await.ready(latch, remainingOrDefault)
 router ! GetRoutees
@@ -99,7 +100,7 @@ class RoutingSpec extends AkkaSpec(RoutingSpec.config) with DefaultTimeout with
 }

 "use configured nr-of-instances when FromConfig" in {
-val router = system.actorOf(FromConfig.props(routeeProps = Props[TestActor]), "router1")
+val router = system.actorOf(FromConfig.props(routeeProps = Props[TestActor]()), "router1")
 router ! GetRoutees
 expectMsgType[Routees].routees.size should ===(3)
 watch(router)
@@ -108,7 +109,7 @@ class RoutingSpec extends AkkaSpec(RoutingSpec.config) with DefaultTimeout with
 }

 "use configured nr-of-instances when router is specified" in {
-val router = system.actorOf(RoundRobinPool(nrOfInstances = 2).props(routeeProps = Props[TestActor]), "router2")
+val router = system.actorOf(RoundRobinPool(nrOfInstances = 2).props(routeeProps = Props[TestActor]()), "router2")
 router ! GetRoutees
 expectMsgType[Routees].routees.size should ===(3)
 system.stop(router)
@@ -125,7 +126,7 @@ class RoutingSpec extends AkkaSpec(RoutingSpec.config) with DefaultTimeout with
 }
 val router =
 system.actorOf(
-RoundRobinPool(nrOfInstances = 0, resizer = Some(resizer)).props(routeeProps = Props[TestActor]),
+RoundRobinPool(nrOfInstances = 0, resizer = Some(resizer)).props(routeeProps = Props[TestActor]()),
 "router3")
 Await.ready(latch, remainingOrDefault)
 router ! GetRoutees
@@ -141,7 +142,7 @@ class RoutingSpec extends AkkaSpec(RoutingSpec.config) with DefaultTimeout with
 //#custom-strategy
 }
 val router =
-system.actorOf(RoundRobinPool(1, supervisorStrategy = escalator).props(routeeProps = Props[TestActor]))
+system.actorOf(RoundRobinPool(1, supervisorStrategy = escalator).props(routeeProps = Props[TestActor]()))
 //#supervision
 router ! GetRoutees
 EventFilter[ActorKilledException](occurrences = 1).intercept {
@@ -150,7 +151,7 @@ class RoutingSpec extends AkkaSpec(RoutingSpec.config) with DefaultTimeout with
 expectMsgType[ActorKilledException]

 val router2 =
-system.actorOf(RoundRobinPool(1).withSupervisorStrategy(escalator).props(routeeProps = Props[TestActor]))
+system.actorOf(RoundRobinPool(1).withSupervisorStrategy(escalator).props(routeeProps = Props[TestActor]()))
 router2 ! GetRoutees
 EventFilter[ActorKilledException](occurrences = 1).intercept {
 expectMsgType[Routees].routees.head.send(Kill, testActor)
@@ -163,7 +164,7 @@ class RoutingSpec extends AkkaSpec(RoutingSpec.config) with DefaultTimeout with
 case e => testActor ! e; SupervisorStrategy.Escalate
 }
 val router =
-system.actorOf(FromConfig.withSupervisorStrategy(escalator).props(routeeProps = Props[TestActor]), "router1")
+system.actorOf(FromConfig.withSupervisorStrategy(escalator).props(routeeProps = Props[TestActor]()), "router1")
 router ! GetRoutees
 EventFilter[ActorKilledException](occurrences = 1).intercept {
 expectMsgType[Routees].routees.head.send(Kill, testActor)
@@ -227,7 +228,7 @@ class RoutingSpec extends AkkaSpec(RoutingSpec.config) with DefaultTimeout with
 "router FromConfig" must {
 "throw suitable exception when not configured" in {
 val e = intercept[ConfigurationException] {
-system.actorOf(FromConfig.props(routeeProps = Props[TestActor]), "routerNotDefined")
+system.actorOf(FromConfig.props(routeeProps = Props[TestActor]()), "routerNotDefined")
 }
 e.getMessage should include("routerNotDefined")
 }
@@ -239,7 +240,7 @@ class RoutingSpec extends AkkaSpec(RoutingSpec.config) with DefaultTimeout with
 .parseString("akka.actor.deployment./routed.router=round-robin-pool")
 .withFallback(system.settings.config))
 try {
-sys.actorOf(FromConfig.props(routeeProps = Props[TestActor]), "routed")
+sys.actorOf(FromConfig.props(routeeProps = Props[TestActor]()), "routed")
 } finally {
 shutdown(sys)
 }
@@ -284,7 +284,7 @@ class VerifySerializabilitySpec extends AkkaSpec(SerializationTests.verifySerial
 }

 "verify creators" in {
-val a = system.actorOf(Props[FooActor])
+val a = system.actorOf(Props[FooActor]())
 system.stop(a)

 val b = system.actorOf(Props(new FooAbstractActor))
@@ -307,7 +307,7 @@ class VerifySerializabilitySpec extends AkkaSpec(SerializationTests.verifySerial
 }

 "verify messages" in {
-val a = system.actorOf(Props[FooActor])
+val a = system.actorOf(Props[FooActor]())
 Await.result(a ? "pigdog", timeout.duration) should ===("pigdog")

 EventFilter[SerializationCheckFailedException](
@@ -319,7 +319,7 @@ class VerifySerializabilitySpec extends AkkaSpec(SerializationTests.verifySerial
 }

 "not verify akka messages" in {
-val a = system.actorOf(Props[FooActor])
+val a = system.actorOf(Props[FooActor]())
 EventFilter.warning(start = "ok", occurrences = 1).intercept {
 // ActorSystem is not possible to serialize, but ok since it starts with "akka."
 val message = system

@@ -191,7 +191,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
 body(bsBuilder)
 body(vecBuilder)

-bsBuilder.result == vecBuilder.result
+bsBuilder.result() == vecBuilder.result()
 }

 def testShortDecoding(slice: ByteStringSlice, byteOrder: ByteOrder): Boolean = {
@@ -275,7 +275,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
 for (i <- 0 until from) builder.putShort(data(i))(byteOrder)
 builder.putShorts(data, from, to - from)(byteOrder)
 for (i <- to until data.length) builder.putShort(data(i))(byteOrder)
-reference.toSeq == builder.result
+reference.toSeq == builder.result()
 }

 def testIntEncoding(slice: ArraySlice[Int], byteOrder: ByteOrder): Boolean = {
@@ -287,7 +287,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
 for (i <- 0 until from) builder.putInt(data(i))(byteOrder)
 builder.putInts(data, from, to - from)(byteOrder)
 for (i <- to until data.length) builder.putInt(data(i))(byteOrder)
-reference.toSeq == builder.result
+reference.toSeq == builder.result()
 }

 def testLongEncoding(slice: ArraySlice[Long], byteOrder: ByteOrder): Boolean = {
@@ -299,7 +299,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
 for (i <- 0 until from) builder.putLong(data(i))(byteOrder)
 builder.putLongs(data, from, to - from)(byteOrder)
 for (i <- to until data.length) builder.putLong(data(i))(byteOrder)
-reference.toSeq == builder.result
+reference.toSeq == builder.result()
 }

 def testLongPartEncoding(anb: ArrayNumBytes[Long], byteOrder: ByteOrder): Boolean = {
@@ -316,7 +316,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
 case (r, i) if byteOrder == ByteOrder.LITTLE_ENDIAN && i % elemSize < nBytes => r
 case (r, i) if byteOrder == ByteOrder.BIG_ENDIAN && i % elemSize >= (elemSize - nBytes) => r
 })
-.toSeq == builder.result
+.toSeq == builder.result()
 }

 def testFloatEncoding(slice: ArraySlice[Float], byteOrder: ByteOrder): Boolean = {
@@ -328,7 +328,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
 for (i <- 0 until from) builder.putFloat(data(i))(byteOrder)
 builder.putFloats(data, from, to - from)(byteOrder)
 for (i <- to until data.length) builder.putFloat(data(i))(byteOrder)
-reference.toSeq == builder.result
+reference.toSeq == builder.result()
 }

 def testDoubleEncoding(slice: ArraySlice[Double], byteOrder: ByteOrder): Boolean = {
@@ -340,7 +340,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
 for (i <- 0 until from) builder.putDouble(data(i))(byteOrder)
 builder.putDoubles(data, from, to - from)(byteOrder)
 for (i <- to until data.length) builder.putDouble(data(i))(byteOrder)
-reference.toSeq == builder.result
+reference.toSeq == builder.result()
 }

 "ByteString1" must {
@@ -1301,7 +1301,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
 for (i <- 0 until from) builder.putByte(data(i))
 builder.putBytes(data, from, to - from)
 for (i <- to until data.length) builder.putByte(data(i))
-data.toSeq == builder.result
+data.toSeq == builder.result()
 }
 }

@@ -1313,7 +1313,7 @@ class ByteStringSpec extends AnyWordSpec with Matchers with Checkers {
 for (i <- 0 until from) builder.asOutputStream.write(data(i).toInt)
 builder.asOutputStream.write(data, from, to - from)
 for (i <- to until data.length) builder.asOutputStream.write(data(i).toInt)
-data.toSeq == builder.result
+data.toSeq == builder.result()
 }
 }
 }
@@ -132,7 +132,7 @@ class IndexSpec extends AkkaSpec with Matchers with DefaultTimeout {
 case 3 => readTask()
 }

-val tasks = List.fill(nrOfTasks)(executeRandomTask)
+val tasks = List.fill(nrOfTasks)(executeRandomTask())

 tasks.foreach(Await.result(_, timeout.duration))
 }

@@ -520,7 +520,7 @@ abstract class ActorContextSpec extends ScalaTestWithActorTestKit with AnyWordSp

 "return the right context info" in {
 type Info = (ActorSystem[Nothing], ActorRef[String])
-val probe = TestProbe[Info]
+val probe = TestProbe[Info]()
 val actor = spawn(
 Behaviors
 .receivePartial[String] {

@@ -1274,7 +1274,7 @@ class SupervisionSpec extends ScalaTestWithActorTestKit("""
 }

 "not allow AbstractBehavior without setup" in {
-val contextProbe = createTestProbe[ActorContext[String]]
+val contextProbe = createTestProbe[ActorContext[String]]()
 spawn(Behaviors.setup[String] { context =>
 contextProbe.ref ! context
 Behaviors.empty
@@ -1298,7 +1298,7 @@ class SupervisionSpec extends ScalaTestWithActorTestKit("""
 }

 "detect AbstractBehavior with wrong ActorContext" in {
-val contextProbe = createTestProbe[ActorContext[String]]
+val contextProbe = createTestProbe[ActorContext[String]]()
 spawn(Behaviors.setup[String] { context =>
 contextProbe.ref ! context
 Behaviors.empty

@@ -49,8 +49,8 @@ class TypedSupervisingClassicSpec extends ScalaTestWithActorTestKit("""
 "Typed supervising classic" should {
 "default to restart" in {
 val ref: ActorRef[Protocol] = spawn(classicActorOf())
-val lifecycleProbe = TestProbe[String]
-val probe = TestProbe[SpawnedClassicActor]
+val lifecycleProbe = TestProbe[String]()
+val probe = TestProbe[SpawnedClassicActor]()
 ref ! SpawnClassicActor(classic.Props(new CLassicActor(lifecycleProbe.ref)), probe.ref)
 val spawnedClassic = probe.expectMessageType[SpawnedClassicActor].ref
 lifecycleProbe.expectMessage("preStart")

@@ -45,9 +45,9 @@ class EventStreamSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike wit
 }

 "a system event stream subscriber" must {
-val rootEventListener = testKit.createTestProbe[Root]
-val level1EventListener = testKit.createTestProbe[Level1]
-val rootEventListenerForLevel1 = testKit.createTestProbe[Root]
+val rootEventListener = testKit.createTestProbe[Root]()
+val level1EventListener = testKit.createTestProbe[Level1]()
+val rootEventListenerForLevel1 = testKit.createTestProbe[Root]()
 testKit.system.eventStream ! Subscribe(rootEventListener.ref)
 testKit.system.eventStream ! Subscribe(level1EventListener.ref)
 testKit.system.eventStream ! Subscribe[Level1](rootEventListenerForLevel1.ref)

@@ -133,7 +133,7 @@ class ActorSystemSpec

 "have a working thread factory" in {
 withSystem("thread", Behaviors.empty[String]) { sys =>
-val p = Promise[Int]
+val p = Promise[Int]()
 sys.threadFactory
 .newThread(new Runnable {
 def run(): Unit = p.success(42)

@@ -210,8 +210,8 @@ class RoutersSpec extends ScalaTestWithActorTestKit("""
 val router = spawn(Behaviors.setup[String](context =>
 new GroupRouterImpl(context, serviceKey, false, new RoutingLogics.RoundRobinLogic[String], true)))

-val reachableProbe = createTestProbe[String]
-val unreachableProbe = createTestProbe[String]
+val reachableProbe = createTestProbe[String]()
+val unreachableProbe = createTestProbe[String]()
 router
 .unsafeUpcast[Any] ! Receptionist.Listing(serviceKey, Set(reachableProbe.ref), Set(unreachableProbe.ref), false)
 router ! "one"
@@ -225,7 +225,7 @@ class RoutersSpec extends ScalaTestWithActorTestKit("""
 val router = spawn(Behaviors.setup[String](context =>
 new GroupRouterImpl(context, serviceKey, false, new RoutingLogics.RoundRobinLogic[String], true)))

-val unreachableProbe = createTestProbe[String]
+val unreachableProbe = createTestProbe[String]()
 router.unsafeUpcast[Any] ! Receptionist.Listing(
 serviceKey,
 Set.empty[ActorRef[String]],

@@ -660,7 +660,7 @@ class UnstashingSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike with
 }

 "deal with initial stop" in {
-val probe = TestProbe[Any]
+val probe = TestProbe[Any]()
 val ref = spawn(Behaviors.withStash[String](10) { stash =>
 stash.stash("one")

@@ -675,7 +675,7 @@ class UnstashingSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike with
 }

 "deal with stop" in {
-val probe = TestProbe[Any]
+val probe = TestProbe[Any]()
 val deadLetterProbe = createDeadLetterProbe()

 val ref = spawn(Behaviors.withStash[String](10) { stash =>
@@ -699,7 +699,7 @@ class UnstashingSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike with
 }

 "work with initial same" in {
-val probe = TestProbe[Any]
+val probe = TestProbe[Any]()
 val ref = spawn(Behaviors.withStash[String](10) { stash =>
 stash.stash("one")
 stash.stash("two")
@@ -195,7 +195,7 @@ private class RestartSupervisor[T, Thr <: Throwable: ClassTag](initial: Behavior

 private def deadlineHasTimeLeft: Boolean = deadline match {
 case OptionVal.None => true
-case OptionVal.Some(d) => d.hasTimeLeft
+case OptionVal.Some(d) => d.hasTimeLeft()
 }

 override def aroundSignal(ctx: TypedActorContext[Any], signal: Signal, target: SignalTarget[T]): Behavior[T] = {

@@ -168,7 +168,7 @@ object ByteIterator {
 if ((off < 0) || (len < 0) || (off + len > b.length)) throw new IndexOutOfBoundsException
 if (len == 0) 0
 else if (!isEmpty) {
-val nRead = math.min(available, len)
+val nRead = math.min(available(), len)
 copyToArray(b, off, nRead)
 nRead
 } else -1
@@ -269,7 +269,7 @@ object ByteIterator {
 }
 iterators = iterators.tail
 }
-iterators = builder.result
+iterators = builder.result()
 normalize()
 }

@@ -294,7 +294,7 @@ object ByteIterator {
 if (current.len < lastLen) stop = true
 dropCurrent()
 }
-iterators = builder.result
+iterators = builder.result()
 normalize()
 }

@@ -1334,7 +1334,7 @@ final class ByteStringBuilder extends Builder[Byte, ByteString] {
 if (_length == 0) ByteString.empty
 else {
 clearTemp()
-val bytestrings = _builder.result
+val bytestrings = _builder.result()
 if (bytestrings.size == 1)
 bytestrings.head
 else

@@ -1017,7 +1017,7 @@ private[akka] class ActorSystemImpl(
 _initialized = true

 if (settings.LogDeadLetters > 0)
-logDeadLetterListener = Some(systemActorOf(Props[DeadLetterListener], "deadLetterListener"))
+logDeadLetterListener = Some(systemActorOf(Props[DeadLetterListener](), "deadLetterListener"))
 eventStream.startUnsubscriber()
 ManifestInfo(this).checkSameVersion("Akka", allModules, logWarning = true)
 if (!terminating)

@@ -710,7 +710,7 @@ final class CoordinatedShutdown private[akka] (
 val deadline = Deadline.now + timeout
 val timeoutFut = try {
 after(timeout, system.scheduler) {
-if (phaseName == CoordinatedShutdown.PhaseActorSystemTerminate && deadline.hasTimeLeft) {
+if (phaseName == CoordinatedShutdown.PhaseActorSystemTerminate && deadline.hasTimeLeft()) {
 // too early, i.e. triggered by system termination
 result
 } else if (result.isCompleted)

@@ -466,7 +466,7 @@ trait FSM[S, D] extends Actor with Listeners with ActorLogging {
 /**
 * Produce change descriptor to stop this FSM actor including specified reason.
 */
-final def stop(reason: Reason, stateData: D): State = stay.using(stateData).withStopReason(reason)
+final def stop(reason: Reason, stateData: D): State = stay().using(stateData).withStopReason(reason)

 final class TransformHelper(func: StateFunction) {
 def using(andThen: PartialFunction[State, State]): StateFunction =
@@ -559,7 +559,7 @@ trait FSM[S, D] extends Actor with Listeners with ActorLogging {
 if (timers contains name) {
 timers(name).cancel()
 }
-val timer = Timer(name, msg, mode, timerGen.next, this)(context)
+val timer = Timer(name, msg, mode, timerGen.next(), this)(context)
 timer.schedule(self, timeout)
 timers(name) = timer
 }
@@ -728,7 +728,7 @@ trait FSM[S, D] extends Actor with Listeners with ActorLogging {
 private val handleEventDefault: StateFunction = {
 case Event(value, _) =>
 log.warning("unhandled event " + value + " in state " + stateName)
-stay
+stay()
 }
 private var handleEvent: StateFunction = handleEventDefault

@@ -821,7 +821,7 @@ trait FSM[S, D] extends Actor with Listeners with ActorLogging {

 private[akka] def makeTransition(nextState: State): Unit = {
 if (!stateFunctions.contains(nextState.stateName)) {
-terminate(stay.withStopReason(Failure("Next state %s does not exist".format(nextState.stateName))))
+terminate(stay().withStopReason(Failure("Next state %s does not exist".format(nextState.stateName))))
 } else {
 nextState.replies.reverse.foreach { r =>
 sender() ! r
@@ -862,7 +862,7 @@ trait FSM[S, D] extends Actor with Listeners with ActorLogging {
 * setting this instance’s state to terminated does no harm during restart
 * since the new instance will initialize fresh using startWith()
 */
-terminate(stay.withStopReason(Shutdown))
+terminate(stay().withStopReason(Shutdown))
 super.postStop()
 }
@@ -39,7 +39,7 @@ object Props extends AbstractProps {
 /**
 * A Props instance whose creator will create an actor that doesn't respond to any message
 */
-final val empty = Props[EmptyActor]
+final val empty = Props[EmptyActor]()

 /**
 * The default Props instance, uses the settings from the Props object starting with default*.

@@ -149,7 +149,7 @@ private[akka] class RepointableActorRef(

 def getChild(name: Iterator[String]): InternalActorRef =
 if (name.hasNext) {
-name.next match {
+name.next() match {
 case ".." => getParent.getChild(name)
 case "" => getChild(name)
 case other =>

@@ -48,7 +48,7 @@ trait TypedActorFactory {
 */
 def stop(proxy: AnyRef): Boolean = getActorRefFor(proxy) match {
 case null => false
-case ref => ref.asInstanceOf[InternalActorRef].stop; true
+case ref => ref.asInstanceOf[InternalActorRef].stop(); true
 }

 /**
@@ -77,7 +77,7 @@ trait TypedActorFactory {
 val proxyVar = new AtomVar[R] //Chicken'n'egg-resolver
 val c = props.creator //Cache this to avoid closing over the Props
 val i = props.interfaces //Cache this to avoid closing over the Props
-val ap = Props(new TypedActor.TypedActor[R, T](proxyVar, c(), i)).withDeploy(props.actorProps.deploy)
+val ap = Props(new TypedActor.TypedActor[R, T](proxyVar, c(), i)).withDeploy(props.actorProps().deploy)
 typedActor.createActorRefProxy(props, proxyVar, actorFactory.actorOf(ap))
 }

@@ -88,7 +88,7 @@ trait TypedActorFactory {
 val proxyVar = new AtomVar[R] //Chicken'n'egg-resolver
 val c = props.creator //Cache this to avoid closing over the Props
 val i = props.interfaces //Cache this to avoid closing over the Props
-val ap = Props(new akka.actor.TypedActor.TypedActor[R, T](proxyVar, c(), i)).withDeploy(props.actorProps.deploy)
+val ap = Props(new akka.actor.TypedActor.TypedActor[R, T](proxyVar, c(), i)).withDeploy(props.actorProps().deploy)
 typedActor.createActorRefProxy(props, proxyVar, actorFactory.actorOf(ap, name))
 }

@@ -272,7 +272,7 @@ object TypedActor extends ExtensionId[TypedActorExtension] with ExtensionIdProvi
 private val me = withContext[T](createInstance)

 override def supervisorStrategy: SupervisorStrategy = me match {
-case l: Supervisor => l.supervisorStrategy
+case l: Supervisor => l.supervisorStrategy()
 case _ => super.supervisorStrategy
 }

@@ -73,7 +73,7 @@ private[akka] trait Dispatch { this: ActorCell =>
 */
 // we need to delay the failure to the point of actor creation so we can handle
 // it properly in the normal way
-val actorClass = props.actorClass
+val actorClass = props.actorClass()
 val createMessage = mailboxType match {
 case _: ProducesMessageQueue[_] if system.mailboxes.hasRequiredType(actorClass) =>
 val req = system.mailboxes.getRequiredType(actorClass)

@@ -103,7 +103,7 @@ private[akka] class BalancingDispatcher(
 if (messageQueue.hasMessages
 && i.hasNext
 && (executorService.executor match {
-case lm: LoadMetrics => !lm.atFullThrottle
+case lm: LoadMetrics => !lm.atFullThrottle()
 case _ => true
 })
 && !registerForExecution(i.next.mailbox, false, false))
@@ -510,10 +510,10 @@ trait QueueBasedMessageQueue extends MessageQueue with MultipleConsumerSemantics
 def hasMessages = !queue.isEmpty
 def cleanUp(owner: ActorRef, deadLetters: MessageQueue): Unit = {
 if (hasMessages) {
-var envelope = dequeue
+var envelope = dequeue()
 while (envelope ne null) {
 deadLetters.enqueue(owner, envelope)
-envelope = dequeue
+envelope = dequeue()
 }
 }
 }

@@ -139,7 +139,7 @@ private[akka] class Mailboxes(
 protected[akka] def getMailboxType(props: Props, dispatcherConfig: Config): MailboxType = {
 val id = dispatcherConfig.getString("id")
 val deploy = props.deploy
-val actorClass = props.actorClass
+val actorClass = props.actorClass()
 lazy val actorRequirement = getRequiredType(actorClass)

 val mailboxRequirement: Class[_] = getMailboxRequirement(dispatcherConfig)

@@ -39,7 +39,7 @@ private[akka] class LoggerMailbox(@unused owner: ActorRef, system: ActorSystem)
 override def cleanUp(owner: ActorRef, deadLetters: MessageQueue): Unit = {
 if (hasMessages) {
 val logLevel = system.eventStream.logLevel
-var envelope = dequeue
+var envelope = dequeue()
 // Drain all remaining messages to the StandardOutLogger.
 // cleanUp is called after switching out the mailbox, which is why
 // this kind of look works without a limit.
@@ -54,7 +54,7 @@ private[akka] class LoggerMailbox(@unused owner: ActorRef, system: ActorSystem)
 case _ => // skip
 }

-envelope = dequeue
+envelope = dequeue()
 }
 }
 super.cleanUp(owner, deadLetters)

@@ -50,7 +50,7 @@ abstract class LookupEventBus[E, S, C] extends EventBus[E, S, C] {
 type Subscriber = S
 type Classifier = C

-override protected def mapSize: Int = LookupEventBus.this.mapSize
+override protected def mapSize: Int = LookupEventBus.this.mapSize()

 override protected def compareSubscribers(a: S, b: S): Int =
 LookupEventBus.this.compareSubscribers(a, b)
@@ -197,7 +197,7 @@ abstract class ManagedActorEventBus[E](system: ActorSystem) extends EventBus[E,

 override val system = ManagedActorEventBus.this.system

-override protected def mapSize: Int = ManagedActorEventBus.this.mapSize
+override protected def mapSize: Int = ManagedActorEventBus.this.mapSize()

 override protected def classify(event: E): ActorRef =
 ManagedActorEventBus.this.classify(event)

@@ -30,7 +30,7 @@ class SimpleDnsCache extends Dns with PeriodicCacheCleanup with NoSerializationV
 new Cache[(String, RequestType), Resolved](
 immutable.SortedSet()(expiryEntryOrdering()),
 immutable.Map(),
-() => clock))
+() => clock()))

 private val nanoBase = System.nanoTime()

@@ -769,12 +769,12 @@ class CircuitBreaker(
 materialize(body).onComplete {
 case Success(result) =>
 p.trySuccess(result)
-timeout.cancel
+timeout.cancel()
 case Failure(ex) =>
 if (p.tryFailure(ex)) {
 notifyCallFailureListeners(start)
 }
-timeout.cancel
+timeout.cancel()
 }(parasitic)
 p.future
 }
@@ -26,7 +26,7 @@ hand checking:
 class ActorCreationBenchmark {
 implicit val system: ActorSystem = ActorSystem()

-final val props = Props[MyActor]
+final val props = Props[MyActor]()

 var i = 1
 def name = {

@@ -48,7 +48,7 @@ object BenchmarkActors {
 }

 class EchoSender(messagesPerPair: Int, latch: CountDownLatch, batchSize: Int) extends Actor {
-private val echo = context.actorOf(Props[Echo].withDispatcher(context.props.dispatcher), "echo")
+private val echo = context.actorOf(Props[Echo]().withDispatcher(context.props.dispatcher), "echo")

 private var left = messagesPerPair / 2
 private var batch = 0

@@ -21,7 +21,7 @@ class RouterPoolCreationBenchmark {
 implicit val system: ActorSystem = ActorSystem()
 val probe = TestProbe()

-Props[TestActors.EchoActor]
+Props[TestActors.EchoActor]()

 @Param(Array("1000", "2000", "3000", "4000"))
 var size = 0

@@ -18,7 +18,7 @@ object StashCreationBenchmark {
 }
 }

-val props = Props[StashingActor]
+val props = Props[StashingActor]()
 }

 @State(Scope.Benchmark)

@@ -61,7 +61,7 @@ class TellOnlyBenchmark {

 @Setup(Level.Iteration)
 def setupIteration(): Unit = {
-actor = system.actorOf(Props[TellOnlyBenchmark.Echo].withDispatcher("dropping-dispatcher"))
+actor = system.actorOf(Props[TellOnlyBenchmark.Echo]().withDispatcher("dropping-dispatcher"))
 probe = TestProbe()
 probe.watch(actor)
 probe.send(actor, message)

@@ -52,7 +52,7 @@ class LevelDbBatchingBenchmark {
 SharedLeveldbJournal.setStore(store, sys)

 probe = TestProbe()(sys)
-store = sys.actorOf(Props[SharedLeveldbStore], "store")
+store = sys.actorOf(Props[SharedLeveldbStore](), "store")
 }

 @TearDown(Level.Trial)

@@ -45,7 +45,7 @@ class PersistentActorWithAtLeastOnceDeliveryBenchmark {

 storageLocations.foreach(FileUtils.deleteDirectory)

-destinationActor = system.actorOf(Props[DestinationActor], "destination")
+destinationActor = system.actorOf(Props[DestinationActor](), "destination")

 noPersistPersistentActorWithAtLeastOnceDelivery = system.actorOf(
 Props(classOf[NoPersistPersistentActorWithAtLeastOnceDelivery], dataCount, probe.ref, destinationActor.path),

@@ -108,7 +108,7 @@ class JmxMetricsCollector(address: Address, decayFactor: Double) extends Metrics
 * Samples and collects new data points.
 * Creates a new instance each time.
 */
-def sample(): NodeMetrics = NodeMetrics(address, newTimestamp, metrics)
+def sample(): NodeMetrics = NodeMetrics(address, newTimestamp, metrics())

 /**
 * Generate metrics set.
@@ -209,7 +209,7 @@ class SigarMetricsCollector(address: Address, decayFactor: Double, sigar: SigarP
 override def metrics(): Set[Metric] = {
 // Must obtain cpuPerc in one shot. See https://github.com/akka/akka/issues/16121
 val cpuPerc = sigar.getCpuPerc
-super.metrics.union(Set(cpuCombined(cpuPerc), cpuStolen(cpuPerc)).flatten)
+super.metrics().union(Set(cpuCombined(cpuPerc), cpuStolen(cpuPerc)).flatten)
 }

 /**
@@ -111,7 +111,7 @@ abstract class ClusterMetricsEnabledSpec
 //awaitAssert(clusterView.clusterMetrics.size should ===(roles.size))
 awaitAssert(metricsView.clusterMetrics.size should ===(roles.size))
 val collector = MetricsCollector(cluster.system)
-collector.sample.metrics.size should be > (3)
+collector.sample().metrics.size should be > (3)
 enterBarrier("after")
 }
 "reflect the correct number of node metrics in cluster view" in within(30 seconds) {
@@ -150,7 +150,7 @@ abstract class ClusterMetricsDisabledSpec
 //clusterView.clusterMetrics.size should ===(0)
 metricsView.clusterMetrics.size should ===(0)
 ClusterMetricsExtension(system).subscribe(testActor)
-expectNoMessage
+expectNoMessage()
 // TODO ensure same contract
 //clusterView.clusterMetrics.size should ===(0)
 metricsView.clusterMetrics.size should ===(0)

@@ -150,7 +150,7 @@ abstract class AdaptiveLoadBalancingRouterSpec
 ClusterRouterPool(
 local = AdaptiveLoadBalancingPool(HeapMetricsSelector),
 settings = ClusterRouterPoolSettings(totalInstances = 10, maxInstancesPerNode = 1, allowLocalRoutees = true))
-.props(Props[Echo]),
+.props(Props[Echo]()),
 name)
 // it may take some time until router receives cluster member events
 awaitAssert { currentRoutees(router).size should ===(roles.size) }
@@ -201,7 +201,7 @@ abstract class AdaptiveLoadBalancingRouterSpec

 runOn(node2) {
 within(20.seconds) {
-system.actorOf(Props[Memory], "memory") ! AllocateMemory
+system.actorOf(Props[Memory](), "memory") ! AllocateMemory
 expectMsg("done")
 }
 }
@@ -230,7 +230,7 @@ abstract class AdaptiveLoadBalancingRouterSpec

 "create routees from configuration" taggedAs LongRunningTest in {
 runOn(node1) {
-val router3 = system.actorOf(FromConfig.props(Props[Memory]), "router3")
+val router3 = system.actorOf(FromConfig.props(Props[Memory]()), "router3")
 // it may take some time until router receives cluster member events
 awaitAssert { currentRoutees(router3).size should ===(9) }
 val routees = currentRoutees(router3)
@@ -241,7 +241,7 @@ abstract class AdaptiveLoadBalancingRouterSpec

 "create routees from cluster.enabled configuration" taggedAs LongRunningTest in {
 runOn(node1) {
-val router4 = system.actorOf(FromConfig.props(Props[Memory]), "router4")
+val router4 = system.actorOf(FromConfig.props(Props[Memory]()), "router4")
 // it may take some time until router receives cluster member events
 awaitAssert { currentRoutees(router4).size should ===(6) }
 val routees = currentRoutees(router4)

@@ -107,8 +107,8 @@ abstract class StatsSampleSpec
 Cluster(system).join(firstAddress)
 //#join

-system.actorOf(Props[StatsWorker], "statsWorker")
-system.actorOf(Props[StatsService], "statsService")
+system.actorOf(Props[StatsWorker](), "statsWorker")
+system.actorOf(Props[StatsService](), "statsService")

 receiveN(3).collect { case MemberUp(m) => m.address }.toSet should be(
 Set(firstAddress, secondAddress, thirdAddress))

@@ -15,7 +15,7 @@ class StatsService extends Actor {
 // This router is used both with lookup and deploy of routees. If you
 // have a router with only lookup of routees you can use Props.empty
 // instead of Props[StatsWorker.class].
-val workerRouter = context.actorOf(FromConfig.props(Props[StatsWorker]), name = "workerRouter")
+val workerRouter = context.actorOf(FromConfig.props(Props[StatsWorker]()), name = "workerRouter")

 def receive = {
 case StatsJob(text) if text != "" =>
@@ -76,7 +76,7 @@ abstract class StatsService3 extends Actor {
 ClusterRouterPool(
 ConsistentHashingPool(0),
 ClusterRouterPoolSettings(totalInstances = 100, maxInstancesPerNode = 3, allowLocalRoutees = false))
-.props(Props[StatsWorker]),
+.props(Props[StatsWorker]()),
 name = "workerRouter3")
 //#router-deploy-in-code
 }

@@ -82,7 +82,7 @@ class EWMASpec extends AkkaSpec(MetricsConfig.defaultEnabled) with MetricsCollec
 // wait a while between each message to give the metrics a chance to change
 Thread.sleep(100)
 usedMemory = usedMemory ++ Array.fill(1024)(ThreadLocalRandom.current.nextInt(127).toByte)
-val changes = collector.sample.metrics.flatMap { latest =>
+val changes = collector.sample().metrics.flatMap { latest =>
 streamingDataSet.get(latest.name) match {
 case None => Some(latest)
 case Some(previous) =>
@ -146,13 +146,13 @@ class MetricsGossipSpec
|
||||||
*/
|
*/
|
||||||
def newSample(previousSample: Set[Metric]): Set[Metric] = {
|
def newSample(previousSample: Set[Metric]): Set[Metric] = {
|
||||||
// Metric.equals is based on name equality
|
// Metric.equals is based on name equality
|
||||||
collector.sample.metrics.filter(previousSample.contains) ++ previousSample
|
collector.sample().metrics.filter(previousSample.contains) ++ previousSample
|
||||||
}
|
}
|
||||||
|
|
||||||
"A MetricsGossip" must {
|
"A MetricsGossip" must {
|
||||||
"add new NodeMetrics" in {
|
"add new NodeMetrics" in {
|
||||||
val m1 = NodeMetrics(Address("akka", "sys", "a", 2554), newTimestamp, collector.sample.metrics)
|
val m1 = NodeMetrics(Address("akka", "sys", "a", 2554), newTimestamp, collector.sample().metrics)
|
||||||
val m2 = NodeMetrics(Address("akka", "sys", "a", 2555), newTimestamp, collector.sample.metrics)
|
val m2 = NodeMetrics(Address("akka", "sys", "a", 2555), newTimestamp, collector.sample().metrics)
|
||||||
|
|
||||||
m1.metrics.size should be > 3
|
m1.metrics.size should be > 3
|
||||||
m2.metrics.size should be > 3
|
m2.metrics.size should be > 3
|
||||||
|
|
@ -168,8 +168,8 @@ class MetricsGossipSpec
|
||||||
}
|
}
|
||||||
|
|
||||||
"merge peer metrics" in {
|
"merge peer metrics" in {
|
||||||
val m1 = NodeMetrics(Address("akka", "sys", "a", 2554), newTimestamp, collector.sample.metrics)
|
val m1 = NodeMetrics(Address("akka", "sys", "a", 2554), newTimestamp, collector.sample().metrics)
|
||||||
val m2 = NodeMetrics(Address("akka", "sys", "a", 2555), newTimestamp, collector.sample.metrics)
|
val m2 = NodeMetrics(Address("akka", "sys", "a", 2555), newTimestamp, collector.sample().metrics)
|
||||||
|
|
||||||
val g1 = MetricsGossip.empty :+ m1 :+ m2
|
val g1 = MetricsGossip.empty :+ m1 :+ m2
|
||||||
g1.nodes.size should ===(2)
|
g1.nodes.size should ===(2)
|
||||||
|
|
@ -183,9 +183,9 @@ class MetricsGossipSpec
|
||||||
}
|
}
|
||||||
|
|
||||||
"merge an existing metric set for a node and update node ring" in {
|
"merge an existing metric set for a node and update node ring" in {
|
||||||
val m1 = NodeMetrics(Address("akka", "sys", "a", 2554), newTimestamp, collector.sample.metrics)
|
val m1 = NodeMetrics(Address("akka", "sys", "a", 2554), newTimestamp, collector.sample().metrics)
|
||||||
val m2 = NodeMetrics(Address("akka", "sys", "a", 2555), newTimestamp, collector.sample.metrics)
|
val m2 = NodeMetrics(Address("akka", "sys", "a", 2555), newTimestamp, collector.sample().metrics)
|
||||||
val m3 = NodeMetrics(Address("akka", "sys", "a", 2556), newTimestamp, collector.sample.metrics)
|
val m3 = NodeMetrics(Address("akka", "sys", "a", 2556), newTimestamp, collector.sample().metrics)
|
||||||
val m2Updated = m2.copy(metrics = newSample(m2.metrics), timestamp = m2.timestamp + 1000)
|
val m2Updated = m2.copy(metrics = newSample(m2.metrics), timestamp = m2.timestamp + 1000)
|
||||||
|
|
||||||
val g1 = MetricsGossip.empty :+ m1 :+ m2
|
val g1 = MetricsGossip.empty :+ m1 :+ m2
|
||||||
|
|
@ -204,14 +204,14 @@ class MetricsGossipSpec
|
||||||
}
|
}
|
||||||
|
|
||||||
"get the current NodeMetrics if it exists in the local nodes" in {
|
"get the current NodeMetrics if it exists in the local nodes" in {
|
||||||
val m1 = NodeMetrics(Address("akka", "sys", "a", 2554), newTimestamp, collector.sample.metrics)
|
val m1 = NodeMetrics(Address("akka", "sys", "a", 2554), newTimestamp, collector.sample().metrics)
|
||||||
val g1 = MetricsGossip.empty :+ m1
|
val g1 = MetricsGossip.empty :+ m1
|
||||||
g1.nodeMetricsFor(m1.address).map(_.metrics) should ===(Some(m1.metrics))
|
g1.nodeMetricsFor(m1.address).map(_.metrics) should ===(Some(m1.metrics))
|
||||||
}
|
}
|
||||||
|
|
||||||
"remove a node if it is no longer Up" in {
|
"remove a node if it is no longer Up" in {
|
||||||
val m1 = NodeMetrics(Address("akka", "sys", "a", 2554), newTimestamp, collector.sample.metrics)
|
val m1 = NodeMetrics(Address("akka", "sys", "a", 2554), newTimestamp, collector.sample().metrics)
|
||||||
val m2 = NodeMetrics(Address("akka", "sys", "a", 2555), newTimestamp, collector.sample.metrics)
|
val m2 = NodeMetrics(Address("akka", "sys", "a", 2555), newTimestamp, collector.sample().metrics)
|
||||||
|
|
||||||
val g1 = MetricsGossip.empty :+ m1 :+ m2
|
val g1 = MetricsGossip.empty :+ m1 :+ m2
|
||||||
g1.nodes.size should ===(2)
|
g1.nodes.size should ===(2)
|
||||||
|
|
@ -223,8 +223,8 @@ class MetricsGossipSpec
|
||||||
}
|
}
|
||||||
|
|
||||||
"filter nodes" in {
|
"filter nodes" in {
|
||||||
val m1 = NodeMetrics(Address("akka", "sys", "a", 2554), newTimestamp, collector.sample.metrics)
|
val m1 = NodeMetrics(Address("akka", "sys", "a", 2554), newTimestamp, collector.sample().metrics)
|
||||||
val m2 = NodeMetrics(Address("akka", "sys", "a", 2555), newTimestamp, collector.sample.metrics)
|
val m2 = NodeMetrics(Address("akka", "sys", "a", 2555), newTimestamp, collector.sample().metrics)
|
||||||
|
|
||||||
val g1 = MetricsGossip.empty :+ m1 :+ m2
|
val g1 = MetricsGossip.empty :+ m1 :+ m2
|
||||||
g1.nodes.size should ===(2)
|
g1.nodes.size should ===(2)
|
||||||
|
|
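The hunks above are all the same mechanical rewrite: `sample` is declared in the metrics collector with an empty parameter list, so ExplicitNonNullaryApply adds the missing `()` at every call site. Auto-application of such methods is deprecated in Scala 2.13 and rejected by Dotty, which is what this phase prepares for. A minimal standalone sketch of the pattern (the names below are illustrative, not taken from the Akka sources):

```
// Minimal, self-contained sketch of the ExplicitNonNullaryApply pattern.
// Collector and Sample are illustrative names, not from the Akka code base.
object NonNullaryApplyExample {

  final case class Sample(metrics: Set[String])

  class Collector {
    // Declared with an empty parameter list, so callers should also write `()`.
    def sample(): Sample = Sample(Set("heap-memory-used", "system-load-average"))
  }

  def main(args: Array[String]): Unit = {
    val collector = new Collector
    // Before the rewrite: collector.sample.metrics   (auto-application)
    // After the rewrite:  collector.sample().metrics
    val metrics = collector.sample().metrics
    println(metrics.mkString(", "))
  }
}
```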
@@ -243,16 +243,20 @@ class MetricValuesSpec extends AkkaSpec(MetricsConfig.defaultEnabled) with Metri

   val collector = createMetricsCollector

-  val node1 = NodeMetrics(Address("akka", "sys", "a", 2554), 1, collector.sample.metrics)
-  val node2 = NodeMetrics(Address("akka", "sys", "a", 2555), 1, collector.sample.metrics)
+  val node1 = NodeMetrics(Address("akka", "sys", "a", 2554), 1, collector.sample().metrics)
+  val node2 = NodeMetrics(Address("akka", "sys", "a", 2555), 1, collector.sample().metrics)

   val nodes: Seq[NodeMetrics] = {
     (1 to 100).foldLeft(List(node1, node2)) { (nodes, _) =>
       nodes.map { n =>
-        n.copy(metrics = collector.sample.metrics.flatMap(latest =>
-          n.metrics.collect {
-            case streaming if latest.sameAs(streaming) => streaming :+ latest
-          }))
+        n.copy(
+          metrics = collector
+            .sample()
+            .metrics
+            .flatMap(latest =>
+              n.metrics.collect {
+                case streaming if latest.sameAs(streaming) => streaming :+ latest
+              }))
       }
     }
   }

@@ -23,8 +23,8 @@ class MetricsCollectorSpec

     "merge 2 metrics that are tracking the same metric" in {
       for (_ <- 1 to 20) {
-        val sample1 = collector.sample.metrics
-        val sample2 = collector.sample.metrics
+        val sample1 = collector.sample().metrics
+        val sample2 = collector.sample().metrics
         sample2.flatMap(latest =>
           sample1.collect {
             case peer if latest.sameAs(peer) =>

@@ -34,8 +34,8 @@ class MetricsCollectorSpec
             m
           })

-        val sample3 = collector.sample.metrics
-        val sample4 = collector.sample.metrics
+        val sample3 = collector.sample().metrics
+        val sample4 = collector.sample().metrics
         sample4.flatMap(latest =>
           sample3.collect {
             case peer if latest.sameAs(peer) =>

@@ -55,7 +55,7 @@ class MetricsCollectorSpec
     }

     "collect accurate metrics for a node" in {
-      val sample = collector.sample
+      val sample = collector.sample()
       val metrics = sample.metrics.collect { case m => (m.name, m.value) }
       val used = metrics.collectFirst { case (HeapMemoryUsed, b) => b }
       val committed = metrics.collectFirst { case (HeapMemoryCommitted, b) => b }

@@ -93,7 +93,7 @@ class MetricsCollectorSpec

     "collect 50 node metrics samples in an acceptable duration" taggedAs LongRunningTest in within(10 seconds) {
       (1 to 50).foreach { _ =>
-        val sample = collector.sample
+        val sample = collector.sample()
         sample.metrics.size should be >= 3
         Thread.sleep(100)
       }
@@ -67,14 +67,14 @@ abstract class MultiDcClusterShardingSpec
     "init sharding" in {
       val sharding = ClusterSharding(typedSystem)
       val shardRegion: ActorRef[ShardingEnvelope[Command]] = sharding.init(Entity(typeKey)(_ => MultiDcPinger()))
-      val probe = TestProbe[Pong]
+      val probe = TestProbe[Pong]()
       shardRegion ! ShardingEnvelope(entityId, Ping(probe.ref))
       probe.expectMessage(max = 15.seconds, Pong(cluster.selfMember.dataCenter))
       enterBarrier("sharding-initialized")
     }

     "be able to message via entity ref" in {
-      val probe = TestProbe[Pong]
+      val probe = TestProbe[Pong]()
       val entityRef = ClusterSharding(typedSystem).entityRefFor(typeKey, entityId)
       entityRef ! Ping(probe.ref)
       probe.expectMessage(Pong(cluster.selfMember.dataCenter))

@@ -94,7 +94,7 @@ abstract class MultiDcClusterShardingSpec
       runOn(first, second) {
         val proxy: ActorRef[ShardingEnvelope[Command]] = ClusterSharding(typedSystem).init(
           Entity(typeKey)(_ => MultiDcPinger()).withSettings(ClusterShardingSettings(typedSystem).withDataCenter("dc2")))
-        val probe = TestProbe[Pong]
+        val probe = TestProbe[Pong]()
         proxy ! ShardingEnvelope(entityId, Ping(probe.ref))
         probe.expectMessage(remainingOrDefault, Pong("dc2"))
       }

@@ -108,7 +108,7 @@ abstract class MultiDcClusterShardingSpec
         val proxy: ActorRef[ShardingEnvelope[Command]] =
           ClusterSharding(system).init(Entity(typeKey)(_ => MultiDcPinger()).withDataCenter("dc2"))
         //#proxy-dc
-        val probe = TestProbe[Pong]
+        val probe = TestProbe[Pong]()
         proxy ! ShardingEnvelope(entityId, Ping(probe.ref))
         probe.expectMessage(remainingOrDefault, Pong("dc2"))
       }

@@ -125,7 +125,7 @@ abstract class MultiDcClusterShardingSpec
         val entityRef = ClusterSharding(system).entityRefFor(typeKey, entityId, "dc2")
         //#proxy-dc-entityref

-        val probe = TestProbe[Pong]
+        val probe = TestProbe[Pong]()
         entityRef ! Ping(probe.ref)
         probe.expectMessage(remainingOrDefault, Pong("dc2"))
       }
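The `TestProbe[Pong]` call sites get the same treatment: the typed testkit's probe factory is an ordinary `apply` with an empty parameter list, so the rule inserts the parentheses. A minimal sketch outside of this commit, assuming akka-actor-typed and akka-actor-testkit-typed are on the classpath (`Pong` here is a stand-in message type, not the one from MultiDcClusterShardingSpec):

```
// Sketch only; not part of this commit.
import akka.actor.testkit.typed.scaladsl.{ ActorTestKit, TestProbe }
import akka.actor.typed.ActorSystem

object TestProbeApplyExample {

  final case class Pong(dataCenter: String)

  def main(args: Array[String]): Unit = {
    val testKit = ActorTestKit()
    implicit val system: ActorSystem[Nothing] = testKit.system

    // TestProbe.apply[M]() takes an empty parameter list, so the rule rewrites
    // `TestProbe[Pong]` to `TestProbe[Pong]()`.
    val probe = TestProbe[Pong]()
    probe.ref ! Pong("dc1")
    probe.expectMessage(Pong("dc1"))

    testKit.shutdownTestKit()
  }
}
```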
@@ -181,7 +181,7 @@ class ClusterSharding(system: ExtendedActorSystem) extends Extension {
     val guardianName: String =
       system.settings.config.getString("akka.cluster.sharding.guardian-name")
     val dispatcher = system.settings.config.getString("akka.cluster.sharding.use-dispatcher")
-    system.systemActorOf(Props[ClusterShardingGuardian].withDispatcher(dispatcher), guardianName)
+    system.systemActorOf(Props[ClusterShardingGuardian]().withDispatcher(dispatcher), guardianName)
   }

   /**

@@ -91,7 +91,7 @@ object RemoveInternalClusterShardingData {
       if (journalPluginId == "") system.settings.config.getString("akka.persistence.journal.plugin")
       else journalPluginId
     if (resolvedJournalPluginId == "akka.persistence.journal.leveldb-shared") {
-      val store = system.actorOf(Props[SharedLeveldbStore], "store")
+      val store = system.actorOf(Props[SharedLeveldbStore](), "store")
       SharedLeveldbJournal.setStore(store, system)
     }

@@ -113,7 +113,7 @@ abstract class ClusterShardingCustomShardAllocationSpec(multiNodeConfig: Cluster

   lazy val region = ClusterSharding(system).shardRegion("Entity")

-  lazy val allocator = system.actorOf(Props[Allocator], "allocator")
+  lazy val allocator = system.actorOf(Props[Allocator](), "allocator")

   s"Cluster sharding ($mode) with custom allocation strategy" must {

@@ -95,7 +95,7 @@ abstract class ClusterShardingFailureSpec(multiNodeConfig: ClusterShardingFailur
         startSharding(
           system,
           typeName = "Entity",
-          entityProps = Props[Entity],
+          entityProps = Props[Entity](),
           extractEntityId = extractEntityId,
           extractShardId = extractShardId))
     }

@@ -56,7 +56,7 @@ abstract class ClusterShardingGracefulShutdownSpec(multiNodeConfig: ClusterShard
       startSharding(
         system,
         typeName,
-        entityProps = Props[ShardedEntity],
+        entityProps = Props[ShardedEntity](),
         extractEntityId = MultiNodeClusterShardingSpec.intExtractEntityId,
         extractShardId = MultiNodeClusterShardingSpec.intExtractShardId,
         allocationStrategy =

@@ -91,7 +91,7 @@ abstract class ClusterShardingLeavingSpec(multiNodeConfig: ClusterShardingLeavin
       startSharding(
         system,
         typeName = "Entity",
-        entityProps = Props[Entity],
+        entityProps = Props[Entity](),
         extractEntityId = extractEntityId,
         extractShardId = extractShardId)
     }

@@ -120,7 +120,7 @@ abstract class ClusterShardingLeavingSpec(multiNodeConfig: ClusterShardingLeavin

     "initialize shards" in {
       runOn(first) {
-        val shardLocations = system.actorOf(Props[ShardLocations], "shardLocations")
+        val shardLocations = system.actorOf(Props[ShardLocations](), "shardLocations")
         val locations = (for (n <- 1 to 10) yield {
           val id = n.toString
           region ! Ping(id)

@@ -66,7 +66,7 @@ abstract class ClusterShardingRegistrationCoordinatedShutdownSpec
       startSharding(
         system,
         typeName = "Entity",
-        entityProps = Props[ShardedEntity],
+        entityProps = Props[ShardedEntity](),
         extractEntityId = MultiNodeClusterShardingSpec.intExtractEntityId,
         extractShardId = MultiNodeClusterShardingSpec.intExtractShardId)

@@ -45,7 +45,7 @@ abstract class ClusterShardingSingleShardPerEntitySpec
       startSharding(
         system,
         typeName = "Entity",
-        entityProps = Props[ShardedEntity],
+        entityProps = Props[ShardedEntity](),
         extractEntityId = MultiNodeClusterShardingSpec.intExtractEntityId,
         extractShardId = MultiNodeClusterShardingSpec.intExtractShardId))
     }
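The remaining hunks rewrite classic `Props[SomeActor]` to `Props[SomeActor]()`, since `Props.apply[T: ClassTag]()` also takes an empty parameter list. A minimal sketch with a made-up actor class (EchoEntity is illustrative only, not one of the sharded entities above):

```
// Sketch only; not part of this commit.
import akka.actor.{ Actor, ActorSystem, Props }

class EchoEntity extends Actor {
  def receive: Receive = {
    case msg => sender() ! msg
  }
}

object PropsApplyExample {
  def main(args: Array[String]): Unit = {
    val system = ActorSystem("props-apply-example")

    // Props.apply[T: ClassTag]() has an empty parameter list, so the rule rewrites
    // `Props[EchoEntity]` to `Props[EchoEntity]()`.
    val echo = system.actorOf(Props[EchoEntity](), "echo")
    echo ! "ping"

    system.terminate()
  }
}
```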
Some files were not shown because too many files have changed in this diff.