/*
 * Copyright (C) 2009-2020 Lightbend Inc. <https://www.lightbend.com>
 */

package akka.cluster.singleton

import scala.concurrent.duration._

import com.typesafe.config.ConfigFactory

import akka.actor.{ Actor, ActorIdentity, ActorLogging, ActorRef, Address, Identify, PoisonPill, Props }
import akka.cluster._
import akka.cluster.MemberStatus.Up
import akka.cluster.singleton.ClusterSingletonManagerLeaseSpec.ImportantSingleton.Response
import akka.coordination.lease.TestLeaseActor
import akka.coordination.lease.TestLeaseActorClient
import akka.coordination.lease.TestLeaseActorClientExt
import akka.remote.testkit.{ MultiNodeConfig, MultiNodeSpec, STMultiNodeSpec }
import akka.testkit._

object ClusterSingletonManagerLeaseSpec extends MultiNodeConfig {
  val controller = role("controller")
  val first = role("first")
  val second = role("second")
  val third = role("third")
  val fourth = role("fourth")

  testTransport(true)
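
  // The lease plumbing under test: `akka.cluster.singleton.use-lease` points at the
  // `test-lease` block below, whose lease-class is the TestLeaseActorClient, so no
  // incarnation of the singleton may start until the test explicitly grants the lease.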

  commonConfig(ConfigFactory.parseString(s"""
    akka.loglevel = INFO
    akka.actor.provider = "cluster"
    akka.remote.log-remote-lifecycle-events = off
    akka.cluster.downing-provider-class = akka.cluster.testkit.AutoDowning
    akka.cluster.testkit.auto-down-unreachable-after = 0s
    test-lease {
      lease-class = ${classOf[TestLeaseActorClient].getName}
      heartbeat-interval = 1s
      heartbeat-timeout = 120s
      lease-operation-timeout = 3s
    }
    akka.cluster.singleton {
      use-lease = "test-lease"
    }
    """))

  nodeConfig(first, second, third)(ConfigFactory.parseString("akka.cluster.roles = [worker]"))
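
  // Only first, second and third carry the "worker" role; the singleton manager is
  // started withRole("worker") below, so the singleton can only ever run on those
  // three nodes, while controller and fourth merely participate in the cluster.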

  object ImportantSingleton {
    case class Response(msg: Any, address: Address) extends JavaSerializable

    def props(): Props = Props(new ImportantSingleton())
  }
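
  // Test singleton: echoes every message back together with its own node's address,
  // letting each test assert exactly which node the singleton is running on.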

  class ImportantSingleton extends Actor with ActorLogging {
    val selfAddress = Cluster(context.system).selfAddress

    override def preStart(): Unit = {
      log.info("Singleton starting")
    }

    override def postStop(): Unit = {
      log.info("Singleton stopping")
    }

    override def receive: Receive = {
      case msg =>
        sender() ! Response(msg, selfAddress)
    }
  }
}

class ClusterSingletonManagerLeaseMultiJvmNode1 extends ClusterSingletonManagerLeaseSpec
class ClusterSingletonManagerLeaseMultiJvmNode2 extends ClusterSingletonManagerLeaseSpec
class ClusterSingletonManagerLeaseMultiJvmNode3 extends ClusterSingletonManagerLeaseSpec
class ClusterSingletonManagerLeaseMultiJvmNode4 extends ClusterSingletonManagerLeaseSpec
class ClusterSingletonManagerLeaseMultiJvmNode5 extends ClusterSingletonManagerLeaseSpec
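
// Five-node multi-JVM spec for ClusterSingletonManager with a lease: the singleton may
// only start once the test-controlled lease is granted, and when the oldest worker node
// is downed the singleton must stay down until the next oldest node acquires the lease.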

class ClusterSingletonManagerLeaseSpec
    extends MultiNodeSpec(ClusterSingletonManagerLeaseSpec)
    with STMultiNodeSpec
    with ImplicitSender
    with MultiNodeClusterSpec {

  import ClusterSingletonManagerLeaseSpec._
  import ClusterSingletonManagerLeaseSpec.ImportantSingleton._
  import TestLeaseActor._

  override def initialParticipants = roles.size

  // used on the controller
  val leaseProbe = TestProbe()

  "Cluster singleton manager with lease" should {

    "form a cluster" in {
      awaitClusterUp(controller, first)
      enterBarrier("initial-up")
      runOn(second) {
        joinWithin(first)
        awaitAssert({
          cluster.state.members.toList.map(_.status) shouldEqual List(Up, Up, Up)
        }, 10.seconds)
      }
      enterBarrier("second-up")
      runOn(third) {
        joinWithin(first)
        awaitAssert({
          cluster.state.members.toList.map(_.status) shouldEqual List(Up, Up, Up, Up)
        }, 10.seconds)
      }
      enterBarrier("third-up")
      runOn(fourth) {
        joinWithin(first)
        awaitAssert({
          cluster.state.members.toList.map(_.status) shouldEqual List(Up, Up, Up, Up, Up)
        }, 10.seconds)
      }
      enterBarrier("fourth-up")
    }

    "start test lease" in {
      runOn(controller) {
        system.actorOf(TestLeaseActor.props(), s"lease-${system.name}")
      }
      enterBarrier("lease-actor-started")
    }
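
    // Each node resolves the controller's TestLeaseActor via Identify/ActorIdentity and
    // registers it with its local TestLeaseActorClientExt, so every lease call made by a
    // singleton manager is funnelled to the single actor the controller can script.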

    "find the lease on every node" in {
      system.actorSelection(node(controller) / "user" / s"lease-${system.name}") ! Identify(None)
      val leaseRef: ActorRef = expectMsgType[ActorIdentity].ref.get
      TestLeaseActorClientExt(system).setActorLease(leaseRef)
      enterBarrier("singleton-started")
    }

    "Start singleton and ping from all nodes" in {
      // fourth doesn't have the worker role
      runOn(first, second, third) {
        system.actorOf(
          ClusterSingletonManager
            .props(ImportantSingleton.props(), PoisonPill, ClusterSingletonManagerSettings(system).withRole("worker")),
          "important")
      }
      enterBarrier("singleton-started")

      val proxy = system.actorOf(
        ClusterSingletonProxy.props(
          singletonManagerPath = "/user/important",
          settings = ClusterSingletonProxySettings(system).withRole("worker")))

      runOn(first, second, third, fourth) {
        proxy ! "Ping"
        // lease has not been granted so not allowed to come up
        expectNoMessage(2.seconds)
      }

      enterBarrier("singleton-pending")

      runOn(controller) {
        TestLeaseActorClientExt(system).getLeaseActor() ! GetRequests
        expectMsg(LeaseRequests(List(Acquire(address(first).hostPort))))
        TestLeaseActorClientExt(system).getLeaseActor() ! ActionRequest(Acquire(address(first).hostPort), true)
      }
      enterBarrier("lease-acquired")

      runOn(first, second, third, fourth) {
        expectMsg(Response("Ping", address(first)))
      }
      enterBarrier("pinged")
    }

    "Move singleton when oldest node downed" in {
      cluster.state.members.size shouldEqual 5
      runOn(controller) {
        cluster.down(address(first))
        awaitAssert({
          cluster.state.members.toList.map(_.status) shouldEqual List(Up, Up, Up, Up)
        }, 20.seconds)
        val requests = awaitAssert({
          TestLeaseActorClientExt(system).getLeaseActor() ! GetRequests
          val msg = expectMsgType[LeaseRequests]
          withClue("Requests: " + msg) {
            msg.requests.size shouldEqual 2
          }
          msg
        }, 10.seconds)

        requests.requests should contain(Release(address(first).hostPort))
        requests.requests should contain(Acquire(address(second).hostPort))
      }
      runOn(second, third, fourth) {
        awaitAssert({
          cluster.state.members.toList.map(_.status) shouldEqual List(Up, Up, Up, Up)
        }, 20.seconds)
      }
      enterBarrier("first node downed")

      val proxy = system.actorOf(
        ClusterSingletonProxy.props(
          singletonManagerPath = "/user/important",
          settings = ClusterSingletonProxySettings(system).withRole("worker")))

      runOn(second, third, fourth) {
        proxy ! "Ping"
        // lease has not been granted so not allowed to come up
        expectNoMessage(2.seconds)
      }
      enterBarrier("singleton-not-migrated")

      runOn(controller) {
        TestLeaseActorClientExt(system).getLeaseActor() ! ActionRequest(Acquire(address(second).hostPort), true)
      }

      enterBarrier("singleton-moved-to-second")

      runOn(second, third, fourth) {
        proxy ! "Ping"
        expectMsg(Response("Ping", address(second)))
      }
      enterBarrier("finished")
    }
  }
}