Merge pull request #26962 from akka/wip-26957-ClusterShardingFailureSpec-patriknw
Deliver buffered messages after passivation, #26957
commit d9b6f633a0

3 changed files with 27 additions and 6 deletions
@@ -430,9 +430,18 @@ private[akka] class Shard(
 
   // EntityStopped handler
   def passivateCompleted(event: EntityStopped): Unit = {
-    log.debug("Entity stopped after passivation [{}]", event.entityId)
+    val hasBufferedMessages = messageBuffers.getOrEmpty(event.entityId).nonEmpty
     state = state.copy(state.entities - event.entityId)
-    messageBuffers.remove(event.entityId)
+    if (hasBufferedMessages) {
+      log.debug(
+        "Entity stopped after passivation [{}], but will be started again due to buffered messages.",
+        event.entityId)
+      processChange(EntityStarted(event.entityId))(sendMsgBuffer)
+    } else {
+      log.debug("Entity stopped after passivation [{}]", event.entityId)
+      messageBuffers.remove(event.entityId)
+    }
   }
 
   // EntityStarted handler
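The hunk above is the core of the fix for #26957. Previously, passivateCompleted unconditionally removed the entity's message buffer, so messages that had arrived (and been buffered) while the entity was stopping were dropped. Now the buffer is checked first: if it is non-empty, the entity is started again via processChange(EntityStarted(...))(sendMsgBuffer), which, judging by the handler names, records the start and then flushes the buffered messages to the restarted entity. A minimal, self-contained sketch of that buffer-then-flush pattern (BufferSketch and its methods are illustrative; they only loosely mirror Akka's internal MessageBufferMap):

import scala.collection.mutable

// Sketch of per-entity message buffering, assuming String entity ids.
final class BufferSketch[Msg] {
  private val buffers = mutable.Map.empty[String, Vector[Msg]]

  // Buffer a message that arrives while the entity is passivating.
  def append(entityId: String, msg: Msg): Unit =
    buffers.update(entityId, buffers.getOrElse(entityId, Vector.empty) :+ msg)

  def getOrEmpty(entityId: String): Vector[Msg] =
    buffers.getOrElse(entityId, Vector.empty)

  def remove(entityId: String): Unit =
    buffers -= entityId

  // Mirrors passivateCompleted above: if messages were buffered while the
  // entity stopped, restart it and deliver them; otherwise drop the buffer.
  def onStopped(entityId: String)(restartAndDeliver: Vector[Msg] => Unit): Unit = {
    val buffered = getOrEmpty(entityId)
    if (buffered.nonEmpty) restartAndDeliver(buffered)
    else remove(entityId)
  }
}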
@@ -1171,8 +1171,14 @@ class DDataShardCoordinator(
     unstashAll()
   }
 
-  private def stashGetShardHomeRequest(sender: ActorRef, request: GetShardHome): Unit =
+  private def stashGetShardHomeRequest(sender: ActorRef, request: GetShardHome): Unit = {
+    log.debug(
+      "GetShardHome [{}] request from [{}] stashed, because waiting for initial state or update of state. " +
+      "It will be handled afterwards.",
+      request.shard,
+      sender)
     getShardHomeRequests += (sender -> request)
+  }
 
   private def unstashGetShardHomeRequests(): Unit = {
     getShardHomeRequests.foreach {
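This coordinator hunk changes no behavior; it only adds a debug log. GetShardHome requests were already being stashed while the DData coordinator waits for its initial state (or for a state update) and replayed afterwards; the new log line just makes that wait observable. A hedged sketch of the manual stash-and-replay pattern in use here (trait and member names are illustrative, not Akka's internals):

import akka.actor.{ Actor, ActorRef }

// Requests that arrive before the actor is ready are kept in a local buffer
// and re-sent to self, with the original sender, once it becomes ready.
trait RequestStash { this: Actor =>
  private var pending = Vector.empty[(ActorRef, Any)]

  def stashRequest(sender: ActorRef, request: Any): Unit =
    pending :+= (sender -> request)

  def unstashRequests(): Unit = {
    // Replaying through self lets the now-ready receive handle each
    // request exactly as if it had just arrived.
    pending.foreach { case (originalSender, request) => self.tell(request, originalSender) }
    pending = Vector.empty
  }
}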
@@ -238,10 +238,16 @@ abstract class ClusterShardingFailureSpec(config: ClusterShardingFailureSpecConf
 
       //Test the Shard passivate works after a journal failure
       shard2.tell(Passivate(PoisonPill), entity21)
 
-      awaitCond({
-        region ! Get("21")
-        expectMsgType[Value] == Value("21", 0)
-      }, message = "Passivating did not reset Value down to 0")
+      awaitAssert {
+        // Note that the order between this Get message to 21 and the above Passivate to 21 is undefined.
+        // If this Get arrives first the reply will be Value("21", 3) and then it is retried by the
+        // awaitAssert.
+        // Also note that there is no timeout parameter on below expectMsg because messages should not
+        // be lost here. They should be buffered and delivered also after Passivate completed.
+        region ! Get("21")
+        // counter reset to 0 when started again
+        expectMsg(Value("21", 0))
+      }
 
       region ! Add("21", 1)
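The test change replaces awaitCond with awaitAssert, which matters because the Get("21") can race with the Passivate sent just above. With awaitCond, a stale Value("21", 3) reply made the predicate false and was retried, but an expectMsgType timeout would throw straight through the predicate and fail the test. awaitAssert instead retries its whole block whenever it throws, which is exactly what expectMsg does on a mismatch. A minimal, self-contained illustration of that retry behavior (the Echo actor and all names here are hypothetical, not part of the spec above):

import akka.actor.{ Actor, ActorSystem, Props }
import akka.testkit.{ ImplicitSender, TestKit }

object AwaitAssertDemo {
  class Echo extends Actor {
    def receive = { case msg => sender() ! msg }
  }

  def main(args: Array[String]): Unit = {
    val system = ActorSystem("demo")
    new TestKit(system) with ImplicitSender {
      val echo = system.actorOf(Props[Echo]())
      // awaitAssert re-runs the block until it stops throwing (or times out),
      // so an AssertionError from expectMsg just triggers another attempt.
      awaitAssert {
        echo ! "ping"
        expectMsg("ping")
      }
    }
    TestKit.shutdownActorSystem(system)
  }
}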