diff --git a/akka-actor-testkit-typed/src/main/mima-filters/2.6.13.backwards.excludes/fish-for-message-pf.excludes b/akka-actor-testkit-typed/src/main/mima-filters/2.6.13.backwards.excludes/fish-for-message-pf.excludes new file mode 100644 index 0000000000..10a03c6353 --- /dev/null +++ b/akka-actor-testkit-typed/src/main/mima-filters/2.6.13.backwards.excludes/fish-for-message-pf.excludes @@ -0,0 +1,2 @@ +# 2 new methods, TestProbe is not for user extension +ProblemFilters.exclude[ReversedMissingMethodProblem]("akka.actor.testkit.typed.scaladsl.TestProbe.fishForMessagePF") \ No newline at end of file diff --git a/akka-actor-testkit-typed/src/main/scala/akka/actor/testkit/typed/internal/TestProbeImpl.scala b/akka-actor-testkit-typed/src/main/scala/akka/actor/testkit/typed/internal/TestProbeImpl.scala index 2cae162af9..c82cf70b90 100644 --- a/akka-actor-testkit-typed/src/main/scala/akka/actor/testkit/typed/internal/TestProbeImpl.scala +++ b/akka-actor-testkit-typed/src/main/scala/akka/actor/testkit/typed/internal/TestProbeImpl.scala @@ -105,6 +105,7 @@ private[akka] final class TestProbeImpl[M](name: String, system: ActorSystem[_]) case x if x eq Duration.Undefined => duration case x if !x.isFinite => throw new IllegalArgumentException("`end` cannot be infinite") case f: FiniteDuration => f - now + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } override def getRemainingOr(duration: JDuration): JDuration = @@ -266,9 +267,16 @@ private[akka] final class TestProbeImpl[M](name: String, system: ActorSystem[_]) override def fishForMessage(max: FiniteDuration, hint: String)(fisher: M => FishingOutcome): immutable.Seq[M] = fishForMessage_internal(max.dilated, hint, fisher) + override def fishForMessagePF(max: FiniteDuration, hint: String)( + fisher: PartialFunction[M, FishingOutcome]): immutable.Seq[M] = + fishForMessage(max, hint)(fisher) + override def fishForMessage(max: FiniteDuration)(fisher: M => FishingOutcome): immutable.Seq[M] = 
fishForMessage(max, "")(fisher) + override def fishForMessagePF(max: FiniteDuration)(fisher: PartialFunction[M, FishingOutcome]): immutable.Seq[M] = + fishForMessage(max)(fisher) + override def fishForMessage(max: JDuration, fisher: java.util.function.Function[M, FishingOutcome]): JList[M] = fishForMessage(max, "", fisher) diff --git a/akka-actor-testkit-typed/src/main/scala/akka/actor/testkit/typed/scaladsl/TestProbe.scala b/akka-actor-testkit-typed/src/main/scala/akka/actor/testkit/typed/scaladsl/TestProbe.scala index ebf1554747..4481506192 100644 --- a/akka-actor-testkit-typed/src/main/scala/akka/actor/testkit/typed/scaladsl/TestProbe.scala +++ b/akka-actor-testkit-typed/src/main/scala/akka/actor/testkit/typed/scaladsl/TestProbe.scala @@ -198,11 +198,21 @@ object TestProbe { */ def fishForMessage(max: FiniteDuration, hint: String)(fisher: M => FishingOutcome): immutable.Seq[M] + /** + * Same as `fishForMessage` but accepting a partial function and failing for non-matches + */ + def fishForMessagePF(max: FiniteDuration, hint: String)(fisher: PartialFunction[M, FishingOutcome]): immutable.Seq[M] + /** * Same as the other `fishForMessage` but with no hint */ def fishForMessage(max: FiniteDuration)(fisher: M => FishingOutcome): immutable.Seq[M] + /** + * Same as `fishForMessage` but with no hint, accepting a partial function and failing for non-matches + */ + def fishForMessagePF(max: FiniteDuration)(fisher: PartialFunction[M, FishingOutcome]): immutable.Seq[M] + /** * Expect the given actor to be stopped or stop within the given timeout or * throw an [[AssertionError]]. 
diff --git a/akka-actor-testkit-typed/src/test/scala/akka/actor/testkit/typed/scaladsl/BehaviorTestKitSpec.scala b/akka-actor-testkit-typed/src/test/scala/akka/actor/testkit/typed/scaladsl/BehaviorTestKitSpec.scala index e59bdfefdb..8defe8ab55 100644 --- a/akka-actor-testkit-typed/src/test/scala/akka/actor/testkit/typed/scaladsl/BehaviorTestKitSpec.scala +++ b/akka-actor-testkit-typed/src/test/scala/akka/actor/testkit/typed/scaladsl/BehaviorTestKitSpec.scala @@ -138,6 +138,8 @@ object BehaviorTestKitSpec { case IsTimerActive(key, replyTo) => replyTo ! timers.isTimerActive(key) Behaviors.same + case unexpected => + throw new RuntimeException(s"Unexpected command: $unexpected") } } .receiveSignal { diff --git a/akka-actor-testkit-typed/src/test/scala/akka/actor/testkit/typed/scaladsl/TestProbeSpec.scala b/akka-actor-testkit-typed/src/test/scala/akka/actor/testkit/typed/scaladsl/TestProbeSpec.scala index 8567964701..09e45a3111 100644 --- a/akka-actor-testkit-typed/src/test/scala/akka/actor/testkit/typed/scaladsl/TestProbeSpec.scala +++ b/akka-actor-testkit-typed/src/test/scala/akka/actor/testkit/typed/scaladsl/TestProbeSpec.scala @@ -105,7 +105,7 @@ class TestProbeSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike with probe.ref ! "two" intercept[AssertionError] { - probe.fishForMessage(shortDuration) { + probe.fishForMessagePF(shortDuration) { case "one" => FishingOutcomes.continue } } @@ -117,7 +117,7 @@ class TestProbeSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike with probe.ref ! 
"one" intercept[AssertionError] { - probe.fishForMessage(shortDuration) { + probe.fishForMessagePF(shortDuration) { case "one" => FishingOutcomes.continue } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorSelectionSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorSelectionSpec.scala index 2c30fe9295..7d0638685c 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorSelectionSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorSelectionSpec.scala @@ -51,6 +51,7 @@ class ActorSelectionSpec extends AkkaSpec with DefaultTimeout { def empty(path: String) = new EmptyLocalActorRef(sysImpl.provider, path match { case RelativeActorPath(elems) => sysImpl.lookupRoot.path / elems + case _ => throw new RuntimeException() }, system.eventStream) val idProbe = TestProbe() @@ -79,6 +80,7 @@ class ActorSelectionSpec extends AkkaSpec with DefaultTimeout { Await.result(node ? query, timeout.duration) match { case ref: ActorRef => Some(ref) case selection: ActorSelection => identify(selection) + case _ => throw new RuntimeException() } } @@ -365,8 +367,13 @@ class ActorSelectionSpec extends AkkaSpec with DefaultTimeout { val probe = TestProbe() system.actorSelection("/user/a/*").tell(Identify(1), probe.ref) - probe.receiveN(2).map { case ActorIdentity(1, r) => r }.toSet should ===( - Set[Option[ActorRef]](Some(b1), Some(b2))) + probe + .receiveN(2) + .map { + case ActorIdentity(1, r) => r + case _ => throw new IllegalArgumentException() + } + .toSet should ===(Set[Option[ActorRef]](Some(b1), Some(b2))) probe.expectNoMessage() system.actorSelection("/user/a/b1/*").tell(Identify(2), probe.ref) diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala index 62e18058ec..42f059974e 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala @@ -94,6 +94,7 @@ object 
ActorSystemSpec { TestKit.awaitCond(mbox.actor.actor != null, 1.second) mbox.actor.actor match { case FastActor(latch, _) => Await.ready(latch, 1.second) + case _ => throw new IllegalStateException() } } ret diff --git a/akka-actor-tests/src/test/scala/akka/actor/Bench.scala b/akka-actor-tests/src/test/scala/akka/actor/Bench.scala index 07f1c9a4ed..c74aadc527 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/Bench.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/Bench.scala @@ -14,7 +14,7 @@ object Chameneos { final case class MeetingCount(count: Int) extends ChameneosEvent case object Exit extends ChameneosEvent - abstract class Colour + abstract sealed class Colour case object RED extends Colour case object YELLOW extends Colour case object BLUE extends Colour diff --git a/akka-actor-tests/src/test/scala/akka/actor/SupervisorHierarchySpec.scala b/akka-actor-tests/src/test/scala/akka/actor/SupervisorHierarchySpec.scala index 5176d6c60d..bbf4e567e6 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/SupervisorHierarchySpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/SupervisorHierarchySpec.scala @@ -683,6 +683,7 @@ object SupervisorHierarchySpec { if (depth > 0) { l.underlying.children.foreach(getErrors(_, depth - 1)) } + case _ => throw new IllegalArgumentException() } } @@ -694,6 +695,7 @@ object SupervisorHierarchySpec { case _ => errors :+= target -> ErrorLog("fetched", stateCache.get(target.path).log) } if (target != hierarchy) getErrorsUp(l.getParent) + case _ => throw new IllegalArgumentException() } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala index 3866b6efa5..da4fee5e9e 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala @@ -212,6 +212,7 @@ object TypedActorSpec { override def onReceive(msg: Any, sender: ActorRef): Unit = { 
ensureContextAvailable(msg match { case "pigdog" => sender ! "dogpig" + case _ => }) } } diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/MailboxConfigSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/MailboxConfigSpec.scala index 77681b9c2f..74663ede7f 100644 --- a/akka-actor-tests/src/test/scala/akka/dispatch/MailboxConfigSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/dispatch/MailboxConfigSpec.scala @@ -122,6 +122,7 @@ abstract class MailboxSpec extends AkkaSpec with BeforeAndAfterAll with BeforeAn config match { case BoundedMailbox(capacity, _) => aQueue.remainingCapacity should ===(capacity) case UnboundedMailbox() => aQueue.remainingCapacity should ===(Int.MaxValue) + case _ => fail() } case _ => } @@ -187,6 +188,7 @@ class DefaultMailboxSpec extends MailboxSpec { def factory = { case u: UnboundedMailbox => u.create(None, None) case b: BoundedMailbox => b.create(None, None) + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } } @@ -197,6 +199,7 @@ class PriorityMailboxSpec extends MailboxSpec { case UnboundedMailbox() => new UnboundedPriorityMailbox(comparator).create(None, None) case BoundedMailbox(capacity, pushTimeOut) => new BoundedPriorityMailbox(comparator, capacity, pushTimeOut).create(None, None) + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } } @@ -207,6 +210,7 @@ class StablePriorityMailboxSpec extends MailboxSpec { case UnboundedMailbox() => new UnboundedStablePriorityMailbox(comparator).create(None, None) case BoundedMailbox(capacity, pushTimeOut) => new BoundedStablePriorityMailbox(comparator, capacity, pushTimeOut).create(None, None) + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } } @@ -216,6 +220,7 @@ class ControlAwareMailboxSpec extends MailboxSpec { case UnboundedMailbox() => new UnboundedControlAwareMailbox().create(None, None) case BoundedMailbox(capacity, pushTimeOut) => new BoundedControlAwareMailbox(capacity, 
pushTimeOut).create(None, None) + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } } @@ -258,6 +263,7 @@ class SingleConsumerOnlyMailboxSpec extends MailboxSpec { def factory = { case _: UnboundedMailbox => SingleConsumerOnlyUnboundedMailbox().create(None, None) case _ @BoundedMailbox(capacity, _) => NonBlockingBoundedMailbox(capacity).create(None, None) + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } } diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/PriorityDispatcherSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/PriorityDispatcherSpec.scala index 0ae1ed718b..47f0f13e44 100644 --- a/akka-actor-tests/src/test/scala/akka/dispatch/PriorityDispatcherSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/dispatch/PriorityDispatcherSpec.scala @@ -30,12 +30,14 @@ object PriorityDispatcherSpec { extends UnboundedPriorityMailbox(PriorityGenerator({ case i: Int => i //Reverse order case Result => Int.MaxValue + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser }: Any => Int)) class Bounded(@unused settings: ActorSystem.Settings, @unused config: Config) extends BoundedPriorityMailbox(PriorityGenerator({ case i: Int => i //Reverse order case Result => Int.MaxValue + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser }: Any => Int), 1000, 10 seconds) } diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/StablePriorityDispatcherSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/StablePriorityDispatcherSpec.scala index 9e881f558b..4ee8bdeece 100644 --- a/akka-actor-tests/src/test/scala/akka/dispatch/StablePriorityDispatcherSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/dispatch/StablePriorityDispatcherSpec.scala @@ -30,6 +30,7 @@ object StablePriorityDispatcherSpec { case i: Int if i <= 100 => i // Small integers have high priority case _: Int => 101 // Don't care for other integers case Result => Int.MaxValue 
+ case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser }: Any => Int)) class Bounded(@unused settings: ActorSystem.Settings, @unused config: Config) @@ -37,6 +38,7 @@ object StablePriorityDispatcherSpec { case i: Int if i <= 100 => i // Small integers have high priority case _: Int => 101 // Don't care for other integers case Result => Int.MaxValue + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser }: Any => Int), 1000, 10 seconds) } diff --git a/akka-actor-tests/src/test/scala/akka/io/UdpIntegrationSpec.scala b/akka-actor-tests/src/test/scala/akka/io/UdpIntegrationSpec.scala index adb0a84d54..f378f26653 100644 --- a/akka-actor-tests/src/test/scala/akka/io/UdpIntegrationSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/io/UdpIntegrationSpec.scala @@ -6,7 +6,6 @@ package akka.io import java.net.DatagramSocket import java.net.InetSocketAddress - import akka.actor.ActorRef import akka.io.Inet._ import akka.io.Udp._ @@ -64,7 +63,9 @@ class UdpIntegrationSpec extends AkkaSpec(""" } "be able to send several packet back and forth with binding" in { - val Seq(serverAddress, clientAddress) = temporaryServerAddresses(2, udp = true) + val addresses = temporaryServerAddresses(2, udp = true) + val serverAddress = addresses(0) + val clientAddress = addresses(1) val server = bindUdp(serverAddress, testActor) val client = bindUdp(clientAddress, testActor) val data = ByteString("Fly little packet!") diff --git a/akka-actor-tests/src/test/scala/akka/io/dns/internal/AsyncDnsManagerSpec.scala b/akka-actor-tests/src/test/scala/akka/io/dns/internal/AsyncDnsManagerSpec.scala index 8062f300fb..5bb71d1b7f 100644 --- a/akka-actor-tests/src/test/scala/akka/io/dns/internal/AsyncDnsManagerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/io/dns/internal/AsyncDnsManagerSpec.scala @@ -37,7 +37,10 @@ class AsyncDnsManagerSpec extends AkkaSpec(""" "support ipv6" in { dns ! 
Resolve("::1") // ::1 will short circuit the resolution - val Resolved("::1", Seq(AAAARecord("::1", Ttl.effectivelyForever, _)), Nil) = expectMsgType[Resolved] + expectMsgType[Resolved] match { + case Resolved("::1", Seq(AAAARecord("::1", Ttl.effectivelyForever, _)), Nil) => + case other => fail(other.toString) + } } "support ipv6 also using the old protocol" in { diff --git a/akka-actor-tests/src/test/scala/akka/io/dns/internal/AsyncDnsResolverSpec.scala b/akka-actor-tests/src/test/scala/akka/io/dns/internal/AsyncDnsResolverSpec.scala index 60cf29ffa0..6b6c7b10ed 100644 --- a/akka-actor-tests/src/test/scala/akka/io/dns/internal/AsyncDnsResolverSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/io/dns/internal/AsyncDnsResolverSpec.scala @@ -123,8 +123,9 @@ class AsyncDnsResolverSpec extends AkkaSpec(""" r ! Resolve(name) dnsClient1.expectNoMessage(50.millis) val answer = senderProbe.expectMsgType[Resolved] - val Seq(aaaaRecord) = answer.records.collect { - case r: AAAARecord => r + val aaaaRecord = answer.records match { + case Seq(r: AAAARecord) => r + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } aaaaRecord.name should be("1:2:3:0:0:0:0:0") aaaaRecord.ttl should be(Ttl.effectivelyForever) diff --git a/akka-actor-tests/src/test/scala/akka/routing/ConfiguredLocalRoutingSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/ConfiguredLocalRoutingSpec.scala index 7acc1e6665..daf8709dd3 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/ConfiguredLocalRoutingSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/ConfiguredLocalRoutingSpec.scala @@ -113,7 +113,9 @@ class ConfiguredLocalRoutingSpec r.underlying match { case c: RoutedActorCell => c.routerConfig case _: UnstartedCell => awaitCond(r.isStarted, 1 second, 10 millis); routerConfig(ref) + case _ => throw new IllegalArgumentException(s"Unexpected underlying cell ${r.underlying}") } + case _ => throw new IllegalArgumentException(s"Unexpected actorref $ref") 
} def collectRouteePaths(probe: TestProbe, router: ActorRef, n: Int): immutable.Seq[ActorPath] = { diff --git a/akka-actor-tests/src/test/scala/akka/serialization/AsyncSerializeSpec.scala b/akka-actor-tests/src/test/scala/akka/serialization/AsyncSerializeSpec.scala index b62925d9b8..c53e80a5c8 100644 --- a/akka-actor-tests/src/test/scala/akka/serialization/AsyncSerializeSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/serialization/AsyncSerializeSpec.scala @@ -6,11 +6,8 @@ package akka.serialization import java.util.concurrent.CompletableFuture import java.util.concurrent.CompletionStage - import scala.concurrent.Future - import com.typesafe.config.ConfigFactory - import akka.actor.ExtendedActorSystem import akka.testkit.{ AkkaSpec, EventFilter } @@ -45,6 +42,7 @@ object AsyncSerializeSpec { o match { case Message1(msg) => Future.successful(msg.getBytes) case Message2(msg) => Future.successful(msg.getBytes) + case _ => throw new IllegalArgumentException(s"Unknown type $o") } } @@ -52,6 +50,7 @@ object AsyncSerializeSpec { manifest match { case "1" => Future.successful(Message1(new String(bytes))) case "2" => Future.successful(Message2(new String(bytes))) + case _ => throw new IllegalArgumentException(s"Unknown manifest $manifest") } } @@ -60,6 +59,7 @@ object AsyncSerializeSpec { override def manifest(o: AnyRef): String = o match { case _: Message1 => "1" case _: Message2 => "2" + case _ => throw new IllegalArgumentException(s"Unknown type $o") } } @@ -69,6 +69,7 @@ object AsyncSerializeSpec { o match { case Message3(msg) => CompletableFuture.completedFuture(msg.getBytes) case Message4(msg) => CompletableFuture.completedFuture(msg.getBytes) + case _ => throw new IllegalArgumentException(s"Unknown type $o") } } @@ -76,6 +77,7 @@ object AsyncSerializeSpec { manifest match { case "1" => CompletableFuture.completedFuture(Message3(new String(bytes))) case "2" => CompletableFuture.completedFuture(Message4(new String(bytes))) + case _ => throw new 
IllegalArgumentException(s"Unknown manifest $manifest") } } @@ -84,6 +86,7 @@ object AsyncSerializeSpec { override def manifest(o: AnyRef): String = o match { case _: Message3 => "1" case _: Message4 => "2" + case _ => throw new IllegalArgumentException(s"Unknown type $o") } } diff --git a/akka-actor-tests/src/test/scala/akka/util/BoundedBlockingQueueSpec.scala b/akka-actor-tests/src/test/scala/akka/util/BoundedBlockingQueueSpec.scala index 95325fff54..4ab64a0946 100644 --- a/akka-actor-tests/src/test/scala/akka/util/BoundedBlockingQueueSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/util/BoundedBlockingQueueSpec.scala @@ -619,10 +619,10 @@ trait CustomContainsMatcher { def attemptMatch(remainingTruth: List[A], remainingSequence: List[A]): MatchResult = (remainingTruth, remainingSequence) match { - case (_, Nil) => matchResult(true) - case (Nil, _) => matchResult(false) - case (x :: xs, y :: ys) if x.equals(y) => attemptMatch(xs, ys) - case (_ :: xs, ys) => attemptMatch(xs, ys) + case (_, Nil) => matchResult(true) + case (Nil, _) => matchResult(false) + case (x :: xs, y :: ys) if x == y => attemptMatch(xs, ys) + case (_ :: xs, ys) => attemptMatch(xs, ys) } def matchResult(success: Boolean): MatchResult = diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/InterceptSpec.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/InterceptSpec.scala index b9738238fa..f6cb8c901b 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/InterceptSpec.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/InterceptSpec.scala @@ -55,6 +55,7 @@ object InterceptSpec { val wrapped = msg match { case c: Command => InternalProtocol.WrappedCommand(c) case r: ExternalResponse => InternalProtocol.WrappedExternalResponse(r) + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } target(ctx, wrapped) } @@ -63,7 +64,7 @@ object InterceptSpec { def apply(probe: ActorRef[String]): Behavior[Command] = { Behaviors - 
.intercept(() => new ProtocolTransformer)(Behaviors.receiveMessage[InternalProtocol] { + .intercept(() => new ProtocolTransformer)(Behaviors.receiveMessagePartial[InternalProtocol] { case InternalProtocol.WrappedCommand(cmd) => probe ! cmd.s Behaviors.same @@ -396,6 +397,7 @@ class InterceptSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike with signal match { case PostStop => probe.ref ! "interceptor-post-stop" + case _ => } target(ctx, signal) } diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/MailboxSelectorSpec.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/MailboxSelectorSpec.scala index 3b965266dc..655836fad2 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/MailboxSelectorSpec.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/MailboxSelectorSpec.scala @@ -38,7 +38,9 @@ class MailboxSelectorSpec extends ScalaTestWithActorTestKit(""" adapter.classicContext match { case cell: ActorCell => cell.mailbox.messageQueue + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } replyTo ! mailbox diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/SupervisionSpec.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/SupervisionSpec.scala index 3ebe5b84f9..28b3c74a76 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/SupervisionSpec.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/SupervisionSpec.scala @@ -1195,6 +1195,7 @@ class SupervisionSpec extends ScalaTestWithActorTestKit(""" case "boom" => probe.ref ! 
context.self Behaviors.stopped + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } }) context.watch(child) @@ -1420,7 +1421,7 @@ class SupervisionSpec extends ScalaTestWithActorTestKit(""" Behaviors.stopped } else { stopInSetup.set(true) - Behaviors.receiveMessage { + Behaviors.receiveMessagePartial { case "boom" => throw TestException("boom") } } diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/TerminatedSpec.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/TerminatedSpec.scala index 9a7993ce99..c87971f28e 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/TerminatedSpec.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/TerminatedSpec.scala @@ -21,10 +21,12 @@ class TerminatedSpec extends AnyWordSpec with Matchers with LogCapturing { (childFailed match { case Terminated(r) => r + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") }) shouldEqual probe.ref (childFailed match { case ChildFailed(ref, e) => (ref, e) + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") }) shouldEqual ((probe.ref, ex)) } diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/coexistence/ClassicSupervisingTypedSpec.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/coexistence/ClassicSupervisingTypedSpec.scala index e242f24a98..fd3826c219 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/coexistence/ClassicSupervisingTypedSpec.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/coexistence/ClassicSupervisingTypedSpec.scala @@ -19,7 +19,7 @@ import akka.testkit.TestProbe object ProbedBehavior { def behavior(probe: u.ActorRef): Behavior[String] = { Behaviors - .receiveMessage[String] { + .receiveMessagePartial[String] { case "throw" => throw TestException("oh dear") } .receiveSignal { diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestConsumer.scala 
b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestConsumer.scala index c705bcebac..f3091ef7a1 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestConsumer.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestConsumer.scala @@ -5,14 +5,11 @@ package akka.actor.typed.delivery import java.nio.charset.StandardCharsets - import scala.concurrent.duration.Duration import scala.concurrent.duration.FiniteDuration import scala.concurrent.duration._ - import com.typesafe.config.Config import com.typesafe.config.ConfigFactory - import akka.actor.typed.ActorRef import akka.actor.typed.Behavior import akka.actor.typed.delivery.ConsumerController.SequencedMessage @@ -21,6 +18,8 @@ import akka.actor.typed.scaladsl.ActorContext import akka.actor.typed.scaladsl.Behaviors import akka.serialization.SerializerWithStringManifest +import java.io.NotSerializableException + object TestConsumer { final case class Job(payload: String) @@ -140,6 +139,7 @@ class TestSerializer extends SerializerWithStringManifest { override def toBinary(o: AnyRef): Array[Byte] = o match { case TestConsumer.Job(payload) => payload.getBytes(StandardCharsets.UTF_8) + case unexpected => throw new NotSerializableException(s"Unexpected: $unexpected") } override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestDurableProducerQueue.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestDurableProducerQueue.scala index 22a4b24819..cd307b646d 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestDurableProducerQueue.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestDurableProducerQueue.scala @@ -55,7 +55,7 @@ class TestDurableProducerQueue[A]( private def active(state: State[A]): Behavior[Command[A]] = { stateHolder.set(state) - Behaviors.receiveMessage { + 
Behaviors.receiveMessagePartial { case cmd: LoadState[A] @unchecked => maybeFail(cmd) if (delay == Duration.Zero) cmd.replyTo ! state else context.scheduleOnce(delay, cmd.replyTo, state) diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestProducer.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestProducer.scala index 1f654b46b8..0f818e66f2 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestProducer.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestProducer.scala @@ -15,7 +15,7 @@ import akka.actor.typed.scaladsl.Behaviors object TestProducer { - trait Command + sealed trait Command final case class RequestNext(sendTo: ActorRef[TestConsumer.Job]) extends Command private case object Tick extends Command @@ -62,12 +62,10 @@ object TestProducer { } private def activeNoDelay(n: Int): Behavior[Command] = { - Behaviors.receive { (ctx, msg) => - msg match { - case RequestNext(sendTo) => - sendMessage(n, sendTo, ctx) - activeNoDelay(n + 1) - } + Behaviors.receivePartial { + case (ctx, RequestNext(sendTo)) => + sendMessage(n, sendTo, ctx) + activeNoDelay(n + 1) } } diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestProducerWithAsk.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestProducerWithAsk.scala index 987af0d4dc..dbaa8fa162 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestProducerWithAsk.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestProducerWithAsk.scala @@ -43,17 +43,15 @@ object TestProducerWithAsk { } private def idle(n: Int, replyProbe: ActorRef[Long]): Behavior[Command] = { - Behaviors.receive { (ctx, msg) => - msg match { - case Tick => Behaviors.same - case RequestNext(sendTo) => active(n + 1, replyProbe, sendTo) - case Confirmed(seqNr) => - replyProbe ! 
seqNr - Behaviors.same - case AskTimeout => - ctx.log.warn("Timeout") - Behaviors.same - } + Behaviors.receivePartial { + case (_, Tick) => Behaviors.same + case (_, RequestNext(sendTo)) => active(n + 1, replyProbe, sendTo) + case (_, Confirmed(seqNr)) => + replyProbe ! seqNr + Behaviors.same + case (ctx, AskTimeout) => + ctx.log.warn("Timeout") + Behaviors.same } } @@ -61,33 +59,32 @@ object TestProducerWithAsk { n: Int, replyProbe: ActorRef[Long], sendTo: ActorRef[ProducerController.MessageWithConfirmation[TestConsumer.Job]]): Behavior[Command] = { - Behaviors.receive { (ctx, msg) => - msg match { - case Tick => - val msg = s"msg-$n" - ctx.log.info("sent {}", msg) - ctx.ask( - sendTo, - (askReplyTo: ActorRef[Long]) => - ProducerController.MessageWithConfirmation(TestConsumer.Job(msg), askReplyTo)) { - case Success(seqNr) => Confirmed(seqNr) - case Failure(_) => AskTimeout - } - idle(n, replyProbe) + Behaviors.receivePartial { + case (ctx, Tick) => + val msg = s"msg-$n" + ctx.log.info("sent {}", msg) + ctx.ask( + sendTo, + (askReplyTo: ActorRef[Long]) => + ProducerController.MessageWithConfirmation(TestConsumer.Job(msg), askReplyTo)) { + case Success(seqNr) => Confirmed(seqNr) + case Failure(_) => AskTimeout + } + idle(n, replyProbe) - case RequestNext(_) => - throw new IllegalStateException("Unexpected RequestNext, already got one.") + case (_, RequestNext(_)) => + throw new IllegalStateException("Unexpected RequestNext, already got one.") - case Confirmed(seqNr) => - ctx.log.info("Reply Confirmed [{}]", seqNr) - replyProbe ! seqNr - Behaviors.same + case (ctx, Confirmed(seqNr)) => + ctx.log.info("Reply Confirmed [{}]", seqNr) + replyProbe ! 
seqNr + Behaviors.same - case AskTimeout => - ctx.log.warn("Timeout") - Behaviors.same - } + case (ctx, AskTimeout) => + ctx.log.warn("Timeout") + Behaviors.same } + } } diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestProducerWorkPulling.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestProducerWorkPulling.scala index b9216e4843..edddd2ce0f 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestProducerWorkPulling.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/delivery/TestProducerWorkPulling.scala @@ -33,24 +33,22 @@ object TestProducerWorkPulling { } private def idle(n: Int): Behavior[Command] = { - Behaviors.receiveMessage { + Behaviors.receiveMessagePartial { case Tick => Behaviors.same case RequestNext(sendTo) => active(n + 1, sendTo) } } private def active(n: Int, sendTo: ActorRef[TestConsumer.Job]): Behavior[Command] = { - Behaviors.receive { (ctx, msg) => - msg match { - case Tick => - val msg = s"msg-$n" - ctx.log.info("sent {}", msg) - sendTo ! TestConsumer.Job(msg) - idle(n) + Behaviors.receivePartial { + case (ctx, Tick) => + val msg = s"msg-$n" + ctx.log.info("sent {}", msg) + sendTo ! 
TestConsumer.Job(msg) + idle(n) - case RequestNext(_) => - throw new IllegalStateException("Unexpected RequestNext, already got one.") - } + case (_, RequestNext(_)) => + throw new IllegalStateException("Unexpected RequestNext, already got one.") } } diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/javadsl/AdaptationFailureSpec.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/javadsl/AdaptationFailureSpec.scala index bbd9b3bba6..dc3739f381 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/javadsl/AdaptationFailureSpec.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/javadsl/AdaptationFailureSpec.scala @@ -86,6 +86,7 @@ class AdaptationFailureSpec extends ScalaTestWithActorTestKit with AnyWordSpecLi case (_, Terminated(`ref`)) => probe.ref ! "actor-stopped" Behaviors.same + case _ => Behaviors.unhandled } }) diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/receptionist/ReceptionistApiSpec.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/receptionist/ReceptionistApiSpec.scala index 6bb9bbd32a..2cf9186949 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/receptionist/ReceptionistApiSpec.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/receptionist/ReceptionistApiSpec.scala @@ -68,7 +68,7 @@ object ReceptionistApiSpec { // to cover as much of the API as possible context.system.receptionist ! Receptionist.Register(key, context.self.narrow, context.self.narrow) - Behaviors.receiveMessage { + Behaviors.receiveMessagePartial { case key.Listing(services) => services.foreach(_ ! 
"woho") Behaviors.same diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/ActorContextAskSpec.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/ActorContextAskSpec.scala index 252d3fc0c1..1141f87872 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/ActorContextAskSpec.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/ActorContextAskSpec.scala @@ -79,12 +79,11 @@ class ActorContextAskSpec case class Ping(respondTo: ActorRef[Pong.type]) extends Protocol case object Pong extends Protocol - val pingPong = spawn(Behaviors.receive[Protocol]((_, message) => - message match { - case Ping(respondTo) => - respondTo ! Pong - Behaviors.same - })) + val pingPong = spawn(Behaviors.receiveMessagePartial[Protocol] { + case Ping(respondTo) => + respondTo ! Pong + Behaviors.same + }) val snitch = Behaviors.setup[AnyRef] { context => context.ask(pingPong, Ping) { @@ -93,7 +92,7 @@ class ActorContextAskSpec } Behaviors - .receive[AnyRef] { + .receivePartial[AnyRef] { case (_, message) => probe.ref ! message Behaviors.same diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/MessageAdapterSpec.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/MessageAdapterSpec.scala index 87c01d5c18..d27d66bc21 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/MessageAdapterSpec.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/MessageAdapterSpec.scala @@ -93,7 +93,7 @@ class MessageAdapterSpec case class Wrapped(qualifier: String, response: Response) - val pingPong = spawn(Behaviors.receiveMessage[Ping] { + val pingPong = spawn(Behaviors.receiveMessagePartial[Ping] { case Ping1(sender) => sender ! 
Pong1("hello-1") Behaviors.same @@ -131,10 +131,10 @@ class MessageAdapterSpec } "not break if wrong/unknown response type" in { - trait Ping + sealed trait Ping case class Ping1(sender: ActorRef[Pong1]) extends Ping case class Ping2(sender: ActorRef[Pong2]) extends Ping - trait Response + sealed trait Response case class Pong1(greeting: String) extends Response case class Pong2(greeting: String) extends Response diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/OnSignalSpec.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/OnSignalSpec.scala index 17923e8427..cebd22a7ee 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/OnSignalSpec.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/OnSignalSpec.scala @@ -39,7 +39,7 @@ final class OnSignalSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike }, s"$i") } Behaviors - .receiveMessage[String] { + .receiveMessagePartial[String] { case "stop" => Behaviors.stopped } diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/RoutersSpec.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/RoutersSpec.scala index d190f53cc6..4ae43a10b6 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/RoutersSpec.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/RoutersSpec.scala @@ -119,7 +119,7 @@ class RoutersSpec extends ScalaTestWithActorTestKit(""" case object BCast extends Cmd def behavior(replyTo: ActorRef[AnyRef]) = Behaviors.setup[Cmd] { ctx => - Behaviors.receiveMessage[Cmd] { + Behaviors.receiveMessagePartial[Cmd] { case ReplyWithAck | BCast => val reply = ctx.self.path replyTo ! 
reply diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/StashSpec.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/StashSpec.scala index c41c1425a3..49f71abf36 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/StashSpec.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/StashSpec.scala @@ -299,7 +299,7 @@ class UnstashingSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike with Behaviors.same } - Behaviors.receiveMessage[String] { + Behaviors.receiveMessagePartial[String] { case msg if msg.startsWith("stash") => stash.stash(msg) Behaviors.same @@ -635,14 +635,14 @@ class UnstashingSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike with stash.stash("handled") def unstashing(n: Int): Behavior[String] = - Behaviors.receiveMessage { + Behaviors.receiveMessagePartial { case "unhandled" => Behaviors.unhandled case "handled" => probe.ref ! s"handled $n" unstashing(n + 1) } - Behaviors.receiveMessage { + Behaviors.receiveMessagePartial { case "unstash" => stash.unstashAll(unstashing(1)) } @@ -665,7 +665,7 @@ class UnstashingSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike with val ref = spawn(Behaviors.withStash[String](10) { stash => stash.stash("one") - Behaviors.receiveMessage { + Behaviors.receiveMessagePartial { case "unstash" => stash.unstashAll(Behaviors.stopped) } @@ -683,7 +683,7 @@ class UnstashingSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike with stash.stash("one") stash.stash("two") - Behaviors.receiveMessage { + Behaviors.receiveMessagePartial { case "unstash" => stash.unstashAll(Behaviors.receiveMessage { unstashed => probe.ref ! 
unstashed diff --git a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/StopSpec.scala b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/StopSpec.scala index b18bc604ba..72b19c5dc8 100644 --- a/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/StopSpec.scala +++ b/akka-actor-typed-tests/src/test/scala/akka/actor/typed/scaladsl/StopSpec.scala @@ -32,7 +32,7 @@ class StopSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike with LogCa "execute the post stop" in { val probe = TestProbe[Done]() - val ref = spawn(Behaviors.receiveMessage[String] { + val ref = spawn(Behaviors.receiveMessagePartial[String] { case "stop" => Behaviors.stopped { () => probe.ref ! Done @@ -46,7 +46,7 @@ class StopSpec extends ScalaTestWithActorTestKit with AnyWordSpecLike with LogCa val probe = TestProbe[String]() val ref = spawn( Behaviors - .receiveMessage[String] { + .receiveMessagePartial[String] { case "stop" => Behaviors.stopped { () => probe.ref ! "callback" diff --git a/akka-actor-typed-tests/src/test/scala/docs/akka/typed/Aggregator.scala b/akka-actor-typed-tests/src/test/scala/docs/akka/typed/Aggregator.scala index 1810b439e6..de40c1ad3b 100644 --- a/akka-actor-typed-tests/src/test/scala/docs/akka/typed/Aggregator.scala +++ b/akka-actor-typed-tests/src/test/scala/docs/akka/typed/Aggregator.scala @@ -32,8 +32,8 @@ object Aggregator { def collecting(replies: immutable.IndexedSeq[Reply]): Behavior[Command] = { Behaviors.receiveMessage { - case WrappedReply(reply: Reply) => - val newReplies = replies :+ reply + case WrappedReply(reply) => + val newReplies = replies :+ reply.asInstanceOf[Reply] if (newReplies.size == expectedReplies) { val result = aggregateReplies(newReplies) replyTo ! 
result diff --git a/akka-actor-typed-tests/src/test/scala/docs/akka/typed/AggregatorSpec.scala b/akka-actor-typed-tests/src/test/scala/docs/akka/typed/AggregatorSpec.scala index ba08a6adc2..4df6cd46c7 100644 --- a/akka-actor-typed-tests/src/test/scala/docs/akka/typed/AggregatorSpec.scala +++ b/akka-actor-typed-tests/src/test/scala/docs/akka/typed/AggregatorSpec.scala @@ -51,6 +51,7 @@ object AggregatorSpec { .map { case Hotel1.Quote(hotel, price) => Quote(hotel, price) case Hotel2.Price(hotel, price) => Quote(hotel, price) + case unknown => throw new RuntimeException(s"Unknown reply $unknown") } .sortBy(_.price) .toList), diff --git a/akka-actor-typed-tests/src/test/scala/docs/akka/typed/IntroSpec.scala b/akka-actor-typed-tests/src/test/scala/docs/akka/typed/IntroSpec.scala index 118d3a0668..8abcfea1fa 100644 --- a/akka-actor-typed-tests/src/test/scala/docs/akka/typed/IntroSpec.scala +++ b/akka-actor-typed-tests/src/test/scala/docs/akka/typed/IntroSpec.scala @@ -145,7 +145,7 @@ object IntroSpec { final case class SessionDenied(reason: String) extends SessionEvent final case class MessagePosted(screenName: String, message: String) extends SessionEvent - trait SessionCommand + sealed trait SessionCommand final case class PostMessage(message: String) extends SessionCommand private final case class NotifyClient(message: MessagePosted) extends SessionCommand //#chatroom-protocol diff --git a/akka-actor-typed-tests/src/test/scala/docs/akka/typed/OOIntroSpec.scala b/akka-actor-typed-tests/src/test/scala/docs/akka/typed/OOIntroSpec.scala index 98184eefa4..ca453cd78a 100644 --- a/akka-actor-typed-tests/src/test/scala/docs/akka/typed/OOIntroSpec.scala +++ b/akka-actor-typed-tests/src/test/scala/docs/akka/typed/OOIntroSpec.scala @@ -39,7 +39,7 @@ object OOIntroSpec { final case class SessionDenied(reason: String) extends SessionEvent final case class MessagePosted(screenName: String, message: String) extends SessionEvent - trait SessionCommand + sealed trait SessionCommand 
final case class PostMessage(message: String) extends SessionCommand private final case class NotifyClient(message: MessagePosted) extends SessionCommand //#chatroom-protocol @@ -84,7 +84,7 @@ object OOIntroSpec { client: ActorRef[SessionEvent]) extends AbstractBehavior[SessionCommand](context) { - override def onMessage(msg: SessionCommand): Behavior[SessionCommand] = { + override def onMessage(msg: SessionCommand): Behavior[SessionCommand] = msg match { case PostMessage(message) => // from client, publish to others via the room @@ -95,7 +95,6 @@ object OOIntroSpec { client ! message Behaviors.same } - } } //#chatroom-protocol } diff --git a/akka-actor-typed-tests/src/test/scala/docs/akka/typed/StyleGuideDocExamples.scala b/akka-actor-typed-tests/src/test/scala/docs/akka/typed/StyleGuideDocExamples.scala index b89fa6ba3d..47b0941cc4 100644 --- a/akka-actor-typed-tests/src/test/scala/docs/akka/typed/StyleGuideDocExamples.scala +++ b/akka-actor-typed-tests/src/test/scala/docs/akka/typed/StyleGuideDocExamples.scala @@ -471,7 +471,8 @@ object StyleGuideDocExamples { private def counterWithGuard(remaining: Int): Behavior[Command] = { //#pattern-match-guard // no exhaustiveness check because of guard condition - Behaviors.receiveMessage { + // FIXME not true anymore since Scala 2.13.5 + Behaviors.receiveMessagePartial { case Down if remaining == 1 => notifyWhenZero.tell(Done) zero diff --git a/akka-actor-typed-tests/src/test/scala/docs/akka/typed/TailChopping.scala b/akka-actor-typed-tests/src/test/scala/docs/akka/typed/TailChopping.scala index bd0c9c23ec..e08f6490ad 100644 --- a/akka-actor-typed-tests/src/test/scala/docs/akka/typed/TailChopping.scala +++ b/akka-actor-typed-tests/src/test/scala/docs/akka/typed/TailChopping.scala @@ -31,8 +31,8 @@ object TailChopping { def waiting(requestCount: Int): Behavior[Command] = { Behaviors.receiveMessage { - case WrappedReply(reply: Reply) => - replyTo ! reply + case WrappedReply(reply) => + replyTo ! 
reply.asInstanceOf[Reply] Behaviors.stopped case RequestTimeout => diff --git a/akka-actor-typed/src/main/mima-filters/2.6.13.backwards.excludes/scala-2.13.5-upgrade.excludes b/akka-actor-typed/src/main/mima-filters/2.6.13.backwards.excludes/scala-2.13.5-upgrade.excludes new file mode 100644 index 0000000000..a5efbeb7a5 --- /dev/null +++ b/akka-actor-typed/src/main/mima-filters/2.6.13.backwards.excludes/scala-2.13.5-upgrade.excludes @@ -0,0 +1,2 @@ +# Java API with classtag so have likely not been used from Java +ProblemFilters.exclude[DirectMissingMethodProblem]("akka.actor.typed.delivery.WorkPullingProducerController.apply") \ No newline at end of file diff --git a/akka-actor-typed/src/main/scala/akka/actor/typed/delivery/WorkPullingProducerController.scala b/akka-actor-typed/src/main/scala/akka/actor/typed/delivery/WorkPullingProducerController.scala index 00775144e6..35b1dfa73f 100644 --- a/akka-actor-typed/src/main/scala/akka/actor/typed/delivery/WorkPullingProducerController.scala +++ b/akka-actor-typed/src/main/scala/akka/actor/typed/delivery/WorkPullingProducerController.scala @@ -237,7 +237,7 @@ object WorkPullingProducerController { /** * Java API */ - def apply[A: ClassTag]( + def apply[A]( messageClass: Class[A], producerId: String, workerServiceKey: ServiceKey[ConsumerController.Command[A]], diff --git a/akka-actor-typed/src/main/scala/akka/actor/typed/delivery/internal/ProducerControllerImpl.scala b/akka-actor-typed/src/main/scala/akka/actor/typed/delivery/internal/ProducerControllerImpl.scala index 92ca9813b9..a3a66c17b6 100644 --- a/akka-actor-typed/src/main/scala/akka/actor/typed/delivery/internal/ProducerControllerImpl.scala +++ b/akka-actor-typed/src/main/scala/akka/actor/typed/delivery/internal/ProducerControllerImpl.scala @@ -193,7 +193,7 @@ object ProducerControllerImpl { .narrow } - private def askLoadState[A: ClassTag]( + private def askLoadState[A]( context: ActorContext[InternalCommand], durableQueueBehavior: 
Option[Behavior[DurableProducerQueue.Command[A]]], settings: ProducerController.Settings): Option[ActorRef[DurableProducerQueue.Command[A]]] = { @@ -206,7 +206,7 @@ object ProducerControllerImpl { } } - private def askLoadState[A: ClassTag]( + private def askLoadState[A]( context: ActorContext[InternalCommand], durableQueue: Option[ActorRef[DurableProducerQueue.Command[A]]], settings: ProducerController.Settings, @@ -222,11 +222,11 @@ object ProducerControllerImpl { } } - private def createInitialState[A: ClassTag](hasDurableQueue: Boolean) = { + private def createInitialState[A](hasDurableQueue: Boolean) = { if (hasDurableQueue) None else Some(DurableProducerQueue.State.empty[A]) } - private def createState[A: ClassTag]( + private def createState[A]( self: ActorRef[InternalCommand], producerId: String, send: SequencedMessage[A] => Unit, @@ -825,6 +825,9 @@ private class ProducerControllerImpl[A: ClassTag]( case DurableQueueTerminated => throw new IllegalStateException("DurableQueue was unexpectedly terminated.") + + case unexpected => + throw new RuntimeException(s"Unexpected message: $unexpected") } } diff --git a/akka-actor-typed/src/main/scala/akka/actor/typed/delivery/internal/WorkPullingProducerControllerImpl.scala b/akka-actor-typed/src/main/scala/akka/actor/typed/delivery/internal/WorkPullingProducerControllerImpl.scala index a4a6a477a3..13d0121f4b 100644 --- a/akka-actor-typed/src/main/scala/akka/actor/typed/delivery/internal/WorkPullingProducerControllerImpl.scala +++ b/akka-actor-typed/src/main/scala/akka/actor/typed/delivery/internal/WorkPullingProducerControllerImpl.scala @@ -140,7 +140,7 @@ import akka.util.Timeout .narrow } - private def createInitialState[A: ClassTag](hasDurableQueue: Boolean) = { + private def createInitialState[A](hasDurableQueue: Boolean) = { if (hasDurableQueue) None else Some(DurableProducerQueue.State.empty[A]) } @@ -158,6 +158,7 @@ import akka.util.Timeout s.unconfirmed.foreach { case 
DurableProducerQueue.MessageSent(oldSeqNr, msg, _, oldConfirmationQualifier, _) => context.self ! ResendDurableMsg(msg, oldConfirmationQualifier, oldSeqNr) + case _ => // please compiler exhaustiveness check } val msgAdapter: ActorRef[A] = context.messageAdapter(msg => Msg(msg, wasStashed = false, replyTo = None)) @@ -220,12 +221,12 @@ import akka.util.Timeout } } - private def checkStashFull[A: ClassTag](stashBuffer: StashBuffer[InternalCommand]): Unit = { + private def checkStashFull[A](stashBuffer: StashBuffer[InternalCommand]): Unit = { if (stashBuffer.isFull) throw new IllegalArgumentException(s"Buffer is full, size [${stashBuffer.size}].") } - private def askLoadState[A: ClassTag]( + private def askLoadState[A]( context: ActorContext[InternalCommand], durableQueueBehavior: Option[Behavior[DurableProducerQueue.Command[A]]], settings: WorkPullingProducerController.Settings): Option[ActorRef[DurableProducerQueue.Command[A]]] = { @@ -238,7 +239,7 @@ import akka.util.Timeout } } - private def askLoadState[A: ClassTag]( + private def askLoadState[A]( context: ActorContext[InternalCommand], durableQueue: Option[ActorRef[DurableProducerQueue.Command[A]]], settings: WorkPullingProducerController.Settings, @@ -658,6 +659,8 @@ private class WorkPullingProducerControllerImpl[A: ClassTag]( case DurableQueueTerminated => throw new IllegalStateException("DurableQueue was unexpectedly terminated.") + case unexpected => + throw new RuntimeException(s"Unexpected message: $unexpected") } } diff --git a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/ActorContextImpl.scala b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/ActorContextImpl.scala index 4b6f20b191..b8ed3560f7 100644 --- a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/ActorContextImpl.scala +++ b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/ActorContextImpl.scala @@ -105,7 +105,7 @@ import scala.util.Success // context-shared timer needed to allow for nested timer usage 
def timer: TimerSchedulerCrossDslSupport[T] = _timer match { case OptionVal.Some(timer) => timer - case OptionVal.None => + case _ => checkCurrentActorThread() val timer = mkTimer() _timer = OptionVal.Some(timer) @@ -152,7 +152,7 @@ import scala.util.Success // lazy init of logging setup _logging match { case OptionVal.Some(l) => l - case OptionVal.None => + case _ => val logClass = LoggerClass.detectLoggerClassFromStack(classOf[Behavior[_]]) val logger = LoggerFactory.getLogger(logClass.getName) val l = LoggingContext(logger, classicActorContext.props.deploy.tags, this) @@ -216,6 +216,7 @@ import scala.util.Success case Success(StatusReply.Success(t: Res)) => mapResponse(Success(t)) case Success(StatusReply.Error(why)) => mapResponse(Failure(why)) case fail: Failure[_] => mapResponse(fail.asInstanceOf[Failure[Res]]) + case _ => throw new RuntimeException() // won't happen, compiler exhaustiveness check pleaser } // Java API impl @@ -247,6 +248,7 @@ import scala.util.Success case StatusReply.Success(value: Res) => applyToResponse(value, null) case StatusReply.Error(why) => applyToResponse(null.asInstanceOf[Res], why) case null => applyToResponse(null.asInstanceOf[Res], failure) + case _ => throw new RuntimeException() // won't happen, compiler exhaustiveness check pleaser }) } @@ -295,7 +297,7 @@ import scala.util.Success _messageAdapters.filterNot { case (cls, _) => cls == boxedMessageClass } val ref = messageAdapterRef match { case OptionVal.Some(ref) => ref.asInstanceOf[ActorRef[U]] - case OptionVal.None => + case _ => // AdaptMessage is not really a T, but that is erased val ref = internalSpawnMessageAdapter[Any](msg => AdaptWithRegisteredMessageAdapter(msg).asInstanceOf[T], "adapter") @@ -315,12 +317,12 @@ import scala.util.Success */ @InternalApi private[akka] def setCurrentActorThread(): Unit = { _currentActorThread match { - case OptionVal.None => - _currentActorThread = OptionVal.Some(Thread.currentThread()) case OptionVal.Some(t) => throw new 
IllegalStateException( s"Invalid access by thread from the outside of $self. " + s"Current message is processed by $t, but also accessed from ${Thread.currentThread()}.") + case _ => + _currentActorThread = OptionVal.Some(Thread.currentThread()) } } @@ -343,7 +345,7 @@ import scala.util.Success s"Unsupported access to ActorContext operation from the outside of $self. " + s"Current message is processed by $t, but ActorContext was called from $callerThread.") } - case OptionVal.None => + case _ => throw new UnsupportedOperationException( s"Unsupported access to ActorContext from the outside of $self. " + s"No message is currently processed by the actor, but ActorContext was called from $callerThread.") diff --git a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/BehaviorImpl.scala b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/BehaviorImpl.scala index b65c50c430..8914a1e033 100644 --- a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/BehaviorImpl.scala +++ b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/BehaviorImpl.scala @@ -97,7 +97,7 @@ private[akka] object BehaviorTags { def onPostStop(ctx: TypedActorContext[T]): Unit = { postStop match { case OptionVal.Some(callback) => callback(ctx) - case OptionVal.None => + case _ => } } diff --git a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/ExtensionsImpl.scala b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/ExtensionsImpl.scala index add02d8a64..cffe966811 100644 --- a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/ExtensionsImpl.scala +++ b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/ExtensionsImpl.scala @@ -91,8 +91,9 @@ private[akka] trait ExtensionsImpl extends Extensions { self: ActorSystem[_] wit } .getOrElse(ext.createExtension(self)) instance match { - case null => throw new IllegalStateException(s"Extension instance created as 'null' for extension [$ext]") - case instance: T @unchecked => + case null => throw new 
IllegalStateException(s"Extension instance created as 'null' for extension [$ext]") + case nonNull => + val instance = nonNull.asInstanceOf[T] // Replace our in process signal with the initialized extension extensions.replace(ext, inProcessOfRegistration, instance) instance diff --git a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/StashBufferImpl.scala b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/StashBufferImpl.scala index bbac343653..83731b64ae 100644 --- a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/StashBufferImpl.scala +++ b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/StashBufferImpl.scala @@ -219,8 +219,8 @@ import java.util.function.Predicate throw new IllegalArgumentException("Cannot unstash with unhandled as starting behavior") else if (started == BehaviorImpl.same) { currentBehaviorWhenUnstashInProgress match { - case OptionVal.None => ctx.asScala.currentBehavior case OptionVal.Some(c) => c + case _ => ctx.asScala.currentBehavior } } else started diff --git a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/Supervision.scala b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/Supervision.scala index ca3bf00f81..0c1d1c08ef 100644 --- a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/Supervision.scala +++ b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/Supervision.scala @@ -193,14 +193,12 @@ private class RestartSupervisor[T, Thr <: Throwable: ClassTag](initial: Behavior private var deadline: OptionVal[Deadline] = OptionVal.None private def deadlineHasTimeLeft: Boolean = deadline match { - case OptionVal.None => true case OptionVal.Some(d) => d.hasTimeLeft() + case _ => true } override def aroundSignal(ctx: TypedActorContext[Any], signal: Signal, target: SignalTarget[T]): Behavior[T] = { restartingInProgress match { - case OptionVal.None => - super.aroundSignal(ctx, signal, target) case OptionVal.Some((stashBuffer, children)) => signal match { case Terminated(ref) 
if strategy.stopChildren && children(ref) => @@ -219,6 +217,8 @@ private class RestartSupervisor[T, Thr <: Throwable: ClassTag](initial: Behavior stashBuffer.stash(signal) Behaviors.same } + case _ => + super.aroundSignal(ctx, signal, target) } } @@ -235,7 +235,7 @@ private class RestartSupervisor[T, Thr <: Throwable: ClassTag](initial: Behavior } else restartCompleted(ctx) - case OptionVal.None => + case _ => throw new IllegalStateException("Unexpected ScheduledRestart when restart not in progress") } } else { @@ -254,18 +254,19 @@ private class RestartSupervisor[T, Thr <: Throwable: ClassTag](initial: Behavior target(ctx, msg.asInstanceOf[T]) } - case m: T @unchecked => + case msg => + val m = msg.asInstanceOf[T] restartingInProgress match { - case OptionVal.None => - try { - target(ctx, m) - } catch handleReceiveException(ctx, target) case OptionVal.Some((stashBuffer, _)) => if (stashBuffer.isFull) dropped(ctx, m) else stashBuffer.stash(m) Behaviors.same + case _ => + try { + target(ctx, m) + } catch handleReceiveException(ctx, target) } } } @@ -371,10 +372,10 @@ private class RestartSupervisor[T, Thr <: Throwable: ClassTag](initial: Behavior try { val newBehavior = Behavior.validateAsInitial(Behavior.start(initial, ctx.asInstanceOf[TypedActorContext[T]])) val nextBehavior = restartingInProgress match { - case OptionVal.None => newBehavior case OptionVal.Some((stashBuffer, _)) => restartingInProgress = OptionVal.None stashBuffer.unstashAll(newBehavior.unsafeCast) + case _ => newBehavior } nextBehavior.narrow } catch handleException(ctx, signalRestart = { diff --git a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/adapter/ActorAdapter.scala b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/adapter/ActorAdapter.scala index 95b240b046..6d42097fcb 100644 --- a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/adapter/ActorAdapter.scala +++ b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/adapter/ActorAdapter.scala @@ -102,8 
+102,9 @@ import akka.util.OptionVal adaptAndHandle(msg) case signal: Signal => handleSignal(signal) - case msg: T @unchecked => - handleMessage(msg) + case msg => + val t = msg.asInstanceOf[T] + handleMessage(t) } } finally { ctx.clearCurrentActorThread() @@ -119,9 +120,9 @@ import akka.util.OptionVal case timerMsg: TimerMsg => //we can only get this kind of message if the timer is of this concrete class c.timer.asInstanceOf[TimerSchedulerImpl[T]].interceptTimerMsg(ctx.log, timerMsg) match { - case OptionVal.None => // means TimerMsg not applicable, discard case OptionVal.Some(m) => next(Behavior.interpretMessage(behavior, c, m), m) + case _ => // means TimerMsg not applicable, discard } case _ => next(Behavior.interpretMessage(behavior, c, msg), msg) diff --git a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/adapter/ActorSystemAdapter.scala b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/adapter/ActorSystemAdapter.scala index c66e9986c7..724ce348da 100644 --- a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/adapter/ActorSystemAdapter.scala +++ b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/adapter/ActorSystemAdapter.scala @@ -88,6 +88,7 @@ import akka.annotation.InternalApi case DispatcherDefault(_) => system.dispatcher case DispatcherFromConfig(str, _) => system.dispatchers.lookup(str) case DispatcherSameAsParent(_) => system.dispatcher + case unknown => throw new RuntimeException(s"Unsupported dispatcher selector: $unknown") } override def shutdown(): Unit = () // there was no shutdown in classic Akka } diff --git a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/adapter/PropsAdapter.scala b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/adapter/PropsAdapter.scala index 58d4eda81d..20c6a08fa2 100644 --- a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/adapter/PropsAdapter.scala +++ b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/adapter/PropsAdapter.scala @@ -25,6 +25,7 
@@ import akka.dispatch.Mailboxes case _: DispatcherDefault => classicProps case DispatcherFromConfig(name, _) => classicProps.withDispatcher(name) case _: DispatcherSameAsParent => classicProps.withDispatcher(Deploy.DispatcherSameAsParent) + case unknown => throw new RuntimeException(s"Unsupported dispatcher selector: $unknown") }).withDeploy(Deploy.local) // disallow remote deployment for typed actors val mailboxProps = props.firstOrElse[MailboxSelector](MailboxSelector.default()) match { @@ -34,6 +35,7 @@ import akka.dispatch.Mailboxes dispatcherProps.withMailbox(s"${Mailboxes.BoundedCapacityPrefix}$capacity") case MailboxFromConfigSelector(path, _) => dispatcherProps.withMailbox(path) + case unknown => throw new RuntimeException(s"Unsupported mailbox selector: $unknown") } val localDeploy = mailboxProps.withDeploy(Deploy.local) // disallow remote deployment for typed actors diff --git a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/pubsub/TopicImpl.scala b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/pubsub/TopicImpl.scala index 3fc6a7ac07..7d50b6a133 100644 --- a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/pubsub/TopicImpl.scala +++ b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/pubsub/TopicImpl.scala @@ -69,6 +69,7 @@ private[akka] final class TopicImpl[T](topicName: String, context: ActorContext[ private val receptionist = context.system.receptionist private val receptionistAdapter = context.messageAdapter[Receptionist.Listing] { case topicServiceKey.Listing(topics) => TopicInstancesUpdated(topics) + case _ => throw new IllegalArgumentException() // FIXME exhaustiveness check fails on receptionist listing match } receptionist ! Receptionist.Subscribe(topicServiceKey, receptionistAdapter) @@ -139,5 +140,9 @@ private[akka] final class TopicImpl[T](topicName: String, context: ActorContext[ case GetTopicStats(replyTo) => replyTo ! 
TopicStats(localSubscribers.size, topicInstances.size) this + + case other => + // can't do exhaustiveness check correctly because of protocol internal/public design + throw new IllegalArgumentException(s"Unexpected command type ${other.getClass}") } } diff --git a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/receptionist/LocalReceptionist.scala b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/receptionist/LocalReceptionist.scala index b0ce2ef89e..2936055cae 100644 --- a/akka-actor-typed/src/main/scala/akka/actor/typed/internal/receptionist/LocalReceptionist.scala +++ b/akka-actor-typed/src/main/scala/akka/actor/typed/internal/receptionist/LocalReceptionist.scala @@ -218,6 +218,10 @@ private[akka] object LocalReceptionist extends ReceptionistBehaviorProvider { replyWithListing(key, subscriber) behavior(state.subscriberAdded(key)(subscriber)) + + case other => + // compiler does not know about our division into public and internal commands + throw new IllegalArgumentException(s"Unexpected command type ${other.getClass}") } def onInternal(ctx: ActorContext[Any], cmd: InternalCommand): Behavior[Any] = cmd match { diff --git a/akka-actor-typed/src/main/scala/akka/actor/typed/javadsl/AbstractBehavior.scala b/akka-actor-typed/src/main/scala/akka/actor/typed/javadsl/AbstractBehavior.scala index 7910e9555a..7d06ad4768 100644 --- a/akka-actor-typed/src/main/scala/akka/actor/typed/javadsl/AbstractBehavior.scala +++ b/akka-actor-typed/src/main/scala/akka/actor/typed/javadsl/AbstractBehavior.scala @@ -41,11 +41,11 @@ abstract class AbstractBehavior[T](context: ActorContext[T]) extends ExtensibleB private var _receive: OptionVal[Receive[T]] = OptionVal.None private def receive: Receive[T] = _receive match { - case OptionVal.None => + case OptionVal.Some(r) => r + case _ => val receive = createReceive _receive = OptionVal.Some(receive) receive - case OptionVal.Some(r) => r } protected def getContext: ActorContext[T] = context diff --git 
a/akka-actor-typed/src/main/scala/akka/actor/typed/javadsl/BehaviorBuilder.scala b/akka-actor-typed/src/main/scala/akka/actor/typed/javadsl/BehaviorBuilder.scala index e3c1fac520..66c0e38edf 100644 --- a/akka-actor-typed/src/main/scala/akka/actor/typed/javadsl/BehaviorBuilder.scala +++ b/akka-actor-typed/src/main/scala/akka/actor/typed/javadsl/BehaviorBuilder.scala @@ -82,7 +82,7 @@ final class BehaviorBuilder[T] private (messageHandlers: List[Case[T, T]], signa def onMessageEquals(msg: T, handler: Creator[Behavior[T]]): BehaviorBuilder[T] = withMessage[T]( OptionVal.Some(msg.getClass.asInstanceOf[Class[T]]), - OptionVal.Some(_.equals(msg)), + OptionVal.Some(_ == msg), (_: T) => handler.create()) /** diff --git a/akka-actor/src/main/scala-2.13/akka/util/ByteIterator.scala b/akka-actor/src/main/scala-2.13/akka/util/ByteIterator.scala index bbc6ec8aa3..4f19c90030 100644 --- a/akka-actor/src/main/scala-2.13/akka/util/ByteIterator.scala +++ b/akka-actor/src/main/scala-2.13/akka/util/ByteIterator.scala @@ -5,15 +5,15 @@ package akka.util import java.nio.{ ByteBuffer, ByteOrder } - import scala.annotation.tailrec import scala.collection.BufferedIterator import scala.collection.LinearSeq import scala.collection.mutable.ListBuffer import scala.reflect.ClassTag - import akka.util.Collections.EmptyImmutableSeq +import scala.annotation.nowarn + object ByteIterator { object ByteArrayIterator { @@ -48,24 +48,25 @@ object ByteIterator { final override def size: Int = { val l = len; clear(); l } final override def ++(that: IterableOnce[Byte]): ByteIterator = that match { - case that: ByteIterator => - if (that.isEmpty) this - else if (this.isEmpty) that + case byteIterator: ByteIterator => + if (byteIterator.isEmpty) this + else if (this.isEmpty) byteIterator else - that match { - case that: ByteArrayIterator => - if ((this.array eq that.array) && (this.until == that.from)) { - this.until = that.until - that.clear() + byteIterator match { + case bai: ByteArrayIterator => + if 
((this.array eq bai.array) && (this.until == bai.from)) { + this.until = bai.until + bai.clear() this } else { - val result = MultiByteArrayIterator(List(this, that)) + val result = MultiByteArrayIterator(List(this, bai)) this.clear() result } - case that: MultiByteArrayIterator => this ++: that + case mbai: MultiByteArrayIterator => this ++: mbai + case bi => super.++(bi) } - case _ => super.++(that) + case io => super.++(io) } final override def clone: ByteArrayIterator = new ByteArrayIterator(array, from, until) @@ -99,9 +100,11 @@ object ByteIterator { this } + @nowarn("msg=deprecated") override def copyToArray[B >: Byte](xs: Array[B], start: Int): Int = this.copyToArray(xs, start, xs.length) + @nowarn("msg=deprecated") override def copyToArray[B >: Byte](xs: Array[B]): Int = this.copyToArray(xs, 0, xs.length) @@ -234,22 +237,23 @@ object ByteIterator { } final override def ++(that: IterableOnce[Byte]): ByteIterator = that match { - case that: ByteIterator => - if (that.isEmpty) this - else if (this.isEmpty) that + case bi: ByteIterator => + if (bi.isEmpty) this + else if (this.isEmpty) bi else { - that match { - case that: ByteArrayIterator => - iterators = this.iterators :+ that - that.clear() + bi match { + case bai: ByteArrayIterator => + iterators = this.iterators :+ bai + bai.clear() this - case that: MultiByteArrayIterator => - iterators = this.iterators ++ that.iterators - that.clear() + case mbai: MultiByteArrayIterator => + iterators = this.iterators ++ mbai.iterators + mbai.clear() this + case bi => super.++(bi) } } - case _ => super.++(that) + case io => super.++(io) } final override def clone: MultiByteArrayIterator = { diff --git a/akka-actor/src/main/scala-2.13/akka/util/ByteString.scala b/akka-actor/src/main/scala-2.13/akka/util/ByteString.scala index 852e837079..853bfd39dd 100644 --- a/akka-actor/src/main/scala-2.13/akka/util/ByteString.scala +++ b/akka-actor/src/main/scala-2.13/akka/util/ByteString.scala @@ -833,6 +833,7 @@ sealed abstract 
class ByteString array } + @nowarn("msg=deprecated") final override def copyToArray[B >: Byte](xs: Array[B], start: Int): Int = { // super uses byteiterator copyToArray(xs, start, size.min(xs.size)) diff --git a/akka-actor/src/main/scala/akka/actor/AbstractProps.scala b/akka-actor/src/main/scala/akka/actor/AbstractProps.scala index 7147827f27..00b5e6eddc 100644 --- a/akka-actor/src/main/scala/akka/actor/AbstractProps.scala +++ b/akka-actor/src/main/scala/akka/actor/AbstractProps.scala @@ -65,6 +65,8 @@ private[akka] trait AbstractProps { case c: Class[_] if c == coc => throw new IllegalArgumentException( "erased Creator types (e.g. lambdas) are unsupported, use Props.create(actorClass, creator) instead") + case unexpected => + throw new IllegalArgumentException(s"unexpected type: $unexpected") } create(classOf[CreatorConsumer], actorClass, creator) } diff --git a/akka-actor/src/main/scala/akka/actor/ActorCell.scala b/akka-actor/src/main/scala/akka/actor/ActorCell.scala index 69b5f1d364..5b485d2d37 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorCell.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorCell.scala @@ -565,6 +565,8 @@ private[akka] class ActorCell( case PoisonPill => self.stop() case sel: ActorSelectionMessage => receiveSelection(sel) case Identify(messageId) => sender() ! 
ActorIdentity(messageId, Some(self)) + case unexpected => + throw new RuntimeException(s"Unexpected message for autoreceive: $unexpected") // for exhaustiveness check, will not happen } } diff --git a/akka-actor/src/main/scala/akka/actor/ActorRef.scala b/akka-actor/src/main/scala/akka/actor/ActorRef.scala index 243aa99865..bf524db443 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorRef.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorRef.scala @@ -241,6 +241,8 @@ private[akka] trait RepointableRef extends ActorRefScope { case i: InternalActorRef => (i.isLocal && i.isInstanceOf[PromiseActorRef]) || (!i.isLocal && i.path.elements.head == "temp") + case unexpected => + throw new IllegalArgumentException(s"ActorRef is not internal: $unexpected") // will not happen, for exhaustiveness check } } @@ -924,8 +926,9 @@ private[akka] class VirtualPathContainer( (oldWatching, wBy) - case OptionVal.None => + case _ => (ActorCell.emptyActorRefSet, ActorCell.emptyActorRefSet) + } } @@ -949,14 +952,14 @@ private[akka] class VirtualPathContainer( val toNotify = this.synchronized { // cleanup watchedBy since we know they are dead _watchedBy match { - case OptionVal.None => - // terminated - ActorCell.emptyActorRefSet case OptionVal.Some(watchedBy) => maintainAddressTerminatedSubscription(OptionVal.None) { _watchedBy = OptionVal.Some(watchedBy.filterNot(_.path.address == address)) } watching + case _ => + // terminated + ActorCell.emptyActorRefSet } } @@ -978,8 +981,6 @@ private[akka] class VirtualPathContainer( private def addWatcher(watchee: ActorRef, watcher: ActorRef): Unit = { val selfTerminated = this.synchronized { _watchedBy match { - case OptionVal.None => - true case OptionVal.Some(watchedBy) => val watcheeSelf = watchee == this val watcherSelf = watcher == this @@ -1001,6 +1002,8 @@ private[akka] class VirtualPathContainer( Logging.Error(path.toString, classOf[FunctionRef], s"BUG: illegal Watch($watchee,$watcher) for $this")) } false + case _ => + true } } // 
outside of synchronized block @@ -1012,7 +1015,6 @@ private[akka] class VirtualPathContainer( private def remWatcher(watchee: ActorRef, watcher: ActorRef): Unit = this.synchronized { _watchedBy match { - case OptionVal.None => // do nothing... case OptionVal.Some(watchedBy) => val watcheeSelf = watchee == this val watcherSelf = watcher == this @@ -1033,6 +1035,8 @@ private[akka] class VirtualPathContainer( publish( Logging.Error(path.toString, classOf[FunctionRef], s"BUG: illegal Unwatch($watchee,$watcher) for $this")) } + + case _ => // do nothing... } } @@ -1097,7 +1101,7 @@ private[akka] class VirtualPathContainer( def watchedByOrEmpty: Set[ActorRef] = _watchedBy match { case OptionVal.Some(watchedBy) => watchedBy - case OptionVal.None => ActorCell.emptyActorRefSet + case _ => ActorCell.emptyActorRefSet } change match { diff --git a/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala b/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala index 4ca4fcaade..bbb64f1d02 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala @@ -756,7 +756,7 @@ private[akka] class LocalActorRefProvider private[akka] ( Serialization.Information(getDefaultAddress, system) serializationInformationCache match { case OptionVal.Some(info) => info - case OptionVal.None => + case _ => if (system eq null) throw new IllegalStateException("Too early access of serializationInformation") else { @@ -773,7 +773,7 @@ private[akka] class LocalActorRefProvider private[akka] ( override private[akka] def addressString: String = { _addressString match { case OptionVal.Some(addr) => addr - case OptionVal.None => + case _ => val addr = getDefaultAddress.toString _addressString = OptionVal.Some(addr) addr diff --git a/akka-actor/src/main/scala/akka/actor/ReflectiveDynamicAccess.scala b/akka-actor/src/main/scala/akka/actor/ReflectiveDynamicAccess.scala index 8af5d4bec0..cd319f8d27 100644 --- 
a/akka-actor/src/main/scala/akka/actor/ReflectiveDynamicAccess.scala +++ b/akka-actor/src/main/scala/akka/actor/ReflectiveDynamicAccess.scala @@ -69,6 +69,8 @@ class ReflectiveDynamicAccess(val classLoader: ClassLoader) extends DynamicAcces case null => throw new NullPointerException case x if !t.isInstance(x) => throw new ClassCastException(fqcn + " is not a subtype of " + t) case x: T => x + case unexpected => + throw new IllegalArgumentException(s"Unexpected module field: $unexpected") // will not happen, for exhaustiveness check } }.recover { case i: InvocationTargetException if i.getTargetException ne null => throw i.getTargetException } } diff --git a/akka-actor/src/main/scala/akka/actor/Timers.scala b/akka-actor/src/main/scala/akka/actor/Timers.scala index 049a506df6..37eee8e3c7 100644 --- a/akka-actor/src/main/scala/akka/actor/Timers.scala +++ b/akka-actor/src/main/scala/akka/actor/Timers.scala @@ -50,7 +50,7 @@ trait Timers extends Actor { actorCell.currentMessage = actorCell.currentMessage.copy(message = m) } super.aroundReceive(receive, m) - case OptionVal.None => // discard + case _ => // discard } case _ => super.aroundReceive(receive, msg) diff --git a/akka-actor/src/main/scala/akka/actor/dungeon/DeathWatch.scala b/akka-actor/src/main/scala/akka/actor/dungeon/DeathWatch.scala index ee55e931f5..62f12cdba4 100644 --- a/akka-actor/src/main/scala/akka/actor/dungeon/DeathWatch.scala +++ b/akka-actor/src/main/scala/akka/actor/dungeon/DeathWatch.scala @@ -33,6 +33,8 @@ private[akka] trait DeathWatch { this: ActorCell => checkWatchingSame(a, None) } a + case unexpected => + throw new IllegalArgumentException(s"ActorRef is not internal: $unexpected") // will not happen, for exhaustiveness check } override final def watchWith(subject: ActorRef, msg: Any): ActorRef = subject match { @@ -46,6 +48,8 @@ private[akka] trait DeathWatch { this: ActorCell => checkWatchingSame(a, Some(msg)) } a + case unexpected => + throw new IllegalArgumentException(s"ActorRef is not 
internal: $unexpected") // will not happen, for exhaustiveness check } override final def unwatch(subject: ActorRef): ActorRef = subject match { @@ -58,6 +62,8 @@ private[akka] trait DeathWatch { this: ActorCell => } terminatedQueued -= a a + case unexpected => + throw new IllegalArgumentException(s"ActorRef is not internal: $unexpected") // will not happen, for exhaustiveness check } protected def receivedTerminated(t: Terminated): Unit = diff --git a/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala index 1592f347e9..95ecaa77b4 100644 --- a/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala +++ b/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala @@ -186,6 +186,8 @@ abstract class MessageDispatcher(val configurator: MessageDispatcherConfigurator if (updateShutdownSchedule(SCHEDULED, RESCHEDULED)) () else ifSensibleToDoSoThenScheduleShutdown() case RESCHEDULED => + case unexpected => + throw new IllegalArgumentException(s"Unexpected actor class marker: $unexpected") // will not happen, for exhaustiveness check } } @@ -245,6 +247,8 @@ abstract class MessageDispatcher(val configurator: MessageDispatcherConfigurator if (updateShutdownSchedule(RESCHEDULED, SCHEDULED)) scheduleShutdownAction() else run() case UNSCHEDULED => + case unexpected => + throw new IllegalArgumentException(s"Unexpected actor class marker: $unexpected") // will not happen, for exhaustiveness check } } } diff --git a/akka-actor/src/main/scala/akka/dispatch/Mailboxes.scala b/akka-actor/src/main/scala/akka/dispatch/Mailboxes.scala index 6b219e25bd..6f9340e321 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Mailboxes.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Mailboxes.scala @@ -108,6 +108,8 @@ private[akka] class Mailboxes( case x => throw new IllegalArgumentException(s"no wildcard type allowed in RequireMessageQueue argument (was [$x])") } + case unexpected => + throw new 
IllegalArgumentException(s"Unexpected actor class marker: $unexpected") // will not happen, for exhaustiveness check } // don’t care if this happens twice @@ -131,6 +133,8 @@ private[akka] class Mailboxes( throw new IllegalArgumentException( s"no wildcard type allowed in ProducesMessageQueue argument (was [$x])") } + case unexpected => + throw new IllegalArgumentException(s"Unexpected message queue type marker: $unexpected") // will not happen, for exhaustiveness check } } diff --git a/akka-actor/src/main/scala/akka/io/UdpListener.scala b/akka-actor/src/main/scala/akka/io/UdpListener.scala index 820854fc88..1bb3504422 100644 --- a/akka-actor/src/main/scala/akka/io/UdpListener.scala +++ b/akka-actor/src/main/scala/akka/io/UdpListener.scala @@ -100,6 +100,8 @@ private[io] class UdpListener(val udp: UdpExt, channelRegistry: ChannelRegistry, handler ! Received(ByteString(buffer), sender) if (readsLeft > 0) innerReceive(readsLeft - 1, buffer) case null => // null means no data was available + case unexpected => + throw new RuntimeException(s"Unexpected address in buffer: $unexpected") // will not happen, for exhaustiveness check } } diff --git a/akka-actor/src/main/scala/akka/io/dns/DnsSettings.scala b/akka-actor/src/main/scala/akka/io/dns/DnsSettings.scala index 074aaa8adb..1cbb09aee6 100644 --- a/akka-actor/src/main/scala/akka/io/dns/DnsSettings.scala +++ b/akka-actor/src/main/scala/akka/io/dns/DnsSettings.scala @@ -136,10 +136,13 @@ object DnsSettings { /** * INTERNAL API */ - @InternalApi private[akka] def parseNameserverAddress(str: String): InetSocketAddress = { - val inetSocketAddress(host, port) = str - new InetSocketAddress(host, Option(port).fold(DnsFallbackPort)(_.toInt)) - } + @InternalApi private[akka] def parseNameserverAddress(str: String): InetSocketAddress = + str match { + case inetSocketAddress(host, port) => + new InetSocketAddress(host, Option(port).fold(DnsFallbackPort)(_.toInt)) + case unexpected => + throw new 
IllegalArgumentException(s"Unparseable address string: $unexpected") // will not happen, for exhaustiveness check + } /** * INTERNAL API diff --git a/akka-actor/src/main/scala/akka/io/dns/internal/AsyncDnsResolver.scala b/akka-actor/src/main/scala/akka/io/dns/internal/AsyncDnsResolver.scala index 36312c0773..c496636a14 100644 --- a/akka-actor/src/main/scala/akka/io/dns/internal/AsyncDnsResolver.scala +++ b/akka-actor/src/main/scala/akka/io/dns/internal/AsyncDnsResolver.scala @@ -94,6 +94,7 @@ private[io] final class AsyncDnsResolver( val record = address match { case _: Inet4Address => ARecord(name, Ttl.effectivelyForever, address) case ipv6address: Inet6Address => AAAARecord(name, Ttl.effectivelyForever, ipv6address) + case unexpected => throw new IllegalArgumentException(s"Unexpected address: $unexpected") } DnsProtocol.Resolved(name, record :: Nil) } diff --git a/akka-actor/src/main/scala/akka/io/dns/internal/RecordTypeSerializer.scala b/akka-actor/src/main/scala/akka/io/dns/internal/RecordTypeSerializer.scala index 73db95d5f0..773310f11d 100644 --- a/akka-actor/src/main/scala/akka/io/dns/internal/RecordTypeSerializer.scala +++ b/akka-actor/src/main/scala/akka/io/dns/internal/RecordTypeSerializer.scala @@ -20,8 +20,8 @@ private[akka] object RecordTypeSerializer { def parse(it: ByteIterator): RecordType = { val id = it.getShort RecordType(id) match { - case OptionVal.None => throw new IllegalArgumentException(s"Illegal id [$id] for DnsRecordType") case OptionVal.Some(t) => t + case _ => throw new IllegalArgumentException(s"Illegal id [$id] for DnsRecordType") } } diff --git a/akka-actor/src/main/scala/akka/pattern/AskSupport.scala b/akka-actor/src/main/scala/akka/pattern/AskSupport.scala index cb0c7962a3..68647766ec 100644 --- a/akka-actor/src/main/scala/akka/pattern/AskSupport.scala +++ b/akka-actor/src/main/scala/akka/pattern/AskSupport.scala @@ -604,6 +604,7 @@ private[akka] final class PromiseActorRef private ( updateState(Stopped, 
StoppedWithPath(provider.tempPath())) path case Registering => path // spin until registration is completed + case unexpected => throw new IllegalStateException(s"Unexpected state: $unexpected") } override def !(message: Any)(implicit sender: ActorRef = Actor.noSender): Unit = state match { @@ -668,6 +669,7 @@ private[akka] final class PromiseActorRef private ( } else stop() case Stopped | _: StoppedWithPath => // already stopped case Registering => stop() // spin until registration is completed before stopping + case unexpected => throw new IllegalStateException(s"Unexpected state: $unexpected") } } diff --git a/akka-actor/src/main/scala/akka/pattern/StatusReply.scala b/akka-actor/src/main/scala/akka/pattern/StatusReply.scala index d517bb30df..e8d13ec8f4 100644 --- a/akka-actor/src/main/scala/akka/pattern/StatusReply.scala +++ b/akka-actor/src/main/scala/akka/pattern/StatusReply.scala @@ -164,6 +164,8 @@ object StatusReply { s match { case StatusReply.Success(v) => ScalaSuccess(v.asInstanceOf[T]) case StatusReply.Error(ex) => ScalaFailure[T](ex) + case unexpected => + ScalaFailure(new IllegalArgumentException(s"Unexpected status reply success value: ${unexpected}")) } case fail @ ScalaFailure(_) => fail.asInstanceOf[Try[T]] }(ExecutionContexts.parasitic) diff --git a/akka-actor/src/main/scala/akka/routing/ConsistentHashing.scala b/akka-actor/src/main/scala/akka/routing/ConsistentHashing.scala index f319a6ba61..d3ac79e752 100644 --- a/akka-actor/src/main/scala/akka/routing/ConsistentHashing.scala +++ b/akka-actor/src/main/scala/akka/routing/ConsistentHashing.scala @@ -219,6 +219,8 @@ final case class ConsistentHashingRoutingLogic( case bytes: Array[Byte] => currentConsistenHash.nodeFor(bytes).routee case str: String => currentConsistenHash.nodeFor(str).routee case x: AnyRef => currentConsistenHash.nodeFor(SerializationExtension(system).serialize(x).get).routee + case unexpected => + throw new IllegalArgumentException(s"Unexpected hashdata: $unexpected") // will not 
happen, for exhaustiveness check } } catch { case NonFatal(e) => diff --git a/akka-actor/src/main/scala/akka/serialization/PrimitiveSerializers.scala b/akka-actor/src/main/scala/akka/serialization/PrimitiveSerializers.scala index 2dd472bf85..fe6f2b629c 100644 --- a/akka-actor/src/main/scala/akka/serialization/PrimitiveSerializers.scala +++ b/akka-actor/src/main/scala/akka/serialization/PrimitiveSerializers.scala @@ -168,6 +168,7 @@ import akka.util.ByteString val flag = o match { case TRUE => TrueB case FALSE => FalseB + case b => throw new IllegalArgumentException(s"Non boolean flag: $b") } buf.put(flag) } @@ -176,6 +177,7 @@ import akka.util.ByteString buf.get() match { case TrueB => TRUE case FalseB => FALSE + case b => throw new IllegalArgumentException(s"Non boolean flag byte: $b") } } @@ -183,6 +185,7 @@ import akka.util.ByteString val flag = o match { case TRUE => TrueB case FALSE => FalseB + case b => throw new IllegalArgumentException(s"Non boolean flag: $b") } val result = new Array[Byte](1) result(0) = flag @@ -193,6 +196,7 @@ import akka.util.ByteString bytes(0) match { case TrueB => TRUE case FalseB => FALSE + case b => throw new IllegalArgumentException(s"Non boolean flag byte: $b") } } } diff --git a/akka-actor/src/main/scala/akka/util/HashCode.scala b/akka-actor/src/main/scala/akka/util/HashCode.scala index f8c2f9b0b9..672f0157d8 100644 --- a/akka-actor/src/main/scala/akka/util/HashCode.scala +++ b/akka-actor/src/main/scala/akka/util/HashCode.scala @@ -40,6 +40,8 @@ object HashCode { else if (!isArray(value)) result = hash(result, value.hashCode()) else for (id <- 0 until JArray.getLength(value)) result = hash(result, JArray.get(value, id)) // is an array result + case unexpected => + throw new IllegalArgumentException(s"Unexpected hash parameter: $unexpected") // will not happen, for exhaustiveness check } def hash(seed: Int, value: Boolean): Int = firstTerm(seed) + (if (value) 1 else 0) def hash(seed: Int, value: Char): Int = firstTerm(seed) + 
value.asInstanceOf[Int] diff --git a/akka-bench-jmh/src/main/scala/akka/actor/typed/TypedBenchmarkActors.scala b/akka-bench-jmh/src/main/scala/akka/actor/typed/TypedBenchmarkActors.scala index 858675f1e5..7fa3faef4c 100644 --- a/akka-bench-jmh/src/main/scala/akka/actor/typed/TypedBenchmarkActors.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/typed/TypedBenchmarkActors.scala @@ -95,7 +95,7 @@ object TypedBenchmarkActors { val startNanoTime = System.nanoTime() pairs.foreach(_ ! Message) var interactionsLeft = numPairs - Behaviors.receiveMessage { + Behaviors.receiveMessagePartial { case Done => interactionsLeft -= 1 if (interactionsLeft == 0) { diff --git a/akka-bench-jmh/src/main/scala/akka/actor/typed/delivery/ReliableDeliveryBenchmark.scala b/akka-bench-jmh/src/main/scala/akka/actor/typed/delivery/ReliableDeliveryBenchmark.scala index 148ce9db6e..6b347d5ab9 100644 --- a/akka-bench-jmh/src/main/scala/akka/actor/typed/delivery/ReliableDeliveryBenchmark.scala +++ b/akka-bench-jmh/src/main/scala/akka/actor/typed/delivery/ReliableDeliveryBenchmark.scala @@ -41,7 +41,7 @@ object Producer { val requestNextAdapter = context.messageAdapter[ProducerController.RequestNext[Consumer.Command]](WrappedRequestNext(_)) - Behaviors.receiveMessage { + Behaviors.receiveMessagePartial { case WrappedRequestNext(next) => if (next.confirmedSeqNr >= numberOfMessages) { context.log.info("Completed {} messages", numberOfMessages) @@ -114,7 +114,7 @@ object WorkPullingProducer { var remaining = numberOfMessages + context.system.settings.config .getInt("akka.reliable-delivery.consumer-controller.flow-control-window") - Behaviors.receiveMessage { + Behaviors.receiveMessagePartial { case WrappedRequestNext(next) => remaining -= 1 if (remaining == 0) { @@ -183,6 +183,9 @@ object Guardian { consumers.foreach(context.stop) replyTo ! 
Done Behaviors.same + + case msg => + throw new RuntimeException(s"Unexpected message $msg") } } } diff --git a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsRouting.scala b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsRouting.scala index 8d83faadae..b42dac85a3 100644 --- a/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsRouting.scala +++ b/akka-cluster-metrics/src/main/scala/akka/cluster/metrics/ClusterMetricsRouting.scala @@ -489,6 +489,7 @@ private[metrics] class WeightedRoutees( val a = routee match { case ActorRefRoutee(ref) => ref.path.address case ActorSelectionRoutee(sel) => sel.anchor.path.address + case _ => throw new RuntimeException() } a match { case Address(_, _, None, None) => selfAddress diff --git a/akka-cluster-metrics/src/multi-jvm/scala/akka/cluster/metrics/ClusterMetricsRoutingSpec.scala b/akka-cluster-metrics/src/multi-jvm/scala/akka/cluster/metrics/ClusterMetricsRoutingSpec.scala index 5fbb6c45d1..10b571dbcc 100644 --- a/akka-cluster-metrics/src/multi-jvm/scala/akka/cluster/metrics/ClusterMetricsRoutingSpec.scala +++ b/akka-cluster-metrics/src/multi-jvm/scala/akka/cluster/metrics/ClusterMetricsRoutingSpec.scala @@ -156,7 +156,7 @@ abstract class AdaptiveLoadBalancingRouterSpec // it may take some time until router receives cluster member events awaitAssert { currentRoutees(router).size should ===(roles.size) } val routees = currentRoutees(router) - routees.map { case ActorRefRoutee(ref) => fullAddress(ref) }.toSet should ===(roles.map(address).toSet) + routees.collect { case ActorRefRoutee(ref) => fullAddress(ref) }.toSet should ===(roles.map(address).toSet) router } @@ -235,7 +235,7 @@ abstract class AdaptiveLoadBalancingRouterSpec // it may take some time until router receives cluster member events awaitAssert { currentRoutees(router3).size should ===(9) } val routees = currentRoutees(router3) - routees.map { case ActorRefRoutee(ref) => fullAddress(ref) }.toSet should 
===(Set(address(node1))) + routees.collect { case ActorRefRoutee(ref) => fullAddress(ref) }.toSet should ===(Set(address(node1))) } enterBarrier("after-4") } @@ -246,7 +246,7 @@ abstract class AdaptiveLoadBalancingRouterSpec // it may take some time until router receives cluster member events awaitAssert { currentRoutees(router4).size should ===(6) } val routees = currentRoutees(router4) - routees.map { case ActorRefRoutee(ref) => fullAddress(ref) }.toSet should ===( + routees.collect { case ActorRefRoutee(ref) => fullAddress(ref) }.toSet should ===( Set(address(node1), address(node2), address(node3))) } enterBarrier("after-5") diff --git a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/ClusterMetricsExtensionSpec.scala b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/ClusterMetricsExtensionSpec.scala index 60f6189cdb..983b02413c 100644 --- a/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/ClusterMetricsExtensionSpec.scala +++ b/akka-cluster-metrics/src/test/scala/akka/cluster/metrics/ClusterMetricsExtensionSpec.scala @@ -82,6 +82,7 @@ class ClusterMetricsExtensionSpec loadAverageMock.get should ===(loadAverageEwma +- epsilon) cpuCombinedMock.get should ===(cpuCombinedEwma +- epsilon) cpuStolenMock.get should ===(cpuStolenEwma +- epsilon) + case _ => fail() } } } diff --git a/akka-cluster-sharding-typed/src/main/scala/akka/cluster/sharding/typed/delivery/internal/ShardingProducerControllerImpl.scala b/akka-cluster-sharding-typed/src/main/scala/akka/cluster/sharding/typed/delivery/internal/ShardingProducerControllerImpl.scala index f83d0afa56..0773346684 100644 --- a/akka-cluster-sharding-typed/src/main/scala/akka/cluster/sharding/typed/delivery/internal/ShardingProducerControllerImpl.scala +++ b/akka-cluster-sharding-typed/src/main/scala/akka/cluster/sharding/typed/delivery/internal/ShardingProducerControllerImpl.scala @@ -125,7 +125,7 @@ import akka.util.Timeout .narrow } - private def createInitialState[A: ClassTag](hasDurableQueue: 
Boolean) = { + private def createInitialState[A](hasDurableQueue: Boolean) = { if (hasDurableQueue) None else Some(DurableProducerQueue.State.empty[A]) } @@ -226,12 +226,12 @@ import akka.util.Timeout } } - private def checkStashFull[A: ClassTag](stashBuffer: StashBuffer[InternalCommand]): Unit = { + private def checkStashFull[A](stashBuffer: StashBuffer[InternalCommand]): Unit = { if (stashBuffer.isFull) throw new IllegalArgumentException(s"Buffer is full, size [${stashBuffer.size}].") } - private def askLoadState[A: ClassTag]( + private def askLoadState[A]( context: ActorContext[InternalCommand], durableQueueBehavior: Option[Behavior[DurableProducerQueue.Command[A]]], settings: ShardingProducerController.Settings): Option[ActorRef[DurableProducerQueue.Command[A]]] = { @@ -244,7 +244,7 @@ import akka.util.Timeout } } - private def askLoadState[A: ClassTag]( + private def askLoadState[A]( context: ActorContext[InternalCommand], durableQueue: Option[ActorRef[DurableProducerQueue.Command[A]]], settings: ShardingProducerController.Settings, @@ -565,6 +565,8 @@ private class ShardingProducerControllerImpl[A: ClassTag]( case DurableQueueTerminated => throw new IllegalStateException("DurableQueue was unexpectedly terminated.") + case unexpected => + throw new RuntimeException(s"Unexpected message: $unexpected") } } diff --git a/akka-cluster-sharding-typed/src/main/scala/akka/cluster/sharding/typed/internal/ClusterShardingImpl.scala b/akka-cluster-sharding-typed/src/main/scala/akka/cluster/sharding/typed/internal/ClusterShardingImpl.scala index 4206d415ed..fa891288d2 100644 --- a/akka-cluster-sharding-typed/src/main/scala/akka/cluster/sharding/typed/internal/ClusterShardingImpl.scala +++ b/akka-cluster-sharding-typed/src/main/scala/akka/cluster/sharding/typed/internal/ClusterShardingImpl.scala @@ -55,7 +55,7 @@ import akka.util.JavaDurationConverters._ message match { case ShardingEnvelope(entityId, _) => entityId //also covers ClassicStartEntity in ShardingEnvelope case 
ClassicStartEntity(entityId) => entityId - case msg: E @unchecked => delegate.entityId(msg) + case msg => delegate.entityId(msg.asInstanceOf[E]) } } @@ -69,8 +69,8 @@ import akka.util.JavaDurationConverters._ case msg: ClassicStartEntity => // not really of type M, but erased and StartEntity is only handled internally, not delivered to the entity msg.asInstanceOf[M] - case msg: E @unchecked => - delegate.unwrapMessage(msg) + case msg => + delegate.unwrapMessage(msg.asInstanceOf[E]) } } diff --git a/akka-cluster-sharding-typed/src/multi-jvm/scala/akka/cluster/sharding/typed/ShardedDaemonProcessSpec.scala b/akka-cluster-sharding-typed/src/multi-jvm/scala/akka/cluster/sharding/typed/ShardedDaemonProcessSpec.scala index da6f66f814..789341f2a4 100644 --- a/akka-cluster-sharding-typed/src/multi-jvm/scala/akka/cluster/sharding/typed/ShardedDaemonProcessSpec.scala +++ b/akka-cluster-sharding-typed/src/multi-jvm/scala/akka/cluster/sharding/typed/ShardedDaemonProcessSpec.scala @@ -39,7 +39,7 @@ object ShardedDaemonProcessSpec extends MultiNodeConfig { val snitchRouter = ctx.spawn(Routers.group(SnitchServiceKey), "router") snitchRouter ! ProcessActorEvent(id, "Started") - Behaviors.receiveMessage { + Behaviors.receiveMessagePartial { case Stop => snitchRouter ! 
ProcessActorEvent(id, "Stopped") Behaviors.stopped diff --git a/akka-cluster-sharding-typed/src/multi-jvm/scala/akka/cluster/sharding/typed/delivery/DeliveryThroughputSpec.scala b/akka-cluster-sharding-typed/src/multi-jvm/scala/akka/cluster/sharding/typed/delivery/DeliveryThroughputSpec.scala index e99967965a..b921b950ca 100644 --- a/akka-cluster-sharding-typed/src/multi-jvm/scala/akka/cluster/sharding/typed/delivery/DeliveryThroughputSpec.scala +++ b/akka-cluster-sharding-typed/src/multi-jvm/scala/akka/cluster/sharding/typed/delivery/DeliveryThroughputSpec.scala @@ -77,7 +77,7 @@ object DeliveryThroughputSpec extends MultiNodeConfig { } object Consumer { - trait Command + sealed trait Command case object TheMessage extends Command with CborSerializable case object Stop extends Command @@ -122,7 +122,7 @@ object DeliveryThroughputSpec extends MultiNodeConfig { } object Producer { - trait Command + sealed trait Command case object Run extends Command private case class WrappedRequestNext(r: ProducerController.RequestNext[Consumer.Command]) extends Command @@ -180,7 +180,7 @@ object DeliveryThroughputSpec extends MultiNodeConfig { def serviceKey(testName: String) = ServiceKey[ConsumerController.Command[Consumer.Command]](testName) object WorkPullingProducer { - trait Command + sealed trait Command case object Run extends Command private case class WrappedRequestNext(r: WorkPullingProducerController.RequestNext[Consumer.Command]) @@ -225,7 +225,7 @@ object DeliveryThroughputSpec extends MultiNodeConfig { def typeKey(testName: String) = EntityTypeKey[ConsumerController.SequencedMessage[Consumer.Command]](testName) object ShardingProducer { - trait Command + sealed trait Command case object Run extends Command private case class WrappedRequestNext(r: ShardingProducerController.RequestNext[Consumer.Command]) extends Command diff --git a/akka-cluster-sharding-typed/src/test/scala/akka/cluster/sharding/typed/ReplicatedShardingSpec.scala 
b/akka-cluster-sharding-typed/src/test/scala/akka/cluster/sharding/typed/ReplicatedShardingSpec.scala index 22202f4cff..95a3a251d1 100644 --- a/akka-cluster-sharding-typed/src/test/scala/akka/cluster/sharding/typed/ReplicatedShardingSpec.scala +++ b/akka-cluster-sharding-typed/src/test/scala/akka/cluster/sharding/typed/ReplicatedShardingSpec.scala @@ -70,7 +70,7 @@ object ReplicatedShardingSpec { val AllReplicas = Set(ReplicaId("DC-A"), ReplicaId("DC-B")) object MyReplicatedStringSet { - trait Command extends CborSerializable + sealed trait Command extends CborSerializable case class Add(text: String) extends Command case class GetTexts(replyTo: ActorRef[Texts]) extends Command @@ -122,7 +122,7 @@ object ReplicatedShardingSpec { } object MyReplicatedIntSet { - trait Command extends CborSerializable + sealed trait Command extends CborSerializable case class Add(text: Int) extends Command case class GetInts(replyTo: ActorRef[Ints]) extends Command case class Ints(ints: Set[Int]) extends CborSerializable diff --git a/akka-cluster-sharding-typed/src/test/scala/akka/cluster/sharding/typed/delivery/ReliableDeliveryShardingSpec.scala b/akka-cluster-sharding-typed/src/test/scala/akka/cluster/sharding/typed/delivery/ReliableDeliveryShardingSpec.scala index a6ffa341ce..59178a51f2 100644 --- a/akka-cluster-sharding-typed/src/test/scala/akka/cluster/sharding/typed/delivery/ReliableDeliveryShardingSpec.scala +++ b/akka-cluster-sharding-typed/src/test/scala/akka/cluster/sharding/typed/delivery/ReliableDeliveryShardingSpec.scala @@ -40,7 +40,7 @@ object ReliableDeliveryShardingSpec { object TestShardingProducer { - trait Command + sealed trait Command final case class RequestNext(sendToRef: ActorRef[ShardingEnvelope[TestConsumer.Job]]) extends Command private case object Tick extends Command diff --git a/akka-cluster-sharding-typed/src/test/scala/akka/cluster/sharding/typed/scaladsl/ShardedDaemonProcessSpec.scala 
b/akka-cluster-sharding-typed/src/test/scala/akka/cluster/sharding/typed/scaladsl/ShardedDaemonProcessSpec.scala index bf6b54e2ae..568bd0e2bf 100644 --- a/akka-cluster-sharding-typed/src/test/scala/akka/cluster/sharding/typed/scaladsl/ShardedDaemonProcessSpec.scala +++ b/akka-cluster-sharding-typed/src/test/scala/akka/cluster/sharding/typed/scaladsl/ShardedDaemonProcessSpec.scala @@ -38,7 +38,7 @@ object ShardedDaemonProcessSpec { """) object MyActor { - trait Command + sealed trait Command case object Stop extends Command case class Started(id: Int, selfRef: ActorRef[Command]) diff --git a/akka-cluster-sharding-typed/src/test/scala/docs/akka/cluster/sharding/typed/HelloWorldPersistentEntityExample.scala b/akka-cluster-sharding-typed/src/test/scala/docs/akka/cluster/sharding/typed/HelloWorldPersistentEntityExample.scala index 7a039e1246..3825c70699 100644 --- a/akka-cluster-sharding-typed/src/test/scala/docs/akka/cluster/sharding/typed/HelloWorldPersistentEntityExample.scala +++ b/akka-cluster-sharding-typed/src/test/scala/docs/akka/cluster/sharding/typed/HelloWorldPersistentEntityExample.scala @@ -50,7 +50,7 @@ object HelloWorldPersistentEntityExample { object HelloWorld { // Command - trait Command extends CborSerializable + sealed trait Command extends CborSerializable final case class Greet(whom: String)(val replyTo: ActorRef[Greeting]) extends Command // Response final case class Greeting(whom: String, numberOfPeople: Int) extends CborSerializable diff --git a/akka-cluster-sharding-typed/src/test/scala/docs/delivery/WorkPullingDocExample.scala b/akka-cluster-sharding-typed/src/test/scala/docs/delivery/WorkPullingDocExample.scala index f32419e1fc..64a8d0a3be 100644 --- a/akka-cluster-sharding-typed/src/test/scala/docs/delivery/WorkPullingDocExample.scala +++ b/akka-cluster-sharding-typed/src/test/scala/docs/delivery/WorkPullingDocExample.scala @@ -67,7 +67,7 @@ object WorkPullingDocExample { import akka.actor.typed.scaladsl.StashBuffer object ImageWorkManager { 
- trait Command + sealed trait Command final case class Convert(fromFormat: String, toFormat: String, image: Array[Byte]) extends Command private case class WrappedRequestNext(r: WorkPullingProducerController.RequestNext[ImageConverter.ConversionJob]) extends Command @@ -137,7 +137,7 @@ object WorkPullingDocExample { import ImageWorkManager._ private def waitForNext(): Behavior[Command] = { - Behaviors.receiveMessage { + Behaviors.receiveMessagePartial { case WrappedRequestNext(next) => stashBuffer.unstashAll(active(next)) case c: Convert => @@ -156,7 +156,7 @@ object WorkPullingDocExample { private def active( next: WorkPullingProducerController.RequestNext[ImageConverter.ConversionJob]): Behavior[Command] = { - Behaviors.receiveMessage { + Behaviors.receiveMessagePartial { case Convert(from, to, image) => val resultId = UUID.randomUUID() next.sendNextTo ! ImageConverter.ConversionJob(resultId, from, to, image) @@ -178,7 +178,7 @@ object WorkPullingDocExample { implicit val askTimeout: Timeout = 5.seconds private def waitForNext(): Behavior[Command] = { - Behaviors.receiveMessage { + Behaviors.receiveMessagePartial { case WrappedRequestNext(next) => stashBuffer.unstashAll(active(next)) case c: ConvertRequest => @@ -201,7 +201,7 @@ object WorkPullingDocExample { private def active( next: WorkPullingProducerController.RequestNext[ImageConverter.ConversionJob]): Behavior[Command] = { - Behaviors.receiveMessage { + Behaviors.receiveMessagePartial { case ConvertRequest(from, to, image, originalReplyTo) => val resultId = UUID.randomUUID() context.ask[MessageWithConfirmation[ImageConverter.ConversionJob], Done]( diff --git a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterSharding.scala b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterSharding.scala index b5f182dbb4..2ea0eb942e 100755 --- a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterSharding.scala +++ 
b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/ClusterSharding.scala @@ -301,7 +301,7 @@ class ClusterSharding(system: ExtendedActorSystem) extends Extension { extractShardId, allocationStrategy, handOffStopMessage) - val Started(shardRegion) = Await.result(guardian ? startMsg, timeout.duration) + val shardRegion = Await.result((guardian ? startMsg).mapTo[Started], timeout.duration).shardRegion regions.put(typeName, shardRegion) shardRegion case ref => ref // already started, use cached ActorRef @@ -545,7 +545,7 @@ class ClusterSharding(system: ExtendedActorSystem) extends Extension { implicit val timeout = system.settings.CreationTimeout val settings = ClusterShardingSettings(system).withRole(role) val startMsg = StartProxy(typeName, dataCenter, settings, extractEntityId, extractShardId) - val Started(shardRegion) = Await.result(guardian ? startMsg, timeout.duration) + val shardRegion = Await.result((guardian ? startMsg).mapTo[Started], timeout.duration).shardRegion // it must be possible to start several proxies, one per data center proxies.put(proxyName(typeName, dataCenter), shardRegion) shardRegion @@ -777,6 +777,8 @@ private[akka] class ClusterShardingGuardian extends Actor { new EventSourcedRememberEntitiesProvider(typeName, settings) case ClusterShardingSettings.RememberEntitiesStoreCustom => new CustomStateStoreModeProvider(typeName, context.system, settings) + case unknown => + throw new IllegalArgumentException(s"Unknown store type: $unknown") // compiler exhaustiveness check pleaser }) } diff --git a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/Shard.scala b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/Shard.scala index 5dc9af7d21..49a084a127 100644 --- a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/Shard.scala +++ b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/Shard.scala @@ -960,7 +960,7 @@ private[akka] class Shard( entityId, unexpected) } - case OptionVal.None => + case _ => 
log.warning("{}: Unexpected entity terminated: {}", typeName, ref) } } @@ -982,7 +982,7 @@ private[akka] class Shard( entity ! stopMessage flightRecorder.entityPassivate(id) } - case OptionVal.None => + case _ => log.debug("{}: Unknown entity passivating [{}]. Not sending stopMessage back to entity", typeName, entity) } } @@ -1108,7 +1108,7 @@ private[akka] class Shard( def getOrCreateEntity(id: EntityId): ActorRef = { entities.entity(id) match { case OptionVal.Some(child) => child - case OptionVal.None => + case _ => val name = URLEncoder.encode(id, "utf-8") val a = context.actorOf(entityProps(id), name) context.watchWith(a, EntityTerminated(a)) diff --git a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/external/internal/ExternalShardAllocationClientImpl.scala b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/external/internal/ExternalShardAllocationClientImpl.scala index 23a8f2dd7e..a0af8cb211 100644 --- a/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/external/internal/ExternalShardAllocationClientImpl.scala +++ b/akka-cluster-sharding/src/main/scala/akka/cluster/sharding/external/internal/ExternalShardAllocationClientImpl.scala @@ -70,6 +70,7 @@ final private[external] class ExternalShardAllocationClientImpl(system: ActorSys case UpdateSuccess(_, _) => Future.successful(Done) case UpdateTimeout => Future.failed(new ClientTimeoutException(s"Unable to update shard location after ${timeout.duration.pretty}")) + case _ => throw new IllegalArgumentException() // compiler exhaustiveness check pleaser } } @@ -86,6 +87,7 @@ final private[external] class ExternalShardAllocationClientImpl(system: ActorSys Future.successful(Map.empty[ShardId, ShardLocation]) case GetFailure(_, _) => Future.failed((new ClientTimeoutException(s"Unable to get shard locations after ${timeout.duration.pretty}"))) + case _ => throw new IllegalArgumentException() // compiler exhaustiveness check pleaser } .map { locations => new ShardLocations(locations) @@ 
-104,6 +106,7 @@ final private[external] class ExternalShardAllocationClientImpl(system: ActorSys case UpdateSuccess(_, _) => Future.successful(Done) case UpdateTimeout => Future.failed(new ClientTimeoutException(s"Unable to update shard location after ${timeout.duration.pretty}")) + case _ => throw new IllegalArgumentException() // compiler exhaustiveness check pleaser } } diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sbr/GlobalRegistry.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sbr/GlobalRegistry.scala index 7b8e0860c4..cf269eceb2 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sbr/GlobalRegistry.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sbr/GlobalRegistry.scala @@ -31,10 +31,10 @@ object GlobalRegistry { case id: Int => (id.toString, id) } - val extractShardId: ShardRegion.ExtractShardId = msg => - msg match { - case id: Int => (id % 10).toString - } + val extractShardId: ShardRegion.ExtractShardId = { + case id: Int => (id % 10).toString + case _ => throw new IllegalArgumentException() + } } class SingletonActor(registry: ActorRef) extends Actor with ActorLogging { diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardCoordinatorDowning2Spec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardCoordinatorDowning2Spec.scala index f650263333..b90a4491ed 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardCoordinatorDowning2Spec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardCoordinatorDowning2Spec.scala @@ -42,6 +42,7 @@ object ClusterShardCoordinatorDowning2Spec { val extractShardId: ShardRegion.ExtractShardId = { case Ping(id: String) => id.charAt(0).toString + case _ => throw new IllegalArgumentException() } } diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardCoordinatorDowningSpec.scala 
b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardCoordinatorDowningSpec.scala index 533c391752..4d8201a299 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardCoordinatorDowningSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardCoordinatorDowningSpec.scala @@ -42,6 +42,7 @@ object ClusterShardCoordinatorDowningSpec { val extractShardId: ShardRegion.ExtractShardId = { case Ping(id: String) => id.charAt(0).toString + case _ => throw new IllegalArgumentException() } } diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingFailureSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingFailureSpec.scala index 76958ce3a3..ad91b405f9 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingFailureSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingFailureSpec.scala @@ -46,6 +46,7 @@ object ClusterShardingFailureSpec { case Get(id) => id.charAt(0).toString case Add(id, _) => id.charAt(0).toString case StartEntity(id) => id + case _ => throw new IllegalArgumentException() } } diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGetStateSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGetStateSpec.scala index 96c603fb96..85bc204f4d 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGetStateSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGetStateSpec.scala @@ -24,6 +24,7 @@ object ClusterShardingGetStateSpec { val extractShardId: ShardRegion.ExtractShardId = { case PingPongActor.Ping(id) => (id % numberOfShards).toString + case _ => throw new IllegalArgumentException() } val shardTypeName = "Ping" diff --git 
a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGetStatsSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGetStatsSpec.scala index 464013c1fc..2f2ecf2768 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGetStatsSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingGetStatsSpec.scala @@ -25,6 +25,7 @@ object ClusterShardingGetStatsSpec { } val extractShardId: ShardRegion.ExtractShardId = { case PingPongActor.Ping(id) => (id % numberOfShards).toString + case _ => throw new IllegalArgumentException() } } diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingLeavingSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingLeavingSpec.scala index 01227c70cc..31d7830b53 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingLeavingSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingLeavingSpec.scala @@ -39,6 +39,7 @@ object ClusterShardingLeavingSpec { val extractShardId: ShardRegion.ExtractShardId = { case Ping(id: String) => id.charAt(0).toString + case _ => throw new IllegalArgumentException() } } diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingQueriesSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingQueriesSpec.scala index 587cb87503..edc36617e6 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingQueriesSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingQueriesSpec.scala @@ -23,6 +23,7 @@ object ClusterShardingQueriesSpec { val extractShardId: ShardRegion.ExtractShardId = { case PingPongActor.Ping(id) => (id % numberOfShards).toString + case _ => throw new RuntimeException() } val 
shardTypeName = "DatatypeA" diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingRememberEntitiesNewExtractorSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingRememberEntitiesNewExtractorSpec.scala index 1fd1b1a6be..6ea5924de3 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingRememberEntitiesNewExtractorSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingRememberEntitiesNewExtractorSpec.scala @@ -37,12 +37,14 @@ object ClusterShardingRememberEntitiesNewExtractorSpec { val extractShardId1: ShardRegion.ExtractShardId = { case id: Int => (id % shardCount).toString case ShardRegion.StartEntity(id) => extractShardId1(id.toInt) + case _ => throw new IllegalArgumentException() } val extractShardId2: ShardRegion.ExtractShardId = { // always bump it one shard id case id: Int => ((id + 1) % shardCount).toString case ShardRegion.StartEntity(id) => extractShardId2(id.toInt) + case _ => throw new IllegalArgumentException() } } diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingRememberEntitiesSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingRememberEntitiesSpec.scala index 2cf3669560..68b5486e0f 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingRememberEntitiesSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingRememberEntitiesSpec.scala @@ -23,6 +23,7 @@ object ClusterShardingRememberEntitiesSpec { val extractShardId: ShardRegion.ExtractShardId = { case id: Int => id.toString case ShardRegion.StartEntity(id) => id + case _ => throw new IllegalArgumentException() } } diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingRolePartitioningSpec.scala 
b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingRolePartitioningSpec.scala index 6b53cbd5c7..1b52717f41 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingRolePartitioningSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingRolePartitioningSpec.scala @@ -22,6 +22,7 @@ object E1 { val extractShardId: ShardRegion.ExtractShardId = { case id: String => id + case _ => throw new IllegalArgumentException() } } @@ -33,6 +34,7 @@ object E2 { val extractShardId: ShardRegion.ExtractShardId = { case id: Int => id.toString + case _ => throw new IllegalArgumentException() } } diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingSpec.scala index a65514a5e0..99a4c6f448 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ClusterShardingSpec.scala @@ -78,6 +78,7 @@ object ClusterShardingSpec { case EntityEnvelope(id, _) => (id % numberOfShards).toString case Get(id) => (id % numberOfShards).toString case ShardRegion.StartEntity(id) => (id.toLong % numberOfShards).toString + case _ => throw new IllegalArgumentException() } def qualifiedCounterProps(typeName: String): Props = @@ -193,6 +194,7 @@ object ClusterShardingDocCode { case ShardRegion.StartEntity(id) => // StartEntity is used by remembering entities feature (id.toLong % numberOfShards).toString + case _ => throw new IllegalArgumentException() } //#counter-extractor @@ -204,6 +206,7 @@ object ClusterShardingDocCode { case ShardRegion.StartEntity(id) => // StartEntity is used by remembering entities feature (id.toLong % numberOfShards).toString + case _ => throw new IllegalArgumentException() } //#extractShardId-StartEntity extractShardId.toString() // keep the 
compiler happy @@ -368,6 +371,7 @@ abstract class ClusterShardingSpec(multiNodeConfig: ClusterShardingSpecConfig) case ClusterShardingSettings.RememberEntitiesStoreDData => Some(ddataRememberEntitiesProvider(typeName)) case ClusterShardingSettings.RememberEntitiesStoreEventsourced => Some(eventSourcedRememberEntitiesProvider(typeName, settings)) + case _ => fail() } system.actorOf( @@ -1002,6 +1006,7 @@ abstract class ClusterShardingSpec(multiNodeConfig: ClusterShardingSpecConfig) receiveOne(3 seconds) match { case ActorIdentity(id, Some(_)) if id == n => count = count + 1 case ActorIdentity(_, None) => //Not on the fifth shard + case _ => fail() } } count should be >= (2) diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ExternalShardAllocationSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ExternalShardAllocationSpec.scala index deeafedb6e..4c20630ead 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ExternalShardAllocationSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/ExternalShardAllocationSpec.scala @@ -48,6 +48,7 @@ object ExternalShardAllocationSpec { // shard == id to make testing easier val extractShardId: ShardRegion.ExtractShardId = { case Get(id) => id + case _ => throw new IllegalArgumentException() } } diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/MultiDcClusterShardingSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/MultiDcClusterShardingSpec.scala index 8be9f93fed..1eb004c12f 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/MultiDcClusterShardingSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/MultiDcClusterShardingSpec.scala @@ -41,6 +41,7 @@ object MultiDcClusterShardingSpec { val extractShardId: ShardRegion.ExtractShardId = { case m: EntityMsg => m.id.charAt(0).toString + case _ => throw new 
IllegalArgumentException() } } diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/MultiNodeClusterShardingSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/MultiNodeClusterShardingSpec.scala index f242dbf5ac..8430eca68e 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/MultiNodeClusterShardingSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/MultiNodeClusterShardingSpec.scala @@ -70,6 +70,7 @@ object MultiNodeClusterShardingSpec { msg match { case id: Int => id.toString case ShardRegion.StartEntity(id) => id + case _ => throw new IllegalArgumentException() } } diff --git a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/RollingUpdateShardAllocationSpec.scala b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/RollingUpdateShardAllocationSpec.scala index 8db6f278f2..3650095d64 100644 --- a/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/RollingUpdateShardAllocationSpec.scala +++ b/akka-cluster-sharding/src/multi-jvm/scala/akka/cluster/sharding/RollingUpdateShardAllocationSpec.scala @@ -57,6 +57,7 @@ object RollingUpdateShardAllocationSpec { // shard == id to make testing easier val extractShardId: ShardRegion.ExtractShardId = { case Get(id) => id + case _ => throw new IllegalArgumentException() } } diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/ClusterShardingLeaseSpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/ClusterShardingLeaseSpec.scala index 92bdbd14b0..f3d50a9ebf 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/ClusterShardingLeaseSpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/ClusterShardingLeaseSpec.scala @@ -52,6 +52,7 @@ object ClusterShardingLeaseSpec { val extractShardId: ShardRegion.ExtractShardId = { case msg: Int => (msg % 10).toString + case _ => throw new IllegalArgumentException() } case class 
LeaseFailed(msg: String) extends RuntimeException(msg) with NoStackTrace } diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/CoordinatedShutdownShardingSpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/CoordinatedShutdownShardingSpec.scala index 5230b07ec9..bdde997577 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/CoordinatedShutdownShardingSpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/CoordinatedShutdownShardingSpec.scala @@ -36,6 +36,7 @@ object CoordinatedShutdownShardingSpec { val extractShardId: ShardRegion.ExtractShardId = { case msg: Int => (msg % 10).toString + case _ => throw new IllegalArgumentException() } } diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/EntityTerminationSpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/EntityTerminationSpec.scala index 38a4202465..20e29f3641 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/EntityTerminationSpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/EntityTerminationSpec.scala @@ -63,6 +63,7 @@ class EntityTerminationSpec extends AkkaSpec(EntityTerminationSpec.config) with val extractShardId: ShardRegion.ExtractShardId = { case EntityEnvelope(_, _) => "1" // single shard for all entities case ShardRegion.StartEntity(_) => "1" + case _ => throw new IllegalArgumentException() } override def atStartup(): Unit = { diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/GetShardTypeNamesSpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/GetShardTypeNamesSpec.scala index 393cbb5855..c02d769e3c 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/GetShardTypeNamesSpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/GetShardTypeNamesSpec.scala @@ -23,10 +23,12 @@ object GetShardTypeNamesSpec { val extractEntityId: ShardRegion.ExtractEntityId = { case 
msg: Int => (msg.toString, msg) + case _ => throw new IllegalArgumentException() } val extractShardId: ShardRegion.ExtractShardId = { case msg: Int => (msg % 10).toString + case _ => throw new IllegalArgumentException() } } diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/InactiveEntityPassivationSpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/InactiveEntityPassivationSpec.scala index 7b4ab2a70d..559a8eb2db 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/InactiveEntityPassivationSpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/InactiveEntityPassivationSpec.scala @@ -56,6 +56,7 @@ object InactiveEntityPassivationSpec { val extractShardId: ShardRegion.ExtractShardId = { case msg: Int => (msg % 10).toString + case _ => throw new IllegalArgumentException() } } diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/PersistentShardingMigrationSpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/PersistentShardingMigrationSpec.scala index a9c5bf8a96..ff3716dd9f 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/PersistentShardingMigrationSpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/PersistentShardingMigrationSpec.scala @@ -99,6 +99,7 @@ object PersistentShardingMigrationSpec { // StartEntity is used by remembering entities feature probe ! 
id id + case _ => throw new IllegalArgumentException() } } diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/PersistentStartEntitySpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/PersistentStartEntitySpec.scala index 2fbfd2dc77..77303ba20c 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/PersistentStartEntitySpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/PersistentStartEntitySpec.scala @@ -32,11 +32,13 @@ object PersistentStartEntitySpec { val extractEntityId: ShardRegion.ExtractEntityId = { case EntityEnvelope(id, payload) => (id.toString, payload) + case _ => throw new IllegalArgumentException() } val extractShardId: ShardRegion.ExtractShardId = { case EntityEnvelope(id, _) => (id % 10).toString case StartEntity(id) => (id.toInt % 10).toString + case _ => throw new IllegalArgumentException() } val config = ConfigFactory.parseString(""" diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/ProxyShardingSpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/ProxyShardingSpec.scala index 4b38ced857..27f0562b93 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/ProxyShardingSpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/ProxyShardingSpec.scala @@ -36,11 +36,12 @@ class ProxyShardingSpec extends AkkaSpec(ProxyShardingSpec.config) with WithLogC } val idExtractor: ShardRegion.ExtractEntityId = { - case msg @ id => (id.toString, msg) + case msg => (msg.toString, msg) } val shardResolver: ShardRegion.ExtractShardId = { case id: Int => id.toString + case _ => throw new IllegalArgumentException() } val shardProxy: ActorRef = diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RememberEntitiesBatchedUpdatesSpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RememberEntitiesBatchedUpdatesSpec.scala index 763b61d0de..6ddec73880 100644 --- 
a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RememberEntitiesBatchedUpdatesSpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RememberEntitiesBatchedUpdatesSpec.scala @@ -62,11 +62,13 @@ class RememberEntitiesBatchedUpdatesSpec val extractEntityId: ShardRegion.ExtractEntityId = { case EntityEnvelope(id, payload) => (id.toString, payload) + case _ => throw new IllegalArgumentException() } val extractShardId: ShardRegion.ExtractShardId = { case EntityEnvelope(_, _) => "1" // single shard for all entities case ShardRegion.StartEntity(_) => "1" + case _ => throw new IllegalArgumentException() } override def atStartup(): Unit = { diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RememberEntitiesFailureSpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RememberEntitiesFailureSpec.scala index a4109c18b3..58de13e3dc 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RememberEntitiesFailureSpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RememberEntitiesFailureSpec.scala @@ -62,10 +62,12 @@ object RememberEntitiesFailureSpec { val extractEntityId: ShardRegion.ExtractEntityId = { case EntityEnvelope(id, payload) => (id.toString, payload) + case _ => throw new IllegalArgumentException() } val extractShardId: ShardRegion.ExtractShardId = { case EntityEnvelope(id, _) => (id % 10).toString + case _ => throw new IllegalArgumentException() } sealed trait Fail diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RememberEntitiesShardIdExtractorChangeSpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RememberEntitiesShardIdExtractorChangeSpec.scala index dff9724737..f5c188dd57 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RememberEntitiesShardIdExtractorChangeSpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RememberEntitiesShardIdExtractorChangeSpec.scala @@ 
-62,16 +62,19 @@ object RememberEntitiesShardIdExtractorChangeSpec { val extractEntityId: ShardRegion.ExtractEntityId = { case msg @ Message(id) => (id.toString, msg) + case _ => throw new IllegalArgumentException() } val firstExtractShardId: ShardRegion.ExtractShardId = { case Message(id) => (id % 10).toString case ShardRegion.StartEntity(id) => (id.toInt % 10).toString + case _ => throw new IllegalArgumentException() } val secondExtractShardId: ShardRegion.ExtractShardId = { case Message(id) => (id % 10 + 1L).toString case ShardRegion.StartEntity(id) => (id.toInt % 10 + 1L).toString + case _ => throw new IllegalArgumentException() } val TypeName = "ShardIdExtractorChange" diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RemoveInternalClusterShardingDataSpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RemoveInternalClusterShardingDataSpec.scala index c72586f1a7..c858a5e6bc 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RemoveInternalClusterShardingDataSpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/RemoveInternalClusterShardingDataSpec.scala @@ -49,10 +49,12 @@ object RemoveInternalClusterShardingDataSpec { val extractEntityId: ShardRegion.ExtractEntityId = { case msg: Int => (msg.toString, msg) + case _ => throw new IllegalArgumentException() } val extractShardId: ShardRegion.ExtractShardId = { case msg: Int => (msg % 10).toString + case _ => throw new IllegalArgumentException() } class HasSnapshots(override val persistenceId: String, replyTo: ActorRef) extends PersistentActor { diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/ShardRegionSpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/ShardRegionSpec.scala index 0219ed1715..f04ead16a8 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/ShardRegionSpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/ShardRegionSpec.scala @@ 
-47,11 +47,13 @@ object ShardRegionSpec { val extractEntityId: ShardRegion.ExtractEntityId = { case msg: Int => (msg.toString, msg) + case _ => throw new IllegalArgumentException() } val extractShardId: ShardRegion.ExtractShardId = { case msg: Int => (msg % 10).toString case ShardRegion.StartEntity(id) => (id.toLong % numberOfShards).toString + case _ => throw new IllegalArgumentException() } class EntityActor extends Actor with ActorLogging { diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/ShardWithLeaseSpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/ShardWithLeaseSpec.scala index 7883b40d3b..0e1cf1171f 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/ShardWithLeaseSpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/ShardWithLeaseSpec.scala @@ -54,10 +54,12 @@ object ShardWithLeaseSpec { val extractEntityId: ShardRegion.ExtractEntityId = { case EntityEnvelope(id, payload) => (id.toString, payload) + case _ => throw new IllegalArgumentException() } val extractShardId: ShardRegion.ExtractShardId = { case EntityEnvelope(id, _) => (id % numberOfShards).toString + case _ => throw new IllegalArgumentException() } case class BadLease(msg: String) extends RuntimeException(msg) with NoStackTrace diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/StartEntitySpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/StartEntitySpec.scala index 92d6b27e4c..7c01ab2872 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/StartEntitySpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/StartEntitySpec.scala @@ -67,11 +67,13 @@ class StartEntitySpec extends AkkaSpec(StartEntitySpec.config) with ImplicitSend val extractEntityId: ShardRegion.ExtractEntityId = { case EntityEnvelope(id, payload) => (id.toString, payload) + case _ => throw new IllegalArgumentException() } val extractShardId: ShardRegion.ExtractShardId 
= { case EntityEnvelope(_, _) => "1" // single shard for all entities case ShardRegion.StartEntity(_) => "1" + case _ => throw new IllegalArgumentException() } override def atStartup(): Unit = { diff --git a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/SupervisionSpec.scala b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/SupervisionSpec.scala index f77efd6d36..7ccffab3fe 100644 --- a/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/SupervisionSpec.scala +++ b/akka-cluster-sharding/src/test/scala/akka/cluster/sharding/SupervisionSpec.scala @@ -32,10 +32,12 @@ object SupervisionSpec { val idExtractor: ShardRegion.ExtractEntityId = { case Msg(id, msg) => (id.toString, msg) + case _ => throw new IllegalArgumentException() } val shardResolver: ShardRegion.ExtractShardId = { case Msg(id, _) => (id % 2).toString + case _ => throw new IllegalArgumentException() } class PassivatingActor extends Actor with ActorLogging { diff --git a/akka-cluster-typed/src/main/scala/akka/cluster/ddata/typed/internal/ReplicatorBehavior.scala b/akka-cluster-typed/src/main/scala/akka/cluster/ddata/typed/internal/ReplicatorBehavior.scala index d571bc6b5b..f4a48a280a 100644 --- a/akka-cluster-typed/src/main/scala/akka/cluster/ddata/typed/internal/ReplicatorBehavior.scala +++ b/akka-cluster-typed/src/main/scala/akka/cluster/ddata/typed/internal/ReplicatorBehavior.scala @@ -205,6 +205,8 @@ import akka.util.Timeout classicReplicator.tell(dd.Replicator.FlushChanges, sender = akka.actor.ActorRef.noSender) Behaviors.same + case unexpected => + throw new RuntimeException(s"Unexpected message: ${unexpected.getClass}") // compiler exhaustiveness check pleaser } } .receiveSignal { diff --git a/akka-cluster-typed/src/main/scala/akka/cluster/typed/internal/AdaptedClusterImpl.scala b/akka-cluster-typed/src/main/scala/akka/cluster/typed/internal/AdaptedClusterImpl.scala index f094853293..2d2453a9fb 100644 --- 
a/akka-cluster-typed/src/main/scala/akka/cluster/typed/internal/AdaptedClusterImpl.scala +++ b/akka-cluster-typed/src/main/scala/akka/cluster/typed/internal/AdaptedClusterImpl.scala @@ -100,6 +100,8 @@ private[akka] object AdapterClusterImpl { case _: MemberEvent => Behaviors.same + case _ => throw new IllegalArgumentException() // compiler exhaustiveness check pleaser + } .receiveSignal { diff --git a/akka-cluster-typed/src/main/scala/akka/cluster/typed/internal/receptionist/ClusterReceptionist.scala b/akka-cluster-typed/src/main/scala/akka/cluster/typed/internal/receptionist/ClusterReceptionist.scala index 8807f3670e..207c210270 100644 --- a/akka-cluster-typed/src/main/scala/akka/cluster/typed/internal/receptionist/ClusterReceptionist.scala +++ b/akka-cluster-typed/src/main/scala/akka/cluster/typed/internal/receptionist/ClusterReceptionist.scala @@ -208,6 +208,7 @@ private[typed] object ClusterReceptionist extends ReceptionistBehaviorProvider { ChangeFromReplicator( changed.key.asInstanceOf[DDataKey], changed.dataValue.asInstanceOf[ORMultiMap[ServiceKey[_], Entry]]) + case _ => throw new IllegalArgumentException() // compiler exhaustiveness check pleaser } initialRegistry.allDdataKeys.foreach(key => @@ -403,6 +404,9 @@ private[typed] object ClusterReceptionist extends ReceptionistBehaviorProvider { Behaviors.same } + case _ => + throw new IllegalArgumentException() // to please exhaustiveness check, compiler does not know about internal/public command + } def onInternalCommand(cmd: InternalCommand): Behavior[Command] = cmd match { diff --git a/akka-cluster-typed/src/test/scala/akka/cluster/ddata/typed/scaladsl/ReplicatorCompileOnlyTest.scala b/akka-cluster-typed/src/test/scala/akka/cluster/ddata/typed/scaladsl/ReplicatorCompileOnlyTest.scala index 08bdc8df92..28efc66fe1 100644 --- a/akka-cluster-typed/src/test/scala/akka/cluster/ddata/typed/scaladsl/ReplicatorCompileOnlyTest.scala +++ 
b/akka-cluster-typed/src/test/scala/akka/cluster/ddata/typed/scaladsl/ReplicatorCompileOnlyTest.scala @@ -84,6 +84,7 @@ object ReplicatorCompileOnlyTest { case GetFailure(`key`) => case NotFound(`key`) => case GetDataDeleted(`key`) => + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } val updateResponse: UpdateResponse[GCounter] = ??? @@ -94,6 +95,7 @@ object ReplicatorCompileOnlyTest { case StoreFailure(`key`) => case UpdateFailure(`key`) => case UpdateDataDeleted(`key`) => + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } val deleteResponse: DeleteResponse[GCounter] = ??? @@ -101,17 +103,20 @@ object ReplicatorCompileOnlyTest { case DeleteSuccess(`key`) => case DeleteFailure(`key`) => case DataDeleted(`key`) => + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } val subscribeResponse: SubscribeResponse[GCounter] = ??? subscribeResponse match { case Changed(`key`) => case Deleted(`key`) => + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } val replicaCount: ReplicaCount = ??? 
replicaCount match { case ReplicaCount(_) => + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } } diff --git a/akka-cluster-typed/src/test/scala/akka/cluster/typed/ClusterSingletonApiSpec.scala b/akka-cluster-typed/src/test/scala/akka/cluster/typed/ClusterSingletonApiSpec.scala index 85c4904550..971e040ef5 100644 --- a/akka-cluster-typed/src/test/scala/akka/cluster/typed/ClusterSingletonApiSpec.scala +++ b/akka-cluster-typed/src/test/scala/akka/cluster/typed/ClusterSingletonApiSpec.scala @@ -29,21 +29,19 @@ object ClusterSingletonApiSpec { akka.cluster.jmx.multi-mbeans-in-same-jvm = on """) - trait PingProtocol + sealed trait PingProtocol case object Pong extends CborSerializable case class Ping(respondTo: ActorRef[Pong.type]) extends PingProtocol with CborSerializable case object Perish extends PingProtocol with CborSerializable - val pingPong = Behaviors.receive[PingProtocol] { (_, msg) => - msg match { - case Ping(respondTo) => - respondTo ! Pong - Behaviors.same + val pingPong = Behaviors.receiveMessage[PingProtocol] { + case Ping(respondTo) => + respondTo ! 
Pong + Behaviors.same - case Perish => - Behaviors.stopped - } + case Perish => + Behaviors.stopped } diff --git a/akka-cluster-typed/src/test/scala/akka/cluster/typed/GroupRouterSpec.scala b/akka-cluster-typed/src/test/scala/akka/cluster/typed/GroupRouterSpec.scala index 820d757c48..03cf862333 100644 --- a/akka-cluster-typed/src/test/scala/akka/cluster/typed/GroupRouterSpec.scala +++ b/akka-cluster-typed/src/test/scala/akka/cluster/typed/GroupRouterSpec.scala @@ -35,7 +35,7 @@ object GroupRouterSpec { case object Ping extends CborSerializable - trait Command + sealed trait Command case class UpdateWorker(actorRef: ActorRef[Ping.type]) extends Command case class GetWorkers(replyTo: ActorRef[Seq[ActorRef[Ping.type]]]) extends Command diff --git a/akka-cluster-typed/src/test/scala/akka/cluster/typed/RemoteDeployNotAllowedSpec.scala b/akka-cluster-typed/src/test/scala/akka/cluster/typed/RemoteDeployNotAllowedSpec.scala index 339560f066..af634564a6 100644 --- a/akka-cluster-typed/src/test/scala/akka/cluster/typed/RemoteDeployNotAllowedSpec.scala +++ b/akka-cluster-typed/src/test/scala/akka/cluster/typed/RemoteDeployNotAllowedSpec.scala @@ -82,6 +82,8 @@ class RemoteDeployNotAllowedSpec case ex: Exception => probe.ref ! 
ex } Behaviors.same + + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } } diff --git a/akka-cluster-typed/src/test/scala/akka/cluster/typed/internal/receptionist/ClusterReceptionistSpec.scala b/akka-cluster-typed/src/test/scala/akka/cluster/typed/internal/receptionist/ClusterReceptionistSpec.scala index 6a4623a46e..e44d460e07 100644 --- a/akka-cluster-typed/src/test/scala/akka/cluster/typed/internal/receptionist/ClusterReceptionistSpec.scala +++ b/akka-cluster-typed/src/test/scala/akka/cluster/typed/internal/receptionist/ClusterReceptionistSpec.scala @@ -56,19 +56,17 @@ object ClusterReceptionistSpec { """) case object Pong extends CborSerializable - trait PingProtocol + sealed trait PingProtocol case class Ping(respondTo: ActorRef[Pong.type]) extends PingProtocol with CborSerializable case object Perish extends PingProtocol with CborSerializable - val pingPongBehavior = Behaviors.receive[PingProtocol] { (_, msg) => - msg match { - case Ping(respondTo) => - respondTo ! Pong - Behaviors.same + val pingPongBehavior = Behaviors.receiveMessage[PingProtocol] { + case Ping(respondTo) => + respondTo ! Pong + Behaviors.same - case Perish => - Behaviors.stopped - } + case Perish => + Behaviors.stopped } val PingKey = ServiceKey[PingProtocol]("pingy") @@ -104,7 +102,10 @@ class ClusterReceptionistSpec extends AnyWordSpec with Matchers with LogCapturin testKit1.system.receptionist ! Register(PingKey, service, regProbe1.ref) regProbe1.expectMessage(Registered(PingKey, service)) - val PingKey.Listing(remoteServiceRefs) = regProbe2.expectMessageType[Listing] + val remoteServiceRefs = regProbe2.expectMessageType[Listing] match { + case PingKey.Listing(r) => r + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") + } val theRef = remoteServiceRefs.head theRef ! 
Ping(regProbe2.ref) regProbe2.expectMessage(Pong) @@ -136,7 +137,10 @@ class ClusterReceptionistSpec extends AnyWordSpec with Matchers with LogCapturin val clusterNode2 = Cluster(system2) clusterNode2.manager ! Join(clusterNode1.selfMember.address) - val PingKey.Listing(remoteServiceRefs) = regProbe2.expectMessageType[Listing](10.seconds) + val remoteServiceRefs = regProbe2.expectMessageType[Listing](10.seconds) match { + case PingKey.Listing(r) => r + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") + } remoteServiceRefs.head.path.address should ===(Cluster(system1).selfMember.address) } finally { testKit1.shutdownTestKit() @@ -392,11 +396,14 @@ class ClusterReceptionistSpec extends AnyWordSpec with Matchers with LogCapturin // we should get either empty message and then updated with the new incarnation actor // or just updated with the new service directly - val msg = regProbe1.fishForMessage(20.seconds) { + val msg = regProbe1.fishForMessagePF(20.seconds) { case PingKey.Listing(entries) if entries.size == 1 => FishingOutcome.Complete case _: Listing => FishingOutcome.ContinueAndIgnore } - val PingKey.Listing(entries) = msg.last + val entries = msg.last match { + case PingKey.Listing(e) => e + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") + } entries should have size 1 val ref = entries.head val service3RemotePath = RootActorPath(clusterNode3.selfMember.address) / "user" / "instance" @@ -557,7 +564,7 @@ class ClusterReceptionistSpec extends AnyWordSpec with Matchers with LogCapturin regProbe1.awaitAssert(regProbe1.expectMessage(Listing(TheKey, Set(actor1))), 5.seconds) system2.receptionist ! 
Subscribe(TheKey, regProbe2.ref) - regProbe2.fishForMessage(10.seconds) { + regProbe2.fishForMessagePF(10.seconds) { case TheKey.Listing(actors) if actors.nonEmpty => FishingOutcomes.complete case _ => FishingOutcomes.continue @@ -572,13 +579,13 @@ class ClusterReceptionistSpec extends AnyWordSpec with Matchers with LogCapturin system2.log.info("actor2 registered") // we should now, eventually, see the removal on both nodes - regProbe1.fishForMessage(10.seconds) { + regProbe1.fishForMessagePF(10.seconds) { case TheKey.Listing(actors) if actors.size == 1 => FishingOutcomes.complete case _ => FishingOutcomes.continue } - regProbe2.fishForMessage(10.seconds) { + regProbe2.fishForMessagePF(10.seconds) { case TheKey.Listing(actors) if actors.size == 1 => FishingOutcomes.complete case _ => @@ -747,14 +754,14 @@ class ClusterReceptionistSpec extends AnyWordSpec with Matchers with LogCapturin regProbe1.expectMessage(Deregistered(PingKey, service1)) regProbe2.expectMessage(Registered(PingKey, service2)) - regProbe2.fishForMessage(3.seconds) { + regProbe2.fishForMessagePF(3.seconds) { case PingKey.Listing(actors) if actors == Set(service2) => FishingOutcomes.complete case PingKey.Listing(actors) if actors.size == 2 => // we may see both actors before we see the removal FishingOutcomes.continueAndIgnore } - regProbe1.fishForMessage(3.seconds) { + regProbe1.fishForMessagePF(3.seconds) { case PingKey.Listing(actors) if actors.size == 1 => FishingOutcomes.complete case PingKey.Listing(actors) if actors.isEmpty => FishingOutcomes.continueAndIgnore } @@ -818,7 +825,7 @@ class ClusterReceptionistSpec extends AnyWordSpec with Matchers with LogCapturin // eventually, all should be included in the Listing (0 until numberOfNodes).foreach { i => - probes(i).fishForMessage(10.seconds, s"$i") { + probes(i).fishForMessagePF(10.seconds, s"$i") { case PingKey.Listing(actors) if actors.size == numberOfNodes => FishingOutcomes.complete case PingKey.Listing(_) => FishingOutcomes.continue } diff 
--git a/akka-cluster-typed/src/test/scala/docs/akka/cluster/ddata/typed/scaladsl/ReplicatorDocSpec.scala b/akka-cluster-typed/src/test/scala/docs/akka/cluster/ddata/typed/scaladsl/ReplicatorDocSpec.scala index 52736caa89..008f5f32d4 100644 --- a/akka-cluster-typed/src/test/scala/docs/akka/cluster/ddata/typed/scaladsl/ReplicatorDocSpec.scala +++ b/akka-cluster-typed/src/test/scala/docs/akka/cluster/ddata/typed/scaladsl/ReplicatorDocSpec.scala @@ -98,6 +98,10 @@ object ReplicatorDocSpec { case InternalSubscribeResponse(Replicator.Deleted(_)) => Behaviors.unhandled // no deletes + + case InternalSubscribeResponse(_) => // changed but wrong key + Behaviors.unhandled + } } } diff --git a/akka-cluster-typed/src/test/scala/docs/akka/cluster/typed/ReceptionistExample.scala b/akka-cluster-typed/src/test/scala/docs/akka/cluster/typed/ReceptionistExample.scala index 5246d48e1f..f78a1c128d 100644 --- a/akka-cluster-typed/src/test/scala/docs/akka/cluster/typed/ReceptionistExample.scala +++ b/akka-cluster-typed/src/test/scala/docs/akka/cluster/typed/ReceptionistExample.scala @@ -81,7 +81,7 @@ object PingPongExample { context.spawnAnonymous(PingService()) - Behaviors.receiveMessage { + Behaviors.receiveMessagePartial { case PingAll => context.system.receptionist ! 
Receptionist.Find(PingService.PingServiceKey, listingResponseAdapter) Behaviors.same diff --git a/akka-cluster-typed/src/test/scala/docs/akka/cluster/typed/SingletonCompileOnlySpec.scala b/akka-cluster-typed/src/test/scala/docs/akka/cluster/typed/SingletonCompileOnlySpec.scala index fc7af125bb..5117039f0d 100644 --- a/akka-cluster-typed/src/test/scala/docs/akka/cluster/typed/SingletonCompileOnlySpec.scala +++ b/akka-cluster-typed/src/test/scala/docs/akka/cluster/typed/SingletonCompileOnlySpec.scala @@ -16,7 +16,7 @@ object SingletonCompileOnlySpec { //#counter object Counter { - trait Command + sealed trait Command case object Increment extends Command final case class GetValue(replyTo: ActorRef[Int]) extends Command case object GoodByeCounter extends Command diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterReadView.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterReadView.scala index 97d99ed8c7..12f8edf7b2 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterReadView.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterReadView.scala @@ -90,6 +90,8 @@ private[akka] class ClusterReadView(cluster: Cluster) extends Closeable { _state = _state.withUnreachableDataCenters(_state.unreachableDataCenters + r.dataCenter) case MemberTombstonesChanged(tombstones) => _state = _state.withMemberTombstones(tombstones) + case unexpected => + throw new IllegalArgumentException(s"Unexpected cluster event type ${unexpected.getClass}") // compiler exhaustiveness check pleaser } e match { diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterSettings.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterSettings.scala index 639769f12c..6cc651a337 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterSettings.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterSettings.scala @@ -77,7 +77,10 @@ final class ClusterSettings(val config: Config, val systemName: String) { } val SeedNodes: immutable.IndexedSeq[Address] = - 
immutableSeq(cc.getStringList("seed-nodes")).map { case AddressFromURIString(address) => address }.toVector + immutableSeq(cc.getStringList("seed-nodes")).map { + case AddressFromURIString(address) => address + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser + }.toVector val SeedNodeTimeout: FiniteDuration = cc.getMillisDuration("seed-node-timeout") val RetryUnsuccessfulJoinAfter: Duration = { val key = "retry-unsuccessful-join-after" diff --git a/akka-cluster/src/main/scala/akka/cluster/routing/ClusterRouterConfig.scala b/akka-cluster/src/main/scala/akka/cluster/routing/ClusterRouterConfig.scala index d28dd5567a..54ff70757f 100644 --- a/akka-cluster/src/main/scala/akka/cluster/routing/ClusterRouterConfig.scala +++ b/akka-cluster/src/main/scala/akka/cluster/routing/ClusterRouterConfig.scala @@ -542,6 +542,7 @@ private[akka] trait ClusterRouterActor { this: RouterActor => val address = routee match { case ActorRefRoutee(ref) => ref.path.address case ActorSelectionRoutee(sel) => sel.anchor.path.address + case unknown => throw new IllegalArgumentException(s"Unsupported routee type: ${unknown.getClass}") } address match { case Address(_, _, None, None) => cluster.selfAddress diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/LargeMessageClusterSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/LargeMessageClusterSpec.scala index 88240d0e84..1651bd8eaf 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/LargeMessageClusterSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/LargeMessageClusterSpec.scala @@ -5,9 +5,7 @@ package akka.cluster import scala.concurrent.duration._ - import com.typesafe.config.ConfigFactory - import akka.actor.ActorIdentity import akka.actor.ActorRef import akka.actor.ExtendedActorSystem @@ -22,6 +20,8 @@ import akka.serialization.SerializerWithStringManifest import akka.testkit._ import akka.util.unused +import java.io.NotSerializableException + object 
LargeMessageClusterMultiJvmSpec extends MultiNodeConfig { val first = role("first") val second = role("second") @@ -72,6 +72,7 @@ object LargeMessageClusterMultiJvmSpec extends MultiNodeConfig { // simulate slow serialization to not completely overload the machine/network, see issue #24576 Thread.sleep(100) payload + case _ => throw new NotSerializableException() } override def fromBinary(bytes: Array[Byte], manifest: String) = { Slow(bytes) diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderElectionSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderElectionSpec.scala index 6216f84240..fa65182ec8 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderElectionSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/LeaderElectionSpec.scala @@ -108,6 +108,8 @@ abstract class LeaderElectionSpec(multiNodeConfig: LeaderElectionMultiNodeConfig enterBarrier("completed" + n) + case _ => + fail() } } diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterConsistentHashingRouterSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterConsistentHashingRouterSpec.scala index fc7aaab0f8..708ecfbebc 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterConsistentHashingRouterSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterConsistentHashingRouterSpec.scala @@ -91,7 +91,7 @@ abstract class ClusterConsistentHashingRouterSpec // it may take some time until router receives cluster member events awaitAssert { currentRoutees(router1).size should ===(4) } val routees = currentRoutees(router1) - routees.map { case ActorRefRoutee(ref) => fullAddress(ref) }.toSet should ===( + routees.collect { case ActorRefRoutee(ref) => fullAddress(ref) }.toSet should ===( Set(address(first), address(second))) } enterBarrier("after-2") @@ -115,7 +115,7 @@ abstract class ClusterConsistentHashingRouterSpec // it may take some time until router receives cluster member events 
awaitAssert { currentRoutees(router1).size should ===(6) } val routees = currentRoutees(router1) - routees.map { case ActorRefRoutee(ref) => fullAddress(ref) }.toSet should ===(roles.map(address).toSet) + routees.collect { case ActorRefRoutee(ref) => fullAddress(ref) }.toSet should ===(roles.map(address).toSet) } enterBarrier("after-3") @@ -132,7 +132,7 @@ abstract class ClusterConsistentHashingRouterSpec // it may take some time until router receives cluster member events awaitAssert { currentRoutees(router2).size should ===(6) } val routees = currentRoutees(router2) - routees.map { case ActorRefRoutee(ref) => fullAddress(ref) }.toSet should ===(roles.map(address).toSet) + routees.collect { case ActorRefRoutee(ref) => fullAddress(ref) }.toSet should ===(roles.map(address).toSet) } enterBarrier("after-4") @@ -180,7 +180,7 @@ abstract class ClusterConsistentHashingRouterSpec // it may take some time until router receives cluster member events awaitAssert { currentRoutees(router).size should ===(6) } val routees = currentRoutees(router) - routees.map { case ActorRefRoutee(ref) => fullAddress(ref) }.toSet should ===(roles.map(address).toSet) + routees.collect { case ActorRefRoutee(ref) => fullAddress(ref) }.toSet should ===(roles.map(address).toSet) router ! 
"a" val destinationA = expectMsgType[ActorRef] diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterRoundRobinSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterRoundRobinSpec.scala index da03f0866a..6aaa62deb2 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterRoundRobinSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/ClusterRoundRobinSpec.scala @@ -311,7 +311,7 @@ abstract class ClusterRoundRobinSpec // note that router2 has totalInstances = 3, maxInstancesPerNode = 1 val routees = currentRoutees(router2) - val routeeAddresses = routees.map { case ActorRefRoutee(ref) => fullAddress(ref) } + val routeeAddresses = routees.collect { case ActorRefRoutee(ref) => fullAddress(ref) } routeeAddresses.size should ===(3) replies.values.sum should ===(iterationCount) @@ -325,7 +325,7 @@ abstract class ClusterRoundRobinSpec // myservice is already running def routees = currentRoutees(router4) - def routeeAddresses = routees.map { case ActorSelectionRoutee(sel) => fullAddress(sel.anchor) }.toSet + def routeeAddresses = routees.collect { case ActorSelectionRoutee(sel) => fullAddress(sel.anchor) }.toSet runOn(first) { // 4 nodes, 2 routees on each node @@ -350,9 +350,12 @@ abstract class ClusterRoundRobinSpec runOn(first) { def routees = currentRoutees(router2) - def routeeAddresses = routees.map { case ActorRefRoutee(ref) => fullAddress(ref) }.toSet + def routeeAddresses = routees.collect { case ActorRefRoutee(ref) => fullAddress(ref) }.toSet - routees.foreach { case ActorRefRoutee(ref) => watch(ref) } + routees.foreach { + case ActorRefRoutee(ref) => watch(ref) + case _ => + } val notUsedAddress = roles.map(address).toSet.diff(routeeAddresses).head val downAddress = routeeAddresses.find(_ != address(first)).get val downRouteeRef = routees.collectFirst { diff --git a/akka-cluster/src/test/scala/akka/cluster/ClusterHeartbeatSenderStateSpec.scala 
b/akka-cluster/src/test/scala/akka/cluster/ClusterHeartbeatSenderStateSpec.scala index 2f57dd2e74..3b07bf8f35 100644 --- a/akka-cluster/src/test/scala/akka/cluster/ClusterHeartbeatSenderStateSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/ClusterHeartbeatSenderStateSpec.scala @@ -16,7 +16,7 @@ import akka.remote.FailureDetector object ClusterHeartbeatSenderStateSpec { class FailureDetectorStub extends FailureDetector { - trait Status + sealed trait Status object Up extends Status object Down extends Status object Unknown extends Status @@ -220,6 +220,7 @@ class ClusterHeartbeatSenderStateSpec extends AnyWordSpec with Matchers { } + case _ => throw new RuntimeException() } } catch { case e: Throwable => diff --git a/akka-cluster/src/test/scala/akka/cluster/FailureDetectorPuppet.scala b/akka-cluster/src/test/scala/akka/cluster/FailureDetectorPuppet.scala index 754a51ca2b..af95b32042 100644 --- a/akka-cluster/src/test/scala/akka/cluster/FailureDetectorPuppet.scala +++ b/akka-cluster/src/test/scala/akka/cluster/FailureDetectorPuppet.scala @@ -17,7 +17,7 @@ import akka.util.unused */ class FailureDetectorPuppet(@unused config: Config, @unused ev: EventStream) extends FailureDetector { - trait Status + sealed trait Status object Up extends Status object Down extends Status object Unknown extends Status @@ -31,6 +31,7 @@ class FailureDetectorPuppet(@unused config: Config, @unused ev: EventStream) ext override def isAvailable: Boolean = status.get match { case Unknown | Up => true case Down => false + } override def isMonitoring: Boolean = status.get != Unknown diff --git a/akka-discovery/src/main/scala/akka/discovery/dns/DnsServiceDiscovery.scala b/akka-discovery/src/main/scala/akka/discovery/dns/DnsServiceDiscovery.scala index 05bc394769..1d600cd5a3 100644 --- a/akka-discovery/src/main/scala/akka/discovery/dns/DnsServiceDiscovery.scala +++ b/akka-discovery/src/main/scala/akka/discovery/dns/DnsServiceDiscovery.scala @@ -140,7 +140,7 @@ private[akka] class 
DnsServiceDiscovery(system: ExtendedActorSystem) extends Ser } case OptionVal.None => askResolve() - + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") // OptionVal exhaustiveness problem } } @@ -187,7 +187,7 @@ private[akka] class DnsServiceDiscovery(system: ExtendedActorSystem) extends Ser } case OptionVal.None => askResolve() - + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") // OptionVal exhaustiveness problem } } diff --git a/akka-distributed-data/src/main/scala/akka/cluster/ddata/DurableStore.scala b/akka-distributed-data/src/main/scala/akka/cluster/ddata/DurableStore.scala index 8e095c7d09..0a5229e692 100644 --- a/akka-distributed-data/src/main/scala/akka/cluster/ddata/DurableStore.scala +++ b/akka-distributed-data/src/main/scala/akka/cluster/ddata/DurableStore.scala @@ -137,7 +137,7 @@ final class LmdbDurableStore(config: Config) extends Actor with ActorLogging { private def lmdb(): Lmdb = _lmdb match { case OptionVal.Some(l) => l - case OptionVal.None => + case _ => val t0 = System.nanoTime() log.info("Using durable data in LMDB directory [{}]", dir.getCanonicalPath) val env = { @@ -274,8 +274,8 @@ final class LmdbDurableStore(config: Config) extends Actor with ActorLogging { l.keyBuffer.put(key.getBytes(ByteString.UTF_8)).flip() l.valueBuffer.put(value).flip() tx match { - case OptionVal.None => l.db.put(l.keyBuffer, l.valueBuffer) case OptionVal.Some(t) => l.db.put(t, l.keyBuffer, l.valueBuffer) + case _ => l.db.put(l.keyBuffer, l.valueBuffer) } } finally { val l = lmdb() diff --git a/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorChaosSpec.scala b/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorChaosSpec.scala index cacfcae074..9528feddb2 100644 --- a/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorChaosSpec.scala +++ b/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorChaosSpec.scala @@ -77,6 +77,7 @@ class 
ReplicatorChaosSpec extends MultiNodeSpec(ReplicatorChaosSpec) with STMult case c: PNCounter => c.value case c: GSet[_] => c.elements case c: ORSet[_] => c.elements + case _ => fail() } } value should be(expected) diff --git a/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorORSetDeltaSpec.scala b/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorORSetDeltaSpec.scala index 376ff3d5a4..81092efff4 100644 --- a/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorORSetDeltaSpec.scala +++ b/akka-distributed-data/src/multi-jvm/scala/akka/cluster/ddata/ReplicatorORSetDeltaSpec.scala @@ -67,6 +67,7 @@ class ReplicatorORSetDeltaSpec case g @ GetSuccess(`key`, _) => g.dataValue match { case c: ORSet[_] => c.elements + case _ => fail() } } value should be(expected) diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/GCounterSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/GCounterSpec.scala index a4382b1f48..0207e27ef1 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/GCounterSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/GCounterSpec.scala @@ -190,13 +190,19 @@ class GCounterSpec extends AnyWordSpec with Matchers { "have unapply extractor" in { val c1 = GCounter.empty.increment(node1).increment(node2) - val GCounter(value1) = c1 + val value1 = c1 match { + case GCounter(value1) => value1 + case _ => fail() + } val value2: BigInt = value1 value2 should be(2L) Changed(GCounterKey("key"))(c1) match { case c @ Changed(GCounterKey("key")) => - val GCounter(value3) = c.dataValue + val value3 = c.dataValue match { + case GCounter(value3) => value3 + case _ => fail() + } val value4: BigInt = value3 value4 should be(2L) case _ => diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/LWWMapSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/LWWMapSpec.scala index 2c1786dd44..f05dd9d7a5 100644 --- 
a/akka-distributed-data/src/test/scala/akka/cluster/ddata/LWWMapSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/LWWMapSpec.scala @@ -71,13 +71,19 @@ class LWWMapSpec extends AnyWordSpec with Matchers { "have unapply extractor" in { val m1 = LWWMap.empty[String, Long].put(node1, "a", 1L, defaultClock[Long]) - val LWWMap(entries1) = m1 + val entries1 = m1 match { + case LWWMap(entries1) => entries1 + case _ => fail() + } val entries2: Map[String, Long] = entries1 entries2 should be(Map("a" -> 1L)) Changed(LWWMapKey[String, Long]("key"))(m1) match { case c @ Changed(LWWMapKey("key")) => - val LWWMap(entries3) = c.dataValue + val entries3 = c.dataValue match { + case LWWMap(entries3) => entries3 + case _ => fail() + } val entries4: Map[String, Long] = entries3 entries4 should be(Map("a" -> 1L)) case changed => diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/LWWRegisterSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/LWWRegisterSpec.scala index abf900f889..992a6bb870 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/LWWRegisterSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/LWWRegisterSpec.scala @@ -70,13 +70,19 @@ class LWWRegisterSpec extends AnyWordSpec with Matchers { "have unapply extractor" in { val r1 = LWWRegister(node1, "a", defaultClock[String]) - val LWWRegister(value1) = r1 + val value1 = r1 match { + case LWWRegister(value1) => value1 + case _ => fail() + } val value2: String = value1 value2 should be("a") Changed(LWWRegisterKey[String]("key"))(r1) match { case c @ Changed(LWWRegisterKey("key")) => - val LWWRegister(value3) = c.dataValue + val value3 = c.dataValue match { + case LWWRegister(value3) => value3 + case _ => fail() + } val value4: String = value3 value4 should be("a") case changed => diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/LocalConcurrencySpec.scala 
b/akka-distributed-data/src/test/scala/akka/cluster/ddata/LocalConcurrencySpec.scala index b14d5e1059..139a444d9e 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/LocalConcurrencySpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/LocalConcurrencySpec.scala @@ -77,7 +77,10 @@ class LocalConcurrencySpec(_system: ActorSystem) val expected = ((1 to numMessages).map("a" + _) ++ (1 to numMessages).map("b" + _)).toSet awaitAssert { replicator ! Replicator.Get(Updater.key, Replicator.ReadLocal) - val ORSet(elements) = expectMsgType[Replicator.GetSuccess[_]].get(Updater.key) + val elements = expectMsgType[Replicator.GetSuccess[_]].get(Updater.key) match { + case ORSet(e) => e + case _ => fail() + } elements should be(expected) } diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/LotsOfDataBot.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/LotsOfDataBot.scala index dd83419b70..ac1cc4b861 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/LotsOfDataBot.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/LotsOfDataBot.scala @@ -115,7 +115,10 @@ class LotsOfDataBot extends Actor with ActorLogging { case _: UpdateResponse[_] => // ignore case c @ Changed(ORSetKey(id)) => - val ORSet(elements) = c.dataValue + val elements = c.dataValue match { + case ORSet(e) => e + case _ => throw new RuntimeException() + } log.info("Current elements: {} -> {}", id, elements) } @@ -130,7 +133,10 @@ class LotsOfDataBot extends Actor with ActorLogging { log.info("It took {} ms to replicate {} entries", duration, keys.size) } case c @ Changed(ORSetKey(id)) => - val ORSet(elements) = c.dataValue + val elements = c.dataValue match { + case ORSet(e) => e + case _ => throw new RuntimeException() + } log.info("Current elements: {} -> {}", id, elements) } diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORMapSpec.scala 
b/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORMapSpec.scala index a12b8600dd..3fdb9f40fe 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORMapSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORMapSpec.scala @@ -21,13 +21,22 @@ class ORMapSpec extends AnyWordSpec with Matchers { "be able to add entries" in { val m = ORMap().put(node1, "a", GSet() + "A").put(node1, "b", GSet() + "B") - val GSet(a) = m.entries("a") + val a = m.entries("a") match { + case GSet(a) => a + case _ => fail() + } a should be(Set("A")) - val GSet(b) = m.entries("b") + val b = m.entries("b") match { + case GSet(b) => b + case _ => fail() + } b should be(Set("B")) val m2 = m.put(node1, "a", GSet() + "C") - val GSet(a2) = m2.entries("a") + val a2 = m2.entries("a") match { + case GSet(a2) => a2 + case _ => fail() + } a2 should be(Set("C")) } @@ -38,13 +47,22 @@ class ORMapSpec extends AnyWordSpec with Matchers { val m1 = ORMap().mergeDelta(md) - val GSet(a) = m1.entries("a") + val a = m1.entries("a") match { + case GSet(a) => a + case _ => fail() + } a should be(Set("A")) - val GSet(b) = m1.entries("b") + val b = m1.entries("b") match { + case GSet(b) => b + case _ => fail() + } b should be(Set("B")) val m2 = m1.put(node1, "a", GSet() + "C") - val GSet(a2) = m2.entries("a") + val a2 = m2.entries("a") match { + case GSet(a2) => a2 + case _ => fail() + } a2 should be(Set("C")) } @@ -110,26 +128,44 @@ class ORMapSpec extends AnyWordSpec with Matchers { // merge both ways val merged1 = m1.merge(m2) merged1.entries.keySet should contain("a") - val GSet(a1) = merged1.entries("a") + val a1 = merged1.entries("a") match { + case GSet(a1) => a1 + case _ => fail() + } a1 should be(Set("A2")) merged1.entries.keySet should contain("b") - val GSet(b1) = merged1.entries("b") + val b1 = merged1.entries("b") match { + case GSet(b1) => b1 + case _ => fail() + } b1 should be(Set("B1")) merged1.entries.keySet should contain("c") merged1.entries.keySet should 
contain("d") - val GSet(d1) = merged1.entries("d") + val d1 = merged1.entries("d") match { + case GSet(d1) => d1 + case _ => fail() + } d1 should be(Set("D1", "D2")) val merged2 = m2.merge(m1) merged2.entries.keySet should contain("a") - val GSet(a2) = merged1.entries("a") + val a2 = merged1.entries("a") match { + case GSet(a2) => a2 + case _ => fail() + } a2 should be(Set("A2")) merged2.entries.keySet should contain("b") - val GSet(b2) = merged2.entries("b") + val b2 = merged2.entries("b") match { + case GSet(b2) => b2 + case _ => fail() + } b2 should be(Set("B1")) merged2.entries.keySet should contain("c") merged2.entries.keySet should contain("d") - val GSet(d2) = merged2.entries("d") + val d2 = merged2.entries("d") match { + case GSet(d2) => d2 + case _ => fail() + } d2 should be(Set("D1", "D2")) } @@ -553,7 +589,10 @@ class ORMapSpec extends AnyWordSpec with Matchers { val m1 = ORMap.empty.put(node1, "a", GSet.empty + "A") val m2 = m1.resetDelta.updated(node1, "a", GSet.empty[String])(_.add("B")) val m3 = ORMap().mergeDelta(m1.delta.get).mergeDelta(m2.delta.get) - val GSet(d3) = m3.entries("a") + val d3 = m3.entries("a") match { + case GSet(d3) => d3 + case _ => fail() + } d3 should be(Set("A", "B")) } @@ -562,7 +601,10 @@ class ORMapSpec extends AnyWordSpec with Matchers { val m2 = m1.resetDelta.updated(node1, "a", ORSet.empty[String])(_.add(node1, "B")) val m3 = ORMap().mergeDelta(m1.delta.get).mergeDelta(m2.delta.get) - val ORSet(d3) = m3.entries("a") + val d3 = m3.entries("a") match { + case ORSet(d3) => d3 + case _ => fail() + } d3 should be(Set("A", "B")) } @@ -572,7 +614,10 @@ class ORMapSpec extends AnyWordSpec with Matchers { .updated(node1, "a", GSet.empty[String])(_.add("B")) .updated(node1, "a", GSet.empty[String])(_.add("C")) val m3 = ORMap().mergeDelta(m1.delta.get).mergeDelta(m2.delta.get) - val GSet(d3) = m3.entries("a") + val d3 = m3.entries("a") match { + case GSet(d3) => d3 + case _ => fail() + } d3 should be(Set("A", "B", "C")) } @@ -581,7 
+626,10 @@ class ORMapSpec extends AnyWordSpec with Matchers { val m2 = m1.resetDelta.updated(node1, "a", GCounter.empty)(_.increment(node1, 10)) val m3 = m2.resetDelta.updated(node2, "a", GCounter.empty)(_.increment(node2, 10)) val m4 = ORMap().mergeDelta(m1.delta.get).mergeDelta(m2.delta.get).mergeDelta(m3.delta.get) - val GCounter(num) = m4.entries("a") + val num = m4.entries("a") match { + case GCounter(num) => num + case _ => fail() + } num should ===(20) } @@ -590,7 +638,10 @@ class ORMapSpec extends AnyWordSpec with Matchers { val m2 = m1.resetDelta.updated(node1, "a", PNCounter.empty)(_.increment(node1, 10)) val m3 = m2.resetDelta.updated(node2, "a", PNCounter.empty)(_.decrement(node2, 10)) val m4 = ORMap().mergeDelta(m1.delta.get).mergeDelta(m2.delta.get).mergeDelta(m3.delta.get) - val PNCounter(num) = m4.entries("a") + val num = m4.entries("a") match { + case PNCounter(num) => num + case _ => fail() + } num should ===(0) } @@ -598,7 +649,10 @@ class ORMapSpec extends AnyWordSpec with Matchers { val m1 = ORMap.empty.put(node1, "a", Flag(false)) val m2 = m1.resetDelta.updated(node1, "a", Flag.Disabled)(_.switchOn) val m3 = ORMap().mergeDelta(m1.delta.get).mergeDelta(m2.delta.get) - val Flag(d3) = m3.entries("a") + val d3 = m3.entries("a") match { + case Flag(d3) => d3 + case _ => fail() + } d3 should be(true) } @@ -689,13 +743,19 @@ class ORMapSpec extends AnyWordSpec with Matchers { "have unapply extractor" in { val m1 = ORMap.empty.put(node1, "a", Flag(true)).put(node2, "b", Flag(false)) val _: ORMap[String, Flag] = m1 - val ORMap(entries1) = m1 + val entries1 = m1 match { + case ORMap(entries1) => entries1 + case _ => fail() + } val entries2: Map[String, Flag] = entries1 entries2 should be(Map("a" -> Flag(true), "b" -> Flag(false))) Changed(ORMapKey[String, Flag]("key"))(m1) match { case c @ Changed(ORMapKey("key")) => - val ORMap(entries3) = c.dataValue + val entries3 = c.dataValue match { + case ORMap(entries3) => entries3 + case _ => fail() + } val 
entries4: Map[String, ReplicatedData] = entries3 entries4 should be(Map("a" -> Flag(true), "b" -> Flag(false))) case changed => diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORMultiMapSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORMultiMapSpec.scala index e3544e4654..cfbd15baa0 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORMultiMapSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORMultiMapSpec.scala @@ -555,13 +555,19 @@ class ORMultiMapSpec extends AnyWordSpec with Matchers { "have unapply extractor" in { val m1 = ORMultiMap.empty.put(node1, "a", Set(1L, 2L)).put(node2, "b", Set(3L)) val _: ORMultiMap[String, Long] = m1 - val ORMultiMap(entries1) = m1 + val entries1 = m1 match { + case ORMultiMap(entries1) => entries1 + case _ => fail() + } val entries2: Map[String, Set[Long]] = entries1 entries2 should be(Map("a" -> Set(1L, 2L), "b" -> Set(3L))) Changed(ORMultiMapKey[String, Long]("key"))(m1) match { case c @ Changed(ORMultiMapKey("key")) => - val ORMultiMap(entries3) = c.dataValue + val entries3 = c.dataValue match { + case ORMultiMap(entries3: Map[String, Set[Long]]) => entries3 + case _ => fail() + } val entries4: Map[String, Set[Long]] = entries3 entries4 should be(Map("a" -> Set(1L, 2L), "b" -> Set(3L))) case changed => diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORSetSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORSetSpec.scala index 7147f2aee4..c8c347e9d1 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORSetSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/ORSetSpec.scala @@ -450,7 +450,10 @@ class ORSetSpec extends AnyWordSpec with Matchers { val merged2 = node3_1.mergeDelta(delta2_2) - val ORSet(mg2) = merged2 + val mg2 = merged2 match { + case ORSet(mg2) => mg2 + case _ => fail() + } mg2 should be(Set("x")) // !!! 
} @@ -602,14 +605,20 @@ class ORSetSpec extends AnyWordSpec with Matchers { "have unapply extractor" in { val s1 = ORSet.empty.add(node1, "a").add(node2, "b") val _: ORSet[String] = s1 - val ORSet(elements1) = s1 // `unapply[A](s: ORSet[A])` is used here + val elements1 = s1 match { // `unapply[A](s: ORSet[A])` is used here + case ORSet(elements1) => elements1 + case _ => fail() + } val elements2: Set[String] = elements1 elements2 should be(Set("a", "b")) Changed(ORSetKey[String]("key"))(s1) match { case c @ Changed(ORSetKey("key")) => val _: ORSet[String] = c.dataValue - val ORSet(elements3) = c.dataValue + val elements3 = c.dataValue match { + case ORSet(elements3) => elements3 + case _ => fail() + } val elements4: Set[String] = elements3 elements4 should be(Set("a", "b")) case changed => @@ -619,10 +628,15 @@ class ORSetSpec extends AnyWordSpec with Matchers { val msg: Any = Changed(ORSetKey[String]("key"))(s1) msg match { case c @ Changed(ORSetKey("key")) => - val ORSet(elements3) = c.dataValue // `unapply(a: ReplicatedData)` is used here + // FIXME we need to look into this for Scala 2.13.5 + // val ORSet(elements3) = c.dataValue // `unapply(a: ReplicatedData)` is used here // if `unapply(a: ReplicatedData)` isn't defined the next line doesn't compile: // type mismatch; found : scala.collection.immutable.Set[A] where type A required: Set[Any] Note: A <: Any, // but trait Set is invariant in type A. You may wish to investigate a wildcard type such as _ <: Any. 
(SLS 3.2.10) + val elements3 = c.dataValue match { + case ORSet(elements3) => elements3 + case _ => fail() + } val elements4: Set[Any] = elements3 elements4 should be(Set("a", "b")) case changed => diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/PNCounterMapSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/PNCounterMapSpec.scala index 37cfe6d6b2..90e7893879 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/PNCounterMapSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/PNCounterMapSpec.scala @@ -73,13 +73,19 @@ class PNCounterMapSpec extends AnyWordSpec with Matchers { "have unapply extractor" in { val m1 = PNCounterMap.empty.increment(node1, "a", 1).increment(node2, "b", 2) - val PNCounterMap(entries1) = m1 + val entries1 = m1 match { + case PNCounterMap(entries1) => entries1 + case _ => throw new RuntimeException() + } val entries2: Map[String, BigInt] = entries1 entries2 should be(Map("a" -> 1L, "b" -> 2L)) Changed(PNCounterMapKey[String]("key"))(m1) match { case c @ Changed(PNCounterMapKey("key")) => - val PNCounterMap(entries3) = c.dataValue + val entries3 = c.dataValue match { + case PNCounterMap(entries3) => entries3 + case _ => throw new RuntimeException() + } val entries4: Map[String, BigInt] = entries3 entries4 should be(Map("a" -> 1L, "b" -> 2L)) case _ => diff --git a/akka-distributed-data/src/test/scala/akka/cluster/ddata/PNCounterSpec.scala b/akka-distributed-data/src/test/scala/akka/cluster/ddata/PNCounterSpec.scala index 13f99e927c..0c183537f2 100644 --- a/akka-distributed-data/src/test/scala/akka/cluster/ddata/PNCounterSpec.scala +++ b/akka-distributed-data/src/test/scala/akka/cluster/ddata/PNCounterSpec.scala @@ -191,13 +191,13 @@ class PNCounterSpec extends AnyWordSpec with Matchers { "have unapply extractor" in { val c1 = PNCounter.empty.increment(node1).increment(node1).decrement(node2) - val PNCounter(value1) = c1 + val value1 = c1.value val value2: BigInt = value1 
value2 should be(1L) Changed(PNCounterKey("key"))(c1) match { case c @ Changed(PNCounterKey("key")) => - val PNCounter(value3) = c.dataValue + val value3 = c.dataValue.value val value4: BigInt = value3 value4 should be(1L) case changed => diff --git a/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Conductor.scala b/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Conductor.scala index cf950e5cfe..3a3e45348a 100644 --- a/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Conductor.scala +++ b/akka-multi-node-testkit/src/main/scala/akka/remote/testconductor/Conductor.scala @@ -88,7 +88,7 @@ trait Conductor { this: TestConductorExt => _controller = system.systemActorOf(Props(classOf[Controller], participants, controllerPort), "controller") import Settings.BarrierTimeout import system.dispatcher - (controller ? GetSockAddr).flatMap { + (controller ? GetSockAddr).mapTo[InetSocketAddress].flatMap { case sockAddr: InetSocketAddress => startClient(name, sockAddr).map(_ => sockAddr) } } @@ -464,6 +464,7 @@ private[akka] class Controller(private var initialParticipants: Int, controllerP case CreateServerFSM(channel) => val (ip, port) = channel.getRemoteAddress match { case s: InetSocketAddress => (s.getAddress.getHostAddress, s.getPort) + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } val name = ip + ":" + port + "-server" + generation.next() sender() ! 
context.actorOf(Props(classOf[ServerFSM], self, channel).withDeploy(Deploy.local), name) diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/AllPersistenceIdsStage.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/AllPersistenceIdsStage.scala index f4c5c465a1..283575248a 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/AllPersistenceIdsStage.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/AllPersistenceIdsStage.scala @@ -55,6 +55,8 @@ final private[akka] class AllPersistenceIdsStage(liveQuery: Boolean, writeJourna buffer(persistenceId) deliverBuf(out) } + + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } } diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByPersistenceIdStage.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByPersistenceIdStage.scala index c614c47ef0..26ad262778 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByPersistenceIdStage.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByPersistenceIdStage.scala @@ -133,6 +133,8 @@ final private[akka] class EventsByPersistenceIdStage( case EventAppended(_) => requestMore() + + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } } diff --git a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByTagStage.scala b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByTagStage.scala index e8b5b29dc2..8598019217 100644 --- a/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByTagStage.scala +++ b/akka-persistence-query/src/main/scala/akka/persistence/query/journal/leveldb/EventsByTagStage.scala @@ -132,6 +132,8 @@ final 
private[leveldb] class EventsByTagStage( case TaggedEventAppended(_) => requestMore() + + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } } diff --git a/akka-persistence-shared/src/test/scala/akka/persistence/serialization/SerializerSpec.scala b/akka-persistence-shared/src/test/scala/akka/persistence/serialization/SerializerSpec.scala index 01ef5b951a..65ac753b75 100644 --- a/akka-persistence-shared/src/test/scala/akka/persistence/serialization/SerializerSpec.scala +++ b/akka-persistence-shared/src/test/scala/akka/persistence/serialization/SerializerSpec.scala @@ -313,6 +313,7 @@ object MessageSerializerRemotingSpec { case a: AtomicWrite => a.payload.foreach { case p @ PersistentRepr(MyPayload(data), _) => p.sender ! s"p${data}" + case x => throw new RuntimeException(s"Unexpected payload: $x") } } } @@ -383,6 +384,7 @@ class MyPayloadSerializer extends Serializer { def toBinary(o: AnyRef): Array[Byte] = o match { case MyPayload(data) => s".${data}".getBytes(UTF_8) + case x => throw new NotSerializableException(s"Unexpected object: $x") } def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = manifest match { @@ -404,6 +406,7 @@ class MyPayload2Serializer extends SerializerWithStringManifest { def toBinary(o: AnyRef): Array[Byte] = o match { case MyPayload2(data, n) => s".$data:$n".getBytes(UTF_8) + case x => throw new NotSerializableException(s"Unexpected object: $x") } def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = manifest match { @@ -425,6 +428,7 @@ class MySnapshotSerializer extends Serializer { def toBinary(o: AnyRef): Array[Byte] = o match { case MySnapshot(data) => s".${data}".getBytes(UTF_8) + case x => throw new NotSerializableException(s"Unexpected object: $x") } def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = manifest match { @@ -444,6 +448,7 @@ class MySnapshotSerializer2 extends SerializerWithStringManifest { def toBinary(o: AnyRef): Array[Byte] = o match { 
case MySnapshot2(data) => s".${data}".getBytes(UTF_8) + case unexpected => throw new NotSerializableException(s"Unexpected: $unexpected") } def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = manifest match { @@ -466,6 +471,7 @@ class OldPayloadSerializer extends SerializerWithStringManifest { case MyPayload(data) => s".${data}".getBytes(UTF_8) case old if old.getClass.getName == OldPayloadClassName => o.toString.getBytes(UTF_8) + case x => throw new NotSerializableException(s"Unexpected object: $x") } def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = manifest match { diff --git a/akka-persistence-tck/src/main/scala/akka/persistence/TestSerializer.scala b/akka-persistence-tck/src/main/scala/akka/persistence/TestSerializer.scala index 3b0a575bf7..bd4e8b0370 100644 --- a/akka-persistence-tck/src/main/scala/akka/persistence/TestSerializer.scala +++ b/akka-persistence-tck/src/main/scala/akka/persistence/TestSerializer.scala @@ -5,24 +5,27 @@ package akka.persistence import java.nio.charset.StandardCharsets - import akka.actor.ActorRef import akka.actor.ExtendedActorSystem import akka.serialization.Serialization import akka.serialization.SerializerWithStringManifest +import java.io.NotSerializableException + final case class TestPayload(ref: ActorRef) class TestSerializer(system: ExtendedActorSystem) extends SerializerWithStringManifest { def identifier: Int = 666 def manifest(o: AnyRef): String = o match { case _: TestPayload => "A" + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } def toBinary(o: AnyRef): Array[Byte] = o match { case TestPayload(ref) => verifyTransportInfo() val refStr = Serialization.serializedActorPath(ref) refStr.getBytes(StandardCharsets.UTF_8) + case _ => throw new NotSerializableException() // compiler exhaustiveness check pleaser } def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = { verifyTransportInfo() @@ -31,6 +34,7 @@ class TestSerializer(system: ExtendedActorSystem) 
extends SerializerWithStringMa val refStr = new String(bytes, StandardCharsets.UTF_8) val ref = system.provider.resolveActorRef(refStr) TestPayload(ref) + case _ => throw new NotSerializableException() // compiler exhaustiveness check pleaser } } diff --git a/akka-persistence-testkit/src/main/scala/akka/persistence/testkit/internal/SerializedEventStorageImpl.scala b/akka-persistence-testkit/src/main/scala/akka/persistence/testkit/internal/SerializedEventStorageImpl.scala index 2fd363da61..c427ed9139 100644 --- a/akka-persistence-testkit/src/main/scala/akka/persistence/testkit/internal/SerializedEventStorageImpl.scala +++ b/akka-persistence-testkit/src/main/scala/akka/persistence/testkit/internal/SerializedEventStorageImpl.scala @@ -43,6 +43,7 @@ private[testkit] class SerializedEventStorageImpl(system: ActorSystem) extends E val (payload, tags) = pr.payload match { case Tagged(event: AnyRef, tags) => (event, tags) case event: AnyRef => (event, Set.empty[String]) + case p => throw new RuntimeException(s"Unexpected payload: $p") } val s = serialization.findSerializerFor(payload) val manifest = Serializers.manifestFor(s, payload) diff --git a/akka-persistence-testkit/src/test/scala/akka/persistence/testkit/CommonUtils.scala b/akka-persistence-testkit/src/test/scala/akka/persistence/testkit/CommonUtils.scala index 62ad1fc1d0..4d0ade4a35 100644 --- a/akka-persistence-testkit/src/test/scala/akka/persistence/testkit/CommonUtils.scala +++ b/akka-persistence-testkit/src/test/scala/akka/persistence/testkit/CommonUtils.scala @@ -97,7 +97,7 @@ class A(pid: String, notifyOnStateChange: Option[ActorRef]) extends PersistentAc override def persistenceId = pid } -trait TestCommand +sealed trait TestCommand case class Cmd(data: String) extends TestCommand case object Passivate extends TestCommand case class Evt(data: String) diff --git a/akka-persistence-typed-tests/src/test/scala/akka/persistence/typed/ReplicatedEventPublishingSpec.scala 
b/akka-persistence-typed-tests/src/test/scala/akka/persistence/typed/ReplicatedEventPublishingSpec.scala index 6e0123756c..e74bd7f7df 100644 --- a/akka-persistence-typed-tests/src/test/scala/akka/persistence/typed/ReplicatedEventPublishingSpec.scala +++ b/akka-persistence-typed-tests/src/test/scala/akka/persistence/typed/ReplicatedEventPublishingSpec.scala @@ -51,6 +51,7 @@ object ReplicatedEventPublishingSpec { Effect.none case Stop => Effect.stop() + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") }, (state, string) => state + string)) } diff --git a/akka-persistence-typed/src/main/scala/akka/persistence/typed/delivery/EventSourcedProducerQueue.scala b/akka-persistence-typed/src/main/scala/akka/persistence/typed/delivery/EventSourcedProducerQueue.scala index 6143073488..be3299c9d3 100644 --- a/akka-persistence-typed/src/main/scala/akka/persistence/typed/delivery/EventSourcedProducerQueue.scala +++ b/akka-persistence-typed/src/main/scala/akka/persistence/typed/delivery/EventSourcedProducerQueue.scala @@ -266,6 +266,9 @@ private class EventSourcedProducerQueue[A]( case _: CleanupTick[_] => onCleanupTick(state) + + case cmd => + throw new RuntimeException(s"Unexpected command $cmd") } } else { onCommandBeforeInitialCleanup(state, command) diff --git a/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/BehaviorSetup.scala b/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/BehaviorSetup.scala index 08dc7900d3..ae54fb0524 100644 --- a/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/BehaviorSetup.scala +++ b/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/BehaviorSetup.scala @@ -102,7 +102,7 @@ private[akka] final class BehaviorSetup[C, E, S]( def cancelRecoveryTimer(): Unit = { recoveryTimer match { case OptionVal.Some(t) => t.cancel() - case OptionVal.None => + case _ => } recoveryTimer = OptionVal.None } @@ -141,6 +141,7 @@ private[akka] final class 
BehaviorSetup[C, E, S]( if (s.snapshotWhen(sequenceNr)) SnapshotWithRetention else if (snapshotWhen(state, event, sequenceNr)) SnapshotWithoutRetention else NoSnapshot + case unexpected => throw new IllegalStateException(s"Unexpected retention criteria: $unexpected") } } diff --git a/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/EventSourcedBehaviorImpl.scala b/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/EventSourcedBehaviorImpl.scala index 636543678f..335e125368 100644 --- a/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/EventSourcedBehaviorImpl.scala +++ b/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/EventSourcedBehaviorImpl.scala @@ -199,7 +199,7 @@ private[akka] final case class EventSourcedBehaviorImpl[Command, Event, State]( case res: SnapshotProtocol.Response => InternalProtocol.SnapshotterResponse(res) case RecoveryPermitter.RecoveryPermitGranted => InternalProtocol.RecoveryPermitGranted case internal: InternalProtocol => internal // such as RecoveryTickEvent - case cmd: Command @unchecked => InternalProtocol.IncomingCommand(cmd) + case cmd => InternalProtocol.IncomingCommand(cmd.asInstanceOf[Command]) } target(ctx, innerMsg) } diff --git a/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/ExternalInteractions.scala b/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/ExternalInteractions.scala index 6ac7d2232b..75c37d93c1 100644 --- a/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/ExternalInteractions.scala +++ b/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/ExternalInteractions.scala @@ -65,7 +65,7 @@ private[akka] trait JournalInteractions[C, E, S] { val write = AtomicWrite(metadata match { case OptionVal.Some(meta) => repr.withMetadata(meta) - case OptionVal.None => repr + case _ => repr }) :: Nil setup.journal diff --git 
a/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/ReplicationSetup.scala b/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/ReplicationSetup.scala index 595c16ac0a..caa5e46383 100644 --- a/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/ReplicationSetup.scala +++ b/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/ReplicationSetup.scala @@ -37,7 +37,7 @@ private[akka] final class ReplicationContextImpl( _currentThread match { case OptionVal.Some(t) => if (callerThread ne t) error() - case OptionVal.None => + case _ => error() } } @@ -50,7 +50,7 @@ private[akka] final class ReplicationContextImpl( checkAccess("origin") _origin match { case OptionVal.Some(origin) => origin - case OptionVal.None => throw new IllegalStateException("origin can only be accessed from the event handler") + case _ => throw new IllegalStateException("origin can only be accessed from the event handler") } } diff --git a/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/Running.scala b/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/Running.scala index 37512cdb2f..7419afca34 100644 --- a/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/Running.scala +++ b/akka-persistence-typed/src/main/scala/akka/persistence/typed/internal/Running.scala @@ -188,7 +188,7 @@ private[akka] object Running { if (setup.internalLogger.isDebugEnabled) setup.internalLogger.debug("Fast forward replica [{}] to [{}]", replicaId, sequenceNumber) control.fastForward(sequenceNumber) - case OptionVal.None => + case _ => // stream not started yet, ok, fast forward is an optimization if (setup.internalLogger.isDebugEnabled) setup.internalLogger.debug( @@ -596,6 +596,8 @@ private[akka] object Running { case _: Stash.type => stashUser(IncomingCommand(msg)) (applySideEffects(sideEffects, state), true) + + case unexpected => throw new IllegalStateException(s"Unexpected retention 
effect: $unexpected") } } @@ -810,6 +812,7 @@ private[akka] object Running { // deleteEventsOnSnapshot == false, deletion of old snapshots val deleteSnapshotsToSeqNr = s.deleteUpperSequenceNr(meta.sequenceNr) internalDeleteSnapshots(s.deleteLowerSequenceNr(deleteSnapshotsToSeqNr), deleteSnapshotsToSeqNr) + case unexpected => throw new IllegalStateException(s"Unexpected retention criteria: $unexpected") } } @@ -926,6 +929,7 @@ private[akka] object Running { // starting at the snapshot at toSequenceNr would be invalid. val deleteSnapshotsToSeqNr = toSequenceNr - 1 internalDeleteSnapshots(s.deleteLowerSequenceNr(deleteSnapshotsToSeqNr), deleteSnapshotsToSeqNr) + case unexpected => throw new IllegalStateException(s"Unexpected retention criteria: $unexpected") } Some(DeleteEventsCompleted(toSequenceNr)) case DeleteMessagesFailure(e, toSequenceNr) => diff --git a/akka-persistence-typed/src/main/scala/akka/persistence/typed/javadsl/CommandHandler.scala b/akka-persistence-typed/src/main/scala/akka/persistence/typed/javadsl/CommandHandler.scala index 5ab694405e..b4534dad90 100644 --- a/akka-persistence-typed/src/main/scala/akka/persistence/typed/javadsl/CommandHandler.scala +++ b/akka-persistence-typed/src/main/scala/akka/persistence/typed/javadsl/CommandHandler.scala @@ -392,9 +392,9 @@ final class CommandHandlerBuilderByState[Command, Event, S <: State, State] @Int } effect match { - case OptionVal.None => - throw new MatchError(s"No match found for command of type [${command.getClass.getName}]") case OptionVal.Some(e) => e.asInstanceOf[EffectImpl[Event, State]] + case _ => + throw new MatchError(s"No match found for command of type [${command.getClass.getName}]") } } } diff --git a/akka-persistence-typed/src/main/scala/akka/persistence/typed/javadsl/CommandHandlerWithReply.scala b/akka-persistence-typed/src/main/scala/akka/persistence/typed/javadsl/CommandHandlerWithReply.scala index f449e07cb7..faf9dab46b 100644 --- 
a/akka-persistence-typed/src/main/scala/akka/persistence/typed/javadsl/CommandHandlerWithReply.scala +++ b/akka-persistence-typed/src/main/scala/akka/persistence/typed/javadsl/CommandHandlerWithReply.scala @@ -403,9 +403,9 @@ final class CommandHandlerWithReplyBuilderByState[Command, Event, S <: State, St } effect match { - case OptionVal.None => - throw new MatchError(s"No match found for command of type [${command.getClass.getName}]") case OptionVal.Some(e) => e.asInstanceOf[EffectImpl[Event, State]] + case _ => + throw new MatchError(s"No match found for command of type [${command.getClass.getName}]") } } } diff --git a/akka-persistence-typed/src/main/scala/akka/persistence/typed/javadsl/EventHandler.scala b/akka-persistence-typed/src/main/scala/akka/persistence/typed/javadsl/EventHandler.scala index 4f4a1e2085..4a54313b69 100644 --- a/akka-persistence-typed/src/main/scala/akka/persistence/typed/javadsl/EventHandler.scala +++ b/akka-persistence-typed/src/main/scala/akka/persistence/typed/javadsl/EventHandler.scala @@ -327,11 +327,11 @@ final class EventHandlerBuilderByState[S <: State, State, Event]( } result match { - case OptionVal.None => + case OptionVal.Some(s) => s + case _ => val stateClass = if (state == null) "null" else state.getClass.getName throw new MatchError( s"No match found for event [${event.getClass}] and state [$stateClass]. 
Has this event been stored using an EventAdapter?") - case OptionVal.Some(s) => s } } } diff --git a/akka-persistence-typed/src/main/scala/akka/persistence/typed/scaladsl/PersistentFSMMigration.scala b/akka-persistence-typed/src/main/scala/akka/persistence/typed/scaladsl/PersistentFSMMigration.scala index be62620555..065c6fe70f 100644 --- a/akka-persistence-typed/src/main/scala/akka/persistence/typed/scaladsl/PersistentFSMMigration.scala +++ b/akka-persistence-typed/src/main/scala/akka/persistence/typed/scaladsl/PersistentFSMMigration.scala @@ -28,7 +28,7 @@ object PersistentFSMMigration { override def fromJournal(from: Any): State = { from match { case PersistentFSMSnapshot(stateIdentifier, data, timeout) => adapt(stateIdentifier, data, timeout) - case data: State @unchecked => data + case data => data.asInstanceOf[State] } } } diff --git a/akka-persistence-typed/src/test/scala/akka/persistence/typed/scaladsl/EventSourcedSnapshotAdapterSpec.scala b/akka-persistence-typed/src/test/scala/akka/persistence/typed/scaladsl/EventSourcedSnapshotAdapterSpec.scala index 1c3d746e8e..424d2abd04 100644 --- a/akka-persistence-typed/src/test/scala/akka/persistence/typed/scaladsl/EventSourcedSnapshotAdapterSpec.scala +++ b/akka-persistence-typed/src/test/scala/akka/persistence/typed/scaladsl/EventSourcedSnapshotAdapterSpec.scala @@ -84,6 +84,7 @@ class EventSourcedSnapshotAdapterSpec case ps: PersistedState => snapshotFromJournal.ref.tell(ps) State(ps.s) + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } }) .snapshotWhen { (_, event, _) => diff --git a/akka-persistence-typed/src/test/scala/akka/persistence/typed/scaladsl/EventSourcedStashOverflowSpec.scala b/akka-persistence-typed/src/test/scala/akka/persistence/typed/scaladsl/EventSourcedStashOverflowSpec.scala index 5365774d3a..78bbe3212b 100644 --- a/akka-persistence-typed/src/test/scala/akka/persistence/typed/scaladsl/EventSourcedStashOverflowSpec.scala +++ 
b/akka-persistence-typed/src/test/scala/akka/persistence/typed/scaladsl/EventSourcedStashOverflowSpec.scala @@ -21,7 +21,7 @@ import scala.concurrent.duration._ object EventSourcedStashOverflowSpec { object EventSourcedStringList { - trait Command + sealed trait Command case class DoNothing(replyTo: ActorRef[Done]) extends Command def apply(persistenceId: PersistenceId): Behavior[Command] = diff --git a/akka-persistence/src/main/scala/akka/persistence/fsm/PersistentFSM.scala b/akka-persistence/src/main/scala/akka/persistence/fsm/PersistentFSM.scala index edd75997d1..e1dbcff1a2 100644 --- a/akka-persistence/src/main/scala/akka/persistence/fsm/PersistentFSM.scala +++ b/akka-persistence/src/main/scala/akka/persistence/fsm/PersistentFSM.scala @@ -178,6 +178,8 @@ trait PersistentFSM[S <: FSMState, D, E] extends PersistentActor with Persistent case _: StateChangeEvent => doSnapshot = doSnapshot || snapshotAfterExtension.isSnapshotAfterSeqNo(lastSequenceNr) applyStateOnLastHandler() + case _ => + throw new RuntimeException() // compiler exhaustiveness check pleaser } } } diff --git a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteProxy.scala b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteProxy.scala index c6345e6cf1..3f6c2fe55e 100644 --- a/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteProxy.scala +++ b/akka-persistence/src/main/scala/akka/persistence/journal/AsyncWriteProxy.scala @@ -87,6 +87,7 @@ private[persistence] trait AsyncWriteProxy extends AsyncWriteJournal with Stash case Some(s) => (s ? 
ReplayMessages(persistenceId, fromSequenceNr = 0L, toSequenceNr = 0L, max = 0L)).map { case ReplaySuccess(highest) => highest + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } case None => storeNotInitialized } diff --git a/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliverySpec.scala b/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliverySpec.scala index 50117c391c..aef5773778 100644 --- a/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliverySpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/AtLeastOnceDeliverySpec.scala @@ -401,11 +401,11 @@ abstract class AtLeastOnceDeliverySpec(config: Config) extends PersistenceSpec(c snd.tell(Req("c-" + n), probe.ref) } val deliverWithin = 20.seconds - probeA.receiveN(N, deliverWithin).map { case a: Action => a.payload }.toSet should ===( + probeA.receiveN(N, deliverWithin).collect { case a: Action => a.payload }.toSet should ===( (1 to N).map(n => "a-" + n).toSet) - probeB.receiveN(N, deliverWithin).map { case a: Action => a.payload }.toSet should ===( + probeB.receiveN(N, deliverWithin).collect { case a: Action => a.payload }.toSet should ===( (1 to N).map(n => "b-" + n).toSet) - probeC.receiveN(N, deliverWithin).map { case a: Action => a.payload }.toSet should ===( + probeC.receiveN(N, deliverWithin).collect { case a: Action => a.payload }.toSet should ===( (1 to N).map(n => "c-" + n).toSet) } diff --git a/akka-persistence/src/test/scala/akka/persistence/EndToEndEventAdapterSpec.scala b/akka-persistence/src/test/scala/akka/persistence/EndToEndEventAdapterSpec.scala index 4f0de8c1e2..3286076700 100644 --- a/akka-persistence/src/test/scala/akka/persistence/EndToEndEventAdapterSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/EndToEndEventAdapterSpec.scala @@ -34,7 +34,10 @@ object EndToEndEventAdapterSpec { override def manifest(event: Any): String = event.getClass.getCanonicalName override def toJournal(event: 
Any): Any = - event match { case m: AppModel => JSON(m.payload) } + event match { + case m: AppModel => JSON(m.payload) + case _ => throw new RuntimeException() + } override def fromJournal(event: Any, manifest: String): EventSeq = event match { case m: JSON if m.payload.toString.startsWith("a") => EventSeq.single(A(m.payload)) case _ => EventSeq.empty @@ -44,7 +47,10 @@ object EndToEndEventAdapterSpec { override def manifest(event: Any): String = event.getClass.getCanonicalName override def toJournal(event: Any): Any = - event match { case m: AppModel => JSON(m.payload) } + event match { + case m: AppModel => JSON(m.payload) + case _ => throw new RuntimeException() + } override def fromJournal(event: Any, manifest: String): EventSeq = event match { case m: JSON if m.payload.toString.startsWith("a") => EventSeq.single(NewA(m.payload)) case _ => EventSeq.empty @@ -54,7 +60,10 @@ object EndToEndEventAdapterSpec { override def manifest(event: Any): String = event.getClass.getCanonicalName override def toJournal(event: Any): Any = - event match { case m: AppModel => JSON(m.payload) } + event match { + case m: AppModel => JSON(m.payload) + case _ => throw new RuntimeException() + } override def fromJournal(event: Any, manifest: String): EventSeq = event match { case m: JSON if m.payload.toString.startsWith("b") => EventSeq.single(B(m.payload)) case _ => EventSeq.empty @@ -64,7 +73,10 @@ object EndToEndEventAdapterSpec { override def manifest(event: Any): String = event.getClass.getCanonicalName override def toJournal(event: Any): Any = - event match { case m: AppModel => JSON(m.payload) } + event match { + case m: AppModel => JSON(m.payload) + case _ => throw new RuntimeException() + } override def fromJournal(event: Any, manifest: String): EventSeq = event match { case m: JSON if m.payload.toString.startsWith("b") => EventSeq.single(NewB(m.payload)) case _ => EventSeq.empty diff --git a/akka-persistence/src/test/scala/akka/persistence/EventAdapterSpec.scala 
b/akka-persistence/src/test/scala/akka/persistence/EventAdapterSpec.scala index b31231f002..115f4fdf73 100644 --- a/akka-persistence/src/test/scala/akka/persistence/EventAdapterSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/EventAdapterSpec.scala @@ -43,6 +43,7 @@ object EventAdapterSpec { override def fromJournal(event: Any, manifest: String): EventSeq = EventSeq.single { event match { case m: JournalModel => m.payload + case _ => throw new RuntimeException() } } @@ -53,6 +54,7 @@ object EventAdapterSpec { override def fromJournal(event: Any, manifest: String): EventSeq = EventSeq.single { event match { case m: JournalModel => m // don't unpack, just pass through the JournalModel + case _ => throw new RuntimeException() } } } diff --git a/akka-persistence/src/test/scala/akka/persistence/EventSourcedActorFailureSpec.scala b/akka-persistence/src/test/scala/akka/persistence/EventSourcedActorFailureSpec.scala index 082aaf893a..e38bd51a88 100644 --- a/akka-persistence/src/test/scala/akka/persistence/EventSourcedActorFailureSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/EventSourcedActorFailureSpec.scala @@ -50,7 +50,10 @@ object EventSourcedActorFailureSpec { def isWrong(messages: immutable.Seq[AtomicWrite]): Boolean = messages.exists { case a: AtomicWrite => - a.payload.exists { case PersistentRepr(Evt(s: String), _) => s.contains("wrong") } + a.payload.exists { + case PersistentRepr(Evt(s: String), _) => s.contains("wrong") + case _ => false + } case _ => false } @@ -66,7 +69,10 @@ object EventSourcedActorFailureSpec { } def isCorrupt(events: Seq[PersistentRepr]): Boolean = - events.exists { case PersistentRepr(Evt(s: String), _) => s.contains("corrupt") } + events.exists { + case PersistentRepr(Evt(s: String), _) => s.contains("corrupt") + case _ => false + } } diff --git a/akka-persistence/src/test/scala/akka/persistence/PersistentActorJournalProtocolSpec.scala 
b/akka-persistence/src/test/scala/akka/persistence/PersistentActorJournalProtocolSpec.scala index 063cce52c8..8c96faf09b 100644 --- a/akka-persistence/src/test/scala/akka/persistence/PersistentActorJournalProtocolSpec.scala +++ b/akka-persistence/src/test/scala/akka/persistence/PersistentActorJournalProtocolSpec.scala @@ -117,6 +117,7 @@ class PersistentActorJournalProtocolSpec extends AkkaSpec(config) with ImplicitS writes.zip(msg.msg).foreach { case (PersistentRepr(evt, _), m) => evt should ===(m) + case _ => } case x => fail(s"unexpected $x") } diff --git a/akka-persistence/src/test/scala/akka/persistence/journal/SteppingInmemJournal.scala b/akka-persistence/src/test/scala/akka/persistence/journal/SteppingInmemJournal.scala index 728cd1f5ff..4d27b88f0b 100644 --- a/akka-persistence/src/test/scala/akka/persistence/journal/SteppingInmemJournal.scala +++ b/akka-persistence/src/test/scala/akka/persistence/journal/SteppingInmemJournal.scala @@ -75,8 +75,8 @@ final class SteppingInmemJournal extends InmemJournal { override def receivePluginInternal = super.receivePluginInternal.orElse { case Token if queuedOps.isEmpty => queuedTokenRecipients = queuedTokenRecipients :+ sender() case Token => - val op +: rest = queuedOps - queuedOps = rest + val op = queuedOps.head + queuedOps = queuedOps.tail val tokenConsumer = sender() op().onComplete(_ => tokenConsumer ! 
TokenConsumed) } @@ -97,8 +97,8 @@ final class SteppingInmemJournal extends InmemJournal { val future = promise.future doOrEnqueue { () => promise.completeWith(super.asyncWriteMessages(Seq(message)).map { - case Nil => AsyncWriteJournal.successUnit - case head :: _ => head + case Nil => AsyncWriteJournal.successUnit + case nonEmpty => nonEmpty.head }) future.map(_ => ()) } @@ -144,7 +144,8 @@ final class SteppingInmemJournal extends InmemJournal { private def doOrEnqueue(op: () => Future[Unit]): Unit = { if (queuedTokenRecipients.nonEmpty) { val completed = op() - val tokenRecipient +: rest = queuedTokenRecipients + val tokenRecipient = queuedTokenRecipients.head + val rest = queuedTokenRecipients.tail queuedTokenRecipients = rest completed.onComplete(_ => tokenRecipient ! TokenConsumed) } else { diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/io/DnsSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/io/DnsSpec.scala index ff289cece3..dbe87e4484 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/io/DnsSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/io/DnsSpec.scala @@ -27,10 +27,12 @@ class DnsSpec extends RemotingMultiNodeSpec(DnsSpec) { val ip4Address = InetAddress.getByAddress("localhost", Array[Byte](127, 0, 0, 1)) match { case address: Inet4Address => address + case _ => fail() } val ipv6Address = InetAddress.getByAddress("localhost", Array[Byte](0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1)) match { case address: Inet6Address => address + case _ => fail() } var temporaryValue: Option[String] = None diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/artery/MaxThroughputSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/artery/MaxThroughputSpec.scala index d20dab1ccc..fdd2a8811d 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/artery/MaxThroughputSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/artery/MaxThroughputSpec.scala @@ -7,11 +7,8 @@ package akka.remote.artery import 
java.nio.ByteBuffer import java.util.concurrent.Executors import java.util.concurrent.TimeUnit.NANOSECONDS - import scala.concurrent.duration._ - import com.typesafe.config.ConfigFactory - import akka.actor._ import akka.remote.{ RARP, RemoteActorRefProvider, RemotingMultiNodeSpec } import akka.remote.artery.compress.CompressionProtocol.Events.ReceivedActorRefCompressionTable @@ -21,6 +18,8 @@ import akka.serialization.{ ByteBufferSerializer, SerializerWithStringManifest } import akka.serialization.jackson.CborSerializable import akka.testkit._ +import java.io.NotSerializableException + object MaxThroughputSpec extends MultiNodeConfig { val first = role("first") val second = role("second") @@ -326,6 +325,7 @@ object MaxThroughputSpec extends MultiNodeConfig { override def manifest(o: AnyRef): String = o match { case _: FlowControl => FlowControlManifest + case _ => throw new NotSerializableException() } override def toBinary(o: AnyRef, buf: ByteBuffer): Unit = @@ -333,11 +333,13 @@ object MaxThroughputSpec extends MultiNodeConfig { case FlowControl(id, burstStartTime) => buf.putInt(id) buf.putLong(burstStartTime) + case _ => throw new NotSerializableException() } override def fromBinary(buf: ByteBuffer, manifest: String): AnyRef = manifest match { case FlowControlManifest => FlowControl(buf.getInt, buf.getLong) + case _ => throw new NotSerializableException() } override def toBinary(o: AnyRef): Array[Byte] = o match { @@ -348,6 +350,7 @@ object MaxThroughputSpec extends MultiNodeConfig { val bytes = new Array[Byte](buf.remaining) buf.get(bytes) bytes + case _ => throw new NotSerializableException() } override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/artery/TestMessage.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/artery/TestMessage.scala index a501d0ba1f..1c26599689 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/artery/TestMessage.scala +++ 
b/akka-remote-tests/src/multi-jvm/scala/akka/remote/artery/TestMessage.scala @@ -9,6 +9,8 @@ import akka.protobufv3.internal.ByteString import akka.remote.artery.protobuf.{ TestMessages => proto } import akka.serialization.SerializerWithStringManifest +import java.io.NotSerializableException + object TestMessage { final case class Item(id: Long, name: String) } @@ -30,6 +32,7 @@ class TestMessageSerializer(val system: ExtendedActorSystem) extends SerializerW override def manifest(o: AnyRef): String = o match { case _: TestMessage => TestMessageManifest + case _ => throw new NotSerializableException() } override def toBinary(o: AnyRef): Array[Byte] = o match { @@ -45,6 +48,7 @@ class TestMessageSerializer(val system: ExtendedActorSystem) extends SerializerW builder.addItems(proto.Item.newBuilder().setId(item.id).setName(item.name)) } builder.build().toByteArray() + case _ => throw new NotSerializableException() } override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = { diff --git a/akka-remote/src/main/scala/akka/remote/BoundAddressesExtension.scala b/akka-remote/src/main/scala/akka/remote/BoundAddressesExtension.scala index 25f645b591..5eb77fd0a6 100644 --- a/akka-remote/src/main/scala/akka/remote/BoundAddressesExtension.scala +++ b/akka-remote/src/main/scala/akka/remote/BoundAddressesExtension.scala @@ -34,5 +34,6 @@ class BoundAddressesExtension(val system: ExtendedActorSystem) extends Extension def boundAddresses: Map[String, Set[Address]] = system.provider.asInstanceOf[RemoteActorRefProvider].transport match { case artery: ArteryTransport => Map(ArteryTransport.ProtocolName -> Set(artery.bindAddress.address)) case remoting: Remoting => remoting.boundAddresses + case other => throw new IllegalStateException(s"Unexpected transport type: ${other.getClass}") } } diff --git a/akka-remote/src/main/scala/akka/remote/Endpoint.scala b/akka-remote/src/main/scala/akka/remote/Endpoint.scala index 2c3085402c..99b1347f70 100644 --- 
a/akka-remote/src/main/scala/akka/remote/Endpoint.scala +++ b/akka-remote/src/main/scala/akka/remote/Endpoint.scala @@ -781,6 +781,8 @@ private[remote] class EndpointWriter( case s @ StopReading(_, replyTo) => reader.foreach(_.tell(s, replyTo)) true + case unexpected => + throw new IllegalArgumentException(s"Unexpected message type: ${unexpected.getClass}") } @tailrec def writeLoop(count: Int): Boolean = @@ -906,7 +908,7 @@ private[remote] class EndpointWriter( "Transient association error (association remains live)") extendedSystem.eventStream.publish(s.senderOption match { case OptionVal.Some(msgSender) => Dropped(s.message, reasonText, msgSender, s.recipient) - case OptionVal.None => Dropped(s.message, reasonText, s.recipient) + case _ => Dropped(s.message, reasonText, s.recipient) }) true } else { diff --git a/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala b/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala index 544dff3495..1a73aace6a 100644 --- a/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala @@ -606,7 +606,7 @@ private[akka] class RemoteActorRefProvider( @InternalApi override private[akka] def serializationInformation: Serialization.Information = serializationInformationCache match { case OptionVal.Some(info) => info - case OptionVal.None => + case _ => if ((transport eq null) || (transport.defaultAddress eq null)) local.serializationInformation // address not know yet, access before complete init and binding else { @@ -635,7 +635,7 @@ private[akka] class RemoteActorRefProvider( override private[akka] def addressString: String = { _addressString match { case OptionVal.Some(addr) => addr - case OptionVal.None => + case _ => // not initialized yet, fallback local.addressString } diff --git a/akka-remote/src/main/scala/akka/remote/Remoting.scala b/akka-remote/src/main/scala/akka/remote/Remoting.scala index da509772b4..3fa955ca61 
100644 --- a/akka-remote/src/main/scala/akka/remote/Remoting.scala +++ b/akka-remote/src/main/scala/akka/remote/Remoting.scala @@ -258,7 +258,10 @@ private[remote] class Remoting(_system: ExtendedActorSystem, _provider: RemoteAc override def managementCommand(cmd: Any): Future[Boolean] = endpointManager match { case Some(manager) => implicit val timeout = CommandAckTimeout - (manager ? ManagementCommand(cmd)).map { case ManagementCommandAck(status) => status } + (manager ? ManagementCommand(cmd)).map { + case ManagementCommandAck(status) => status + case unexpected => throw new IllegalArgumentException(s"Unexpected response type: ${unexpected.getClass}") + } case None => throw new RemoteTransportExceptionNoStackTrace( "Attempted to send management command but Remoting is not running.", diff --git a/akka-remote/src/main/scala/akka/remote/artery/ArteryTransport.scala b/akka-remote/src/main/scala/akka/remote/artery/ArteryTransport.scala index 8dcdbb97e8..8f3f92328e 100644 --- a/akka-remote/src/main/scala/akka/remote/artery/ArteryTransport.scala +++ b/akka-remote/src/main/scala/akka/remote/artery/ArteryTransport.scala @@ -454,7 +454,7 @@ private[remote] abstract class ArteryTransport(_system: ExtendedActorSystem, _pr val hashA = 23 + a val hash: Int = 23 * hashA + java.lang.Long.hashCode(b) math.abs(hash % inboundLanes) - case OptionVal.None => + case _ => // the lane is set by the DuplicateHandshakeReq stage, otherwise 0 env.lane } @@ -855,7 +855,7 @@ private[remote] abstract class ArteryTransport(_system: ExtendedActorSystem, _pr if (!isShutdown) quarantine(from.address, Some(from.uid), "ActorSystem terminated", harmless = true) }(materializer.executionContext) - case OptionVal.None => + case _ => log.error("Expected sender for ActorSystemTerminating message from [{}]", from) } false @@ -874,7 +874,7 @@ private[remote] abstract class ArteryTransport(_system: ExtendedActorSystem, _pr envelope.sender match { case OptionVal.Some(snd) => snd.tell(FlushAck, 
ActorRef.noSender) - case OptionVal.None => + case _ => log.error("Expected sender for Flush message from [{}]", envelope.association) } false diff --git a/akka-remote/src/main/scala/akka/remote/artery/Association.scala b/akka-remote/src/main/scala/akka/remote/artery/Association.scala index 219e98afa0..263ed9b772 100644 --- a/akka-remote/src/main/scala/akka/remote/artery/Association.scala +++ b/akka-remote/src/main/scala/akka/remote/artery/Association.scala @@ -226,7 +226,7 @@ private[remote] class Association( def outboundControlIngress: OutboundControlIngress = { _outboundControlIngress match { case OptionVal.Some(o) => o - case OptionVal.None => + case _ => controlQueue match { case w: LazyQueueWrapper => w.runMaterialize() case _ => @@ -236,7 +236,7 @@ private[remote] class Association( materializing.await(10, TimeUnit.SECONDS) _outboundControlIngress match { case OptionVal.Some(o) => o - case OptionVal.None => + case _ => if (transport.isShutdown || isRemovedAfterQuarantined()) throw ShuttingDown else throw new IllegalStateException(s"outboundControlIngress for [$remoteAddress] not initialized yet") } @@ -345,7 +345,7 @@ private[remote] class Association( val removed = isRemovedAfterQuarantined() if (removed) recipient match { case OptionVal.Some(ref) => ref.cachedAssociation = null // don't use this Association instance any more - case OptionVal.None => + case _ => } val reason = if (removed) "Due to removed unused quarantined association" @@ -472,7 +472,7 @@ private[remote] class Association( case idx => idx } - case OptionVal.None => + case _ => OrdinaryQueueIndex } } @@ -642,7 +642,7 @@ private[remote] class Association( setStopReason(queueIndex, OutboundStreamStopQuarantinedSignal) clearStreamKillSwitch(queueIndex, k) k.abort(OutboundStreamStopQuarantinedSignal) - case OptionVal.None => // already aborted + case _ => // already aborted } } } @@ -688,7 +688,7 @@ private[remote] class Association( setStopReason(queueIndex, OutboundStreamStopIdleSignal) 
clearStreamKillSwitch(queueIndex, k) k.abort(OutboundStreamStopIdleSignal) - case OptionVal.None => // already aborted + case _ => // already aborted } } else { @@ -700,7 +700,7 @@ private[remote] class Association( flightRecorder.transportStopIdleOutbound(remoteAddress, queueIndex) setControlIdleKillSwitch(OptionVal.None) killSwitch.abort(OutboundStreamStopIdleSignal) - case OptionVal.None => // already stopped + case _ => // already stopped } } } diff --git a/akka-remote/src/main/scala/akka/remote/artery/Codecs.scala b/akka-remote/src/main/scala/akka/remote/artery/Codecs.scala index d3b6d616a2..ba32e2930d 100644 --- a/akka-remote/src/main/scala/akka/remote/artery/Codecs.scala +++ b/akka-remote/src/main/scala/akka/remote/artery/Codecs.scala @@ -83,7 +83,7 @@ private[remote] class Encoder( private var _serialization: OptionVal[Serialization] = OptionVal.None private def serialization: Serialization = _serialization match { case OptionVal.Some(s) => s - case OptionVal.None => + case _ => val s = SerializationExtension(system) _serialization = OptionVal.Some(s) s @@ -131,12 +131,12 @@ private[remote] class Encoder( // internally compression is applied by the builder: outboundEnvelope.recipient match { case OptionVal.Some(r) => headerBuilder.setRecipientActorRef(r) - case OptionVal.None => headerBuilder.setNoRecipient() + case _ => headerBuilder.setNoRecipient() } outboundEnvelope.sender match { - case OptionVal.None => headerBuilder.setNoSender() case OptionVal.Some(s) => headerBuilder.setSenderActorRef(s) + case _ => headerBuilder.setNoSender() } val startTime: Long = if (instruments.timeSerialization) System.nanoTime else 0 @@ -186,7 +186,7 @@ private[remote] class Encoder( reasonText, msgSender, outboundEnvelope.recipient.getOrElse(ActorRef.noSender)) - case OptionVal.None => + case _ => Dropped( outboundEnvelope.message, reasonText, @@ -478,13 +478,13 @@ private[remote] class Decoder( sender match { case OptionVal.Some(snd) => compressions.hitActorRef(originUid, 
remoteAddress, snd, 1) - case OptionVal.None => + case _ => } recipient match { case OptionVal.Some(rcp) => compressions.hitActorRef(originUid, remoteAddress, rcp, 1) - case OptionVal.None => + case _ => } compressions.hitClassManifest(originUid, remoteAddress, classManifest, 1) @@ -529,7 +529,7 @@ private[remote] class Decoder( "Message for banned (terminated, unresolved) remote deployed recipient [{}].", recipientActorRefPath) push(out, decoded.withRecipient(ref)) - case OptionVal.None => + case _ => log.warning( "Dropping message for banned (terminated, unresolved) remote deployed recipient [{}].", recipientActorRefPath) @@ -594,7 +594,9 @@ private[remote] class Decoder( case RetryResolveRemoteDeployedRecipient(attemptsLeft, recipientPath, inboundEnvelope) => resolveRecipient(recipientPath) match { - case OptionVal.None => + case OptionVal.Some(recipient) => + push(out, inboundEnvelope.withRecipient(recipient)) + case _ => if (attemptsLeft > 0) scheduleOnce( RetryResolveRemoteDeployedRecipient(attemptsLeft - 1, recipientPath, inboundEnvelope), @@ -613,9 +615,9 @@ private[remote] class Decoder( val recipient = actorRefResolver.getOrCompute(recipientPath) push(out, inboundEnvelope.withRecipient(recipient)) } - case OptionVal.Some(recipient) => - push(out, inboundEnvelope.withRecipient(recipient)) } + + case unknown => throw new IllegalArgumentException(s"Unknown timer key: $unknown") } } @@ -648,7 +650,7 @@ private[remote] class Deserializer( private var _serialization: OptionVal[Serialization] = OptionVal.None private def serialization: Serialization = _serialization match { case OptionVal.Some(s) => s - case OptionVal.None => + case _ => val s = SerializationExtension(system) _serialization = OptionVal.Some(s) s @@ -682,7 +684,7 @@ private[remote] class Deserializer( case NonFatal(e) => val from = envelope.association match { case OptionVal.Some(a) => a.remoteAddress - case OptionVal.None => "unknown" + case _ => "unknown" } log.warning( "Failed to deserialize 
message from [{}] with serializer id [{}] and manifest [{}]. {}", diff --git a/akka-remote/src/main/scala/akka/remote/artery/Handshake.scala b/akka-remote/src/main/scala/akka/remote/artery/Handshake.scala index 875f638fab..09bd09208b 100644 --- a/akka-remote/src/main/scala/akka/remote/artery/Handshake.scala +++ b/akka-remote/src/main/scala/akka/remote/artery/Handshake.scala @@ -125,12 +125,12 @@ private[remote] class OutboundHandshake( handshakeState match { case Completed => pendingMessage match { - case OptionVal.None => - if (!hasBeenPulled(in)) - pull(in) case OptionVal.Some(p) => push(out, p) pendingMessage = OptionVal.None + case _ => + if (!hasBeenPulled(in)) + pull(in) } case Start => @@ -207,6 +207,8 @@ private[remote] class OutboundHandshake( failStage( new HandshakeTimeoutException( s"Handshake with [${outboundContext.remoteAddress}] did not complete within ${timeout.toMillis} ms")) + case unknown => + throw new IllegalArgumentException(s"Unknown timer key: $unknown") } setHandlers(in, out, this) diff --git a/akka-remote/src/main/scala/akka/remote/artery/InboundQuarantineCheck.scala b/akka-remote/src/main/scala/akka/remote/artery/InboundQuarantineCheck.scala index 26e5b7c448..2d34c813d5 100644 --- a/akka-remote/src/main/scala/akka/remote/artery/InboundQuarantineCheck.scala +++ b/akka-remote/src/main/scala/akka/remote/artery/InboundQuarantineCheck.scala @@ -33,9 +33,6 @@ private[remote] class InboundQuarantineCheck(inboundContext: InboundContext) override def onPush(): Unit = { val env = grab(in) env.association match { - case OptionVal.None => - // unknown, handshake not completed - push(out, env) case OptionVal.Some(association) => if (association.associationState.isQuarantined(env.originUid)) { if (log.isDebugEnabled) @@ -52,6 +49,9 @@ private[remote] class InboundQuarantineCheck(inboundContext: InboundContext) pull(in) } else push(out, env) + case _ => + // unknown, handshake not completed + push(out, env) } } diff --git 
a/akka-remote/src/main/scala/akka/remote/artery/MessageDispatcher.scala b/akka-remote/src/main/scala/akka/remote/artery/MessageDispatcher.scala index e90d66640f..9de6dcf498 100644 --- a/akka-remote/src/main/scala/akka/remote/artery/MessageDispatcher.scala +++ b/akka-remote/src/main/scala/akka/remote/artery/MessageDispatcher.scala @@ -35,7 +35,7 @@ private[remote] class MessageDispatcher(system: ExtendedActorSystem, provider: R val senderOption = inboundEnvelope.sender val originAddress = inboundEnvelope.association match { case OptionVal.Some(a) => OptionVal.Some(a.remoteAddress) - case OptionVal.None => OptionVal.None + case _ => OptionVal.None } val sender: ActorRef = senderOption.getOrElse(system.deadLetters) diff --git a/akka-remote/src/main/scala/akka/remote/artery/SystemMessageDelivery.scala b/akka-remote/src/main/scala/akka/remote/artery/SystemMessageDelivery.scala index 7e5acca5bb..8b4e4f2a9a 100644 --- a/akka-remote/src/main/scala/akka/remote/artery/SystemMessageDelivery.scala +++ b/akka-remote/src/main/scala/akka/remote/artery/SystemMessageDelivery.scala @@ -138,6 +138,9 @@ import akka.util.PrettyDuration.PrettyPrintableDuration } if (!unacknowledged.isEmpty) scheduleOnce(ResendTick, resendInterval) + + case other => + throw new IllegalArgumentException(s"Unknown timer key: $other") } // ControlMessageObserver, external call @@ -347,7 +350,7 @@ import akka.util.PrettyDuration.PrettyPrintableDuration // for logging def fromRemoteAddressStr: String = env.association match { case OptionVal.Some(a) => a.remoteAddress.toString - case OptionVal.None => "N/A" + case _ => "N/A" } env.message match { diff --git a/akka-remote/src/main/scala/akka/remote/artery/TestStage.scala b/akka-remote/src/main/scala/akka/remote/artery/TestStage.scala index 1eee63fa89..9b5fcab931 100644 --- a/akka-remote/src/main/scala/akka/remote/artery/TestStage.scala +++ b/akka-remote/src/main/scala/akka/remote/artery/TestStage.scala @@ -161,13 +161,6 @@ private[remote] class 
InboundTestStage(inboundContext: InboundContext, state: Sh case _ => val env = grab(in) env.association match { - case OptionVal.None => - // unknown, handshake not completed - if (state.anyBlackholePresent()) - log.debug( - "inbound message [{}] before handshake completed, cannot check if remote is blackholed, letting through", - Logging.messageClassName(env.message)) - push(out, env) case OptionVal.Some(association) => if (state.isBlackhole(inboundContext.localAddress.address, association.remoteAddress)) { log.debug( @@ -178,6 +171,13 @@ private[remote] class InboundTestStage(inboundContext: InboundContext, state: Sh pull(in) // drop message } else push(out, env) + case _ => + // unknown, handshake not completed + if (state.anyBlackholePresent()) + log.debug( + "inbound message [{}] before handshake completed, cannot check if remote is blackholed, letting through", + Logging.messageClassName(env.message)) + push(out, env) } } } diff --git a/akka-remote/src/main/scala/akka/remote/artery/compress/InboundCompressions.scala b/akka-remote/src/main/scala/akka/remote/artery/compress/InboundCompressions.scala index cf186ee77f..c2ed6e7abe 100644 --- a/akka-remote/src/main/scala/akka/remote/artery/compress/InboundCompressions.scala +++ b/akka-remote/src/main/scala/akka/remote/artery/compress/InboundCompressions.scala @@ -205,6 +205,7 @@ private[remote] final class InboundActorRefCompression( mb += ref -> idx idx += 1 } + case _ => // ignore others } mb.result() } @@ -415,7 +416,7 @@ private[remote] abstract class InboundCompression[T >: Null]( tableVersion, originUid, inProgress.version) - case None => + case _ => // already confirmed } @@ -469,7 +470,7 @@ private[remote] abstract class InboundCompression[T >: Null]( originUid) } - case OptionVal.None => + case _ => // otherwise it's too early, association not ready yet. // so we don't build the table since we would not be able to send it anyway. 
log.debug("No Association for originUid [{}] yet, unable to advertise compression table.", originUid) @@ -489,7 +490,7 @@ private[remote] abstract class InboundCompression[T >: Null]( resendCount, maxResendCount) advertiseCompressionTable(association, inProgress) // resend - case OptionVal.None => + case _ => } } else { // give up, it might be dead diff --git a/akka-remote/src/main/scala/akka/remote/serialization/ArteryMessageSerializer.scala b/akka-remote/src/main/scala/akka/remote/serialization/ArteryMessageSerializer.scala index ffc80f2378..b08dd1e97d 100644 --- a/akka-remote/src/main/scala/akka/remote/serialization/ArteryMessageSerializer.scala +++ b/akka-remote/src/main/scala/akka/remote/serialization/ArteryMessageSerializer.scala @@ -91,6 +91,8 @@ private[akka] final class ArteryMessageSerializer(val system: ExtendedActorSyste case adv: ClassManifestCompressionAdvertisement => serializeCompressionAdvertisement(adv)(identity).toByteArray case ClassManifestCompressionAdvertisementAck(from, id) => serializeCompressionTableAdvertisementAck(from, id).toByteArray + case _ => + throw new IllegalArgumentException(s"Can't serialize object of type ${o.getClass} in [${getClass.getName}]") } override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = diff --git a/akka-remote/src/main/scala/akka/remote/transport/AkkaPduCodec.scala b/akka-remote/src/main/scala/akka/remote/transport/AkkaPduCodec.scala index 03a613d101..ec95a219eb 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/AkkaPduCodec.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/AkkaPduCodec.scala @@ -146,7 +146,7 @@ private[remote] object AkkaPduProtobufCodec extends AkkaPduCodec { envelopeBuilder.setRecipient(serializeActorRef(recipient.path.address, recipient)) senderOption match { case OptionVal.Some(sender) => envelopeBuilder.setSender(serializeActorRef(localAddress, sender)) - case OptionVal.None => + case _ => } seqOption.foreach { seq => diff --git 
a/akka-remote/src/main/scala/akka/remote/transport/AkkaProtocolTransport.scala b/akka-remote/src/main/scala/akka/remote/transport/AkkaProtocolTransport.scala index e90c23a5a4..b272f19fd9 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/AkkaProtocolTransport.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/AkkaProtocolTransport.scala @@ -394,6 +394,8 @@ private[remote] class ProtocolStateActor( d.wrappedHandle.readHandlerPromise.success(ActorHandleEventListener(self)) initHandshakeTimer() startWith(WaitHandshake, d) + + case _ => throw new IllegalStateException() // won't happen, compiler exhaustiveness check pleaser } initHandshakeTimer() diff --git a/akka-remote/src/main/scala/akka/remote/transport/ThrottlerTransportAdapter.scala b/akka-remote/src/main/scala/akka/remote/transport/ThrottlerTransportAdapter.scala index 0f6bcffa14..ee11487051 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/ThrottlerTransportAdapter.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/ThrottlerTransportAdapter.scala @@ -224,10 +224,22 @@ class ThrottlerTransportAdapter(_wrappedTransport: Transport, _system: ExtendedA override def managementCommand(cmd: Any): Future[Boolean] = { import ActorTransportAdapter.AskTimeout cmd match { - case s: SetThrottle => (manager ? s).map { case SetThrottleAck => true } - case f: ForceDisassociate => (manager ? f).map { case ForceDisassociateAck => true } - case f: ForceDisassociateExplicitly => (manager ? f).map { case ForceDisassociateAck => true } - case _ => wrappedTransport.managementCommand(cmd) + case s: SetThrottle => + (manager ? s).map { + case SetThrottleAck => true + case _ => throw new IllegalStateException() // won't happen, compiler exhaustiveness check pleaser + } + case f: ForceDisassociate => + (manager ? 
f).map { + case ForceDisassociateAck => true + case _ => throw new IllegalStateException() // won't happen, compiler exhaustiveness check pleaser + } + case f: ForceDisassociateExplicitly => + (manager ? f).map { + case ForceDisassociateAck => true + case _ => throw new IllegalStateException() // won't happen, compiler exhaustiveness check pleaser + } + case _ => wrappedTransport.managementCommand(cmd) } } } @@ -363,8 +375,13 @@ private[transport] class ThrottlerManager(wrappedTransport: Transport) internalTarget.sendSystemMessage(Watch(internalTarget, ref)) target.tell(mode, ref) ref.result.future.transform({ - case Terminated(t) if t.path == target.path => SetThrottleAck - case SetThrottleAck => { internalTarget.sendSystemMessage(Unwatch(target, ref)); SetThrottleAck } + case Terminated(t) if t.path == target.path => + SetThrottleAck + case SetThrottleAck => + internalTarget.sendSystemMessage(Unwatch(target, ref)) + SetThrottleAck + case _ => + throw new IllegalArgumentException() // won't happen, compiler exhaustiveness check pleaser }, t => { internalTarget.sendSystemMessage(Unwatch(target, ref)); t })(ExecutionContexts.parasitic) } } diff --git a/akka-remote/src/main/scala/akka/remote/transport/netty/NettyTransport.scala b/akka-remote/src/main/scala/akka/remote/transport/netty/NettyTransport.scala index a9b53ef190..95d421c225 100644 --- a/akka-remote/src/main/scala/akka/remote/transport/netty/NettyTransport.scala +++ b/akka-remote/src/main/scala/akka/remote/transport/netty/NettyTransport.scala @@ -432,7 +432,7 @@ class NettyTransport(val settings: NettyTransportSettings, val system: ExtendedA val handler = NettySSLSupport(sslProvider, isClient) handler.setCloseOnSSLException(true) handler - case OptionVal.None => + case _ => throw new IllegalStateException("Expected enable-ssl=on") } @@ -508,6 +508,7 @@ class NettyTransport(val settings: NettyTransportSettings, val system: ExtendedA val newServerChannel = inboundBootstrap match { case b: ServerBootstrap => 
b.bind(address) case b: ConnectionlessBootstrap => b.bind(address) + case _ => throw new IllegalStateException() // won't happen, compiler exhaustiveness check pleaser } // Block reads until a handler actor is registered diff --git a/akka-remote/src/test/scala/akka/remote/Ticket1978CommunicationSpec.scala b/akka-remote/src/test/scala/akka/remote/Ticket1978CommunicationSpec.scala index 4d0c206133..6bb2ae185c 100644 --- a/akka-remote/src/test/scala/akka/remote/Ticket1978CommunicationSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/Ticket1978CommunicationSpec.scala @@ -74,6 +74,7 @@ object Configuration { def getCipherConfig(cipher: String, enabled: String*): CipherConfig = { val (localPort, remotePort) = SocketUtil.temporaryServerAddresses(2, "127.0.0.1").map(_.getPort) match { case Seq(local, remote) => (local, remote) + case _ => throw new RuntimeException() } try { //if (true) throw new IllegalArgumentException("Ticket1978*Spec isn't enabled") diff --git a/akka-remote/src/test/scala/akka/remote/TransientSerializationErrorSpec.scala b/akka-remote/src/test/scala/akka/remote/TransientSerializationErrorSpec.scala index 586e6b0e1a..6ac975ced3 100644 --- a/akka-remote/src/test/scala/akka/remote/TransientSerializationErrorSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/TransientSerializationErrorSpec.scala @@ -30,6 +30,7 @@ object TransientSerializationErrorSpec { case ToBinaryIllegal => "TI" case NotDeserializable => "ND" case IllegalOnDeserialize => "IOD" + case _ => throw new NotSerializableException() } def toBinary(o: AnyRef): Array[Byte] = o match { case ToBinaryNotSerializable => throw new NotSerializableException() @@ -40,6 +41,7 @@ object TransientSerializationErrorSpec { manifest match { case "ND" => throw new NotSerializableException() // Not sure this applies here case "IOD" => throw new IllegalArgumentException() + case _ => throw new NotSerializableException() } } } diff --git 
a/akka-remote/src/test/scala/akka/remote/artery/SystemMessageAckerSpec.scala b/akka-remote/src/test/scala/akka/remote/artery/SystemMessageAckerSpec.scala index 9fd63fb37a..cdf291242c 100644 --- a/akka-remote/src/test/scala/akka/remote/artery/SystemMessageAckerSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/artery/SystemMessageAckerSpec.scala @@ -32,6 +32,7 @@ class SystemMessageAckerSpec extends AkkaSpec(""" .map { case sysMsg @ SystemMessageEnvelope(_, _, ackReplyTo) => InboundEnvelope(recipient, sysMsg, OptionVal.None, ackReplyTo.uid, inboundContext.association(ackReplyTo.uid)) + case _ => throw new RuntimeException() } .via(new SystemMessageAcker(inboundContext)) .map { case env: InboundEnvelope => env.message } diff --git a/akka-remote/src/test/scala/akka/remote/artery/SystemMessageDeliverySpec.scala b/akka-remote/src/test/scala/akka/remote/artery/SystemMessageDeliverySpec.scala index 8da523d4f4..a262700423 100644 --- a/akka-remote/src/test/scala/akka/remote/artery/SystemMessageDeliverySpec.scala +++ b/akka-remote/src/test/scala/akka/remote/artery/SystemMessageDeliverySpec.scala @@ -82,6 +82,7 @@ abstract class AbstractSystemMessageDeliverySpec(c: Config) extends ArteryMultiN outboundEnvelope.message match { case sysEnv: SystemMessageEnvelope => InboundEnvelope(recipient, sysEnv, OptionVal.None, addressA.uid, inboundContext.association(addressA.uid)) + case _ => throw new RuntimeException() }) .async .via(new SystemMessageAcker(inboundContext)) diff --git a/akka-remote/src/test/scala/akka/remote/artery/compress/CompressionIntegrationSpec.scala b/akka-remote/src/test/scala/akka/remote/artery/compress/CompressionIntegrationSpec.scala index 32b0d753cc..b0c10e9df1 100644 --- a/akka-remote/src/test/scala/akka/remote/artery/compress/CompressionIntegrationSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/artery/compress/CompressionIntegrationSpec.scala @@ -5,9 +5,7 @@ package akka.remote.artery.compress import scala.concurrent.duration._ - import 
com.typesafe.config.ConfigFactory - import akka.actor._ import akka.actor.ExtendedActorSystem import akka.remote.artery.ArteryMultiNodeSpec @@ -15,6 +13,8 @@ import akka.remote.artery.compress.CompressionProtocol.Events import akka.serialization.SerializerWithStringManifest import akka.testkit._ +import java.io.NotSerializableException + object CompressionIntegrationSpec { val commonConfig = ConfigFactory.parseString(s""" @@ -419,10 +419,12 @@ class TestMessageSerializer(val system: ExtendedActorSystem) extends SerializerW override def manifest(o: AnyRef): String = o match { case _: TestMessage => TestMessageManifest + case _ => throw new NotSerializableException() } override def toBinary(o: AnyRef): Array[Byte] = o match { case msg: TestMessage => msg.name.getBytes + case _ => throw new NotSerializableException() } override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = { diff --git a/akka-remote/src/test/scala/akka/remote/artery/compress/HandshakeShouldDropCompressionTableSpec.scala b/akka-remote/src/test/scala/akka/remote/artery/compress/HandshakeShouldDropCompressionTableSpec.scala index ea07c5b225..249d3ec06e 100644 --- a/akka-remote/src/test/scala/akka/remote/artery/compress/HandshakeShouldDropCompressionTableSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/artery/compress/HandshakeShouldDropCompressionTableSpec.scala @@ -136,8 +136,9 @@ class HandshakeShouldDropCompressionTableSpec def identify(_system: String, port: Int, name: String) = { val selection = system.actorSelection(s"akka://${_system}@localhost:$port/user/$name") - val ActorIdentity(1, ref) = Await.result(selection ? Identify(1), 3.seconds) - ref.get + val identity = Await.result((selection ? 
Identify(1)).mapTo[ActorIdentity], 3.seconds) + if (identity.correlationId != 1) throw new RuntimeException("Got the wrong identity back") + identity.ref.get } } diff --git a/akka-remote/src/test/scala/akka/remote/serialization/PrimitivesSerializationSpec.scala b/akka-remote/src/test/scala/akka/remote/serialization/PrimitivesSerializationSpec.scala index 8d8640545a..ff43aedf9d 100644 --- a/akka-remote/src/test/scala/akka/remote/serialization/PrimitivesSerializationSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/serialization/PrimitivesSerializationSpec.scala @@ -6,11 +6,8 @@ package akka.remote.serialization import java.nio.ByteBuffer import java.nio.ByteOrder - import scala.util.Random - import com.typesafe.config.ConfigFactory - import akka.actor.ExtendedActorSystem import akka.serialization.BaseSerializer import akka.serialization.ByteBufferSerializer @@ -19,6 +16,8 @@ import akka.serialization.Serializer import akka.testkit.AkkaSpec import akka.util.ByteString +import java.io.NotSerializableException + object PrimitivesSerializationSpec { val serializationTestOverrides = "" @@ -50,6 +49,7 @@ class PrimitivesSerializationSpec extends AkkaSpec(PrimitivesSerializationSpec.t case _: akka.serialization.IntSerializer => new IntSerializer(extSystem) case _: akka.serialization.StringSerializer => new StringSerializer(extSystem) case _: akka.serialization.ByteStringSerializer => new ByteStringSerializer(extSystem) + case _ => throw new NotSerializableException() } } diff --git a/akka-remote/src/test/scala/akka/remote/serialization/SerializationTransportInformationSpec.scala b/akka-remote/src/test/scala/akka/remote/serialization/SerializationTransportInformationSpec.scala index cc82c2a78e..3d9c13bdfd 100644 --- a/akka-remote/src/test/scala/akka/remote/serialization/SerializationTransportInformationSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/serialization/SerializationTransportInformationSpec.scala @@ -5,10 +5,8 @@ package akka.remote.serialization 
import java.nio.charset.StandardCharsets - import com.typesafe.config.Config import com.typesafe.config.ConfigFactory - import akka.actor.ActorIdentity import akka.actor.ActorRef import akka.actor.ActorSystem @@ -23,6 +21,8 @@ import akka.testkit.ImplicitSender import akka.testkit.JavaSerializable import akka.testkit.TestActors +import java.io.NotSerializableException + object SerializationTransportInformationSpec { final case class TestMessage(from: ActorRef, to: ActorRef) @@ -32,6 +32,7 @@ object SerializationTransportInformationSpec { def identifier: Int = 666 def manifest(o: AnyRef): String = o match { case _: TestMessage => "A" + case _ => throw new NotSerializableException() } def toBinary(o: AnyRef): Array[Byte] = o match { case TestMessage(from, to) => @@ -39,6 +40,7 @@ object SerializationTransportInformationSpec { val fromStr = Serialization.serializedActorPath(from) val toStr = Serialization.serializedActorPath(to) s"$fromStr,$toStr".getBytes(StandardCharsets.UTF_8) + case _ => throw new NotSerializableException() } def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = { verifyTransportInfo() @@ -50,6 +52,7 @@ object SerializationTransportInformationSpec { val from = system.provider.resolveActorRef(fromStr) val to = system.provider.resolveActorRef(toStr) TestMessage(from, to) + case _ => throw new NotSerializableException() } } diff --git a/akka-serialization-jackson/src/main/scala/akka/serialization/jackson/JacksonSerializer.scala b/akka-serialization-jackson/src/main/scala/akka/serialization/jackson/JacksonSerializer.scala index 460ae1f375..2596d45b2d 100644 --- a/akka-serialization-jackson/src/main/scala/akka/serialization/jackson/JacksonSerializer.scala +++ b/akka-serialization-jackson/src/main/scala/akka/serialization/jackson/JacksonSerializer.scala @@ -327,7 +327,7 @@ import akka.util.OptionVal throw new IllegalStateException( s"Migration version ${transformer.supportedForwardVersion} is " + s"behind version $fromVersion of deserialized 
type [$manifestClassName]") - case None => + case _ => manifestClassName } @@ -370,7 +370,7 @@ import akka.util.OptionVal val jsonTree = objectMapper.readTree(decompressedBytes) val newJsonTree = transformer.transform(fromVersion, jsonTree) objectMapper.treeToValue(newJsonTree, clazz) - case None => + case _ => objectMapper.readValue(decompressedBytes, clazz) } @@ -539,10 +539,10 @@ import akka.util.OptionVal out.toByteArray } else { LZ4Meta.get(bytes) match { - case OptionVal.None => bytes case OptionVal.Some(meta) => val srcLen = bytes.length - meta.offset lz4Decompressor.decompress(bytes, meta.offset, srcLen, meta.length) + case _ => bytes } } } diff --git a/akka-serialization-jackson/src/test/scala/akka/serialization/jackson/JacksonSerializerSpec.scala b/akka-serialization-jackson/src/test/scala/akka/serialization/jackson/JacksonSerializerSpec.scala index f564594389..8f2c0409e0 100644 --- a/akka-serialization-jackson/src/test/scala/akka/serialization/jackson/JacksonSerializerSpec.scala +++ b/akka-serialization-jackson/src/test/scala/akka/serialization/jackson/JacksonSerializerSpec.scala @@ -69,6 +69,7 @@ object ScalaTestMessages { final class SimpleCommandNotCaseClass(val name: String) extends TestMessage { override def equals(obj: Any): Boolean = obj match { case other: SimpleCommandNotCaseClass => other.name == name + case _ => false } override def hashCode(): Int = name.hashCode } diff --git a/akka-stream-testkit/src/main/scala/akka/stream/testkit/StreamTestKit.scala b/akka-stream-testkit/src/main/scala/akka/stream/testkit/StreamTestKit.scala index 24106d9892..5e75bc34e6 100644 --- a/akka-stream-testkit/src/main/scala/akka/stream/testkit/StreamTestKit.scala +++ b/akka-stream-testkit/src/main/scala/akka/stream/testkit/StreamTestKit.scala @@ -587,6 +587,7 @@ object TestSubscriber { } match { case OnNext(n: I @unchecked) => Right(n) case OnError(err) => Left(err) + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } } @@ -601,6 
+602,7 @@ object TestSubscriber { } match { case OnNext(n: I @unchecked) => Right(n) case OnError(err) => Left(err) + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } } @@ -614,6 +616,7 @@ object TestSubscriber { } match { case OnComplete => Left(OnComplete) case OnNext(n: I @unchecked) => Right(n) + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } } @@ -766,6 +769,7 @@ object TestSubscriber { case OnNext(i: I @unchecked) => b += i drain() + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } // if no subscription was obtained yet, we expect it diff --git a/akka-stream-tests/src/test/scala/akka/stream/FusingSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/FusingSpec.scala index cebcb26026..4048eef52d 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/FusingSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/FusingSpec.scala @@ -110,7 +110,8 @@ class FusingSpec extends StreamSpec { val downstream = Flow[Int] .prepend(Source.single(1)) .flatMapPrefix(0) { - case Nil => throw TE("I hate mondays") + case Nil => throw TE("I hate mondays") + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } .watchTermination()(Keep.right) .to(Sink.ignore) diff --git a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/KeepGoingStageSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/KeepGoingStageSpec.scala index 7d08f8d005..55cbd71275 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/KeepGoingStageSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/KeepGoingStageSpec.scala @@ -26,14 +26,14 @@ import akka.stream.testkit.scaladsl.StreamTestKit._ class KeepGoingStageSpec extends StreamSpec { - trait PingCmd extends NoSerializationVerificationNeeded + sealed trait PingCmd extends NoSerializationVerificationNeeded case class Register(probe: ActorRef) extends PingCmd case object 
Ping extends PingCmd case object CompleteStage extends PingCmd case object FailStage extends PingCmd case object Throw extends PingCmd - trait PingEvt extends NoSerializationVerificationNeeded + sealed trait PingEvt extends NoSerializationVerificationNeeded case object Pong extends PingEvt case object PostStop extends PingEvt case object UpstreamCompleted extends PingEvt diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDispatcherSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDispatcherSpec.scala index 76b01c6857..12b3d7cb8c 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDispatcherSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowDispatcherSpec.scala @@ -25,7 +25,8 @@ class FlowDispatcherSpec extends StreamSpec(s"my-dispatcher = $${akka.test.strea val probe = TestProbe() Source(List(1, 2, 3)).map(i => { probe.ref ! Thread.currentThread().getName(); i }).to(Sink.ignore).run() probe.receiveN(3).foreach { - case s: String => s should startWith(system.name + "-" + dispatcher) + case s: String => s should startWith(system.name + "-" + dispatcher) + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } } diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFlatMapPrefixSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFlatMapPrefixSpec.scala index fae8b5994c..3766933e3d 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFlatMapPrefixSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowFlatMapPrefixSpec.scala @@ -465,6 +465,7 @@ class FlowFlatMapPrefixSpec extends StreamSpec { case ex: NeverMaterializedException => ex.getCause should not be null ex.getCause should be(a[AbruptTerminationException]) + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } doneF.failed.futureValue should be(a[AbruptTerminationException]) } diff --git 
a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSectionSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSectionSpec.scala index ae0314511a..5f791f6e4c 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSectionSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FlowSectionSpec.scala @@ -78,11 +78,13 @@ class FlowSectionSpec extends StreamSpec(FlowSectionSpec.config) { Source(0 to 2).via(f1).via(f2).runWith(Sink.ignore) defaultDispatcher.receiveN(3).foreach { - case s: String => s should include("akka.test.stream-dispatcher") + case s: String => s should include("akka.test.stream-dispatcher") + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } customDispatcher.receiveN(3).foreach { - case s: String => s should include("my-dispatcher1") + case s: String => s should include("my-dispatcher1") + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } } diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FramingSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FramingSpec.scala index e9c58f540f..ae76d437de 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FramingSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/FramingSpec.scala @@ -188,6 +188,7 @@ class FramingSpec extends StreamSpec { byteOrder match { case ByteOrder.LITTLE_ENDIAN => h.take(fieldLength) case ByteOrder.BIG_ENDIAN => h.drop(4 - fieldLength) + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } } offset ++ header ++ payload ++ tail diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphStageTimersSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphStageTimersSpec.scala index a1223751c4..709d84efe4 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphStageTimersSpec.scala +++ 
b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphStageTimersSpec.scala @@ -80,6 +80,7 @@ class GraphStageTimersSpec extends StreamSpec { scheduleOnce("TestCancelTimer", 500.milli.dilated) case TestRepeatedTimer => scheduleWithFixedDelay("TestRepeatedTimer", 100.millis.dilated, 100.millis.dilated) + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } } } diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphUnzipWithSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphUnzipWithSpec.scala index 6c1e611a42..db65825893 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphUnzipWithSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphUnzipWithSpec.scala @@ -190,6 +190,7 @@ class GraphUnzipWithSpec extends StreamSpec(""" leftProbe.expectError() match { case a: java.lang.ArithmeticException => a.getMessage should be("/ by zero") + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } rightProbe.expectError() diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipLatestWithSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipLatestWithSpec.scala index 9e1147bb12..13512b7585 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipLatestWithSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipLatestWithSpec.scala @@ -97,6 +97,7 @@ class GraphZipLatestWithSpec extends TwoStreamsSetup { probe.expectError() match { case a: java.lang.ArithmeticException => a.getMessage should be("/ by zero") + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } probe.expectNoMessage(200.millis) } diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipWithNSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipWithNSpec.scala index b1f4c8a926..9b1d09db80 100644 --- 
a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipWithNSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipWithNSpec.scala @@ -81,6 +81,7 @@ class GraphZipWithNSpec extends TwoStreamsSetup { } probe.expectError() match { case a: java.lang.ArithmeticException => a.getMessage should be("/ by zero") + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } probe.expectNoMessage(200.millis) } diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipWithSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipWithSpec.scala index 879d0e73d0..d9538eec1a 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipWithSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipWithSpec.scala @@ -80,6 +80,7 @@ class GraphZipWithSpec extends TwoStreamsSetup { } probe.expectError() match { case a: java.lang.ArithmeticException => a.getMessage should be("/ by zero") + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } probe.expectNoMessage(200.millis) } diff --git a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/UnfoldResourceAsyncSourceSpec.scala b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/UnfoldResourceAsyncSourceSpec.scala index d7908b6247..8170eebcfd 100644 --- a/akka-stream-tests/src/test/scala/akka/stream/scaladsl/UnfoldResourceAsyncSourceSpec.scala +++ b/akka-stream-tests/src/test/scala/akka/stream/scaladsl/UnfoldResourceAsyncSourceSpec.scala @@ -172,8 +172,9 @@ class UnfoldResourceAsyncSourceSpec extends StreamSpec(UnboundedMailboxConfig) { iterator => if (iterator.hasNext) { iterator.next() match { - case n: Int => Future.successful(Some(n)) - case e: TE => throw e + case n: Int => Future.successful(Some(n)) + case e: TE => throw e + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } } else Future.successful(None), _ => Future.successful(Done)) @@ -190,8 +191,9 @@ class 
UnfoldResourceAsyncSourceSpec extends StreamSpec(UnboundedMailboxConfig) { iterator => if (iterator.hasNext) { iterator.next() match { - case n: Int => Future.successful(Some(n)) - case e: TE => Future.failed(e) + case n: Int => Future.successful(Some(n)) + case e: TE => Future.failed(e) + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") } } else Future.successful(None), _ => Future.successful(Done)) diff --git a/akka-stream-typed/src/main/scala/akka/stream/typed/scaladsl/ActorFlow.scala b/akka-stream-typed/src/main/scala/akka/stream/typed/scaladsl/ActorFlow.scala index dd6f11d7a4..159d41f0aa 100644 --- a/akka-stream-typed/src/main/scala/akka/stream/typed/scaladsl/ActorFlow.scala +++ b/akka-stream-typed/src/main/scala/akka/stream/typed/scaladsl/ActorFlow.scala @@ -146,6 +146,7 @@ object ActorFlow { ActorFlow.ask(parallelism)(ref)(makeMessage).map { case StatusReply.Success(a) => a.asInstanceOf[A] case StatusReply.Error(err) => throw err + case _ => throw new RuntimeException() // compiler exhaustiveness check pleaser } } diff --git a/akka-stream-typed/src/test/scala/akka/stream/typed/scaladsl/MaterializerForTypedSpec.scala b/akka-stream-typed/src/test/scala/akka/stream/typed/scaladsl/MaterializerForTypedSpec.scala index 0c22bf2dd2..35cef18b76 100644 --- a/akka-stream-typed/src/test/scala/akka/stream/typed/scaladsl/MaterializerForTypedSpec.scala +++ b/akka-stream-typed/src/test/scala/akka/stream/typed/scaladsl/MaterializerForTypedSpec.scala @@ -38,7 +38,7 @@ class MaterializerForTypedSpec extends ScalaTestWithActorTestKit with AnyWordSpe val actor = testKit.spawn(Behaviors.setup[String] { context => val materializerForActor = Materializer(context) - Behaviors.receiveMessage[String] { + Behaviors.receiveMessagePartial[String] { case "run" => val f = Source.single("hello").runWith(Sink.head)(materializerForActor) f.onComplete(probe.ref ! 
_)(system.executionContext) diff --git a/akka-stream/src/main/scala/akka/stream/ActorMaterializer.scala b/akka-stream/src/main/scala/akka/stream/ActorMaterializer.scala index 2f4eb3122b..c145898a99 100644 --- a/akka-stream/src/main/scala/akka/stream/ActorMaterializer.scala +++ b/akka-stream/src/main/scala/akka/stream/ActorMaterializer.scala @@ -80,6 +80,9 @@ object ActorMaterializer { case context: ActorContext => // actor context level materializer, will live as a child of this actor PhasedFusingActorMaterializer(context, namePrefix, materializerSettings, materializerSettings.toAttributes) + + case other => + throw new IllegalArgumentException(s"Unexpected type of context: ${other}") } } diff --git a/akka-stream/src/main/scala/akka/stream/Attributes.scala b/akka-stream/src/main/scala/akka/stream/Attributes.scala index da63816b41..176b01275a 100644 --- a/akka-stream/src/main/scala/akka/stream/Attributes.scala +++ b/akka-stream/src/main/scala/akka/stream/Attributes.scala @@ -129,7 +129,7 @@ final case class Attributes(attributeList: List[Attributes.Attribute] = Nil) { find(attributeList) match { case OptionVal.Some(t) => t.asInstanceOf[T] - case OptionVal.None => throw new IllegalStateException(s"Mandatory attribute [$c] not found") + case _ => throw new IllegalStateException(s"Mandatory attribute [$c] not found") } } diff --git a/akka-stream/src/main/scala/akka/stream/impl/ActorRefSource.scala b/akka-stream/src/main/scala/akka/stream/impl/ActorRefSource.scala index 84ec89e0f2..6455da33e9 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/ActorRefSource.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/ActorRefSource.scala @@ -65,18 +65,6 @@ private object ActorRefSource { } case (_, m: T @unchecked) => buffer match { - case OptionVal.None => - if (isCompleting) { - log.warning("Dropping element because Status.Success received already: [{}] in stream [{}]", m, name) - } else if (isAvailable(out)) { - push(out, m) - } else { - log.debug( - "Dropping 
element because there is no downstream demand and no buffer: [{}] in stream [{}]", - m, - name) - } - case OptionVal.Some(buf) => if (isCompleting) { log.warning( @@ -130,7 +118,20 @@ private object ActorRefSource { // there is a precondition check in Source.actorRefSource factory method to not allow backpressure as strategy failStage(new IllegalStateException("Backpressure is not supported")) } + case _ => + if (isCompleting) { + log.warning("Dropping element because Status.Success received already: [{}] in stream [{}]", m, name) + } else if (isAvailable(out)) { + push(out, m) + } else { + log.debug( + "Dropping element because there is no downstream demand and no buffer: [{}] in stream [{}]", + m, + name) + } } + + case _ => throw new IllegalArgumentException() // won't happen, compiler exhaustiveness check pleaser }.ref private def tryPush(): Unit = { diff --git a/akka-stream/src/main/scala/akka/stream/impl/MaybeSource.scala b/akka-stream/src/main/scala/akka/stream/impl/MaybeSource.scala index f3fbc14a07..5728ad72db 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/MaybeSource.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/MaybeSource.scala @@ -47,7 +47,7 @@ import akka.util.OptionVal case OptionVal.Some(value) => push(out, value) completeStage() - case OptionVal.None => + case _ => } private def handleCompletion(elem: Try[Option[AnyRef]]): Unit = { diff --git a/akka-stream/src/main/scala/akka/stream/impl/PhasedFusingActorMaterializer.scala b/akka-stream/src/main/scala/akka/stream/impl/PhasedFusingActorMaterializer.scala index 074e00a88e..9c0f1dbb2d 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/PhasedFusingActorMaterializer.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/PhasedFusingActorMaterializer.scala @@ -703,8 +703,8 @@ private final case class SavedIslandData( logics.add(logic) logic.stageId = logics.size() - 1 fullIslandName match { - case OptionVal.Some(_) => // already set - case OptionVal.None => fullIslandName = 
OptionVal.Some(islandName + "-" + logic.attributes.nameForActorRef()) + case OptionVal.None => fullIslandName = OptionVal.Some(islandName + "-" + logic.attributes.nameForActorRef()) + case _ => // already set } matAndLogic } @@ -812,7 +812,7 @@ private final case class SavedIslandData( val actorName = fullIslandName match { case OptionVal.Some(n) => n - case OptionVal.None => islandName + case _ => islandName } val ref = materializer.actorOf(props, actorName) @@ -829,7 +829,7 @@ private final case class SavedIslandData( case OptionVal.Some(stage) => if (isIn) s"in port [${stage.shape.inlets(missingHandlerIdx)}]" else s"out port [${stage.shape.outlets(missingHandlerIdx - logic.inCount)}" - case OptionVal.None => + case _ => if (isIn) s"in port id [$missingHandlerIdx]" else s"out port id [$missingHandlerIdx]" } @@ -907,6 +907,7 @@ private final case class SavedIslandData( subscriberOrVirtualPublisher match { case v: VirtualPublisher[_] => v.registerPublisher(publisher) case s: Subscriber[Any] @unchecked => publisher.subscribe(s) + case _ => throw new IllegalStateException() // won't happen, compiler exhaustiveness check pleaser } } diff --git a/akka-stream/src/main/scala/akka/stream/impl/RetryFlowCoordinator.scala b/akka-stream/src/main/scala/akka/stream/impl/RetryFlowCoordinator.scala index c1ae2478d5..0641e4c9c1 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/RetryFlowCoordinator.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/RetryFlowCoordinator.scala @@ -98,16 +98,16 @@ import akka.util.OptionVal override def onPush(): Unit = { val result = grab(internalIn) elementInProgress match { - case OptionVal.None => - failStage( - new IllegalStateException( - s"inner flow emitted unexpected element $result; the flow must be one-in one-out")) case OptionVal.Some(_) if retryNo == maxRetries => pushExternal(result) case OptionVal.Some(in) => decideRetry(in, result) match { case None => pushExternal(result) case Some(element) => planRetry(element) } + case _ 
=> + failStage( + new IllegalStateException( + s"inner flow emitted unexpected element $result; the flow must be one-in one-out")) } } }) diff --git a/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala b/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala index f62e2efb0b..cc88dfe1d4 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/StreamLayout.scala @@ -179,6 +179,8 @@ import akka.util.OptionVal case Inert => // nothing to be done case _ => pub.subscribe(subscriber.asInstanceOf[Subscriber[Any]]) } + case other => + throw new IllegalStateException(s"Unexpected state in VirtualProcessor: $other") } case state @ _ => if (VirtualProcessor.Debug) println(s"VirtualPublisher#$hashCode(_).onSubscribe.rec($s) spec violation") @@ -477,6 +479,8 @@ import akka.util.OptionVal case _: Subscriber[_] => rejectAdditionalSubscriber(subscriber, "Sink.asPublisher(fanout = false)") + + case unexpected => throw new IllegalStateException(s"Unexpected state in VirtualPublisher: $unexpected") } } rec() // return value is boolean only to make the expressions above compile diff --git a/akka-stream/src/main/scala/akka/stream/impl/TraversalBuilder.scala b/akka-stream/src/main/scala/akka/stream/impl/TraversalBuilder.scala index 70c91689c7..44e6dcd892 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/TraversalBuilder.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/TraversalBuilder.scala @@ -532,8 +532,8 @@ import akka.util.unused override def makeIsland(islandTag: IslandTag): TraversalBuilder = this.islandTag match { - case OptionVal.None => copy(islandTag = OptionVal(islandTag)) - case OptionVal.Some(_) => this + case OptionVal.None => copy(islandTag = OptionVal(islandTag)) + case _ => this } override def assign(out: OutPort, relativeSlot: Int): TraversalBuilder = @@ -687,7 +687,7 @@ import akka.util.unused val inOpt = OptionVal(shape.inlets.headOption.orNull) val inOffs = inOpt 
match { case OptionVal.Some(in) => completed.offsetOf(in) - case OptionVal.None => 0 + case _ => 0 } LinearTraversalBuilder( @@ -704,7 +704,7 @@ import akka.util.unused val out = shape.outlets.head // Cannot be empty, otherwise it would be a CompletedTraversalBuilder val inOffs = inOpt match { case OptionVal.Some(in) => composite.offsetOf(in) - case OptionVal.None => 0 + case _ => 0 } LinearTraversalBuilder( @@ -771,8 +771,8 @@ import akka.util.unused private def applyIslandAndAttributes(t: Traversal): Traversal = { val withIslandTag = islandTag match { - case OptionVal.None => t case OptionVal.Some(tag) => EnterIsland(tag).concat(t).concat(ExitIsland) + case _ => t } if (attributes eq Attributes.none) withIslandTag @@ -808,7 +808,7 @@ import akka.util.unused .concat(traversalSoFar)), pendingBuilder = OptionVal.None, beforeBuilder = EmptyTraversal) - case OptionVal.None => + case _ => copy( inPort = OptionVal.None, outPort = OptionVal.None, @@ -822,7 +822,7 @@ import akka.util.unused if (outPort.contains(out)) { pendingBuilder match { case OptionVal.Some(composite) => composite.offsetOfModule(out) - case OptionVal.None => 0 // Output belongs to the last module, which will be materialized *first* + case _ => 0 // Output belongs to the last module, which will be materialized *first* } } else throw new IllegalArgumentException(s"Port $out cannot be accessed in this builder") @@ -847,7 +847,7 @@ import akka.util.unused beforeBuilder.concat(composite.assign(out, relativeSlot).traversal.concat(traversalSoFar))), pendingBuilder = OptionVal.None, beforeBuilder = EmptyTraversal) - case OptionVal.None => + case _ => copy(outPort = OptionVal.None, traversalSoFar = rewireLastOutTo(traversalSoFar, relativeSlot)) } } else @@ -919,45 +919,6 @@ import akka.util.unused * different. 
*/ val assembledTraversalForThis = this.pendingBuilder match { - case OptionVal.None => - /* - * This is the case where we are a pure linear builder (all composites have been already completed), - * which means that traversalSoFar contains everything already, except the final attributes and islands - * applied. - * - * Since the exposed output port has been wired optimistically to -1, we need to check if this is correct, - * and correct if necessary. This is the step below: - */ - if (toAppend.inOffset == (toAppend.inSlots - 1)) { - /* - * if the builder we want to append (remember that is _prepend_ from the Traversal's perspective) - * has its exposed input port at the last location (which is toAppend.inSlots - 1 because input - * port offsets start with 0), then -1 is the correct wiring. I.e. - * - * 1. Visit the appended module first in the traversal, its input port is the last - * 2. Visit this module second in the traversal, wire the output port back to the previous input port (-1) - */ - traversalSoFar - } else { - /* - * The optimistic mapping to -1 is not correct, we need to unfold the Traversal to find our last module - * (which is the _first_ module in the Traversal) and rewire the output assignment to the correct offset. - * - * Since we will be visited second (and the appended toAppend first), we need to - * - * 1. go backward toAppend.inSlots slots to reach the beginning offset of toAppend - * 2. 
now go forward toAppend.inOffset to reach the correct location - * - * <-------------- (-toAppend.inSlots) - * -------> (+toAppend.inOffset) - * - * --------in----|[out module]---------- - * toAppend this - * - */ - rewireLastOutTo(traversalSoFar, toAppend.inOffset - toAppend.inSlots) - } - case OptionVal.Some(composite) => /* * This is the case where our last module is a composite, and since it does not have its output port @@ -995,6 +956,45 @@ import akka.util.unused * (remember that this is the _reverse_ of the Flow DSL order) */ beforeBuilder.concat(compositeTraversal).concat(traversalSoFar) + + case _ => + /* + * This is the case where we are a pure linear builder (all composites have been already completed), + * which means that traversalSoFar contains everything already, except the final attributes and islands + * applied. + * + * Since the exposed output port has been wired optimistically to -1, we need to check if this is correct, + * and correct if necessary. This is the step below: + */ + if (toAppend.inOffset == (toAppend.inSlots - 1)) { + /* + * if the builder we want to append (remember that is _prepend_ from the Traversal's perspective) + * has its exposed input port at the last location (which is toAppend.inSlots - 1 because input + * port offsets start with 0), then -1 is the correct wiring. I.e. + * + * 1. Visit the appended module first in the traversal, its input port is the last + * 2. Visit this module second in the traversal, wire the output port back to the previous input port (-1) + */ + traversalSoFar + } else { + /* + * The optimistic mapping to -1 is not correct, we need to unfold the Traversal to find our last module + * (which is the _first_ module in the Traversal) and rewire the output assignment to the correct offset. + * + * Since we will be visited second (and the appended toAppend first), we need to + * + * 1. go backward toAppend.inSlots slots to reach the beginning offset of toAppend + * 2. 
now go forward toAppend.inOffset to reach the correct location + * + * <-------------- (-toAppend.inSlots) + * -------> (+toAppend.inOffset) + * + * --------in----|[out module]---------- + * toAppend this + * + */ + rewireLastOutTo(traversalSoFar, toAppend.inOffset - toAppend.inSlots) + } } /* @@ -1029,7 +1029,7 @@ import akka.util.unused islandTag = OptionVal.None // islandTag is reset for the new enclosing builder ) - case OptionVal.Some(_) => + case _ => // Some(pendingBuilder) /* * In this case we need to assemble as much as we can, and create a new "sandwich" of * beforeBuilder ~ pendingBuilder ~ traversalSoFar @@ -1043,12 +1043,12 @@ import akka.util.unused // First prepare island enter and exit if tags are present toAppend.islandTag match { - case OptionVal.None => // Nothing changes case OptionVal.Some(tag) => // Enter the island just before the appended builder (keeping the toAppend.beforeBuilder steps) newBeforeTraversal = EnterIsland(tag).concat(newBeforeTraversal) // Exit the island just after the appended builder (they should not applied to _this_ builder) newTraversalSoFar = ExitIsland.concat(newTraversalSoFar) + case _ => // Nothing changes } // Secondly, prepare attribute push and pop if Attributes are present @@ -1101,7 +1101,7 @@ import akka.util.unused this.islandTag match { case OptionVal.Some(_) => this // Wrapping with an island, then immediately re-wrapping makes the second island empty, so can be omitted - case OptionVal.None => copy(islandTag = OptionVal.Some(islandTag)) + case _ => copy(islandTag = OptionVal.Some(islandTag)) } } @@ -1207,8 +1207,8 @@ import akka.util.unused } val finalTraversal = islandTag match { - case OptionVal.None => traversal case OptionVal.Some(tag) => EnterIsland(tag).concat(traversal).concat(ExitIsland) + case _ => traversal } // The CompleteTraversalBuilder only keeps the minimum amount of necessary information that is needed for it diff --git 
a/akka-stream/src/main/scala/akka/stream/impl/UnfoldResourceSourceAsync.scala b/akka-stream/src/main/scala/akka/stream/impl/UnfoldResourceSourceAsync.scala index c8ae3ad9d6..708fcbd321 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/UnfoldResourceSourceAsync.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/UnfoldResourceSourceAsync.scala @@ -123,6 +123,7 @@ import scala.concurrent.ExecutionContext case Failure(ex) => throw ex // failed to open but stream is stopped already } + case _ => // we don't care here } } } diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/ActorGraphInterpreter.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/ActorGraphInterpreter.scala index 4e997dbc9d..c542e1ab61 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/fusing/ActorGraphInterpreter.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/ActorGraphInterpreter.scala @@ -344,7 +344,7 @@ import akka.util.OptionVal case OptionVal.Some(e) => tryOnSubscribe(subscriber, CancelledSubscription) tryOnError(subscriber, e) - case OptionVal.None => + case _ => tryOnSubscribe(subscriber, CancelledSubscription) tryOnComplete(subscriber) } catch { @@ -761,6 +761,8 @@ import akka.util.OptionVal shortCircuitBuffer.poll() match { case b: BoundaryEvent => processEvent(b) case Resume => finishShellRegistration() + case unexpected => + throw new IllegalStateException(s"Unexpected element in short circuit buffer: '${unexpected.getClass}'") } shortCircuitBatch() } diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/FlatMapPrefix.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/FlatMapPrefix.scala index 4f52d7b95b..aa37382147 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/fusing/FlatMapPrefix.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/FlatMapPrefix.scala @@ -51,7 +51,7 @@ import akka.util.OptionVal override def onPush(): Unit = { subSource match { case OptionVal.Some(s) => s.push(grab(in)) - case 
OptionVal.None => + case _ => accumulated.append(grab(in)) if (accumulated.size == n) { materializeFlow() @@ -65,14 +65,14 @@ import akka.util.OptionVal override def onUpstreamFinish(): Unit = { subSource match { case OptionVal.Some(s) => s.complete() - case OptionVal.None => materializeFlow() + case _ => materializeFlow() } } override def onUpstreamFailure(ex: Throwable): Unit = { subSource match { case OptionVal.Some(s) => s.fail(ex) - case OptionVal.None => + case _ => //flow won't be materialized, so we have to complete the future with a failure indicating this matPromise.failure(new NeverMaterializedException(ex)) super.onUpstreamFailure(ex) @@ -84,30 +84,34 @@ import akka.util.OptionVal case OptionVal.Some(s) => //delegate to subSink s.pull() - case OptionVal.None if accumulated.size < n => - pull(in) - case OptionVal.None if accumulated.size == n => - //corner case for n = 0, can be handled in FlowOps - materializeFlow() + case _ => + if (accumulated.size < n) pull(in) + else if (accumulated.size == n) { + //corner case for n = 0, can be handled in FlowOps + materializeFlow() + } else { + throw new IllegalStateException(s"Unexpected accumulated size: ${accumulated.size} (n: $n)") + } } } - override def onDownstreamFinish(cause: Throwable): Unit = { + override def onDownstreamFinish(cause: Throwable): Unit = subSink match { - case OptionVal.None if propagateToNestedMaterialization => downstreamCause = OptionVal.Some(cause) - case OptionVal.None => - matPromise.failure(new NeverMaterializedException(cause)) - cancelStage(cause) case OptionVal.Some(s) => s.cancel(cause) + case _ => + if (propagateToNestedMaterialization) downstreamCause = OptionVal.Some(cause) + else { + matPromise.failure(new NeverMaterializedException(cause)) + cancelStage(cause) + } } - } def materializeFlow(): Unit = try { val prefix = accumulated.toVector accumulated.clear() subSource = OptionVal.Some(new SubSourceOutlet[In]("FlatMapPrefix.subSource")) - val OptionVal.Some(theSubSource) = 
subSource + val theSubSource = subSource.get theSubSource.setHandler { new OutHandler { override def onPull(): Unit = { @@ -124,7 +128,7 @@ import akka.util.OptionVal } } subSink = OptionVal.Some(new SubSinkInlet[Out]("FlatMapPrefix.subSink")) - val OptionVal.Some(theSubSink) = subSink + val theSubSink = subSink.get theSubSink.setHandler { new InHandler { override def onPush(): Unit = { @@ -156,7 +160,7 @@ import akka.util.OptionVal //in case downstream was closed downstreamCause match { case OptionVal.Some(ex) => theSubSink.cancel(ex) - case OptionVal.None => + case _ => } //in case we've materialized due to upstream completion diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/FutureFlow.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/FutureFlow.scala index 73a6b1278b..d3f3e21a3b 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/fusing/FutureFlow.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/FutureFlow.scala @@ -116,11 +116,11 @@ import scala.util.{ Failure, Success, Try } innerMatValue.success(matVal) upstreamFailure match { case OptionVal.Some(ex) => subSource.fail(ex) - case OptionVal.None => if (isClosed(in)) subSource.complete() + case _ => if (isClosed(in)) subSource.complete() } downstreamCause match { case OptionVal.Some(cause) => subSink.cancel(cause) - case OptionVal.None => if (isAvailable(out)) subSink.pull() + case _ => if (isAvailable(out)) subSink.pull() } setHandlers(in, out, new InHandler with OutHandler { override def onPull(): Unit = subSink.pull() diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/Ops.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/Ops.scala index 5fc89f2409..f9dbab1109 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/fusing/Ops.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/Ops.scala @@ -166,10 +166,12 @@ import akka.util.ccompat._ override def onPush(): Unit = { val elem = grab(in) withSupervision(() => p(elem)) match { - case 
Some(flag) if flag => pull(in) - case Some(flag) if !flag => - push(out, elem) - setHandler(in, rest) + case Some(flag) => + if (flag) pull(in) + else { + push(out, elem) + setHandler(in, rest) + } case None => // do nothing } } @@ -245,6 +247,7 @@ private[stream] object Collect { result match { case NotApplied => pull(in) case result: Out @unchecked => push(out, result) + case _ => throw new RuntimeException() // won't happen, compiler exhaustiveness check pleaser } case None => //do nothing } @@ -298,6 +301,7 @@ private[stream] object Collect { recovered = Some(result) } } + case _ => throw new RuntimeException() // won't happen, compiler exhaustiveness check pleaser } catch { case NonFatal(ex) => failStage(ex) } @@ -523,12 +527,15 @@ private[stream] object Collect { } private val futureCB = getAsyncCallback[Try[Out]] { - case Success(next) if next != null => - current = next - pushAndPullOrFinish(next) - elementHandled = true - case Success(null) => doSupervision(ReactiveStreamsCompliance.elementMustNotBeNullException) - case Failure(t) => doSupervision(t) + case Success(next) => + if (next != null) { + current = next + pushAndPullOrFinish(next) + elementHandled = true + } else { + doSupervision(ReactiveStreamsCompliance.elementMustNotBeNullException) + } + case Failure(t) => doSupervision(t) }.invoke _ setHandlers(in, out, ZeroHandler) @@ -663,8 +670,10 @@ private[stream] object Collect { case other => val ex = other match { case Failure(t) => t - case Success(s) if s == null => + case Success(null) => ReactiveStreamsCompliance.elementMustNotBeNullException + case Success(_) => + throw new IllegalArgumentException() // won't happen, compiler exhaustiveness check pleaser } val supervision = decider(ex) @@ -1227,7 +1236,7 @@ private[stream] object Collect { def supervisionDirectiveFor(decider: Supervision.Decider, ex: Throwable): Supervision.Directive = { cachedSupervisionDirective match { case OptionVal.Some(d) => d - case OptionVal.None => + case _ => val d = 
decider(ex) cachedSupervisionDirective = OptionVal.Some(d) d @@ -1319,13 +1328,15 @@ private[stream] object Collect { else if (isAvailable(out)) { val holder = buffer.dequeue() holder.elem match { - case Success(elem) if elem != null => - push(out, elem) - pullIfNeeded() - - case Success(null) => - pullIfNeeded() - pushNextIfPossible() + case Success(elem) => + if (elem != null) { + push(out, elem) + pullIfNeeded() + } else { + // elem is null + pullIfNeeded() + pushNextIfPossible() + } case Failure(NonFatal(ex)) => holder.supervisionDirectiveFor(decider, ex) match { @@ -1336,6 +1347,9 @@ private[stream] object Collect { // try next element pushNextIfPossible() } + case Failure(ex) => + // fatal exception in buffer, not sure that it can actually happen, but for good measure + throw ex } } @@ -1386,7 +1400,7 @@ private[stream] object Collect { push(out, elem) if (isCompleted) completeStage() } else buffer.enqueue(elem) - case Success(null) => + case Success(_) => if (isCompleted) completeStage() else if (!hasBeenPulled(in)) tryPull(in) case Failure(ex) => diff --git a/akka-stream/src/main/scala/akka/stream/impl/fusing/StreamOfStreams.scala b/akka-stream/src/main/scala/akka/stream/impl/fusing/StreamOfStreams.scala index 7ed9a4f13d..2d6b4e68c7 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/fusing/StreamOfStreams.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/fusing/StreamOfStreams.scala @@ -60,6 +60,8 @@ import akka.util.ccompat.JavaConverters._ case single: SingleSource[T] @unchecked => push(out, single.elem) removeSource(single) + case other => + throw new IllegalStateException(s"Unexpected source type in queue: '${other.getClass}'") } } @@ -120,6 +122,7 @@ import akka.util.ccompat.JavaConverters._ sources -= sub case _: SingleSource[_] => pendingSingleSources -= 1 + case other => throw new IllegalArgumentException(s"Unexpected source type: '${other.getClass}'") } if (pullSuppressed) tryPull(in) if (activeSources == 0 && isClosed(in)) 
completeStage() @@ -704,6 +707,8 @@ import akka.util.ccompat.JavaConverters._ case cmd: CommandScheduledBeforeMaterialization => throw new IllegalStateException( s"${newState.command} on subsink($name) is illegal when ${cmd.command} is still pending") + + case _ => throw new RuntimeException() // won't happen, compiler exhaustiveness check pleaser } override def createLogic(attr: Attributes) = new GraphStageLogic(shape) with InHandler { @@ -737,6 +742,8 @@ import akka.util.ccompat.JavaConverters._ case _: /* Materialized */ AsyncCallback[Command @unchecked] => failStage(materializationException.getOrElse(createMaterializedTwiceException())) + + case _ => throw new RuntimeException() // won't happen, compiler exhaustiveness check pleaser } override def preStart(): Unit = @@ -779,6 +786,7 @@ import akka.util.ccompat.JavaConverters._ status.get.asInstanceOf[AsyncCallback[Any]].invoke(ActorSubscriberMessage.OnComplete) case OnError(_) => // already failed out, keep the exception as that happened first case ActorSubscriberMessage.OnComplete => // it was already completed + case _ => throw new RuntimeException() // won't happen, compiler exhaustiveness check pleaser } def failSubstream(ex: Throwable): Unit = status.get match { @@ -789,6 +797,7 @@ import akka.util.ccompat.JavaConverters._ status.get.asInstanceOf[AsyncCallback[Any]].invoke(failure) case ActorSubscriberMessage.OnComplete => // it was already completed, ignore failure as completion happened first case OnError(_) => // already failed out, keep the exception as that happened first + case _ => throw new RuntimeException() // won't happen, compiler exhaustiveness check pleaser } def timeout(d: FiniteDuration): Boolean = @@ -814,6 +823,7 @@ import akka.util.ccompat.JavaConverters._ case ActorSubscriberMessage.OnError(ex) => failStage(ex) case _: AsyncCallback[_] => failStage(materializationException.getOrElse(createMaterializedTwiceException())) + case _ => throw new RuntimeException() // won't happen, compiler 
exhaustiveness check pleaser } } diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/TLSActor.scala b/akka-stream/src/main/scala/akka/stream/impl/io/TLSActor.scala index bf30a81edc..47c5184de7 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/TLSActor.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/TLSActor.scala @@ -103,6 +103,7 @@ import akka.util.ByteString case n: NegotiateNewSession => setNewSessionParameters(n) ByteString.empty + case _ => throw new RuntimeException() // won't happen, compiler exhaustiveness check pleaser } if (tracing) log.debug(s"chopping from new chunk of ${buffer.size} into $name (${b.position()})") } else { diff --git a/akka-stream/src/main/scala/akka/stream/impl/io/TcpStages.scala b/akka-stream/src/main/scala/akka/stream/impl/io/TcpStages.scala index 7554f51468..3f43d3fdf7 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/io/TcpStages.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/io/TcpStages.scala @@ -113,6 +113,8 @@ import akka.util.ByteString unbindPromise.tryFailure(ex) failStage(ex) } + case other => + log.warning("Unexpected message to TcpStage: [{}]", other.getClass) } } @@ -184,6 +186,8 @@ import akka.util.ByteString override def onTimer(timerKey: Any): Unit = timerKey match { case BindShutdownTimer => completeStage() // TODO need to manually shut down instead right? + case other => + throw new IllegalArgumentException(s"Unknown timer key $other") } override def postStop(): Unit = { @@ -278,6 +282,7 @@ private[stream] object ConnectionSourceStage { case ob @ Outbound(manager, cmd, _, _) => getStageActor(connecting(ob)).watch(manager) manager ! cmd + case other => throw new IllegalArgumentException(s"Unsupported TCP role: ${other}") } } @@ -300,6 +305,7 @@ private[stream] object ConnectionSourceStage { if (isAvailable(bytesOut)) connection ! ResumeReading if (isClosed(bytesIn)) connection ! 
ConfirmedClose else pull(bytesIn) + case other => log.warning("Unexpected message to connecting TcpStage: [{}]", other.getClass) } } @@ -336,6 +342,8 @@ private[stream] object ConnectionSourceStage { case Closed => completeStage() case ConfirmedClosed => completeStage() case PeerClosed => complete(bytesOut) + case other => + log.warning("Unexpected message to connected TcpStage: [{}]", other.getClass) } } diff --git a/akka-stream/src/main/scala/akka/stream/impl/streamref/SinkRefImpl.scala b/akka-stream/src/main/scala/akka/stream/impl/streamref/SinkRefImpl.scala index 79c043d733..f26eca4660 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/streamref/SinkRefImpl.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/streamref/SinkRefImpl.scala @@ -49,7 +49,7 @@ private[stream] final class SinkRefStageImpl[In] private[akka] (val initialPartn private def initialRefName: String = initialPartnerRef match { case OptionVal.Some(ref) => ref.toString - case OptionVal.None => "" + case _ => "" } override def createLogicAndMaterializedValue(inheritedAttributes: Attributes): (GraphStageLogic, SourceRef[In]) = @@ -84,7 +84,7 @@ private[stream] final class SinkRefStageImpl[In] private[akka] (val initialPartn private def getPartnerRef: ActorRef = partnerRef match { case OptionVal.Some(ref) => ref - case OptionVal.None => throw TargetRefNotInitializedYetException() + case _ => throw TargetRefNotInitializedYetException() } val SubscriptionTimeoutTimerKey = "SubscriptionTimeoutKey" @@ -115,7 +115,7 @@ private[stream] final class SinkRefStageImpl[In] private[akka] (val initialPartn "Illegal initialPartnerRef! 
This may be a bug, please report your " + "usage and complete stack trace on the issue tracker: https://github.com/akka/akka") tryPull() - case OptionVal.None => + case _ => log.debug( "[{}] Created SinkRef with initial partner, local worker: {}, subscription timeout: {}", stageActorName, @@ -142,7 +142,7 @@ private[stream] final class SinkRefStageImpl[In] private[akka] (val initialPartn failStage(ex) case OptionVal.Some(_ /* known to be Success*/ ) => completeStage() // other side has terminated (in response to a completion message) so we can safely terminate - case OptionVal.None => + case _ => failStage( RemoteStreamRefActorTerminatedException( s"Remote target receiver of data $partnerRef terminated. " + @@ -209,6 +209,8 @@ private[stream] final class SinkRefStageImpl[In] private[akka] (val initialPartn s"within subscription timeout: ${PrettyDuration.format(subscriptionTimeout.timeout)}!") throw ex + + case other => throw new IllegalArgumentException(s"Unknown timer key: $other") } private def grabSequenced[T](in: Inlet[T]): StreamRefsProtocol.SequencedOnNext[T] = { @@ -271,7 +273,7 @@ private[stream] final class SinkRefStageImpl[In] private[akka] (val initialPartn finishedWithAwaitingPartnerTermination = OptionVal(Success(Done)) setKeepGoing(true) // we will terminate once partner ref has Terminated (to avoid racing Terminated with completion message) - case OptionVal.None => + case _ => if (partner != getPartnerRef) { val ex = InvalidPartnerActorException(partner, getPartnerRef, failureMsg) partner ! 
StreamRefsProtocol.RemoteStreamFailure(ex.getMessage) diff --git a/akka-stream/src/main/scala/akka/stream/impl/streamref/SourceRefImpl.scala b/akka-stream/src/main/scala/akka/stream/impl/streamref/SourceRefImpl.scala index 939c4a670c..8145a54121 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/streamref/SourceRefImpl.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/streamref/SourceRefImpl.scala @@ -161,7 +161,7 @@ private[stream] final class SourceRefStageImpl[Out](val initialPartnerRef: Optio // this means we're the "remote" for an already active Source on the other side (the "origin") self.watch(ref) AwaitingSubscription(ref) - case OptionVal.None => + case _ => // we are the "origin", and awaiting the other side to start when we'll receive their partherRef AwaitingPartner } @@ -394,6 +394,8 @@ private[stream] final class SourceRefStageImpl[Out](val initialPartnerRef: Optio throw new IllegalStateException( s"[$stageActorName] CancellationDeadlineTimerKey can't happen in state $other") } + + case other => throw new IllegalArgumentException(s"Unknown timer key: ${other}") } override def onDownstreamFinish(cause: Throwable): Unit = { diff --git a/akka-stream/src/main/scala/akka/stream/impl/streamref/StreamRefResolverImpl.scala b/akka-stream/src/main/scala/akka/stream/impl/streamref/StreamRefResolverImpl.scala index d206ec724e..61124befe9 100644 --- a/akka-stream/src/main/scala/akka/stream/impl/streamref/StreamRefResolverImpl.scala +++ b/akka-stream/src/main/scala/akka/stream/impl/streamref/StreamRefResolverImpl.scala @@ -18,11 +18,13 @@ import akka.stream.StreamRefResolver def toSerializationFormat[T](ref: SourceRef[T]): String = ref match { case SourceRefImpl(actorRef) => actorRef.path.toSerializationFormatWithAddress(system.provider.getDefaultAddress) + case other => throw new IllegalArgumentException(s"Unexpected SourceRef impl: ${other.getClass}") } def toSerializationFormat[T](ref: SinkRef[T]): String = ref match { case SinkRefImpl(actorRef) => 
actorRef.path.toSerializationFormatWithAddress(system.provider.getDefaultAddress) + case other => throw new IllegalArgumentException(s"Unexpected SinkRef impl: ${other.getClass}") } def resolveSourceRef[T](serializedSourceRef: String): SourceRef[T] = diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/Flow.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/Flow.scala index 3a2a5f935b..403bde7647 100755 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/Flow.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/Flow.scala @@ -592,6 +592,7 @@ object Flow { .flatMapPrefix(1) { case Seq(a) => futureFlow(flowFactory(a)).mapMaterializedValue(_ => NotUsed) case Nil => Flow[I].asInstanceOf[Flow[I, O, NotUsed]] + case _ => throw new RuntimeException() // won't happen, compiler exhaustiveness check pleaser } .mapMaterializedValue(_ => fallback()) @@ -702,6 +703,7 @@ object Flow { .asInstanceOf[Flow[I, O, NotUsed]] .mapMaterializedValue(_ => Future.failed[M](new NeverMaterializedException())) f + case _ => throw new RuntimeException() // won't happen, compiler exhaustiveness check pleaser }(Keep.right) .addAttributes(Attributes(SourceLocation.forLambda(create))) .mapMaterializedValue(_.flatten) diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/Framing.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/Framing.scala index a7a0323a4f..6deb9dc409 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/Framing.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/Framing.scala @@ -255,7 +255,7 @@ object Framing { // Retrive previous position val previous = indices.lastOption match { case OptionVal.Some((_, i)) => i + separatorBytes.size - case OptionVal.None => 0 + case _ => 0 } if (possibleMatchPos - previous > maximumLineBytes) { @@ -315,7 +315,7 @@ object Framing { private def reset(): Unit = { val previous = indices.lastOption match { case OptionVal.Some((_, i)) => i + separatorBytes.size - case OptionVal.None => 0 + case _ => 
0 } buffer = buffer.drop(previous).compact @@ -378,6 +378,7 @@ object Framing { private val intDecoder = byteOrder match { case ByteOrder.BIG_ENDIAN => bigEndianDecoder case ByteOrder.LITTLE_ENDIAN => littleEndianDecoder + case _ => throw new RuntimeException() // won't happen, compiler exhaustiveness check pleaser } val in = Inlet[ByteString]("LengthFieldFramingStage.in") diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/RestartFlow.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/RestartFlow.scala index 5d523a21cf..ae68cf5edb 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/RestartFlow.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/RestartFlow.scala @@ -435,7 +435,7 @@ object RestartWithBackoffFlow { cause match { case OptionVal.Some(ex) => cancelStage(ex) - case OptionVal.None => + case _ => throw new IllegalStateException("Timer hitting without first getting a cancel cannot happen") } diff --git a/akka-stream/src/main/scala/akka/stream/scaladsl/Source.scala b/akka-stream/src/main/scala/akka/stream/scaladsl/Source.scala index 86b78ec478..253708423f 100644 --- a/akka-stream/src/main/scala/akka/stream/scaladsl/Source.scala +++ b/akka-stream/src/main/scala/akka/stream/scaladsl/Source.scala @@ -808,6 +808,7 @@ object Source { case immutable.Seq() => empty[O] case immutable.Seq(source) => source.map(t => zipper(immutable.Seq(t))).mapMaterializedValue(_ => NotUsed) case s1 +: s2 +: ss => combine(s1, s2, ss: _*)(ZipWithN(zipper)) + case _ => throw new IllegalArgumentException() // just to please compiler completeness check } source.addAttributes(DefaultAttributes.zipWithN) diff --git a/akka-stream/src/main/scala/akka/stream/serialization/StreamRefSerializer.scala b/akka-stream/src/main/scala/akka/stream/serialization/StreamRefSerializer.scala index 2ebafe305b..005805467d 100644 --- a/akka-stream/src/main/scala/akka/stream/serialization/StreamRefSerializer.scala +++ 
b/akka-stream/src/main/scala/akka/stream/serialization/StreamRefSerializer.scala @@ -43,6 +43,7 @@ private[akka] final class StreamRefSerializer(val system: ExtendedActorSystem) case _: SinkRefImpl[_] => SinkRefManifest // case _: MaterializedSinkRef[_] => SinkRefManifest case StreamRefsProtocol.Ack => AckManifest + case unknown => throw new IllegalArgumentException(s"Unsupported object ${unknown.getClass}") } override def toBinary(o: AnyRef): Array[Byte] = o match { @@ -60,6 +61,7 @@ private[akka] final class StreamRefSerializer(val system: ExtendedActorSystem) case ref: SourceRefImpl[_] => serializeSourceRef(ref).toByteArray // case ref: MaterializedSourceRef[_] => serializeSourceRef(ref.).toByteArray case StreamRefsProtocol.Ack => Array.emptyByteArray + case unknown => throw new IllegalArgumentException(s"Unsupported object ${unknown.getClass}") } override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = manifest match { @@ -73,6 +75,7 @@ private[akka] final class StreamRefSerializer(val system: ExtendedActorSystem) case SinkRefManifest => deserializeSinkRef(bytes) case SourceRefManifest => deserializeSourceRef(bytes) case AckManifest => StreamRefsProtocol.Ack + case unknown => throw new IllegalArgumentException(s"Unsupported manifest '$unknown'") } // ----- diff --git a/akka-stream/src/main/scala/akka/stream/snapshot/MaterializerState.scala b/akka-stream/src/main/scala/akka/stream/snapshot/MaterializerState.scala index be9e12643b..cd336f44ea 100644 --- a/akka-stream/src/main/scala/akka/stream/snapshot/MaterializerState.scala +++ b/akka-stream/src/main/scala/akka/stream/snapshot/MaterializerState.scala @@ -36,6 +36,7 @@ object MaterializerState { SystemMaterializer(system).materializer match { case impl: PhasedFusingActorMaterializer => requestFromSupervisor(impl.supervisor)(impl.system.dispatchers.internalDispatcher) + case other => throw new IllegalArgumentException(s"Unsupported Materializer type ${other.getClass}") } } @@ -47,6 +48,7 @@ object 
MaterializerState { mat match { case impl: PhasedFusingActorMaterializer => requestFromSupervisor(impl.supervisor)(impl.system.dispatchers.internalDispatcher) + case other => throw new IllegalArgumentException(s"Unsupported Materializer type ${other.getClass}") } } diff --git a/akka-testkit/src/main/scala/akka/testkit/TestKit.scala b/akka-testkit/src/main/scala/akka/testkit/TestKit.scala index 835f50cb75..347fda0c4d 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestKit.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestKit.scala @@ -812,6 +812,7 @@ trait TestKitBase { queue.offerFirst(lastMessage) lastMessage = msg acc.reverse + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") // exhaustiveness check } } } @@ -865,6 +866,7 @@ trait TestKitBase { case RealMessage(msg, _) => lastMessage = message msg + case unexpected => throw new RuntimeException(s"Unexpected: $unexpected") // exhaustiveness check } } diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 3b83755631..0724e16d90 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -28,7 +28,7 @@ object Dependencies { val jacksonVersion = "2.11.4" val scala212Version = "2.12.13" - val scala213Version = "2.13.3" + val scala213Version = "2.13.5" val scala3Version = "3.0.0-RC1" val reactiveStreamsVersion = "1.0.3" @@ -57,7 +57,7 @@ object Dependencies { case twoThirteen if twoThirteen.startsWith("2.13") => scala213Version case twoTwelve if twoTwelve.startsWith("2.12") => scala212Version case three if three.startsWith("3.0") => scala3Version - case "default" => scala212Version + case "default" => scala213Version case other => throw new IllegalArgumentException(s"Unsupported scala version [$other]. Must be 2.12, 2.13 or 3.0.") }