diff --git a/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java b/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java index 616675eb77..b87ace5849 100644 --- a/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java +++ b/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java @@ -64,6 +64,7 @@ public class JavaExtension { @Test public void mustBeAccessible() { + assertTrue(system.hasExtension((TestExtensionId.TestExtensionProvider))); assertSame(system.extension(TestExtensionId.TestExtensionProvider).system, system); assertSame(TestExtensionId.TestExtensionProvider.apply(system).system, system); } diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala index 28485c9bad..9c5dd95dc5 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala @@ -70,7 +70,7 @@ object ActorSystemSpec { } @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) -class ActorSystemSpec extends AkkaSpec("""akka.extensions = ["akka.actor.TestExtension$"]""") with ImplicitSender { +class ActorSystemSpec extends AkkaSpec("""akka.extensions = ["akka.actor.TestExtension"]""") with ImplicitSender { "An ActorSystem" must { @@ -95,9 +95,10 @@ class ActorSystemSpec extends AkkaSpec("""akka.extensions = ["akka.actor.TestExt } "support extensions" in { + // TestExtension is configured and should be loaded at startup + system.hasExtension(TestExtension) must be(true) TestExtension(system).system must be === system system.extension(TestExtension).system must be === system - system.hasExtension(TestExtension) must be(true) } "run termination callbacks in order" in { diff --git a/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala index 1aef438627..37aa133583 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala @@ -35,6 +35,9 @@ object DeployerSpec { router = scatter-gather within = 2 seconds } + /service-consistent-hashing { + router = consistent-hashing + } /service-resizer { router = round-robin resizer { @@ -118,6 +121,10 @@ class DeployerSpec extends AkkaSpec(DeployerSpec.deployerConf) { assertRouting("/service-scatter-gather", ScatterGatherFirstCompletedRouter(nrOfInstances = 1, within = 2 seconds), "/service-scatter-gather") } + "be able to parse 'akka.actor.deployment._' with consistent-hashing router" in { + assertRouting("/service-consistent-hashing", ConsistentHashingRouter(1), "/service-consistent-hashing") + } + "be able to parse 'akka.actor.deployment._' with router resizer" in { val resizer = DefaultResizer() assertRouting("/service-resizer", RoundRobinRouter(resizer = Some(resizer)), "/service-resizer") diff --git a/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala index c0c6bf0f06..bae6d2f6fe 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala @@ -5,7 +5,6 @@ package akka.actor import language.postfixOps - import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach } import akka.testkit._ import TestEvent.Mute @@ -15,6 +14,7 @@ import com.typesafe.config.ConfigFactory import scala.concurrent.Await import akka.util.Timeout import scala.concurrent.util.Duration +import 
scala.concurrent.util.FiniteDuration object FSMActorSpec { val timeout = Timeout(2 seconds) @@ -33,7 +33,7 @@ object FSMActorSpec { case object Locked extends LockState case object Open extends LockState - class Lock(code: String, timeout: Duration, latches: Latches) extends Actor with FSM[LockState, CodeState] { + class Lock(code: String, timeout: FiniteDuration, latches: Latches) extends Actor with FSM[LockState, CodeState] { import latches._ @@ -183,6 +183,10 @@ class FSMActorSpec extends AkkaSpec(Map("akka.actor.debug.fsm" -> true)) with Im "run onTermination upon ActorRef.stop()" in { val started = TestLatch(1) + /* + * This lazy val trick is beyond evil: KIDS, DON'T TRY THIS AT HOME! + * It is necessary here because of the path-dependent type fsm.StopEvent. + */ lazy val fsm = new Actor with FSM[Int, Null] { override def preStart = { started.countDown } startWith(1, null) diff --git a/akka-actor-tests/src/test/scala/akka/actor/IOActor.scala b/akka-actor-tests/src/test/scala/akka/actor/IOActor.scala index 72efaef0d4..58ffb9c602 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/IOActor.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/IOActor.scala @@ -5,7 +5,6 @@ package akka.actor import language.postfixOps - import akka.util.ByteString import scala.concurrent.{ ExecutionContext, Await, Future, Promise } import scala.concurrent.util.{ Duration, Deadline } @@ -17,6 +16,7 @@ import akka.pattern.ask import java.net.{ Socket, InetSocketAddress, InetAddress, SocketAddress } import scala.util.Failure import annotation.tailrec +import scala.concurrent.util.FiniteDuration object IOActorSpec { @@ -244,7 +244,10 @@ class IOActorSpec extends AkkaSpec with DefaultTimeout { * @param filter determines which exceptions should be retried * @return a future containing the result or the last exception before a limit was hit. 
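(Side note on the extension test changes above: the config now lists the extension object by its plain class name, without the trailing '$'; the DynamicAccess.getObjectFor change further down in this patch resolves the companion-object suffix automatically. A minimal sketch of such an extension, assuming an ActorSystem value `system` is in scope — all names illustrative only:

import akka.actor._

// illustrative extension; could be listed as akka.extensions = ["com.example.MyExtension"]
object MyExtension extends ExtensionId[MyExtensionImpl] with ExtensionIdProvider {
  override def lookup() = MyExtension
  override def createExtension(system: ExtendedActorSystem) = new MyExtensionImpl(system)
}
class MyExtensionImpl(system: ExtendedActorSystem) extends Extension

// configured extensions are loaded at ActorSystem startup and can be checked before use
if (system.hasExtension(MyExtension)) MyExtension(system)
)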
*/ - def retry[T](count: Option[Int] = None, timeout: Option[Duration] = None, delay: Option[Duration] = Some(100 millis), filter: Option[Throwable ⇒ Boolean] = None)(future: ⇒ Future[T])(implicit executor: ExecutionContext): Future[T] = { + def retry[T](count: Option[Int] = None, + timeout: Option[FiniteDuration] = None, + delay: Option[FiniteDuration] = Some(100 millis), + filter: Option[Throwable ⇒ Boolean] = None)(future: ⇒ Future[T])(implicit executor: ExecutionContext): Future[T] = { val promise = Promise[T]() diff --git a/akka-actor-tests/src/test/scala/akka/actor/ReceiveTimeoutSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ReceiveTimeoutSpec.scala index dc5229ae41..a74cbc9839 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ReceiveTimeoutSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ReceiveTimeoutSpec.scala @@ -5,13 +5,12 @@ package akka.actor import language.postfixOps - import akka.testkit._ import scala.concurrent.util.duration._ - import java.util.concurrent.atomic.AtomicInteger import scala.concurrent.Await import java.util.concurrent.TimeoutException +import scala.concurrent.util.Duration @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class ReceiveTimeoutSpec extends AkkaSpec { @@ -65,7 +64,7 @@ class ReceiveTimeoutSpec extends AkkaSpec { case ReceiveTimeout ⇒ count.incrementAndGet timeoutLatch.open - context.resetReceiveTimeout() + context.setReceiveTimeout(Duration.Undefined) } })) diff --git a/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala index 61a1e84f7e..84367f7ec0 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala @@ -4,7 +4,6 @@ package akka.actor import language.postfixOps - import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach } import akka.util.Timeout import scala.concurrent.{ Await, Future, Promise } @@ -21,6 +20,7 @@ import akka.serialization.JavaSerializer import akka.actor.TypedActor._ import java.lang.IllegalStateException import java.util.concurrent.{ TimeoutException, TimeUnit, CountDownLatch } +import scala.concurrent.util.FiniteDuration object TypedActorSpec { @@ -203,10 +203,10 @@ class TypedActorSpec extends AkkaSpec(TypedActorSpec.config) def newFooBar: Foo = newFooBar(Duration(2, "s")) - def newFooBar(d: Duration): Foo = + def newFooBar(d: FiniteDuration): Foo = TypedActor(system).typedActorOf(TypedProps[Bar](classOf[Foo], classOf[Bar]).withTimeout(Timeout(d))) - def newFooBar(dispatcher: String, d: Duration): Foo = + def newFooBar(dispatcher: String, d: FiniteDuration): Foo = TypedActor(system).typedActorOf(TypedProps[Bar](classOf[Foo], classOf[Bar]).withTimeout(Timeout(d)).withDispatcher(dispatcher)) def newStacked(): Stacked = diff --git a/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala b/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala index 5fe3466b25..56f8cd45fc 100644 --- a/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala @@ -43,6 +43,9 @@ class ConfigSpec extends AkkaSpec(ConfigFactory.defaultReference(ActorSystem.fin getBoolean("akka.jvm-exit-on-fatal-error") must be(true) settings.JvmExitOnFatalError must be(true) + + getInt("akka.actor.deployment.default.virtual-nodes-factor") must be(10) + settings.DefaultVirtualNodesFactor must be(10) } { diff --git 
a/akka-actor-tests/src/test/scala/akka/pattern/AskSpec.scala b/akka-actor-tests/src/test/scala/akka/pattern/AskSpec.scala index a67f65d870..7104e2edb6 100644 --- a/akka-actor-tests/src/test/scala/akka/pattern/AskSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/pattern/AskSpec.scala @@ -68,16 +68,6 @@ class AskSpec extends AkkaSpec { }.getMessage must be === expectedMsg } - "return broken promises on infinite timeout" in { - implicit val timeout = Timeout.never - val echo = system.actorOf(Props(new Actor { def receive = { case x ⇒ sender ! x } })) - val f = echo ? "foo" - val expectedMsg = "Timeouts to `ask` must be finite. Question not sent to [%s]" format echo - intercept[IllegalArgumentException] { - Await.result(f, remaining) - }.getMessage must be === expectedMsg - } - } } \ No newline at end of file diff --git a/akka-actor-tests/src/test/scala/akka/routing/ConsistentHashingRouterSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/ConsistentHashingRouterSpec.scala new file mode 100644 index 0000000000..867da83bd7 --- /dev/null +++ b/akka-actor-tests/src/test/scala/akka/routing/ConsistentHashingRouterSpec.scala @@ -0,0 +1,104 @@ +/** + * Copyright (C) 2009-2012 Typesafe Inc. + */ +package akka.routing + +import scala.concurrent.Await + +import akka.actor.Actor +import akka.actor.ActorRef +import akka.actor.Props +import akka.actor.actorRef2Scala +import akka.pattern.ask +import akka.routing.ConsistentHashingRouter.ConsistentHashable +import akka.routing.ConsistentHashingRouter.ConsistentHashableEnvelope +import akka.routing.ConsistentHashingRouter.ConsistentHashMapping +import akka.testkit.AkkaSpec +import akka.testkit._ + +object ConsistentHashingRouterSpec { + + val config = """ + akka.actor.deployment { + /router1 { + router = consistent-hashing + nr-of-instances = 3 + virtual-nodes-factor = 17 + } + /router2 { + router = consistent-hashing + nr-of-instances = 5 + } + } + """ + + class Echo extends Actor { + def receive = { + case _ ⇒ sender ! self + } + } + + case class Msg(key: Any, data: String) extends ConsistentHashable { + override def consistentHashKey = key + } + + case class MsgKey(name: String) + + case class Msg2(key: Any, data: String) +} + +@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) +class ConsistentHashingRouterSpec extends AkkaSpec(ConsistentHashingRouterSpec.config) with DefaultTimeout with ImplicitSender { + import akka.routing.ConsistentHashingRouterSpec._ + implicit val ec = system.dispatcher + + val router1 = system.actorOf(Props[Echo].withRouter(FromConfig()), "router1") + + "consistent hashing router" must { + "create routees from configuration" in { + val currentRoutees = Await.result(router1 ? CurrentRoutees, remaining).asInstanceOf[RouterRoutees] + currentRoutees.routees.size must be(3) + } + + "select destination based on consistentHashKey of the message" in { + router1 ! Msg("a", "A") + val destinationA = expectMsgType[ActorRef] + router1 ! ConsistentHashableEnvelope(message = "AA", hashKey = "a") + expectMsg(destinationA) + + router1 ! Msg(17, "B") + val destinationB = expectMsgType[ActorRef] + router1 ! ConsistentHashableEnvelope(message = "BB", hashKey = 17) + expectMsg(destinationB) + + router1 ! Msg(MsgKey("c"), "C") + val destinationC = expectMsgType[ActorRef] + router1 ! 
ConsistentHashableEnvelope(message = "CC", hashKey = MsgKey("c")) + expectMsg(destinationC) + } + + "select destination with defined consistentHashRoute" in { + def hashMapping: ConsistentHashMapping = { + case Msg2(key, data) ⇒ key + } + val router2 = system.actorOf(Props[Echo].withRouter(ConsistentHashingRouter( + hashMapping = hashMapping)), "router2") + + router2 ! Msg2("a", "A") + val destinationA = expectMsgType[ActorRef] + router2 ! ConsistentHashableEnvelope(message = "AA", hashKey = "a") + expectMsg(destinationA) + + router2 ! Msg2(17, "B") + val destinationB = expectMsgType[ActorRef] + router2 ! ConsistentHashableEnvelope(message = "BB", hashKey = 17) + expectMsg(destinationB) + + router2 ! Msg2(MsgKey("c"), "C") + val destinationC = expectMsgType[ActorRef] + router2 ! ConsistentHashableEnvelope(message = "CC", hashKey = MsgKey("c")) + expectMsg(destinationC) + } + } + +} diff --git a/akka-actor-tests/src/test/scala/akka/routing/ResizerSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/ResizerSpec.scala index 0de9c9d1c8..561e3d54dd 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/ResizerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/ResizerSpec.scala @@ -4,7 +4,6 @@ package akka.routing import language.postfixOps - import akka.actor.Actor import akka.testkit._ import akka.actor.Props @@ -15,6 +14,7 @@ import java.util.concurrent.atomic.AtomicInteger import akka.pattern.ask import scala.concurrent.util.Duration import java.util.concurrent.TimeoutException +import scala.concurrent.util.FiniteDuration object ResizerSpec { @@ -174,8 +174,8 @@ class ResizerSpec extends AkkaSpec(ResizerSpec.config) with DefaultTimeout with val router = system.actorOf(Props(new Actor { def receive = { - case d: Duration ⇒ Thread.sleep(d.dilated.toMillis); sender ! "done" - case "echo" ⇒ sender ! "reply" + case d: FiniteDuration ⇒ Thread.sleep(d.dilated.toMillis); sender ! "done" + case "echo" ⇒ sender ! "reply" } }).withRouter(RoundRobinRouter(resizer = Some(resizer)))) @@ -190,7 +190,7 @@ class ResizerSpec extends AkkaSpec(ResizerSpec.config) with DefaultTimeout with routees(router) must be(3) - def loop(loops: Int, d: Duration) = { + def loop(loops: Int, d: FiniteDuration) = { for (m ← 0 until loops) router ! d for (m ← 0 until loops) expectMsg(d * 3, "done") } diff --git a/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala index 9e126e75ed..2d4f2245f1 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala @@ -171,8 +171,6 @@ class RoutingSpec extends AkkaSpec(RoutingSpec.config) with DefaultTimeout with } expectMsgType[ActorKilledException] - //#supervision - val router2 = system.actorOf(Props.empty.withRouter(RoundRobinRouter(1).withSupervisorStrategy(escalator))) router2 ! CurrentRoutees EventFilter[ActorKilledException](occurrences = 2) intercept { diff --git a/akka-actor/src/main/java/akka/actor/cell/AbstractActorCell.java b/akka-actor/src/main/java/akka/actor/dungeon/AbstractActorCell.java similarity index 65% rename from akka-actor/src/main/java/akka/actor/cell/AbstractActorCell.java rename to akka-actor/src/main/java/akka/actor/dungeon/AbstractActorCell.java index 2d8c4fbc1e..6735b6e2cb 100644 --- a/akka-actor/src/main/java/akka/actor/cell/AbstractActorCell.java +++ b/akka-actor/src/main/java/akka/actor/dungeon/AbstractActorCell.java @@ -2,7 +2,7 @@ * Copyright (C) 2009-2012 Typesafe Inc. 
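(For comparison with the config-driven router1 in the spec above, a programmatic ConsistentHashingRouter with an explicit virtual-nodes-factor could be created roughly as follows — a sketch only, reusing the Echo actor and `system` from the spec:

import akka.actor.Props
import akka.routing.ConsistentHashingRouter
import akka.routing.ConsistentHashingRouter.ConsistentHashableEnvelope

val router3 = system.actorOf(Props[Echo].withRouter(
  ConsistentHashingRouter(nrOfInstances = 3, virtualNodesFactor = 17)), "router3")
// messages that do not implement ConsistentHashable can still be wrapped in an envelope
router3 ! ConsistentHashableEnvelope(message = "hello", hashKey = "a")
)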
*/ -package akka.actor.cell; +package akka.actor.dungeon; import akka.actor.ActorCell; import akka.util.Unsafe; @@ -14,9 +14,9 @@ final class AbstractActorCell { static { try { - mailboxOffset = Unsafe.instance.objectFieldOffset(ActorCell.class.getDeclaredField("akka$actor$cell$Dispatch$$_mailboxDoNotCallMeDirectly")); - childrenOffset = Unsafe.instance.objectFieldOffset(ActorCell.class.getDeclaredField("akka$actor$cell$Children$$_childrenRefsDoNotCallMeDirectly")); - nextNameOffset = Unsafe.instance.objectFieldOffset(ActorCell.class.getDeclaredField("akka$actor$cell$Children$$_nextNameDoNotCallMeDirectly")); + mailboxOffset = Unsafe.instance.objectFieldOffset(ActorCell.class.getDeclaredField("akka$actor$dungeon$Dispatch$$_mailboxDoNotCallMeDirectly")); + childrenOffset = Unsafe.instance.objectFieldOffset(ActorCell.class.getDeclaredField("akka$actor$dungeon$Children$$_childrenRefsDoNotCallMeDirectly")); + nextNameOffset = Unsafe.instance.objectFieldOffset(ActorCell.class.getDeclaredField("akka$actor$dungeon$Children$$_nextNameDoNotCallMeDirectly")); } catch(Throwable t){ throw new ExceptionInInitializerError(t); } diff --git a/akka-actor/src/main/java/akka/util/internal/HashedWheelTimer.java b/akka-actor/src/main/java/akka/util/internal/HashedWheelTimer.java index cc4328d763..1630f599ee 100644 --- a/akka-actor/src/main/java/akka/util/internal/HashedWheelTimer.java +++ b/akka-actor/src/main/java/akka/util/internal/HashedWheelTimer.java @@ -25,6 +25,7 @@ import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import scala.concurrent.util.Duration; +import scala.concurrent.util.FiniteDuration; import akka.event.LoggingAdapter; import akka.util.Unsafe; @@ -241,7 +242,7 @@ public class HashedWheelTimer implements Timer { return new HashedWheelTimeout(this, task, time); } - public Timeout newTimeout(TimerTask task, Duration delay) { + public Timeout newTimeout(TimerTask task, FiniteDuration delay) { final long currentTime = System.nanoTime(); if (task == null) { diff --git a/akka-actor/src/main/java/akka/util/internal/Timer.java b/akka-actor/src/main/java/akka/util/internal/Timer.java index 7086aef9c6..be7656ec6c 100644 --- a/akka-actor/src/main/java/akka/util/internal/Timer.java +++ b/akka-actor/src/main/java/akka/util/internal/Timer.java @@ -15,9 +15,9 @@ */ package akka.util.internal; -import scala.concurrent.util.Duration; import java.util.Set; -import java.util.concurrent.TimeUnit; + +import scala.concurrent.util.FiniteDuration; /** * Schedules {@link TimerTask}s for one-time future execution in a background @@ -42,7 +42,7 @@ public interface Timer { * @throws IllegalStateException if this timer has been * {@linkplain #stop() stopped} already */ - Timeout newTimeout(TimerTask task, Duration delay); + Timeout newTimeout(TimerTask task, FiniteDuration delay); /** * Releases all resources acquired by this {@link Timer} and cancels all diff --git a/akka-actor/src/main/resources/reference.conf b/akka-actor/src/main/resources/reference.conf index a585fe3406..163d2e83d9 100644 --- a/akka-actor/src/main/resources/reference.conf +++ b/akka-actor/src/main/resources/reference.conf @@ -14,7 +14,7 @@ akka { # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT) event-handlers = ["akka.event.Logging$DefaultLogger"] - + # Event handlers are created and registered synchronously during ActorSystem # start-up, and since they are actors, this timeout is used to bound the # waiting time @@ -49,7 +49,7 @@ akka { # FQCN of the 
ActorRefProvider to be used; the below is the built-in default, # another one is akka.remote.RemoteActorRefProvider in the akka-remote bundle. provider = "akka.actor.LocalActorRefProvider" - + # The guardian "/user" will use this subclass of akka.actor.SupervisorStrategyConfigurator # to obtain its supervisorStrategy. Besides the default there is # akka.actor.StoppingSupervisorStrategy @@ -69,7 +69,7 @@ akka { # Serializes and deserializes creators (in Props) to ensure that they can be sent over the network, # this is only intended for testing. serialize-creators = off - + # Timeout for send operations to top-level actors which are in the process of being started. # This is only relevant if using a bounded mailbox or the CallingThreadDispatcher for a top-level actor. unstarted-push-timeout = 10s @@ -108,6 +108,9 @@ akka { # within is the timeout used for routers containing future calls within = 5 seconds + # number of virtual nodes per node for consistent-hashing router + virtual-nodes-factor = 10 + routees { # Alternatively to giving nr-of-instances you can specify the full # paths of those actors which should be routed to. This setting takes @@ -296,7 +299,7 @@ akka { # enable DEBUG logging of subscription changes on the eventStream event-stream = off - + # enable DEBUG logging of unhandled messages unhandled = off @@ -318,13 +321,13 @@ akka { serialization-bindings { "java.io.Serializable" = java } - + # Configuration items which are used by the akka.actor.ActorDSL._ methods dsl { # Maximum queue size of the actor created by newInbox(); this protects against # faulty programs which use select() and consistently miss messages inbox-size = 1000 - + # Default timeout to assume for operations like Inbox.receive et al default-timeout = 5s } diff --git a/akka-actor/src/main/scala/akka/actor/ActorCell.scala b/akka-actor/src/main/scala/akka/actor/ActorCell.scala index c34b9ac05e..2c12dae8e4 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorCell.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorCell.scala @@ -4,13 +4,13 @@ package akka.actor -import cell.ChildrenContainer.{ WaitingForChildren } import java.io.{ ObjectOutputStream, NotSerializableException } import scala.annotation.tailrec import scala.collection.immutable.TreeSet import scala.concurrent.util.Duration import scala.util.control.NonFatal -import akka.actor.cell.ChildrenContainer +import akka.actor.dungeon.ChildrenContainer +import akka.actor.dungeon.ChildrenContainer.WaitingForChildren import akka.dispatch.{ Watch, Unwatch, Terminate, SystemMessage, Suspend, Supervise, Resume, Recreate, NoMessage, MessageDispatcher, Envelope, Create, ChildTerminated } import akka.event.Logging.{ LogEvent, Debug, Error } import akka.japi.Procedure @@ -55,23 +55,25 @@ trait ActorContext extends ActorRefFactory { def props: Props /** - * Gets the current receive timeout + * Gets the current receive timeout. * When specified, the receive method should be able to handle a 'ReceiveTimeout' message. */ - def receiveTimeout: Option[Duration] + def receiveTimeout: Duration /** - * Defines the default timeout for an initial receive invocation. + * Defines the inactivity timeout after which the sending of a `ReceiveTimeout` message is triggered. * When specified, the receive function should be able to handle a 'ReceiveTimeout' message. * 1 millisecond is the minimum supported timeout. 
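(The contract described here can be illustrated with a small sketch — hypothetical actor, names illustrative only:

import akka.actor.{ Actor, ReceiveTimeout }
import scala.concurrent.util.Duration
import scala.concurrent.util.duration._

class Watchdog extends Actor {
  // stays in effect until explicitly switched off
  context.setReceiveTimeout(30.seconds)
  def receive = {
    case ReceiveTimeout ⇒
      // replaces the removed resetReceiveTimeout()
      context.setReceiveTimeout(Duration.Undefined)
    case _ ⇒ // handle regular messages
  }
}
)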
+ * + * Please note that the receive timeout might fire and enqueue the `ReceiveTimeout` message right after + * another message was enqueued; hence it is '''not guaranteed''' that upon reception of the receive + * timeout there must have been an idle period beforehand as configured via this method. + * + * Once set, the receive timeout stays in effect (i.e. continues firing repeatedly after inactivity + * periods). Pass in `Duration.Undefined` to switch off this feature. */ def setReceiveTimeout(timeout: Duration): Unit - /** - * Clears the receive timeout, i.e. deactivates this feature. - */ - def resetReceiveTimeout(): Unit - /** * Changes the Actor's behavior to become the new 'Receive' (PartialFunction[Any, Unit]) handler. * Puts the behavior on top of the hotswap stack. @@ -290,11 +292,11 @@ private[akka] class ActorCell( val props: Props, val parent: InternalActorRef) extends UntypedActorContext with Cell - with cell.ReceiveTimeout - with cell.Children - with cell.Dispatch - with cell.DeathWatch - with cell.FaultHandling { + with dungeon.ReceiveTimeout + with dungeon.Children + with dungeon.Dispatch + with dungeon.DeathWatch + with dungeon.FaultHandling { import ActorCell._ diff --git a/akka-actor/src/main/scala/akka/actor/ActorSystem.scala b/akka-actor/src/main/scala/akka/actor/ActorSystem.scala index bce966b99e..182239adbf 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorSystem.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorSystem.scala @@ -17,7 +17,7 @@ import akka.util._ import akka.util.internal.{ HashedWheelTimer, ConcurrentIdentityHashMap } import java.util.concurrent.{ ThreadFactory, CountDownLatch, TimeoutException, RejectedExecutionException } import java.util.concurrent.TimeUnit.MILLISECONDS -import akka.actor.cell.ChildrenContainer +import akka.actor.dungeon.ChildrenContainer import scala.concurrent.util.FiniteDuration import util.{ Failure, Success } @@ -166,6 +166,8 @@ object ActorSystem { final val Daemonicity: Boolean = getBoolean("akka.daemonic") final val JvmExitOnFatalError: Boolean = getBoolean("akka.jvm-exit-on-fatal-error") + final val DefaultVirtualNodesFactor: Int = getInt("akka.actor.deployment.default.virtual-nodes-factor") + if (ConfigVersion != Version) throw new akka.ConfigurationException("Akka JAR version [" + Version + "] does not match the provided config version [" + ConfigVersion + "]") diff --git a/akka-actor/src/main/scala/akka/actor/Deployer.scala b/akka-actor/src/main/scala/akka/actor/Deployer.scala index 9ceebaacb3..3f5e2b1785 100644 --- a/akka-actor/src/main/scala/akka/actor/Deployer.scala +++ b/akka-actor/src/main/scala/akka/actor/Deployer.scala @@ -143,8 +143,6 @@ private[akka] class Deployer(val settings: ActorSystem.Settings, val dynamicAcce val nrOfInstances = deployment.getInt("nr-of-instances") - val within = Duration(deployment.getMilliseconds("within"), TimeUnit.MILLISECONDS) - val resizer: Option[Resizer] = if (config.hasPath("resizer")) Some(DefaultResizer(deployment.getConfig("resizer"))) else None val router: RouterConfig = deployment.getString("router") match { @@ -152,8 +150,13 @@ private[akka] class Deployer(val settings: ActorSystem.Settings, val dynamicAcce case "round-robin" ⇒ RoundRobinRouter(nrOfInstances, routees, resizer) case "random" ⇒ RandomRouter(nrOfInstances, routees, resizer) case "smallest-mailbox" ⇒ SmallestMailboxRouter(nrOfInstances, routees, resizer) - case "scatter-gather" ⇒ ScatterGatherFirstCompletedRouter(nrOfInstances, routees, within, resizer) case "broadcast" ⇒ 
BroadcastRouter(nrOfInstances, routees, resizer) + case "scatter-gather" ⇒ + val within = Duration(deployment.getMilliseconds("within"), TimeUnit.MILLISECONDS) + ScatterGatherFirstCompletedRouter(nrOfInstances, routees, within, resizer) + case "consistent-hashing" ⇒ + val vnodes = deployment.getInt("virtual-nodes-factor") + ConsistentHashingRouter(nrOfInstances, routees, resizer, virtualNodesFactor = vnodes) case fqn ⇒ val args = Seq(classOf[Config] -> deployment) dynamicAccess.createInstanceFor[RouterConfig](fqn, args).recover({ diff --git a/akka-actor/src/main/scala/akka/actor/DynamicAccess.scala b/akka-actor/src/main/scala/akka/actor/DynamicAccess.scala index 50f5d26177..2929032dc5 100644 --- a/akka-actor/src/main/scala/akka/actor/DynamicAccess.scala +++ b/akka-actor/src/main/scala/akka/actor/DynamicAccess.scala @@ -85,7 +85,10 @@ class ReflectiveDynamicAccess(val classLoader: ClassLoader) extends DynamicAcces getClassFor(fqcn) flatMap { c ⇒ createInstanceFor(c, args) } override def getObjectFor[T: ClassTag](fqcn: String): Try[T] = { - getClassFor(fqcn) flatMap { c ⇒ + val classTry = + if (fqcn.endsWith("$")) getClassFor(fqcn) + else getClassFor(fqcn + "$") recoverWith { case _ ⇒ getClassFor(fqcn) } + classTry flatMap { c ⇒ Try { val module = c.getDeclaredField("MODULE$") module.setAccessible(true) diff --git a/akka-actor/src/main/scala/akka/actor/FSM.scala b/akka-actor/src/main/scala/akka/actor/FSM.scala index 51d0c290dc..a58abb0ac3 100644 --- a/akka-actor/src/main/scala/akka/actor/FSM.scala +++ b/akka-actor/src/main/scala/akka/actor/FSM.scala @@ -4,11 +4,11 @@ package akka.actor import language.implicitConversions - import akka.util._ import scala.concurrent.util.Duration import scala.collection.mutable import akka.routing.{ Deafen, Listen, Listeners } +import scala.concurrent.util.FiniteDuration object FSM { @@ -92,7 +92,7 @@ object FSM { private val scheduler = context.system.scheduler private implicit val executionContext = context.dispatcher - def schedule(actor: ActorRef, timeout: Duration): Unit = + def schedule(actor: ActorRef, timeout: FiniteDuration): Unit = ref = Some( if (repeat) scheduler.schedule(timeout, timeout, actor, this) else scheduler.scheduleOnce(timeout, actor, this)) @@ -121,15 +121,18 @@ object FSM { * name, the state data, possibly custom timeout, stop reason and replies * accumulated while processing the last message. */ - case class State[S, D](stateName: S, stateData: D, timeout: Option[Duration] = None, stopReason: Option[Reason] = None, replies: List[Any] = Nil) { + case class State[S, D](stateName: S, stateData: D, timeout: Option[FiniteDuration] = None, stopReason: Option[Reason] = None, replies: List[Any] = Nil) { /** * Modify state transition descriptor to include a state timeout for the * next state. This timeout overrides any default timeout set for the next * state. + * + * Use Duration.Inf to deactivate an existing timeout. 
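(A sketch of how these finite state timeouts read from user code — hypothetical FSM with Int states, not part of this patch:

import akka.actor.{ Actor, FSM }
import scala.concurrent.util.Duration
import scala.concurrent.util.duration._

class Blinker extends Actor with FSM[Int, Unit] {
  startWith(1, ())
  when(1, stateTimeout = 5.seconds) {            // stateTimeout is now a FiniteDuration
    case Event(StateTimeout, _) ⇒ goto(2)
  }
  when(2) {
    case Event("flash", _) ⇒ goto(1) forMax (1.second)     // finite value: set a timeout
    case Event("hold", _)  ⇒ stay() forMax (Duration.Inf)  // non-finite value: clear it
  }
  initialize
}
)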
*/ - def forMax(timeout: Duration): State[S, D] = { - copy(timeout = Some(timeout)) + def forMax(timeout: Duration): State[S, D] = timeout match { + case f: FiniteDuration ⇒ copy(timeout = Some(f)) + case _ ⇒ copy(timeout = None) } /** @@ -245,7 +248,7 @@ trait FSM[S, D] extends Listeners with ActorLogging { type State = FSM.State[S, D] type StateFunction = scala.PartialFunction[Event, State] - type Timeout = Option[Duration] + type Timeout = Option[FiniteDuration] type TransitionHandler = PartialFunction[(S, S), Unit] /* @@ -279,7 +282,7 @@ trait FSM[S, D] extends Listeners with ActorLogging { * @param stateTimeout default state timeout for this state * @param stateFunction partial function describing response to input */ - final def when(stateName: S, stateTimeout: Duration = null)(stateFunction: StateFunction): Unit = + final def when(stateName: S, stateTimeout: FiniteDuration = null)(stateFunction: StateFunction): Unit = register(stateName, stateFunction, Option(stateTimeout)) /** @@ -339,7 +342,7 @@ trait FSM[S, D] extends Listeners with ActorLogging { * @param repeat send once if false, scheduleAtFixedRate if true * @return current state descriptor */ - final def setTimer(name: String, msg: Any, timeout: Duration, repeat: Boolean): State = { + final def setTimer(name: String, msg: Any, timeout: FiniteDuration, repeat: Boolean): State = { if (debugEvent) log.debug("setting " + (if (repeat) "repeating " else "") + "timer '" + name + "'/" + timeout + ": " + msg) if (timers contains name) { diff --git a/akka-actor/src/main/scala/akka/actor/RepointableActorRef.scala b/akka-actor/src/main/scala/akka/actor/RepointableActorRef.scala index 3f0539f123..8ca8ab5cb7 100644 --- a/akka-actor/src/main/scala/akka/actor/RepointableActorRef.scala +++ b/akka-actor/src/main/scala/akka/actor/RepointableActorRef.scala @@ -12,7 +12,7 @@ import scala.annotation.tailrec import scala.collection.mutable.Queue import scala.concurrent.forkjoin.ThreadLocalRandom -import akka.actor.cell.ChildrenContainer +import akka.actor.dungeon.ChildrenContainer import akka.dispatch.{ Envelope, Supervise, SystemMessage, Terminate } import akka.event.Logging.Warning import akka.util.Unsafe diff --git a/akka-actor/src/main/scala/akka/actor/Scheduler.scala b/akka-actor/src/main/scala/akka/actor/Scheduler.scala index 42f2a10604..c210bc0976 100644 --- a/akka-actor/src/main/scala/akka/actor/Scheduler.scala +++ b/akka-actor/src/main/scala/akka/actor/Scheduler.scala @@ -13,6 +13,7 @@ import java.util.concurrent.atomic.AtomicReference import scala.annotation.tailrec import akka.util.internal._ import concurrent.ExecutionContext +import scala.concurrent.util.FiniteDuration //#scheduler /** @@ -29,13 +30,13 @@ trait Scheduler { * Schedules a message to be sent repeatedly with an initial delay and * frequency. E.g. if you would like a message to be sent immediately and * thereafter every 500ms you would set delay=Duration.Zero and - * frequency=Duration(500, TimeUnit.MILLISECONDS) + * interval=Duration(500, TimeUnit.MILLISECONDS) * * Java & Scala API */ def schedule( - initialDelay: Duration, - frequency: Duration, + initialDelay: FiniteDuration, + interval: FiniteDuration, receiver: ActorRef, message: Any)(implicit executor: ExecutionContext): Cancellable @@ -43,23 +44,23 @@ trait Scheduler { * Schedules a function to be run repeatedly with an initial delay and a * frequency. E.g. 
if you would like the function to be run after 2 seconds * and thereafter every 100ms you would set delay = Duration(2, TimeUnit.SECONDS) - * and frequency = Duration(100, TimeUnit.MILLISECONDS) + * and interval = Duration(100, TimeUnit.MILLISECONDS) * * Scala API */ def schedule( - initialDelay: Duration, frequency: Duration)(f: ⇒ Unit)(implicit executor: ExecutionContext): Cancellable + initialDelay: FiniteDuration, interval: FiniteDuration)(f: ⇒ Unit)(implicit executor: ExecutionContext): Cancellable /** * Schedules a function to be run repeatedly with an initial delay and * a frequency. E.g. if you would like the function to be run after 2 * seconds and thereafter every 100ms you would set delay = Duration(2, - * TimeUnit.SECONDS) and frequency = Duration(100, TimeUnit.MILLISECONDS) + * TimeUnit.SECONDS) and interval = Duration(100, TimeUnit.MILLISECONDS) * * Java API */ def schedule( - initialDelay: Duration, frequency: Duration, runnable: Runnable)(implicit executor: ExecutionContext): Cancellable + initialDelay: FiniteDuration, interval: FiniteDuration, runnable: Runnable)(implicit executor: ExecutionContext): Cancellable /** * Schedules a Runnable to be run once with a delay, i.e. a time period that @@ -67,7 +68,7 @@ trait Scheduler { * * Java & Scala API */ - def scheduleOnce(delay: Duration, runnable: Runnable)(implicit executor: ExecutionContext): Cancellable + def scheduleOnce(delay: FiniteDuration, runnable: Runnable)(implicit executor: ExecutionContext): Cancellable /** * Schedules a message to be sent once with a delay, i.e. a time period that has @@ -75,7 +76,7 @@ trait Scheduler { * * Java & Scala API */ - def scheduleOnce(delay: Duration, receiver: ActorRef, message: Any)(implicit executor: ExecutionContext): Cancellable + def scheduleOnce(delay: FiniteDuration, receiver: ActorRef, message: Any)(implicit executor: ExecutionContext): Cancellable /** * Schedules a function to be run once with a delay, i.e. a time period that has @@ -83,7 +84,7 @@ trait Scheduler { * * Scala API */ - def scheduleOnce(delay: Duration)(f: ⇒ Unit)(implicit executor: ExecutionContext): Cancellable + def scheduleOnce(delay: FiniteDuration)(f: ⇒ Unit)(implicit executor: ExecutionContext): Cancellable } //#scheduler @@ -120,8 +121,8 @@ trait Cancellable { * returned from stop(). 
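(A sketch of the updated call sites — `system` and `tickActor` are assumed to exist:

import scala.concurrent.util.Duration
import scala.concurrent.util.duration._

implicit val ec = system.dispatcher
// both the initial delay and the interval must now be finite
val ticking = system.scheduler.schedule(Duration.Zero, 500.millis, tickActor, "tick")
val once    = system.scheduler.scheduleOnce(2.seconds) { println("two seconds later") }
ticking.cancel()
)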
*/ class DefaultScheduler(hashedWheelTimer: HashedWheelTimer, log: LoggingAdapter) extends Scheduler with Closeable { - override def schedule(initialDelay: Duration, - delay: Duration, + override def schedule(initialDelay: FiniteDuration, + delay: FiniteDuration, receiver: ActorRef, message: Any)(implicit executor: ExecutionContext): Cancellable = { val continuousCancellable = new ContinuousCancellable @@ -142,12 +143,12 @@ class DefaultScheduler(hashedWheelTimer: HashedWheelTimer, log: LoggingAdapter) initialDelay)) } - override def schedule(initialDelay: Duration, - delay: Duration)(f: ⇒ Unit)(implicit executor: ExecutionContext): Cancellable = + override def schedule(initialDelay: FiniteDuration, + delay: FiniteDuration)(f: ⇒ Unit)(implicit executor: ExecutionContext): Cancellable = schedule(initialDelay, delay, new Runnable { override def run = f }) - override def schedule(initialDelay: Duration, - delay: Duration, + override def schedule(initialDelay: FiniteDuration, + delay: FiniteDuration, runnable: Runnable)(implicit executor: ExecutionContext): Cancellable = { val continuousCancellable = new ContinuousCancellable continuousCancellable.init( @@ -163,20 +164,20 @@ class DefaultScheduler(hashedWheelTimer: HashedWheelTimer, log: LoggingAdapter) initialDelay)) } - override def scheduleOnce(delay: Duration, runnable: Runnable)(implicit executor: ExecutionContext): Cancellable = + override def scheduleOnce(delay: FiniteDuration, runnable: Runnable)(implicit executor: ExecutionContext): Cancellable = new DefaultCancellable( hashedWheelTimer.newTimeout( new TimerTask() { def run(timeout: HWTimeout): Unit = executor.execute(runnable) }, delay)) - override def scheduleOnce(delay: Duration, receiver: ActorRef, message: Any)(implicit executor: ExecutionContext): Cancellable = + override def scheduleOnce(delay: FiniteDuration, receiver: ActorRef, message: Any)(implicit executor: ExecutionContext): Cancellable = scheduleOnce(delay, new Runnable { override def run = receiver ! message }) - override def scheduleOnce(delay: Duration)(f: ⇒ Unit)(implicit executor: ExecutionContext): Cancellable = + override def scheduleOnce(delay: FiniteDuration)(f: ⇒ Unit)(implicit executor: ExecutionContext): Cancellable = scheduleOnce(delay, new Runnable { override def run = f }) private trait ContinuousScheduling { this: TimerTask ⇒ - def scheduleNext(timeout: HWTimeout, delay: Duration, delegator: ContinuousCancellable) { + def scheduleNext(timeout: HWTimeout, delay: FiniteDuration, delegator: ContinuousCancellable) { try delegator.swap(timeout.getTimer.newTimeout(this, delay)) catch { case _: IllegalStateException ⇒ } // stop recurring if timer is stopped } } diff --git a/akka-actor/src/main/scala/akka/actor/TypedActor.scala b/akka-actor/src/main/scala/akka/actor/TypedActor.scala index aaa5432815..e10e5350bc 100644 --- a/akka-actor/src/main/scala/akka/actor/TypedActor.scala +++ b/akka-actor/src/main/scala/akka/actor/TypedActor.scala @@ -4,7 +4,6 @@ package akka.actor import language.existentials - import akka.japi.{ Creator, Option ⇒ JOption } import java.lang.reflect.{ InvocationTargetException, Method, InvocationHandler, Proxy } import akka.util.Timeout @@ -20,6 +19,7 @@ import scala.reflect.ClassTag import akka.serialization.{ JavaSerializer, SerializationExtension } import java.io.ObjectStreamException import scala.util.{ Try, Success, Failure } +import scala.concurrent.util.FiniteDuration /** * A TypedActorFactory is something that can created TypedActor instances. 
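(The TypedActor change in this file also means the proxy timeout is a FiniteDuration now; a sketch with an illustrative interface, `system` assumed in scope:

import akka.actor.{ TypedActor, TypedProps }
import akka.util.Timeout
import scala.concurrent.util.duration._

// illustrative interface/implementation pair
trait Squarer { def square(i: Int): Int }
class SquarerImpl extends Squarer { def square(i: Int) = i * i }

val squarer: Squarer = TypedActor(system).typedActorOf(
  TypedProps[SquarerImpl](classOf[Squarer], classOf[SquarerImpl]).withTimeout(Timeout(2.seconds)))
)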
@@ -421,7 +421,7 @@ object TypedActor extends ExtensionId[TypedActorExtension] with ExtensionIdProvi /** * INTERNAL USE ONLY */ - private[akka] case class SerializedTypedActorInvocationHandler(val actor: ActorRef, val timeout: Duration) { + private[akka] case class SerializedTypedActorInvocationHandler(val actor: ActorRef, val timeout: FiniteDuration) { @throws(classOf[ObjectStreamException]) private def readResolve(): AnyRef = JavaSerializer.currentSystem.value match { case null ⇒ throw new IllegalStateException("SerializedTypedActorInvocationHandler.readResolve requires that JavaSerializer.currentSystem.value is set to a non-null value") case some ⇒ toTypedActorInvocationHandler(some) diff --git a/akka-actor/src/main/scala/akka/actor/cell/ReceiveTimeout.scala b/akka-actor/src/main/scala/akka/actor/cell/ReceiveTimeout.scala deleted file mode 100644 index c04d485262..0000000000 --- a/akka-actor/src/main/scala/akka/actor/cell/ReceiveTimeout.scala +++ /dev/null @@ -1,54 +0,0 @@ -/** - * Copyright (C) 2009-2012 Typesafe Inc. - */ - -package akka.actor.cell - -import ReceiveTimeout.emptyReceiveTimeoutData -import akka.actor.ActorCell -import akka.actor.ActorCell.emptyCancellable -import akka.actor.Cancellable -import scala.concurrent.util.Duration - -private[akka] object ReceiveTimeout { - final val emptyReceiveTimeoutData: (Duration, Cancellable) = (Duration.Undefined, ActorCell.emptyCancellable) -} - -private[akka] trait ReceiveTimeout { this: ActorCell ⇒ - - import ReceiveTimeout._ - import ActorCell._ - - private var receiveTimeoutData: (Duration, Cancellable) = emptyReceiveTimeoutData - - final def receiveTimeout: Option[Duration] = receiveTimeoutData._1 match { - case Duration.Undefined ⇒ None - case duration ⇒ Some(duration) - } - - final def setReceiveTimeout(timeout: Option[Duration]): Unit = setReceiveTimeout(timeout.getOrElse(Duration.Undefined)) - - final def setReceiveTimeout(timeout: Duration): Unit = - receiveTimeoutData = ( - if (Duration.Undefined == timeout || timeout.toMillis < 1) Duration.Undefined else timeout, - receiveTimeoutData._2) - - final def resetReceiveTimeout(): Unit = setReceiveTimeout(None) - - final def checkReceiveTimeout() { - val recvtimeout = receiveTimeoutData - if (Duration.Undefined != recvtimeout._1 && !mailbox.hasMessages) { - recvtimeout._2.cancel() //Cancel any ongoing future - //Only reschedule if desired and there are currently no more messages to be processed - receiveTimeoutData = (recvtimeout._1, system.scheduler.scheduleOnce(recvtimeout._1, self, akka.actor.ReceiveTimeout)(this.dispatcher)) - } else cancelReceiveTimeout() - - } - - final def cancelReceiveTimeout(): Unit = - if (receiveTimeoutData._2 ne emptyCancellable) { - receiveTimeoutData._2.cancel() - receiveTimeoutData = (receiveTimeoutData._1, emptyCancellable) - } - -} \ No newline at end of file diff --git a/akka-actor/src/main/scala/akka/actor/dsl/Inbox.scala b/akka-actor/src/main/scala/akka/actor/dsl/Inbox.scala index 9857023e82..7b1a77bc71 100644 --- a/akka-actor/src/main/scala/akka/actor/dsl/Inbox.scala +++ b/akka-actor/src/main/scala/akka/actor/dsl/Inbox.scala @@ -129,10 +129,10 @@ trait Inbox { this: ActorDSL.type ⇒ val next = clientsByTimeout.head.deadline import context.dispatcher if (currentDeadline.isEmpty) { - currentDeadline = Some((next, context.system.scheduler.scheduleOnce(next.timeLeft, self, Kick))) + currentDeadline = Some((next, context.system.scheduler.scheduleOnce(next.timeLeft.asInstanceOf[FiniteDuration], self, Kick))) } else if (currentDeadline.get._1 != next) 
{ currentDeadline.get._2.cancel() - currentDeadline = Some((next, context.system.scheduler.scheduleOnce(next.timeLeft, self, Kick))) + currentDeadline = Some((next, context.system.scheduler.scheduleOnce(next.timeLeft.asInstanceOf[FiniteDuration], self, Kick))) } } } @@ -169,7 +169,7 @@ trait Inbox { this: ActorDSL.type ⇒ * this method within an actor! */ def receive(timeout: FiniteDuration = defaultTimeout): Any = { - implicit val t = Timeout(timeout + extraTime) + implicit val t = Timeout((timeout + extraTime).asInstanceOf[FiniteDuration]) Await.result(receiver ? Get(Deadline.now + timeout), Duration.Inf) } @@ -186,7 +186,7 @@ trait Inbox { this: ActorDSL.type ⇒ * this method within an actor! */ def select[T](timeout: FiniteDuration = defaultTimeout)(predicate: PartialFunction[Any, T]): T = { - implicit val t = Timeout(timeout + extraTime) + implicit val t = Timeout((timeout + extraTime).asInstanceOf[FiniteDuration]) predicate(Await.result(receiver ? Select(Deadline.now + timeout, predicate), Duration.Inf)) } diff --git a/akka-actor/src/main/scala/akka/actor/cell/Children.scala b/akka-actor/src/main/scala/akka/actor/dungeon/Children.scala similarity index 99% rename from akka-actor/src/main/scala/akka/actor/cell/Children.scala rename to akka-actor/src/main/scala/akka/actor/dungeon/Children.scala index 7fa8eceece..85dfd7095a 100644 --- a/akka-actor/src/main/scala/akka/actor/cell/Children.scala +++ b/akka-actor/src/main/scala/akka/actor/dungeon/Children.scala @@ -2,7 +2,7 @@ * Copyright (C) 2009-2012 Typesafe Inc. */ -package akka.actor.cell +package akka.actor.dungeon import scala.annotation.tailrec import scala.collection.JavaConverters.asJavaIterableConverter diff --git a/akka-actor/src/main/scala/akka/actor/cell/ChildrenContainer.scala b/akka-actor/src/main/scala/akka/actor/dungeon/ChildrenContainer.scala similarity index 99% rename from akka-actor/src/main/scala/akka/actor/cell/ChildrenContainer.scala rename to akka-actor/src/main/scala/akka/actor/dungeon/ChildrenContainer.scala index d0bf76953a..eeb28cf018 100644 --- a/akka-actor/src/main/scala/akka/actor/cell/ChildrenContainer.scala +++ b/akka-actor/src/main/scala/akka/actor/dungeon/ChildrenContainer.scala @@ -2,7 +2,7 @@ * Copyright (C) 2009-2012 Typesafe Inc. */ -package akka.actor.cell +package akka.actor.dungeon import scala.collection.immutable.TreeMap diff --git a/akka-actor/src/main/scala/akka/actor/cell/DeathWatch.scala b/akka-actor/src/main/scala/akka/actor/dungeon/DeathWatch.scala similarity index 99% rename from akka-actor/src/main/scala/akka/actor/cell/DeathWatch.scala rename to akka-actor/src/main/scala/akka/actor/dungeon/DeathWatch.scala index f994e956c6..5407afc2c8 100644 --- a/akka-actor/src/main/scala/akka/actor/cell/DeathWatch.scala +++ b/akka-actor/src/main/scala/akka/actor/dungeon/DeathWatch.scala @@ -2,7 +2,7 @@ * Copyright (C) 2009-2012 Typesafe Inc. 
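(The casts in the Inbox code above exist because Deadline.timeLeft is still typed as Duration here while the scheduler now demands a FiniteDuration; a defensive variant could match instead of casting — sketch only, `system` and `someRef` assumed:

import scala.concurrent.util.{ Deadline, FiniteDuration }
import scala.concurrent.util.duration._

val deadline = Deadline.now + 3.seconds
deadline.timeLeft match {
  case f: FiniteDuration ⇒ system.scheduler.scheduleOnce(f, someRef, "wakeup")(system.dispatcher)
  case _                 ⇒ // nothing left to schedule
}
)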
*/ -package akka.actor.cell +package akka.actor.dungeon import akka.actor.{ Terminated, InternalActorRef, ActorRef, ActorCell, Actor, Address, AddressTerminated } import akka.dispatch.{ Watch, Unwatch } diff --git a/akka-actor/src/main/scala/akka/actor/cell/Dispatch.scala b/akka-actor/src/main/scala/akka/actor/dungeon/Dispatch.scala similarity index 99% rename from akka-actor/src/main/scala/akka/actor/cell/Dispatch.scala rename to akka-actor/src/main/scala/akka/actor/dungeon/Dispatch.scala index 0f3619e208..e071c1605d 100644 --- a/akka-actor/src/main/scala/akka/actor/cell/Dispatch.scala +++ b/akka-actor/src/main/scala/akka/actor/dungeon/Dispatch.scala @@ -2,7 +2,7 @@ * Copyright (C) 2009-2012 Typesafe Inc. */ -package akka.actor.cell +package akka.actor.dungeon import scala.annotation.tailrec import akka.actor.{ ActorRef, ActorCell } diff --git a/akka-actor/src/main/scala/akka/actor/cell/FaultHandling.scala b/akka-actor/src/main/scala/akka/actor/dungeon/FaultHandling.scala similarity index 98% rename from akka-actor/src/main/scala/akka/actor/cell/FaultHandling.scala rename to akka-actor/src/main/scala/akka/actor/dungeon/FaultHandling.scala index 383db82b70..f7c06032c8 100644 --- a/akka-actor/src/main/scala/akka/actor/cell/FaultHandling.scala +++ b/akka-actor/src/main/scala/akka/actor/dungeon/FaultHandling.scala @@ -2,7 +2,7 @@ * Copyright (C) 2009-2012 Typesafe Inc. */ -package akka.actor.cell +package akka.actor.dungeon import scala.annotation.tailrec import akka.actor.{ PreRestartException, PostRestartException, InternalActorRef, Failed, ActorRef, ActorInterruptedException, ActorCell, Actor } @@ -16,6 +16,7 @@ import akka.actor.PreRestartException import akka.actor.Failed import akka.actor.PostRestartException import akka.event.Logging.Debug +import scala.concurrent.util.Duration private[akka] trait FaultHandling { this: ActorCell ⇒ @@ -121,7 +122,7 @@ private[akka] trait FaultHandling { this: ActorCell ⇒ assert(mailbox.isSuspended, "mailbox must be suspended during failed creation, status=" + mailbox.status) assert(perpetrator == self) - setReceiveTimeout(None) + setReceiveTimeout(Duration.Undefined) cancelReceiveTimeout // stop all children, which will turn childrenRefs into TerminatingChildrenContainer (if there are children) @@ -137,7 +138,7 @@ private[akka] trait FaultHandling { this: ActorCell ⇒ } protected def terminate() { - setReceiveTimeout(None) + setReceiveTimeout(Duration.Undefined) cancelReceiveTimeout // stop all children, which will turn childrenRefs into TerminatingChildrenContainer (if there are children) diff --git a/akka-actor/src/main/scala/akka/actor/dungeon/ReceiveTimeout.scala b/akka-actor/src/main/scala/akka/actor/dungeon/ReceiveTimeout.scala new file mode 100644 index 0000000000..0c3661b59a --- /dev/null +++ b/akka-actor/src/main/scala/akka/actor/dungeon/ReceiveTimeout.scala @@ -0,0 +1,49 @@ +/** + * Copyright (C) 2009-2012 Typesafe Inc. 
+ */ + +package akka.actor.dungeon + +import ReceiveTimeout.emptyReceiveTimeoutData +import akka.actor.ActorCell +import akka.actor.ActorCell.emptyCancellable +import akka.actor.Cancellable +import scala.concurrent.util.Duration +import scala.concurrent.util.FiniteDuration + +private[akka] object ReceiveTimeout { + final val emptyReceiveTimeoutData: (Duration, Cancellable) = (Duration.Undefined, ActorCell.emptyCancellable) +} + +private[akka] trait ReceiveTimeout { this: ActorCell ⇒ + + import ReceiveTimeout._ + import ActorCell._ + + private var receiveTimeoutData: (Duration, Cancellable) = emptyReceiveTimeoutData + + final def receiveTimeout: Duration = receiveTimeoutData._1 + + final def setReceiveTimeout(timeout: Duration): Unit = receiveTimeoutData = receiveTimeoutData.copy(_1 = timeout) + + final def checkReceiveTimeout() { + val recvtimeout = receiveTimeoutData + //Only reschedule if desired and there are currently no more messages to be processed + if (!mailbox.hasMessages) recvtimeout._1 match { + case f: FiniteDuration ⇒ + recvtimeout._2.cancel() //Cancel any ongoing future + val task = system.scheduler.scheduleOnce(f, self, akka.actor.ReceiveTimeout)(this.dispatcher) + receiveTimeoutData = (f, task) + case _ ⇒ cancelReceiveTimeout() + } + else cancelReceiveTimeout() + + } + + final def cancelReceiveTimeout(): Unit = + if (receiveTimeoutData._2 ne emptyCancellable) { + receiveTimeoutData._2.cancel() + receiveTimeoutData = (receiveTimeoutData._1, emptyCancellable) + } + +} \ No newline at end of file diff --git a/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala index 340195d1a6..eeff39f2e6 100644 --- a/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala +++ b/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala @@ -16,6 +16,7 @@ import scala.concurrent.forkjoin.{ ForkJoinTask, ForkJoinPool } import scala.concurrent.util.Duration import scala.concurrent.{ ExecutionContext, Await, Awaitable } import scala.util.control.NonFatal +import scala.concurrent.util.FiniteDuration final case class Envelope private (val message: Any, val sender: ActorRef) @@ -316,7 +317,7 @@ abstract class MessageDispatcher(val prerequisites: DispatcherPrerequisites) ext * * INTERNAL API */ - protected[akka] def shutdownTimeout: Duration + protected[akka] def shutdownTimeout: FiniteDuration /** * After the call to this method, the dispatcher mustn't begin any new message processing for the specified reference diff --git a/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala index 3897027d9b..c90048c80b 100644 --- a/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala +++ b/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala @@ -11,6 +11,7 @@ import akka.util.Helpers import java.util.{ Comparator, Iterator } import java.util.concurrent.{ Executor, LinkedBlockingQueue, ConcurrentLinkedQueue, ConcurrentSkipListSet } import akka.actor.ActorSystemImpl +import scala.concurrent.util.FiniteDuration /** * An executor based event driven dispatcher which will try to redistribute work from busy actors to idle actors. 
It is assumed @@ -33,7 +34,7 @@ class BalancingDispatcher( throughputDeadlineTime: Duration, mailboxType: MailboxType, _executorServiceFactoryProvider: ExecutorServiceFactoryProvider, - _shutdownTimeout: Duration, + _shutdownTimeout: FiniteDuration, attemptTeamWork: Boolean) extends Dispatcher(_prerequisites, _id, throughput, throughputDeadlineTime, mailboxType, _executorServiceFactoryProvider, _shutdownTimeout) { diff --git a/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala index 21f4612750..96166022f8 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala @@ -12,6 +12,7 @@ import java.util.concurrent.{ ExecutorService, RejectedExecutionException } import scala.concurrent.forkjoin.ForkJoinPool import scala.concurrent.util.Duration import scala.concurrent.Awaitable +import scala.concurrent.util.FiniteDuration /** * The event-based ``Dispatcher`` binds a set of Actors to a thread pool backed up by a @@ -32,7 +33,7 @@ class Dispatcher( val throughputDeadlineTime: Duration, val mailboxType: MailboxType, executorServiceFactoryProvider: ExecutorServiceFactoryProvider, - val shutdownTimeout: Duration) + val shutdownTimeout: FiniteDuration) extends MessageDispatcher(_prerequisites) { private class LazyExecutorServiceDelegate(factory: ExecutorServiceFactory) extends ExecutorServiceDelegate { diff --git a/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala index 8fdde39cb3..af421ddb96 100644 --- a/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala +++ b/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala @@ -6,6 +6,7 @@ package akka.dispatch import akka.actor.ActorCell import scala.concurrent.util.Duration +import scala.concurrent.util.FiniteDuration /** * Dedicates a unique thread for each actor passed in as reference. Served through its messageQueue. @@ -18,7 +19,7 @@ class PinnedDispatcher( _actor: ActorCell, _id: String, _mailboxType: MailboxType, - _shutdownTimeout: Duration, + _shutdownTimeout: FiniteDuration, _threadPoolConfig: ThreadPoolConfig = ThreadPoolConfig()) extends Dispatcher(_prerequisites, _id, diff --git a/akka-actor/src/main/scala/akka/pattern/AskSupport.scala b/akka-actor/src/main/scala/akka/pattern/AskSupport.scala index 345d133ea4..7e3c3ce14d 100644 --- a/akka-actor/src/main/scala/akka/pattern/AskSupport.scala +++ b/akka-actor/src/main/scala/akka/pattern/AskSupport.scala @@ -78,8 +78,7 @@ trait AskSupport { actorRef.tell(message) Future.failed[Any](new AskTimeoutException("Recipient[%s] had already been terminated." format actorRef)) case ref: InternalActorRef ⇒ - if (!timeout.duration.isFinite) Future.failed[Any](new IllegalArgumentException("Timeouts to `ask` must be finite. Question not sent to [%s]" format actorRef)) - else if (timeout.duration.length <= 0) Future.failed[Any](new IllegalArgumentException("Timeout length for an `ask` must be greater or equal to 1. Question not sent to [%s]" format actorRef)) + if (timeout.duration.length <= 0) Future.failed[Any](new IllegalArgumentException("Timeout length for an `ask` must be greater or equal to 1. 
Question not sent to [%s]" format actorRef)) else { val provider = ref.provider val a = PromiseActorRef(provider, timeout) diff --git a/akka-actor/src/main/scala/akka/pattern/CircuitBreaker.scala b/akka-actor/src/main/scala/akka/pattern/CircuitBreaker.scala index a9cb8bc0c5..df228f821d 100644 --- a/akka-actor/src/main/scala/akka/pattern/CircuitBreaker.scala +++ b/akka-actor/src/main/scala/akka/pattern/CircuitBreaker.scala @@ -10,7 +10,7 @@ import akka.util.Unsafe import scala.util.control.NoStackTrace import java.util.concurrent.{ Callable, CopyOnWriteArrayList } import scala.concurrent.{ ExecutionContext, Future, Promise, Await } -import scala.concurrent.util.{ Duration, Deadline } +import scala.concurrent.util.{ FiniteDuration, Deadline } import scala.concurrent.util.duration._ import scala.util.control.NonFatal import scala.util.Success @@ -38,8 +38,8 @@ object CircuitBreaker { * @param callTimeout [[scala.concurrent.util.Duration]] of time after which to consider a call a failure * @param resetTimeout [[scala.concurrent.util.Duration]] of time after which to attempt to close the circuit */ - def apply(scheduler: Scheduler, maxFailures: Int, callTimeout: Duration, resetTimeout: Duration): CircuitBreaker = - new CircuitBreaker(scheduler: Scheduler, maxFailures: Int, callTimeout: Duration, resetTimeout: Duration)(syncExecutionContext) + def apply(scheduler: Scheduler, maxFailures: Int, callTimeout: FiniteDuration, resetTimeout: FiniteDuration): CircuitBreaker = + new CircuitBreaker(scheduler, maxFailures, callTimeout, resetTimeout)(syncExecutionContext) /** * Callbacks run in caller's thread when using withSyncCircuitBreaker, and in same ExecutionContext as the passed @@ -52,8 +52,8 @@ object CircuitBreaker { * @param callTimeout [[scala.concurrent.util.Duration]] of time after which to consider a call a failure * @param resetTimeout [[scala.concurrent.util.Duration]] of time after which to attempt to close the circuit */ - def create(scheduler: Scheduler, maxFailures: Int, callTimeout: Duration, resetTimeout: Duration): CircuitBreaker = - apply(scheduler: Scheduler, maxFailures: Int, callTimeout: Duration, resetTimeout: Duration) + def create(scheduler: Scheduler, maxFailures: Int, callTimeout: FiniteDuration, resetTimeout: FiniteDuration): CircuitBreaker = + apply(scheduler, maxFailures, callTimeout, resetTimeout) } /** @@ -76,9 +76,9 @@ object CircuitBreaker { * @param resetTimeout [[scala.concurrent.util.Duration]] of time after which to attempt to close the circuit * @param executor [[scala.concurrent.ExecutionContext]] used for execution of state transition listeners */ -class CircuitBreaker(scheduler: Scheduler, maxFailures: Int, callTimeout: Duration, resetTimeout: Duration)(implicit executor: ExecutionContext) extends AbstractCircuitBreaker { +class CircuitBreaker(scheduler: Scheduler, maxFailures: Int, callTimeout: FiniteDuration, resetTimeout: FiniteDuration)(implicit executor: ExecutionContext) extends AbstractCircuitBreaker { - def this(executor: ExecutionContext, scheduler: Scheduler, maxFailures: Int, callTimeout: Duration, resetTimeout: Duration) = { + def this(executor: ExecutionContext, scheduler: Scheduler, maxFailures: Int, callTimeout: FiniteDuration, resetTimeout: FiniteDuration) = { this(scheduler, maxFailures, callTimeout, resetTimeout)(executor) } @@ -409,7 +409,7 @@ class CircuitBreaker(scheduler: Scheduler, maxFailures: Int, callTimeout: Durati * @return Future containing result of protected call */ override def invoke[T](body: ⇒ Future[T]): Future[T] = - if 
(compareAndSet(true, false)) callThrough(body) else Promise.failed[T](new CircuitBreakerOpenException(Duration.Zero)).future + if (compareAndSet(true, false)) callThrough(body) else Promise.failed[T](new CircuitBreakerOpenException(0.seconds)).future /** * Reset breaker on successful call. @@ -453,7 +453,7 @@ class CircuitBreaker(scheduler: Scheduler, maxFailures: Int, callTimeout: Durati * @return Future containing result of protected call */ override def invoke[T](body: ⇒ Future[T]): Future[T] = - Promise.failed[T](new CircuitBreakerOpenException(remainingTimeout().timeLeft)).future + Promise.failed[T](new CircuitBreakerOpenException(remainingTimeout().timeLeft.asInstanceOf[FiniteDuration])).future /** * Calculate remaining timeout to inform the caller in case a backoff algorithm is useful @@ -510,6 +510,6 @@ class CircuitBreaker(scheduler: Scheduler, maxFailures: Int, callTimeout: Durati * @param message Defaults to "Circuit Breaker is open; calls are failing fast" */ class CircuitBreakerOpenException( - val remainingDuration: Duration, + val remainingDuration: FiniteDuration, message: String = "Circuit Breaker is open; calls are failing fast") extends AkkaException(message) with NoStackTrace diff --git a/akka-actor/src/main/scala/akka/pattern/FutureTimeoutSupport.scala b/akka-actor/src/main/scala/akka/pattern/FutureTimeoutSupport.scala index 72335d810b..dc398e7fa2 100644 --- a/akka-actor/src/main/scala/akka/pattern/FutureTimeoutSupport.scala +++ b/akka-actor/src/main/scala/akka/pattern/FutureTimeoutSupport.scala @@ -8,13 +8,14 @@ import scala.concurrent.util.Duration import scala.concurrent.{ ExecutionContext, Promise, Future } import akka.actor._ import scala.util.control.NonFatal +import scala.concurrent.util.FiniteDuration trait FutureTimeoutSupport { /** * Returns a [[scala.concurrent.Future]] that will be completed with the success or failure of the provided value * after the specified duration. */ - def after[T](duration: Duration, using: Scheduler)(value: ⇒ Future[T])(implicit ec: ExecutionContext): Future[T] = + def after[T](duration: FiniteDuration, using: Scheduler)(value: ⇒ Future[T])(implicit ec: ExecutionContext): Future[T] = if (duration.isFinite() && duration.length < 1) { try value catch { case NonFatal(t) ⇒ Future.failed(t) } } else { diff --git a/akka-actor/src/main/scala/akka/pattern/GracefulStopSupport.scala b/akka-actor/src/main/scala/akka/pattern/GracefulStopSupport.scala index 3db168e4c4..37fcc532e6 100644 --- a/akka-actor/src/main/scala/akka/pattern/GracefulStopSupport.scala +++ b/akka-actor/src/main/scala/akka/pattern/GracefulStopSupport.scala @@ -10,6 +10,7 @@ import akka.dispatch.{ Unwatch, Watch } import scala.concurrent.Future import scala.concurrent.util.Duration import scala.util.Success +import scala.concurrent.util.FiniteDuration trait GracefulStopSupport { /** @@ -36,7 +37,7 @@ trait GracefulStopSupport { * If the target actor isn't terminated within the timeout the [[scala.concurrent.Future]] * is completed with failure [[akka.pattern.AskTimeoutException]]. 
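A minimal sketch of how the changed signatures above are used from application code, assuming a hypothetical ActorSystem named "breaker-example" and a stand-in worker actor; both CircuitBreaker timeouts and the gracefulStop timeout must now be finite:

    import akka.actor.{ Actor, ActorSystem, Props }
    import akka.pattern.{ CircuitBreaker, gracefulStop }
    import scala.concurrent.ExecutionContext
    import scala.concurrent.util.duration._

    val system = ActorSystem("breaker-example")              // hypothetical system
    implicit val ec: ExecutionContext = system.dispatcher    // used for breaker state-transition listeners

    // callTimeout and resetTimeout are FiniteDuration now; Duration.Inf is rejected at compile time
    val breaker = new CircuitBreaker(system.scheduler, maxFailures = 5,
      callTimeout = 10.seconds, resetTimeout = 1.minute)

    val worker = system.actorOf(Props(new Actor { def receive = { case _ ⇒ } }), "worker")

    // gracefulStop likewise only accepts a FiniteDuration
    val stopped = gracefulStop(worker, 5.seconds)(system)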
*/ - def gracefulStop(target: ActorRef, timeout: Duration)(implicit system: ActorSystem): Future[Boolean] = { + def gracefulStop(target: ActorRef, timeout: FiniteDuration)(implicit system: ActorSystem): Future[Boolean] = { if (target.isTerminated) Future successful true else system match { case e: ExtendedActorSystem ⇒ diff --git a/akka-actor/src/main/scala/akka/pattern/Patterns.scala b/akka-actor/src/main/scala/akka/pattern/Patterns.scala index 8dda900e35..c4440f4723 100644 --- a/akka-actor/src/main/scala/akka/pattern/Patterns.scala +++ b/akka-actor/src/main/scala/akka/pattern/Patterns.scala @@ -6,6 +6,7 @@ package akka.pattern import akka.actor.Scheduler import scala.concurrent.ExecutionContext import java.util.concurrent.Callable +import scala.concurrent.util.FiniteDuration object Patterns { import akka.actor.{ ActorRef, ActorSystem } @@ -103,20 +104,20 @@ object Patterns { * If the target actor isn't terminated within the timeout the [[scala.concurrent.Future]] * is completed with failure [[akka.pattern.AskTimeoutException]]. */ - def gracefulStop(target: ActorRef, timeout: Duration, system: ActorSystem): Future[java.lang.Boolean] = + def gracefulStop(target: ActorRef, timeout: FiniteDuration, system: ActorSystem): Future[java.lang.Boolean] = scalaGracefulStop(target, timeout)(system).asInstanceOf[Future[java.lang.Boolean]] /** * Returns a [[scala.concurrent.Future]] that will be completed with the success or failure of the provided Callable * after the specified duration. */ - def after[T](duration: Duration, scheduler: Scheduler, context: ExecutionContext, value: Callable[Future[T]]): Future[T] = + def after[T](duration: FiniteDuration, scheduler: Scheduler, context: ExecutionContext, value: Callable[Future[T]]): Future[T] = scalaAfter(duration, scheduler)(value.call())(context) /** * Returns a [[scala.concurrent.Future]] that will be completed with the success or failure of the provided value * after the specified duration. */ - def after[T](duration: Duration, scheduler: Scheduler, context: ExecutionContext, value: Future[T]): Future[T] = + def after[T](duration: FiniteDuration, scheduler: Scheduler, context: ExecutionContext, value: Future[T]): Future[T] = scalaAfter(duration, scheduler)(value)(context) } diff --git a/akka-actor/src/main/scala/akka/routing/ConnectionManager.scala b/akka-actor/src/main/scala/akka/routing/ConnectionManager.scala deleted file mode 100644 index 9029c1f78b..0000000000 --- a/akka-actor/src/main/scala/akka/routing/ConnectionManager.scala +++ /dev/null @@ -1,61 +0,0 @@ -/** - * Copyright (C) 2009-2012 Typesafe Inc. - */ - -package akka.routing - -import akka.actor._ - -/** - * An Iterable that also contains a version. - */ -trait VersionedIterable[A] { - def version: Long - def iterable: Iterable[A] - def apply(): Iterable[A] = iterable -} - -/** - * Manages connections (ActorRefs) for a router. - */ -trait ConnectionManager { - /** - * A version that is useful to see if there is any change in the connections. If there is a change, a router is - * able to update its internal datastructures. - */ - def version: Long - - /** - * Returns the number of 'available' connections. Value could be stale as soon as received, and this method can't be combined (easily) - * with an atomic read of and size and version. - */ - def size: Int - - /** - * Returns if the number of 'available' is 0 or not. Value could be stale as soon as received, and this method can't be combined (easily) - * with an atomic read of and isEmpty and version. 
- */ - def isEmpty: Boolean - - /** - * Shuts the connection manager down, which stops all managed actors - */ - def shutdown(): Unit - - /** - * Returns a VersionedIterator containing all connected ActorRefs at some moment in time. Since there is - * the time element, also the version is included to be able to read the data (the connections) and the version - * in an atomic manner. - * - * This Iterable is 'persistent'. So it can be handed out to different threads and they see a stable (immutable) - * view of some set of connections. - */ - def connections: VersionedIterable[ActorRef] - - /** - * Removes a connection from the connection manager. - * - * @param ref the dead - */ - def remove(deadRef: ActorRef): Unit -} diff --git a/akka-actor/src/main/scala/akka/routing/ConsistentHash.scala b/akka-actor/src/main/scala/akka/routing/ConsistentHash.scala index afa321d07d..79c31cda33 100644 --- a/akka-actor/src/main/scala/akka/routing/ConsistentHash.scala +++ b/akka-actor/src/main/scala/akka/routing/ConsistentHash.scala @@ -4,255 +4,126 @@ package akka.routing -import scala.collection.immutable.{ TreeSet, Seq } -import scala.collection.mutable.{ Buffer, Map } - -// ============================================================================================= -// Adapted from HashRing.scala in Debasish Ghosh's Redis Client, licensed under Apache 2 license -// ============================================================================================= +import scala.collection.immutable.SortedMap +import scala.reflect.ClassTag +import java.util.Arrays /** - * Consistent Hashing node ring abstraction. + * Consistent Hashing node ring implementation. + * + * A good explanation of Consistent Hashing: + * http://weblogs.java.net/blog/tomwhite/archive/2007/11/consistent_hash.html + * + * Note that toString of the ring nodes are used for the node + * hash, i.e. make sure it is different for different nodes. * - * Not thread-safe, to be used from within an Actor or protected some other way. */ -class ConsistentHash[T](nodes: Seq[T], replicas: Int) { - private val cluster = Buffer[T]() - private var sortedKeys = TreeSet[Long]() - private var ring = Map[Long, T]() +class ConsistentHash[T: ClassTag] private (nodes: SortedMap[Int, T], virtualNodesFactor: Int) { - nodes.foreach(this += _) + import ConsistentHash._ - def +=(node: T): Unit = { - cluster += node - (1 to replicas) foreach { replica ⇒ - val key = hashFor((node + ":" + replica).getBytes("UTF-8")) - ring += (key -> node) - sortedKeys = sortedKeys + key - } + if (virtualNodesFactor < 1) throw new IllegalArgumentException("virtualNodesFactor must be >= 1") + + // arrays for fast binary search and access + // nodeHashRing is the sorted hash values of the nodes + // nodeRing is the nodes sorted in the same order as nodeHashRing, i.e. same index + private val (nodeHashRing: Array[Int], nodeRing: Array[T]) = { + val (nhr: Seq[Int], nr: Seq[T]) = nodes.toSeq.unzip + (nhr.toArray, nr.toArray) } - def -=(node: T): Unit = { - cluster -= node - (1 to replicas) foreach { replica ⇒ - val key = hashFor((node + ":" + replica).getBytes("UTF-8")) - ring -= key - sortedKeys = sortedKeys - key - } - } + /** + * Adds a node to the node ring. + * Note that the instance is immutable and this + * operation returns a new instance. 
+ */ + def :+(node: T): ConsistentHash[T] = + new ConsistentHash(nodes ++ ((1 to virtualNodesFactor) map { r ⇒ (nodeHashFor(node, r) -> node) }), virtualNodesFactor) - def nodeFor(key: Array[Byte]): T = { - val hash = hashFor(key) - if (sortedKeys contains hash) ring(hash) + /** + * Adds a node to the node ring. + * Note that the instance is immutable and this + * operation returns a new instance. + * JAVA API + */ + def add(node: T): ConsistentHash[T] = this :+ node + + /** + * Removes a node from the node ring. + * Note that the instance is immutable and this + * operation returns a new instance. + */ + def :-(node: T): ConsistentHash[T] = + new ConsistentHash(nodes -- ((1 to virtualNodesFactor) map { r ⇒ nodeHashFor(node, r) }), virtualNodesFactor) + + /** + * Removes a node from the node ring. + * Note that the instance is immutable and this + * operation returns a new instance. + * JAVA API + */ + def remove(node: T): ConsistentHash[T] = this :- node + + // converts the result of Arrays.binarySearch into a index in the nodeRing array + // see documentation of Arrays.binarySearch for what it returns + private def idx(i: Int): Int = { + if (i >= 0) i // exact match else { - if (hash < sortedKeys.firstKey) ring(sortedKeys.firstKey) - else if (hash > sortedKeys.lastKey) ring(sortedKeys.lastKey) - else ring(sortedKeys.rangeImpl(None, Some(hash)).lastKey) + val j = math.abs(i + 1) + if (j >= nodeHashRing.length) 0 // after last, use first + else j // next node clockwise } } - private def hashFor(bytes: Array[Byte]): Long = { - val hash = MurmurHash.arrayHash(bytes) - if (hash == Int.MinValue) hash + 1 - math.abs(hash) - } -} - -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -/** - * An implementation of Austin Appleby's MurmurHash 3.0 algorithm - * (32 bit version); reference: http://code.google.com/p/smhasher - * - * This is the hash used by collections and case classes (including - * tuples). - * - * @author Rex Kerr - * @version 2.9 - * @since 2.9 - */ - -import java.lang.Integer.{ rotateLeft ⇒ rotl } - -/** - * A class designed to generate well-distributed non-cryptographic - * hashes. It is designed to be passed to a collection's foreach method, - * or can take individual hash values with append. Its own hash code is - * set equal to the hash code of whatever it is hashing. - */ -class MurmurHash[@specialized(Int, Long, Float, Double) T](seed: Int) extends (T ⇒ Unit) { - import MurmurHash._ - - private var h = startHash(seed) - private var c = hiddenMagicA - private var k = hiddenMagicB - private var hashed = false - private var hashvalue = h - - /** Begin a new hash using the same seed. */ - def reset(): Unit = { - h = startHash(seed) - c = hiddenMagicA - k = hiddenMagicB - hashed = false - } - - /** Incorporate the hash value of one item. */ - def apply(t: T): Unit = { - h = extendHash(h, t.##, c, k) - c = nextMagicA(c) - k = nextMagicB(k) - hashed = false - } - - /** Incorporate a known hash value. */ - def append(i: Int): Unit = { - h = extendHash(h, i, c, k) - c = nextMagicA(c) - k = nextMagicB(k) - hashed = false - } - - /** Retrieve the hash value */ - def hash: Int = { - if (!hashed) { - hashvalue = finalizeHash(h) - hashed = true - } - hashvalue - } - - override def hashCode: Int = hash -} - -/** - * An object designed to generate well-distributed non-cryptographic - * hashes. 
It is designed to hash a collection of integers; along with - * the integers to hash, it generates two magic streams of integers to - * increase the distribution of repetitive input sequences. Thus, - * three methods need to be called at each step (to start and to - * incorporate a new integer) to update the values. Only one method - * needs to be called to finalize the hash. - */ - -object MurmurHash { - // Magic values used for MurmurHash's 32 bit hash. - // Don't change these without consulting a hashing expert! - final private val visibleMagic: Int = 0x971e137b - final private val hiddenMagicA: Int = 0x95543787 - final private val hiddenMagicB: Int = 0x2ad7eb25 - final private val visibleMixer: Int = 0x52dce729 - final private val hiddenMixerA: Int = 0x7b7d159c - final private val hiddenMixerB: Int = 0x6bce6396 - final private val finalMixer1: Int = 0x85ebca6b - final private val finalMixer2: Int = 0xc2b2ae35 - - // Arbitrary values used for hashing certain classes - final private val seedString: Int = 0xf7ca7fd2 - final private val seedArray: Int = 0x3c074a61 - - /** The first 23 magic integers from the first stream are stored here */ - val storedMagicA: Array[Int] = - Iterator.iterate(hiddenMagicA)(nextMagicA).take(23).toArray - - /** The first 23 magic integers from the second stream are stored here */ - val storedMagicB: Array[Int] = - Iterator.iterate(hiddenMagicB)(nextMagicB).take(23).toArray - - /** Begin a new hash with a seed value. */ - def startHash(seed: Int): Int = seed ^ visibleMagic - - /** The initial magic integers in the first stream. */ - def startMagicA: Int = hiddenMagicA - - /** The initial magic integer in the second stream. */ - def startMagicB: Int = hiddenMagicB - - /** - * Incorporates a new value into an existing hash. 
- * - * @param hash the prior hash value - * @param value the new value to incorporate - * @param magicA a magic integer from the stream - * @param magicB a magic integer from a different stream - * @return the updated hash value - */ - def extendHash(hash: Int, value: Int, magicA: Int, magicB: Int): Int = - (hash ^ rotl(value * magicA, 11) * magicB) * 3 + visibleMixer - - /** Given a magic integer from the first stream, compute the next */ - def nextMagicA(magicA: Int): Int = magicA * 5 + hiddenMixerA - - /** Given a magic integer from the second stream, compute the next */ - def nextMagicB(magicB: Int): Int = magicB * 5 + hiddenMixerB - - /** Once all hashes have been incorporated, this performs a final mixing */ - def finalizeHash(hash: Int): Int = { - var i = (hash ^ (hash >>> 16)) - i *= finalMixer1 - i ^= (i >>> 13) - i *= finalMixer2 - i ^= (i >>> 16) - i - } - - /** Compute a high-quality hash of an array */ - def arrayHash[@specialized T](a: Array[T]): Int = { - var h = startHash(a.length * seedArray) - var c = hiddenMagicA - var k = hiddenMagicB - var j = 0 - while (j < a.length) { - h = extendHash(h, a(j).##, c, k) - c = nextMagicA(c) - k = nextMagicB(k) - j += 1 - } - finalizeHash(h) - } - - /** Compute a high-quality hash of a string */ - def stringHash(s: String): Int = { - var h = startHash(s.length * seedString) - var c = hiddenMagicA - var k = hiddenMagicB - var j = 0 - while (j + 1 < s.length) { - val i = (s.charAt(j) << 16) + s.charAt(j + 1); - h = extendHash(h, i, c, k) - c = nextMagicA(c) - k = nextMagicB(k) - j += 2 - } - if (j < s.length) h = extendHash(h, s.charAt(j), c, k) - finalizeHash(h) - } - /** - * Compute a hash that is symmetric in its arguments--that is, - * where the order of appearance of elements does not matter. - * This is useful for hashing sets, for example. + * Get the node responsible for the data key. + * Can only be used if nodes exists in the node ring, + * otherwise throws `IllegalStateException` */ - def symmetricHash[T](xs: TraversableOnce[T], seed: Int): Int = { - var a, b, n = 0 - var c = 1 - xs.foreach(i ⇒ { - val h = i.## - a += h - b ^= h - if (h != 0) c *= h - n += 1 - }) - var h = startHash(seed * n) - h = extendHash(h, a, storedMagicA(0), storedMagicB(0)) - h = extendHash(h, b, storedMagicA(1), storedMagicB(1)) - h = extendHash(h, c, storedMagicA(2), storedMagicB(2)) - finalizeHash(h) + def nodeFor(key: Array[Byte]): T = { + if (isEmpty) throw new IllegalStateException("Can't get node for [%s] from an empty node ring" format key) + + nodeRing(idx(Arrays.binarySearch(nodeHashRing, hashFor(key)))) } + + /** + * Get the node responsible for the data key. + * Can only be used if nodes exists in the node ring, + * otherwise throws `IllegalStateException` + */ + def nodeFor(key: String): T = { + if (isEmpty) throw new IllegalStateException("Can't get node for [%s] from an empty node ring" format key) + + nodeRing(idx(Arrays.binarySearch(nodeHashRing, hashFor(key)))) + } + + /** + * Is the node ring empty, i.e. no nodes added or all removed. 
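A minimal sketch of using the new immutable ConsistentHash ring from the hunk above; the node names and the key are invented for illustration:

    import akka.routing.ConsistentHash

    // a ring over three hypothetical nodes, with 10 virtual nodes per node
    val ring = ConsistentHash(List("nodeA", "nodeB", "nodeC"), virtualNodesFactor = 10)

    // the same key always maps to the same node, as long as the ring is unchanged
    val owner = ring.nodeFor("some-key")

    // the ring is immutable: :+ and :- return new instances instead of mutating
    val grown  = ring :+ "nodeD"
    val shrunk = ring :- "nodeC"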
+ */ + def isEmpty: Boolean = nodes.isEmpty + +} + +object ConsistentHash { + def apply[T: ClassTag](nodes: Iterable[T], virtualNodesFactor: Int): ConsistentHash[T] = { + new ConsistentHash(SortedMap.empty[Int, T] ++ + (for (node ← nodes; vnode ← 1 to virtualNodesFactor) yield (nodeHashFor(node, vnode) -> node)), + virtualNodesFactor) + } + + /** + * Factory method to create a ConsistentHash + * JAVA API + */ + def create[T](nodes: java.lang.Iterable[T], virtualNodesFactor: Int): ConsistentHash[T] = { + import scala.collection.JavaConverters._ + apply(nodes.asScala, virtualNodesFactor)(ClassTag(classOf[Any].asInstanceOf[Class[T]])) + } + + private def nodeHashFor(node: Any, vnode: Int): Int = + hashFor((node + ":" + vnode).getBytes("UTF-8")) + + private def hashFor(bytes: Array[Byte]): Int = MurmurHash.arrayHash(bytes) + + private def hashFor(string: String): Int = MurmurHash.stringHash(string) } diff --git a/akka-actor/src/main/scala/akka/routing/ConsistentHashingRouter.scala b/akka-actor/src/main/scala/akka/routing/ConsistentHashingRouter.scala new file mode 100644 index 0000000000..3b9802d7fd --- /dev/null +++ b/akka-actor/src/main/scala/akka/routing/ConsistentHashingRouter.scala @@ -0,0 +1,297 @@ +/** + * Copyright (C) 2009-2012 Typesafe Inc. + */ +package akka.routing + +import scala.collection.JavaConversions.iterableAsScalaIterable +import scala.util.control.NonFatal +import akka.actor.ActorRef +import akka.actor.SupervisorStrategy +import akka.actor.Props +import akka.dispatch.Dispatchers +import akka.event.Logging +import akka.serialization.SerializationExtension +import java.util.concurrent.atomic.AtomicReference + +object ConsistentHashingRouter { + /** + * Creates a new ConsistentHashingRouter, routing to the specified routees + */ + def apply(routees: Iterable[ActorRef]): ConsistentHashingRouter = + new ConsistentHashingRouter(routees = routees map (_.path.toString)) + + /** + * Java API to create router with the supplied 'routees' actors. + */ + def create(routees: java.lang.Iterable[ActorRef]): ConsistentHashingRouter = { + import scala.collection.JavaConverters._ + apply(routees.asScala) + } + + /** + * If you don't define the `hashMapping` when + * constructing the [[akka.routing.ConsistentHashingRouter]] + * the messages need to implement this interface to define what + * data to use for the consistent hash key. Note that it's not + * the hash, but the data to be hashed. + * + * If returning an `Array[Byte]` or String it will be used as is, + * otherwise the configured [[akka.serialization.Serializer]] + * will be applied to the returned data. + * + * If messages can't implement this interface themselves, + * it's possible to wrap the messages in + * [[akka.routing.ConsistentHashingRouter.ConsistentHashableEnvelope]], + * or to define the `hashMapping` of the router + */ + trait ConsistentHashable { + def consistentHashKey: Any + } + + /** + * If you don't define the `hashMapping` when + * constructing the [[akka.routing.ConsistentHashingRouter]] + * and messages can't implement [[akka.routing.ConsistentHashingRouter.ConsistentHashable]] + * themselves they can be wrapped by this envelope instead. The + * router will only send the wrapped message to the destination, + * i.e. the envelope will be stripped off.
+ */ + @SerialVersionUID(1L) + final case class ConsistentHashableEnvelope(message: Any, hashKey: Any) + extends ConsistentHashable with RouterEnvelope { + override def consistentHashKey: Any = hashKey + } + + /** + * Partial function from message to the data to + * use for the consistent hash key. Note that it's not + * the hash that is to be returned, but the data to be hashed. + * + * If returning an `Array[Byte]` or String it will be used as is, + * otherwise the configured [[akka.serialization.Serializer]] + * will be applied to the returned data. + */ + type ConsistentHashMapping = PartialFunction[Any, Any] + + @SerialVersionUID(1L) + object emptyConsistentHashMapping extends ConsistentHashMapping { + def isDefinedAt(x: Any) = false + def apply(x: Any) = throw new UnsupportedOperationException("Empty ConsistentHashMapping apply()") + } + + /** + * JAVA API + * Mapping from message to the data to use for the consistent hash key. + * Note that it's not the hash that is to be returned, but the data to be + * hashed. + * + * May return `null` to indicate that the message is not handled by + * this mapping. + * + * If returning an `Array[Byte]` or String it will be used as is, + * otherwise the configured [[akka.serialization.Serializer]] + * will be applied to the returned data. + */ + trait ConsistentHashMapper { + def hashKey(message: Any): Any + } +} +/** + * A Router that uses consistent hashing to select a connection based on the + * sent message. + * + * There are three ways to define what data to use for the consistent hash key. + * + * 1. You can define `hashMapping` / `withHashMapper` + * of the router to map incoming messages to their consistent hash key. + * This makes the decision transparent for the sender. + * + * 2. The messages may implement [[akka.routing.ConsistentHashingRouter.ConsistentHashable]]. + * The key is part of the message and it's convenient to define it together + * with the message definition. + * + * 3. The messages can be wrapped in a [[akka.routing.ConsistentHashingRouter.ConsistentHashableEnvelope]] + * to define what data to use for the consistent hash key. The sender knows + * the key to use. + * + * These ways to define the consistent hash key can be used together and at + * the same time for one router. The `hashMapping` is tried first. + * + * Please note that providing both 'nrOfInstances' and 'routees' does not make logical + * sense as this means that the router should both create new actors and use the 'routees' + * actor(s). In this case the 'nrOfInstances' will be ignored and the 'routees' will be used. + *
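A sketch of the three ways described above, in one hypothetical cache example; the Cache actor, the Get/Put messages and the system name are invented for illustration:

    import akka.actor.{ Actor, ActorSystem, Props }
    import akka.routing.ConsistentHashingRouter
    import akka.routing.ConsistentHashingRouter.{ ConsistentHashable, ConsistentHashableEnvelope, ConsistentHashMapping }

    final case class Get(key: String) extends ConsistentHashable {
      override def consistentHashKey: Any = key               // way 2: the key travels with the message
    }
    final case class Put(key: String, value: String)

    class Cache extends Actor { def receive = { case _ ⇒ } }  // stand-in routee

    val system = ActorSystem("hashing-example")

    val mapping: ConsistentHashMapping = { case Put(key, _) ⇒ key }  // way 1: mapping defined with the router

    val cache = system.actorOf(Props[Cache].withRouter(
      ConsistentHashingRouter(nrOfInstances = 5, hashMapping = mapping)), "cache")

    cache ! Put("alpha", "A")                                                // routed via hashMapping
    cache ! Get("alpha")                                                     // routed via ConsistentHashable
    cache ! ConsistentHashableEnvelope(message = "evict", hashKey = "alpha") // way 3: explicit envelope

If the router is instead declared in the deployment configuration (router = consistent-hashing), the configured settings take precedence over the nrOfInstances given in code, while the hashMapping can only be supplied in code, as the paragraphs that follow spell out.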
+ * The configuration parameter trumps the constructor arguments. This means that + * if you provide either 'nrOfInstances' or 'routees' during instantiation they will + * be ignored if the router is defined in the configuration file for the actor being used. + * + *

<h1>Supervision Setup</h1>

+ * + * The router creates a “head” actor which supervises and/or monitors the + * routees. Instances are created as children of this actor, hence the + * children are not supervised by the parent of the router. Common choices are + * to always escalate (meaning that fault handling is always applied to all + * children simultaneously; this is the default) or use the parent’s strategy, + * which will result in routed children being treated individually, but it is + * possible as well to use Routers to give different supervisor strategies to + * different groups of children. + * + * @param routees string representation of the actor paths of the routees that will be looked up + * using `actorFor` in [[akka.actor.ActorRefProvider]] + * @param virtualNodesFactor number of virtual nodes per node, used in [[akka.routing.ConsistentHash]] + * @param hashMapping partial function from message to the data to + * use for the consistent hash key + */ +@SerialVersionUID(1L) +case class ConsistentHashingRouter( + nrOfInstances: Int = 0, routees: Iterable[String] = Nil, override val resizer: Option[Resizer] = None, + val routerDispatcher: String = Dispatchers.DefaultDispatcherId, + val supervisorStrategy: SupervisorStrategy = Router.defaultSupervisorStrategy, + val virtualNodesFactor: Int = 0, + val hashMapping: ConsistentHashingRouter.ConsistentHashMapping = ConsistentHashingRouter.emptyConsistentHashMapping) + extends RouterConfig with ConsistentHashingLike { + + /** + * Constructor that sets nrOfInstances to be created. + * Java API + */ + def this(nr: Int) = this(nrOfInstances = nr) + + /** + * Constructor that sets the routees to be used. + * Java API + * @param routeePaths string representation of the actor paths of the routees that will be looked up + * using `actorFor` in [[akka.actor.ActorRefProvider]] + */ + def this(routeePaths: java.lang.Iterable[String]) = this(routees = iterableAsScalaIterable(routeePaths)) + + /** + * Constructor that sets the resizer to be used. + * Java API + */ + def this(resizer: Resizer) = this(resizer = Some(resizer)) + + /** + * Java API for setting routerDispatcher + */ + def withDispatcher(dispatcherId: String): ConsistentHashingRouter = copy(routerDispatcher = dispatcherId) + + /** + * Java API for setting the supervisor strategy to be used for the “head” + * Router actor. + */ + def withSupervisorStrategy(strategy: SupervisorStrategy): ConsistentHashingRouter = copy(supervisorStrategy = strategy) + + /** + * Java API for setting the number of virtual nodes per node, used in [[akka.routing.ConsistentHash]] + */ + def withVirtualNodesFactor(vnodes: Int): ConsistentHashingRouter = copy(virtualNodesFactor = vnodes) + + /** + * Java API for setting the mapping from message to the data to use for the consistent hash key. + */ + def withHashMapper(mapping: ConsistentHashingRouter.ConsistentHashMapper) = { + copy(hashMapping = { + case message if (mapping.hashKey(message).asInstanceOf[AnyRef] ne null) ⇒ + mapping.hashKey(message) + }) + } + + /** + * Uses the resizer of the given RouterConfig if this RouterConfig + * doesn't have one, i.e. the resizer defined in code is used if + * resizer was not defined in config. + * Uses the `hashMapping` defined in code, since + * that can't be defined in configuration.
+ */ + override def withFallback(other: RouterConfig): RouterConfig = other match { + case _: FromConfig ⇒ this + case otherRouter: ConsistentHashingRouter ⇒ + val useResizer = + if (this.resizer.isEmpty && otherRouter.resizer.isDefined) otherRouter.resizer + else this.resizer + copy(resizer = useResizer, hashMapping = otherRouter.hashMapping) + case _ ⇒ throw new IllegalArgumentException("Expected ConsistentHashingRouter, got [%s]".format(other)) + } +} + +/** + * The core pieces of the routing logic is located in this + * trait to be able to extend. + */ +trait ConsistentHashingLike { this: RouterConfig ⇒ + + import ConsistentHashingRouter._ + + def nrOfInstances: Int + + def routees: Iterable[String] + + def virtualNodesFactor: Int + + def hashMapping: ConsistentHashMapping + + override def createRoute(routeeProvider: RouteeProvider): Route = { + if (resizer.isEmpty) { + if (routees.isEmpty) routeeProvider.createRoutees(nrOfInstances) + else routeeProvider.registerRouteesFor(routees) + } + + val log = Logging(routeeProvider.context.system, routeeProvider.context.self) + val vnodes = + if (virtualNodesFactor == 0) routeeProvider.context.system.settings.DefaultVirtualNodesFactor + else virtualNodesFactor + + // tuple of routees and the ConsistentHash, updated together in updateConsistentHash + val consistentHashRef = new AtomicReference[(IndexedSeq[ActorRef], ConsistentHash[ActorRef])]((null, null)) + updateConsistentHash() + + // update consistentHash when routees has changed + // changes to routees are rare and when no changes this is a quick operation + def updateConsistentHash(): ConsistentHash[ActorRef] = { + val oldConsistentHashTuple = consistentHashRef.get + val (oldConsistentHashRoutees, oldConsistentHash) = oldConsistentHashTuple + val currentRoutees = routeeProvider.routees + if (currentRoutees ne oldConsistentHashRoutees) { + // when other instance, same content, no need to re-hash, but try to set routees + val consistentHash = + if (currentRoutees == oldConsistentHashRoutees) oldConsistentHash + else ConsistentHash(currentRoutees, vnodes) // re-hash + // ignore, don't update, in case of CAS failure + consistentHashRef.compareAndSet(oldConsistentHashTuple, (currentRoutees, consistentHash)) + consistentHash + } else oldConsistentHash + } + + def target(hashData: Any): ActorRef = try { + val currentConsistenHash = updateConsistentHash() + if (currentConsistenHash.isEmpty) routeeProvider.context.system.deadLetters + else hashData match { + case bytes: Array[Byte] ⇒ currentConsistenHash.nodeFor(bytes) + case str: String ⇒ currentConsistenHash.nodeFor(str) + case x: AnyRef ⇒ currentConsistenHash.nodeFor(SerializationExtension(routeeProvider.context.system).serialize(x).get) + } + } catch { + case NonFatal(e) ⇒ + // serialization failed + log.warning("Couldn't route message with consistent hash key [{}] due to [{}]", hashData, e.getMessage) + routeeProvider.context.system.deadLetters + } + + { + case (sender, message) ⇒ + message match { + case Broadcast(msg) ⇒ toAll(sender, routeeProvider.routees) + case _ if hashMapping.isDefinedAt(message) ⇒ + List(Destination(sender, target(hashMapping(message)))) + case hashable: ConsistentHashable ⇒ List(Destination(sender, target(hashable.consistentHashKey))) + case other ⇒ + log.warning("Message [{}] must be handled by hashMapping, or implement [{}] or be wrapped in [{}]", + message.getClass.getName, classOf[ConsistentHashable].getName, + classOf[ConsistentHashableEnvelope].getName) + List(Destination(sender, 
routeeProvider.context.system.deadLetters)) + } + + } + } +} \ No newline at end of file diff --git a/akka-actor/src/main/scala/akka/routing/MurmurHash.scala b/akka-actor/src/main/scala/akka/routing/MurmurHash.scala new file mode 100644 index 0000000000..fc67613a5d --- /dev/null +++ b/akka-actor/src/main/scala/akka/routing/MurmurHash.scala @@ -0,0 +1,149 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +/** + * An implementation of Austin Appleby's MurmurHash 3.0 algorithm + * (32 bit version); reference: http://code.google.com/p/smhasher + * + * This is the hash used by collections and case classes (including + * tuples). + * + * @author Rex Kerr + * @version 2.9 + * @since 2.9 + */ + +package akka.routing + +import java.lang.Integer.{ rotateLeft ⇒ rotl } + +/** + * An object designed to generate well-distributed non-cryptographic + * hashes. It is designed to hash a collection of integers; along with + * the integers to hash, it generates two magic streams of integers to + * increase the distribution of repetitive input sequences. Thus, + * three methods need to be called at each step (to start and to + * incorporate a new integer) to update the values. Only one method + * needs to be called to finalize the hash. + */ + +object MurmurHash { + // Magic values used for MurmurHash's 32 bit hash. + // Don't change these without consulting a hashing expert! + final private val visibleMagic: Int = 0x971e137b + final private val hiddenMagicA: Int = 0x95543787 + final private val hiddenMagicB: Int = 0x2ad7eb25 + final private val visibleMixer: Int = 0x52dce729 + final private val hiddenMixerA: Int = 0x7b7d159c + final private val hiddenMixerB: Int = 0x6bce6396 + final private val finalMixer1: Int = 0x85ebca6b + final private val finalMixer2: Int = 0xc2b2ae35 + + // Arbitrary values used for hashing certain classes + final private val seedString: Int = 0xf7ca7fd2 + final private val seedArray: Int = 0x3c074a61 + + /** The first 23 magic integers from the first stream are stored here */ + private val storedMagicA: Array[Int] = + Iterator.iterate(hiddenMagicA)(nextMagicA).take(23).toArray + + /** The first 23 magic integers from the second stream are stored here */ + private val storedMagicB: Array[Int] = + Iterator.iterate(hiddenMagicB)(nextMagicB).take(23).toArray + + /** Begin a new hash with a seed value. */ + def startHash(seed: Int): Int = seed ^ visibleMagic + + /** The initial magic integers in the first stream. */ + def startMagicA: Int = hiddenMagicA + + /** The initial magic integer in the second stream. */ + def startMagicB: Int = hiddenMagicB + + /** + * Incorporates a new value into an existing hash. 
+ * + * @param hash the prior hash value + * @param value the new value to incorporate + * @param magicA a magic integer from the stream + * @param magicB a magic integer from a different stream + * @return the updated hash value + */ + def extendHash(hash: Int, value: Int, magicA: Int, magicB: Int): Int = + (hash ^ rotl(value * magicA, 11) * magicB) * 3 + visibleMixer + + /** Given a magic integer from the first stream, compute the next */ + def nextMagicA(magicA: Int): Int = magicA * 5 + hiddenMixerA + + /** Given a magic integer from the second stream, compute the next */ + def nextMagicB(magicB: Int): Int = magicB * 5 + hiddenMixerB + + /** Once all hashes have been incorporated, this performs a final mixing */ + def finalizeHash(hash: Int): Int = { + var i = (hash ^ (hash >>> 16)) + i *= finalMixer1 + i ^= (i >>> 13) + i *= finalMixer2 + i ^= (i >>> 16) + i + } + + /** Compute a high-quality hash of an array */ + def arrayHash[@specialized T](a: Array[T]): Int = { + var h = startHash(a.length * seedArray) + var c = hiddenMagicA + var k = hiddenMagicB + var j = 0 + while (j < a.length) { + h = extendHash(h, a(j).##, c, k) + c = nextMagicA(c) + k = nextMagicB(k) + j += 1 + } + finalizeHash(h) + } + + /** Compute a high-quality hash of a string */ + def stringHash(s: String): Int = { + var h = startHash(s.length * seedString) + var c = hiddenMagicA + var k = hiddenMagicB + var j = 0 + while (j + 1 < s.length) { + val i = (s.charAt(j) << 16) + s.charAt(j + 1); + h = extendHash(h, i, c, k) + c = nextMagicA(c) + k = nextMagicB(k) + j += 2 + } + if (j < s.length) h = extendHash(h, s.charAt(j), c, k) + finalizeHash(h) + } + + /** + * Compute a hash that is symmetric in its arguments--that is, + * where the order of appearance of elements does not matter. + * This is useful for hashing sets, for example. 
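A short sketch of using the MurmurHash helpers above directly; the seed and the values hashed here are arbitrary:

    import akka.routing.MurmurHash._

    // one-shot helpers
    val h1 = stringHash("some-key")
    val h2 = arrayHash(Array[Byte](1, 2, 3))

    // incremental protocol: start, extend with the two magic streams, then finalize
    var h = startHash(42)
    var magicA = startMagicA
    var magicB = startMagicB
    for (v ← Seq(7, 11, 13)) {
      h = extendHash(h, v, magicA, magicB)
      magicA = nextMagicA(magicA)
      magicB = nextMagicB(magicB)
    }
    val result = finalizeHash(h)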
+ */ + def symmetricHash[T](xs: TraversableOnce[T], seed: Int): Int = { + var a, b, n = 0 + var c = 1 + xs.foreach(i ⇒ { + val h = i.## + a += h + b ^= h + if (h != 0) c *= h + n += 1 + }) + var h = startHash(seed * n) + h = extendHash(h, a, storedMagicA(0), storedMagicB(0)) + h = extendHash(h, b, storedMagicA(1), storedMagicB(1)) + h = extendHash(h, c, storedMagicA(2), storedMagicB(2)) + finalizeHash(h) + } +} diff --git a/akka-actor/src/main/scala/akka/routing/Routing.scala b/akka-actor/src/main/scala/akka/routing/Routing.scala index 1d18e7ed2e..8fcff84831 100644 --- a/akka-actor/src/main/scala/akka/routing/Routing.scala +++ b/akka-actor/src/main/scala/akka/routing/Routing.scala @@ -5,7 +5,6 @@ package akka.routing import language.implicitConversions import language.postfixOps - import akka.actor._ import scala.concurrent.util.Duration import scala.concurrent.util.duration._ @@ -19,6 +18,7 @@ import scala.concurrent.forkjoin.ThreadLocalRandom import akka.dispatch.Dispatchers import scala.annotation.tailrec import concurrent.ExecutionContext +import scala.concurrent.util.FiniteDuration /** * A RoutedActorRef is an ActorRef that has a set of connected ActorRef and it uses a Router to @@ -115,8 +115,8 @@ private[akka] class RoutedActorCell(_system: ActorSystemImpl, _ref: InternalActo val s = if (sender eq null) system.deadLetters else sender val msg = message match { - case Broadcast(m) ⇒ m - case m ⇒ m + case wrapped: RouterEnvelope ⇒ wrapped.message + case m ⇒ m } applyRoute(s, message) match { @@ -283,7 +283,7 @@ class RouteeProvider(val context: ActorContext, val routeeProps: Props, val resi * The reason for the delay is to give concurrent messages a chance to be * placed in mailbox before sending PoisonPill. */ - def removeRoutees(nrOfInstances: Int, stopDelay: Duration): Unit = { + def removeRoutees(nrOfInstances: Int, stopDelay: FiniteDuration): Unit = { if (nrOfInstances <= 0) { throw new IllegalArgumentException("Expected positive nrOfInstances, got [%s]".format(nrOfInstances)) } else if (nrOfInstances > 0) { @@ -298,7 +298,7 @@ class RouteeProvider(val context: ActorContext, val routeeProps: Props, val resi * Give concurrent messages a chance to be placed in mailbox before * sending PoisonPill. */ - protected def delayedStop(scheduler: Scheduler, abandon: Iterable[ActorRef], stopDelay: Duration): Unit = { + protected def delayedStop(scheduler: Scheduler, abandon: Iterable[ActorRef], stopDelay: FiniteDuration): Unit = { if (abandon.nonEmpty) { if (stopDelay <= Duration.Zero) { abandon foreach (_ ! PoisonPill) @@ -400,7 +400,15 @@ private object Router { * Router implementations may choose to handle this message differently. */ @SerialVersionUID(1L) -case class Broadcast(message: Any) +case class Broadcast(message: Any) extends RouterEnvelope + +/** + * Only the contained message will be forwarded to the + * destination, i.e. the envelope will be stripped off. + */ +trait RouterEnvelope { + def message: Any +} /** * Sending this message to a router will make it send back its currently used routees. @@ -588,6 +596,10 @@ case class RoundRobinRouter(nrOfInstances: Int = 0, routees: Iterable[String] = } } +/** + * The core pieces of the routing logic is located in this + * trait to be able to extend. + */ trait RoundRobinLike { this: RouterConfig ⇒ def nrOfInstances: Int @@ -721,6 +733,10 @@ case class RandomRouter(nrOfInstances: Int = 0, routees: Iterable[String] = Nil, } } +/** + * The core pieces of the routing logic is located in this + * trait to be able to extend. 
+ */ trait RandomLike { this: RouterConfig ⇒ def nrOfInstances: Int @@ -861,6 +877,10 @@ case class SmallestMailboxRouter(nrOfInstances: Int = 0, routees: Iterable[Strin } } +/** + * The core pieces of the routing logic is located in this + * trait to be able to extend. + */ trait SmallestMailboxLike { this: RouterConfig ⇒ def nrOfInstances: Int @@ -1076,6 +1096,10 @@ case class BroadcastRouter(nrOfInstances: Int = 0, routees: Iterable[String] = N } } +/** + * The core pieces of the routing logic is located in this + * trait to be able to extend. + */ trait BroadcastLike { this: RouterConfig ⇒ def nrOfInstances: Int @@ -1098,13 +1122,13 @@ object ScatterGatherFirstCompletedRouter { /** * Creates a new ScatterGatherFirstCompletedRouter, routing to the specified routees, timing out after the specified Duration */ - def apply(routees: Iterable[ActorRef], within: Duration): ScatterGatherFirstCompletedRouter = + def apply(routees: Iterable[ActorRef], within: FiniteDuration): ScatterGatherFirstCompletedRouter = new ScatterGatherFirstCompletedRouter(routees = routees map (_.path.toString), within = within) /** * Java API to create router with the supplied 'routees' actors. */ - def create(routees: java.lang.Iterable[ActorRef], within: Duration): ScatterGatherFirstCompletedRouter = { + def create(routees: java.lang.Iterable[ActorRef], within: FiniteDuration): ScatterGatherFirstCompletedRouter = { import scala.collection.JavaConverters._ apply(routees.asScala, within) } @@ -1153,7 +1177,7 @@ object ScatterGatherFirstCompletedRouter { * using `actorFor` in [[akka.actor.ActorRefProvider]] */ @SerialVersionUID(1L) -case class ScatterGatherFirstCompletedRouter(nrOfInstances: Int = 0, routees: Iterable[String] = Nil, within: Duration, +case class ScatterGatherFirstCompletedRouter(nrOfInstances: Int = 0, routees: Iterable[String] = Nil, within: FiniteDuration, override val resizer: Option[Resizer] = None, val routerDispatcher: String = Dispatchers.DefaultDispatcherId, val supervisorStrategy: SupervisorStrategy = Router.defaultSupervisorStrategy) @@ -1166,7 +1190,7 @@ case class ScatterGatherFirstCompletedRouter(nrOfInstances: Int = 0, routees: It * Constructor that sets nrOfInstances to be created. * Java API */ - def this(nr: Int, w: Duration) = this(nrOfInstances = nr, within = w) + def this(nr: Int, w: FiniteDuration) = this(nrOfInstances = nr, within = w) /** * Constructor that sets the routees to be used. @@ -1174,14 +1198,14 @@ case class ScatterGatherFirstCompletedRouter(nrOfInstances: Int = 0, routees: It * @param routeePaths string representation of the actor paths of the routees that will be looked up * using `actorFor` in [[akka.actor.ActorRefProvider]] */ - def this(routeePaths: java.lang.Iterable[String], w: Duration) = + def this(routeePaths: java.lang.Iterable[String], w: FiniteDuration) = this(routees = iterableAsScalaIterable(routeePaths), within = w) /** * Constructor that sets the resizer to be used. * Java API */ - def this(resizer: Resizer, w: Duration) = this(resizer = Some(resizer), within = w) + def this(resizer: Resizer, w: FiniteDuration) = this(resizer = Some(resizer), within = w) /** * Java API for setting routerDispatcher @@ -1205,13 +1229,17 @@ case class ScatterGatherFirstCompletedRouter(nrOfInstances: Int = 0, routees: It } } +/** + * The core pieces of the routing logic is located in this + * trait to be able to extend. 
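A minimal construction sketch for the scatter-gather router with the FiniteDuration `within` shown above; the Worker actor and the system name are invented:

    import akka.actor.{ Actor, ActorSystem, Props }
    import akka.routing.ScatterGatherFirstCompletedRouter
    import scala.concurrent.util.duration._

    class Worker extends Actor { def receive = { case msg ⇒ sender ! msg } }  // stand-in routee

    val system = ActorSystem("scatter-example")

    // `within` has no default value and must be a finite duration
    val router = system.actorOf(Props[Worker].withRouter(
      ScatterGatherFirstCompletedRouter(nrOfInstances = 3, within = 2.seconds)), "scatter")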
+ */ trait ScatterGatherFirstCompletedLike { this: RouterConfig ⇒ def nrOfInstances: Int def routees: Iterable[String] - def within: Duration + def within: FiniteDuration def createRoute(routeeProvider: RouteeProvider): Route = { if (resizer.isEmpty) { @@ -1332,7 +1360,7 @@ case class DefaultResizer( * messages a chance to be placed in mailbox before sending PoisonPill. * Use 0 seconds to skip delay. */ - stopDelay: Duration = 1.second, + stopDelay: FiniteDuration = 1.second, /** * Number of messages between resize operation. * Use 1 to resize before each message. diff --git a/akka-actor/src/main/scala/akka/serialization/Serialization.scala b/akka-actor/src/main/scala/akka/serialization/Serialization.scala index b9d6298784..003c9de2b1 100644 --- a/akka-actor/src/main/scala/akka/serialization/Serialization.scala +++ b/akka-actor/src/main/scala/akka/serialization/Serialization.scala @@ -9,7 +9,6 @@ import com.typesafe.config.Config import akka.actor.{ Extension, ExtendedActorSystem, Address, DynamicAccess } import akka.event.Logging import java.util.concurrent.ConcurrentHashMap -import scala.util.control.NonFatal import scala.collection.mutable.ArrayBuffer import java.io.NotSerializableException import util.{ Try, DynamicVariable } @@ -100,8 +99,8 @@ class Serialization(val system: ExtendedActorSystem) extends Extension { // bindings are ordered from most specific to least specific def unique(possibilities: Seq[(Class[_], Serializer)]): Boolean = possibilities.size == 1 || - (possibilities map (_._1) forall (_ isAssignableFrom possibilities(0)._1)) || - (possibilities map (_._2) forall (_ == possibilities(0)._2)) + (possibilities forall (_._1 isAssignableFrom possibilities(0)._1)) || + (possibilities forall (_._2 == possibilities(0)._2)) val ser = bindings filter { _._1 isAssignableFrom clazz } match { case Seq() ⇒ diff --git a/akka-actor/src/main/scala/akka/util/Timeout.scala b/akka-actor/src/main/scala/akka/util/Timeout.scala index 7080775d9b..62faa56f3d 100644 --- a/akka-actor/src/main/scala/akka/util/Timeout.scala +++ b/akka-actor/src/main/scala/akka/util/Timeout.scala @@ -8,10 +8,10 @@ import language.implicitConversions import java.util.concurrent.TimeUnit import java.lang.{ Double ⇒ JDouble } -import scala.concurrent.util.Duration +import scala.concurrent.util.{ Duration, FiniteDuration } @SerialVersionUID(1L) -case class Timeout(duration: Duration) { +case class Timeout(duration: FiniteDuration) { def this(timeout: Long) = this(Duration(timeout, TimeUnit.MILLISECONDS)) def this(length: Long, unit: TimeUnit) = this(Duration(length, unit)) } @@ -26,17 +26,10 @@ object Timeout { */ val zero: Timeout = new Timeout(Duration.Zero) - /** - * A Timeout with infinite duration. Will never timeout. Use extreme caution with this - * as it may cause memory leaks, blocked threads, or may not even be supported by - * the receiver, which would result in an exception. 
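With Timeout now wrapping a FiniteDuration, Timeout.never is gone and an `ask` timeout has to be finite; a small sketch with a hypothetical echo actor:

    import akka.actor.{ Actor, ActorSystem, Props }
    import akka.pattern.ask
    import akka.util.Timeout
    import scala.concurrent.util.duration._

    val system = ActorSystem("timeout-example")
    val echo = system.actorOf(Props(new Actor { def receive = { case m ⇒ sender ! m } }), "echo")

    // durationToTimeout only converts FiniteDuration; Duration.Inf no longer turns into a Timeout
    implicit val timeout: Timeout = 5.seconds
    val reply = echo ? "ping"                 // the future fails with AskTimeoutException after 5 seconds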
- */ - val never: Timeout = new Timeout(Duration.Inf) - def apply(timeout: Long): Timeout = new Timeout(timeout) def apply(length: Long, unit: TimeUnit): Timeout = new Timeout(length, unit) - implicit def durationToTimeout(duration: Duration): Timeout = new Timeout(duration) + implicit def durationToTimeout(duration: FiniteDuration): Timeout = new Timeout(duration) implicit def intToTimeout(timeout: Int): Timeout = new Timeout(timeout) implicit def longToTimeout(timeout: Long): Timeout = new Timeout(timeout) } diff --git a/akka-agent/src/main/scala/akka/agent/Agent.scala b/akka-agent/src/main/scala/akka/agent/Agent.scala index 87fb5af568..5b786b2599 100644 --- a/akka-agent/src/main/scala/akka/agent/Agent.scala +++ b/akka-agent/src/main/scala/akka/agent/Agent.scala @@ -11,6 +11,7 @@ import akka.util.Timeout import scala.concurrent.stm._ import concurrent.{ ExecutionContext, Future, Promise, Await } import concurrent.util.Duration +import scala.concurrent.util.FiniteDuration /** * Used internally to send functions. @@ -240,7 +241,7 @@ class Agent[T](initialValue: T, refFactory: ActorRefFactory, system: ActorSystem * Dispatch a function to update the internal state, and return a Future where that new state can be obtained * within the given timeout */ - def alter(f: JFunc[T, T], timeout: Duration): Future[T] = alter(x ⇒ f(x))(timeout) + def alter(f: JFunc[T, T], timeout: FiniteDuration): Future[T] = alter(x ⇒ f(x))(timeout) /** * Java API: @@ -259,7 +260,7 @@ class Agent[T](initialValue: T, refFactory: ActorRefFactory, system: ActorSystem * or blocking operations. Dispatches using either `alterOff` or `alter` will * still be executed in order. */ - def alterOff(f: JFunc[T, T], timeout: Duration, ec: ExecutionContext): Unit = alterOff(x ⇒ f(x))(Timeout(timeout), ec) + def alterOff(f: JFunc[T, T], timeout: FiniteDuration, ec: ExecutionContext): Unit = alterOff(x ⇒ f(x))(Timeout(timeout), ec) /** * Java API: diff --git a/akka-camel/src/main/scala/akka/camel/Activation.scala b/akka-camel/src/main/scala/akka/camel/Activation.scala index 61bbae14f7..4af600215c 100644 --- a/akka-camel/src/main/scala/akka/camel/Activation.scala +++ b/akka-camel/src/main/scala/akka/camel/Activation.scala @@ -10,6 +10,7 @@ import akka.actor.{ ActorSystem, Props, ActorRef } import akka.pattern._ import scala.concurrent.util.Duration import concurrent.{ ExecutionContext, Future } +import scala.concurrent.util.FiniteDuration /** * Activation trait that can be used to wait on activation or de-activation of Camel endpoints. 
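A sketch of waiting for Camel endpoint activation with the FiniteDuration-based signature below, assuming an ActorSystem `system` and a consumer actor `consumerRef` created elsewhere:

    import scala.concurrent.Await
    import scala.concurrent.util.duration._
    import akka.camel.CamelExtension

    val camel = CamelExtension(system)            // `system` assumed to exist
    implicit val ec = system.dispatcher
    implicit val activationTimeout = 10.seconds   // FiniteDuration, picked up implicitly

    // completes with the consumer ActorRef once its endpoint is active
    val activated = Await.result(camel.activationFutureFor(consumerRef), 10.seconds)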
@@ -27,7 +28,7 @@ trait Activation { * @param endpoint the endpoint to be activated * @param timeout the timeout for the Future */ - def activationFutureFor(endpoint: ActorRef)(implicit timeout: Duration, executor: ExecutionContext): Future[ActorRef] = + def activationFutureFor(endpoint: ActorRef)(implicit timeout: FiniteDuration, executor: ExecutionContext): Future[ActorRef] = (activationTracker.ask(AwaitActivation(endpoint))(Timeout(timeout))).map[ActorRef]({ case EndpointActivated(`endpoint`) ⇒ endpoint case EndpointFailedToActivate(`endpoint`, cause) ⇒ throw cause @@ -40,7 +41,7 @@ trait Activation { * @param endpoint the endpoint to be deactivated * @param timeout the timeout of the Future */ - def deactivationFutureFor(endpoint: ActorRef)(implicit timeout: Duration, executor: ExecutionContext): Future[ActorRef] = + def deactivationFutureFor(endpoint: ActorRef)(implicit timeout: FiniteDuration, executor: ExecutionContext): Future[ActorRef] = (activationTracker.ask(AwaitDeActivation(endpoint))(Timeout(timeout))).map[ActorRef]({ case EndpointDeActivated(`endpoint`) ⇒ endpoint case EndpointFailedToDeActivate(`endpoint`, cause) ⇒ throw cause diff --git a/akka-camel/src/main/scala/akka/camel/Consumer.scala b/akka-camel/src/main/scala/akka/camel/Consumer.scala index 72daa89da0..89f7719e23 100644 --- a/akka-camel/src/main/scala/akka/camel/Consumer.scala +++ b/akka-camel/src/main/scala/akka/camel/Consumer.scala @@ -5,12 +5,12 @@ package akka.camel import language.postfixOps - import internal.component.DurationTypeConverter import org.apache.camel.model.{ RouteDefinition, ProcessorDefinition } import akka.actor._ import scala.concurrent.util.Duration import scala.concurrent.util.duration._ +import scala.concurrent.util.FiniteDuration /** * Mixed in by Actor implementations that consume message from Camel endpoints. @@ -41,7 +41,7 @@ trait ConsumerConfig { this: CamelSupport ⇒ /** * How long the actor should wait for activation before it fails. */ - def activationTimeout: Duration = camel.settings.activationTimeout + def activationTimeout: FiniteDuration = camel.settings.activationTimeout /** * When endpoint is out-capable (can produce responses) replyTimeout is the maximum time diff --git a/akka-camel/src/main/scala/akka/camel/internal/ConsumerRegistry.scala b/akka-camel/src/main/scala/akka/camel/internal/ConsumerRegistry.scala index c69c2f55bf..37fec6e1d0 100644 --- a/akka-camel/src/main/scala/akka/camel/internal/ConsumerRegistry.scala +++ b/akka-camel/src/main/scala/akka/camel/internal/ConsumerRegistry.scala @@ -7,9 +7,7 @@ package akka.camel.internal import akka.camel._ import component.CamelPath import java.io.InputStream - import org.apache.camel.builder.RouteBuilder - import akka.actor._ import collection.mutable import org.apache.camel.model.RouteDefinition @@ -17,6 +15,7 @@ import org.apache.camel.CamelContext import scala.concurrent.util.Duration import concurrent.Await import akka.util.Timeout +import scala.concurrent.util.FiniteDuration /** * For internal use only. @@ -38,7 +37,7 @@ private[camel] trait ConsumerRegistry { this: Activation ⇒ * @param activationTimeout the timeout for activation * @return the actorRef to the consumer */ - private[camel] def registerConsumer(endpointUri: String, consumer: Consumer, activationTimeout: Duration) = { + private[camel] def registerConsumer(endpointUri: String, consumer: Consumer, activationTimeout: FiniteDuration) = { idempotentRegistry ! 
RegisterConsumer(endpointUri, consumer.self, consumer) Await.result(activationFutureFor(consumer.self)(activationTimeout, consumer.context.dispatcher), activationTimeout) } diff --git a/akka-camel/src/main/scala/akka/camel/internal/DefaultCamel.scala b/akka-camel/src/main/scala/akka/camel/internal/DefaultCamel.scala index df681ba98a..41aaacdf8c 100644 --- a/akka-camel/src/main/scala/akka/camel/internal/DefaultCamel.scala +++ b/akka-camel/src/main/scala/akka/camel/internal/DefaultCamel.scala @@ -8,8 +8,8 @@ import akka.event.Logging import akka.camel.{ CamelSettings, Camel } import scala.util.control.NonFatal import scala.concurrent.util.Duration - import org.apache.camel.{ ProducerTemplate, CamelContext } +import scala.concurrent.util.FiniteDuration /** * For internal use only. @@ -32,7 +32,7 @@ private[camel] class DefaultCamel(val system: ActorSystem) extends Camel { ctx.setName(system.name) ctx.setStreamCaching(true) ctx.addComponent("akka", new ActorComponent(this, system)) - ctx.getTypeConverterRegistry.addTypeConverter(classOf[Duration], classOf[String], DurationTypeConverter) + ctx.getTypeConverterRegistry.addTypeConverter(classOf[FiniteDuration], classOf[String], DurationTypeConverter) ctx } diff --git a/akka-camel/src/main/scala/akka/camel/internal/component/ActorComponent.scala b/akka-camel/src/main/scala/akka/camel/internal/component/ActorComponent.scala index 159ec437c5..61c4cdb1fb 100644 --- a/akka-camel/src/main/scala/akka/camel/internal/component/ActorComponent.scala +++ b/akka-camel/src/main/scala/akka/camel/internal/component/ActorComponent.scala @@ -5,15 +5,11 @@ package akka.camel.internal.component import language.postfixOps - import java.util.{ Map ⇒ JMap } - import org.apache.camel._ import org.apache.camel.impl.{ DefaultProducer, DefaultEndpoint, DefaultComponent } - import akka.actor._ import akka.pattern._ - import scala.reflect.BeanProperty import scala.concurrent.util.duration._ import scala.concurrent.util.Duration @@ -25,6 +21,7 @@ import akka.camel.internal.CamelExchangeAdapter import akka.camel.{ ActorNotRegisteredException, Camel, Ack, FailureResult, CamelMessage } import support.TypeConverterSupport import scala.util.{ Failure, Success, Try } +import scala.concurrent.util.FiniteDuration /** * For internal use only. 
@@ -98,7 +95,7 @@ private[camel] trait ActorEndpointConfig { def path: ActorEndpointPath def camel: Camel - @BeanProperty var replyTimeout: Duration = camel.settings.replyTimeout + @BeanProperty var replyTimeout: FiniteDuration = camel.settings.replyTimeout @BeanProperty var autoAck: Boolean = camel.settings.autoAck } diff --git a/akka-camel/src/test/java/akka/camel/ConsumerJavaTestBase.java b/akka-camel/src/test/java/akka/camel/ConsumerJavaTestBase.java index b30f541bb1..e1e87f8903 100644 --- a/akka-camel/src/test/java/akka/camel/ConsumerJavaTestBase.java +++ b/akka-camel/src/test/java/akka/camel/ConsumerJavaTestBase.java @@ -4,20 +4,22 @@ package akka.camel; -import akka.actor.ActorRef; -import akka.actor.ActorSystem; -import akka.actor.Props; -import akka.testkit.JavaTestKit; +import static org.junit.Assert.assertEquals; + +import java.util.concurrent.TimeUnit; + +import org.junit.AfterClass; +import org.junit.Test; + import scala.concurrent.Await; import scala.concurrent.ExecutionContext; import scala.concurrent.util.Duration; -import org.junit.AfterClass; -import org.junit.Test; -import java.util.concurrent.TimeUnit; +import scala.concurrent.util.FiniteDuration; +import akka.actor.ActorRef; +import akka.actor.ActorSystem; +import akka.actor.Props; import akka.testkit.AkkaSpec; -import akka.testkit.JavaTestKit.EventFilter; - -import static org.junit.Assert.assertEquals; +import akka.testkit.JavaTestKit; /** @@ -37,7 +39,7 @@ public class ConsumerJavaTestBase { new JavaTestKit(system) {{ String result = new EventFilter(Exception.class) { protected String run() { - Duration timeout = Duration.create(1, TimeUnit.SECONDS); + FiniteDuration timeout = Duration.create(1, TimeUnit.SECONDS); Camel camel = CamelExtension.get(system); ExecutionContext executionContext = system.dispatcher(); try { diff --git a/akka-camel/src/test/java/akka/camel/CustomRouteTestBase.java b/akka-camel/src/test/java/akka/camel/CustomRouteTestBase.java index bb7bf4042f..5454dd0074 100644 --- a/akka-camel/src/test/java/akka/camel/CustomRouteTestBase.java +++ b/akka-camel/src/test/java/akka/camel/CustomRouteTestBase.java @@ -7,6 +7,7 @@ import akka.camel.javaapi.UntypedProducerActor; import scala.concurrent.Await; import scala.concurrent.ExecutionContext; import scala.concurrent.util.Duration; +import scala.concurrent.util.FiniteDuration; import org.apache.camel.CamelExecutionException; import org.apache.camel.Exchange; import org.apache.camel.Predicate; @@ -59,7 +60,7 @@ public class CustomRouteTestBase { @Test public void testCustomConsumerRoute() throws Exception { MockEndpoint mockEndpoint = camel.context().getEndpoint("mock:mockConsumer", MockEndpoint.class); - Duration timeout = Duration.create(10, TimeUnit.SECONDS); + FiniteDuration timeout = Duration.create(10, TimeUnit.SECONDS); ExecutionContext executionContext = system.dispatcher(); ActorRef consumer = Await.result( camel.activationFutureFor(system.actorOf(new Props(TestConsumer.class), "testConsumer"), timeout, executionContext), @@ -73,7 +74,7 @@ public class CustomRouteTestBase { @Test public void testCustomAckConsumerRoute() throws Exception { MockEndpoint mockEndpoint = camel.context().getEndpoint("mock:mockAck", MockEndpoint.class); - Duration timeout = Duration.create(10, TimeUnit.SECONDS); + FiniteDuration timeout = Duration.create(10, TimeUnit.SECONDS); ExecutionContext executionContext = system.dispatcher(); ActorRef consumer = Await.result( camel.activationFutureFor( @@ -91,7 +92,7 @@ public class CustomRouteTestBase { public void 
testCustomAckConsumerRouteFromUri() throws Exception { MockEndpoint mockEndpoint = camel.context().getEndpoint("mock:mockAckUri", MockEndpoint.class); ExecutionContext executionContext = system.dispatcher(); - Duration timeout = Duration.create(10, TimeUnit.SECONDS); + FiniteDuration timeout = Duration.create(10, TimeUnit.SECONDS); ActorRef consumer = Await.result( camel.activationFutureFor(system.actorOf(new Props( new UntypedActorFactory(){ public Actor create() { return new TestAckConsumer("direct:testConsumerAckFromUri","mock:mockAckUri"); } }), "testConsumerAckUri"), timeout, executionContext), @@ -104,7 +105,7 @@ public class CustomRouteTestBase { @Test(expected=CamelExecutionException.class) public void testCustomTimeoutConsumerRoute() throws Exception { - Duration timeout = Duration.create(10, TimeUnit.SECONDS); + FiniteDuration timeout = Duration.create(10, TimeUnit.SECONDS); ExecutionContext executionContext = system.dispatcher(); ActorRef consumer = Await.result( camel.activationFutureFor(system.actorOf(new Props( new UntypedActorFactory(){ public Actor create() { return new TestAckConsumer("direct:testConsumerException","mock:mockException"); } }), "testConsumerException"), diff --git a/akka-camel/src/test/scala/akka/camel/internal/ActivationTrackerTest.scala b/akka-camel/src/test/scala/akka/camel/internal/ActivationTrackerTest.scala index 8b251c4f9a..ddff6017ec 100644 --- a/akka-camel/src/test/scala/akka/camel/internal/ActivationTrackerTest.scala +++ b/akka-camel/src/test/scala/akka/camel/internal/ActivationTrackerTest.scala @@ -1,7 +1,6 @@ package akka.camel.internal import language.postfixOps - import org.scalatest.matchers.MustMatchers import scala.concurrent.util.duration._ import org.scalatest.{ GivenWhenThen, BeforeAndAfterEach, BeforeAndAfterAll, WordSpec } @@ -9,6 +8,7 @@ import akka.actor.{ Props, ActorSystem } import scala.concurrent.util.Duration import akka.camel._ import akka.testkit.{ TimingTest, TestProbe, TestKit } +import scala.concurrent.util.FiniteDuration class ActivationTrackerTest extends TestKit(ActorSystem("test")) with WordSpec with MustMatchers with BeforeAndAfterAll with BeforeAndAfterEach with GivenWhenThen { @@ -115,11 +115,11 @@ class ActivationTrackerTest extends TestKit(ActorSystem("test")) with WordSpec w val probe = TestProbe() def awaitActivation() = at.tell(AwaitActivation(actor.ref), probe.ref) def awaitDeActivation() = at.tell(AwaitDeActivation(actor.ref), probe.ref) - def verifyActivated(timeout: Duration = 50 millis) = within(timeout) { probe.expectMsg(EndpointActivated(actor.ref)) } - def verifyDeActivated(timeout: Duration = 50 millis) = within(timeout) { probe.expectMsg(EndpointDeActivated(actor.ref)) } + def verifyActivated(timeout: FiniteDuration = 50 millis) = within(timeout) { probe.expectMsg(EndpointActivated(actor.ref)) } + def verifyDeActivated(timeout: FiniteDuration = 50 millis) = within(timeout) { probe.expectMsg(EndpointDeActivated(actor.ref)) } - def verifyFailedToActivate(timeout: Duration = 50 millis) = within(timeout) { probe.expectMsg(EndpointFailedToActivate(actor.ref, cause)) } - def verifyFailedToDeActivate(timeout: Duration = 50 millis) = within(timeout) { probe.expectMsg(EndpointFailedToDeActivate(actor.ref, cause)) } + def verifyFailedToActivate(timeout: FiniteDuration = 50 millis) = within(timeout) { probe.expectMsg(EndpointFailedToActivate(actor.ref, cause)) } + def verifyFailedToDeActivate(timeout: FiniteDuration = 50 millis) = within(timeout) { probe.expectMsg(EndpointFailedToDeActivate(actor.ref, cause)) } } 
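As an illustrative aside (not part of the patch itself): the recurring change in the camel test hunks above narrows timeout parameters from ``Duration`` to ``FiniteDuration``. A minimal sketch of why defaults like ``50 millis`` keep compiling after the tightening, assuming the 2.10-era ``scala.concurrent.util`` package used throughout this diff::

    import scala.concurrent.util.{ Duration, FiniteDuration }
    import scala.concurrent.util.duration._

    object FiniteTimeoutSketch extends App {
      // the duration DSL already yields FiniteDuration, so finite defaults still work
      def verifyActivated(timeout: FiniteDuration = 50.millis): Unit =
        println(s"would wait up to $timeout")

      verifyActivated()          // uses the finite default
      verifyActivated(2.seconds) // any finite value is accepted
      // verifyActivated(Duration.Inf) // no longer compiles: Inf is not a FiniteDuration
    }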
diff --git a/akka-camel/src/test/scala/akka/camel/internal/component/ActorProducerTest.scala b/akka-camel/src/test/scala/akka/camel/internal/component/ActorProducerTest.scala index d4e1ac0ad5..09f9431cc9 100644 --- a/akka-camel/src/test/scala/akka/camel/internal/component/ActorProducerTest.scala +++ b/akka-camel/src/test/scala/akka/camel/internal/component/ActorProducerTest.scala @@ -5,7 +5,6 @@ package akka.camel.internal.component import language.postfixOps - import org.scalatest.mock.MockitoSugar import org.mockito.Matchers.any import org.mockito.Mockito._ @@ -27,6 +26,7 @@ import com.typesafe.config.ConfigFactory import akka.actor.ActorSystem.Settings import akka.event.LoggingAdapter import akka.testkit.{ TimingTest, TestKit, TestProbe } +import scala.concurrent.util.FiniteDuration class ActorProducerTest extends TestKit(ActorSystem("test")) with WordSpec with MustMatchers with ActorProducerFixture { @@ -303,7 +303,7 @@ trait ActorProducerFixture extends MockitoSugar with BeforeAndAfterAll with Befo def msg(s: String) = CamelMessage(s, Map.empty) - def given(actor: ActorRef = probe.ref, outCapable: Boolean = true, autoAck: Boolean = true, replyTimeout: Duration = Int.MaxValue seconds) = { + def given(actor: ActorRef = probe.ref, outCapable: Boolean = true, autoAck: Boolean = true, replyTimeout: FiniteDuration = Int.MaxValue seconds) = { prepareMocks(actor, outCapable = outCapable) new ActorProducer(configure(isAutoAck = autoAck, _replyTimeout = replyTimeout), camel) } @@ -325,16 +325,16 @@ trait ActorProducerFixture extends MockitoSugar with BeforeAndAfterAll with Befo callbackReceived.countDown() } - private[this] def valueWithin(implicit timeout: Duration) = + private[this] def valueWithin(implicit timeout: FiniteDuration) = if (!callbackReceived.await(timeout.toNanos, TimeUnit.NANOSECONDS)) fail("Callback not received!") else callbackValue.get - def expectDoneSyncWithin(implicit timeout: Duration): Unit = if (!valueWithin(timeout)) fail("Expected to be done Synchronously") - def expectDoneAsyncWithin(implicit timeout: Duration): Unit = if (valueWithin(timeout)) fail("Expected to be done Asynchronously") + def expectDoneSyncWithin(implicit timeout: FiniteDuration): Unit = if (!valueWithin(timeout)) fail("Expected to be done Synchronously") + def expectDoneAsyncWithin(implicit timeout: FiniteDuration): Unit = if (valueWithin(timeout)) fail("Expected to be done Asynchronously") } - def configure(endpointUri: String = "test-uri", isAutoAck: Boolean = true, _replyTimeout: Duration = Int.MaxValue seconds) = { + def configure(endpointUri: String = "test-uri", isAutoAck: Boolean = true, _replyTimeout: FiniteDuration = Int.MaxValue seconds) = { val endpoint = new ActorEndpoint(endpointUri, actorComponent, actorEndpointPath, camel) endpoint.autoAck = isAutoAck endpoint.replyTimeout = _replyTimeout diff --git a/akka-cluster/src/main/scala/akka/cluster/Cluster.scala b/akka-cluster/src/main/scala/akka/cluster/Cluster.scala index 09feaeb656..48e30080be 100644 --- a/akka-cluster/src/main/scala/akka/cluster/Cluster.scala +++ b/akka-cluster/src/main/scala/akka/cluster/Cluster.scala @@ -5,7 +5,6 @@ package akka.cluster import language.implicitConversions - import akka.actor._ import akka.actor.Status._ import akka.ConfigurationException @@ -20,13 +19,12 @@ import scala.concurrent.util.{ Duration, Deadline } import scala.concurrent.forkjoin.ThreadLocalRandom import scala.annotation.tailrec import scala.collection.immutable.SortedSet - import java.io.Closeable import 
java.util.concurrent.atomic.AtomicBoolean import java.util.concurrent.atomic.AtomicReference - import akka.util.internal.HashedWheelTimer import concurrent.{ ExecutionContext, Await } +import scala.concurrent.util.FiniteDuration /** * Cluster Extension Id and factory for creating Cluster extension. @@ -111,26 +109,26 @@ class Cluster(val system: ExtendedActorSystem) extends Extension { new Scheduler with Closeable { override def close(): Unit = () // we are using system.scheduler, which we are not responsible for closing - override def schedule(initialDelay: Duration, frequency: Duration, + override def schedule(initialDelay: FiniteDuration, interval: FiniteDuration, receiver: ActorRef, message: Any)(implicit executor: ExecutionContext): Cancellable = - systemScheduler.schedule(initialDelay, frequency, receiver, message) + systemScheduler.schedule(initialDelay, interval, receiver, message) - override def schedule(initialDelay: Duration, frequency: Duration)(f: ⇒ Unit)(implicit executor: ExecutionContext): Cancellable = - systemScheduler.schedule(initialDelay, frequency)(f) + override def schedule(initialDelay: FiniteDuration, interval: FiniteDuration)(f: ⇒ Unit)(implicit executor: ExecutionContext): Cancellable = + systemScheduler.schedule(initialDelay, interval)(f) - override def schedule(initialDelay: Duration, frequency: Duration, + override def schedule(initialDelay: FiniteDuration, interval: FiniteDuration, runnable: Runnable)(implicit executor: ExecutionContext): Cancellable = - systemScheduler.schedule(initialDelay, frequency, runnable) + systemScheduler.schedule(initialDelay, interval, runnable) - override def scheduleOnce(delay: Duration, + override def scheduleOnce(delay: FiniteDuration, runnable: Runnable)(implicit executor: ExecutionContext): Cancellable = systemScheduler.scheduleOnce(delay, runnable) - override def scheduleOnce(delay: Duration, receiver: ActorRef, + override def scheduleOnce(delay: FiniteDuration, receiver: ActorRef, message: Any)(implicit executor: ExecutionContext): Cancellable = systemScheduler.scheduleOnce(delay, receiver, message) - override def scheduleOnce(delay: Duration)(f: ⇒ Unit)(implicit executor: ExecutionContext): Cancellable = + override def scheduleOnce(delay: FiniteDuration)(f: ⇒ Unit)(implicit executor: ExecutionContext): Cancellable = systemScheduler.scheduleOnce(delay)(f) } } @@ -192,6 +190,24 @@ class Cluster(val system: ExtendedActorSystem) extends Extension { def unsubscribe(subscriber: ActorRef): Unit = clusterCore ! InternalClusterAction.Unsubscribe(subscriber) + /** + * Publish current (full) state of the cluster to subscribers, + * that are subscribing to [[akka.cluster.ClusterEvent.ClusterDomainEvent]] + * or [[akka.cluster.ClusterEvent.CurrentClusterState]]. + * If you want this to happen periodically you need to schedule a call to + * this method yourself. + */ + def publishCurrentClusterState(): Unit = + clusterCore ! InternalClusterAction.PublishCurrentClusterState(None) + + /** + * Publish current (full) state of the cluster to the specified + * receiver. If you want this to happen periodically you need to schedule + * a call to this method yourself. + */ + def sendCurrentClusterState(receiver: ActorRef): Unit = + clusterCore ! InternalClusterAction.PublishCurrentClusterState(Some(receiver)) + /** * Try to join this cluster node with the node specified by 'address'. * A 'Join(thisNodeAddress)' command is sent to the node to join. 
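The two publication methods added to ``Cluster`` above can be exercised from a subscribing actor; the following is a hypothetical sketch only (actor name and wiring are illustrative, not part of the patch)::

    import akka.actor.Actor
    import akka.cluster.{ Cluster, ClusterEvent }

    class ClusterStateListener extends Actor {
      val cluster = Cluster(context.system)

      // request a one-off snapshot addressed only to this actor
      override def preStart(): Unit = cluster.sendCurrentClusterState(self)

      def receive = {
        case state: ClusterEvent.CurrentClusterState ⇒
          println(s"members=${state.members.size} unreachable=${state.unreachable.size}")
      }
    }

    // elsewhere: Cluster(system).publishCurrentClusterState() broadcasts a snapshot
    // to every subscriber of ClusterDomainEvent / CurrentClusterState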
diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterActorRefProvider.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterActorRefProvider.scala index bb0634876e..03d9982bd1 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterActorRefProvider.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterActorRefProvider.scala @@ -15,7 +15,7 @@ import akka.cluster.routing.ClusterRouterConfig import akka.event.EventStream import akka.remote.RemoteActorRefProvider import akka.remote.RemoteDeployer -import akka.routing.RemoteRouterConfig +import akka.remote.routing.RemoteRouterConfig import akka.cluster.routing.ClusterRouterSettings class ClusterActorRefProvider( diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterDaemon.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterDaemon.scala index f84bf100ea..3d198572c9 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterDaemon.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterDaemon.scala @@ -16,6 +16,7 @@ import akka.cluster.MemberStatus._ import akka.cluster.ClusterEvent._ import language.existentials import language.postfixOps +import scala.concurrent.util.FiniteDuration /** * Base trait for all cluster messages. All ClusterMessage's are serializable. @@ -106,13 +107,15 @@ private[cluster] object InternalClusterAction { sealed trait SubscriptionMessage case class Subscribe(subscriber: ActorRef, to: Class[_]) extends SubscriptionMessage case class Unsubscribe(subscriber: ActorRef) extends SubscriptionMessage + /** + * @param receiver if `receiver` is defined the event will only be sent to that + * actor, otherwise it will be sent to all subscribers via the `eventStream`. + */ + case class PublishCurrentClusterState(receiver: Option[ActorRef]) extends SubscriptionMessage case class PublishChanges(oldGossip: Gossip, newGossip: Gossip) case object PublishDone - case class Ping(timestamp: Long = System.currentTimeMillis) extends ClusterMessage - case class Pong(ping: Ping, timestamp: Long = System.currentTimeMillis) extends ClusterMessage - } /** @@ -189,32 +192,32 @@ private[cluster] final class ClusterCoreDaemon extends Actor with ActorLogging { // start periodic gossip to random nodes in cluster val gossipTask = - FixedRateTask(scheduler, PeriodicTasksInitialDelay.max(GossipInterval), GossipInterval) { + FixedRateTask(scheduler, PeriodicTasksInitialDelay.max(GossipInterval).asInstanceOf[FiniteDuration], GossipInterval) { self ! GossipTick } // start periodic heartbeat to all nodes in cluster val heartbeatTask = - FixedRateTask(scheduler, PeriodicTasksInitialDelay.max(HeartbeatInterval), HeartbeatInterval) { + FixedRateTask(scheduler, PeriodicTasksInitialDelay.max(HeartbeatInterval).asInstanceOf[FiniteDuration], HeartbeatInterval) { self ! HeartbeatTick } // start periodic cluster failure detector reaping (moving nodes condemned by the failure detector to unreachable list) val failureDetectorReaperTask = - FixedRateTask(scheduler, PeriodicTasksInitialDelay.max(UnreachableNodesReaperInterval), UnreachableNodesReaperInterval) { + FixedRateTask(scheduler, PeriodicTasksInitialDelay.max(UnreachableNodesReaperInterval).asInstanceOf[FiniteDuration], UnreachableNodesReaperInterval) { self ! 
ReapUnreachableTick } // start periodic leader action management (only applies for the current leader) private val leaderActionsTask = - FixedRateTask(scheduler, PeriodicTasksInitialDelay.max(LeaderActionsInterval), LeaderActionsInterval) { + FixedRateTask(scheduler, PeriodicTasksInitialDelay.max(LeaderActionsInterval).asInstanceOf[FiniteDuration], LeaderActionsInterval) { self ! LeaderActionsTick } // start periodic publish of current state private val publishStateTask: Option[Cancellable] = if (PublishStatsInterval == Duration.Zero) None - else Some(FixedRateTask(scheduler, PeriodicTasksInitialDelay.max(PublishStatsInterval), PublishStatsInterval) { + else Some(FixedRateTask(scheduler, PeriodicTasksInitialDelay.max(PublishStatsInterval).asInstanceOf[FiniteDuration], PublishStatsInterval) { self ! PublishStatsTick }) @@ -255,7 +258,6 @@ private[cluster] final class ClusterCoreDaemon extends Actor with ActorLogging { case Remove(address) ⇒ removing(address) case SendGossipTo(address) ⇒ gossipTo(address) case msg: SubscriptionMessage ⇒ publisher forward msg - case p: Ping ⇒ ping(p) } @@ -831,7 +833,6 @@ private[cluster] final class ClusterCoreDaemon extends Actor with ActorLogging { def publishInternalStats(): Unit = publisher ! CurrentInternalStats(stats) - def ping(p: Ping): Unit = sender ! Pong(p) } /** diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterEvent.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterEvent.scala index 60172ed6a4..1747df4dbb 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterEvent.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterEvent.scala @@ -185,6 +185,7 @@ private[cluster] final class ClusterDomainEventPublisher extends Actor with Acto def receive = { case PublishChanges(oldGossip, newGossip) ⇒ publishChanges(oldGossip, newGossip) case currentStats: CurrentInternalStats ⇒ publishInternalStats(currentStats) + case PublishCurrentClusterState(receiver) ⇒ publishCurrentClusterState(receiver) case Subscribe(subscriber, to) ⇒ subscribe(subscriber, to) case Unsubscribe(subscriber) ⇒ unsubscribe(subscriber) case PublishDone ⇒ sender ! PublishDone @@ -192,13 +193,21 @@ private[cluster] final class ClusterDomainEventPublisher extends Actor with Acto def eventStream: EventStream = context.system.eventStream - def subscribe(subscriber: ActorRef, to: Class[_]): Unit = { - subscriber ! CurrentClusterState( + def publishCurrentClusterState(receiver: Option[ActorRef]): Unit = { + val state = CurrentClusterState( members = latestGossip.members, unreachable = latestGossip.overview.unreachable, convergence = latestGossip.convergence, seenBy = latestGossip.seenBy, leader = latestGossip.leader) + receiver match { + case Some(ref) ⇒ ref ! 
state + case None ⇒ eventStream publish state + } + } + + def subscribe(subscriber: ActorRef, to: Class[_]): Unit = { + publishCurrentClusterState(Some(subscriber)) eventStream.subscribe(subscriber, to) } diff --git a/akka-cluster/src/main/scala/akka/cluster/FixedRateTask.scala b/akka-cluster/src/main/scala/akka/cluster/FixedRateTask.scala index 396f5127ad..9e6eedf659 100644 --- a/akka-cluster/src/main/scala/akka/cluster/FixedRateTask.scala +++ b/akka-cluster/src/main/scala/akka/cluster/FixedRateTask.scala @@ -9,14 +9,15 @@ import java.util.concurrent.atomic.{ AtomicBoolean, AtomicLong } import akka.actor.{ Scheduler, Cancellable } import scala.concurrent.util.Duration import concurrent.ExecutionContext +import scala.concurrent.util.FiniteDuration /** * INTERNAL API */ private[akka] object FixedRateTask { def apply(scheduler: Scheduler, - initalDelay: Duration, - delay: Duration)(f: ⇒ Unit)(implicit executor: ExecutionContext): FixedRateTask = + initalDelay: FiniteDuration, + delay: FiniteDuration)(f: ⇒ Unit)(implicit executor: ExecutionContext): FixedRateTask = new FixedRateTask(scheduler, initalDelay, delay, new Runnable { def run(): Unit = f }) } @@ -28,8 +29,8 @@ private[akka] object FixedRateTask { * initialDelay. */ private[akka] class FixedRateTask(scheduler: Scheduler, - initalDelay: Duration, - delay: Duration, + initalDelay: FiniteDuration, + delay: FiniteDuration, task: Runnable)(implicit executor: ExecutionContext) extends Runnable with Cancellable { diff --git a/akka-cluster/src/main/scala/akka/cluster/routing/ClusterRouterConfig.scala b/akka-cluster/src/main/scala/akka/cluster/routing/ClusterRouterConfig.scala index d3a0b9cb88..52a9a55e21 100644 --- a/akka-cluster/src/main/scala/akka/cluster/routing/ClusterRouterConfig.scala +++ b/akka-cluster/src/main/scala/akka/cluster/routing/ClusterRouterConfig.scala @@ -28,7 +28,7 @@ import akka.routing.Route import akka.routing.RouteeProvider import akka.routing.Router import akka.routing.RouterConfig -import akka.routing.RemoteRouterConfig +import akka.remote.routing.RemoteRouterConfig import akka.actor.RootActorPath import akka.actor.ActorCell import akka.actor.RelativeActorPath diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/LargeClusterSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/LargeClusterSpec.scala index 0d122938ab..737d6be549 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/LargeClusterSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/LargeClusterSpec.scala @@ -19,6 +19,7 @@ import akka.remote.testconductor.RoleName import akka.actor.Props import akka.actor.Actor import akka.cluster.MemberStatus._ +import scala.concurrent.util.FiniteDuration object LargeClusterMultiJvmSpec extends MultiNodeConfig { // each jvm simulates a datacenter with many nodes @@ -123,8 +124,9 @@ abstract class LargeClusterSpec systems foreach { Cluster(_) } } - def expectedMaxDuration(totalNodes: Int): Duration = - 5.seconds + (2.seconds * totalNodes) + def expectedMaxDuration(totalNodes: Int): FiniteDuration = + // this cast will always succeed, but the compiler does not know about it ... 
+ (5.seconds + (2.seconds * totalNodes)).asInstanceOf[FiniteDuration] def joinAll(from: RoleName, to: RoleName, totalNodes: Int, runOnRoles: RoleName*): Unit = { val joiningClusters = systems.map(Cluster(_)).toSet @@ -272,7 +274,7 @@ abstract class LargeClusterSpec val unreachableNodes = nodesPerDatacenter val liveNodes = nodesPerDatacenter * 4 - within(30.seconds + (3.seconds * liveNodes)) { + within((30.seconds + (3.seconds * liveNodes)).asInstanceOf[FiniteDuration]) { val startGossipCounts = Map.empty[Cluster, Long] ++ systems.map(sys ⇒ (Cluster(sys) -> Cluster(sys).readView.latestStats.receivedGossipCount)) def gossipCount(c: Cluster): Long = { diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/MultiNodeClusterSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/MultiNodeClusterSpec.scala index f4594543e6..f38d80ace5 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/MultiNodeClusterSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/MultiNodeClusterSpec.scala @@ -4,12 +4,11 @@ package akka.cluster import language.implicitConversions - import com.typesafe.config.Config import com.typesafe.config.ConfigFactory import akka.actor.{ Address, ExtendedActorSystem } import akka.remote.testconductor.RoleName -import akka.remote.testkit.MultiNodeSpec +import akka.remote.testkit.{STMultiNodeSpec, MultiNodeSpec} import akka.testkit._ import scala.concurrent.util.duration._ import scala.concurrent.util.Duration @@ -18,6 +17,7 @@ import org.scalatest.exceptions.TestFailedException import java.util.concurrent.ConcurrentHashMap import akka.actor.ActorPath import akka.actor.RootActorPath +import scala.concurrent.util.FiniteDuration object MultiNodeClusterSpec { @@ -48,7 +48,7 @@ object MultiNodeClusterSpec { """) } -trait MultiNodeClusterSpec extends Suite { self: MultiNodeSpec ⇒ +trait MultiNodeClusterSpec extends Suite with STMultiNodeSpec { self: MultiNodeSpec ⇒ override def initialParticipants = roles.size @@ -175,7 +175,7 @@ trait MultiNodeClusterSpec extends Suite { self: MultiNodeSpec ⇒ def awaitUpConvergence( numberOfMembers: Int, canNotBePartOfMemberRing: Seq[Address] = Seq.empty[Address], - timeout: Duration = 20.seconds): Unit = { + timeout: FiniteDuration = 20.seconds): Unit = { within(timeout) { awaitCond(clusterView.members.size == numberOfMembers) awaitCond(clusterView.members.forall(_.status == MemberStatus.Up)) diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/TransitionSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/TransitionSpec.scala index 4df6032116..c57a03bc8e 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/TransitionSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/TransitionSpec.scala @@ -67,21 +67,11 @@ abstract class TransitionSpec memberStatus(address) == status } - def leaderActions(): Unit = { + def leaderActions(): Unit = cluster.clusterCore ! LeaderActionsTick - awaitPing() - } - def reapUnreachable(): Unit = { + def reapUnreachable(): Unit = cluster.clusterCore ! ReapUnreachableTick - awaitPing() - } - - def awaitPing(): Unit = { - val ping = Ping() - cluster.clusterCore ! 
ping - expectMsgPF() { case pong @ Pong(`ping`, _) ⇒ pong } - } // DSL sugar for `role1 gossipTo role2` implicit def roleExtras(role: RoleName): RoleWrapper = new RoleWrapper(role) diff --git a/akka-cluster/src/test/scala/akka/cluster/ClusterSpec.scala b/akka-cluster/src/test/scala/akka/cluster/ClusterSpec.scala index 6666e38cce..8edbdd1669 100644 --- a/akka-cluster/src/test/scala/akka/cluster/ClusterSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/ClusterSpec.scala @@ -6,10 +6,8 @@ package akka.cluster import language.postfixOps import language.reflectiveCalls - import scala.concurrent.util.duration._ import scala.concurrent.util.Duration - import akka.testkit.AkkaSpec import akka.testkit.ImplicitSender import akka.actor.ExtendedActorSystem @@ -17,6 +15,7 @@ import akka.actor.Address import akka.cluster.InternalClusterAction._ import java.lang.management.ManagementFactory import javax.management.ObjectName +import akka.actor.ActorRef object ClusterSpec { val config = """ @@ -45,16 +44,8 @@ class ClusterSpec extends AkkaSpec(ClusterSpec.config) with ImplicitSender { val cluster = Cluster(system) def clusterView = cluster.readView - def leaderActions(): Unit = { + def leaderActions(): Unit = cluster.clusterCore ! LeaderActionsTick - awaitPing() - } - - def awaitPing(): Unit = { - val ping = Ping() - cluster.clusterCore ! ping - expectMsgPF() { case pong @ Pong(`ping`, _) ⇒ pong } - } "A Cluster" must { @@ -79,7 +70,25 @@ class ClusterSpec extends AkkaSpec(ClusterSpec.config) with ImplicitSender { clusterView.status must be(MemberStatus.Joining) clusterView.convergence must be(true) leaderActions() - clusterView.status must be(MemberStatus.Up) + awaitCond(clusterView.status == MemberStatus.Up) + } + + "publish CurrentClusterState to subscribers when requested" in { + try { + cluster.subscribe(testActor, classOf[ClusterEvent.ClusterDomainEvent]) + // first, is in response to the subscription + expectMsgClass(classOf[ClusterEvent.ClusterDomainEvent]) + + cluster.publishCurrentClusterState() + expectMsgClass(classOf[ClusterEvent.ClusterDomainEvent]) + } finally { + cluster.unsubscribe(testActor) + } + } + + "send CurrentClusterState to one receiver when requested" in { + cluster.sendCurrentClusterState(testActor) + expectMsgClass(classOf[ClusterEvent.ClusterDomainEvent]) } } diff --git a/akka-docs/common/circuitbreaker.rst b/akka-docs/common/circuitbreaker.rst index bd13927c8e..ddce833178 100644 --- a/akka-docs/common/circuitbreaker.rst +++ b/akka-docs/common/circuitbreaker.rst @@ -51,28 +51,7 @@ What do they do? * Callbacks can be provided for every state entry via `onOpen`, `onClose`, and `onHalfOpen` * These are executed in the :class:`ExecutionContext` provided. -.. graphviz:: - - digraph circuit_breaker { - rankdir = "LR"; - size = "6,5"; - graph [ bgcolor = "transparent" ] - node [ fontname = "Helvetica", - fontsize = 14, - shape = circle, - color = white, - style = filled ]; - edge [ fontname = "Helvetica", fontsize = 12 ] - Closed [ fillcolor = green2 ]; - "Half-Open" [fillcolor = yellow2 ]; - Open [ fillcolor = red2 ]; - Closed -> Closed [ label = "Success" ]; - "Half-Open" -> Open [ label = "Trip Breaker" ]; - "Half-Open" -> Closed [ label = "Reset Breaker" ]; - Closed -> Open [ label = "Trip Breaker" ]; - Open -> Open [ label = "Calls failing fast" ]; - Open -> "Half-Open" [ label = "Attempt Reset" ]; - } +.. 
image:: ../images/circuit-breaker-states.png ======== Examples diff --git a/akka-docs/common/code/docs/circuitbreaker/DangerousJavaActor.java b/akka-docs/common/code/docs/circuitbreaker/DangerousJavaActor.java index f8df40f30a..071affb831 100644 --- a/akka-docs/common/code/docs/circuitbreaker/DangerousJavaActor.java +++ b/akka-docs/common/code/docs/circuitbreaker/DangerousJavaActor.java @@ -27,7 +27,7 @@ public class DangerousJavaActor extends UntypedActor { public DangerousJavaActor() { this.breaker = new CircuitBreaker( getContext().dispatcher(), getContext().system().scheduler(), - 5, Duration.parse("10s"), Duration.parse("1m")) + 5, Duration.create(10, "s"), Duration.create(1, "m")) .onOpen(new Callable() { public Object call() throws Exception { notifyMeOnOpen(); diff --git a/akka-docs/common/code/docs/duration/Java.java b/akka-docs/common/code/docs/duration/Java.java new file mode 100644 index 0000000000..9b7fb93c3a --- /dev/null +++ b/akka-docs/common/code/docs/duration/Java.java @@ -0,0 +1,26 @@ +/** + * Copyright (C) 2012 Typesafe Inc. + */ + +package docs.duration; + +//#import +import scala.concurrent.util.Duration; +import scala.concurrent.util.Deadline; +//#import + +class Java { + public void demo() { + //#dsl + final Duration fivesec = Duration.create(5, "seconds"); + final Duration threemillis = Duration.parse("3 millis"); + final Duration diff = fivesec.minus(threemillis); + assert diff.lt(fivesec); + assert Duration.Zero().lt(Duration.Inf()); + //#dsl + //#deadline + final Deadline deadline = Duration.create(10, "seconds").fromNow(); + final Duration rest = deadline.timeLeft(); + //#deadline + } +} diff --git a/akka-docs/common/code/docs/duration/Sample.scala b/akka-docs/common/code/docs/duration/Sample.scala new file mode 100644 index 0000000000..b9e800524e --- /dev/null +++ b/akka-docs/common/code/docs/duration/Sample.scala @@ -0,0 +1,24 @@ +/** + * Copyright (C) 2012 Typesafe Inc. + */ + +package docs.duration + +object Scala { + //#dsl + import scala.concurrent.util.duration._ // notice the small d + + val fivesec = 5.seconds + val threemillis = 3.millis + val diff = fivesec - threemillis + assert(diff < fivesec) + val fourmillis = threemillis * 4 / 3 // though you cannot write it the other way around + val n = threemillis / (1 millisecond) + //#dsl + + //#deadline + val deadline = 10.seconds.fromNow + // do something + val rest = deadline.timeLeft + //#deadline +} diff --git a/akka-docs/common/duration.rst b/akka-docs/common/duration.rst index e6bfdd4a36..c159c99a8c 100644 --- a/akka-docs/common/duration.rst +++ b/akka-docs/common/duration.rst @@ -5,9 +5,9 @@ Duration ######## Durations are used throughout the Akka library, wherefore this concept is -represented by a special data type, :class:`Duration`. Values of this type may -represent infinite (:obj:`Duration.Inf`, :obj:`Duration.MinusInf`) or finite -durations. +represented by a special data type, :class:`scala.concurrent.util.Duration`. +Values of this type may represent infinite (:obj:`Duration.Inf`, +:obj:`Duration.MinusInf`) or finite durations, or be :obj:`Duration.Undefined`. Finite vs. Infinite =================== @@ -24,18 +24,10 @@ distinguishing the two kinds at compile time: Scala ===== -In Scala durations are constructable using a mini-DSL and support all expected operations: +In Scala durations are constructable using a mini-DSL and support all expected +arithmetic operations: -.. 
code-block:: scala - - import scala.concurrent.util.duration._ // notice the small d - - val fivesec = 5.seconds - val threemillis = 3.millis - val diff = fivesec - threemillis - assert (diff < fivesec) - val fourmillis = threemillis * 4 / 3 // though you cannot write it the other way around - val n = threemillis / (1 millisecond) +.. includecode:: code/docs/duration/Sample.scala#dsl .. note:: @@ -50,26 +42,19 @@ Java Java provides less syntactic sugar, so you have to spell out the operations as method calls instead: -.. code-block:: java - - final Duration fivesec = Duration.create(5, "seconds"); - final Duration threemillis = Duration.parse("3 millis"); - final Duration diff = fivesec.minus(threemillis); - assert (diff.lt(fivesec)); - assert (Duration.Zero().lt(Duration.Inf())); +.. includecode:: code/docs/duration/Java.java#import +.. includecode:: code/docs/duration/Java.java#dsl Deadline ======== -Durations have a brother name :class:`Deadline`, which is a class holding a representation +Durations have a brother named :class:`Deadline`, which is a class holding a representation of an absolute point in time, and support deriving a duration from this by calculating the difference between now and the deadline. This is useful when you want to keep one overall -deadline without having to take care of the book-keeping wrt. the passing of time yourself:: +deadline without having to take care of the book-keeping wrt. the passing of time yourself: - val deadline = 10 seconds fromNow - // do something which takes time - awaitCond(..., deadline.timeLeft) +.. includecode:: code/docs/duration/Sample.scala#deadline -In Java you create these from durations:: +In Java you create these from durations: - final Deadline d = Duration.create(5, "seconds").fromNow(); +.. includecode:: code/docs/duration/Java.java#deadline diff --git a/akka-docs/conf.py b/akka-docs/conf.py index 63afaa3bb8..8f58ba0d87 100644 --- a/akka-docs/conf.py +++ b/akka-docs/conf.py @@ -8,7 +8,7 @@ import sys, os # -- General configuration ----------------------------------------------------- sys.path.append(os.path.abspath('_sphinx/exts')) -extensions = ['sphinx.ext.todo', 'includecode', 'sphinx.ext.graphviz'] +extensions = ['sphinx.ext.todo', 'includecode'] templates_path = ['_templates'] source_suffix = '.rst' diff --git a/akka-docs/dev/multi-jvm-testing.rst b/akka-docs/dev/multi-jvm-testing.rst index e3bdf228a9..7658324a74 100644 --- a/akka-docs/dev/multi-jvm-testing.rst +++ b/akka-docs/dev/multi-jvm-testing.rst @@ -18,9 +18,7 @@ http://github.com/typesafehub/sbt-multi-jvm You can add it as a plugin by adding the following to your project/plugins.sbt:: - resolvers += Classpaths.typesafeResolver - - addSbtPlugin("com.typesafe.sbtmultijvm" % "sbt-multi-jvm" % "0.2.0-M4") + addSbtPlugin("com.typesafe.sbt" % "sbt-multi-jvm" % "0.3.0") You can then add multi-JVM testing to ``project/Build.scala`` by including the ``MultiJvm`` settings and config. 
For example, here is an example of how the akka-remote-tests project adds @@ -30,8 +28,8 @@ multi-JVM testing (Simplified for clarity): import sbt._ import Keys._ - import com.typesafe.sbtmultijvm.MultiJvmPlugin - import com.typesafe.sbtmultijvm.MultiJvmPlugin.{ MultiJvm, extraOptions } + import com.typesafe.sbt.SbtMultiJvm + import com.typesafe.sbt.SbtMultiJvm.MultiJvmKeys.{ MultiJvm, extraOptions } object AkkaBuild extends Build { @@ -49,7 +47,7 @@ multi-JVM testing (Simplified for clarity): ) ) configs (MultiJvm) - lazy val buildSettings = Defaults.defaultSettings ++ Seq( + lazy val buildSettings = Defaults.defaultSettings ++ SbtMultiJvm.multiJvmSettings ++ Seq( organization := "com.typesafe.akka", version := "2.1-SNAPSHOT", scalaVersion := "|scalaVersion|", @@ -78,14 +76,14 @@ the sbt prompt): .. code-block:: none - akka-remote/multi-jvm:test + akka-remote-tests/multi-jvm:test -Or one can change to the ``akka-remote`` project first, and then run the +Or one can change to the ``akka-remote-tests`` project first, and then run the tests: .. code-block:: none - project akka-remote + project akka-remote-tests multi-jvm:test To run individual tests use ``test-only``: diff --git a/akka-docs/general/addressing.rst b/akka-docs/general/addressing.rst index c59fba3808..34c64d96cb 100644 --- a/akka-docs/general/addressing.rst +++ b/akka-docs/general/addressing.rst @@ -210,7 +210,7 @@ Querying the Logical Actor Hierarchy ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Since the actor system forms a file-system like hierarchy, matching on paths is -possible in the same was as supported by Unix shells: you may replace (parts +possible in the same way as supported by Unix shells: you may replace (parts of) path element names with wildcards (`«*»` and `«?»`) to formulate a selection which may match zero or more actual actors. Because the result is not a single actor reference, it has a different type :class:`ActorSelection` and diff --git a/akka-docs/general/jmm.rst b/akka-docs/general/jmm.rst index 6d088c0c47..085a347451 100644 --- a/akka-docs/general/jmm.rst +++ b/akka-docs/general/jmm.rst @@ -45,8 +45,8 @@ To prevent visibility and reordering problems on actors, Akka guarantees the fol .. note:: - In layman's terms this means that changes to internal fields of the actor is visible when the next message - is processed by that actor. So fields in your actor does not need to be volatile or equivalent. + In layman's terms this means that changes to internal fields of the actor are visible when the next message + is processed by that actor. So fields in your actor need not be volatile or equivalent. Both rules only apply for the same actor instance and are not valid if different actors are used. 
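The jmm.rst wording above describes the actor send/receive happens-before guarantee; a tiny illustrative sketch (not part of the patch) of what it means in practice::

    import akka.actor.Actor

    // `count` is a plain var: no @volatile or locking is required, because each
    // message is processed under the actor's happens-before rule, even though
    // successive messages may be handled on different threads.
    class Counter extends Actor {
      var count = 0

      def receive = {
        case "inc"  ⇒ count += 1
        case "read" ⇒ sender ! count
      }
    }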
diff --git a/akka-docs/images/circuit-breaker-states.dot b/akka-docs/images/circuit-breaker-states.dot new file mode 100644 index 0000000000..be7b628029 --- /dev/null +++ b/akka-docs/images/circuit-breaker-states.dot @@ -0,0 +1,19 @@ + digraph circuit_breaker { + rankdir = "LR"; + size = "6,5"; + graph [ bgcolor = "transparent" ] + node [ fontname = "Helvetica", + fontsize = 14, + shape = circle, + color = white, + style = filled ]; + edge [ fontname = "Helvetica", fontsize = 12 ] + Closed [ fillcolor = green2 ]; + "Half-Open" [fillcolor = yellow2 ]; + Open [ fillcolor = red2 ]; + Closed -> Closed [ label = "Success" ]; + "Half-Open" -> Open [ label = "Trip Breaker" ]; + "Half-Open" -> Closed [ label = "Reset Breaker" ]; + Closed -> Open [ label = "Trip Breaker" ]; + Open -> Open [ label = "Calls failing fast" ]; + Open -> "Half-Open" [ label = "Attempt Reset" ]; \ No newline at end of file diff --git a/akka-docs/images/circuit-breaker-states.png b/akka-docs/images/circuit-breaker-states.png new file mode 100644 index 0000000000..fc1d6a2d7f Binary files /dev/null and b/akka-docs/images/circuit-breaker-states.png differ diff --git a/akka-docs/java/code/docs/actor/japi/FaultHandlingDocSample.java b/akka-docs/java/code/docs/actor/japi/FaultHandlingDocSample.java index ff1e243aaf..70d3c35142 100644 --- a/akka-docs/java/code/docs/actor/japi/FaultHandlingDocSample.java +++ b/akka-docs/java/code/docs/actor/japi/FaultHandlingDocSample.java @@ -109,7 +109,7 @@ public class FaultHandlingDocSample { */ public static class Worker extends UntypedActor { final LoggingAdapter log = Logging.getLogger(getContext().system(), this); - final Timeout askTimeout = new Timeout(Duration.parse("5 seconds")); + final Timeout askTimeout = new Timeout(Duration.create(5, "seconds")); // The sender of the initial Start message will continuously be notified about progress ActorRef progressListener; @@ -139,7 +139,7 @@ public class FaultHandlingDocSample { if (msg.equals(Start) && progressListener == null) { progressListener = getSender(); getContext().system().scheduler().schedule( - Duration.Zero(), Duration.parse("1 second"), getSelf(), Do, getContext().dispatcher() + Duration.Zero(), Duration.create(1, "second"), getSelf(), Do, getContext().dispatcher() ); } else if (msg.equals(Do)) { counterService.tell(new Increment(1), getSelf()); @@ -299,7 +299,7 @@ public class FaultHandlingDocSample { counter.tell(new UseStorage(null), getSelf()); // Try to re-establish storage after while getContext().system().scheduler().scheduleOnce( - Duration.parse("10 seconds"), getSelf(), Reconnect, getContext().dispatcher() + Duration.create(10, "seconds"), getSelf(), Reconnect, getContext().dispatcher() ); } else if (msg.equals(Reconnect)) { // Re-establish storage after the scheduled delay diff --git a/akka-docs/java/code/docs/future/FutureDocTestBase.java b/akka-docs/java/code/docs/future/FutureDocTestBase.java index 9ee643c17e..ca23065661 100644 --- a/akka-docs/java/code/docs/future/FutureDocTestBase.java +++ b/akka-docs/java/code/docs/future/FutureDocTestBase.java @@ -106,7 +106,7 @@ public class FutureDocTestBase { ActorRef actor = system.actorOf(new Props(MyActor.class)); String msg = "hello"; //#ask-blocking - Timeout timeout = new Timeout(Duration.parse("5 seconds")); + Timeout timeout = new Timeout(Duration.create(5, "seconds")); Future future = Patterns.ask(actor, msg, timeout); String result = (String) Await.result(future, timeout.duration()); //#ask-blocking @@ -518,7 +518,7 @@ public class FutureDocTestBase { //#after 
final ExecutionContext ec = system.dispatcher(); Future failExc = Futures.failed(new IllegalStateException("OHNOES1")); - Future delayed = Patterns.after(Duration.parse("500 millis"), + Future delayed = Patterns.after(Duration.create(500, "millis"), system.scheduler(), ec, failExc); Future future = future(new Callable() { public String call() throws InterruptedException { diff --git a/akka-docs/java/code/docs/jrouting/ConsistentHashingRouterDocTest.scala b/akka-docs/java/code/docs/jrouting/ConsistentHashingRouterDocTest.scala new file mode 100644 index 0000000000..a043f9cb73 --- /dev/null +++ b/akka-docs/java/code/docs/jrouting/ConsistentHashingRouterDocTest.scala @@ -0,0 +1,5 @@ +package docs.jrouting; + +import org.scalatest.junit.JUnitSuite + +class ConsistentHashingRouterDocTest extends ConsistentHashingRouterDocTestBase with JUnitSuite diff --git a/akka-docs/java/code/docs/jrouting/ConsistentHashingRouterDocTestBase.java b/akka-docs/java/code/docs/jrouting/ConsistentHashingRouterDocTestBase.java new file mode 100644 index 0000000000..378049578f --- /dev/null +++ b/akka-docs/java/code/docs/jrouting/ConsistentHashingRouterDocTestBase.java @@ -0,0 +1,136 @@ +/** + * Copyright (C) 2009-2012 Typesafe Inc. + */ +package docs.jrouting; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; +import akka.testkit.JavaTestKit; +import akka.actor.ActorSystem; + +//#imports1 +import akka.actor.UntypedActor; +import akka.routing.ConsistentHashingRouter.ConsistentHashable; +import java.util.Map; +import java.util.HashMap; +import java.io.Serializable; +//#imports1 + +//#imports2 +import akka.actor.Props; +import akka.actor.ActorRef; +import akka.routing.ConsistentHashingRouter; +import akka.routing.ConsistentHashingRouter.ConsistentHashMapper; +import akka.routing.ConsistentHashingRouter.ConsistentHashableEnvelope; +//#imports2 + +public class ConsistentHashingRouterDocTestBase { + + static ActorSystem system; + + @BeforeClass + public static void setup() { + system = ActorSystem.create(); + } + + @AfterClass + public static void teardown() { + system.shutdown(); + } + + //#cache-actor + + public static class Cache extends UntypedActor { + Map cache = new HashMap(); + + public void onReceive(Object msg) { + if (msg instanceof Entry) { + Entry entry = (Entry) msg; + cache.put(entry.key, entry.value); + } else if (msg instanceof Get) { + Get get = (Get) msg; + Object value = cache.get(get.key); + getSender().tell(value == null ? 
NOT_FOUND : value, + getContext().self()); + } else if (msg instanceof Evict) { + Evict evict = (Evict) msg; + cache.remove(evict.key); + } else { + unhandled(msg); + } + } + } + + public static final class Evict implements Serializable { + public final String key; + public Evict(String key) { + this.key = key; + } + } + + public static final class Get implements Serializable, ConsistentHashable { + public final String key; + public Get(String key) { + this.key = key; + } + public Object consistentHashKey() { + return key; + } + } + + public static final class Entry implements Serializable { + public final String key; + public final String value; + public Entry(String key, String value) { + this.key = key; + this.value = value; + } + } + + public static final String NOT_FOUND = "NOT_FOUND"; + //#cache-actor + + + @Test + public void demonstrateUsageOfConsistentHashableRouter() { + + new JavaTestKit(system) {{ + + //#consistent-hashing-router + + final ConsistentHashMapper hashMapper = new ConsistentHashMapper() { + @Override + public Object hashKey(Object message) { + if (message instanceof Evict) { + return ((Evict) message).key; + } else { + return null; + } + } + }; + + ActorRef cache = system.actorOf(new Props(Cache.class).withRouter( + new ConsistentHashingRouter(10).withHashMapper(hashMapper)), + "cache"); + + cache.tell(new ConsistentHashableEnvelope( + new Entry("hello", "HELLO"), "hello"), getRef()); + cache.tell(new ConsistentHashableEnvelope( + new Entry("hi", "HI"), "hi"), getRef()); + + cache.tell(new Get("hello"), getRef()); + expectMsgEquals("HELLO"); + + cache.tell(new Get("hi"), getRef()); + expectMsgEquals("HI"); + + cache.tell(new Evict("hi"), getRef()); + cache.tell(new Get("hi"), getRef()); + expectMsgEquals(NOT_FOUND); + + //#consistent-hashing-router + }}; + } + +} diff --git a/akka-docs/java/code/docs/jrouting/CustomRouterDocTestBase.java b/akka-docs/java/code/docs/jrouting/CustomRouterDocTestBase.java index 42ebc6f89e..d47419ee60 100644 --- a/akka-docs/java/code/docs/jrouting/CustomRouterDocTestBase.java +++ b/akka-docs/java/code/docs/jrouting/CustomRouterDocTestBase.java @@ -72,7 +72,7 @@ public class CustomRouterDocTestBase { routedActor.tell(RepublicanVote); routedActor.tell(DemocratVote); routedActor.tell(RepublicanVote); - Timeout timeout = new Timeout(Duration.parse("1 seconds")); + Timeout timeout = new Timeout(Duration.create(1, "seconds")); Future democratsResult = ask(routedActor, DemocratCountResult, timeout); Future republicansResult = ask(routedActor, RepublicanCountResult, timeout); diff --git a/akka-docs/java/code/docs/jrouting/ParentActor.java b/akka-docs/java/code/docs/jrouting/ParentActor.java index c21aed2dc6..21ce628964 100644 --- a/akka-docs/java/code/docs/jrouting/ParentActor.java +++ b/akka-docs/java/code/docs/jrouting/ParentActor.java @@ -53,8 +53,8 @@ public class ParentActor extends UntypedActor { //#scatterGatherFirstCompletedRouter ActorRef scatterGatherFirstCompletedRouter = getContext().actorOf( new Props(FibonacciActor.class).withRouter(new ScatterGatherFirstCompletedRouter(5, Duration - .parse("2 seconds"))), "router"); - Timeout timeout = new Timeout(Duration.parse("5 seconds")); + .create(2, "seconds"))), "router"); + Timeout timeout = new Timeout(Duration.create(5, "seconds")); Future futureResult = akka.pattern.Patterns.ask(scatterGatherFirstCompletedRouter, new FibonacciActor.FibonacciNumber(10), timeout); int result = (Integer) Await.result(futureResult, timeout.duration()); diff --git 
a/akka-docs/java/code/docs/jrouting/RouterViaProgramExample.java b/akka-docs/java/code/docs/jrouting/RouterViaProgramExample.java index 72843b44c6..99b924d09b 100644 --- a/akka-docs/java/code/docs/jrouting/RouterViaProgramExample.java +++ b/akka-docs/java/code/docs/jrouting/RouterViaProgramExample.java @@ -5,7 +5,7 @@ package docs.jrouting; import akka.routing.RoundRobinRouter; import akka.routing.DefaultResizer; -import akka.routing.RemoteRouterConfig; +import akka.remote.routing.RemoteRouterConfig; import akka.actor.ActorRef; import akka.actor.Props; import akka.actor.UntypedActor; diff --git a/akka-docs/java/code/docs/testkit/TestKitDocTest.java b/akka-docs/java/code/docs/testkit/TestKitDocTest.java index 0b8b5c7a4e..f0c2263c3e 100644 --- a/akka-docs/java/code/docs/testkit/TestKitDocTest.java +++ b/akka-docs/java/code/docs/testkit/TestKitDocTest.java @@ -98,7 +98,7 @@ public class TestKitDocTest { //#test-within new JavaTestKit(system) {{ getRef().tell(42); - new Within(Duration.Zero(), Duration.parse("1 second")) { + new Within(Duration.Zero(), Duration.create(1, "second")) { // do not put code outside this method, will run afterwards public void run() { assertEquals((Integer) 42, expectMsgClass(Integer.class)); diff --git a/akka-docs/java/code/docs/zeromq/ZeromqDocTestBase.java b/akka-docs/java/code/docs/zeromq/ZeromqDocTestBase.java index f7b246bc57..c392cf131f 100644 --- a/akka-docs/java/code/docs/zeromq/ZeromqDocTestBase.java +++ b/akka-docs/java/code/docs/zeromq/ZeromqDocTestBase.java @@ -187,7 +187,7 @@ public class ZeromqDocTestBase { @Override public void preStart() { getContext().system().scheduler() - .schedule(Duration.parse("1 second"), Duration.parse("1 second"), getSelf(), TICK, getContext().dispatcher()); + .schedule(Duration.create(1, "second"), Duration.create(1, "second"), getSelf(), TICK, getContext().dispatcher()); } @Override diff --git a/akka-docs/java/routing.rst b/akka-docs/java/routing.rst index 845e2825e3..4653a07692 100644 --- a/akka-docs/java/routing.rst +++ b/akka-docs/java/routing.rst @@ -15,13 +15,14 @@ is really easy to create your own. The routers shipped with Akka are: * ``akka.routing.SmallestMailboxRouter`` * ``akka.routing.BroadcastRouter`` * ``akka.routing.ScatterGatherFirstCompletedRouter`` +* ``akka.routing.ConsistentHashingRouter`` Routers In Action ^^^^^^^^^^^^^^^^^ This is an example of how to create a router that is defined in configuration: -.. includecode:: ../scala/code/docs/routing/RouterViaConfigExample.scala#config +.. includecode:: ../scala/code/docs/routing/RouterViaConfigDocSpec.scala#config-round-robin .. includecode:: code/docs/jrouting/RouterViaConfigExample.java#configurableRouting @@ -177,6 +178,10 @@ is exactly what you would expect from a round-robin router to happen. (The name of an actor is automatically created in the format ``$letter`` unless you specify it - hence the names printed above.) +This is an example of how to define a round-robin router in configuration: + +.. includecode:: ../scala/code/docs/routing/RouterViaConfigDocSpec.scala#config-round-robin + RandomRouter ************ As the name implies this router type selects one of its routees randomly and forwards @@ -204,6 +209,10 @@ When run you should see a similar output to this: The result from running the random router should be different, or at least random, every time you run it. Try to run it a couple of times to verify its behavior if you don't trust us. +This is an example of how to define a random router in configuration: + +.. 
includecode:: ../scala/code/docs/routing/RouterViaConfigDocSpec.scala#config-random + SmallestMailboxRouter ********************* A Router that tries to send to the non-suspended routee with fewest messages in mailbox. @@ -219,6 +228,10 @@ Code example: .. includecode:: code/docs/jrouting/ParentActor.java#smallestMailboxRouter +This is an example of how to define a smallest-mailbox router in configuration: + +.. includecode:: ../scala/code/docs/routing/RouterViaConfigDocSpec.scala#config-smallest-mailbox + BroadcastRouter *************** A broadcast router forwards the message it receives to *all* its routees. @@ -238,6 +251,10 @@ When run you should see a similar output to this: As you can see here above each of the routees, five in total, received the broadcast message. +This is an example of how to define a broadcast router in configuration: + +.. includecode:: ../scala/code/docs/routing/RouterViaConfigDocSpec.scala#config-broadcast + ScatterGatherFirstCompletedRouter ********************************* The ScatterGatherFirstCompletedRouter will send the message on to all its routees as a future. @@ -255,6 +272,51 @@ When run you should see this: From the output above you can't really see that all the routees performed the calculation, but they did! The result you see is from the first routee that returned its calculation to the router. +This is an example of how to define a scatter-gather router in configuration: + +.. includecode:: ../scala/code/docs/routing/RouterViaConfigDocSpec.scala#config-scatter-gather + +ConsistentHashingRouter +*********************** + +The ConsistentHashingRouter uses `consistent hashing `_ +to select a connection based on the sent message. This +`article `_ gives good +insight into how consistent hashing is implemented. + +There are 3 ways to define what data to use for the consistent hash key. + +* You can define ``withHashMapper`` of the router to map incoming + messages to their consistent hash key. This makes the decision + transparent for the sender. + +* The messages may implement ``akka.routing.ConsistentHashingRouter.ConsistentHashable``. + The key is part of the message and it's convenient to define it together + with the message definition. + +* The messages can be wrapped in a ``akka.routing.ConsistentHashingRouter.ConsistentHashableEnvelope`` + to define what data to use for the consistent hash key. The sender knows + the key to use. + +These ways to define the consistent hash key can be used together and at +the same time for one router. The ``withHashMapper`` is tried first. + +Code example: + +.. includecode:: code/docs/jrouting/ConsistentHashingRouterDocTestBase.java + :include: imports1,cache-actor + +.. includecode:: code/docs/jrouting/ConsistentHashingRouterDocTestBase.java + :include: imports2,consistent-hashing-router + +In the above example you see that the ``Get`` message implements ``ConsistentHashable`` itself, +while the ``Entry`` message is wrapped in a ``ConsistentHashableEnvelope``. The ``Evict`` +message is handled by the ``withHashMapper``. + +This is an example of how to define a consistent-hashing router in configuration: + +.. includecode:: ../scala/code/docs/routing/RouterViaConfigDocSpec.scala#config-consistent-hashing + Broadcast Messages ^^^^^^^^^^^^^^^^^^ @@ -276,7 +338,7 @@ of routees dynamically. This is an example of how to create a resizable router that is defined in configuration: -.. includecode:: ../scala/code/docs/routing/RouterViaConfigExample.scala#config-resize +..
includecode:: ../scala/code/docs/routing/RouterViaConfigDocSpec.scala#config-resize .. includecode:: code/docs/jrouting/RouterViaConfigExample.java#configurableRoutingWithResizer diff --git a/akka-docs/modules/code/docs/actor/mailbox/DurableMailboxDocSpec.scala b/akka-docs/modules/code/docs/actor/mailbox/DurableMailboxDocSpec.scala index 1d4f714f7c..f93a213a15 100644 --- a/akka-docs/modules/code/docs/actor/mailbox/DurableMailboxDocSpec.scala +++ b/akka-docs/modules/code/docs/actor/mailbox/DurableMailboxDocSpec.scala @@ -25,7 +25,7 @@ object DurableMailboxDocSpec { val config = """ //#dispatcher-config my-dispatcher { - mailbox-type = akka.actor.mailbox.FileBasedMailboxType + mailbox-type = akka.actor.mailbox.filebased.FileBasedMailboxType } //#dispatcher-config """ diff --git a/akka-docs/project/migration-guide-2.0.x-2.1.x.rst b/akka-docs/project/migration-guide-2.0.x-2.1.x.rst index fd5629ea3b..ec04dacdda 100644 --- a/akka-docs/project/migration-guide-2.0.x-2.1.x.rst +++ b/akka-docs/project/migration-guide-2.0.x-2.1.x.rst @@ -6,7 +6,8 @@ The 2.1 release contains several structural changes that require some simple, mechanical source-level changes in client code. Several things have -been moved to Scala standard library, such as ``Future``. +been moved to the Scala standard library, such as ``Future``, and some package +names have been changed in Remoting and Durable Mailboxes. When migrating from 1.3.x to 2.1.x you should first follow the instructions for migrating `1.3.x to 2.0.x `_. @@ -239,7 +240,7 @@ If the target actor of ``akka.pattern.gracefulStop`` isn't terminated within the timeout the ``Future`` is completed with failure ``akka.pattern.AskTimeoutException``. In 2.0 it was ``akka.actor.ActorTimeoutException``. -getInstance for singeltons - Java +getInstance for Singletons - Java ==================================== v2.0:: @@ -286,7 +287,6 @@ Both Actors and UntypedActors using ``Stash`` now override postStop to make sure stashed messages are put into the dead letters when the actor stops; make sure you call super.postStop if you override it. - Forward of Terminated message ============================= @@ -358,4 +358,103 @@ v2.1:: else if (requestedCapacity < 0) routeeProvider.removeRoutees( -requestedCapacity, stopDelay) +Duration and Timeout +==================== + +The Duration class in the scala library is an improved version of the previous +:class:`akka.util.Duration`. Among other things it keeps the static type of +:class:`FiniteDuration` more consistently, which has been used to tighten APIs. +The advantage is that instead of runtime exceptions you’ll get compiler errors +telling you if you try to pass a possibly non-finite duration where it does not +belong. + +The main source incompatibility is that you may have to change the declared +type of fields from ``Duration`` to ``FiniteDuration`` (factory methods already +return the more precise type wherever possible). + +Another change is that ``Duration.parse`` was not accepted by the scala-library +maintainers; use ``Duration.create`` instead. + +v2.0:: + + final Duration d = Duration.parse("1 second"); + final Timeout t = new Timeout(d); + +v2.1:: + + final FiniteDuration d = Duration.create(1, "second"); + final Timeout t = new Timeout(d); // always required a finite duration, now also in the type + +Package Name Changes in Remoting +================================ + +The package name of all classes in the ``akka-remote.jar`` artifact now starts with ``akka.remote``.
+This has been done to enable OSGi bundles that don't have conflicting package names. + +Change the following import statements. Please note that the serializers are often referenced from configuration. + +================================================ ======================================================= +Search Replace with +================================================ ======================================================= +``akka.routing.RemoteRouterConfig`` ``akka.remote.routing.RemoteRouterConfig`` +``akka.serialization.ProtobufSerializer`` ``akka.remote.serialization.ProtobufSerializer`` +``akka.serialization.DaemonMsgCreateSerializer`` ``akka.remote.serialization.DaemonMsgCreateSerializer`` +================================================ ======================================================= + +Package Name Changes in Durable Mailboxes +========================================= + +The package name of all classes in the ``akka-file-mailbox.jar`` artifact now starts with ``akka.actor.mailbox.filebased``. +This has been done to enable OSGi bundles that don't have conflicting package names. + +Change the following import statements. Please note that the ``FileBasedMailboxType`` is often referenced from configuration. + +================================================ ========================================================= +Search Replace with +================================================ ========================================================= +``akka.actor.mailbox.FileBasedMailboxType`` ``akka.actor.mailbox.filebased.FileBasedMailboxType`` +``akka.actor.mailbox.FileBasedMailboxSettings`` ``akka.actor.mailbox.filebased.FileBasedMailboxSettings`` +``akka.actor.mailbox.FileBasedMessageQueue`` ``akka.actor.mailbox.filebased.FileBasedMessageQueue`` +``akka.actor.mailbox.filequeue.*`` ``akka.actor.mailbox.filebased.filequeue.*`` +================================================ ========================================================= + +Actor Receive Timeout +===================== + +The API for setting and querying the receive timeout has been made more +consistent in always taking and returning a ``Duration``; the wrapping in +``Option`` has been removed. + +(Samples are for Java; Scala sources are affected in exactly the same way.) + +v2.0:: + + getContext().setReceiveTimeout(Duration.create(10, SECONDS)); + final Option<Duration> timeout = getContext().receiveTimeout(); + final boolean isSet = timeout.isDefined(); + getContext().resetReceiveTimeout(); + +v2.1:: + + getContext().setReceiveTimeout(Duration.create(10, SECONDS)); + final Duration timeout = getContext().receiveTimeout(); + final boolean isSet = timeout.isFinite(); + getContext().setReceiveTimeout(Duration.Undefined()); + +ConsistentHash +============== + +``akka.routing.ConsistentHash`` has been changed to an immutable data structure. + +v2.0:: + + val consistentHash = new ConsistentHash(Seq(a1, a2, a3), replicas = 10) + consistentHash += a4 + val a = consistentHash.nodeFor(data) + +v2.1:: + + var consistentHash = ConsistentHash(Seq(a1, a2, a3), replicas = 10) + consistentHash = consistentHash :+ a4 + val a = consistentHash.nodeFor(data) diff --git a/akka-docs/scala/actors.rst b/akka-docs/scala/actors.rst index 16c6510c89..519e3dae66 100644 --- a/akka-docs/scala/actors.rst +++ b/akka-docs/scala/actors.rst @@ -51,6 +51,16 @@ be able to handle unknown messages then you need to have a default case as in the example above.
Otherwise an ``akka.actor.UnhandledMessage(message, sender, recipient)`` will be published to the ``ActorSystem``'s ``EventStream``. +The result of the :meth:`receive` method is a partial function object, which is +stored within the actor as its “initial behavior”; see `Become/Unbecome`_ for +further information on changing the behavior of an actor after its +construction. + +.. note:: + The initial behavior of an Actor is extracted prior to the constructor being run, + so if you want to base your initial behavior on member state, you should + use ``become`` in the constructor. + Creating Actors with default constructor ---------------------------------------- diff --git a/akka-docs/scala/code/docs/extension/ExtensionDocSpec.scala b/akka-docs/scala/code/docs/extension/ExtensionDocSpec.scala index c2558fb4f1..a7973826da 100644 --- a/akka-docs/scala/code/docs/extension/ExtensionDocSpec.scala +++ b/akka-docs/scala/code/docs/extension/ExtensionDocSpec.scala @@ -45,7 +45,7 @@ object ExtensionDocSpec { val config = """ //#config akka { - extensions = ["docs.extension.CountExtension$"] + extensions = ["docs.extension.CountExtension"] } //#config """ diff --git a/akka-docs/scala/code/docs/routing/ConsistentHashingRouterDocSpec.scala b/akka-docs/scala/code/docs/routing/ConsistentHashingRouterDocSpec.scala new file mode 100644 index 0000000000..dcb40b0c67 --- /dev/null +++ b/akka-docs/scala/code/docs/routing/ConsistentHashingRouterDocSpec.scala @@ -0,0 +1,73 @@ +/** + * Copyright (C) 2009-2012 Typesafe Inc. + */ +package docs.routing + +import akka.testkit.AkkaSpec +import akka.testkit.ImplicitSender + +object ConsistentHashingRouterDocSpec { + + //#cache-actor + import akka.actor.Actor + import akka.routing.ConsistentHashingRouter.ConsistentHashable + + class Cache extends Actor { + var cache = Map.empty[String, String] + + def receive = { + case Entry(key, value) ⇒ cache += (key -> value) + case Get(key) ⇒ sender ! cache.get(key) + case Evict(key) ⇒ cache -= key + } + } + + case class Evict(key: String) + + case class Get(key: String) extends ConsistentHashable { + override def consistentHashKey: Any = key + } + + case class Entry(key: String, value: String) + //#cache-actor + +} + +class ConsistentHashingRouterDocSpec extends AkkaSpec with ImplicitSender { + + import ConsistentHashingRouterDocSpec._ + + "demonstrate usage of ConsistentHashableRouter" in { + + //#consistent-hashing-router + import akka.actor.Props + import akka.routing.ConsistentHashingRouter + import akka.routing.ConsistentHashingRouter.ConsistentHashMapping + import akka.routing.ConsistentHashingRouter.ConsistentHashableEnvelope + + def hashMapping: ConsistentHashMapping = { + case Evict(key) ⇒ key + } + + val cache = system.actorOf(Props[Cache].withRouter(ConsistentHashingRouter(10, + hashMapping = hashMapping)), name = "cache") + + cache ! ConsistentHashableEnvelope( + message = Entry("hello", "HELLO"), hashKey = "hello") + cache ! ConsistentHashableEnvelope( + message = Entry("hi", "HI"), hashKey = "hi") + + cache ! Get("hello") + expectMsg(Some("HELLO")) + + cache ! Get("hi") + expectMsg(Some("HI")) + + cache ! Evict("hi") + cache ! 
Get("hi") + expectMsg(None) + + //#consistent-hashing-router + } + +} diff --git a/akka-docs/scala/code/docs/routing/RouterViaConfigDocSpec.scala b/akka-docs/scala/code/docs/routing/RouterViaConfigDocSpec.scala new file mode 100644 index 0000000000..24dc291087 --- /dev/null +++ b/akka-docs/scala/code/docs/routing/RouterViaConfigDocSpec.scala @@ -0,0 +1,158 @@ +/** + * Copyright (C) 2009-2012 Typesafe Inc. + */ +package docs.routing + +import akka.actor.{ Actor, Props, ActorSystem, ActorLogging } +import com.typesafe.config.ConfigFactory +import akka.routing.FromConfig +import akka.routing.ConsistentHashingRouter.ConsistentHashable +import akka.testkit.AkkaSpec +import akka.testkit.ImplicitSender + +object RouterWithConfigDocSpec { + + val config = ConfigFactory.parseString(""" + + //#config-round-robin + akka.actor.deployment { + /myrouter1 { + router = round-robin + nr-of-instances = 5 + } + } + //#config-round-robin + + //#config-resize + akka.actor.deployment { + /myrouter2 { + router = round-robin + resizer { + lower-bound = 2 + upper-bound = 15 + } + } + } + //#config-resize + + //#config-random + akka.actor.deployment { + /myrouter3 { + router = random + nr-of-instances = 5 + } + } + //#config-random + + //#config-smallest-mailbox + akka.actor.deployment { + /myrouter4 { + router = smallest-mailbox + nr-of-instances = 5 + } + } + //#config-smallest-mailbox + + //#config-broadcast + akka.actor.deployment { + /myrouter5 { + router = broadcast + nr-of-instances = 5 + } + } + //#config-broadcast + + //#config-scatter-gather + akka.actor.deployment { + /myrouter6 { + router = scatter-gather + nr-of-instances = 5 + within = 10 seconds + } + } + //#config-scatter-gather + + //#config-consistent-hashing + akka.actor.deployment { + /myrouter7 { + router = consistent-hashing + nr-of-instances = 5 + virtual-nodes-factor = 10 + } + } + //#config-consistent-hashing + + """) + + case class Message(nbr: Int) extends ConsistentHashable { + override def consistentHashKey = nbr + } + + class ExampleActor extends Actor with ActorLogging { + def receive = { + case Message(nbr) ⇒ + log.debug("Received %s in router %s".format(nbr, self.path.name)) + sender ! nbr + } + } + +} + +class RouterWithConfigDocSpec extends AkkaSpec(RouterWithConfigDocSpec.config) with ImplicitSender { + + import RouterWithConfigDocSpec._ + + "demonstrate configured round-robin router" in { + //#configurableRouting + val router = system.actorOf(Props[ExampleActor].withRouter(FromConfig()), + "myrouter1") + //#configurableRouting + 1 to 10 foreach { i ⇒ router ! Message(i) } + receiveN(10) + } + + "demonstrate configured random router" in { + val router = system.actorOf(Props[ExampleActor].withRouter(FromConfig()), + "myrouter3") + 1 to 10 foreach { i ⇒ router ! Message(i) } + receiveN(10) + } + + "demonstrate configured smallest-mailbox router" in { + val router = system.actorOf(Props[ExampleActor].withRouter(FromConfig()), + "myrouter4") + 1 to 10 foreach { i ⇒ router ! Message(i) } + receiveN(10) + } + + "demonstrate configured broadcast router" in { + val router = system.actorOf(Props[ExampleActor].withRouter(FromConfig()), + "myrouter5") + 1 to 10 foreach { i ⇒ router ! Message(i) } + receiveN(5 * 10) + } + + "demonstrate configured scatter-gather router" in { + val router = system.actorOf(Props[ExampleActor].withRouter(FromConfig()), + "myrouter6") + 1 to 10 foreach { i ⇒ router ! 
Message(i) } + receiveN(10) + } + + "demonstrate configured consistent-hashing router" in { + val router = system.actorOf(Props[ExampleActor].withRouter(FromConfig()), + "myrouter7") + 1 to 10 foreach { i ⇒ router ! Message(i) } + receiveN(10) + } + + "demonstrate configured round-robin router with resizer" in { + //#configurableRoutingWithResizer + val router = system.actorOf(Props[ExampleActor].withRouter(FromConfig()), + "myrouter2") + //#configurableRoutingWithResizer + 1 to 10 foreach { i ⇒ router ! Message(i) } + receiveN(10) + } + +} \ No newline at end of file diff --git a/akka-docs/scala/code/docs/routing/RouterViaConfigExample.scala b/akka-docs/scala/code/docs/routing/RouterViaConfigExample.scala deleted file mode 100644 index 5d34e429bb..0000000000 --- a/akka-docs/scala/code/docs/routing/RouterViaConfigExample.scala +++ /dev/null @@ -1,52 +0,0 @@ -/** - * Copyright (C) 2009-2012 Typesafe Inc. - */ -package docs.routing - -import akka.actor.{ Actor, Props, ActorSystem } -import com.typesafe.config.ConfigFactory -import akka.routing.FromConfig - -case class Message(nbr: Int) - -class ExampleActor extends Actor { - def receive = { - case Message(nbr) ⇒ println("Received %s in router %s".format(nbr, self.path.name)) - } -} - -object RouterWithConfigExample extends App { - val config = ConfigFactory.parseString(""" - //#config - akka.actor.deployment { - /router { - router = round-robin - nr-of-instances = 5 - } - } - //#config - //#config-resize - akka.actor.deployment { - /router2 { - router = round-robin - resizer { - lower-bound = 2 - upper-bound = 15 - } - } - } - //#config-resize - """) - val system = ActorSystem("Example", config) - //#configurableRouting - val router = system.actorOf(Props[ExampleActor].withRouter(FromConfig()), - "router") - //#configurableRouting - 1 to 10 foreach { i ⇒ router ! Message(i) } - - //#configurableRoutingWithResizer - val router2 = system.actorOf(Props[ExampleActor].withRouter(FromConfig()), - "router2") - //#configurableRoutingWithResizer - 1 to 10 foreach { i ⇒ router2 ! 
Message(i) } -} \ No newline at end of file diff --git a/akka-docs/scala/code/docs/routing/RouterViaProgramExample.scala b/akka-docs/scala/code/docs/routing/RouterViaProgramExample.scala index 79219b742b..6269e7f698 100644 --- a/akka-docs/scala/code/docs/routing/RouterViaProgramExample.scala +++ b/akka-docs/scala/code/docs/routing/RouterViaProgramExample.scala @@ -6,7 +6,7 @@ package docs.routing import akka.routing.RoundRobinRouter import akka.actor.{ ActorRef, Props, Actor, ActorSystem } import akka.routing.DefaultResizer -import akka.routing.RemoteRouterConfig +import akka.remote.routing.RemoteRouterConfig case class Message1(nbr: Int) diff --git a/akka-docs/scala/code/docs/serialization/SerializationDocSpec.scala b/akka-docs/scala/code/docs/serialization/SerializationDocSpec.scala index 9b222436da..d979952887 100644 --- a/akka-docs/scala/code/docs/serialization/SerializationDocSpec.scala +++ b/akka-docs/scala/code/docs/serialization/SerializationDocSpec.scala @@ -104,7 +104,7 @@ package docs.serialization { actor { serializers { java = "akka.serialization.JavaSerializer" - proto = "akka.serialization.ProtobufSerializer" + proto = "akka.remote.serialization.ProtobufSerializer" myown = "docs.serialization.MyOwnSerializer" } } @@ -122,7 +122,7 @@ package docs.serialization { actor { serializers { java = "akka.serialization.JavaSerializer" - proto = "akka.serialization.ProtobufSerializer" + proto = "akka.remote.serialization.ProtobufSerializer" myown = "docs.serialization.MyOwnSerializer" } diff --git a/akka-docs/scala/extending-akka.rst b/akka-docs/scala/extending-akka.rst index 9c890de252..52b7a715c5 100644 --- a/akka-docs/scala/extending-akka.rst +++ b/akka-docs/scala/extending-akka.rst @@ -58,8 +58,6 @@ in the ``akka.extensions`` section of the config you provide to your ``ActorSyst .. includecode:: code/docs/extension/ExtensionDocSpec.scala :include: config -Note that in this case ``CountExtension`` is an object and therefore the class name ends with ``$``. - Applicability ============= diff --git a/akka-docs/scala/routing.rst b/akka-docs/scala/routing.rst index 353e82d277..c1fa21b23f 100644 --- a/akka-docs/scala/routing.rst +++ b/akka-docs/scala/routing.rst @@ -15,15 +15,16 @@ is really easy to create your own. The routers shipped with Akka are: * ``akka.routing.SmallestMailboxRouter`` * ``akka.routing.BroadcastRouter`` * ``akka.routing.ScatterGatherFirstCompletedRouter`` +* ``akka.routing.ConsistentHashingRouter`` Routers In Action ^^^^^^^^^^^^^^^^^ This is an example of how to create a router that is defined in configuration: -.. includecode:: code/docs/routing/RouterViaConfigExample.scala#config +.. includecode:: code/docs/routing/RouterViaConfigDocSpec.scala#config-round-robin -.. includecode:: code/docs/routing/RouterViaConfigExample.scala#configurableRouting +.. includecode:: code/docs/routing/RouterViaConfigDocSpec.scala#configurableRouting This is an example of how to programmatically create a router and set the number of routees it should create: @@ -125,7 +126,7 @@ not have an effect on the number of actors in the pool. Setting the strategy is easily done: -.. includecode:: ../../akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala +.. includecode:: ../../akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala#supervision :include: supervision :exclude: custom-strategy @@ -179,6 +180,10 @@ is exactly what you would expect from a round-robin router to happen. 
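For comparison, a minimal sketch of creating the same kind of router programmatically (``ExampleActor`` stands in for any actor class used in the examples on this page)::

    import akka.actor.{ ActorSystem, Props }
    import akka.routing.RoundRobinRouter

    val system = ActorSystem("Example")
    val router = system.actorOf(
      Props[ExampleActor].withRouter(RoundRobinRouter(nrOfInstances = 5)),
      "roundRobinRouter")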
(The name of an actor is automatically created in the format ``$letter`` unless you specify it - hence the names printed above.) +This is an example of how to define a round-robin router in configuration: + +.. includecode:: code/docs/routing/RouterViaConfigDocSpec.scala#config-round-robin + RandomRouter ************ As the name implies this router type selects one of its routees randomly and forwards @@ -206,6 +211,10 @@ When run you should see a similar output to this: The result from running the random router should be different, or at least random, every time you run it. Try to run it a couple of times to verify its behavior if you don't trust us. +This is an example of how to define a random router in configuration: + +.. includecode:: code/docs/routing/RouterViaConfigDocSpec.scala#config-random + SmallestMailboxRouter ********************* A Router that tries to send to the non-suspended routee with fewest messages in mailbox. @@ -221,6 +230,11 @@ Code example: .. includecode:: code/docs/routing/RouterTypeExample.scala#smallestMailboxRouter + +This is an example of how to define a smallest-mailbox router in configuration: + +.. includecode:: code/docs/routing/RouterViaConfigDocSpec.scala#config-smallest-mailbox + BroadcastRouter *************** A broadcast router forwards the message it receives to *all* its routees. @@ -240,6 +254,11 @@ When run you should see a similar output to this: As you can see here above each of the routees, five in total, received the broadcast message. +This is an example of how to define a broadcast router in configuration: + +.. includecode:: code/docs/routing/RouterViaConfigDocSpec.scala#config-broadcast + + ScatterGatherFirstCompletedRouter ********************************* The ScatterGatherFirstCompletedRouter will send the message on to all its routees as a future. @@ -257,6 +276,51 @@ When run you should see this: From the output above you can't really see that all the routees performed the calculation, but they did! The result you see is from the first routee that returned its calculation to the router. +This is an example of how to define a scatter-gather router in configuration: + +.. includecode:: code/docs/routing/RouterViaConfigDocSpec.scala#config-scatter-gather + + +ConsistentHashingRouter +*********************** + +The ConsistentHashingRouter uses `consistent hashing `_ +to select a connection based on the sent message. This +`article `_ gives good +insight into how consistent hashing is implemented. + +There are three ways to define what data to use for the consistent hash key. + +* You can define the ``hashMapping`` of the router to map incoming + messages to their consistent hash key. This makes the decision + transparent for the sender. + +* The messages may implement ``akka.routing.ConsistentHashingRouter.ConsistentHashable``. + The key is part of the message and it's convenient to define it together + with the message definition. + +* The messages can be wrapped in an ``akka.routing.ConsistentHashingRouter.ConsistentHashableEnvelope`` + to define what data to use for the consistent hash key. The sender knows + the key to use. + +These ways of defining the consistent hash key can be used together, and at +the same time, for one router. The ``hashMapping`` is tried first. + +Code example: + +.. includecode:: code/docs/routing/ConsistentHashingRouterDocSpec.scala#cache-actor + +.. 
includecode:: code/docs/routing/ConsistentHashingRouterDocSpec.scala#consistent-hashing-router + +In the above example you see that the ``Get`` message implements ``ConsistentHashable`` itself, +while the ``Entry`` message is wrapped in a ``ConsistentHashableEnvelope``. The ``Evict`` +message is handled by the ``hashMapping`` partial function. + +This is an example of how to define a consistent-hashing router in configuration: + +.. includecode:: code/docs/routing/RouterViaConfigDocSpec.scala#config-consistent-hashing + + Broadcast Messages ^^^^^^^^^^^^^^^^^^ @@ -278,9 +342,9 @@ of routees dynamically. This is an example of how to create a resizable router that is defined in configuration: -.. includecode:: code/docs/routing/RouterViaConfigExample.scala#config-resize +.. includecode:: code/docs/routing/RouterViaConfigDocSpec.scala#config-resize -.. includecode:: code/docs/routing/RouterViaConfigExample.scala#configurableRoutingWithResizer +.. includecode:: code/docs/routing/RouterViaConfigDocSpec.scala#configurableRoutingWithResizer Several more configuration options are available and described in ``akka.actor.deployment.default.resizer`` section of the reference :ref:`configuration`. diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FileBasedMailbox.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/FileBasedMailbox.scala similarity index 98% rename from akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FileBasedMailbox.scala rename to akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/FileBasedMailbox.scala index 2341f47fd5..59e5780849 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FileBasedMailbox.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/FileBasedMailbox.scala @@ -2,8 +2,9 @@ * Copyright (C) 2009-2012 Typesafe Inc. */ -package akka.actor.mailbox +package akka.actor.mailbox.filebased +import akka.actor.mailbox._ import akka.actor.{ ActorContext, ActorRef, ActorSystem, ExtendedActorSystem } import akka.event.Logging import com.typesafe.config.Config diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FileBasedMailboxSettings.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/FileBasedMailboxSettings.scala similarity index 96% rename from akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FileBasedMailboxSettings.scala rename to akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/FileBasedMailboxSettings.scala index 551a01ed00..7ac8d0a044 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FileBasedMailboxSettings.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/FileBasedMailboxSettings.scala @@ -1,8 +1,9 @@ /** * Copyright (C) 2009-2012 Typesafe Inc. 
*/ -package akka.actor.mailbox +package akka.actor.mailbox.filebased +import akka.actor.mailbox._ import com.typesafe.config.Config import scala.concurrent.util.Duration import java.util.concurrent.TimeUnit.MILLISECONDS diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/BrokenItemException.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/BrokenItemException.scala similarity index 94% rename from akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/BrokenItemException.scala rename to akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/BrokenItemException.scala index bc96778ec6..636cbfc4aa 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/BrokenItemException.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/BrokenItemException.scala @@ -15,7 +15,7 @@ * limitations under the License. */ -package akka.actor.mailbox.filequeue +package akka.actor.mailbox.filebased.filequeue import java.io.IOException diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/Counter.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/Counter.scala similarity index 95% rename from akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/Counter.scala rename to akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/Counter.scala index 707515d345..c2de226d20 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/Counter.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/Counter.scala @@ -15,7 +15,7 @@ * limitations under the License. */ -package akka.actor.mailbox.filequeue +package akka.actor.mailbox.filebased.filequeue import java.util.concurrent.atomic.AtomicLong diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/Journal.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/Journal.scala similarity index 99% rename from akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/Journal.scala rename to akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/Journal.scala index 5d5ed54ae7..6862ad9600 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/Journal.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/Journal.scala @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package akka.actor.mailbox.filequeue +package akka.actor.mailbox.filebased.filequeue import java.io._ import java.nio.{ ByteBuffer, ByteOrder } diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/PersistentQueue.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/PersistentQueue.scala similarity index 99% rename from akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/PersistentQueue.scala rename to akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/PersistentQueue.scala index 9b012e34f0..83d539361c 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/PersistentQueue.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/PersistentQueue.scala @@ -15,14 +15,14 @@ * limitations under the License. */ -package akka.actor.mailbox.filequeue +package akka.actor.mailbox.filebased.filequeue import java.io._ import scala.collection.mutable import akka.event.LoggingAdapter import scala.concurrent.util.Duration import java.util.concurrent.TimeUnit -import akka.actor.mailbox.FileBasedMailboxSettings +import akka.actor.mailbox.filebased.FileBasedMailboxSettings // a config value that's backed by a global setting but may be locally overridden class OverlaySetting[T](base: ⇒ T) { diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/QItem.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/QItem.scala similarity index 97% rename from akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/QItem.scala rename to akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/QItem.scala index 1a05070cb7..1d0eb543d6 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/QItem.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/QItem.scala @@ -15,7 +15,7 @@ * limitations under the License. */ -package akka.actor.mailbox.filequeue +package akka.actor.mailbox.filebased.filequeue import java.nio.{ ByteBuffer, ByteOrder } diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/QueueCollection.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/QueueCollection.scala similarity index 98% rename from akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/QueueCollection.scala rename to akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/QueueCollection.scala index 568428dfc6..95550f6603 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/QueueCollection.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/QueueCollection.scala @@ -15,13 +15,13 @@ * limitations under the License. 
*/ -package akka.actor.mailbox.filequeue +package akka.actor.mailbox.filebased.filequeue import java.io.File import java.util.concurrent.CountDownLatch import scala.collection.mutable import akka.event.LoggingAdapter -import akka.actor.mailbox.FileBasedMailboxSettings +import akka.actor.mailbox.filebased.FileBasedMailboxSettings class InaccessibleQueuePath extends Exception("Inaccessible queue path: Must be a directory and writable") diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/tools/QDumper.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/tools/QDumper.scala similarity index 97% rename from akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/tools/QDumper.scala rename to akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/tools/QDumper.scala index 1ede4428f1..f0944d0fe7 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/tools/QDumper.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/tools/QDumper.scala @@ -15,13 +15,13 @@ * limitations under the License. */ -package akka.actor.mailbox.filequeue.tools +package akka.actor.mailbox.filebased.filequeue.tools import language.reflectiveCalls import java.io.{ FileNotFoundException, IOException } import scala.collection.mutable -import akka.actor.mailbox.filequeue._ +import akka.actor.mailbox.filebased.filequeue._ import akka.event.LoggingAdapter import akka.actor.ActorSystem diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/tools/Util.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/tools/Util.scala similarity index 96% rename from akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/tools/Util.scala rename to akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/tools/Util.scala index c1b9c35594..5a6f492815 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/tools/Util.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filebased/filequeue/tools/Util.scala @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package akka.actor.mailbox.filequeue.tools +package akka.actor.mailbox.filebased.filequeue.tools object Util { val KILOBYTE = 1024L diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/test/scala/akka/actor/mailbox/FileBasedMailboxSpec.scala b/akka-durable-mailboxes/akka-file-mailbox/src/test/scala/akka/actor/mailbox/filebased/FileBasedMailboxSpec.scala similarity index 90% rename from akka-durable-mailboxes/akka-file-mailbox/src/test/scala/akka/actor/mailbox/FileBasedMailboxSpec.scala rename to akka-durable-mailboxes/akka-file-mailbox/src/test/scala/akka/actor/mailbox/filebased/FileBasedMailboxSpec.scala index 0e10c3c7ad..5b982523ee 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/test/scala/akka/actor/mailbox/FileBasedMailboxSpec.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/test/scala/akka/actor/mailbox/filebased/FileBasedMailboxSpec.scala @@ -1,14 +1,15 @@ -package akka.actor.mailbox +package akka.actor.mailbox.filebased import language.postfixOps +import akka.actor.mailbox._ import org.apache.commons.io.FileUtils import akka.dispatch.Mailbox object FileBasedMailboxSpec { val config = """ File-dispatcher { - mailbox-type = akka.actor.mailbox.FileBasedMailboxType + mailbox-type = akka.actor.mailbox.filebased.FileBasedMailboxType throughput = 1 file-based.directory-path = "file-based" file-based.circuit-breaker.max-failures = 5 diff --git a/akka-remote-tests/src/main/scala/akka/remote/testconductor/Conductor.scala b/akka-remote-tests/src/main/scala/akka/remote/testconductor/Conductor.scala index 3822647ce8..84b2f17173 100644 --- a/akka-remote-tests/src/main/scala/akka/remote/testconductor/Conductor.scala +++ b/akka-remote-tests/src/main/scala/akka/remote/testconductor/Conductor.scala @@ -23,6 +23,7 @@ import java.util.concurrent.TimeUnit.MILLISECONDS import akka.util.{ Timeout } import scala.concurrent.util.{ Deadline, Duration } import scala.reflect.classTag +import scala.concurrent.util.FiniteDuration sealed trait Direction { def includes(other: Direction): Boolean @@ -559,7 +560,7 @@ private[akka] class BarrierCoordinator extends Actor with LoggingFSM[BarrierCoor } onTransition { - case Idle -> Waiting ⇒ setTimer("Timeout", StateTimeout, nextStateData.deadline.timeLeft, false) + case Idle -> Waiting ⇒ setTimer("Timeout", StateTimeout, nextStateData.deadline.timeLeft.asInstanceOf[FiniteDuration], false) case Waiting -> Idle ⇒ cancelTimer("Timeout") } @@ -570,7 +571,7 @@ private[akka] class BarrierCoordinator extends Actor with LoggingFSM[BarrierCoor val enterDeadline = getDeadline(timeout) // we only allow the deadlines to get shorter if (enterDeadline.timeLeft < deadline.timeLeft) { - setTimer("Timeout", StateTimeout, enterDeadline.timeLeft, false) + setTimer("Timeout", StateTimeout, enterDeadline.timeLeft.asInstanceOf[FiniteDuration], false) handleBarrier(d.copy(arrived = together, deadline = enterDeadline)) } else handleBarrier(d.copy(arrived = together)) diff --git a/akka-remote-tests/src/main/scala/akka/remote/testconductor/NetworkFailureInjector.scala b/akka-remote-tests/src/main/scala/akka/remote/testconductor/NetworkFailureInjector.scala index 6c8352d880..e1d5fb0854 100644 --- a/akka-remote-tests/src/main/scala/akka/remote/testconductor/NetworkFailureInjector.scala +++ b/akka-remote-tests/src/main/scala/akka/remote/testconductor/NetworkFailureInjector.scala @@ -4,20 +4,17 @@ package akka.remote.testconductor import language.postfixOps - import java.net.InetSocketAddress - import scala.annotation.tailrec import scala.collection.immutable.Queue - 
import org.jboss.netty.buffer.ChannelBuffer import org.jboss.netty.channel.{ SimpleChannelHandler, MessageEvent, Channels, ChannelStateEvent, ChannelHandlerContext, ChannelFutureListener, ChannelFuture } - import akka.actor.{ Props, LoggingFSM, Address, ActorSystem, ActorRef, ActorLogging, Actor, FSM } import akka.event.Logging import akka.remote.netty.ChannelAddress import scala.concurrent.util.Duration import scala.concurrent.util.duration._ +import scala.concurrent.util.FiniteDuration /** * INTERNAL API. @@ -331,20 +328,20 @@ private[akka] class ThrottleActor(channelContext: ChannelHandlerContext) * lead to the correct rate on average, with increased latency of the order of * HWT granularity. */ - private def schedule(d: Data): (Data, Seq[Send], Option[Duration]) = { + private def schedule(d: Data): (Data, Seq[Send], Option[FiniteDuration]) = { val now = System.nanoTime - @tailrec def rec(d: Data, toSend: Seq[Send]): (Data, Seq[Send], Option[Duration]) = { + @tailrec def rec(d: Data, toSend: Seq[Send]): (Data, Seq[Send], Option[FiniteDuration]) = { if (d.queue.isEmpty) (d, toSend, None) else { val timeForPacket = d.lastSent + (1000 * size(d.queue.head.msg) / d.rateMBit).toLong if (timeForPacket <= now) rec(Data(timeForPacket, d.rateMBit, d.queue.tail), toSend :+ d.queue.head) else { val splitThreshold = d.lastSent + packetSplitThreshold.toNanos - if (now < splitThreshold) (d, toSend, Some((timeForPacket - now).nanos min (splitThreshold - now).nanos)) + if (now < splitThreshold) (d, toSend, Some(((timeForPacket - now).nanos min (splitThreshold - now).nanos).asInstanceOf[FiniteDuration])) else { val microsToSend = (now - d.lastSent) / 1000 val (s1, s2) = split(d.queue.head, (microsToSend * d.rateMBit / 8).toInt) - (d.copy(queue = s2 +: d.queue.tail), toSend :+ s1, Some((timeForPacket - now).nanos min packetSplitThreshold)) + (d.copy(queue = s2 +: d.queue.tail), toSend :+ s1, Some(((timeForPacket - now).nanos min packetSplitThreshold).asInstanceOf[FiniteDuration])) } } } diff --git a/akka-remote-tests/src/main/scala/akka/remote/testconductor/Player.scala b/akka-remote-tests/src/main/scala/akka/remote/testconductor/Player.scala index 2e22a0d819..03b07486f0 100644 --- a/akka-remote-tests/src/main/scala/akka/remote/testconductor/Player.scala +++ b/akka-remote-tests/src/main/scala/akka/remote/testconductor/Player.scala @@ -4,7 +4,6 @@ package akka.remote.testconductor import language.postfixOps - import akka.actor.{ Actor, ActorRef, ActorSystem, LoggingFSM, Props, PoisonPill, Status, Address, Scheduler } import RemoteConnection.getAddrString import scala.concurrent.util.{ Duration, Deadline } @@ -20,6 +19,7 @@ import akka.event.{ LoggingAdapter, Logging } import java.net.{ InetSocketAddress, ConnectException } import scala.reflect.classTag import concurrent.{ ExecutionContext, Await, Future } +import scala.concurrent.util.FiniteDuration /** * The Player is the client component of the @@ -81,13 +81,13 @@ trait Player { this: TestConductorExt ⇒ system.log.debug("entering barriers " + name.mkString("(", ", ", ")")) val stop = Deadline.now + timeout.duration name foreach { b ⇒ - val barrierTimeout = stop.timeLeft + val barrierTimeout = stop.timeLeft.asInstanceOf[FiniteDuration] if (barrierTimeout < Duration.Zero) { client ! 
ToServer(FailBarrier(b)) throw new TimeoutException("Server timed out while waiting for barrier " + b); } try { - implicit val timeout = Timeout(barrierTimeout + Settings.QueryTimeout.duration) + implicit val timeout = Timeout((barrierTimeout + Settings.QueryTimeout.duration).asInstanceOf[FiniteDuration]) Await.result(client ? ToServer(EnterBarrier(b, Option(barrierTimeout))), Duration.Inf) } catch { case e: AskTimeoutException ⇒ @@ -278,7 +278,7 @@ private[akka] class PlayerHandler( event.getCause match { case c: ConnectException if reconnects > 0 ⇒ reconnects -= 1 - scheduler.scheduleOnce(nextAttempt.timeLeft)(reconnect()) + scheduler.scheduleOnce(nextAttempt.timeLeft.asInstanceOf[FiniteDuration])(reconnect()) case e ⇒ fsm ! ConnectionFailure(e.getMessage) } } diff --git a/akka-remote-tests/src/test/scala/akka/remote/testkit/MultiNodeSpec.scala b/akka-remote-tests/src/main/scala/akka/remote/testkit/MultiNodeSpec.scala similarity index 74% rename from akka-remote-tests/src/test/scala/akka/remote/testkit/MultiNodeSpec.scala rename to akka-remote-tests/src/main/scala/akka/remote/testkit/MultiNodeSpec.scala index 8ae386d4eb..04cf9bf3c9 100644 --- a/akka-remote-tests/src/test/scala/akka/remote/testkit/MultiNodeSpec.scala +++ b/akka-remote-tests/src/main/scala/akka/remote/testkit/MultiNodeSpec.scala @@ -4,18 +4,23 @@ package akka.remote.testkit import language.implicitConversions +import language.postfixOps import java.net.InetSocketAddress import com.typesafe.config.{ ConfigObject, ConfigFactory, Config } -import akka.actor.{ RootActorPath, ActorPath, ActorSystem, ExtendedActorSystem } +import akka.actor._ import akka.util.Timeout import akka.remote.testconductor.{ TestConductorExt, TestConductor, RoleName } import akka.remote.RemoteActorRefProvider -import akka.testkit.AkkaSpec +import akka.testkit.TestKit import scala.concurrent.{ Await, Awaitable } import scala.util.control.NonFatal import scala.concurrent.util.Duration import scala.concurrent.util.duration._ +import java.util.concurrent.TimeoutException +import akka.remote.testconductor.RoleName +import akka.actor.RootActorPath +import akka.event.{ Logging, LoggingAdapter } /** * Configure the role names and participants of the test, including configuration settings. 
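 * For example, a sketch of a configuration with two made-up role names:
 * {{{
 * object ExampleConfig extends MultiNodeConfig {
 *   val first  = role("first")
 *   val second = role("second")
 *   commonConfig(debugConfig(on = false))
 * }
 * }}}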
@@ -82,7 +87,7 @@ abstract class MultiNodeConfig { } private[testkit] def config: Config = { - val configs = (_nodeConf get myself).toList ::: _commonConf.toList ::: MultiNodeSpec.nodeConfig :: AkkaSpec.testConf :: Nil + val configs = (_nodeConf get myself).toList ::: _commonConf.toList ::: MultiNodeSpec.nodeConfig :: MultiNodeSpec.baseConfig :: Nil configs reduce (_ withFallback _) } @@ -119,12 +124,43 @@ object MultiNodeSpec { require(selfIndex >= 0 && selfIndex < nodeNames.size, "selfIndex out of bounds: " + selfIndex) - val nodeConfig = AkkaSpec.mapToConfig(Map( + private[testkit] val nodeConfig = mapToConfig(Map( "akka.actor.provider" -> "akka.remote.RemoteActorRefProvider", "akka.remote.transport" -> "akka.remote.testconductor.TestConductorTransport", "akka.remote.netty.hostname" -> nodeNames(selfIndex), "akka.remote.netty.port" -> 0)) + private[testkit] val baseConfig: Config = ConfigFactory.parseString(""" + akka { + event-handlers = ["akka.testkit.TestEventListener"] + loglevel = "WARNING" + stdout-loglevel = "WARNING" + actor { + default-dispatcher { + executor = "fork-join-executor" + fork-join-executor { + parallelism-min = 8 + parallelism-factor = 2.0 + parallelism-max = 8 + } + } + } + } + """) + + private def mapToConfig(map: Map[String, Any]): Config = { + import scala.collection.JavaConverters._ + ConfigFactory.parseMap(map.asJava) + } + + private def getCallerName(clazz: Class[_]): String = { + val s = Thread.currentThread.getStackTrace map (_.getClassName) drop 1 dropWhile (_ matches ".*MultiNodeSpec.?$") + val reduced = s.lastIndexWhere(_ == clazz.getName) match { + case -1 ⇒ s + case z ⇒ s drop (z + 1) + } + reduced.head.replaceFirst(""".*\.""", "").replaceAll("[^a-zA-Z_0-9]", "_") + } } /** @@ -136,17 +172,52 @@ object MultiNodeSpec { * val is fine. */ abstract class MultiNodeSpec(val myself: RoleName, _system: ActorSystem, _roles: Seq[RoleName], deployments: RoleName ⇒ Seq[String]) - extends AkkaSpec(_system) { + extends TestKit(_system) with MultiNodeSpecCallbacks { import MultiNodeSpec._ def this(config: MultiNodeConfig) = - this(config.myself, ActorSystem(AkkaSpec.getCallerName(classOf[MultiNodeSpec]), ConfigFactory.load(config.config)), + this(config.myself, ActorSystem(MultiNodeSpec.getCallerName(classOf[MultiNodeSpec]), ConfigFactory.load(config.config)), config.roles, config.deployments) + val log: LoggingAdapter = Logging(system, this.getClass) + + final override def multiNodeSpecBeforeAll { + atStartup() + } + + final override def multiNodeSpecAfterAll { + // wait for all nodes to remove themselves before we shut the conductor down + if (selfIndex == 0) { + testConductor.removeNode(myself) + within(testConductor.Settings.BarrierTimeout.duration) { + awaitCond { + testConductor.getNodes.await.filterNot(_ == myself).isEmpty + } + } + } + system.shutdown() + try system.awaitTermination(5 seconds) catch { + case _: TimeoutException ⇒ + system.log.warning("Failed to stop [{}] within 5 seconds", system.name) + println(system.asInstanceOf[ActorSystemImpl].printTree) + } + atTermination() + } + /* - * Test Class Interface + * Test Class Interface + */ + + /** + * Override this method to do something when the whole test is starting up. */ + protected def atStartup(): Unit = {} + + /** + * Override this method to do something when the whole test is terminating. 
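+   * For example, a sketch of an override (the log message is made up):
+   * {{{
+   * override def atTermination(): Unit = {
+   *   log.info("all test cases finished")
+   * }
+   * }}}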
+ */ + protected def atTermination(): Unit = {} /** * All registered roles @@ -267,16 +338,30 @@ abstract class MultiNodeSpec(val myself: RoleName, _system: ActorSystem, _roles: log.info("Role [{}] started with address [{}]", myself.name, system.asInstanceOf[ExtendedActorSystem].provider.asInstanceOf[RemoteActorRefProvider].transport.address) - // wait for all nodes to remove themselves before we shut the conductor down - final override def beforeShutdown() = { - if (selfIndex == 0) { - testConductor.removeNode(myself) - within(testConductor.Settings.BarrierTimeout.duration) { - awaitCond { - testConductor.getNodes.await.filterNot(_ == myself).isEmpty - } - } - } - } - +} + +/** + * Use this to hook MultiNodeSpec into your test framework lifecycle, either by having your test extend MultiNodeSpec + * and call these methods or by creating a trait that calls them and then mixing that trait with your test together + * with MultiNodeSpec. + * + * Example trait for MultiNodeSpec with ScalaTest + * + * {{{ + * trait STMultiNodeSpec extends MultiNodeSpecCallbacks with WordSpec with MustMatchers with BeforeAndAfterAll { + * override def beforeAll() = multiNodeSpecBeforeAll() + * override def afterAll() = multiNodeSpecAfterAll() + * } + * }}} + */ +trait MultiNodeSpecCallbacks { + /** + * Call this before the start of the test run. NOT before every test case. + */ + def multiNodeSpecBeforeAll(): Unit + + /** + * Call this after the all test cases have run. NOT after every test case. + */ + def multiNodeSpecAfterAll(): Unit } diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/LookupRemoteActorSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/LookupRemoteActorSpec.scala index 58d41cc003..c4084bde50 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/LookupRemoteActorSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/LookupRemoteActorSpec.scala @@ -7,8 +7,7 @@ import akka.actor.Actor import akka.actor.ActorRef import akka.actor.Props import akka.pattern.ask -import akka.remote.testkit.MultiNodeConfig -import akka.remote.testkit.MultiNodeSpec +import testkit.{STMultiNodeSpec, MultiNodeConfig, MultiNodeSpec} import akka.testkit._ object LookupRemoteActorMultiJvmSpec extends MultiNodeConfig { @@ -30,7 +29,7 @@ class LookupRemoteActorMultiJvmNode1 extends LookupRemoteActorSpec class LookupRemoteActorMultiJvmNode2 extends LookupRemoteActorSpec class LookupRemoteActorSpec extends MultiNodeSpec(LookupRemoteActorMultiJvmSpec) - with ImplicitSender with DefaultTimeout { + with STMultiNodeSpec with ImplicitSender with DefaultTimeout { import LookupRemoteActorMultiJvmSpec._ def initialParticipants = 2 diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/NewRemoteActorSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/NewRemoteActorSpec.scala index 094e6692f7..6b3f081968 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/NewRemoteActorSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/NewRemoteActorSpec.scala @@ -9,8 +9,7 @@ import akka.actor.Actor import akka.actor.ActorRef import akka.actor.Props import akka.pattern.ask -import akka.remote.testkit.MultiNodeConfig -import akka.remote.testkit.MultiNodeSpec +import testkit.{STMultiNodeSpec, MultiNodeConfig, MultiNodeSpec} import akka.testkit._ object NewRemoteActorMultiJvmSpec extends MultiNodeConfig { @@ -35,7 +34,7 @@ class NewRemoteActorMultiJvmNode1 extends NewRemoteActorSpec class NewRemoteActorMultiJvmNode2 extends NewRemoteActorSpec class NewRemoteActorSpec 
extends MultiNodeSpec(NewRemoteActorMultiJvmSpec) - with ImplicitSender with DefaultTimeout { + with STMultiNodeSpec with ImplicitSender with DefaultTimeout { import NewRemoteActorMultiJvmSpec._ def initialParticipants = 2 diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/router/RandomRoutedRemoteActorSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/router/RandomRoutedRemoteActorSpec.scala index 3d1286fd10..040d91ad57 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/router/RandomRoutedRemoteActorSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/router/RandomRoutedRemoteActorSpec.scala @@ -12,8 +12,7 @@ import akka.actor.PoisonPill import akka.actor.Address import scala.concurrent.Await import akka.pattern.ask -import akka.remote.testkit.MultiNodeConfig -import akka.remote.testkit.MultiNodeSpec +import akka.remote.testkit.{STMultiNodeSpec, MultiNodeConfig, MultiNodeSpec} import akka.routing.Broadcast import akka.routing.RandomRouter import akka.routing.RoutedActorRef @@ -48,7 +47,7 @@ class RandomRoutedRemoteActorMultiJvmNode3 extends RandomRoutedRemoteActorSpec class RandomRoutedRemoteActorMultiJvmNode4 extends RandomRoutedRemoteActorSpec class RandomRoutedRemoteActorSpec extends MultiNodeSpec(RandomRoutedRemoteActorMultiJvmSpec) - with ImplicitSender with DefaultTimeout { + with STMultiNodeSpec with ImplicitSender with DefaultTimeout { import RandomRoutedRemoteActorMultiJvmSpec._ def initialParticipants = 4 diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/router/RoundRobinRoutedRemoteActorSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/router/RoundRobinRoutedRemoteActorSpec.scala index c6925c0984..5a629abc37 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/router/RoundRobinRoutedRemoteActorSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/router/RoundRobinRoutedRemoteActorSpec.scala @@ -12,8 +12,7 @@ import akka.actor.PoisonPill import akka.actor.Address import scala.concurrent.Await import akka.pattern.ask -import akka.remote.testkit.MultiNodeConfig -import akka.remote.testkit.MultiNodeSpec +import akka.remote.testkit.{STMultiNodeSpec, MultiNodeConfig, MultiNodeSpec} import akka.routing.Broadcast import akka.routing.RoundRobinRouter import akka.routing.RoutedActorRef @@ -60,7 +59,7 @@ class RoundRobinRoutedRemoteActorMultiJvmNode3 extends RoundRobinRoutedRemoteAct class RoundRobinRoutedRemoteActorMultiJvmNode4 extends RoundRobinRoutedRemoteActorSpec class RoundRobinRoutedRemoteActorSpec extends MultiNodeSpec(RoundRobinRoutedRemoteActorMultiJvmSpec) - with ImplicitSender with DefaultTimeout { + with STMultiNodeSpec with ImplicitSender with DefaultTimeout { import RoundRobinRoutedRemoteActorMultiJvmSpec._ def initialParticipants = 4 diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/router/ScatterGatherRoutedRemoteActorSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/router/ScatterGatherRoutedRemoteActorSpec.scala index 40f25dce73..a6d7b73d85 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/router/ScatterGatherRoutedRemoteActorSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/router/ScatterGatherRoutedRemoteActorSpec.scala @@ -10,8 +10,7 @@ import akka.actor.ActorRef import akka.actor.Props import scala.concurrent.Await import akka.pattern.ask -import akka.remote.testkit.MultiNodeConfig -import akka.remote.testkit.MultiNodeSpec +import akka.remote.testkit.{STMultiNodeSpec, MultiNodeConfig, MultiNodeSpec} import 
akka.routing.Broadcast import akka.routing.ScatterGatherFirstCompletedRouter import akka.routing.RoutedActorRef @@ -48,7 +47,7 @@ class ScatterGatherRoutedRemoteActorMultiJvmNode3 extends ScatterGatherRoutedRem class ScatterGatherRoutedRemoteActorMultiJvmNode4 extends ScatterGatherRoutedRemoteActorSpec class ScatterGatherRoutedRemoteActorSpec extends MultiNodeSpec(ScatterGatherRoutedRemoteActorMultiJvmSpec) - with ImplicitSender with DefaultTimeout { + with STMultiNodeSpec with ImplicitSender with DefaultTimeout { import ScatterGatherRoutedRemoteActorMultiJvmSpec._ def initialParticipants = 4 diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/testconductor/TestConductorSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/testconductor/TestConductorSpec.scala index 7315bf914d..8df14b09a7 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/testconductor/TestConductorSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/testconductor/TestConductorSpec.scala @@ -16,8 +16,7 @@ import akka.testkit.ImplicitSender import akka.testkit.LongRunningTest import java.net.InetSocketAddress import java.net.InetAddress -import akka.remote.testkit.MultiNodeSpec -import akka.remote.testkit.MultiNodeConfig +import akka.remote.testkit.{STMultiNodeSpec, MultiNodeSpec, MultiNodeConfig} object TestConductorMultiJvmSpec extends MultiNodeConfig { commonConfig(debugConfig(on = false)) @@ -29,7 +28,7 @@ object TestConductorMultiJvmSpec extends MultiNodeConfig { class TestConductorMultiJvmNode1 extends TestConductorSpec class TestConductorMultiJvmNode2 extends TestConductorSpec -class TestConductorSpec extends MultiNodeSpec(TestConductorMultiJvmSpec) with ImplicitSender { +class TestConductorSpec extends MultiNodeSpec(TestConductorMultiJvmSpec) with STMultiNodeSpec with ImplicitSender { import TestConductorMultiJvmSpec._ diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/testkit/MultiNodeSpecSpec.scala b/akka-remote-tests/src/multi-jvm/scala/akka/remote/testkit/MultiNodeSpecSpec.scala index 2a709a99a7..6a316b708d 100644 --- a/akka-remote-tests/src/multi-jvm/scala/akka/remote/testkit/MultiNodeSpecSpec.scala +++ b/akka-remote-tests/src/multi-jvm/scala/akka/remote/testkit/MultiNodeSpecSpec.scala @@ -19,7 +19,7 @@ class MultiNodeSpecSpecMultiJvmNode2 extends MultiNodeSpecSpec class MultiNodeSpecSpecMultiJvmNode3 extends MultiNodeSpecSpec class MultiNodeSpecSpecMultiJvmNode4 extends MultiNodeSpecSpec -class MultiNodeSpecSpec extends MultiNodeSpec(MultiNodeSpecMultiJvmSpec) { +class MultiNodeSpecSpec extends MultiNodeSpec(MultiNodeSpecMultiJvmSpec) with STMultiNodeSpec { import MultiNodeSpecMultiJvmSpec._ diff --git a/akka-remote-tests/src/multi-jvm/scala/akka/remote/QuietReporter.scala b/akka-remote-tests/src/test/scala/akka/remote/QuietReporter.scala similarity index 100% rename from akka-remote-tests/src/multi-jvm/scala/akka/remote/QuietReporter.scala rename to akka-remote-tests/src/test/scala/akka/remote/QuietReporter.scala diff --git a/akka-remote-tests/src/test/scala/akka/remote/testkit/LogRoleReplace.scala b/akka-remote-tests/src/test/scala/akka/remote/testkit/LogRoleReplace.scala index 5dd41365bf..51a189b7f9 100644 --- a/akka-remote-tests/src/test/scala/akka/remote/testkit/LogRoleReplace.scala +++ b/akka-remote-tests/src/test/scala/akka/remote/testkit/LogRoleReplace.scala @@ -1,3 +1,7 @@ +/** + * Copyright (C) 2009-2012 Typesafe Inc. 
+ */ + package akka.remote.testkit import java.awt.Toolkit diff --git a/akka-remote-tests/src/test/scala/akka/remote/testkit/STMultiNodeSpec.scala b/akka-remote-tests/src/test/scala/akka/remote/testkit/STMultiNodeSpec.scala new file mode 100644 index 0000000000..e10604d1ac --- /dev/null +++ b/akka-remote-tests/src/test/scala/akka/remote/testkit/STMultiNodeSpec.scala @@ -0,0 +1,19 @@ +/** + * Copyright (C) 2009-2012 Typesafe Inc. + */ + +package akka.remote.testkit + +import org.scalatest.{ BeforeAndAfterAll, WordSpec } +import org.scalatest.matchers.MustMatchers + +/** + * Hooks up MultiNodeSpec with ScalaTest + */ +trait STMultiNodeSpec extends MultiNodeSpecCallbacks with WordSpec with MustMatchers with BeforeAndAfterAll { + + override def beforeAll() = multiNodeSpecBeforeAll() + + override def afterAll() = multiNodeSpecAfterAll() + +} diff --git a/akka-remote/src/main/resources/reference.conf b/akka-remote/src/main/resources/reference.conf index 66f6b660ba..4af8f4478f 100644 --- a/akka-remote/src/main/resources/reference.conf +++ b/akka-remote/src/main/resources/reference.conf @@ -13,8 +13,8 @@ akka { actor { serializers { - proto = "akka.serialization.ProtobufSerializer" - daemon-create = "akka.serialization.DaemonMsgCreateSerializer" + proto = "akka.remote.serialization.ProtobufSerializer" + daemon-create = "akka.remote.serialization.DaemonMsgCreateSerializer" } diff --git a/akka-remote/src/main/scala/akka/remote/RemoteDeployer.scala b/akka-remote/src/main/scala/akka/remote/RemoteDeployer.scala index 16c65986ee..fbc9c7b913 100644 --- a/akka-remote/src/main/scala/akka/remote/RemoteDeployer.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteDeployer.scala @@ -5,6 +5,7 @@ package akka.remote import akka.actor._ import akka.routing._ +import akka.remote.routing._ import com.typesafe.config._ import akka.ConfigurationException diff --git a/akka-remote/src/main/scala/akka/remote/netty/NettySSLSupport.scala b/akka-remote/src/main/scala/akka/remote/netty/NettySSLSupport.scala index 83fdb781a7..bc4f39dca4 100644 --- a/akka-remote/src/main/scala/akka/remote/netty/NettySSLSupport.scala +++ b/akka-remote/src/main/scala/akka/remote/netty/NettySSLSupport.scala @@ -9,7 +9,7 @@ import javax.net.ssl.{ KeyManagerFactory, TrustManager, TrustManagerFactory, SSL import akka.remote.RemoteTransportException import akka.event.LoggingAdapter import java.io.{ IOException, FileNotFoundException, FileInputStream } -import akka.security.provider.AkkaProvider +import akka.remote.security.provider.AkkaProvider import java.security._ /** diff --git a/akka-remote/src/main/scala/akka/routing/RemoteRouterConfig.scala b/akka-remote/src/main/scala/akka/remote/routing/RemoteRouterConfig.scala similarity index 97% rename from akka-remote/src/main/scala/akka/routing/RemoteRouterConfig.scala rename to akka-remote/src/main/scala/akka/remote/routing/RemoteRouterConfig.scala index 1d05dae3bc..8a4e3bce7c 100644 --- a/akka-remote/src/main/scala/akka/routing/RemoteRouterConfig.scala +++ b/akka-remote/src/main/scala/akka/remote/routing/RemoteRouterConfig.scala @@ -1,8 +1,9 @@ /** * Copyright (C) 2009-2012 Typesafe Inc. 
*/ -package akka.routing +package akka.remote.routing +import akka.routing.{ Route, Router, RouterConfig, RouteeProvider, Resizer } import com.typesafe.config.ConfigFactory import akka.actor.ActorContext import akka.actor.ActorRef diff --git a/akka-remote/src/main/scala/akka/security/provider/AES128CounterInetRNG.scala b/akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterInetRNG.scala similarity index 97% rename from akka-remote/src/main/scala/akka/security/provider/AES128CounterInetRNG.scala rename to akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterInetRNG.scala index 9944f3d6d4..7019b4b1a3 100644 --- a/akka-remote/src/main/scala/akka/security/provider/AES128CounterInetRNG.scala +++ b/akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterInetRNG.scala @@ -1,7 +1,7 @@ /** * Copyright (C) 2009-2012 Typesafe Inc. */ -package akka.security.provider +package akka.remote.security.provider import org.uncommons.maths.random.{ AESCounterRNG } import SeedSize.Seed128 diff --git a/akka-remote/src/main/scala/akka/security/provider/AES128CounterSecureRNG.scala b/akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterSecureRNG.scala similarity index 97% rename from akka-remote/src/main/scala/akka/security/provider/AES128CounterSecureRNG.scala rename to akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterSecureRNG.scala index bd422a249b..acf936d46e 100644 --- a/akka-remote/src/main/scala/akka/security/provider/AES128CounterSecureRNG.scala +++ b/akka-remote/src/main/scala/akka/remote/security/provider/AES128CounterSecureRNG.scala @@ -1,7 +1,7 @@ /** * Copyright (C) 2009-2012 Typesafe Inc. */ -package akka.security.provider +package akka.remote.security.provider import org.uncommons.maths.random.{ AESCounterRNG, SecureRandomSeedGenerator } import SeedSize.Seed128 diff --git a/akka-remote/src/main/scala/akka/security/provider/AES256CounterInetRNG.scala b/akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterInetRNG.scala similarity index 97% rename from akka-remote/src/main/scala/akka/security/provider/AES256CounterInetRNG.scala rename to akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterInetRNG.scala index 4c7de74990..edd6b3cab5 100644 --- a/akka-remote/src/main/scala/akka/security/provider/AES256CounterInetRNG.scala +++ b/akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterInetRNG.scala @@ -1,7 +1,7 @@ /** * Copyright (C) 2009-2012 Typesafe Inc. */ -package akka.security.provider +package akka.remote.security.provider import org.uncommons.maths.random.{ AESCounterRNG } import SeedSize.Seed256 diff --git a/akka-remote/src/main/scala/akka/security/provider/AES256CounterSecureRNG.scala b/akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterSecureRNG.scala similarity index 97% rename from akka-remote/src/main/scala/akka/security/provider/AES256CounterSecureRNG.scala rename to akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterSecureRNG.scala index 8650cd75c4..aadb2131e3 100644 --- a/akka-remote/src/main/scala/akka/security/provider/AES256CounterSecureRNG.scala +++ b/akka-remote/src/main/scala/akka/remote/security/provider/AES256CounterSecureRNG.scala @@ -1,7 +1,7 @@ /** * Copyright (C) 2009-2012 Typesafe Inc. 
*/ -package akka.security.provider +package akka.remote.security.provider import org.uncommons.maths.random.{ AESCounterRNG, SecureRandomSeedGenerator } import SeedSize.Seed256 diff --git a/akka-remote/src/main/scala/akka/security/provider/AkkaProvider.scala b/akka-remote/src/main/scala/akka/remote/security/provider/AkkaProvider.scala similarity index 97% rename from akka-remote/src/main/scala/akka/security/provider/AkkaProvider.scala rename to akka-remote/src/main/scala/akka/remote/security/provider/AkkaProvider.scala index 8cbebe4190..ed32cd04d8 100644 --- a/akka-remote/src/main/scala/akka/security/provider/AkkaProvider.scala +++ b/akka-remote/src/main/scala/akka/remote/security/provider/AkkaProvider.scala @@ -1,7 +1,7 @@ /** * Copyright (C) 2009-2012 Typesafe Inc. */ -package akka.security.provider +package akka.remote.security.provider import java.security.{ PrivilegedAction, AccessController, Provider, Security } diff --git a/akka-remote/src/main/scala/akka/security/provider/InternetSeedGenerator.scala b/akka-remote/src/main/scala/akka/remote/security/provider/InternetSeedGenerator.scala similarity index 98% rename from akka-remote/src/main/scala/akka/security/provider/InternetSeedGenerator.scala rename to akka-remote/src/main/scala/akka/remote/security/provider/InternetSeedGenerator.scala index e28cbf4f17..f049a4e678 100644 --- a/akka-remote/src/main/scala/akka/security/provider/InternetSeedGenerator.scala +++ b/akka-remote/src/main/scala/akka/remote/security/provider/InternetSeedGenerator.scala @@ -13,7 +13,7 @@ // See the License for the specific language governing permissions and // limitations under the License. // ============================================================================ -package akka.security.provider +package akka.remote.security.provider import org.uncommons.maths.random.{ SeedGenerator, SeedException, SecureRandomSeedGenerator, RandomDotOrgSeedGenerator, DevRandomSeedGenerator } diff --git a/akka-remote/src/main/scala/akka/security/provider/SeedSize.scala b/akka-remote/src/main/scala/akka/remote/security/provider/SeedSize.scala similarity index 91% rename from akka-remote/src/main/scala/akka/security/provider/SeedSize.scala rename to akka-remote/src/main/scala/akka/remote/security/provider/SeedSize.scala index c8c7c0e661..a659c924bf 100644 --- a/akka-remote/src/main/scala/akka/security/provider/SeedSize.scala +++ b/akka-remote/src/main/scala/akka/remote/security/provider/SeedSize.scala @@ -2,7 +2,7 @@ * Copyright (C) 2009-2012 Typesafe Inc. */ -package akka.security.provider +package akka.remote.security.provider /** * Internal API diff --git a/akka-remote/src/main/scala/akka/serialization/DaemonMsgCreateSerializer.scala b/akka-remote/src/main/scala/akka/remote/serialization/DaemonMsgCreateSerializer.scala similarity index 98% rename from akka-remote/src/main/scala/akka/serialization/DaemonMsgCreateSerializer.scala rename to akka-remote/src/main/scala/akka/remote/serialization/DaemonMsgCreateSerializer.scala index 19aabd398f..caa0e2d9af 100644 --- a/akka-remote/src/main/scala/akka/serialization/DaemonMsgCreateSerializer.scala +++ b/akka-remote/src/main/scala/akka/remote/serialization/DaemonMsgCreateSerializer.scala @@ -2,8 +2,9 @@ * Copyright (C) 2009-2012 Typesafe Inc. 
*/ -package akka.serialization +package akka.remote.serialization +import akka.serialization.{ Serializer, SerializationExtension } import java.io.Serializable import com.google.protobuf.ByteString import com.typesafe.config.{ Config, ConfigFactory } diff --git a/akka-remote/src/main/scala/akka/serialization/ProtobufSerializer.scala b/akka-remote/src/main/scala/akka/remote/serialization/ProtobufSerializer.scala similarity index 95% rename from akka-remote/src/main/scala/akka/serialization/ProtobufSerializer.scala rename to akka-remote/src/main/scala/akka/remote/serialization/ProtobufSerializer.scala index 77f6702a77..26bf6621ac 100644 --- a/akka-remote/src/main/scala/akka/serialization/ProtobufSerializer.scala +++ b/akka-remote/src/main/scala/akka/remote/serialization/ProtobufSerializer.scala @@ -2,8 +2,9 @@ * Copyright (C) 2009-2012 Typesafe Inc. */ -package akka.serialization +package akka.remote.serialization +import akka.serialization.{ Serializer, Serialization } import com.google.protobuf.Message import akka.actor.DynamicAccess import akka.remote.RemoteProtocol.ActorRefProtocol diff --git a/akka-remote/src/test/java/akka/actor/ProtobufProtocol.java b/akka-remote/src/test/java/akka/remote/ProtobufProtocol.java similarity index 86% rename from akka-remote/src/test/java/akka/actor/ProtobufProtocol.java rename to akka-remote/src/test/java/akka/remote/ProtobufProtocol.java index f29c9efee3..1b7f13a1da 100644 --- a/akka-remote/src/test/java/akka/actor/ProtobufProtocol.java +++ b/akka-remote/src/test/java/akka/remote/ProtobufProtocol.java @@ -1,7 +1,9 @@ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: ProtobufProtocol.proto -package akka.actor; +package akka.remote; + +import com.google.protobuf.AbstractMessage; public final class ProtobufProtocol { private ProtobufProtocol() {} @@ -43,12 +45,12 @@ public final class ProtobufProtocol { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return akka.actor.ProtobufProtocol.internal_static_akka_actor_MyMessage_descriptor; + return ProtobufProtocol.internal_static_akka_actor_MyMessage_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return akka.actor.ProtobufProtocol.internal_static_akka_actor_MyMessage_fieldAccessorTable; + return ProtobufProtocol.internal_static_akka_actor_MyMessage_fieldAccessorTable; } private int bitField0_; @@ -175,41 +177,41 @@ public final class ProtobufProtocol { return super.writeReplace(); } - public static akka.actor.ProtobufProtocol.MyMessage parseFrom( + public static ProtobufProtocol.MyMessage parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static akka.actor.ProtobufProtocol.MyMessage parseFrom( + public static ProtobufProtocol.MyMessage parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static akka.actor.ProtobufProtocol.MyMessage parseFrom(byte[] data) + public static ProtobufProtocol.MyMessage parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } - public static akka.actor.ProtobufProtocol.MyMessage parseFrom( + public static ProtobufProtocol.MyMessage parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } - public static akka.actor.ProtobufProtocol.MyMessage parseFrom(java.io.InputStream input) + public static ProtobufProtocol.MyMessage parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static akka.actor.ProtobufProtocol.MyMessage parseFrom( + public static ProtobufProtocol.MyMessage parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - public static akka.actor.ProtobufProtocol.MyMessage parseDelimitedFrom(java.io.InputStream input) + public static ProtobufProtocol.MyMessage parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { @@ -218,7 +220,7 @@ public final class ProtobufProtocol { return null; } } - public static akka.actor.ProtobufProtocol.MyMessage parseDelimitedFrom( + public static ProtobufProtocol.MyMessage parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -229,12 +231,12 @@ public final class ProtobufProtocol { return null; } } - public static akka.actor.ProtobufProtocol.MyMessage parseFrom( + public static ProtobufProtocol.MyMessage parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } - public static akka.actor.ProtobufProtocol.MyMessage parseFrom( + public static ProtobufProtocol.MyMessage parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -244,7 +246,7 @@ public final class ProtobufProtocol { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(akka.actor.ProtobufProtocol.MyMessage prototype) { + public static Builder newBuilder(ProtobufProtocol.MyMessage prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -257,15 +259,15 @@ public final class ProtobufProtocol { } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder - implements akka.actor.ProtobufProtocol.MyMessageOrBuilder { + implements ProtobufProtocol.MyMessageOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return akka.actor.ProtobufProtocol.internal_static_akka_actor_MyMessage_descriptor; + return ProtobufProtocol.internal_static_akka_actor_MyMessage_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return akka.actor.ProtobufProtocol.internal_static_akka_actor_MyMessage_fieldAccessorTable; + return ProtobufProtocol.internal_static_akka_actor_MyMessage_fieldAccessorTable; } // Construct using akka.actor.ProtobufProtocol.MyMessage.newBuilder() @@ -302,33 +304,33 @@ public final class ProtobufProtocol { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return akka.actor.ProtobufProtocol.MyMessage.getDescriptor(); + return ProtobufProtocol.MyMessage.getDescriptor(); } - public akka.actor.ProtobufProtocol.MyMessage 
getDefaultInstanceForType() { - return akka.actor.ProtobufProtocol.MyMessage.getDefaultInstance(); + public ProtobufProtocol.MyMessage getDefaultInstanceForType() { + return ProtobufProtocol.MyMessage.getDefaultInstance(); } - public akka.actor.ProtobufProtocol.MyMessage build() { - akka.actor.ProtobufProtocol.MyMessage result = buildPartial(); + public ProtobufProtocol.MyMessage build() { + ProtobufProtocol.MyMessage result = buildPartial(); if (!result.isInitialized()) { - throw newUninitializedMessageException(result); + throw AbstractMessage.Builder.newUninitializedMessageException(result); } return result; } - private akka.actor.ProtobufProtocol.MyMessage buildParsed() + private ProtobufProtocol.MyMessage buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { - akka.actor.ProtobufProtocol.MyMessage result = buildPartial(); + ProtobufProtocol.MyMessage result = buildPartial(); if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); + throw AbstractMessage.Builder.newUninitializedMessageException( + result).asInvalidProtocolBufferException(); } return result; } - public akka.actor.ProtobufProtocol.MyMessage buildPartial() { - akka.actor.ProtobufProtocol.MyMessage result = new akka.actor.ProtobufProtocol.MyMessage(this); + public ProtobufProtocol.MyMessage buildPartial() { + ProtobufProtocol.MyMessage result = new ProtobufProtocol.MyMessage(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { @@ -349,16 +351,16 @@ public final class ProtobufProtocol { } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof akka.actor.ProtobufProtocol.MyMessage) { - return mergeFrom((akka.actor.ProtobufProtocol.MyMessage)other); + if (other instanceof ProtobufProtocol.MyMessage) { + return mergeFrom((ProtobufProtocol.MyMessage)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(akka.actor.ProtobufProtocol.MyMessage other) { - if (other == akka.actor.ProtobufProtocol.MyMessage.getDefaultInstance()) return this; + public Builder mergeFrom(ProtobufProtocol.MyMessage other) { + if (other == ProtobufProtocol.MyMessage.getDefaultInstance()) return this; if (other.hasId()) { setId(other.getId()); } @@ -550,8 +552,8 @@ public final class ProtobufProtocol { com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_akka_actor_MyMessage_descriptor, new java.lang.String[] { "Id", "Name", "Status", }, - akka.actor.ProtobufProtocol.MyMessage.class, - akka.actor.ProtobufProtocol.MyMessage.Builder.class); + ProtobufProtocol.MyMessage.class, + ProtobufProtocol.MyMessage.Builder.class); return null; } }; diff --git a/akka-remote/src/test/scala/akka/remote/RemoteRouterSpec.scala b/akka-remote/src/test/scala/akka/remote/RemoteRouterSpec.scala index 2c44ece031..ab7fb23356 100644 --- a/akka-remote/src/test/scala/akka/remote/RemoteRouterSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/RemoteRouterSpec.scala @@ -6,6 +6,7 @@ package akka.remote import akka.testkit._ import akka.routing._ import akka.actor._ +import akka.remote.routing._ import com.typesafe.config._ object RemoteRouterSpec { diff --git a/akka-remote/src/test/scala/akka/serialization/DaemonMsgCreateSerializerSpec.scala b/akka-remote/src/test/scala/akka/remote/serialization/DaemonMsgCreateSerializerSpec.scala similarity index 97% rename from akka-remote/src/test/scala/akka/serialization/DaemonMsgCreateSerializerSpec.scala rename to 
akka-remote/src/test/scala/akka/remote/serialization/DaemonMsgCreateSerializerSpec.scala index 38a50ea886..2c80c99615 100644 --- a/akka-remote/src/test/scala/akka/serialization/DaemonMsgCreateSerializerSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/serialization/DaemonMsgCreateSerializerSpec.scala @@ -2,10 +2,11 @@ * Copyright (C) 2009-2012 Typesafe Inc. */ -package akka.serialization +package akka.remote.serialization import language.postfixOps +import akka.serialization.SerializationExtension import com.typesafe.config.ConfigFactory import akka.testkit.AkkaSpec import akka.actor.{ Actor, Address, Props, Deploy, OneForOneStrategy, SupervisorStrategy, FromClassCreator } diff --git a/akka-remote/src/test/scala/akka/serialization/ProtobufSerializerSpec.scala b/akka-remote/src/test/scala/akka/remote/serialization/ProtobufSerializerSpec.scala similarity index 81% rename from akka-remote/src/test/scala/akka/serialization/ProtobufSerializerSpec.scala rename to akka-remote/src/test/scala/akka/remote/serialization/ProtobufSerializerSpec.scala index 474ef485d7..f9fac112bd 100644 --- a/akka-remote/src/test/scala/akka/serialization/ProtobufSerializerSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/serialization/ProtobufSerializerSpec.scala @@ -2,11 +2,12 @@ * Copyright (C) 2009-2012 Typesafe Inc. */ -package akka.serialization +package akka.remote.serialization +import akka.serialization.SerializationExtension import akka.testkit.AkkaSpec import akka.remote.RemoteProtocol.MessageProtocol -import akka.actor.ProtobufProtocol.MyMessage +import akka.remote.ProtobufProtocol.MyMessage @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class ProtobufSerializerSpec extends AkkaSpec { diff --git a/akka-samples/akka-sample-cluster/src/main/resources/application.conf b/akka-samples/akka-sample-cluster/src/main/resources/application.conf index dc0f09445c..62554a65cf 100644 --- a/akka-samples/akka-sample-cluster/src/main/resources/application.conf +++ b/akka-samples/akka-sample-cluster/src/main/resources/application.conf @@ -11,7 +11,7 @@ akka { } } - extensions = ["akka.cluster.Cluster$"] + extensions = ["akka.cluster.Cluster"] cluster { seed-nodes = [ diff --git a/akka-samples/akka-sample-cluster/src/multi-jvm/scala/sample/cluster/stats/StatsSampleSingleMasterSpec.scala b/akka-samples/akka-sample-cluster/src/multi-jvm/scala/sample/cluster/stats/StatsSampleSingleMasterSpec.scala index 9c7315c902..e23504d084 100644 --- a/akka-samples/akka-sample-cluster/src/multi-jvm/scala/sample/cluster/stats/StatsSampleSingleMasterSpec.scala +++ b/akka-samples/akka-sample-cluster/src/multi-jvm/scala/sample/cluster/stats/StatsSampleSingleMasterSpec.scala @@ -16,6 +16,7 @@ import akka.cluster.ClusterEvent.CurrentClusterState import akka.cluster.ClusterEvent.MemberUp import akka.remote.testkit.MultiNodeConfig import akka.remote.testkit.MultiNodeSpec +import akka.remote.testkit.STMultiNodeSpec import akka.testkit.ImplicitSender object StatsSampleSingleMasterSpec extends MultiNodeConfig { @@ -55,7 +56,7 @@ class StatsSampleSingleMasterMultiJvmNode2 extends StatsSampleSingleMasterSpec class StatsSampleSingleMasterMultiJvmNode3 extends StatsSampleSingleMasterSpec abstract class StatsSampleSingleMasterSpec extends MultiNodeSpec(StatsSampleSingleMasterSpec) - with ImplicitSender { + with STMultiNodeSpec with ImplicitSender { import StatsSampleSpec._ diff --git a/akka-samples/akka-sample-cluster/src/multi-jvm/scala/sample/cluster/stats/StatsSampleSpec.scala 
b/akka-samples/akka-sample-cluster/src/multi-jvm/scala/sample/cluster/stats/StatsSampleSpec.scala index 1b0a9e08b8..7398aa025b 100644 --- a/akka-samples/akka-sample-cluster/src/multi-jvm/scala/sample/cluster/stats/StatsSampleSpec.scala +++ b/akka-samples/akka-sample-cluster/src/multi-jvm/scala/sample/cluster/stats/StatsSampleSpec.scala @@ -13,6 +13,7 @@ import akka.cluster.ClusterEvent.CurrentClusterState import akka.cluster.ClusterEvent.MemberUp import akka.remote.testkit.MultiNodeConfig import akka.remote.testkit.MultiNodeSpec +import akka.remote.testkit.STMultiNodeSpec import akka.testkit.ImplicitSender object StatsSampleSpec extends MultiNodeConfig { @@ -51,7 +52,7 @@ class StatsSampleMultiJvmNode2 extends StatsSampleSpec class StatsSampleMultiJvmNode3 extends StatsSampleSpec abstract class StatsSampleSpec extends MultiNodeSpec(StatsSampleSpec) - with ImplicitSender { + with STMultiNodeSpec with ImplicitSender { import StatsSampleSpec._ diff --git a/akka-samples/akka-sample-cluster/src/multi-jvm/scala/sample/cluster/transformation/TransformationSampleSpec.scala b/akka-samples/akka-sample-cluster/src/multi-jvm/scala/sample/cluster/transformation/TransformationSampleSpec.scala index a18a4a4896..98b4cb6526 100644 --- a/akka-samples/akka-sample-cluster/src/multi-jvm/scala/sample/cluster/transformation/TransformationSampleSpec.scala +++ b/akka-samples/akka-sample-cluster/src/multi-jvm/scala/sample/cluster/transformation/TransformationSampleSpec.scala @@ -9,6 +9,7 @@ import akka.actor.Props import akka.cluster.Cluster import akka.remote.testkit.MultiNodeConfig import akka.remote.testkit.MultiNodeSpec +import akka.remote.testkit.STMultiNodeSpec import akka.testkit.ImplicitSender object TransformationSampleSpec extends MultiNodeConfig { @@ -37,7 +38,7 @@ class TransformationSampleMultiJvmNode4 extends TransformationSampleSpec class TransformationSampleMultiJvmNode5 extends TransformationSampleSpec abstract class TransformationSampleSpec extends MultiNodeSpec(TransformationSampleSpec) - with ImplicitSender { + with STMultiNodeSpec with ImplicitSender { import TransformationSampleSpec._ diff --git a/akka-samples/akka-sample-fsm/src/main/scala/Buncher.scala b/akka-samples/akka-sample-fsm/src/main/scala/Buncher.scala index fe1b31ce45..64dc611396 100644 --- a/akka-samples/akka-sample-fsm/src/main/scala/Buncher.scala +++ b/akka-samples/akka-sample-fsm/src/main/scala/Buncher.scala @@ -7,6 +7,7 @@ import akka.actor.ActorRefFactory import scala.reflect.ClassTag import scala.concurrent.util.Duration import akka.actor.{ FSM, Actor, ActorRef } +import scala.concurrent.util.FiniteDuration /* * generic typed object buncher. 
@@ -35,7 +36,7 @@ object GenericBuncher { } } -abstract class GenericBuncher[A: ClassTag, B](val singleTimeout: Duration, val multiTimeout: Duration) +abstract class GenericBuncher[A: ClassTag, B](val singleTimeout: FiniteDuration, val multiTimeout: FiniteDuration) extends Actor with FSM[GenericBuncher.State, B] { import GenericBuncher._ import FSM._ @@ -85,7 +86,7 @@ object Buncher { val Flush = GenericBuncher.Flush } -class Buncher[A: ClassTag](singleTimeout: Duration, multiTimeout: Duration) +class Buncher[A: ClassTag](singleTimeout: FiniteDuration, multiTimeout: FiniteDuration) extends GenericBuncher[A, List[A]](singleTimeout, multiTimeout) { import Buncher._ diff --git a/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnFsm.scala b/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnFsm.scala index e97c76e6af..902eb797d2 100644 --- a/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnFsm.scala +++ b/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnFsm.scala @@ -4,11 +4,11 @@ package sample.fsm.dining.fsm import language.postfixOps - import akka.actor._ import akka.actor.FSM._ import scala.concurrent.util.Duration import scala.concurrent.util.duration._ +import scala.concurrent.util.FiniteDuration /* * Some messages for the chopstick @@ -159,7 +159,7 @@ class FSMHakker(name: String, left: ActorRef, right: ActorRef) extends Actor wit // Initialize the hakker initialize - private def startThinking(duration: Duration): State = { + private def startThinking(duration: FiniteDuration): State = { goto(Thinking) using TakenChopsticks(None, None) forMax duration } } diff --git a/akka-testkit/src/main/java/akka/testkit/JavaTestKit.java b/akka-testkit/src/main/java/akka/testkit/JavaTestKit.java index e8ce90a9ed..88fe0d940e 100644 --- a/akka-testkit/src/main/java/akka/testkit/JavaTestKit.java +++ b/akka-testkit/src/main/java/akka/testkit/JavaTestKit.java @@ -11,6 +11,7 @@ import akka.event.Logging.LogEvent; import akka.japi.JavaPartialFunction; import akka.japi.Util; import scala.concurrent.util.Duration; +import scala.concurrent.util.FiniteDuration; /** * Java API for the TestProbe. Proper JavaDocs to come once JavaDoccing is implemented. 
@@ -30,8 +31,10 @@ public class JavaTestKit { return p.system(); } - static public Duration duration(String s) { - return Duration.parse(s); + static public FiniteDuration duration(String s) { + final Duration ret = Duration.apply(s); + if (ret instanceof FiniteDuration) return (FiniteDuration) ret; + else throw new IllegalArgumentException("duration() is only for finite durations, use Duration.Inf() and friends"); } public Duration dilated(Duration d) { @@ -58,11 +61,11 @@ public class JavaTestKit { p.lastMessage().sender().tell(msg, p.ref()); } - public Duration getRemainingTime() { + public FiniteDuration getRemainingTime() { return p.remaining(); } - public Duration getRemainingTimeOr(Duration def) { + public FiniteDuration getRemainingTimeOr(FiniteDuration def) { return p.remainingOr(def); } @@ -97,7 +100,7 @@ public class JavaTestKit { public abstract class Within { protected abstract void run(); - public Within(Duration max) { + public Within(FiniteDuration max) { p.within(max, new AbstractFunction0() { public Object apply() { run(); @@ -106,7 +109,7 @@ public class JavaTestKit { }); } - public Within(Duration min, Duration max) { + public Within(FiniteDuration min, FiniteDuration max) { p.within(min, max, new AbstractFunction0() { public Object apply() { run(); @@ -168,7 +171,7 @@ public class JavaTestKit { return p.expectMsg(msg); } - public T expectMsgEquals(Duration max, T msg) { + public T expectMsgEquals(FiniteDuration max, T msg) { return p.expectMsg(max, msg); } @@ -176,7 +179,7 @@ public class JavaTestKit { return p.expectMsgClass(clazz); } - public T expectMsgClass(Duration max, Class clazz) { + public T expectMsgClass(FiniteDuration max, Class clazz) { return p.expectMsgClass(max, clazz); } @@ -184,7 +187,7 @@ public class JavaTestKit { return p.expectMsgAnyOf(Util.arrayToSeq(msgs)); } - public Object expectMsgAnyOf(Duration max, Object... msgs) { + public Object expectMsgAnyOf(FiniteDuration max, Object... msgs) { return p.expectMsgAnyOf(max, Util.arrayToSeq(msgs)); } @@ -193,7 +196,7 @@ public class JavaTestKit { Util.classTag(Object.class)); } - public Object[] expectMsgAllOf(Duration max, Object... msgs) { + public Object[] expectMsgAllOf(FiniteDuration max, Object... msgs) { return (Object[]) p.expectMsgAllOf(max, Util.arrayToSeq(msgs)).toArray( Util.classTag(Object.class)); } @@ -204,7 +207,7 @@ public class JavaTestKit { return (T) result; } - public Object expectMsgAnyClassOf(Duration max, Class... classes) { + public Object expectMsgAnyClassOf(FiniteDuration max, Class... 
classes) { return p.expectMsgAnyClassOf(max, Util.arrayToSeq(classes)); } @@ -212,7 +215,7 @@ public class JavaTestKit { p.expectNoMsg(); } - public void expectNoMsg(Duration max) { + public void expectNoMsg(FiniteDuration max) { p.expectNoMsg(max); } diff --git a/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala b/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala index dd13c22309..929838a8b5 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala @@ -7,6 +7,7 @@ package akka.testkit import scala.concurrent.util.Duration import java.util.concurrent.{ CyclicBarrier, TimeUnit, TimeoutException } import akka.actor.ActorSystem +import scala.concurrent.util.FiniteDuration class TestBarrierTimeoutException(message: String) extends RuntimeException(message) @@ -27,7 +28,7 @@ class TestBarrier(count: Int) { def await()(implicit system: ActorSystem): Unit = await(TestBarrier.DefaultTimeout) - def await(timeout: Duration)(implicit system: ActorSystem) { + def await(timeout: FiniteDuration)(implicit system: ActorSystem) { try { barrier.await(timeout.dilated.toNanos, TimeUnit.NANOSECONDS) } catch { diff --git a/akka-testkit/src/main/scala/akka/testkit/TestFSMRef.scala b/akka-testkit/src/main/scala/akka/testkit/TestFSMRef.scala index bab4601587..90f3b2f545 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestFSMRef.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestFSMRef.scala @@ -7,6 +7,7 @@ package akka.testkit import akka.actor._ import scala.concurrent.util.Duration import akka.dispatch.DispatcherPrerequisites +import scala.concurrent.util.FiniteDuration /** * This is a specialised form of the TestActorRef with support for querying and @@ -59,14 +60,14 @@ class TestFSMRef[S, D, T <: Actor]( * corresponding transition initiated from within the FSM, including timeout * and stop handling. */ - def setState(stateName: S = fsm.stateName, stateData: D = fsm.stateData, timeout: Duration = null, stopReason: Option[FSM.Reason] = None) { + def setState(stateName: S = fsm.stateName, stateData: D = fsm.stateData, timeout: FiniteDuration = null, stopReason: Option[FSM.Reason] = None) { fsm.applyState(FSM.State(stateName, stateData, Option(timeout), stopReason)) } /** * Proxy for FSM.setTimer. */ - def setTimer(name: String, msg: Any, timeout: Duration, repeat: Boolean) { + def setTimer(name: String, msg: Any, timeout: FiniteDuration, repeat: Boolean) { fsm.setTimer(name, msg, timeout, repeat) } diff --git a/akka-testkit/src/main/scala/akka/testkit/TestKit.scala b/akka-testkit/src/main/scala/akka/testkit/TestKit.scala index a1721d2ffe..a187dfbeef 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestKit.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestKit.scala @@ -7,7 +7,7 @@ import language.postfixOps import akka.actor._ import akka.actor.Actor._ -import scala.concurrent.util.Duration +import scala.concurrent.util.{ Duration, FiniteDuration } import scala.concurrent.util.duration._ import java.util.concurrent.{ BlockingDeque, LinkedBlockingDeque, TimeUnit, atomic } import atomic.AtomicInteger @@ -173,20 +173,30 @@ trait TestKitBase { /** * Obtain current time (`System.nanoTime`) as Duration. 
*/ - def now: Duration = System.nanoTime.nanos + def now: FiniteDuration = System.nanoTime.nanos /** * Obtain time remaining for execution of the innermost enclosing `within` * block or missing that it returns the properly dilated default for this * case from settings (key "akka.test.single-expect-default"). */ - def remaining: Duration = remainingOr(testKitSettings.SingleExpectDefaultTimeout.dilated) + def remaining: FiniteDuration = remainingOr(testKitSettings.SingleExpectDefaultTimeout.dilated) /** * Obtain time remaining for execution of the innermost enclosing `within` * block or missing that it returns the given duration. */ - def remainingOr(duration: Duration): Duration = if (end == Duration.Undefined) duration else end - now + def remainingOr(duration: FiniteDuration): FiniteDuration = end match { + case x if x eq Duration.Undefined ⇒ duration + case x if !x.isFinite ⇒ throw new IllegalArgumentException("`end` cannot be infinite") + case f: FiniteDuration ⇒ (end - now).asInstanceOf[FiniteDuration] // RK FIXME after next Scala milestone + } + + private def remainingOrDilated(max: Duration): FiniteDuration = max match { + case x if x eq Duration.Undefined ⇒ remaining + case x if !x.isFinite ⇒ throw new IllegalArgumentException("max duration cannot be infinite") + case f: FiniteDuration ⇒ f.dilated + } /** * Query queue status. @@ -204,7 +214,7 @@ trait TestKitBase { * which uses the configuration entry "akka.test.timefactor". */ def awaitCond(p: ⇒ Boolean, max: Duration = Duration.Undefined, interval: Duration = 100.millis) { - val _max = if (max eq Duration.Undefined) remaining else max.dilated + val _max = remainingOrDilated(max) val stop = now + _max @tailrec @@ -235,7 +245,7 @@ trait TestKitBase { * } * */ - def within[T](min: Duration, max: Duration)(f: ⇒ T): T = { + def within[T](min: FiniteDuration, max: FiniteDuration)(f: ⇒ T): T = { val _max = max.dilated val start = now val rem = if (end == Duration.Undefined) Duration.Inf else end - start @@ -261,7 +271,7 @@ trait TestKitBase { /** * Same as calling `within(0 seconds, max)(f)`. */ - def within[T](max: Duration)(f: ⇒ T): T = within(0 seconds, max)(f) + def within[T](max: FiniteDuration)(f: ⇒ T): T = within(0 seconds, max)(f) /** * Same as `expectMsg(remaining, obj)`, but correctly treating the timeFactor. 
@@ -275,7 +285,7 @@ trait TestKitBase { * * @return the received object */ - def expectMsg[T](max: Duration, obj: T): T = expectMsg_internal(max.dilated, obj) + def expectMsg[T](max: FiniteDuration, obj: T): T = expectMsg_internal(max.dilated, obj) private def expectMsg_internal[T](max: Duration, obj: T): T = { val o = receiveOne(max) @@ -295,7 +305,7 @@ trait TestKitBase { * @return the received object as transformed by the partial function */ def expectMsgPF[T](max: Duration = Duration.Undefined, hint: String = "")(f: PartialFunction[Any, T]): T = { - val _max = if (max eq Duration.Undefined) remaining else max.dilated + val _max = remainingOrDilated(max) val o = receiveOne(_max) assert(o ne null, "timeout (" + _max + ") during expectMsg: " + hint) assert(f.isDefinedAt(o), "expected: " + hint + " but got unexpected message " + o) @@ -311,7 +321,7 @@ trait TestKitBase { * partial function returned true */ def fishForMessage(max: Duration = Duration.Undefined, hint: String = "")(f: PartialFunction[Any, Boolean]): Any = { - val _max = if (max eq Duration.Undefined) remaining else max.dilated + val _max = remainingOrDilated(max) val end = now + _max @tailrec def recv: Any = { @@ -335,7 +345,7 @@ trait TestKitBase { * * @return the received object */ - def expectMsgType[T](max: Duration)(implicit t: ClassTag[T]): T = expectMsgClass_internal(max.dilated, t.runtimeClass.asInstanceOf[Class[T]]) + def expectMsgType[T](max: FiniteDuration)(implicit t: ClassTag[T]): T = expectMsgClass_internal(max.dilated, t.runtimeClass.asInstanceOf[Class[T]]) /** * Same as `expectMsgClass(remaining, c)`, but correctly treating the timeFactor. @@ -349,9 +359,9 @@ trait TestKitBase { * * @return the received object */ - def expectMsgClass[C](max: Duration, c: Class[C]): C = expectMsgClass_internal(max.dilated, c) + def expectMsgClass[C](max: FiniteDuration, c: Class[C]): C = expectMsgClass_internal(max.dilated, c) - private def expectMsgClass_internal[C](max: Duration, c: Class[C]): C = { + private def expectMsgClass_internal[C](max: FiniteDuration, c: Class[C]): C = { val o = receiveOne(max) assert(o ne null, "timeout (" + max + ") during expectMsgClass waiting for " + c) assert(BoxedType(c) isInstance o, "expected " + c + ", found " + o.getClass) @@ -370,9 +380,9 @@ trait TestKitBase { * * @return the received object */ - def expectMsgAnyOf[T](max: Duration, obj: T*): T = expectMsgAnyOf_internal(max.dilated, obj: _*) + def expectMsgAnyOf[T](max: FiniteDuration, obj: T*): T = expectMsgAnyOf_internal(max.dilated, obj: _*) - private def expectMsgAnyOf_internal[T](max: Duration, obj: T*): T = { + private def expectMsgAnyOf_internal[T](max: FiniteDuration, obj: T*): T = { val o = receiveOne(max) assert(o ne null, "timeout (" + max + ") during expectMsgAnyOf waiting for " + obj.mkString("(", ", ", ")")) assert(obj exists (_ == o), "found unexpected " + o) @@ -391,9 +401,9 @@ trait TestKitBase { * * @return the received object */ - def expectMsgAnyClassOf[C](max: Duration, obj: Class[_ <: C]*): C = expectMsgAnyClassOf_internal(max.dilated, obj: _*) + def expectMsgAnyClassOf[C](max: FiniteDuration, obj: Class[_ <: C]*): C = expectMsgAnyClassOf_internal(max.dilated, obj: _*) - private def expectMsgAnyClassOf_internal[C](max: Duration, obj: Class[_ <: C]*): C = { + private def expectMsgAnyClassOf_internal[C](max: FiniteDuration, obj: Class[_ <: C]*): C = { val o = receiveOne(max) assert(o ne null, "timeout (" + max + ") during expectMsgAnyClassOf waiting for " + obj.mkString("(", ", ", ")")) assert(obj exists (c ⇒ 
BoxedType(c) isInstance o), "found unexpected " + o) @@ -418,9 +428,9 @@ trait TestKitBase { * expectMsgAllOf(1 second, Result1(), Result2()) * */ - def expectMsgAllOf[T](max: Duration, obj: T*): Seq[T] = expectMsgAllOf_internal(max.dilated, obj: _*) + def expectMsgAllOf[T](max: FiniteDuration, obj: T*): Seq[T] = expectMsgAllOf_internal(max.dilated, obj: _*) - private def expectMsgAllOf_internal[T](max: Duration, obj: T*): Seq[T] = { + private def expectMsgAllOf_internal[T](max: FiniteDuration, obj: T*): Seq[T] = { val recv = receiveN_internal(obj.size, max) obj foreach (x ⇒ assert(recv exists (x == _), "not found " + x)) recv foreach (x ⇒ assert(obj exists (x == _), "found unexpected " + x)) @@ -440,9 +450,9 @@ trait TestKitBase { * Wait time is bounded by the given duration, with an AssertionFailure * being thrown in case of timeout. */ - def expectMsgAllClassOf[T](max: Duration, obj: Class[_ <: T]*): Seq[T] = internalExpectMsgAllClassOf(max.dilated, obj: _*) + def expectMsgAllClassOf[T](max: FiniteDuration, obj: Class[_ <: T]*): Seq[T] = internalExpectMsgAllClassOf(max.dilated, obj: _*) - private def internalExpectMsgAllClassOf[T](max: Duration, obj: Class[_ <: T]*): Seq[T] = { + private def internalExpectMsgAllClassOf[T](max: FiniteDuration, obj: Class[_ <: T]*): Seq[T] = { val recv = receiveN_internal(obj.size, max) obj foreach (x ⇒ assert(recv exists (_.getClass eq BoxedType(x)), "not found " + x)) recv foreach (x ⇒ assert(obj exists (c ⇒ BoxedType(c) eq x.getClass), "found non-matching object " + x)) @@ -465,9 +475,9 @@ trait TestKitBase { * Beware that one object may satisfy all given class constraints, which * may be counter-intuitive. */ - def expectMsgAllConformingOf[T](max: Duration, obj: Class[_ <: T]*): Seq[T] = internalExpectMsgAllConformingOf(max.dilated, obj: _*) + def expectMsgAllConformingOf[T](max: FiniteDuration, obj: Class[_ <: T]*): Seq[T] = internalExpectMsgAllConformingOf(max.dilated, obj: _*) - private def internalExpectMsgAllConformingOf[T](max: Duration, obj: Class[_ <: T]*): Seq[T] = { + private def internalExpectMsgAllConformingOf[T](max: FiniteDuration, obj: Class[_ <: T]*): Seq[T] = { val recv = receiveN_internal(obj.size, max) obj foreach (x ⇒ assert(recv exists (BoxedType(x) isInstance _), "not found " + x)) recv foreach (x ⇒ assert(obj exists (c ⇒ BoxedType(c) isInstance x), "found non-matching object " + x)) @@ -482,9 +492,9 @@ trait TestKitBase { /** * Assert that no message is received for the specified time. */ - def expectNoMsg(max: Duration) { expectNoMsg_internal(max.dilated) } + def expectNoMsg(max: FiniteDuration) { expectNoMsg_internal(max.dilated) } - private def expectNoMsg_internal(max: Duration) { + private def expectNoMsg_internal(max: FiniteDuration) { val o = receiveOne(max) assert(o eq null, "received unexpected message " + o) lastWasNoMsg = true @@ -509,7 +519,7 @@ trait TestKitBase { * */ def receiveWhile[T](max: Duration = Duration.Undefined, idle: Duration = Duration.Inf, messages: Int = Int.MaxValue)(f: PartialFunction[AnyRef, T]): Seq[T] = { - val stop = now + (if (max eq Duration.Undefined) remaining else max.dilated) + val stop = now + remainingOrDilated(max) var msg: Message = NullMessage @tailrec @@ -546,7 +556,7 @@ trait TestKitBase { /** * Receive N messages in a row before the given deadline. 
*/ - def receiveN(n: Int, max: Duration): Seq[AnyRef] = receiveN_internal(n, max.dilated) + def receiveN(n: Int, max: FiniteDuration): Seq[AnyRef] = receiveN_internal(n, max.dilated) private def receiveN_internal(n: Int, max: Duration): Seq[AnyRef] = { val stop = max + now diff --git a/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala b/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala index f045552d44..cedf351551 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala @@ -8,6 +8,7 @@ import scala.concurrent.util.Duration import akka.actor.ActorSystem import scala.concurrent.{ Await, CanAwait, Awaitable } import java.util.concurrent.{ TimeoutException, CountDownLatch, TimeUnit } +import scala.concurrent.util.FiniteDuration /** * A count down latch wrapper for use in testing. @@ -32,7 +33,11 @@ class TestLatch(count: Int = 1)(implicit system: ActorSystem) extends Awaitable[ @throws(classOf[TimeoutException]) def ready(atMost: Duration)(implicit permit: CanAwait) = { - val opened = latch.await(atMost.dilated.toNanos, TimeUnit.NANOSECONDS) + val waitTime = atMost match { + case f: FiniteDuration ⇒ f + case _ ⇒ throw new IllegalArgumentException("TestLatch does not support waiting for " + atMost) + } + val opened = latch.await(waitTime.dilated.toNanos, TimeUnit.NANOSECONDS) if (!opened) throw new TimeoutException( "Timeout of %s with time factor of %s" format (atMost.toString, TestKitExtension(system).TestTimeFactor)) this diff --git a/akka-testkit/src/main/scala/akka/testkit/package.scala b/akka-testkit/src/main/scala/akka/testkit/package.scala index e9d37d8e70..247cf9e17f 100644 --- a/akka-testkit/src/main/scala/akka/testkit/package.scala +++ b/akka-testkit/src/main/scala/akka/testkit/package.scala @@ -3,7 +3,7 @@ package akka import language.implicitConversions import akka.actor.ActorSystem -import scala.concurrent.util.Duration +import scala.concurrent.util.{ Duration, FiniteDuration } import java.util.concurrent.TimeUnit.MILLISECONDS import scala.reflect.ClassTag @@ -41,14 +41,15 @@ package object testkit { * * Corresponding Java API is available in TestKit.dilated */ - implicit def duration2TestDuration(duration: Duration) = new TestDuration(duration) + implicit def duration2TestDuration(duration: FiniteDuration) = new TestDuration(duration) /** * Wrapper for implicit conversion to add dilated function to Duration. 
*/ - class TestDuration(duration: Duration) { - def dilated(implicit system: ActorSystem): Duration = { - duration * TestKitExtension(system).TestTimeFactor + class TestDuration(duration: FiniteDuration) { + def dilated(implicit system: ActorSystem): FiniteDuration = { + // this cast will succeed unless TestTimeFactor is non-finite (which would be a misconfiguration) + (duration * TestKitExtension(system).TestTimeFactor).asInstanceOf[FiniteDuration] } } } diff --git a/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala b/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala index a1d32e019e..bd4de8b906 100644 --- a/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala +++ b/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala @@ -16,9 +16,6 @@ import java.util.concurrent.TimeoutException import akka.dispatch.Dispatchers import akka.pattern.ask -object TimingTest extends Tag("timing") -object LongRunningTest extends Tag("long-running") - object AkkaSpec { val testConf: Config = ConfigFactory.parseString(""" akka { @@ -36,7 +33,7 @@ object AkkaSpec { } } } - """) + """) def mapToConfig(map: Map[String, Any]): Config = { import scala.collection.JavaConverters._ @@ -73,7 +70,6 @@ abstract class AkkaSpec(_system: ActorSystem) } final override def afterAll { - beforeShutdown() system.shutdown() try system.awaitTermination(5 seconds) catch { case _: TimeoutException ⇒ @@ -85,95 +81,8 @@ abstract class AkkaSpec(_system: ActorSystem) protected def atStartup() {} - protected def beforeShutdown() {} - protected def atTermination() {} def spawn(dispatcherId: String = Dispatchers.DefaultDispatcherId)(body: ⇒ Unit): Unit = Future(body)(system.dispatchers.lookup(dispatcherId)) } - -@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) -class AkkaSpecSpec extends WordSpec with MustMatchers { - - "An AkkaSpec" must { - - "warn about unhandled messages" in { - implicit val system = ActorSystem("AkkaSpec0", AkkaSpec.testConf) - try { - val a = system.actorOf(Props.empty) - EventFilter.warning(start = "unhandled message", occurrences = 1) intercept { - a ! 42 - } - } finally { - system.shutdown() - } - } - - "terminate all actors" in { - // verbose config just for demonstration purposes, please leave in in case of debugging - import scala.collection.JavaConverters._ - val conf = Map( - "akka.actor.debug.lifecycle" -> true, "akka.actor.debug.event-stream" -> true, - "akka.loglevel" -> "DEBUG", "akka.stdout-loglevel" -> "DEBUG") - val system = ActorSystem("AkkaSpec1", ConfigFactory.parseMap(conf.asJava).withFallback(AkkaSpec.testConf)) - val spec = new AkkaSpec(system) { val ref = Seq(testActor, system.actorOf(Props.empty, "name")) } - spec.ref foreach (_.isTerminated must not be true) - system.shutdown() - spec.awaitCond(spec.ref forall (_.isTerminated), 2 seconds) - } - - "must stop correctly when sending PoisonPill to rootGuardian" in { - val system = ActorSystem("AkkaSpec2", AkkaSpec.testConf) - val spec = new AkkaSpec(system) {} - val latch = new TestLatch(1)(system) - system.registerOnTermination(latch.countDown()) - - system.actorFor("/") ! PoisonPill - - Await.ready(latch, 2 seconds) - } - - "must enqueue unread messages from testActor to deadLetters" in { - val system, otherSystem = ActorSystem("AkkaSpec3", AkkaSpec.testConf) - - try { - var locker = Seq.empty[DeadLetter] - implicit val timeout = TestKitExtension(system).DefaultTimeout - implicit val davyJones = otherSystem.actorOf(Props(new Actor { - def receive = { - case m: DeadLetter ⇒ locker :+= m - case "Die!" ⇒ sender ! 
"finally gone"; context.stop(self) - } - }), "davyJones") - - system.eventStream.subscribe(davyJones, classOf[DeadLetter]) - - val probe = new TestProbe(system) - probe.ref ! 42 - /* - * this will ensure that the message is actually received, otherwise it - * may happen that the system.stop() suspends the testActor before it had - * a chance to put the message into its private queue - */ - probe.receiveWhile(1 second) { - case null ⇒ - } - - val latch = new TestLatch(1)(system) - system.registerOnTermination(latch.countDown()) - system.shutdown() - Await.ready(latch, 2 seconds) - Await.result(davyJones ? "Die!", timeout.duration) must be === "finally gone" - - // this will typically also contain log messages which were sent after the logger shutdown - locker must contain(DeadLetter(42, davyJones, probe.ref)) - } finally { - system.shutdown() - otherSystem.shutdown() - } - } - - } -} - diff --git a/akka-testkit/src/test/scala/akka/testkit/AkkaSpecSpec.scala b/akka-testkit/src/test/scala/akka/testkit/AkkaSpecSpec.scala new file mode 100644 index 0000000000..c8eee623f7 --- /dev/null +++ b/akka-testkit/src/test/scala/akka/testkit/AkkaSpecSpec.scala @@ -0,0 +1,101 @@ +/** + * Copyright (C) 2009-2012 Typesafe Inc. + */ +package akka.testkit + +import language.reflectiveCalls +import language.postfixOps + +import org.scalatest.WordSpec +import org.scalatest.matchers.MustMatchers +import akka.actor._ +import com.typesafe.config.ConfigFactory +import concurrent.Await +import scala.concurrent.util.duration._ +import akka.actor.DeadLetter +import akka.pattern.ask + +@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) +class AkkaSpecSpec extends WordSpec with MustMatchers { + + "An AkkaSpec" must { + + "warn about unhandled messages" in { + implicit val system = ActorSystem("AkkaSpec0", AkkaSpec.testConf) + try { + val a = system.actorOf(Props.empty) + EventFilter.warning(start = "unhandled message", occurrences = 1) intercept { + a ! 42 + } + } finally { + system.shutdown() + } + } + + "terminate all actors" in { + // verbose config just for demonstration purposes, please leave in in case of debugging + import scala.collection.JavaConverters._ + val conf = Map( + "akka.actor.debug.lifecycle" -> true, "akka.actor.debug.event-stream" -> true, + "akka.loglevel" -> "DEBUG", "akka.stdout-loglevel" -> "DEBUG") + val system = ActorSystem("AkkaSpec1", ConfigFactory.parseMap(conf.asJava).withFallback(AkkaSpec.testConf)) + val spec = new AkkaSpec(system) { + val ref = Seq(testActor, system.actorOf(Props.empty, "name")) + } + spec.ref foreach (_.isTerminated must not be true) + system.shutdown() + spec.awaitCond(spec.ref forall (_.isTerminated), 2 seconds) + } + + "must stop correctly when sending PoisonPill to rootGuardian" in { + val system = ActorSystem("AkkaSpec2", AkkaSpec.testConf) + val spec = new AkkaSpec(system) {} + val latch = new TestLatch(1)(system) + system.registerOnTermination(latch.countDown()) + + system.actorFor("/") ! PoisonPill + + Await.ready(latch, 2 seconds) + } + + "must enqueue unread messages from testActor to deadLetters" in { + val system, otherSystem = ActorSystem("AkkaSpec3", AkkaSpec.testConf) + + try { + var locker = Seq.empty[DeadLetter] + implicit val timeout = TestKitExtension(system).DefaultTimeout + implicit val davyJones = otherSystem.actorOf(Props(new Actor { + def receive = { + case m: DeadLetter ⇒ locker :+= m + case "Die!" ⇒ sender ! 
"finally gone"; context.stop(self) + } + }), "davyJones") + + system.eventStream.subscribe(davyJones, classOf[DeadLetter]) + + val probe = new TestProbe(system) + probe.ref ! 42 + /* + * this will ensure that the message is actually received, otherwise it + * may happen that the system.stop() suspends the testActor before it had + * a chance to put the message into its private queue + */ + probe.receiveWhile(1 second) { + case null ⇒ + } + + val latch = new TestLatch(1)(system) + system.registerOnTermination(latch.countDown()) + system.shutdown() + Await.ready(latch, 2 seconds) + Await.result(davyJones ? "Die!", timeout.duration) must be === "finally gone" + + // this will typically also contain log messages which were sent after the logger shutdown + locker must contain(DeadLetter(42, davyJones, probe.ref)) + } finally { + system.shutdown() + otherSystem.shutdown() + } + } + } +} diff --git a/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala b/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala index f033df302f..2e835b408f 100644 --- a/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala +++ b/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala @@ -4,7 +4,6 @@ package akka.testkit import language.{ postfixOps, reflectiveCalls } - import org.scalatest.matchers.MustMatchers import org.scalatest.{ BeforeAndAfterEach, WordSpec } import akka.actor._ @@ -14,6 +13,7 @@ import scala.concurrent.util.duration._ import akka.actor.ActorSystem import akka.pattern.ask import akka.dispatch.Dispatcher +import scala.concurrent.util.Duration /** * Test whether TestActorRef behaves as an ActorRef should, besides its own spec. @@ -244,7 +244,7 @@ class TestActorRefSpec extends AkkaSpec("disp1.type=Dispatcher") with BeforeAndA "set receiveTimeout to None" in { val a = TestActorRef[WorkerActor] - a.underlyingActor.context.receiveTimeout must be(None) + a.underlyingActor.context.receiveTimeout must be theSameInstanceAs Duration.Undefined } "set CallingThreadDispatcher" in { diff --git a/akka-testkit/src/test/scala/akka/testkit/TestTags.scala b/akka-testkit/src/test/scala/akka/testkit/TestTags.scala new file mode 100644 index 0000000000..df33b511e8 --- /dev/null +++ b/akka-testkit/src/test/scala/akka/testkit/TestTags.scala @@ -0,0 +1,9 @@ +/** + * Copyright (C) 2009-2012 Typesafe Inc. 
+ */ +package akka.testkit + +import org.scalatest.Tag + +object TimingTest extends Tag("timing") +object LongRunningTest extends Tag("long-running") diff --git a/akka-zeromq/src/main/scala/akka/zeromq/SocketOption.scala b/akka-zeromq/src/main/scala/akka/zeromq/SocketOption.scala index 1d393afef0..ea7fb82d07 100644 --- a/akka-zeromq/src/main/scala/akka/zeromq/SocketOption.scala +++ b/akka-zeromq/src/main/scala/akka/zeromq/SocketOption.scala @@ -9,6 +9,7 @@ import akka.actor.ActorRef import scala.concurrent.util.duration._ import scala.concurrent.util.Duration import org.zeromq.ZMQ.{ Poller, Socket } +import scala.concurrent.util.FiniteDuration /** * Marker trait representing request messages for zeromq @@ -152,7 +153,7 @@ case class PollDispatcher(name: String) extends SocketMeta * An option containing the duration a poll cycle should wait for a message before it loops * @param duration */ -case class PollTimeoutDuration(duration: Duration = 100 millis) extends SocketMeta +case class PollTimeoutDuration(duration: FiniteDuration = 100 millis) extends SocketMeta /** * Start listening with this server socket on the specified address diff --git a/project/AkkaBuild.scala b/project/AkkaBuild.scala index 7e5f6fb067..e60ec4606f 100644 --- a/project/AkkaBuild.scala +++ b/project/AkkaBuild.scala @@ -6,8 +6,8 @@ package akka import sbt._ import sbt.Keys._ -import com.typesafe.sbtmultijvm.MultiJvmPlugin -import com.typesafe.sbtmultijvm.MultiJvmPlugin.{ MultiJvm, extraOptions, jvmOptions, scalatestOptions, multiNodeExecuteTests, multiNodeJavaName } +import com.typesafe.sbt.SbtMultiJvm +import com.typesafe.sbt.SbtMultiJvm.MultiJvmKeys.{ MultiJvm, extraOptions, jvmOptions, scalatestOptions, multiNodeExecuteTests, multiNodeJavaName } import com.typesafe.sbtscalariform.ScalariformPlugin import com.typesafe.sbtscalariform.ScalariformPlugin.ScalariformKeys import com.typesafe.sbtosgi.OsgiPlugin.{ OsgiKeys, osgiSettings } @@ -36,7 +36,7 @@ object AkkaBuild extends Build { parallelExecution in GlobalScope := System.getProperty("akka.parallelExecution", "false").toBoolean, Publish.defaultPublishTo in ThisBuild <<= crossTarget / "repository", Unidoc.unidocExclude := Seq(samples.id), - Dist.distExclude := Seq(actorTests.id, akkaSbtPlugin.id, docs.id), + Dist.distExclude := Seq(actorTests.id, akkaSbtPlugin.id, docs.id, samples.id), initialCommands in ThisBuild := """|import language.postfixOps |import akka.actor._ @@ -69,7 +69,7 @@ object AkkaBuild extends Build { sphinxLatex <<= sphinxLatex in LocalProject(docs.id) map identity, sphinxPdf <<= sphinxPdf in LocalProject(docs.id) map identity ), - aggregate = Seq(actor, testkit, actorTests, dataflow, remote, remoteTests, camel, cluster, slf4j, agent, transactor, mailboxes, zeroMQ, kernel, akkaSbtPlugin, samples, osgi, osgiAries, docs) + aggregate = Seq(actor, testkit, actorTests, dataflow, remote, remoteTests, camel, cluster, slf4j, agent, transactor, mailboxes, zeroMQ, kernel, akkaSbtPlugin, osgi, osgiAries, docs) ) lazy val actor = Project( @@ -77,8 +77,6 @@ object AkkaBuild extends Build { base = file("akka-actor"), settings = defaultSettings ++ OSGi.actor ++ Seq( autoCompilerPlugins := true, - packagedArtifact in (Compile, packageBin) <<= (artifact in (Compile, packageBin), OsgiKeys.bundle).identityMap, - artifact in (Compile, packageBin) ~= (_.copy(`type` = "bundle")), // to fix scaladoc generation fullClasspath in doc in Compile <<= fullClasspath in Compile, libraryDependencies ++= Dependencies.actor, @@ -131,8 +129,9 @@ object AkkaBuild extends Build { lazy 
val remoteTests = Project( id = "akka-remote-tests-experimental", base = file("akka-remote-tests"), - dependencies = Seq(remote, actorTests % "test->test", testkit % "test->test"), + dependencies = Seq(remote, actorTests % "test->test", testkit), settings = defaultSettings ++ multiJvmSettings ++ Seq( + libraryDependencies ++= Dependencies.remoteTests, // disable parallel tests parallelExecution in Test := false, extraOptions in MultiJvm <<= (sourceDirectory in MultiJvm) { src => @@ -147,7 +146,7 @@ object AkkaBuild extends Build { lazy val cluster = Project( id = "akka-cluster-experimental", base = file("akka-cluster"), - dependencies = Seq(remote, remoteTests % "compile;test->test;multi-jvm->multi-jvm", testkit % "test->test"), + dependencies = Seq(remote, remoteTests % "test->test" , testkit % "test->test"), settings = defaultSettings ++ multiJvmSettings ++ OSGi.cluster ++ Seq( libraryDependencies ++= Dependencies.cluster, // disable parallel tests @@ -275,6 +274,8 @@ object AkkaBuild extends Build { base = file("akka-sbt-plugin"), settings = defaultSettings ++ Seq( sbtPlugin := true, + publishMavenStyle := false, // SBT Plugins should be published as Ivy + publishTo <<= Publish.akkaPluginPublishTo, scalacOptions in Compile := Seq("-encoding", "UTF-8", "-deprecation", "-unchecked"), scalaVersion := "2.9.1", scalaBinaryVersion <<= scalaVersion @@ -293,7 +294,8 @@ object AkkaBuild extends Build { base = file("akka-samples/akka-sample-camel"), dependencies = Seq(actor, camel), settings = defaultSettings ++ Seq( - libraryDependencies ++= Dependencies.camelSample + libraryDependencies ++= Dependencies.camelSample, + publishArtifact in Compile := false ) ) @@ -301,28 +303,28 @@ object AkkaBuild extends Build { id = "akka-sample-fsm", base = file("akka-samples/akka-sample-fsm"), dependencies = Seq(actor), - settings = defaultSettings + settings = defaultSettings ++ Seq( publishArtifact in Compile := false ) ) lazy val helloSample = Project( id = "akka-sample-hello", base = file("akka-samples/akka-sample-hello"), dependencies = Seq(actor), - settings = defaultSettings + settings = defaultSettings ++ Seq( publishArtifact in Compile := false ) ) lazy val helloKernelSample = Project( id = "akka-sample-hello-kernel", base = file("akka-samples/akka-sample-hello-kernel"), dependencies = Seq(kernel), - settings = defaultSettings + settings = defaultSettings ++ Seq( publishArtifact in Compile := false ) ) lazy val remoteSample = Project( id = "akka-sample-remote", base = file("akka-samples/akka-sample-remote"), dependencies = Seq(actor, remote, kernel), - settings = defaultSettings + settings = defaultSettings ++ Seq( publishArtifact in Compile := false ) ) lazy val clusterSample = Project( @@ -336,7 +338,8 @@ object AkkaBuild extends Build { (name: String) => (src ** (name + ".conf")).get.headOption.map("-Dakka.config=" + _.absolutePath).toSeq }, scalatestOptions in MultiJvm := defaultMultiJvmScalatestOptions, - jvmOptions in MultiJvm := defaultMultiJvmOptions + jvmOptions in MultiJvm := defaultMultiJvmOptions, + publishArtifact in Compile := false ) ) configs (MultiJvm) @@ -355,11 +358,17 @@ object AkkaBuild extends Build { // Settings - override lazy val settings = super.settings ++ buildSettings ++ Seq( - resolvers += "Scala Community 2.10.0-SNAPSHOT" at "https://scala-webapps.epfl.ch/jenkins/job/community-nightly/ws/target/repositories/8e83577d99af1d718fe369c4a4ee92737b9cf669", - resolvers += "Sonatype Snapshot Repo" at "https://oss.sonatype.org/content/repositories/snapshots/", - resolvers += 
"Sonatype Releases Repo" at "https://oss.sonatype.org/content/repositories/releases/", - shellPrompt := { s => Project.extract(s).currentProject.id + " > " } + override lazy val settings = + super.settings ++ + buildSettings ++ + Seq( + shellPrompt := { s => Project.extract(s).currentProject.id + " > " }, + resolvers <<= (resolvers, scalaVersion) apply { + case (res, "2.10.0-SNAPSHOT") => + res :+ ("Scala Community 2.10.0-SNAPSHOT" at "https://scala-webapps.epfl.ch/jenkins/job/community-nightly/ws/target/repositories/8e83577d99af1d718fe369c4a4ee92737b9cf669") + case (res, _) => + res + } ) lazy val baseSettings = Defaults.defaultSettings ++ Publish.settings @@ -424,8 +433,6 @@ object AkkaBuild extends Build { } lazy val defaultSettings = baseSettings ++ formatSettings ++ mimaSettings ++ Seq( - resolvers += "Typesafe Repo" at "http://repo.typesafe.com/typesafe/releases/", - // compile options scalacOptions in Compile ++= Seq("-encoding", "UTF-8", "-target:jvm-1.6", "-deprecation", "-feature", "-unchecked", "-Xlog-reflective-calls", "-Ywarn-adapted-args"), javacOptions in Compile ++= Seq("-source", "1.6", "-target", "1.6", "-Xlint:unchecked", "-Xlint:deprecation"), @@ -473,7 +480,7 @@ object AkkaBuild extends Build { .setPreference(AlignSingleLineCaseStatements, true) } - lazy val multiJvmSettings = MultiJvmPlugin.settings ++ inConfig(MultiJvm)(ScalariformPlugin.scalariformSettings) ++ Seq( + lazy val multiJvmSettings = SbtMultiJvm.multiJvmSettings ++ inConfig(MultiJvm)(ScalariformPlugin.scalariformSettings) ++ Seq( compileInputs in MultiJvm <<= (compileInputs in MultiJvm) dependsOn (ScalariformKeys.format in MultiJvm), compile in MultiJvm <<= (compile in MultiJvm) triggeredBy (compile in Test), ScalariformKeys.preferences in MultiJvm := formattingPreferences) ++ @@ -516,7 +523,7 @@ object AkkaBuild extends Build { val cluster = exports(Seq("akka.cluster.*")) - val fileMailbox = exports(Seq("akka.actor.mailbox.*")) + val fileMailbox = exports(Seq("akka.actor.mailbox.filebased.*")) val mailboxesCommon = exports(Seq("akka.actor.mailbox.*")) @@ -524,7 +531,7 @@ object AkkaBuild extends Build { val osgiAries = exports() ++ Seq(OsgiKeys.privatePackage := Seq("akka.osgi.aries.*")) - val remote = exports(Seq("akka.remote.*", "akka.routing.*", "akka.serialization.*")) + val remote = exports(Seq("akka.remote.*")) val slf4j = exports(Seq("akka.event.slf4j.*")) @@ -536,16 +543,13 @@ object AkkaBuild extends Build { def exports(packages: Seq[String] = Seq()) = osgiSettings ++ Seq( OsgiKeys.importPackage := defaultImports, - OsgiKeys.exportPackage := packages, - packagedArtifact in (Compile, packageBin) <<= (artifact in (Compile, packageBin), OsgiKeys.bundle).identityMap, - artifact in (Compile, packageBin) ~= (_.copy(`type` = "bundle")) + OsgiKeys.exportPackage := packages ) def defaultImports = Seq("!sun.misc", akkaImport(), configImport(), scalaImport(), "*") def akkaImport(packageName: String = "akka.*") = "%s;version=\"[2.1,2.2)\"".format(packageName) def configImport(packageName: String = "com.typesafe.config.*") = "%s;version=\"[0.4.1,0.5)\"".format(packageName) def scalaImport(packageName: String = "scala.*") = "%s;version=\"[2.10,2.11)\"".format(packageName) - } } @@ -556,12 +560,14 @@ object Dependencies { val actor = Seq(config) - val testkit = Seq(Test.scalatest, Test.junit) + val testkit = Seq(Test.junit, Test.scalatest) val actorTests = Seq(Test.junit, Test.scalatest, Test.commonsMath, Test.mockito, Test.scalacheck, protobuf) val remote = Seq(netty, protobuf, uncommonsMath, Test.junit, 
Test.scalatest) + val remoteTests = Seq(Test.junit, Test.scalatest) + val cluster = Seq(Test.junit, Test.scalatest) val slf4j = Seq(slf4jApi, Test.logback) diff --git a/project/Publish.scala b/project/Publish.scala index 3c1b3dc148..c1a315e2a5 100644 --- a/project/Publish.scala +++ b/project/Publish.scala @@ -74,6 +74,15 @@ object Publish { } } + def akkaPluginPublishTo: Initialize[Option[Resolver]] = { + (version) { version: String => + akkaPublishRepository orElse { + val name = if (version.contains("-SNAPSHOT")) "sbt-plugin-snapshots" else "sbt-plugin-releases" + Some(Resolver.url(name, url("http://scalasbt.artifactoryonline.com/scalasbt/" + name))(Resolver.ivyStylePatterns)) + } + } + } + def sonatypeRepo(version: String): Option[Resolver] = { Option(sys.props("publish.maven.central")) filter (_.toLowerCase == "true") map { _ => val nexus = "https://oss.sonatype.org/" @@ -83,15 +92,11 @@ object Publish { } - def akkaPublishRepository: Option[Resolver] = { - val property = Option(System.getProperty("akka.publish.repository")) - property map { "Akka Publish Repository" at _ } - } + def akkaPublishRepository: Option[Resolver] = + Option(System.getProperty("akka.publish.repository", null)) map { "Akka Publish Repository" at _ } - def akkaCredentials: Seq[Credentials] = { - val property = Option(System.getProperty("akka.publish.credentials")) - property map (f => Credentials(new File(f))) toSeq - } + def akkaCredentials: Seq[Credentials] = + Option(System.getProperty("akka.publish.credentials", null)) map (f => Credentials(new File(f))) toSeq // timestamped versions diff --git a/project/plugins.sbt b/project/plugins.sbt index 69ae35efef..d5d8cbcc0c 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,11 +1,11 @@ resolvers += Classpaths.typesafeResolver -addSbtPlugin("com.typesafe.sbtmultijvm" % "sbt-multi-jvm" % "0.2.0-M5" cross CrossVersion.full) +addSbtPlugin("com.typesafe.sbt" % "sbt-multi-jvm" % "0.3.0") addSbtPlugin("com.typesafe.sbtscalariform" % "sbtscalariform" % "0.4.0") -addSbtPlugin("com.typesafe.sbtosgi" % "sbtosgi" % "0.2.0") +addSbtPlugin("com.typesafe.sbtosgi" % "sbtosgi" % "0.3.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.3")
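The plugins.sbt hunk above moves the build onto the renamed "com.typesafe.sbt" % "sbt-multi-jvm" % "0.3.0" plugin, matching the switch from MultiJvmPlugin to SbtMultiJvm.multiJvmSettings in AkkaBuild.scala. As a hedged illustration only (the project name and layout below are hypothetical and not part of this patch), a downstream multi-JVM test project would wire the renamed plugin the same way the Akka build now does:

// project/plugins.sbt (the new coordinates introduced above):
//   addSbtPlugin("com.typesafe.sbt" % "sbt-multi-jvm" % "0.3.0")

// project/SampleBuild.scala -- hypothetical downstream build, sketched after
// the multiJvmSettings wiring in AkkaBuild.scala; all names are illustrative.
import sbt._
import sbt.Keys._
import com.typesafe.sbt.SbtMultiJvm
import com.typesafe.sbt.SbtMultiJvm.MultiJvmKeys.MultiJvm

object SampleBuild extends Build {
  lazy val sample = Project(
    id = "multi-jvm-sample",
    base = file("."),
    settings = Defaults.defaultSettings ++ SbtMultiJvm.multiJvmSettings ++ Seq(
      // compile the multi-jvm sources whenever the test sources compile,
      // mirroring the Akka build above
      compile in MultiJvm <<= (compile in MultiJvm) triggeredBy (compile in Test),
      // multi-node tests generally do not tolerate parallel execution
      parallelExecution in Test := false
    )
  ) configs (MultiJvm)
}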
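Most of the akka-testkit hunks narrow user-facing timeout parameters from Duration to FiniteDuration (TestKitBase, JavaTestKit, TestFSMRef, TestBarrier, TestLatch and the dilated conversion). The following is a minimal sketch of what that means for calling code, assuming the 2.1-era scala.concurrent.util duration API used throughout this patch; the EchoActor and system name are illustrative, not part of the change set:

import akka.actor.{ Actor, ActorSystem, Props }
import akka.testkit._
import scala.concurrent.util.duration._
import scala.concurrent.util.FiniteDuration

class EchoActor extends Actor {
  def receive = { case msg ⇒ sender ! msg }
}

object FiniteDurationSketch extends App {
  val system = ActorSystem("finite-duration-sketch")
  new TestKit(system) with ImplicitSender {
    val echo = system.actorOf(Props[EchoActor], "echo")
    // 3.seconds is a FiniteDuration, so it satisfies the narrowed signatures;
    // an infinite bound such as Duration.Inf would no longer compile here.
    within(3.seconds) {
      echo ! "ping"
      expectMsg("ping")
      // remaining is now FiniteDuration by type, no runtime finiteness check needed
      val left: FiniteDuration = remaining
      assert(left <= 3.seconds)
      expectNoMsg(100.millis)
    }
  }
  system.shutdown()
}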
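Similarly, the testkit package object now converts only FiniteDuration to TestDuration, and dilated multiplies by the configured test time factor while keeping the finite type. A small sketch of the effect, assuming a finite akka.test.timefactor; the system name is illustrative:

import akka.actor.ActorSystem
import akka.testkit._
import com.typesafe.config.ConfigFactory
import scala.concurrent.util.duration._

object DilationSketch extends App {
  // a time factor of 3.0 stretches every dilated timeout threefold
  implicit val system = ActorSystem("dilation-sketch",
    ConfigFactory.parseString("akka.test.timefactor = 3.0"))
  // 1 second * 3.0 is still finite, which is what the cast in dilated relies on
  println(1.second.dilated) // expected to print: 3 seconds
  system.shutdown()
}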