From 392c060530fa19786d4c7807babda80c534779fa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jonas=20Bone=CC=81r?= Date: Mon, 5 Dec 2011 11:16:29 +0100 Subject: [PATCH 01/34] Simplified multi-jvm test by adding all settings and config into the test source itself --- .../scala/akka/remote/AkkaRemoteSpec.scala | 2 +- .../DirectRoutedRemoteActorMultiJvmSpec.scala | 41 ++++-- .../remote/GossipMembershipMultiJvmSpec.scala | 134 ++++++++++++++++++ .../remote/NewRemoteActorMultiJvmSpec.scala | 82 +++++++++++ .../RandomRoutedRemoteActorMultiJvmSpec.scala | 49 ++++++- ...ndRobinRoutedRemoteActorMultiJvmSpec.scala | 49 ++++++- ...rGatherRoutedRemoteActorMultiJvmSpec.scala | 49 ++++++- .../DirectRoutedRemoteActorMultiJvmNode1.conf | 11 -- .../DirectRoutedRemoteActorMultiJvmNode1.opts | 1 - .../DirectRoutedRemoteActorMultiJvmNode2.conf | 11 -- .../DirectRoutedRemoteActorMultiJvmNode2.opts | 1 - .../NewRemoteActorMultiJvmNode1.conf | 9 -- .../NewRemoteActorMultiJvmNode1.opts | 1 - .../NewRemoteActorMultiJvmNode2.conf | 9 -- .../NewRemoteActorMultiJvmNode2.opts | 1 - .../NewRemoteActorMultiJvmSpec.scala | 57 -------- .../RandomRoutedRemoteActorMultiJvmNode1.conf | 11 -- .../RandomRoutedRemoteActorMultiJvmNode1.opts | 1 - .../RandomRoutedRemoteActorMultiJvmNode2.conf | 9 -- .../RandomRoutedRemoteActorMultiJvmNode2.opts | 1 - .../RandomRoutedRemoteActorMultiJvmNode3.conf | 11 -- .../RandomRoutedRemoteActorMultiJvmNode3.opts | 1 - .../RandomRoutedRemoteActorMultiJvmNode4.conf | 11 -- .../RandomRoutedRemoteActorMultiJvmNode4.opts | 1 - ...ndRobinRoutedRemoteActorMultiJvmNode1.conf | 11 -- ...ndRobinRoutedRemoteActorMultiJvmNode1.opts | 1 - ...ndRobinRoutedRemoteActorMultiJvmNode2.conf | 11 -- ...ndRobinRoutedRemoteActorMultiJvmNode2.opts | 1 - ...ndRobinRoutedRemoteActorMultiJvmNode3.conf | 11 -- ...ndRobinRoutedRemoteActorMultiJvmNode3.opts | 1 - ...ndRobinRoutedRemoteActorMultiJvmNode4.conf | 11 -- ...ndRobinRoutedRemoteActorMultiJvmNode4.opts | 1 - ...rGatherRoutedRemoteActorMultiJvmNode1.conf | 11 -- ...rGatherRoutedRemoteActorMultiJvmNode1.opts | 1 - ...rGatherRoutedRemoteActorMultiJvmNode2.conf | 11 -- ...rGatherRoutedRemoteActorMultiJvmNode2.opts | 1 - ...rGatherRoutedRemoteActorMultiJvmNode3.conf | 11 -- ...rGatherRoutedRemoteActorMultiJvmNode3.opts | 1 - ...rGatherRoutedRemoteActorMultiJvmNode4.conf | 11 -- ...rGatherRoutedRemoteActorMultiJvmNode4.opts | 1 - 40 files changed, 379 insertions(+), 270 deletions(-) rename akka-remote/src/multi-jvm/scala/akka/remote/{direct_routed => }/DirectRoutedRemoteActorMultiJvmSpec.scala (56%) create mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/GossipMembershipMultiJvmSpec.scala create mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/NewRemoteActorMultiJvmSpec.scala rename akka-remote/src/multi-jvm/scala/akka/remote/{random_routed => }/RandomRoutedRemoteActorMultiJvmSpec.scala (63%) rename akka-remote/src/multi-jvm/scala/akka/remote/{round_robin_routed => }/RoundRobinRoutedRemoteActorMultiJvmSpec.scala (62%) rename akka-remote/src/multi-jvm/scala/akka/remote/{scatter_gather_routed => }/ScatterGatherRoutedRemoteActorMultiJvmSpec.scala (63%) delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode1.conf delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode1.opts delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode2.conf delete mode 100644 
akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode2.opts delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode1.conf delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode1.opts delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode2.conf delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode2.opts delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmSpec.scala delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode1.conf delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode1.opts delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode2.conf delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode2.opts delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode3.conf delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode3.opts delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode4.conf delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode4.opts delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode1.conf delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode1.opts delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode2.conf delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode2.opts delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode3.conf delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode3.opts delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode4.conf delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode4.opts delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode1.conf delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode1.opts delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode2.conf delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode2.opts delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode3.conf delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode3.opts delete mode 100644 
akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode4.conf delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode4.opts diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/AkkaRemoteSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/AkkaRemoteSpec.scala index 01cc597d49..58679eaccd 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/AkkaRemoteSpec.scala +++ b/akka-remote/src/multi-jvm/scala/akka/remote/AkkaRemoteSpec.scala @@ -26,7 +26,7 @@ object AkkaRemoteSpec { } } -abstract class AkkaRemoteSpec extends AkkaSpec(AkkaRemoteSpec.testConf) with MultiJvmSync { +abstract class AkkaRemoteSpec(config: Config = AkkaRemoteSpec.testConf) extends AkkaSpec(config) with MultiJvmSync { /** * Helper function for accessing the underlying remoting. diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/DirectRoutedRemoteActorMultiJvmSpec.scala similarity index 56% rename from akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmSpec.scala rename to akka-remote/src/multi-jvm/scala/akka/remote/DirectRoutedRemoteActorMultiJvmSpec.scala index f1a6745d91..50043f80c1 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmSpec.scala +++ b/akka-remote/src/multi-jvm/scala/akka/remote/DirectRoutedRemoteActorMultiJvmSpec.scala @@ -1,4 +1,4 @@ -package akka.remote.direct_routed +package akka.remote import akka.remote._ import akka.routing._ @@ -13,38 +13,57 @@ object DirectRoutedRemoteActorMultiJvmSpec { case "identify" ⇒ sender ! system.nodename } } + + import com.typesafe.config.ConfigFactory + val commonConfig = ConfigFactory.parseString(""" + akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "direct" + /app/service-hello.nr-of-instances = 1 + /app/service-hello.remote.nodes = ["localhost:9991"] + } + } + remote.server.hostname = "localhost" + }""") + + val node1Config = ConfigFactory.parseString(""" + akka { + remote.server.port = "9991" + cluster.nodename = "node1" + }""") withFallback commonConfig + + val node2Config = ConfigFactory.parseString(""" + akka { + remote.server.port = "9992" + cluster.nodename = "node2" + }""") withFallback commonConfig } -class DirectRoutedRemoteActorMultiJvmNode1 extends AkkaRemoteSpec { - +class DirectRoutedRemoteActorMultiJvmNode1 extends AkkaRemoteSpec(DirectRoutedRemoteActorMultiJvmSpec.node1Config) { import DirectRoutedRemoteActorMultiJvmSpec._ - val nodes = NrOfNodes "___" must { "___" in { barrier("setup") - remote.start() - barrier("start") barrier("done") } } } -class DirectRoutedRemoteActorMultiJvmNode2 extends AkkaRemoteSpec { - +class DirectRoutedRemoteActorMultiJvmNode2 extends AkkaRemoteSpec(DirectRoutedRemoteActorMultiJvmSpec.node2Config) { import DirectRoutedRemoteActorMultiJvmSpec._ - val nodes = NrOfNodes "A new remote actor configured with a Direct router" must { "be locally instantiated on a remote node and be able to communicate through its RemoteActorRef" in { barrier("setup") - remote.start() - barrier("start") val actor = system.actorOf[SomeActor]("service-hello") diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/GossipMembershipMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/GossipMembershipMultiJvmSpec.scala new file 
mode 100644 index 0000000000..878b7840b0 --- /dev/null +++ b/akka-remote/src/multi-jvm/scala/akka/remote/GossipMembershipMultiJvmSpec.scala @@ -0,0 +1,134 @@ +// package akka.remote + +// import akka.actor.Actor +// import akka.remote._ +// import akka.routing._ +// import akka.routing.Routing.Broadcast + +// object GossipMembershipMultiJvmSpec { +// val NrOfNodes = 4 +// class SomeActor extends Actor with Serializable { +// def receive = { +// case "hit" ⇒ sender ! system.nodename +// case "end" ⇒ self.stop() +// } +// } + +// import com.typesafe.config.ConfigFactory +// val commonConfig = ConfigFactory.parseString(""" +// akka { +// loglevel = "WARNING" +// cluster { +// seed-nodes = ["localhost:9991"] +// } +// remote.server.hostname = "localhost" +// }""") + +// val node1Config = ConfigFactory.parseString(""" +// akka { +// remote.server.port = "9991" +// cluster.nodename = "node1" +// }""") withFallback commonConfig + +// val node2Config = ConfigFactory.parseString(""" +// akka { +// remote.server.port = "9992" +// cluster.nodename = "node2" +// }""") withFallback commonConfig + +// val node3Config = ConfigFactory.parseString(""" +// akka { +// remote.server.port = "9993" +// cluster.nodename = "node3" +// }""") withFallback commonConfig + +// val node4Config = ConfigFactory.parseString(""" +// akka { +// remote.server.port = "9994" +// cluster.nodename = "node4" +// }""") withFallback commonConfig +// } + +// class GossipMembershipMultiJvmNode1 extends AkkaRemoteSpec(GossipMembershipMultiJvmSpec.node1Config) { +// import GossipMembershipMultiJvmSpec._ +// val nodes = NrOfNodes +// "A cluster" must { +// "allow new node to join and should reach convergence with new membership table" in { + +// barrier("setup") +// remote.start() + +// barrier("start") +// val actor = system.actorOf[SomeActor]("service-hello") +// actor.isInstanceOf[RoutedActorRef] must be(true) + +// val connectionCount = NrOfNodes - 1 +// val iterationCount = 10 + +// var replies = Map( +// "node1" -> 0, +// "node2" -> 0, +// "node3" -> 0) + +// for (i ← 0 until iterationCount) { +// for (k ← 0 until connectionCount) { +// val nodeName = (actor ? "hit").as[String].getOrElse(fail("No id returned by actor")) +// replies = replies + (nodeName -> (replies(nodeName) + 1)) +// } +// } + +// barrier("broadcast-end") +// actor ! 
Broadcast("end") + +// barrier("end") +// replies.values foreach { _ must be > (0) } + +// barrier("done") +// } +// } +// } + +// class GossipMembershipMultiJvmNode2 extends AkkaRemoteSpec(GossipMembershipMultiJvmSpec.node2Config) { +// import GossipMembershipMultiJvmSpec._ +// val nodes = NrOfNodes +// "___" must { +// "___" in { +// barrier("setup") +// remote.start() +// barrier("start") +// barrier("broadcast-end") +// barrier("end") +// barrier("done") +// } +// } +// } + +// class GossipMembershipMultiJvmNode3 extends AkkaRemoteSpec(GossipMembershipMultiJvmSpec.node3Config) { +// import GossipMembershipMultiJvmSpec._ +// val nodes = NrOfNodes +// "___" must { +// "___" in { +// barrier("setup") +// remote.start() +// barrier("start") +// barrier("broadcast-end") +// barrier("end") +// barrier("done") +// } +// } +// } + +// class GossipMembershipMultiJvmNode4 extends AkkaRemoteSpec(GossipMembershipMultiJvmSpec.node4Config) { +// import GossipMembershipMultiJvmSpec._ +// val nodes = NrOfNodes +// "___" must { +// "___" in { +// barrier("setup") +// remote.start() +// barrier("start") +// barrier("broadcast-end") +// barrier("end") +// barrier("done") +// } +// } +// } diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/NewRemoteActorMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/NewRemoteActorMultiJvmSpec.scala new file mode 100644 index 0000000000..f179e36d3a --- /dev/null +++ b/akka-remote/src/multi-jvm/scala/akka/remote/NewRemoteActorMultiJvmSpec.scala @@ -0,0 +1,82 @@ +package akka.remote + +import akka.actor.Actor +import akka.remote._ + +object NewRemoteActorMultiJvmSpec { + val NrOfNodes = 2 + + class SomeActor extends Actor with Serializable { + def receive = { + case "identify" ⇒ sender ! system.nodename + } + } + + import com.typesafe.config.ConfigFactory + val commonConfig = ConfigFactory.parseString(""" + akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.remote.nodes = ["localhost:9991"] + } + } + remote.server.hostname = "localhost" + }""") + + val node1Config = ConfigFactory.parseString(""" + akka { + remote.server.port = "9991" + cluster.nodename = "node1" + }""") withFallback commonConfig + + val node2Config = ConfigFactory.parseString(""" + akka { + remote.server.port = "9992" + cluster.nodename = "node2" + }""") withFallback commonConfig +} + +class NewRemoteActorMultiJvmNode1 extends AkkaRemoteSpec(NewRemoteActorMultiJvmSpec.node1Config) { + + import NewRemoteActorMultiJvmSpec._ + + val nodes = NrOfNodes + + "___" must { + "___" in { + barrier("setup") + + remote.start() + + barrier("start") + + barrier("done") + } + } +} + +class NewRemoteActorMultiJvmNode2 extends AkkaRemoteSpec(NewRemoteActorMultiJvmSpec.node2Config) { + + import NewRemoteActorMultiJvmSpec._ + + val nodes = NrOfNodes + + "A new remote actor" must { + "be locally instantiated on a remote node and be able to communicate through its RemoteActorRef" in { + barrier("setup") + + remote.start() + + barrier("start") + + val actor = system.actorOf[SomeActor]("service-hello") + val result = (actor ? 
"identify").get + result must equal("node1") + + barrier("done") + } + } +} + diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/RandomRoutedRemoteActorMultiJvmSpec.scala similarity index 63% rename from akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmSpec.scala rename to akka-remote/src/multi-jvm/scala/akka/remote/RandomRoutedRemoteActorMultiJvmSpec.scala index a5701cccd4..786d2f6d27 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmSpec.scala +++ b/akka-remote/src/multi-jvm/scala/akka/remote/RandomRoutedRemoteActorMultiJvmSpec.scala @@ -1,4 +1,4 @@ -package akka.remote.random_routed +package akka.remote import akka.actor.Actor import akka.remote._ @@ -13,9 +13,48 @@ object RandomRoutedRemoteActorMultiJvmSpec { case "end" ⇒ self.stop() } } + + import com.typesafe.config.ConfigFactory + val commonConfig = ConfigFactory.parseString(""" + akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "random" + /app/service-hello.nr-of-instances = 3 + /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] + } + } + remote.server.hostname = "localhost" + }""") + + val node1Config = ConfigFactory.parseString(""" + akka { + remote.server.port = "9991" + cluster.nodename = "node1" + }""") withFallback commonConfig + + val node2Config = ConfigFactory.parseString(""" + akka { + remote.server.port = "9992" + cluster.nodename = "node2" + }""") withFallback commonConfig + + val node3Config = ConfigFactory.parseString(""" + akka { + remote.server.port = "9993" + cluster.nodename = "node3" + }""") withFallback commonConfig + + val node4Config = ConfigFactory.parseString(""" + akka { + remote.server.port = "9994" + cluster.nodename = "node4" + }""") withFallback commonConfig } -class RandomRoutedRemoteActorMultiJvmNode1 extends AkkaRemoteSpec { +class RandomRoutedRemoteActorMultiJvmNode1 extends AkkaRemoteSpec(RandomRoutedRemoteActorMultiJvmSpec.node1Config) { import RandomRoutedRemoteActorMultiJvmSpec._ val nodes = NrOfNodes "___" must { @@ -30,7 +69,7 @@ class RandomRoutedRemoteActorMultiJvmNode1 extends AkkaRemoteSpec { } } -class RandomRoutedRemoteActorMultiJvmNode2 extends AkkaRemoteSpec { +class RandomRoutedRemoteActorMultiJvmNode2 extends AkkaRemoteSpec(RandomRoutedRemoteActorMultiJvmSpec.node2Config) { import RandomRoutedRemoteActorMultiJvmSpec._ val nodes = NrOfNodes "___" must { @@ -45,7 +84,7 @@ class RandomRoutedRemoteActorMultiJvmNode2 extends AkkaRemoteSpec { } } -class RandomRoutedRemoteActorMultiJvmNode3 extends AkkaRemoteSpec { +class RandomRoutedRemoteActorMultiJvmNode3 extends AkkaRemoteSpec(RandomRoutedRemoteActorMultiJvmSpec.node3Config) { import RandomRoutedRemoteActorMultiJvmSpec._ val nodes = NrOfNodes "___" must { @@ -60,7 +99,7 @@ class RandomRoutedRemoteActorMultiJvmNode3 extends AkkaRemoteSpec { } } -class RandomRoutedRemoteActorMultiJvmNode4 extends AkkaRemoteSpec { +class RandomRoutedRemoteActorMultiJvmNode4 extends AkkaRemoteSpec(RandomRoutedRemoteActorMultiJvmSpec.node4Config) { import RandomRoutedRemoteActorMultiJvmSpec._ val nodes = NrOfNodes "A new remote actor configured with a Random router" must { diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmSpec.scala 
b/akka-remote/src/multi-jvm/scala/akka/remote/RoundRobinRoutedRemoteActorMultiJvmSpec.scala similarity index 62% rename from akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmSpec.scala rename to akka-remote/src/multi-jvm/scala/akka/remote/RoundRobinRoutedRemoteActorMultiJvmSpec.scala index 413d7814a5..fa0d30bf86 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmSpec.scala +++ b/akka-remote/src/multi-jvm/scala/akka/remote/RoundRobinRoutedRemoteActorMultiJvmSpec.scala @@ -1,4 +1,4 @@ -package akka.remote.round_robin_routed +package akka.remote import akka.actor.Actor import akka.remote._ @@ -13,9 +13,48 @@ object RoundRobinRoutedRemoteActorMultiJvmSpec { case "end" ⇒ self.stop() } } + + import com.typesafe.config.ConfigFactory + val commonConfig = ConfigFactory.parseString(""" + akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "round-robin" + /app/service-hello.nr-of-instances = 3 + /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] + } + } + remote.server.hostname = "localhost" + }""") + + val node1Config = ConfigFactory.parseString(""" + akka { + remote.server.port = "9991" + cluster.nodename = "node1" + }""") withFallback commonConfig + + val node2Config = ConfigFactory.parseString(""" + akka { + remote.server.port = "9992" + cluster.nodename = "node2" + }""") withFallback commonConfig + + val node3Config = ConfigFactory.parseString(""" + akka { + remote.server.port = "9993" + cluster.nodename = "node3" + }""") withFallback commonConfig + + val node4Config = ConfigFactory.parseString(""" + akka { + remote.server.port = "9994" + cluster.nodename = "node4" + }""") withFallback commonConfig } -class RoundRobinRoutedRemoteActorMultiJvmNode1 extends AkkaRemoteSpec { +class RoundRobinRoutedRemoteActorMultiJvmNode1 extends AkkaRemoteSpec(RoundRobinRoutedRemoteActorMultiJvmSpec.node1Config) { import RoundRobinRoutedRemoteActorMultiJvmSpec._ val nodes = NrOfNodes "___" must { @@ -30,7 +69,7 @@ class RoundRobinRoutedRemoteActorMultiJvmNode1 extends AkkaRemoteSpec { } } -class RoundRobinRoutedRemoteActorMultiJvmNode2 extends AkkaRemoteSpec { +class RoundRobinRoutedRemoteActorMultiJvmNode2 extends AkkaRemoteSpec(RoundRobinRoutedRemoteActorMultiJvmSpec.node2Config) { import RoundRobinRoutedRemoteActorMultiJvmSpec._ val nodes = NrOfNodes "___" must { @@ -45,7 +84,7 @@ class RoundRobinRoutedRemoteActorMultiJvmNode2 extends AkkaRemoteSpec { } } -class RoundRobinRoutedRemoteActorMultiJvmNode3 extends AkkaRemoteSpec { +class RoundRobinRoutedRemoteActorMultiJvmNode3 extends AkkaRemoteSpec(RoundRobinRoutedRemoteActorMultiJvmSpec.node3Config) { import RoundRobinRoutedRemoteActorMultiJvmSpec._ val nodes = NrOfNodes "___" must { @@ -60,7 +99,7 @@ class RoundRobinRoutedRemoteActorMultiJvmNode3 extends AkkaRemoteSpec { } } -class RoundRobinRoutedRemoteActorMultiJvmNode4 extends AkkaRemoteSpec { +class RoundRobinRoutedRemoteActorMultiJvmNode4 extends AkkaRemoteSpec(RoundRobinRoutedRemoteActorMultiJvmSpec.node4Config) { import RoundRobinRoutedRemoteActorMultiJvmSpec._ val nodes = NrOfNodes "A new remote actor configured with a RoundRobin router" must { diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/ScatterGatherRoutedRemoteActorMultiJvmSpec.scala similarity index 
63% rename from akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmSpec.scala rename to akka-remote/src/multi-jvm/scala/akka/remote/ScatterGatherRoutedRemoteActorMultiJvmSpec.scala index 95c5037e8f..1a43c4ad5e 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmSpec.scala +++ b/akka-remote/src/multi-jvm/scala/akka/remote/ScatterGatherRoutedRemoteActorMultiJvmSpec.scala @@ -1,4 +1,4 @@ -package akka.remote.scatter_gather_routed +package akka.remote import akka.actor.Actor import akka.remote._ @@ -13,9 +13,48 @@ object ScatterGatherRoutedRemoteActorMultiJvmSpec { case "end" ⇒ self.stop() } } + + import com.typesafe.config.ConfigFactory + val commonConfig = ConfigFactory.parseString(""" + akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "scatter-gather" + /app/service-hello.nr-of-instances = 3 + /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] + } + } + remote.server.hostname = "localhost" + }""") + + val node1Config = ConfigFactory.parseString(""" + akka { + remote.server.port = "9991" + cluster.nodename = "node1" + }""") withFallback commonConfig + + val node2Config = ConfigFactory.parseString(""" + akka { + remote.server.port = "9992" + cluster.nodename = "node2" + }""") withFallback commonConfig + + val node3Config = ConfigFactory.parseString(""" + akka { + remote.server.port = "9993" + cluster.nodename = "node3" + }""") withFallback commonConfig + + val node4Config = ConfigFactory.parseString(""" + akka { + remote.server.port = "9994" + cluster.nodename = "node4" + }""") withFallback commonConfig } -class ScatterGatherRoutedRemoteActorMultiJvmNode1 extends AkkaRemoteSpec { +class ScatterGatherRoutedRemoteActorMultiJvmNode1 extends AkkaRemoteSpec(ScatterGatherRoutedRemoteActorMultiJvmSpec.node1Config) { import ScatterGatherRoutedRemoteActorMultiJvmSpec._ val nodes = NrOfNodes "___" must { @@ -30,7 +69,7 @@ class ScatterGatherRoutedRemoteActorMultiJvmNode1 extends AkkaRemoteSpec { } } -class ScatterGatherRoutedRemoteActorMultiJvmNode2 extends AkkaRemoteSpec { +class ScatterGatherRoutedRemoteActorMultiJvmNode2 extends AkkaRemoteSpec(ScatterGatherRoutedRemoteActorMultiJvmSpec.node2Config) { import ScatterGatherRoutedRemoteActorMultiJvmSpec._ val nodes = NrOfNodes "___" must { @@ -45,7 +84,7 @@ class ScatterGatherRoutedRemoteActorMultiJvmNode2 extends AkkaRemoteSpec { } } -class ScatterGatherRoutedRemoteActorMultiJvmNode3 extends AkkaRemoteSpec { +class ScatterGatherRoutedRemoteActorMultiJvmNode3 extends AkkaRemoteSpec(ScatterGatherRoutedRemoteActorMultiJvmSpec.node3Config) { import ScatterGatherRoutedRemoteActorMultiJvmSpec._ val nodes = NrOfNodes "___" must { @@ -60,7 +99,7 @@ class ScatterGatherRoutedRemoteActorMultiJvmNode3 extends AkkaRemoteSpec { } } -class ScatterGatherRoutedRemoteActorMultiJvmNode4 extends AkkaRemoteSpec { +class ScatterGatherRoutedRemoteActorMultiJvmNode4 extends AkkaRemoteSpec(ScatterGatherRoutedRemoteActorMultiJvmSpec.node4Config) { import ScatterGatherRoutedRemoteActorMultiJvmSpec._ val nodes = NrOfNodes "A new remote actor configured with a ScatterGather router" must { diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode1.conf b/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode1.conf deleted file mode 100644 index 
1b1c7b398c..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode1.conf +++ /dev/null @@ -1,11 +0,0 @@ -akka { - loglevel = "WARNING" - actor { - provider = "akka.remote.RemoteActorRefProvider" - deployment { - /app/service-hello.router = "direct" - /app/service-hello.nr-of-instances = 1 - /app/service-hello.remote.nodes = ["localhost:9991"] - } - } -} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode1.opts b/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode1.opts deleted file mode 100644 index a3218fe698..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode1.opts +++ /dev/null @@ -1 +0,0 @@ --Dakka.cluster.nodename=node1 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9991 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode2.conf b/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode2.conf deleted file mode 100644 index 1b1c7b398c..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode2.conf +++ /dev/null @@ -1,11 +0,0 @@ -akka { - loglevel = "WARNING" - actor { - provider = "akka.remote.RemoteActorRefProvider" - deployment { - /app/service-hello.router = "direct" - /app/service-hello.nr-of-instances = 1 - /app/service-hello.remote.nodes = ["localhost:9991"] - } - } -} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode2.opts b/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode2.opts deleted file mode 100644 index dcecc85ffb..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode2.opts +++ /dev/null @@ -1 +0,0 @@ --Dakka.cluster.nodename=node2 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9992 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode1.conf b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode1.conf deleted file mode 100644 index 9073ed4ed3..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode1.conf +++ /dev/null @@ -1,9 +0,0 @@ -akka { - loglevel = "WARNING" - actor { - provider = "akka.remote.RemoteActorRefProvider" - deployment { - /app/service-hello.remote.nodes = ["localhost:9991"] - } - } -} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode1.opts b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode1.opts deleted file mode 100644 index a3218fe698..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode1.opts +++ /dev/null @@ -1 +0,0 @@ --Dakka.cluster.nodename=node1 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9991 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode2.conf b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode2.conf deleted file mode 100644 index 9073ed4ed3..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode2.conf +++ /dev/null @@ -1,9 +0,0 @@ -akka { - loglevel = "WARNING" - actor { 
- provider = "akka.remote.RemoteActorRefProvider" - deployment { - /app/service-hello.remote.nodes = ["localhost:9991"] - } - } -} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode2.opts b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode2.opts deleted file mode 100644 index dcecc85ffb..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode2.opts +++ /dev/null @@ -1 +0,0 @@ --Dakka.cluster.nodename=node2 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9992 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmSpec.scala deleted file mode 100644 index 3be4979964..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmSpec.scala +++ /dev/null @@ -1,57 +0,0 @@ -package akka.remote.new_remote_actor - -import akka.actor.Actor -import akka.remote._ - -object NewRemoteActorMultiJvmSpec { - val NrOfNodes = 2 - - class SomeActor extends Actor with Serializable { - def receive = { - case "identify" ⇒ sender ! system.nodename - } - } -} - -class NewRemoteActorMultiJvmNode1 extends AkkaRemoteSpec { - - import NewRemoteActorMultiJvmSpec._ - - val nodes = NrOfNodes - - "___" must { - "___" in { - barrier("setup") - - remote.start() - - barrier("start") - - barrier("done") - } - } -} - -class NewRemoteActorMultiJvmNode2 extends AkkaRemoteSpec { - - import NewRemoteActorMultiJvmSpec._ - - val nodes = NrOfNodes - - "A new remote actor" must { - "be locally instantiated on a remote node and be able to communicate through its RemoteActorRef" in { - barrier("setup") - - remote.start() - - barrier("start") - - val actor = system.actorOf[SomeActor]("service-hello") - val result = (actor ? 
"identify").get - result must equal("node1") - - barrier("done") - } - } -} - diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode1.conf b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode1.conf deleted file mode 100644 index e373bc9c0e..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode1.conf +++ /dev/null @@ -1,11 +0,0 @@ -akka { - loglevel = "WARNING" - actor { - provider = "akka.remote.RemoteActorRefProvider" - deployment { - /app/service-hello.router = "random" - /app/service-hello.nr-of-instances = 3 - /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] - } - } -} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode1.opts b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode1.opts deleted file mode 100644 index a3218fe698..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode1.opts +++ /dev/null @@ -1 +0,0 @@ --Dakka.cluster.nodename=node1 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9991 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode2.conf b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode2.conf deleted file mode 100644 index b6d6e7b3f9..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode2.conf +++ /dev/null @@ -1,9 +0,0 @@ -akka { - loglevel = "WARNING" - actor { - provider = "akka.remote.RemoteActorRefProvider" - deployment./app/service-hello.router = "random" - deployment./app/service-hello.nr-of-instances = 3 - deployment./app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] - } -} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode2.opts b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode2.opts deleted file mode 100644 index dcecc85ffb..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode2.opts +++ /dev/null @@ -1 +0,0 @@ --Dakka.cluster.nodename=node2 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9992 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode3.conf b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode3.conf deleted file mode 100644 index e373bc9c0e..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode3.conf +++ /dev/null @@ -1,11 +0,0 @@ -akka { - loglevel = "WARNING" - actor { - provider = "akka.remote.RemoteActorRefProvider" - deployment { - /app/service-hello.router = "random" - /app/service-hello.nr-of-instances = 3 - /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] - } - } -} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode3.opts b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode3.opts deleted file mode 100644 index cabc575688..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode3.opts 
+++ /dev/null @@ -1 +0,0 @@ --Dakka.cluster.nodename=node3 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9993 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode4.conf b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode4.conf deleted file mode 100644 index e373bc9c0e..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode4.conf +++ /dev/null @@ -1,11 +0,0 @@ -akka { - loglevel = "WARNING" - actor { - provider = "akka.remote.RemoteActorRefProvider" - deployment { - /app/service-hello.router = "random" - /app/service-hello.nr-of-instances = 3 - /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] - } - } -} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode4.opts b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode4.opts deleted file mode 100644 index 4c7670d733..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode4.opts +++ /dev/null @@ -1 +0,0 @@ --Dakka.cluster.nodename=node4 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9994 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode1.conf b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode1.conf deleted file mode 100644 index a0ec833383..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode1.conf +++ /dev/null @@ -1,11 +0,0 @@ -akka { - loglevel = "WARNING" - actor { - provider = "akka.remote.RemoteActorRefProvider" - deployment { - /app/service-hello.router = "round-robin" - /app/service-hello.nr-of-instances = 3 - /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] - } - } -} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode1.opts b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode1.opts deleted file mode 100644 index a3218fe698..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode1.opts +++ /dev/null @@ -1 +0,0 @@ --Dakka.cluster.nodename=node1 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9991 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode2.conf b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode2.conf deleted file mode 100644 index a0ec833383..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode2.conf +++ /dev/null @@ -1,11 +0,0 @@ -akka { - loglevel = "WARNING" - actor { - provider = "akka.remote.RemoteActorRefProvider" - deployment { - /app/service-hello.router = "round-robin" - /app/service-hello.nr-of-instances = 3 - /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] - } - } -} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode2.opts b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode2.opts deleted file 
mode 100644 index dcecc85ffb..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode2.opts +++ /dev/null @@ -1 +0,0 @@ --Dakka.cluster.nodename=node2 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9992 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode3.conf b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode3.conf deleted file mode 100644 index a0ec833383..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode3.conf +++ /dev/null @@ -1,11 +0,0 @@ -akka { - loglevel = "WARNING" - actor { - provider = "akka.remote.RemoteActorRefProvider" - deployment { - /app/service-hello.router = "round-robin" - /app/service-hello.nr-of-instances = 3 - /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] - } - } -} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode3.opts b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode3.opts deleted file mode 100644 index cabc575688..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode3.opts +++ /dev/null @@ -1 +0,0 @@ --Dakka.cluster.nodename=node3 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9993 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode4.conf b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode4.conf deleted file mode 100644 index a0ec833383..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode4.conf +++ /dev/null @@ -1,11 +0,0 @@ -akka { - loglevel = "WARNING" - actor { - provider = "akka.remote.RemoteActorRefProvider" - deployment { - /app/service-hello.router = "round-robin" - /app/service-hello.nr-of-instances = 3 - /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] - } - } -} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode4.opts b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode4.opts deleted file mode 100644 index 4c7670d733..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode4.opts +++ /dev/null @@ -1 +0,0 @@ --Dakka.cluster.nodename=node4 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9994 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode1.conf b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode1.conf deleted file mode 100644 index 80ad72e3de..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode1.conf +++ /dev/null @@ -1,11 +0,0 @@ -akka { - loglevel = "WARNING" - actor { - provider = "akka.remote.RemoteActorRefProvider" - deployment { - /app/service-hello.router = "scatter-gather" - /app/service-hello.nr-of-instances = 3 - /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] - } - } -} diff --git 
a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode1.opts b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode1.opts deleted file mode 100644 index a3218fe698..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode1.opts +++ /dev/null @@ -1 +0,0 @@ --Dakka.cluster.nodename=node1 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9991 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode2.conf b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode2.conf deleted file mode 100644 index 80ad72e3de..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode2.conf +++ /dev/null @@ -1,11 +0,0 @@ -akka { - loglevel = "WARNING" - actor { - provider = "akka.remote.RemoteActorRefProvider" - deployment { - /app/service-hello.router = "scatter-gather" - /app/service-hello.nr-of-instances = 3 - /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] - } - } -} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode2.opts b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode2.opts deleted file mode 100644 index dcecc85ffb..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode2.opts +++ /dev/null @@ -1 +0,0 @@ --Dakka.cluster.nodename=node2 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9992 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode3.conf b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode3.conf deleted file mode 100644 index 80ad72e3de..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode3.conf +++ /dev/null @@ -1,11 +0,0 @@ -akka { - loglevel = "WARNING" - actor { - provider = "akka.remote.RemoteActorRefProvider" - deployment { - /app/service-hello.router = "scatter-gather" - /app/service-hello.nr-of-instances = 3 - /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] - } - } -} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode3.opts b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode3.opts deleted file mode 100644 index cabc575688..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode3.opts +++ /dev/null @@ -1 +0,0 @@ --Dakka.cluster.nodename=node3 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9993 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode4.conf b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode4.conf deleted file mode 100644 index 80ad72e3de..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode4.conf +++ 
/dev/null @@ -1,11 +0,0 @@ -akka { - loglevel = "WARNING" - actor { - provider = "akka.remote.RemoteActorRefProvider" - deployment { - /app/service-hello.router = "scatter-gather" - /app/service-hello.nr-of-instances = 3 - /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] - } - } -} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode4.opts b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode4.opts deleted file mode 100644 index 4c7670d733..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode4.opts +++ /dev/null @@ -1 +0,0 @@ --Dakka.cluster.nodename=node4 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9994 From 064a8a7be1685be95be9a17f5fd8e8453d20fbbb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jonas=20Bone=CC=81r?= Date: Mon, 5 Dec 2011 11:42:52 +0100 Subject: [PATCH 02/34] Added fallback to testConfig in AkkaRemoteSpec --- .../src/multi-jvm/scala/akka/remote/AkkaRemoteSpec.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/AkkaRemoteSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/AkkaRemoteSpec.scala index 58679eaccd..2c9274c84f 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/AkkaRemoteSpec.scala +++ b/akka-remote/src/multi-jvm/scala/akka/remote/AkkaRemoteSpec.scala @@ -26,7 +26,9 @@ object AkkaRemoteSpec { } } -abstract class AkkaRemoteSpec(config: Config = AkkaRemoteSpec.testConf) extends AkkaSpec(config) with MultiJvmSync { +abstract class AkkaRemoteSpec(config: Config) + extends AkkaSpec(config.withFallback(AkkaRemoteSpec.testConf)) + with MultiJvmSync { /** * Helper function for accessing the underlying remoting. 
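[Editorial note between patches 02 and 03] The two patches above replace the per-node .conf and .opts files with HOCON parsed inline in each spec's companion object and layered with withFallback, and AkkaRemoteSpec now merges the supplied config over AkkaRemoteSpec.testConf. The following is a minimal sketch of how that layering resolves, using the Typesafe Config calls that appear in the diff (ConfigFactory.parseString, Config.withFallback); the object name ExampleMultiJvmSpec is hypothetical and not part of the patch.

// Sketch only, not part of the patch. Node-specific keys win; anything not set
// here falls back to commonConfig, and AkkaRemoteSpec adds a final
// withFallback(AkkaRemoteSpec.testConf) on top of that.
import com.typesafe.config.{ Config, ConfigFactory }

object ExampleMultiJvmSpec {
  val commonConfig: Config = ConfigFactory.parseString("""
    akka {
      loglevel = "WARNING"
      actor.provider = "akka.remote.RemoteActorRefProvider"
      remote.server.hostname = "localhost"
    }""")

  val node1Config: Config = ConfigFactory.parseString("""
    akka {
      remote.server.port = "9991"
      cluster.nodename = "node1"
    }""") withFallback commonConfig
}

Resolving node1Config.getString("akka.loglevel") yields "WARNING" from the common layer, while akka.remote.server.port comes from the node layer, which is what the deleted per-node .conf/.opts pairs used to express via separate files and -D system properties.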
From 553d1da347dc9236ca98289abc01327779545d66 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jonas=20Bone=CC=81r?= Date: Mon, 5 Dec 2011 11:48:17 +0100 Subject: [PATCH 03/34] Cleaned up inconsistent logging messages --- .../src/main/scala/akka/actor/ActorCell.scala | 38 +++++++++---------- .../src/main/scala/akka/actor/ActorRef.scala | 8 ++-- 2 files changed, 23 insertions(+), 23 deletions(-) diff --git a/akka-actor/src/main/scala/akka/actor/ActorCell.scala b/akka-actor/src/main/scala/akka/actor/ActorCell.scala index c7a37de589..ed9b90c32f 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorCell.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorCell.scala @@ -192,22 +192,22 @@ private[akka] class ActorCell( actor = created created.preStart() checkReceiveTimeout - if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "started (" + actor + ")")) + if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "Started [" + actor + "]")) } catch { // FIXME catching all and continue isn't good for OOME, ticket #1418 case e ⇒ try { - system.eventStream.publish(Error(e, self.toString, "error while creating actor")) + system.eventStream.publish(Error(e, self.toString, "Error while creating actor")) // prevent any further messages to be processed until the actor has been restarted dispatcher.suspend(this) } finally { - parent.tell(Failed(ActorInitializationException(self, "exception during creation", e)), self) + parent.tell(Failed(ActorInitializationException(self, "Exception during creation", e)), self) } } def recreate(cause: Throwable): Unit = try { val failedActor = actor - if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "restarting")) + if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "Restarting")) val freshActor = newActor() if (failedActor ne null) { val c = currentMessage //One read only plz @@ -221,7 +221,7 @@ private[akka] class ActorCell( } actor = freshActor // assign it here so if preStart fails, we can null out the sef-refs next call freshActor.postRestart(cause) - if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "restarted")) + if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "Restarted")) dispatcher.resume(this) //FIXME should this be moved down? 
@@ -229,11 +229,11 @@ private[akka] class ActorCell( } catch { // FIXME catching all and continue isn't good for OOME, ticket #1418 case e ⇒ try { - system.eventStream.publish(Error(e, self.toString, "error while creating actor")) + system.eventStream.publish(Error(e, self.toString, "Error while creating actor")) // prevent any further messages to be processed until the actor has been restarted dispatcher.suspend(this) } finally { - parent.tell(Failed(ActorInitializationException(self, "exception during re-creation", e)), self) + parent.tell(Failed(ActorInitializationException(self, "Exception during re-creation", e)), self) } } @@ -248,7 +248,7 @@ private[akka] class ActorCell( val c = children if (c.isEmpty) doTerminate() else { - if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "stopping")) + if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "Stopping")) for (child ← c) child.stop() stopping = true } @@ -258,12 +258,12 @@ private[akka] class ActorCell( val stat = childrenRefs.get(child.name) if (stat.isDefined) { if (stat.get.child == child) - system.eventStream.publish(Warning(self.toString, "Already supervising " + child)) + system.eventStream.publish(Warning(self.toString, "Already supervising [" + child + "]")) else - system.eventStream.publish(Warning(self.toString, "Already supervising other child with same name '" + child.name + "', old: " + stat.get + " new: " + child)) + system.eventStream.publish(Warning(self.toString, "Already supervising other child with same name [" + child.name + "], old: [" + stat.get + "], new: [" + child + "]")) } else { childrenRefs = childrenRefs.updated(child.name, ChildRestartStats(child)) - if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "now supervising " + child)) + if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "Now supervising [" + child + "]")) } } @@ -277,10 +277,10 @@ private[akka] class ActorCell( case Recreate(cause) ⇒ recreate(cause) case Link(subject) ⇒ system.deathWatch.subscribe(self, subject) - if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "now monitoring " + subject)) + if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "Now monitoring [" + subject + "]")) case Unlink(subject) ⇒ system.deathWatch.unsubscribe(self, subject) - if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "stopped monitoring " + subject)) + if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "Stopped monitoring [" + subject + "]")) case Suspend() ⇒ suspend() case Resume() ⇒ resume() case Terminate() ⇒ terminate() @@ -288,7 +288,7 @@ private[akka] class ActorCell( } } catch { case e ⇒ //Should we really catch everything here? - system.eventStream.publish(Error(e, self.toString, "error while processing " + message)) + system.eventStream.publish(Error(e, self.toString, "Error while processing [" + message + "]")) //TODO FIXME How should problems here be handled??? 
throw e } @@ -347,7 +347,7 @@ private[akka] class ActorCell( } def autoReceiveMessage(msg: Envelope) { - if (system.settings.DebugAutoReceive) system.eventStream.publish(Debug(self.toString, "received AutoReceiveMessage " + msg)) + if (system.settings.DebugAutoReceive) system.eventStream.publish(Debug(self.toString, "Received AutoReceiveMessage " + msg)) if (stopping) msg.message match { case ChildTerminated ⇒ handleChildTerminated(sender) @@ -365,7 +365,7 @@ private[akka] class ActorCell( private def doTerminate() { if (!system.provider.evict(self.path.toString)) - system.eventStream.publish(Warning(self.toString, "evict of " + self.path.toString + " failed")) + system.eventStream.publish(Warning(self.toString, "Evict of [" + self.path.toString + "] failed")) dispatcher.detach(this) @@ -376,7 +376,7 @@ private[akka] class ActorCell( try { parent.tell(ChildTerminated, self) system.deathWatch.publish(Terminated(self)) - if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "stopped")) + if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "Stopped")) } finally { currentMessage = null clearActorFields() @@ -386,8 +386,8 @@ private[akka] class ActorCell( final def handleFailure(child: ActorRef, cause: Throwable): Unit = childrenRefs.get(child.name) match { case Some(stats) if stats.child == child ⇒ if (!props.faultHandler.handleFailure(child, cause, stats, childrenRefs.values)) throw cause - case Some(stats) ⇒ system.eventStream.publish(Warning(self.toString, "dropping Failed(" + cause + ") from unknown child " + child + " matching names but not the same, was: " + stats.child)) - case None ⇒ system.eventStream.publish(Warning(self.toString, "dropping Failed(" + cause + ") from unknown child " + child)) + case Some(stats) ⇒ system.eventStream.publish(Warning(self.toString, "Dropping Failed[" + cause + "] from unknown child [" + child + "] matching names but not the same, was: " + stats.child)) + case None ⇒ system.eventStream.publish(Warning(self.toString, "Dropping Failed[" + cause + "] from unknown child [" + child + "]")) } final def handleChildTerminated(child: ActorRef): Unit = { diff --git a/akka-actor/src/main/scala/akka/actor/ActorRef.scala b/akka-actor/src/main/scala/akka/actor/ActorRef.scala index c3865c001b..3c27298821 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorRef.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorRef.scala @@ -271,7 +271,7 @@ case class SerializedActorRef(hostname: String, port: Int, path: String) { def readResolve(): AnyRef = currentSystem.value match { case null ⇒ throw new IllegalStateException( "Trying to deserialize a serialized ActorRef without an ActorSystem in scope." + - " Use akka.serialization.Serialization.currentSystem.withValue(system) { ... }") + " Use 'akka.serialization.Serialization.currentSystem.withValue(system) { ... 
}'") case someSystem ⇒ someSystem.provider.deserialize(this) match { case Some(actor) ⇒ actor case None ⇒ throw new IllegalStateException("Could not deserialize ActorRef") @@ -287,7 +287,7 @@ trait MinimalActorRef extends ActorRef with ScalaActorRef with RefInternals { private[akka] val uuid: Uuid = newUuid() def name: String = uuid.toString - //FIXME REMOVE THIS, ticket #1416 + //FIXME REMOVE THIS, ticket #1416 //FIXME REMOVE THIS, ticket #1415 def suspend(): Unit = () def resume(): Unit = () @@ -299,7 +299,7 @@ trait MinimalActorRef extends ActorRef with ScalaActorRef with RefInternals { def !(message: Any)(implicit sender: ActorRef = null): Unit = () def ?(message: Any)(implicit timeout: Timeout): Future[Any] = - throw new UnsupportedOperationException("Not supported for %s".format(getClass.getName)) + throw new UnsupportedOperationException("Not supported for [%s]".format(getClass.getName)) protected[akka] def sendSystemMessage(message: SystemMessage): Unit = () protected[akka] def restart(cause: Throwable): Unit = () @@ -380,7 +380,7 @@ abstract class AskActorRef(val path: ActorPath, provider: ActorRefProvider, deat } override def ?(message: Any)(implicit timeout: Timeout): Future[Any] = - new KeptPromise[Any](Left(new UnsupportedOperationException("Ask/? is not supported for %s".format(getClass.getName))))(dispatcher) + new KeptPromise[Any](Left(new UnsupportedOperationException("'ask/?'' is not supported for [%s]".format(getClass.getName))))(dispatcher) override def isTerminated = result.isCompleted || result.isExpired From 991a4a3fd9ea2faea8d2ae1e44941a1560d252a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jonas=20Bone=CC=81r?= Date: Fri, 9 Dec 2011 17:46:29 +0100 Subject: [PATCH 04/34] Removed multi-jvm test for gossip. Will reintroduce later, but first write in-process tests for the gossip using the new remoting. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Jonas Bonér --- .../remote/GossipMembershipMultiJvmSpec.scala | 134 ------------------ 1 file changed, 134 deletions(-) delete mode 100644 akka-remote/src/multi-jvm/scala/akka/remote/GossipMembershipMultiJvmSpec.scala diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/GossipMembershipMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/GossipMembershipMultiJvmSpec.scala deleted file mode 100644 index 878b7840b0..0000000000 --- a/akka-remote/src/multi-jvm/scala/akka/remote/GossipMembershipMultiJvmSpec.scala +++ /dev/null @@ -1,134 +0,0 @@ -// package akka.remote - -// import akka.actor.Actor -// import akka.remote._ -// import akka.routing._ -// import akka.routing.Routing.Broadcast - -// object GossipMembershipMultiJvmSpec { -// val NrOfNodes = 4 -// class SomeActor extends Actor with Serializable { -// def receive = { -// case "hit" ⇒ sender ! 
system.nodename -// case "end" ⇒ self.stop() -// } -// } - -// import com.typesafe.config.ConfigFactory -// val commonConfig = ConfigFactory.parseString(""" -// akka { -// loglevel = "WARNING" -// cluster { -// seed-nodes = ["localhost:9991"] -// } -// remote.server.hostname = "localhost" -// }""") - -// val node1Config = ConfigFactory.parseString(""" -// akka { -// remote.server.port = "9991" -// cluster.nodename = "node1" -// }""") withFallback commonConfig - -// val node2Config = ConfigFactory.parseString(""" -// akka { -// remote.server.port = "9992" -// cluster.nodename = "node2" -// }""") withFallback commonConfig - -// val node3Config = ConfigFactory.parseString(""" -// akka { -// remote.server.port = "9993" -// cluster.nodename = "node3" -// }""") withFallback commonConfig - -// val node4Config = ConfigFactory.parseString(""" -// akka { -// remote.server.port = "9994" -// cluster.nodename = "node4" -// }""") withFallback commonConfig -// } - -// class GossipMembershipMultiJvmNode1 extends AkkaRemoteSpec(GossipMembershipMultiJvmSpec.node1Config) { -// import GossipMembershipMultiJvmSpec._ -// val nodes = NrOfNodes -// "A cluster" must { -// "allow new node to join and should reach convergence with new membership table" in { - -// barrier("setup") -// remote.start() - -// barrier("start") -// val actor = system.actorOf[SomeActor]("service-hello") -// actor.isInstanceOf[RoutedActorRef] must be(true) - -// val connectionCount = NrOfNodes - 1 -// val iterationCount = 10 - -// var replies = Map( -// "node1" -> 0, -// "node2" -> 0, -// "node3" -> 0) - -// for (i ← 0 until iterationCount) { -// for (k ← 0 until connectionCount) { -// val nodeName = (actor ? "hit").as[String].getOrElse(fail("No id returned by actor")) -// replies = replies + (nodeName -> (replies(nodeName) + 1)) -// } -// } - -// barrier("broadcast-end") -// actor ! 
Broadcast("end") - -// barrier("end") -// replies.values foreach { _ must be > (0) } - -// barrier("done") -// } -// } -// } - -// class GossipMembershipMultiJvmNode2 extends AkkaRemoteSpec(GossipMembershipMultiJvmSpec.node2Config) { -// import GossipMembershipMultiJvmSpec._ -// val nodes = NrOfNodes -// "___" must { -// "___" in { -// barrier("setup") -// remote.start() -// barrier("start") -// barrier("broadcast-end") -// barrier("end") -// barrier("done") -// } -// } -// } - -// class GossipMembershipMultiJvmNode3 extends AkkaRemoteSpec(GossipMembershipMultiJvmSpec.node3Config) { -// import GossipMembershipMultiJvmSpec._ -// val nodes = NrOfNodes -// "___" must { -// "___" in { -// barrier("setup") -// remote.start() -// barrier("start") -// barrier("broadcast-end") -// barrier("end") -// barrier("done") -// } -// } -// } - -// class GossipMembershipMultiJvmNode4 extends AkkaRemoteSpec(GossipMembershipMultiJvmSpec.node4Config) { -// import GossipMembershipMultiJvmSpec._ -// val nodes = NrOfNodes -// "___" must { -// "___" in { -// barrier("setup") -// remote.start() -// barrier("start") -// barrier("broadcast-end") -// barrier("end") -// barrier("done") -// } -// } -// } From ba9ed982caf3baac4c4f3bae13f3adbba24de38c Mon Sep 17 00:00:00 2001 From: Peter Vlugter Date: Wed, 14 Dec 2011 16:19:16 +1300 Subject: [PATCH 05/34] Re-enable akka-kernel and add small sample Created a new simple version of the microkernel for inclusion in the akka download and to be able to start working on sample applications --- akka-actor/src/main/resources/reference.conf | 31 ++-- .../main/scala/akka/actor/ActorSystem.scala | 3 - .../actor/BootableActorLoaderService.scala | 66 -------- .../src/main/scala/akka/util/AkkaLoader.scala | 91 ----------- .../src/main/scala/akka/util/Bootable.scala | 11 -- akka-kernel/src/main/resources/reference.conf | 19 +++ .../scala/akka/kernel/DefaultAkkaLoader.scala | 23 --- .../scala/akka/kernel/EmbeddedAppServer.scala | 73 --------- .../src/main/scala/akka/kernel/Kernel.scala | 36 ----- .../src/main/scala/akka/kernel/Main.scala | 143 ++++++++++++++++++ .../main/scala/akka/servlet/Initializer.scala | 33 ---- .../remote/BootableRemoteActorService.scala | 47 ------ .../src/main/config/akka.conf | 8 + .../sample/kernel/hello/HelloKernel.scala | 33 ++++ config/akka.conf | 4 +- config/microkernel-server.xml | 106 ------------- project/AkkaBuild.scala | 31 ++-- project/Dist.scala | 5 +- scripts/microkernel/akka | 15 +- 19 files changed, 247 insertions(+), 531 deletions(-) delete mode 100644 akka-actor/src/main/scala/akka/actor/BootableActorLoaderService.scala delete mode 100644 akka-actor/src/main/scala/akka/util/AkkaLoader.scala delete mode 100644 akka-actor/src/main/scala/akka/util/Bootable.scala create mode 100644 akka-kernel/src/main/resources/reference.conf delete mode 100644 akka-kernel/src/main/scala/akka/kernel/DefaultAkkaLoader.scala delete mode 100644 akka-kernel/src/main/scala/akka/kernel/EmbeddedAppServer.scala delete mode 100644 akka-kernel/src/main/scala/akka/kernel/Kernel.scala create mode 100644 akka-kernel/src/main/scala/akka/kernel/Main.scala delete mode 100644 akka-kernel/src/main/scala/akka/servlet/Initializer.scala delete mode 100644 akka-remote/src/main/scala/akka/remote/BootableRemoteActorService.scala create mode 100644 akka-samples/akka-sample-hello-kernel/src/main/config/akka.conf create mode 100644 akka-samples/akka-sample-hello-kernel/src/main/scala/sample/kernel/hello/HelloKernel.scala delete mode 100644 config/microkernel-server.xml diff --git 
a/akka-actor/src/main/resources/reference.conf b/akka-actor/src/main/resources/reference.conf index f31e61bcbe..b97252b915 100644 --- a/akka-actor/src/main/resources/reference.conf +++ b/akka-actor/src/main/resources/reference.conf @@ -7,10 +7,8 @@ akka { version = "2.0-SNAPSHOT" # Akka version, checked against the runtime version of Akka. - - home = "" # Home directory of Akka, modules in the deploy directory will be loaded - enabled-modules = [] # Comma separated list of the enabled modules. Options: ["cluster", "camel", "http"] + home = "" # Home directory of Akka, modules in the deploy directory will be loaded event-handlers = ["akka.event.Logging$DefaultLogger"] # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT) loglevel = "INFO" # Options: ERROR, WARNING, INFO, DEBUG @@ -19,20 +17,11 @@ akka { stdout-loglevel = "WARNING" # Loglevel for the very basic logger activated during AkkaApplication startup # FIXME: Is there any sensible reason why we have 2 different log levels? - logConfigOnStart = off # Log the complete configuration at INFO level when the actor system is started. + logConfigOnStart = off # Log the complete configuration at INFO level when the actor system is started. # This is useful when you are uncertain of what configuration is used. - extensions = [] # List FQCN of extensions which shall be loaded at actor system startup. - # FIXME: clarify "extensions" here, "Akka Extensions ()" - - # These boot classes are loaded (and created) automatically when the Akka Microkernel boots up - # Can be used to bootstrap your application(s) - # Should be the FQN (Fully Qualified Name) of the boot class which needs to have a default constructor - # boot = ["sample.camel.Boot", - # "sample.rest.java.Boot", - # "sample.rest.scala.Boot", - # "sample.security.Boot"] - boot = [] + extensions = [] # List FQCN of extensions which shall be loaded at actor system startup. + # FIXME: clarify "extensions" here, "Akka Extensions ()" actor { provider = "akka.actor.LocalActorRefProvider" @@ -45,7 +34,7 @@ akka { dispatcher-shutdown-timeout = 1s # How long dispatchers by default will wait for new actors until they shut down deployment { - + default { # deployment id pattern, e.g. /app/service-ping router = "direct" # routing (load-balance) scheme to use @@ -56,7 +45,7 @@ akka { # in several ways: # - nr-of-instances: will create that many children given the actor factory # supplied in the source code (overridable using create-as below) - # - target.paths: will look the paths up using actorFor and route to + # - target.paths: will look the paths up using actorFor and route to # them, i.e. will not create children nr-of-instances = 1 # number of children to create in case of a non-direct router; this setting @@ -67,11 +56,11 @@ akka { } target { - paths = [] # Alternatively to giving nr-of-instances you can specify the full paths of + paths = [] # Alternatively to giving nr-of-instances you can specify the full paths of # those actors which should be routed to. This setting takes precedence over # nr-of-instances } - + } } @@ -109,7 +98,7 @@ akka { fsm = off # enable DEBUG logging of all LoggingFSMs for events, transitions and timers event-stream = off # enable DEBUG logging of subscription changes on the eventStream } - + # Entries for pluggable serializers and their bindings. If a binding for a specific class is not found, # then the default serializer (Java serialization) is used. 
# @@ -146,5 +135,5 @@ akka { tickDuration = 100ms ticksPerWheel = 512 } - + } diff --git a/akka-actor/src/main/scala/akka/actor/ActorSystem.scala b/akka-actor/src/main/scala/akka/actor/ActorSystem.scala index af0ec81d7b..b24380b000 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorSystem.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorSystem.scala @@ -96,9 +96,6 @@ object ActorSystem { case "" ⇒ None case x ⇒ Some(x) } - val BootClasses: Seq[String] = getStringList("akka.boot").asScala - - val EnabledModules: Seq[String] = getStringList("akka.enabled-modules").asScala val SchedulerTickDuration = Duration(getMilliseconds("akka.scheduler.tickDuration"), MILLISECONDS) val SchedulerTicksPerWheel = getInt("akka.scheduler.ticksPerWheel") diff --git a/akka-actor/src/main/scala/akka/actor/BootableActorLoaderService.scala b/akka-actor/src/main/scala/akka/actor/BootableActorLoaderService.scala deleted file mode 100644 index 47c2cd86c7..0000000000 --- a/akka-actor/src/main/scala/akka/actor/BootableActorLoaderService.scala +++ /dev/null @@ -1,66 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - */ - -package akka.actor - -import java.io.File -import java.net.{ URL, URLClassLoader } -import java.util.jar.JarFile -import akka.util.Bootable - -/** - * Handles all modules in the deploy directory (load and unload) - */ -trait BootableActorLoaderService extends Bootable { - - def system: ActorSystem - - val BOOT_CLASSES = system.settings.BootClasses - lazy val applicationLoader = createApplicationClassLoader() - - protected def createApplicationClassLoader(): Option[ClassLoader] = Some({ - if (system.settings.Home.isDefined) { - val DEPLOY = system.settings.Home.get + "/deploy" - val DEPLOY_DIR = new File(DEPLOY) - if (!DEPLOY_DIR.exists) { - System.exit(-1) - } - val filesToDeploy = DEPLOY_DIR.listFiles.toArray.toList - .asInstanceOf[List[File]].filter(_.getName.endsWith(".jar")) - var dependencyJars: List[URL] = Nil - filesToDeploy.map { file ⇒ - val jarFile = new JarFile(file) - val en = jarFile.entries - while (en.hasMoreElements) { - val name = en.nextElement.getName - if (name.endsWith(".jar")) dependencyJars ::= new File( - String.format("jar:file:%s!/%s", jarFile.getName, name)).toURI.toURL - } - } - val toDeploy = filesToDeploy.map(_.toURI.toURL) - val allJars = toDeploy ::: dependencyJars - - new URLClassLoader(allJars.toArray, Thread.currentThread.getContextClassLoader) - } else Thread.currentThread.getContextClassLoader - }) - - abstract override def onLoad() = { - super.onLoad() - - applicationLoader foreach Thread.currentThread.setContextClassLoader - - for (loader ← applicationLoader; clazz ← BOOT_CLASSES) { - loader.loadClass(clazz).newInstance - } - } - - abstract override def onUnload() = { - super.onUnload() - } -} - -/** - * Java API for the default JAX-RS/Mist Initializer - */ -class DefaultBootableActorLoaderService(val system: ActorSystem) extends BootableActorLoaderService diff --git a/akka-actor/src/main/scala/akka/util/AkkaLoader.scala b/akka-actor/src/main/scala/akka/util/AkkaLoader.scala deleted file mode 100644 index f2bf63c137..0000000000 --- a/akka-actor/src/main/scala/akka/util/AkkaLoader.scala +++ /dev/null @@ -1,91 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. 
- */ - -package akka.util -import akka.actor.ActorSystem - -/* - * This class is responsible for booting up a stack of bundles and then shutting them down - */ -class AkkaLoader(system: ActorSystem) { - private val hasBooted = new Switch(false) - - @volatile - private var _bundles: Option[Bootable] = None - - def bundles = _bundles; - - /* - * Boot initializes the specified bundles - */ - def boot(withBanner: Boolean, b: Bootable): Unit = hasBooted switchOn { - if (withBanner) printBanner() - println("Starting Akka...") - b.onLoad() - Thread.currentThread.setContextClassLoader(getClass.getClassLoader) - _bundles = Some(b) - println("Akka started successfully") - } - - /* - * Shutdown, well, shuts down the bundles used in boot - */ - def shutdown() { - hasBooted switchOff { - println("Shutting down Akka...") - _bundles.foreach(_.onUnload()) - _bundles = None - println("Akka succesfully shut down") - } - } - - private def printBanner() { - println(""" -============================================================================== - - ZZ: - ZZZZ - ZZZZZZ - ZZZ' ZZZ - ~7 7ZZ' ZZZ - :ZZZ: IZZ' ZZZ - ,OZZZZ.~ZZ? ZZZ - ZZZZ' 'ZZZ$ ZZZ - . $ZZZ ~ZZ$ ZZZ - .=Z?. .ZZZO ~ZZ7 OZZ - .ZZZZ7..:ZZZ~ 7ZZZ ZZZ~ - .$ZZZ$Z+.ZZZZ ZZZ: ZZZ$ - .,ZZZZ?' =ZZO= .OZZ 'ZZZ - .$ZZZZ+ .ZZZZ IZZZ ZZZ$ - .ZZZZZ' .ZZZZ' .ZZZ$ ?ZZZ - .ZZZZZZ' .OZZZ? ?ZZZ 'ZZZ$ - .?ZZZZZZ' .ZZZZ? .ZZZ? 'ZZZO - .+ZZZZZZ?' .7ZZZZ' .ZZZZ :ZZZZ - .ZZZZZZ$' .?ZZZZZ' .~ZZZZ 'ZZZZ. - - - NNNNN $NNNN+ - NNNNN $NNNN+ - NNNNN $NNNN+ - NNNNN $NNNN+ - NNNNN $NNNN+ - =NNNNNNNNND$ NNNNN DDDDDD: $NNNN+ DDDDDN NDDNNNNNNNN, - NNNNNNNNNNNNND NNNNN DNNNNN $NNNN+ 8NNNNN= :NNNNNNNNNNNNNN - NNNNN$ DNNNNN NNNNN $NNNNN~ $NNNN+ NNNNNN NNNNN, :NNNNN+ - ?DN~ NNNNN NNNNN MNNNNN $NNNN+:NNNNN7 $ND =NNNNN - DNNNNN NNNNNDNNNN$ $NNNNDNNNNN :DNNNNN - ZNDNNNNNNNNND NNNNNNNNNND, $NNNNNNNNNNN DNDNNNNNNNNNN - NNNNNNNDDINNNNN NNNNNNNNNNND $NNNNNNNNNNND ONNNNNNND8+NNNNN - :NNNND NNNNN NNNNNN DNNNN, $NNNNNO 7NNNND NNNNNO :NNNNN - DNNNN NNNNN NNNNN DNNNN $NNNN+ 8NNNNN NNNNN $NNNNN - DNNNNO NNNNNN NNNNN NNNNN $NNNN+ NNNNN$ NNNND, ,NNNNND - NNNNNNDDNNNNNNNN NNNNN =NNNNN $NNNN+ DNNNN? DNNNNNNDNNNNNNNND - NNNNNNNNN NNNN$ NNNNN 8NNNND $NNNN+ NNNNN= ,DNNNNNNND NNNNN$ - -============================================================================== - Running version %s -============================================================================== -""".format(ActorSystem.Version)) - } -} diff --git a/akka-actor/src/main/scala/akka/util/Bootable.scala b/akka-actor/src/main/scala/akka/util/Bootable.scala deleted file mode 100644 index a7a55f58e7..0000000000 --- a/akka-actor/src/main/scala/akka/util/Bootable.scala +++ /dev/null @@ -1,11 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. 
- */ - -package akka.util -import akka.actor.ActorSystem - -trait Bootable { - def onLoad() {} - def onUnload() {} -} diff --git a/akka-kernel/src/main/resources/reference.conf b/akka-kernel/src/main/resources/reference.conf new file mode 100644 index 0000000000..3c27d985df --- /dev/null +++ b/akka-kernel/src/main/resources/reference.conf @@ -0,0 +1,19 @@ +##################################### +# Akka Kernel Reference Config File # +##################################### + +# This reference config file has all the default settings +# Make your edits/overrides in your akka.conf + + +akka { + + kernel { + # The name of the actor system created by the Akka Microkernel + system.name = "default" + + # Boot classes are loaded and created automatically when the Akka Microkernel starts up + # A list of FQNs (Fully Qualified Names) of classes that implement akka.kernel.Bootable + boot = [] + } +} diff --git a/akka-kernel/src/main/scala/akka/kernel/DefaultAkkaLoader.scala b/akka-kernel/src/main/scala/akka/kernel/DefaultAkkaLoader.scala deleted file mode 100644 index 88645ceff8..0000000000 --- a/akka-kernel/src/main/scala/akka/kernel/DefaultAkkaLoader.scala +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - */ - -package akka.http - -import akka.config.Config -import akka.util.{ Bootable, AkkaLoader } -import akka.cluster.BootableRemoteActorService -import akka.actor.BootableActorLoaderService - -class DefaultAkkaLoader extends AkkaLoader { - def boot(): Unit = boot(true, new EmbeddedAppServer with BootableActorLoaderService with BootableRemoteActorService) -} - -/** - * Can be used to boot Akka - * - * java -cp ... akka.http.Main - */ -object Main extends DefaultAkkaLoader { - def main(args: Array[String]) = boot -} diff --git a/akka-kernel/src/main/scala/akka/kernel/EmbeddedAppServer.scala b/akka-kernel/src/main/scala/akka/kernel/EmbeddedAppServer.scala deleted file mode 100644 index 84d0006ea6..0000000000 --- a/akka-kernel/src/main/scala/akka/kernel/EmbeddedAppServer.scala +++ /dev/null @@ -1,73 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. 
- */ - -package akka.http - -import javax.ws.rs.core.UriBuilder -import javax.servlet.ServletConfig -import java.io.File - -import akka.actor.BootableActorLoaderService -import akka.util.Bootable - -import org.eclipse.jetty.xml.XmlConfiguration -import org.eclipse.jetty.server.{ Handler, Server } -import org.eclipse.jetty.server.handler.{ HandlerList, HandlerCollection, ContextHandler } -import java.net.URL -import akka.AkkaException - -/** - * Handles the Akka Comet Support (load/unload) - */ -trait EmbeddedAppServer extends Bootable { - self: BootableActorLoaderService ⇒ - - import akka.config.Config._ - - val REST_HOSTNAME = config.getString("akka.http.hostname", "localhost") - val REST_PORT = config.getInt("akka.http.port", 9998) - - val isRestEnabled = config.getList("akka.enabled-modules").exists(_ == "http") - - protected var server: Option[Server] = None - - protected def findJettyConfigXML: Option[URL] = - Option(applicationLoader.getOrElse(this.getClass.getClassLoader).getResource("microkernel-server.xml")) orElse - HOME.map(home ⇒ new File(home + "/config/microkernel-server.xml").toURI.toURL) - - abstract override def onLoad = { - super.onLoad - if (isRestEnabled) { - - val configuration = new XmlConfiguration(findJettyConfigXML.getOrElse(sys.error("microkernel-server.xml not found!"))) - - System.setProperty("jetty.port", REST_PORT.toString) - System.setProperty("jetty.host", REST_HOSTNAME) - - HOME.foreach(home ⇒ System.setProperty("jetty.home", home + "/deploy/root")) - - server = Option(configuration.configure.asInstanceOf[Server]) map { s ⇒ //Set the correct classloader to our contexts - applicationLoader foreach { loader ⇒ - //We need to provide the correct classloader to the servlets - def setClassLoader(handlers: Seq[Handler]) { - handlers foreach { - case c: ContextHandler ⇒ c.setClassLoader(loader) - case c: HandlerCollection ⇒ setClassLoader(c.getHandlers) - case _ ⇒ - } - } - setClassLoader(s.getHandlers) - } - //Start the server - s.start() - s - } - } - } - - abstract override def onUnload = { - super.onUnload - server foreach { _.stop() } - } -} diff --git a/akka-kernel/src/main/scala/akka/kernel/Kernel.scala b/akka-kernel/src/main/scala/akka/kernel/Kernel.scala deleted file mode 100644 index 74c90b47c7..0000000000 --- a/akka-kernel/src/main/scala/akka/kernel/Kernel.scala +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Copyright (C) 2009-2010 Typesafe Inc. - */ - -package akka.kernel - -import akka.http.EmbeddedAppServer -import akka.util.AkkaLoader -import akka.cluster.BootableRemoteActorService -import akka.actor.BootableActorLoaderService -import akka.camel.CamelService - -import java.util.concurrent.CountDownLatch - -object Main { - val keepAlive = new CountDownLatch(2) - - def main(args: Array[String]) = { - Kernel.boot - keepAlive.await - } -} - -/** - * The Akka Kernel, is used to start And postStop Akka in standalone/kernel mode. 
- */ -object Kernel extends AkkaLoader { - - def boot(): Unit = boot(true, new EmbeddedAppServer with BootableActorLoaderService with BootableRemoteActorService with CamelService) - - // For testing purposes only - def startRemoteService(): Unit = bundles.foreach(_ match { - case x: BootableRemoteActorService ⇒ x.startRemoteService() - case _ ⇒ - }) -} diff --git a/akka-kernel/src/main/scala/akka/kernel/Main.scala b/akka-kernel/src/main/scala/akka/kernel/Main.scala new file mode 100644 index 0000000000..915847c7c7 --- /dev/null +++ b/akka-kernel/src/main/scala/akka/kernel/Main.scala @@ -0,0 +1,143 @@ +/** + * Copyright (C) 2009-2010 Typesafe Inc. + */ + +package akka.kernel + +import akka.actor.ActorSystem +import com.typesafe.config.ConfigFactory +import java.io.File +import java.lang.Boolean.getBoolean +import java.net.{ URL, URLClassLoader } +import java.util.jar.JarFile +import scala.collection.JavaConverters._ + +trait Bootable { + def startup(system: ActorSystem): Unit + def shutdown(system: ActorSystem): Unit +} + +object Main { + val quiet = getBoolean("akka.kernel.quiet") + + def log(s: String) = if (!quiet) println(s) + + def main(args: Array[String]) = { + log(banner) + log("Starting Akka...") + log("Running Akka " + ActorSystem.Version) + + val config = ConfigFactory.load("akka.conf") + val name = config.getString("akka.kernel.system.name") + val system = ActorSystem(name, config) + val classLoader = deployJars(system) + + log("Created actor system '%s'" format name) + + Thread.currentThread.setContextClassLoader(classLoader) + + val bootClasses: Seq[String] = system.settings.config.getStringList("akka.kernel.boot").asScala + val bootables: Seq[Bootable] = bootClasses map { c ⇒ classLoader.loadClass(c).newInstance.asInstanceOf[Bootable] } + + for (bootable ← bootables) { + log("Starting up " + bootable.getClass.getName) + bootable.startup(system) + } + + addShutdownHook(system, bootables) + + log("Successfully started Akka") + } + + def deployJars(system: ActorSystem): ClassLoader = { + if (system.settings.Home.isEmpty) { + log("Akka home is not defined") + System.exit(1) + Thread.currentThread.getContextClassLoader + } else { + val home = system.settings.Home.get + val deploy = new File(home, "deploy") + + if (!deploy.exists) { + log("No deploy dir found at " + deploy) + log("Please check that akka home is defined correctly") + System.exit(1) + } + + val jars = deploy.listFiles.filter(_.getName.endsWith(".jar")) + + val nestedJars = jars flatMap { jar ⇒ + val jarFile = new JarFile(jar) + val jarEntries = jarFile.entries.asScala.toArray.filter(_.getName.endsWith(".jar")) + jarEntries map { entry ⇒ new File("jar:file:%s!/%s" format (jarFile.getName, entry.getName)) } + } + + val urls = (jars ++ nestedJars) map { _.toURI.toURL } + + urls foreach { url ⇒ log("Deploying " + url) } + + new URLClassLoader(urls, Thread.currentThread.getContextClassLoader) + } + } + + def addShutdownHook(system: ActorSystem, bootables: Seq[Bootable]): Unit = { + Runtime.getRuntime.addShutdownHook(new Thread(new Runnable { + def run = { + log("") + log("Received signal to stop") + log("Shutting down Akka...") + for (bootable ← bootables) { + log("Shutting down " + bootable.getClass.getName) + bootable.shutdown(system) + } + system.stop() + log("Successfully shut down Akka") + } + })) + } + + def banner = """ +============================================================================== + + ZZ: + ZZZZ + ZZZZZZ + ZZZ' ZZZ + ~7 7ZZ' ZZZ + :ZZZ: IZZ' ZZZ + ,OZZZZ.~ZZ? ZZZ + ZZZZ' 'ZZZ$ ZZZ + . 
$ZZZ ~ZZ$ ZZZ + .=Z?. .ZZZO ~ZZ7 OZZ + .ZZZZ7..:ZZZ~ 7ZZZ ZZZ~ + .$ZZZ$Z+.ZZZZ ZZZ: ZZZ$ + .,ZZZZ?' =ZZO= .OZZ 'ZZZ + .$ZZZZ+ .ZZZZ IZZZ ZZZ$ + .ZZZZZ' .ZZZZ' .ZZZ$ ?ZZZ + .ZZZZZZ' .OZZZ? ?ZZZ 'ZZZ$ + .?ZZZZZZ' .ZZZZ? .ZZZ? 'ZZZO + .+ZZZZZZ?' .7ZZZZ' .ZZZZ :ZZZZ + .ZZZZZZ$' .?ZZZZZ' .~ZZZZ 'ZZZZ. + + + NNNNN $NNNN+ + NNNNN $NNNN+ + NNNNN $NNNN+ + NNNNN $NNNN+ + NNNNN $NNNN+ + =NNNNNNNNND$ NNNNN DDDDDD: $NNNN+ DDDDDN NDDNNNNNNNN, + NNNNNNNNNNNNND NNNNN DNNNNN $NNNN+ 8NNNNN= :NNNNNNNNNNNNNN + NNNNN$ DNNNNN NNNNN $NNNNN~ $NNNN+ NNNNNN NNNNN, :NNNNN+ + ?DN~ NNNNN NNNNN MNNNNN $NNNN+:NNNNN7 $ND =NNNNN + DNNNNN NNNNNDNNNN$ $NNNNDNNNNN :DNNNNN + ZNDNNNNNNNNND NNNNNNNNNND, $NNNNNNNNNNN DNDNNNNNNNNNN + NNNNNNNDDINNNNN NNNNNNNNNNND $NNNNNNNNNNND ONNNNNNND8+NNNNN + :NNNND NNNNN NNNNNN DNNNN, $NNNNNO 7NNNND NNNNNO :NNNNN + DNNNN NNNNN NNNNN DNNNN $NNNN+ 8NNNNN NNNNN $NNNNN + DNNNNO NNNNNN NNNNN NNNNN $NNNN+ NNNNN$ NNNND, ,NNNNND + NNNNNNDDNNNNNNNN NNNNN =NNNNN $NNNN+ DNNNN? DNNNNNNDNNNNNNNND + NNNNNNNNN NNNN$ NNNNN 8NNNND $NNNN+ NNNNN= ,DNNNNNNND NNNNN$ + +============================================================================== +""" +} diff --git a/akka-kernel/src/main/scala/akka/servlet/Initializer.scala b/akka-kernel/src/main/scala/akka/servlet/Initializer.scala deleted file mode 100644 index b91e5ae439..0000000000 --- a/akka-kernel/src/main/scala/akka/servlet/Initializer.scala +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - */ - -package akka.servlet - -import akka.cluster.BootableRemoteActorService -import akka.actor.BootableActorLoaderService -import akka.config.Config -import akka.util.{ Bootable, AkkaLoader } - -import javax.servlet.{ ServletContextListener, ServletContextEvent } - -/** - * This class can be added to web.xml mappings as a listener to start and postStop Akka. - * - * - * ... - * - * akka.servlet.Initializer - * - * ... - * - */ -class Initializer extends ServletContextListener { - lazy val loader = new AkkaLoader - - def contextDestroyed(e: ServletContextEvent): Unit = - loader.shutdown - - def contextInitialized(e: ServletContextEvent): Unit = - loader.boot(true, new BootableActorLoaderService with BootableRemoteActorService) -} diff --git a/akka-remote/src/main/scala/akka/remote/BootableRemoteActorService.scala b/akka-remote/src/main/scala/akka/remote/BootableRemoteActorService.scala deleted file mode 100644 index e3bd903c07..0000000000 --- a/akka-remote/src/main/scala/akka/remote/BootableRemoteActorService.scala +++ /dev/null @@ -1,47 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - */ - -package akka.remote - -import akka.actor.{ Actor, BootableActorLoaderService } -import akka.util.{ ReflectiveAccess, Bootable } - -// TODO: remove me - remoting is enabled through the RemoteActorRefProvider - -/** - * This bundle/service is responsible for booting up and shutting down the remote actors facility. - *

- * It is used in Kernel. - */ -/* -trait BootableRemoteActorService extends Bootable { - self: BootableActorLoaderService ⇒ - - def settings: RemoteServerSettings - - protected lazy val remoteServerThread = new Thread(new Runnable() { - def run = system.remote.start(self.applicationLoader.getOrElse(null)) //Use config host/port - }, "Akka RemoteModule Service") - - def startRemoteService() { remoteServerThread.start() } - - abstract override def onLoad() { - if (system.reflective.ClusterModule.isEnabled && settings.isRemotingEnabled) { - system.eventHandler.info(this, "Initializing Remote Actors Service...") - startRemoteService() - system.eventHandler.info(this, "Remote Actors Service initialized") - } - super.onLoad() - } - - abstract override def onUnload() { - system.eventHandler.info(this, "Shutting down Remote Actors Service") - - system.remote.shutdown() - if (remoteServerThread.isAlive) remoteServerThread.join(1000) - system.eventHandler.info(this, "Remote Actors Service has been shut down") - super.onUnload() - } -} -*/ diff --git a/akka-samples/akka-sample-hello-kernel/src/main/config/akka.conf b/akka-samples/akka-sample-hello-kernel/src/main/config/akka.conf new file mode 100644 index 0000000000..181b1e10b1 --- /dev/null +++ b/akka-samples/akka-sample-hello-kernel/src/main/config/akka.conf @@ -0,0 +1,8 @@ +# Config for the Hello Kernel sample + +akka { + kernel { + system.name = "hellokernel" + boot = ["sample.kernel.hello.HelloKernel"] + } +} diff --git a/akka-samples/akka-sample-hello-kernel/src/main/scala/sample/kernel/hello/HelloKernel.scala b/akka-samples/akka-sample-hello-kernel/src/main/scala/sample/kernel/hello/HelloKernel.scala new file mode 100644 index 0000000000..bea62f6176 --- /dev/null +++ b/akka-samples/akka-sample-hello-kernel/src/main/scala/sample/kernel/hello/HelloKernel.scala @@ -0,0 +1,33 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. + */ +package sample.kernel.hello + +import akka.actor.{ Actor, ActorSystem, Props } +import akka.kernel.Bootable + +case object Start + +class HelloActor extends Actor { + val worldActor = context.actorOf(Props[WorldActor]) + + def receive = { + case Start ⇒ worldActor ! "Hello" + case message: String ⇒ + println("Received message '%s'" format message) + } +} + +class WorldActor extends Actor { + def receive = { + case message: String ⇒ sender ! (message.toUpperCase + " world!") + } +} + +class HelloKernel extends Bootable { + def startup(system: ActorSystem) = { + system.actorOf(Props[HelloActor]) ! Start + } + + def shutdown(system: ActorSystem) = {} +} diff --git a/config/akka.conf b/config/akka.conf index 64883cf7c1..2f7ad95abd 100644 --- a/config/akka.conf +++ b/config/akka.conf @@ -1,2 +1,2 @@ -# In this file you can override any option defined in the 'akka-reference.conf' file. -# Copy in all or parts of the 'akka-reference.conf' file and modify as you please. +# In this file you can override any option defined in the 'reference.conf' files. +# Copy in all or parts of the 'reference.conf' files and modify as you please. 
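For context, the HelloKernel sample above is all that the new kernel contract requires: a class implementing akka.kernel.Bootable, listed under akka.kernel.boot and packaged into a jar under $AKKA_HOME/deploy. A minimal sketch for one's own application might look like the following (the sample.myapp.MyKernel and MyService names are hypothetical, chosen only for illustration; they are not part of this patch):

    package sample.myapp

    import akka.actor.{ Actor, ActorSystem, Props }
    import akka.kernel.Bootable

    // Hypothetical service actor, for illustration only
    class MyService extends Actor {
      def receive = {
        case msg ⇒ println("MyService received: " + msg)
      }
    }

    // Loaded reflectively by akka.kernel.Main when listed in akka.kernel.boot,
    // e.g. akka { kernel { boot = ["sample.myapp.MyKernel"] } } in akka.conf
    class MyKernel extends Bootable {
      def startup(system: ActorSystem) = {
        system.actorOf(Props[MyService]) ! "start"
      }

      def shutdown(system: ActorSystem) = {
        // nothing extra to clean up; Main's shutdown hook stops the actor system
      }
    }

On boot, akka.kernel.Main creates the ActorSystem named by akka.kernel.system.name, loads each class listed in akka.kernel.boot, and calls startup on it; the registered shutdown hook later calls shutdown on each Bootable before stopping the system.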
diff --git a/config/microkernel-server.xml b/config/microkernel-server.xml deleted file mode 100644 index 07fda30fcf..0000000000 --- a/config/microkernel-server.xml +++ /dev/null @@ -1,106 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 300000 - 2 - false - 8443 - 20000 - 5000 - - - - - - - - - - - - - - - - - / - - akka.http.AkkaMistServlet - /* - - - - - - - - - - - - - - - true - true - true - 1000 - - diff --git a/project/AkkaBuild.scala b/project/AkkaBuild.scala index cbad5fda90..dee67c7803 100644 --- a/project/AkkaBuild.scala +++ b/project/AkkaBuild.scala @@ -30,7 +30,7 @@ object AkkaBuild extends Build { Unidoc.unidocExclude := Seq(samples.id, tutorials.id), Dist.distExclude := Seq(actorTests.id, akkaSbtPlugin.id, docs.id) ), - aggregate = Seq(actor, testkit, actorTests, stm, remote, slf4j, amqp, mailboxes, akkaSbtPlugin, samples, tutorials, docs) + aggregate = Seq(actor, testkit, actorTests, stm, remote, slf4j, amqp, mailboxes, kernel, akkaSbtPlugin, samples, tutorials, docs) ) lazy val actor = Project( @@ -192,14 +192,14 @@ object AkkaBuild extends Build { // ) // ) - // lazy val kernel = Project( - // id = "akka-kernel", - // base = file("akka-kernel"), - // dependencies = Seq(cluster, slf4j, spring), - // settings = defaultSettings ++ Seq( - // libraryDependencies ++= Dependencies.kernel - // ) - // ) + lazy val kernel = Project( + id = "akka-kernel", + base = file("akka-kernel"), + dependencies = Seq(actor), + settings = defaultSettings ++ Seq( + libraryDependencies ++= Dependencies.kernel + ) + ) lazy val akkaSbtPlugin = Project( id = "akka-sbt-plugin", @@ -213,7 +213,7 @@ object AkkaBuild extends Build { id = "akka-samples", base = file("akka-samples"), settings = parentSettings, - aggregate = Seq(fsmSample, helloSample) + aggregate = Seq(fsmSample, helloSample, helloKernelSample) ) lazy val fsmSample = Project( @@ -230,6 +230,13 @@ object AkkaBuild extends Build { settings = defaultSettings ) + lazy val helloKernelSample = Project( + id = "akka-sample-hello-kernel", + base = file("akka-samples/akka-sample-hello-kernel"), + dependencies = Seq(kernel), + settings = defaultSettings + ) + lazy val tutorials = Project( id = "akka-tutorials", base = file("akka-tutorials"), @@ -388,9 +395,7 @@ object Dependencies { val spring = Seq(springBeans, springContext, Test.junit, Test.scalatest) - val kernel = Seq( - jettyUtil, jettyXml, jettyServlet, jacksonCore, staxApi - ) + val kernel = Seq() // TODO: resolve Jetty version conflict // val sampleCamel = Seq(camelCore, camelSpring, commonsCodec, Runtime.camelJms, Runtime.activemq, Runtime.springJms, diff --git a/project/Dist.scala b/project/Dist.scala index 3f3af68098..291962229d 100644 --- a/project/Dist.scala +++ b/project/Dist.scala @@ -66,9 +66,8 @@ object Dist { val libAkka = lib / "akka" val src = base / "src" / "akka" IO.delete(unzipped) - // TODO: re-enable bin and config dirs, and add deploy dir, when akka-kernel is enabled - //copyFilesTo(scripts, bin, setExecutable = true) - //IO.copyDirectory(configSources, config) + copyFilesTo(scripts, bin, setExecutable = true) + IO.copyDirectory(configSources, config) IO.copyDirectory(allSources.api, api) IO.copyDirectory(allSources.docs, docs) copyFilesTo(allSources.docJars, docJars) diff --git a/scripts/microkernel/akka b/scripts/microkernel/akka index 4241d2693d..013fdea25a 100755 --- a/scripts/microkernel/akka +++ b/scripts/microkernel/akka @@ -1,9 +1,18 @@ -#!/bin/bash +#!/usr/bin/env bash -AKKA_HOME="$(cd "$(cd "$(dirname "$0")"; pwd -P)"/..; pwd)" 
+declare quiet="false" + +while true; do + case "$1" in + -q | --quiet ) quiet="true"; shift ;; + * ) break ;; + esac +done + +declare AKKA_HOME="$(cd "$(cd "$(dirname "$0")"; pwd -P)"/..; pwd)" [ -n "$JAVA_OPTS" ] || JAVA_OPTS="-Xms1536M -Xmx1536M -Xss1M -XX:MaxPermSize=256M -XX:+UseParallelGC" [ -n "$AKKA_CLASSPATH" ] || AKKA_CLASSPATH="$AKKA_HOME/lib/scala-library.jar:$AKKA_HOME/lib/akka/*:$AKKA_HOME/config" -java $JAVA_OPTS -cp "$AKKA_CLASSPATH" -Dakka.home="$AKKA_HOME" akka.kernel.Main +java $JAVA_OPTS -cp "$AKKA_CLASSPATH" -Dakka.home="$AKKA_HOME" -Dakka.kernel.quiet=$quiet akka.kernel.Main From b058e6a4712561a98d093788970ac99975984871 Mon Sep 17 00:00:00 2001 From: Peter Vlugter Date: Wed, 14 Dec 2011 17:15:06 +1300 Subject: [PATCH 06/34] Add config spec for akka kernel --- .../test/scala/akka/config/ConfigSpec.scala | 1 - .../test/scala/akka/kernel/ConfigSpec.scala | 23 +++++++++++++++++++ project/AkkaBuild.scala | 4 ++-- 3 files changed, 25 insertions(+), 3 deletions(-) create mode 100644 akka-kernel/src/test/scala/akka/kernel/ConfigSpec.scala diff --git a/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala b/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala index b3a625e7b4..b0529e19cf 100644 --- a/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala @@ -20,7 +20,6 @@ class ConfigSpec extends AkkaSpec(ConfigFactory.defaultReference) { val config = settings.config import config._ - getList("akka.boot").asScala.toSeq must equal(Nil) getString("akka.version") must equal("2.0-SNAPSHOT") settings.ConfigVersion must equal("2.0-SNAPSHOT") diff --git a/akka-kernel/src/test/scala/akka/kernel/ConfigSpec.scala b/akka-kernel/src/test/scala/akka/kernel/ConfigSpec.scala new file mode 100644 index 0000000000..938ed34b6b --- /dev/null +++ b/akka-kernel/src/test/scala/akka/kernel/ConfigSpec.scala @@ -0,0 +1,23 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. + */ + +package akka.kernel + +import akka.testkit.AkkaSpec +import com.typesafe.config.ConfigFactory +import scala.collection.JavaConverters._ + +@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) +class ConfigSpec extends AkkaSpec(ConfigFactory.defaultReference) { + + "The default configuration file (i.e. 
reference.conf)" must { + "contain correct defaults for akka-kernel" in { + + val config = system.settings.config + + config.getString("akka.kernel.system.name") must be === "default" + config.getList("akka.kernel.boot").asScala.toList must be === Nil + } + } +} diff --git a/project/AkkaBuild.scala b/project/AkkaBuild.scala index dee67c7803..e4f7bbc1e3 100644 --- a/project/AkkaBuild.scala +++ b/project/AkkaBuild.scala @@ -195,7 +195,7 @@ object AkkaBuild extends Build { lazy val kernel = Project( id = "akka-kernel", base = file("akka-kernel"), - dependencies = Seq(actor), + dependencies = Seq(actor, testkit % "test->test"), settings = defaultSettings ++ Seq( libraryDependencies ++= Dependencies.kernel ) @@ -395,7 +395,7 @@ object Dependencies { val spring = Seq(springBeans, springContext, Test.junit, Test.scalatest) - val kernel = Seq() + val kernel = Seq(Test.scalatest, Test.junit) // TODO: resolve Jetty version conflict // val sampleCamel = Seq(camelCore, camelSpring, commonsCodec, Runtime.camelJms, Runtime.activemq, Runtime.springJms, From 49e350a81571c5268d1809b4ed09fdd49fe1a55c Mon Sep 17 00:00:00 2001 From: Henrik Engstrom Date: Wed, 14 Dec 2011 12:08:47 +0100 Subject: [PATCH 07/34] Updated introduction documents to Akka 2.0. Fixes #1480 --- akka-docs/intro/deployment-scenarios.rst | 51 +- .../intro/getting-started-first-java.rst | 499 +++--------------- .../getting-started-first-scala-eclipse.rst | 340 ++++++------ .../intro/getting-started-first-scala.rst | 137 +++-- akka-docs/intro/use-cases.rst | 3 +- akka-docs/intro/what-is-akka.rst | 11 +- akka-docs/intro/why-akka.rst | 10 +- .../java/akka/tutorial/first/java/Pi.java | 13 +- .../src/main/scala/Pi.scala | 7 +- 9 files changed, 327 insertions(+), 744 deletions(-) diff --git a/akka-docs/intro/deployment-scenarios.rst b/akka-docs/intro/deployment-scenarios.rst index a5da196d24..bf4cd26f02 100644 --- a/akka-docs/intro/deployment-scenarios.rst +++ b/akka-docs/intro/deployment-scenarios.rst @@ -13,8 +13,7 @@ Akka can be used in two different ways: - As a library: used as a regular JAR on the classpath and/or in a web app, to be put into ``WEB-INF/lib`` -- As a microkernel: stand-alone microkernel, embedding a servlet container along - with many other services +- As a stand alone application by instantiating ActorSystem Using Akka as library @@ -35,51 +34,3 @@ Actors as regular services referenced from your Web application. You should also be able to use the Remoting service, e.g. be able to make certain Actors remote on other hosts. Please note that remoting service does not speak HTTP over port 80, but a custom protocol over the port is specified in :ref:`configuration`. - - -Using Akka as a stand alone microkernel ---------------------------------------- - -Akka can also be run as a stand-alone microkernel. It implements a full -enterprise stack. See the :ref:`microkernel` for more information. - -Using the Akka sbt plugin to package your application -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The Akka sbt plugin can create a full Akka microkernel deployment for your sbt -project. - -To use the plugin, first add a plugin definition to your sbt project by creating -``project/plugins.sbt`` with:: - - resolvers += Classpaths.typesafeResolver - - addSbtPlugin("com.typesafe.akka" % "akka-sbt-plugin" % "2.0-SNAPSHOT") - -Then use the AkkaKernelPlugin settings. In a 'light' configuration (build.sbt):: - - seq(akka.sbt.AkkaKernelPlugin.distSettings: _*) - -Or in a 'full' configuration (Build.scala). 
For example:: - - import sbt._ - import sbt.Keys._ - import akka.sbt.AkkaKernelPlugin - - object SomeBuild extends Build { - lazy val someProject = Project( - id = "some-project", - base = file("."), - settings = Defaults.defaultSettings ++ AkkaKernelPlugin.distSettings ++ Seq( - organization := "org.some", - version := "0.1", - scalaVersion := "2.9.1" - resolvers += "Typesafe Repo" at "http://repo.typesafe.com/typesafe/releases/", - libraryDependencies += "com.typesafe.akka" % "akka-kernel" % "2.0-SNAPSHOT" - ) - ) - } - -To build a microkernel deployment use the ``dist`` task:: - - sbt dist diff --git a/akka-docs/intro/getting-started-first-java.rst b/akka-docs/intro/getting-started-first-java.rst index 6d429b160d..34a41c7c94 100644 --- a/akka-docs/intro/getting-started-first-java.rst +++ b/akka-docs/intro/getting-started-first-java.rst @@ -167,7 +167,8 @@ It should now look something like this: + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 + http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 akka-tutorial-first-java @@ -213,28 +214,15 @@ Start writing the code Now it's about time to start hacking. -We start by creating a ``Pi.java`` file and adding these import statements at the top of the file:: +We start by creating a ``Pi.java`` file and adding these import statements at the top of the file: - package akka.tutorial.first.java; - - import static akka.actor.Actors.actorOf; - import static akka.actor.Actors.poisonPill; - import static java.util.Arrays.asList; - - import akka.actor.Props; - import akka.actor.ActorRef; - import akka.actor.UntypedActor; - import akka.actor.UntypedActorFactory; - import akka.routing.CyclicIterator; - import akka.routing.InfiniteIterator; - import akka.routing.Routing.Broadcast; - import akka.routing.UntypedLoadBalancer; - - import java.util.concurrent.CountDownLatch; +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java#imports If you are using Maven in this tutorial then create the file in the ``src/main/java/akka/tutorial/first/java`` directory. -If you are using the command line tools then create the file wherever you want. I will create it in a directory called ``tutorial`` at the root of the Akka distribution, e.g. in ``$AKKA_HOME/tutorial/akka/tutorial/first/java/Pi.java``. +If you are using the command line tools then create the file wherever you want. +We will create it in a directory called ``tutorial`` at the root of the Akka distribution, +e.g. in ``$AKKA_HOME/tutorial/akka/tutorial/first/java/Pi.java``. Creating the messages --------------------- @@ -247,466 +235,101 @@ With this in mind, let's now create the messages that we want to have flowing in - ``Work`` -- sent from the ``Master`` actor to the ``Worker`` actors containing the work assignment - ``Result`` -- sent from the ``Worker`` actors to the ``Master`` actor containing the result from the worker's calculation -Messages sent to actors should always be immutable to avoid sharing mutable state. So let's start by creating three messages as immutable POJOs. We also create a wrapper ``Pi`` class to hold our implementation:: +Messages sent to actors should always be immutable to avoid sharing mutable state. So let's start by creating three messages as immutable POJOs. 
We also create a wrapper ``Pi`` class to hold our implementation: - public class Pi { - - static class Calculate {} - - static class Work { - private final int start; - private final int nrOfElements; - - public Work(int start, int nrOfElements) { - this.start = start; - this.nrOfElements = nrOfElements; - } - - public int getStart() { return start; } - public int getNrOfElements() { return nrOfElements; } - } - - static class Result { - private final double value; - - public Result(double value) { - this.value = value; - } - - public double getValue() { return value; } - } - } +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java#messages Creating the worker ------------------- -Now we can create the worker actor. This is done by extending in the ``UntypedActor`` base class and defining the ``onReceive`` method. The ``onReceive`` method defines our message handler. We expect it to be able to handle the ``Work`` message so we need to add a handler for this message:: +Now we can create the worker actor. This is done by extending in the ``UntypedActor`` base class and defining the ``onReceive`` method. The ``onReceive`` method defines our message handler. We expect it to be able to handle the ``Work`` message so we need to add a handler for this message: - static class Worker extends UntypedActor { - - // message handler - public void onReceive(Object message) { - if (message instanceof Work) { - Work work = (Work) message; - - // perform the work - double result = calculatePiFor(work.getStart(), work.getNrOfElements()); - - // reply with the result - getContext().reply(new Result(result)); - - } else throw new IllegalArgumentException("Unknown message [" + message + "]"); - } - } +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java#worker + :exclude: calculatePiFor As you can see we have now created an ``UntypedActor`` with a ``onReceive`` method as a handler for the ``Work`` message. In this handler we invoke the ``calculatePiFor(..)`` method, wrap the result in a ``Result`` message and send it back to the original sender using ``getContext().reply(..)``. In Akka the sender reference is implicitly passed along with the message so that the receiver can always reply or store away the sender reference for future use. -The only thing missing in our ``Worker`` actor is the implementation on the ``calculatePiFor(..)`` method:: +The only thing missing in our ``Worker`` actor is the implementation on the ``calculatePiFor(..)`` method: - // define the work - private double calculatePiFor(int start, int nrOfElements) { - double acc = 0.0; - for (int i = start * nrOfElements; i <= ((start + 1) * nrOfElements - 1); i++) { - acc += 4.0 * (1 - (i % 2) * 2) / (2 * i + 1); - } - return acc; - } +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java#calculatePiFor Creating the master ------------------- -The master actor is a little bit more involved. In its constructor we need to create the workers (the ``Worker`` actors) and start them. We will also wrap them in a load-balancing router to make it easier to spread out the work evenly between the workers. Let's do that first:: +The master actor is a little bit more involved. In its constructor we create a round-robin router +to make it easier to spread out the work evenly between the workers. Let's do that first: - static class Master extends UntypedActor { - ... +.. 
includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java#create-router - static class PiRouter extends UntypedLoadBalancer { - private final InfiniteIterator workers; - - public PiRouter(ActorRef[] workers) { - this.workers = new CyclicIterator(asList(workers)); - } - - public InfiniteIterator seq() { - return workers; - } - } - - public Master(...) { - ... - - // create the workers - final ActorRef[] workers = new ActorRef[nrOfWorkers]; - for (int i = 0; i < nrOfWorkers; i++) { - workers[i] = actorOf(new Props(Worker.class)); - } - - // wrap them with a load-balancing router - ActorRef router = actorOf(new Props(new UntypedActorFactory() { - public UntypedActor create() { - return new PiRouter(workers); - } - })); - } - } - -As you can see we are using the ``actorOf`` factory method to create actors, this method returns as an ``ActorRef`` which is a reference to our newly created actor. This method is available in the ``Actors`` object but is usually imported:: - - import static akka.actor.Actors.actorOf; - -One thing to note is that we used two different versions of the ``actorOf`` method. For creating the ``Worker`` actor we just pass in the class but to create the ``PiRouter`` actor we can't do that since the constructor in the ``PiRouter`` class takes arguments, instead we need to use the ``UntypedActorFactory`` which unfortunately is a bit more verbose. - -``actorOf`` is the only way to create an instance of an Actor, this is enforced by Akka runtime. The ``actorOf`` method instantiates the actor and returns, not an instance to the actor, but an instance to an ``ActorRef``. This reference is the handle through which you communicate with the actor. It is immutable, serializable and location-aware meaning that it "remembers" its original actor even if it is sent to other nodes across the network and can be seen as the equivalent to the Erlang actor's PID. - -The actor's life-cycle is: - -- Created & Started -- ``Actor.actorOf(Props[MyActor]`` -- can receive messages -- Stopped -- ``actorRef.stop()`` -- can **not** receive messages - -Once the actor has been stopped it is dead and can not be started again. - -Now we have a router that is representing all our workers in a single abstraction. If you paid attention to the code above, you saw that we were using the ``nrOfWorkers`` variable. This variable and others we have to pass to the ``Master`` actor in its constructor. So now let's create the master actor. We have to pass in three integer variables: +Now we have a router that is representing all our workers in a single +abstraction. So now let's create the master actor. 
We pass it three integer variables: - ``nrOfWorkers`` -- defining how many workers we should start up - ``nrOfMessages`` -- defining how many number chunks to send out to the workers - ``nrOfElements`` -- defining how big the number chunks sent to each worker should be -Here is the master actor:: +Here is the master actor: - static class Master extends UntypedActor { - private final int nrOfMessages; - private final int nrOfElements; - private final CountDownLatch latch; - - private double pi; - private int nrOfResults; - private long start; - - private ActorRef router; - - static class PiRouter extends UntypedLoadBalancer { - private final InfiniteIterator workers; - - public PiRouter(ActorRef[] workers) { - this.workers = new CyclicIterator(asList(workers)); - } - - public InfiniteIterator seq() { - return workers; - } - } - - public Master( - int nrOfWorkers, int nrOfMessages, int nrOfElements, CountDownLatch latch) { - this.nrOfMessages = nrOfMessages; - this.nrOfElements = nrOfElements; - this.latch = latch; - - // create the workers - final ActorRef[] workers = new ActorRef[nrOfWorkers]; - for (int i = 0; i < nrOfWorkers; i++) { - workers[i] = actorOf(new Props(Worker.class)); - } - - // wrap them with a load-balancing router - router = actorOf(new Props(new UntypedActorFactory() { - public UntypedActor create() { - return new PiRouter(workers); - } - })); - } - - // message handler - public void onReceive(Object message) { ... } - - @Override - public void preStart() { - start = System.currentTimeMillis(); - } - - @Override - public void postStop() { - // tell the world that the calculation is complete - System.out.println(String.format( - "\n\tPi estimate: \t\t%s\n\tCalculation time: \t%s millis", - pi, (System.currentTimeMillis() - start))); - latch.countDown(); - } - } +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java#master + :exclude: handle-messages A couple of things are worth explaining further. -First, we are passing in a ``java.util.concurrent.CountDownLatch`` to the ``Master`` actor. This latch is only used for plumbing (in this specific tutorial), to have a simple way of letting the outside world knowing when the master can deliver the result and shut down. In more idiomatic Akka code, as we will see in part two of this tutorial series, we would not use a latch but other abstractions and functions like ``Channel``, ``Future`` and ``ask()`` to achieve the same thing in a non-blocking way. But for simplicity let's stick to a ``CountDownLatch`` for now. +First, we are passing in a ``java.util.concurrent.CountDownLatch`` to the +``Master`` actor. This latch is only used for plumbing (in this specific +tutorial), to have a simple way of letting the outside world knowing when the +master can deliver the result and shut down. In more idiomatic Akka code +we would not use a latch but other abstractions and functions like ``Future`` +and ``ask()`` to achieve the same thing in a non-blocking way. +But for simplicity let's stick to a ``CountDownLatch`` for now. -Second, we are adding a couple of life-cycle callback methods; ``preStart`` and ``postStop``. In the ``preStart`` callback we are recording the time when the actor is started and in the ``postStop`` callback we are printing out the result (the approximation of Pi) and the time it took to calculate it. In this call we also invoke ``latch.countDown()`` to tell the outside world that we are done. 
+Second, we are adding a couple of life-cycle callback methods; ``preStart`` and +``postStop``. In the ``preStart`` callback we are recording the time when the +actor is started and in the ``postStop`` callback we are printing out the result +(the approximation of Pi) and the time it took to calculate it. In this call we +also invoke ``latch.countDown()`` to tell the outside world that we are done. -But we are not done yet. We are missing the message handler for the ``Master`` actor. This message handler needs to be able to react to two different messages: +But we are not done yet. We are missing the message handler for the ``Master`` actor. +This message handler needs to be able to react to two different messages: - ``Calculate`` -- which should start the calculation - ``Result`` -- which should aggregate the different results -The ``Calculate`` handler is sending out work to all the ``Worker`` actors and after doing that it also sends a ``new Broadcast(poisonPill())`` message to the router, which will send out the ``PoisonPill`` message to all the actors it is representing (in our case all the ``Worker`` actors). ``PoisonPill`` is a special kind of message that tells the receiver to shut itself down using the normal shutdown method; ``getContext().stop()``, and is created through the ``poisonPill()`` method. We also send a ``PoisonPill`` to the router itself (since it's also an actor that we want to shut down). +The ``Calculate`` handler is sending out work to all the ``Worker`` via its router. -The ``Result`` handler is simpler, here we get the value from the ``Result`` message and aggregate it to our ``pi`` member variable. We also keep track of how many results we have received back, and if that matches the number of tasks sent out, the ``Master`` actor considers itself done and shuts down. +The ``Result`` handler gets the value from the ``Result`` message and aggregates it to +our ``pi`` member variable. We also keep track of how many results we have received back, +and if that matches the number of tasks sent out, the ``Master`` actor considers itself done and +invokes the ``self.stop()`` method to stop itself *and* all its supervised actors. +In this case it has one supervised actor, the router, and this in turn has ``nrOfWorkers`` supervised actors. +All of them will be stopped automatically as the invocation of any supervisor's ``stop`` method +will propagate down to all its supervised 'children'. -Let's capture this in code:: +Let's capture this in code: - // message handler - public void onReceive(Object message) { - - if (message instanceof Calculate) { - // schedule work - for (int start = 0; start < nrOfMessages; start++) { - router.tell(new Work(start, nrOfElements), getContext()); - } - - // send a PoisonPill to all workers telling them to shut down themselves - router.tell(new Broadcast(poisonPill())); - - // send a PoisonPill to the router, telling him to shut himself down - router.tell(poisonPill()); - - } else if (message instanceof Result) { - - // handle result from the worker - Result result = (Result) message; - pi += result.getValue(); - nrOfResults += 1; - if (nrOfResults == nrOfMessages) getContext().stop(); - - } else throw new IllegalArgumentException("Unknown message [" + message + "]"); - } +.. 
includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java#master-receive Bootstrap the calculation ------------------------- -Now the only thing that is left to implement is the runner that should bootstrap and run the calculation for us. We do that by adding a ``main`` method to the enclosing ``Pi`` class in which we create a new instance of ``Pi`` and invoke method ``calculate`` in which we start up the ``Master`` actor and wait for it to finish:: +Now the only thing that is left to implement is the runner that should bootstrap and run the calculation for us. +We do that by adding a ``main`` method to the enclosing ``Pi`` class in which we create a new instance of ``Pi`` and +invoke method ``calculate`` in which we start up the ``Master`` actor and wait for it to finish: - public class Pi { +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java#app + :exclude: actors-and-messages - public static void main(String[] args) throws Exception { - Pi pi = new Pi(); - pi.calculate(4, 10000, 10000); - } - - public void calculate(final int nrOfWorkers, final int nrOfElements, final int nrOfMessages) - throws Exception { - - // this latch is only plumbing to know when the calculation is completed - final CountDownLatch latch = new CountDownLatch(1); - - // create the master - ActorRef master = actorOf(new Props(new UntypedActorFactory() { - public UntypedActor create() { - return new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch); - } - })); - - // start the calculation - master.tell(new Calculate()); - - // wait for master to shut down - latch.await(); - } - } +As you can see the *calculate* method above it creates an ActorSystem and this is the Akka container which +will contain all actors created in that "context". An example of how to create actors in the container +is the *'system.actorOf(...)'* line in the calculate method. In this case we create a top level actor. +If you instead where in an actor context, i.e. inside an actor creating other actors, you should use +*this.getContext.actorOf(...)*. This is illustrated in the Master code above. That's it. Now we are done. 
-Before we package it up and run it, let's take a look at the full code now, with package declaration, imports and all:: - - package akka.tutorial.first.java; - - import static akka.actor.Actors.actorOf; - import static akka.actor.Actors.poisonPill; - import static java.util.Arrays.asList; - - import akka.actor.Props; - import akka.actor.ActorRef; - import akka.actor.UntypedActor; - import akka.actor.UntypedActorFactory; - import akka.routing.CyclicIterator; - import akka.routing.InfiniteIterator; - import akka.routing.Routing.Broadcast; - import akka.routing.UntypedLoadBalancer; - - import java.util.concurrent.CountDownLatch; - - public class Pi { - - public static void main(String[] args) throws Exception { - Pi pi = new Pi(); - pi.calculate(4, 10000, 10000); - } - - // ==================== - // ===== Messages ===== - // ==================== - static class Calculate {} - - static class Work { - private final int start; - private final int nrOfElements; - - public Work(int start, int nrOfElements) { - this.start = start; - this.nrOfElements = nrOfElements; - } - - public int getStart() { return start; } - public int getNrOfElements() { return nrOfElements; } - } - - static class Result { - private final double value; - - public Result(double value) { - this.value = value; - } - - public double getValue() { return value; } - } - - // ================== - // ===== Worker ===== - // ================== - static class Worker extends UntypedActor { - - // define the work - private double calculatePiFor(int start, int nrOfElements) { - double acc = 0.0; - for (int i = start * nrOfElements; i <= ((start + 1) * nrOfElements - 1); i++) { - acc += 4.0 * (1 - (i % 2) * 2) / (2 * i + 1); - } - return acc; - } - - // message handler - public void onReceive(Object message) { - if (message instanceof Work) { - Work work = (Work) message; - - // perform the work - double result = calculatePiFor(work.getStart(), work.getNrOfElements()) - - // reply with the result - getContext().reply(new Result(result)); - - } else throw new IllegalArgumentException("Unknown message [" + message + "]"); - } - } - - // ================== - // ===== Master ===== - // ================== - static class Master extends UntypedActor { - private final int nrOfMessages; - private final int nrOfElements; - private final CountDownLatch latch; - - private double pi; - private int nrOfResults; - private long start; - - private ActorRef router; - - static class PiRouter extends UntypedLoadBalancer { - private final InfiniteIterator workers; - - public PiRouter(ActorRef[] workers) { - this.workers = new CyclicIterator(asList(workers)); - } - - public InfiniteIterator seq() { - return workers; - } - } - - public Master( - int nrOfWorkers, int nrOfMessages, int nrOfElements, CountDownLatch latch) { - - this.nrOfMessages = nrOfMessages; - this.nrOfElements = nrOfElements; - this.latch = latch; - - // create the workers - final ActorRef[] workers = new ActorRef[nrOfWorkers]; - for (int i = 0; i < nrOfWorkers; i++) { - workers[i] = actorOf(new Props(Worker.class)); - } - - // wrap them with a load-balancing router - router = actorOf(new Props(new UntypedActorFactory() { - public UntypedActor create() { - return new PiRouter(workers); - } - })); - } - - // message handler - public void onReceive(Object message) { - - if (message instanceof Calculate) { - // schedule work - for (int start = 0; start < nrOfMessages; start++) { - router.tell(new Work(start, nrOfElements), getContext()); - } - - // send a PoisonPill to all workers telling them to 
shut down themselves - router.tell(new Broadcast(poisonPill())); - - // send a PoisonPill to the router, telling him to shut himself down - router.tell(poisonPill()); - - } else if (message instanceof Result) { - - // handle result from the worker - Result result = (Result) message; - pi += result.getValue(); - nrOfResults += 1; - if (nrOfResults == nrOfMessages) getContext().stop(); - - } else throw new IllegalArgumentException("Unknown message [" + message + "]"); - } - - @Override - public void preStart() { - start = System.currentTimeMillis(); - } - - @Override - public void postStop() { - // tell the world that the calculation is complete - System.out.println(String.format( - "\n\tPi estimate: \t\t%s\n\tCalculation time: \t%s millis", - pi, (System.currentTimeMillis() - start))); - latch.countDown(); - } - } - - // ================== - // ===== Run it ===== - // ================== - public void calculate(final int nrOfWorkers, final int nrOfElements, final int nrOfMessages) - throws Exception { - - // this latch is only plumbing to know when the calculation is completed - final CountDownLatch latch = new CountDownLatch(1); - - // create the master - ActorRef master = actorOf(new Props(new UntypedActorFactory() { - public UntypedActor create() { - return new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch); - } - })); - - // start the calculation - master.tell(new Calculate()); - - // wait for master to shut down - latch.await(); - } - } +Before we package it up and run it, let's take a look at the full code now, with package declaration, imports and all: +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java Run it as a command line application ------------------------------------ @@ -729,11 +352,11 @@ and the ``scala-library.jar`` JAR files to the classpath as well as the classes we compiled ourselves:: $ java \ - -cp lib/scala-library.jar:lib/akka/akka-actor-2.0-SNAPSHOT.jar:tutorial \ + -cp lib/scala-library.jar:lib/akka/akka-actor-2.0-SNAPSHOT.jar:. \ akka.tutorial.java.first.Pi Pi estimate: 3.1435501812459323 - Calculation time: 822 millis + Calculation time: 609 millis Yippee! It is working. @@ -750,7 +373,7 @@ When this in done we can run our application directly inside Maven:: $ mvn exec:java -Dexec.mainClass="akka.tutorial.first.java.Pi" ... Pi estimate: 3.1435501812459323 - Calculation time: 939 millis + Calculation time: 597 millis Yippee! It is working. diff --git a/akka-docs/intro/getting-started-first-scala-eclipse.rst b/akka-docs/intro/getting-started-first-scala-eclipse.rst index da473990d1..88b7c50b77 100644 --- a/akka-docs/intro/getting-started-first-scala-eclipse.rst +++ b/akka-docs/intro/getting-started-first-scala-eclipse.rst @@ -6,32 +6,69 @@ Getting Started Tutorial (Scala with Eclipse): First Chapter Introduction ------------ -Welcome to the first tutorial on how to get started with `Akka `_ and `Scala `_. We assume that you already know what Akka and Scala are and will now focus on the steps necessary to start your first project. We will be using `Eclipse `_, and the `Scala plugin for Eclipse `_. +Welcome to the first tutorial on how to get started with Akka and Scala. We +assume that you already know what Akka and Scala are and will now focus on the +steps necessary to start your first project. -The sample application that we will create is using actors to calculate the value of Pi. 
Calculating Pi is a CPU intensive operation and we will utilize Akka Actors to write a concurrent solution that scales out to multi-core processors. This sample will be extended in future tutorials to use Akka Remote Actors to scale out on multiple machines in a cluster.
+There are two variations of this first tutorial:

-We will be using an algorithm that is called "embarrassingly parallel" which just means that each job is completely isolated and not coupled with any other job. Since this algorithm is so parallelizable it suits the actor model very well.
+- creating a standalone project and running it from the command line
+- creating an SBT (Simple Build Tool) project and running it from within SBT
+
+Since they are so similar we will present them both.
+
+The sample application that we will create is using actors to calculate the
+value of Pi. Calculating Pi is a CPU intensive operation and we will utilize
+Akka Actors to write a concurrent solution that scales out to multi-core
+processors. This sample will be extended in future tutorials to use Akka Remote
+Actors to scale out on multiple machines in a cluster.
+
+We will be using an algorithm that is called "embarrassingly parallel" which
+just means that each job is completely isolated and not coupled with any other
+job. Since this algorithm is so parallelizable it suits the actor model very
+well.

Here is the formula for the algorithm we will use:

.. image:: ../images/pi-formula.png

-In this particular algorithm the master splits the series into chunks which are sent out to each worker actor to be processed. When each worker has processed its chunk it sends a result back to the master which aggregates the total result.
+In this particular algorithm the master splits the series into chunks which are
+sent out to each worker actor to be processed. When each worker has processed
+its chunk it sends a result back to the master which aggregates the total
+result.
+

Tutorial source code
--------------------

-If you want don't want to type in the code and/or set up an SBT project then you can check out the full tutorial from the Akka GitHub repository. It is in the ``akka-tutorials/akka-tutorial-first`` module. You can also browse it online `here`__, with the actual source code `here`__.
+If you don't want to type in the code and/or set up an SBT project then you can
+check out the full tutorial from the Akka GitHub repository. It is in the
+``akka-tutorials/akka-tutorial-first`` module. You can also browse it online
+`here`__, with the actual source code `here`__.

__ https://github.com/jboner/akka/tree/master/akka-tutorials/akka-tutorial-first
__ https://github.com/jboner/akka/blob/master/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala

+To check out the code using Git invoke the following::
+
+ $ git clone git://github.com/jboner/akka.git
+
+Then you can navigate down to the tutorial::
+
+ $ cd akka/akka-tutorials/akka-tutorial-first
+
+

Prerequisites
-------------

-This tutorial assumes that you have Java 1.6 or later installed on you machine and ``java`` on your ``PATH``. You also need to know how to run commands in a shell (ZSH, Bash, DOS etc.) and a recent version of Eclipse (at least `3.6 - Helios `_).
+This tutorial assumes that you have Java 1.6 or later installed on your machine
+and ``java`` on your ``PATH``. You also need to know how to run commands in a
+shell (ZSH, Bash, DOS etc.) and a decent text editor or IDE to type in the Scala
+code.
-If you want to run the example from the command line as well, you need to make sure that ``$JAVA_HOME`` environment variable is set to the root of the Java distribution. You also need to make sure that the ``$JAVA_HOME/bin`` is on your ``PATH``:: +You need to make sure that ``$JAVA_HOME`` environment variable is set to the +root of the Java distribution. You also need to make sure that the +``$JAVA_HOME/bin`` is on your ``PATH``:: $ export JAVA_HOME=..root of java distribution.. $ export PATH=$PATH:$JAVA_HOME/bin @@ -107,58 +144,85 @@ The Akka Microkernel distribution also includes these jars: Downloading and installing the Scala IDE for Eclipse ---------------------------------------------------- -If you want to use Eclipse for coding your Akka tutorial, you need to install the Scala plugin for Eclipse. This plugin comes with its own version of Scala, so if you don't plan to run the example from the command line, you don't need to download the Scala distribution (and you can skip the next section). +If you want to use Eclipse for coding your Akka tutorial, you need to install the Scala plugin for Eclipse. +This plugin comes with its own version of Scala, so if you don't plan to run the example from the command line, +you don't need to download the Scala distribution (and you can skip the next section). -You can install this plugin using the regular update mechanism. First choose a version of the IDE from `http://download.scala-ide.org `_. We recommend you choose 2.0.x, which comes with Scala 2.9. Copy the corresponding URL and then choose ``Help/Install New Software`` and paste the URL you just copied. You should see something similar to the following image. +You can install this plugin using the regular update mechanism. First choose a version of the IDE from +`http://download.scala-ide.org `_. We recommend you choose 2.0.x, which +comes with Scala 2.9. Copy the corresponding URL and then choose ``Help/Install New Software`` and paste +the URL you just copied. You should see something similar to the following image. .. image:: ../images/install-beta2-updatesite.png -Make sure you select both the ``JDT Weaving for Scala`` and the ``Scala IDE for Eclipse`` plugins. The other plugin is optional, and contains the source code of the plugin itself. +Make sure you select both the ``JDT Weaving for Scala`` and the ``Scala IDE for Eclipse`` plugins. +The other plugin is optional, and contains the source code of the plugin itself. -Once the installation is finished, you need to restart Eclipse. The first time the plugin starts it will open a diagnostics window and offer to fix several settings, such as the delay for content assist (code-completion) or the shown completion proposal types. +Once the installation is finished, you need to restart Eclipse. The first time the plugin starts it will +open a diagnostics window and offer to fix several settings, such as the delay for content assist (code-completion) +or the shown completion proposal types. .. image:: ../images/diagnostics-window.png Accept the recommended settings, and follow the instructions if you need to increase the heap size of Eclipse. -Check that the installation succeeded by creating a new Scala project (``File/New>Scala Project``), and typing some code. You should have content-assist, hyperlinking to definitions, instant error reporting, and so on. +Check that the installation succeeded by creating a new Scala project (``File/New>Scala Project``), and typing some code. 
+You should have content-assist, hyperlinking to definitions, instant error reporting, and so on. .. image:: ../images/example-code.png You are ready to code now! + Downloading and installing Scala -------------------------------- -To build and run the tutorial sample from the command line, you have to install the Scala distribution. If you prefer to use Eclipse to build and run the sample then you can skip this section and jump to the next one. +To build and run the tutorial sample from the command line, you have to install +the Scala distribution. If you prefer to use SBT to build and run the sample +then you can skip this section and jump to the next one. -Scala can be downloaded from `http://www.scala-lang.org/downloads `_. Browse there and download the Scala 2.9.0 release. If you pick the ``tgz`` or ``zip`` distribution then just unzip it where you want it installed. If you pick the IzPack Installer then double click on it and follow the instructions. +Scala can be downloaded from http://www.scala-lang.org/downloads. Browse there +and download the Scala 2.9.1 release. If you pick the ``tgz`` or ``zip`` +distribution then just unzip it where you want it installed. If you pick the +IzPack Installer then double click on it and follow the instructions. -You also need to make sure that the ``scala-2.9.0/bin`` (if that is the directory where you installed Scala) is on your ``PATH``:: +You also need to make sure that the ``scala-2.9.1/bin`` (if that is the +directory where you installed Scala) is on your ``PATH``:: - $ export PATH=$PATH:scala-2.9.0/bin + $ export PATH=$PATH:scala-2.9.1/bin You can test your installation by invoking scala:: $ scala -version - Scala code runner version 2.9.0.final -- Copyright 2002-2011, LAMP/EPFL + Scala code runner version 2.9.1.final -- Copyright 2002-2011, LAMP/EPFL -Looks like we are all good. Finally let's create a source file ``Pi.scala`` for the tutorial and put it in the root of the Akka distribution in the ``tutorial`` directory (you have to create it first). +Looks like we are all good. Finally let's create a source file ``Pi.scala`` for +the tutorial and put it in the root of the Akka distribution in the ``tutorial`` +directory (you have to create it first). + +Some tools require you to set the ``SCALA_HOME`` environment variable to the +root of the Scala distribution, however Akka does not require that. -Some tools require you to set the ``SCALA_HOME`` environment variable to the root of the Scala distribution, however Akka does not require that. Creating an Akka project in Eclipse ---------------------------------------- +----------------------------------- -If you have not already done so, now is the time to create an Eclipse project for our tutorial. Use the ``New Scala Project`` wizard and accept the default settings. Once the project is open, we need to add the akka libraries to the *build path*. Right click on the project and choose ``Properties``, then click on ``Java Build Path``. Go to ``Libraries`` and click on ``Add External Jars..``, then navigate to the location where you installed akka and choose ``akka-actor.jar``. You should see something similar to this: +If you have not already done so, now is the time to create an Eclipse project for our tutorial. +Use the ``New Scala Project`` wizard and accept the default settings. Once the project is open, +we need to add the akka libraries to the *build path*. Right click on the project and choose ``Properties``, +then click on ``Java Build Path``. 
Go to ``Libraries`` and click on ``Add External Jars..``, then navigate +to the location where you installed akka and choose ``akka-actor.jar``. You should see something similar to this: .. image:: ../images/build-path.png + Using SBT in Eclipse ^^^^^^^^^^^^^^^^^^^^ -If you are an `SBT `_ user, you can follow the :ref:`getting-started-first-scala-download-sbt` instruction and additionally install the ``sbteclipse`` plugin. This adds support for generating Eclipse project files from your SBT project. -You need to install the plugin as described in the `README of sbteclipse `_ +If you are an `SBT `_ user, you can follow the :ref:`getting-started-first-scala-download-sbt` +instruction and additionally install the ``sbteclipse`` plugin. This adds support for generating Eclipse project files +from your SBT project. You need to install the plugin as described in the `README of sbteclipse +`_ Then run the ``eclipse`` target to generate the Eclipse project:: @@ -173,12 +237,14 @@ The options `create-src` and `with-sources` are useful:: * create-src to create the common source directories, e.g. src/main/scala, src/main/test * with-sources to create source attachments for the library dependencies -Next you need to import this project in Eclipse, by choosing ``Eclipse/Import.. Existing Projects into Workspace``. Navigate to the directory where you defined your SBT project and choose import: +Next you need to import this project in Eclipse, by choosing ``Eclipse/Import.. Existing Projects into Workspace``. +Navigate to the directory where you defined your SBT project and choose import: .. image:: ../images/import-project.png Now we have the basis for an Akka Eclipse application, so we can.. + Start writing the code ---------------------- @@ -186,10 +252,14 @@ The design we are aiming for is to have one ``Master`` actor initiating the comp With this in mind, let's now create the messages that we want to have flowing in the system. + Creating the messages --------------------- -We start by creating a package for our application, let's call it ``akka.tutorial.first.scala``. We start by creating case classes for each type of message in our application, so we can place them in a hierarchy, call it ``PiMessage``. Right click on the package and choose ``New Scala Class``, and enter ``PiMessage`` for the name of the class. +We start by creating a package for our application, let's call it ``akka.tutorial.first.scala``. +We start by creating case classes for each type of message in our application, so we can place them in a hierarchy, +call it ``PiMessage``. Right click on the package and choose ``New Scala Class``, and enter ``PiMessage`` as +the name of the class. We need three different messages: @@ -197,204 +267,156 @@ We need three different messages: - ``Work`` -- sent from the ``Master`` actor to the ``Worker`` actors containing the work assignment - ``Result`` -- sent from the ``Worker`` actors to the ``Master`` actor containing the result from the worker's calculation -Messages sent to actors should always be immutable to avoid sharing mutable state. In Scala we have 'case classes' which make excellent messages. So let's start by creating three messages as case classes. We also create a common base trait for our messages (that we define as being ``sealed`` in order to prevent creating messages outside our control):: +Messages sent to actors should always be immutable to avoid sharing mutable state. +In Scala we have 'case classes' which make excellent messages. 
So let's start by creating three messages as case classes. +We also create a common base trait for our messages (that we define as being ``sealed`` in order to prevent creating messages +outside our control): - package akka.tutorial.first.scala +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala#messages - sealed trait PiMessage - - case object Calculate extends PiMessage - - case class Work(start: Int, nrOfElements: Int) extends PiMessage - - case class Result(value: Double) extends PiMessage Creating the worker ------------------- -Now we can create the worker actor. Create a new class called ``Worker`` as before. We need to mix in the ``Actor`` trait and defining the ``receive`` method. The ``receive`` method defines our message handler. We expect it to be able to handle the ``Work`` message so we need to add a handler for this message:: +Now we can create the worker actor. This is done by mixing in the ``Actor`` +trait and defining the ``receive`` method. The ``receive`` method defines our +message handler. We expect it to be able to handle the ``Work`` message so we +need to add a handler for this message: - class Worker extends Actor { - def receive = { - case Work(start, nrOfElements) => - self reply Result(calculatePiFor(start, nrOfElements)) // perform the work - } - } +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala#worker + :exclude: calculatePiFor -The ``Actor`` trait is defined in ``akka.actor`` and you can either import it explicitly, or let Eclipse do it for you when it cannot resolve the ``Actor`` trait. The quick fix option (``Ctrl-F1``) will offer two options: +The ``Actor`` trait is defined in ``akka.actor`` and you can either import it explicitly, +or let Eclipse do it for you when it cannot resolve the ``Actor`` trait. +The quick fix option (``Ctrl-F1``) will offer two options: .. image:: ../images/quickfix.png Choose the Akka Actor and move on. -As you can see we have now created an ``Actor`` with a ``receive`` method as a handler for the ``Work`` message. In this handler we invoke the ``calculatePiFor(..)`` method, wrap the result in a ``Result`` message and send it back to the original sender using ``self.reply``. In Akka the sender reference is implicitly passed along with the message so that the receiver can always reply or store away the sender reference for future use. +As you can see we have now created an ``Actor`` with a ``receive`` method as a +handler for the ``Work`` message. In this handler we invoke the +``calculatePiFor(..)`` method, wrap the result in a ``Result`` message and send +it back asynchronously to the original sender using the ``sender`` reference. +In Akka the sender reference is implicitly passed along with the message so that +the receiver can always reply or store away the sender reference for future use. -The only thing missing in our ``Worker`` actor is the implementation on the ``calculatePiFor(..)`` method. While there are many ways we can implement this algorithm in Scala, in this introductory tutorial we have chosen an imperative style using a for comprehension and an accumulator:: +The only thing missing in our ``Worker`` actor is the implementation on the +``calculatePiFor(..)`` method. While there are many ways we can implement this +algorithm in Scala, in this introductory tutorial we have chosen an imperative +style using a for comprehension and an accumulator: + +.. 
includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala#calculatePiFor - def calculatePiFor(start: Int, nrOfElements: Int): Double = { - var acc = 0.0 - for (i <- start until (start + nrOfElements)) - acc += 4.0 * (1 - (i % 2) * 2) / (2 * i + 1) - acc - } Creating the master ------------------- -Now create a new class for the master actor. The master actor is a little bit more involved. In its constructor we need to create the workers (the ``Worker`` actors) and start them. We will also wrap them in a load-balancing router to make it easier to spread out the work evenly between the workers. First we need to add some imports:: +Now create a new class for the master actor. The master actor is a little bit +more involved. In its constructor we create a round-robin router to make it easier +to spread out the work evenly between the workers. First we need to add some imports: - import akka.actor.{Actor, PoisonPill} - import akka.routing.{Routing, CyclicIterator} - import Routing._ - import akka.dispatch.Dispatchers +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala#imports - import java.util.concurrent.CountDownLatch +and then we can create the router: -and then we can create the workers:: +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala#create-router - // create the workers - val workers = Vector.fill(nrOfWorkers)(actorOf(Props[Worker]) - - // wrap them with a load-balancing router - val router = Routing.loadBalancerActor(CyclicIterator(workers)) - -As you can see we are using the ``actorOf`` factory method to create actors, this method returns as an ``ActorRef`` which is a reference to our newly created actor. This method is available in the ``Actor`` object but is usually imported:: - - import akka.actor.Actor.actorOf - -There are two versions of ``actorOf``; one of them taking a actor type and the other one an instance of an actor. The former one (``actorOf(Props[MyActor]``) is used when the actor class has a no-argument constructor while the second one (``actorOf(Props(new MyActor(..))``) is used when the actor class has a constructor that takes arguments. This is the only way to create an instance of an Actor and the ``actorOf`` method ensures this. The latter version is using call-by-name and lazily creates the actor within the scope of the ``actorOf`` method. The ``actorOf`` method instantiates the actor and returns, not an instance to the actor, but an instance to an ``ActorRef``. This reference is the handle through which you communicate with the actor. It is immutable, serializable and location-aware meaning that it "remembers" its original actor even if it is sent to other nodes across the network and can be seen as the equivalent to the Erlang actor's PID. - -The actor's life-cycle is: - -- Created -- ``Actor.actorOf(Props[MyActor]`` -- can **not** receive messages -- Started -- ``actorRef`` -- can receive messages -- Stopped -- ``actorRef.stop()`` -- can **not** receive messages - -Once the actor has been stopped it is dead and can not be started again. - -Now we have a router that is representing all our workers in a single abstraction. If you paid attention to the code above, you saw that we were using the ``nrOfWorkers`` variable. This variable and others we have to pass to the ``Master`` actor in its constructor. So now let's create the master actor. We have to pass in three integer variables: +Now we have a router that is representing all our workers in a single +abstraction. 
So now let's create the master actor. We pass it three integer variables: - ``nrOfWorkers`` -- defining how many workers we should start up - ``nrOfMessages`` -- defining how many number chunks to send out to the workers - ``nrOfElements`` -- defining how big the number chunks sent to each worker should be -Here is the master actor:: +Here is the master actor: - class Master( - nrOfWorkers: Int, nrOfMessages: Int, nrOfElements: Int, latch: CountDownLatch) - extends Actor { - - var pi: Double = _ - var nrOfResults: Int = _ - var start: Long = _ - - // create the workers - val workers = Vector.fill(nrOfWorkers)(actorOf(Props[Worker]) - - // wrap them with a load-balancing router - val router = Routing.loadBalancerActor(CyclicIterator(workers)) - - def receive = { ... } - - override def preStart() { - start = System.currentTimeMillis - } - - override def postStop() { - // tell the world that the calculation is complete - println( - "\n\tPi estimate: \t\t%s\n\tCalculation time: \t%s millis" - .format(pi, (System.currentTimeMillis - start))) - latch.countDown() - } - } +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala#master + :exclude: handle-messages A couple of things are worth explaining further. -First, we are passing in a ``java.util.concurrent.CountDownLatch`` to the ``Master`` actor. This latch is only used for plumbing (in this specific tutorial), to have a simple way of letting the outside world knowing when the master can deliver the result and shut down. In more idiomatic Akka code, as we will see in part two of this tutorial series, we would not use a latch but other abstractions and functions like ``Channel``, ``Future`` and ``?`` to achieve the same thing in a non-blocking way. But for simplicity let's stick to a ``CountDownLatch`` for now. +First, we are passing in a ``java.util.concurrent.CountDownLatch`` to the +``Master`` actor. This latch is only used for plumbing (in this specific +tutorial), to have a simple way of letting the outside world knowing when the +master can deliver the result and shut down. In more idiomatic Akka code +we would not use a latch but other abstractions and functions like ``Future`` +and ``?`` to achieve the same thing in a non-blocking way. +But for simplicity let's stick to a ``CountDownLatch`` for now. -Second, we are adding a couple of life-cycle callback methods; ``preStart`` and ``postStop``. In the ``preStart`` callback we are recording the time when the actor is started and in the ``postStop`` callback we are printing out the result (the approximation of Pi) and the time it took to calculate it. In this call we also invoke ``latch.countDown`` to tell the outside world that we are done. +Second, we are adding a couple of life-cycle callback methods; ``preStart`` and +``postStop``. In the ``preStart`` callback we are recording the time when the +actor is started and in the ``postStop`` callback we are printing out the result +(the approximation of Pi) and the time it took to calculate it. In this call we +also invoke ``latch.countDown()`` to tell the outside world that we are done. -But we are not done yet. We are missing the message handler for the ``Master`` actor. This message handler needs to be able to react to two different messages: +But we are not done yet. We are missing the message handler for the ``Master`` +actor. 
This message handler needs to be able to react to two different messages:

- ``Calculate`` -- which should start the calculation
- ``Result`` -- which should aggregate the different results

-The ``Calculate`` handler is sending out work to all the ``Worker`` actors and after doing that it also sends a ``Broadcast(PoisonPill)`` message to the router, which will send out the ``PoisonPill`` message to all the actors it is representing (in our case all the ``Worker`` actors). ``PoisonPill`` is a special kind of message that tells the receiver to shut itself down using the normal shutdown method; ``self.stop``. We also send a ``PoisonPill`` to the router itself (since it's also an actor that we want to shut down).
+The ``Calculate`` handler sends out work to all the ``Worker`` actors via its router.

-The ``Result`` handler is simpler, here we get the value from the ``Result`` message and aggregate it to our ``pi`` member variable. We also keep track of how many results we have received back, and if that matches the number of tasks sent out, the ``Master`` actor considers itself done and shuts down.
+The ``Result`` handler gets the value from the ``Result`` message and aggregates it to
+our ``pi`` member variable. We also keep track of how many results we have received back,
+and if that matches the number of tasks sent out, the ``Master`` actor considers itself done and
+invokes the ``self.stop()`` method to stop itself *and* all its supervised actors.
+In this case it has one supervised actor, the router, and this in turn has ``nrOfWorkers`` supervised actors.
+All of them will be stopped automatically as the invocation of any supervisor's ``stop`` method
+will propagate down to all its supervised 'children'.

-Let's capture this in code::
+Let's capture this in code:

- // message handler
- def receive = {
- case Calculate =>
- // schedule work
- for (i <- 0 until nrOfMessages) router ! Work(i * nrOfElements, nrOfElements)
+.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala#master-receive

- // send a PoisonPill to all workers telling them to shut down themselves
- router ! Broadcast(PoisonPill)
-
- // send a PoisonPill to the router, telling him to shut himself down
- router ! PoisonPill
-
- case Result(value) =>
- // handle result from the worker
- pi += value
- nrOfResults += 1
- if (nrOfResults == nrOfMessages) self.stop()
- }

Bootstrap the calculation
-------------------------

-Now the only thing that is left to implement is the runner that should bootstrap and run the calculation for us. We do that by creating an object that we call ``Pi``, here we can extend the ``App`` trait in Scala, which means that we will be able to run this as an application directly from the command line or using the Eclipse Runner.
+Now the only thing that is left to implement is the runner that should bootstrap and run the calculation for us.
+We do that by creating an object that we call ``Pi``, here we can extend the ``App`` trait in Scala,
+which means that we will be able to run this as an application directly from the command line or using the Eclipse Runner.

-The ``Pi`` object is a perfect container module for our actors and messages, so let's put them all there.
+We also create a method ``calculate`` in which we start up the ``Master`` actor and wait for it to finish:

- object Pi extends App {
-
- calculate(nrOfWorkers = 4, nrOfElements = 10000, nrOfMessages = 10000)
-
- ... // actors and messages
-
- def calculate(nrOfWorkers: Int, nrOfElements: Int, nrOfMessages: Int) {
-
- // this latch is only plumbing to know when the calculation is completed
- val latch = new CountDownLatch(1)
-
- // create the master
- val master = actorOf(Props(new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch)))
-
- // start the calculation
- master ! Calculate
-
- // wait for master to shut down
- latch.await()
- }
- }
+.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala#app
+ :exclude: actors-and-messages

That's it. Now we are done.

Run it from Eclipse
-------------------

-Eclipse builds your project on every save when ``Project/Build Automatically`` is set. If not, bring you project up to date by clicking ``Project/Build Project``. If there are no compilation errors, you can right-click in the editor where ``Pi`` is defined, and choose ``Run as.. /Scala application``. If everything works fine, you should see::
+Eclipse builds your project on every save when ``Project/Build Automatically`` is set.
+If not, bring your project up to date by clicking ``Project/Build Project``. If there are no compilation errors,
+you can right-click in the editor where ``Pi`` is defined, and choose ``Run as.. /Scala application``.
+If everything works fine, you should see::

 Pi estimate: 3.1435501812459323
- Calculation time: 858 millis
+ Calculation time: 632 millis

-You can also define a new Run configuration, by going to ``Run/Run Configurations``. Create a new ``Scala application`` and choose the tutorial project and the main class to be ``akkatutorial.Pi``. You can pass additional command line arguments to the JVM on the ``Arguments`` page, for instance to define where :ref:`configuration` is:
+You can also define a new Run configuration, by going to ``Run/Run Configurations``. Create a new ``Scala application``
+and choose the tutorial project and the main class to be ``akkatutorial.Pi``. You can pass additional command line
+arguments to the JVM on the ``Arguments`` page, for instance to define where :ref:`configuration` is:

.. image:: ../images/run-config.png

-Once you finished your run configuration, click ``Run``. You should see the same output in the ``Console`` window. You can use the same configuration for debugging the application, by choosing ``Run/Debug History`` or just ``Debug As``.
+Once you have finished your run configuration, click ``Run``. You should see the same output in the ``Console`` window.
+You can use the same configuration for debugging the application, by choosing ``Run/Debug History`` or just ``Debug As``.

Conclusion
----------

-We have learned how to create our first Akka project using Akka's actors to speed up a computation-intensive problem by scaling out on multi-core processors (also known as scaling up). We have also learned to compile and run an Akka project using Eclipse.
+We have learned how to create our first Akka project using Akka's actors to
+speed up a computation-intensive problem by scaling out on multi-core processors
+(also known as scaling up). We have also learned to compile and run an Akka
+project using either the tools on the command line or the SBT build system.
-If you have a multi-core machine then I encourage you to try out different number of workers (number of working actors) by tweaking the ``nrOfWorkers`` variable to for example; 2, 4, 6, 8 etc. to see performance improvement by scaling up.
+If you have a multi-core machine then I encourage you to try out a different
+number of workers (number of working actors) by tweaking the ``nrOfWorkers``
+variable to, for example, 2, 4, 6 or 8, to see the performance improvement from
+scaling up.

Happy hakking.

diff --git a/akka-docs/intro/getting-started-first-scala.rst b/akka-docs/intro/getting-started-first-scala.rst
index 35ac80bcd5..5f0b9f2c0d 100644
--- a/akka-docs/intro/getting-started-first-scala.rst
+++ b/akka-docs/intro/getting-started-first-scala.rst
@@ -40,6 +40,24 @@ sent out to each worker actor to be processed. When each worker has processed
its chunk it sends a result back to the master which aggregates the total
result.

+Tutorial source code
+--------------------
+
+If you don't want to type in the code and/or set up an SBT project then you can
+check out the full tutorial from the Akka GitHub repository. It is in the
+``akka-tutorials/akka-tutorial-first`` module. You can also browse it online
+`here`__, with the actual source code `here`__.
+
+__ https://github.com/jboner/akka/tree/master/akka-tutorials/akka-tutorial-first
+__ https://github.com/jboner/akka/blob/master/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala
+
+To check out the code using Git invoke the following::
+
+ $ git clone git://github.com/jboner/akka.git
+
+Then you can navigate down to the tutorial::
+
+ $ cd akka/akka-tutorials/akka-tutorial-first

Prerequisites
=============
@@ -133,19 +151,19 @@ the Scala distribution. If you prefer to use SBT to build and run the sample
then you can skip this section and jump to the next one.

Scala can be downloaded from http://www.scala-lang.org/downloads. Browse there
-and download the Scala 2.9.0 release. If you pick the ``tgz`` or ``zip``
+and download the Scala 2.9.1 release. If you pick the ``tgz`` or ``zip``
distribution then just unzip it where you want it installed. If you pick the
IzPack Installer then double click on it and follow the instructions.

-You also need to make sure that the ``scala-2.9.0/bin`` (if that is the
+You also need to make sure that the ``scala-2.9.1/bin`` (if that is the
directory where you installed Scala) is on your ``PATH``::

- $ export PATH=$PATH:scala-2.9.0/bin
+ $ export PATH=$PATH:scala-2.9.1/bin

You can test your installation by invoking scala::

 $ scala -version
- Scala code runner version 2.9.0.final -- Copyright 2002-2011, LAMP/EPFL
+ Scala code runner version 2.9.1.final -- Copyright 2002-2011, LAMP/EPFL

Looks like we are all good. Finally let's create a source file ``Pi.scala`` for
the tutorial and put it in the root of the Akka distribution in the ``tutorial``
@@ -221,7 +239,7 @@ Now it's about time to start hacking. We start by creating a ``Pi.scala`` file
and adding these import statements at the top of the file:

-.. includecode:: code/tutorials/first/Pi.scala#imports
+.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala#imports

If you are using SBT in this tutorial then create the file in the
``src/main/scala`` directory.
@@ -256,7 +274,7 @@ start by creating three messages as case classes. We also create a common base
trait for our messages (that we define as being ``sealed`` in order to prevent
creating messages outside our control):

-.. 
includecode:: code/tutorials/first/Pi.scala#messages +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala#messages Creating the worker @@ -267,67 +285,34 @@ trait and defining the ``receive`` method. The ``receive`` method defines our message handler. We expect it to be able to handle the ``Work`` message so we need to add a handler for this message: -.. includecode:: code/tutorials/first/Pi.scala#worker +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala#worker :exclude: calculatePiFor As you can see we have now created an ``Actor`` with a ``receive`` method as a handler for the ``Work`` message. In this handler we invoke the ``calculatePiFor(..)`` method, wrap the result in a ``Result`` message and send -it back to the original sender using ``self.reply``. In Akka the sender -reference is implicitly passed along with the message so that the receiver can -always reply or store away the sender reference for future use. +it back asynchronously to the original sender using the ``sender`` reference. +In Akka the sender reference is implicitly passed along with the message so that +the receiver can always reply or store away the sender reference for future use. The only thing missing in our ``Worker`` actor is the implementation on the ``calculatePiFor(..)`` method. While there are many ways we can implement this algorithm in Scala, in this introductory tutorial we have chosen an imperative style using a for comprehension and an accumulator: -.. includecode:: code/tutorials/first/Pi.scala#calculatePiFor +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala#calculatePiFor Creating the master =================== -The master actor is a little bit more involved. In its constructor we need to -create the workers (the ``Worker`` actors) and start them. We will also wrap -them in a load-balancing router to make it easier to spread out the work evenly -between the workers. Let's do that first: +The master actor is a little bit more involved. In its constructor we create a round-robin router +to make it easier to spread out the work evenly between the workers. Let's do that first: -.. includecode:: code/tutorials/first/Pi.scala#create-workers - -As you can see we are using the ``actorOf`` factory method to create actors, -this method returns as an ``ActorRef`` which is a reference to our newly created -actor. This method is available in the ``Actor`` object but is usually -imported:: - - import akka.actor.Actor.actorOf - -There are two versions of ``actorOf``; one of them taking a actor type and the -other one an instance of an actor. The former one (``actorOf(Props[MyActor]``) is used -when the actor class has a no-argument constructor while the second one -(``actorOf(Props(new MyActor(..))``) is used when the actor class has a constructor -that takes arguments. This is the only way to create an instance of an Actor and -the ``actorOf`` method ensures this. The latter version is using call-by-name -and lazily creates the actor within the scope of the ``actorOf`` method. The -``actorOf`` method instantiates the actor and returns, not an instance to the -actor, but an instance to an ``ActorRef``. This reference is the handle through -which you communicate with the actor. It is immutable, serializable and -location-aware meaning that it "remembers" its original actor even if it is sent -to other nodes across the network and can be seen as the equivalent to the -Erlang actor's PID. 
- -The actor's life-cycle is: - -- Created & Started -- ``Actor.actorOf(Props[MyActor])`` -- can receive messages -- Stopped -- ``actorRef.stop()`` -- can **not** receive messages - -Once the actor has been stopped it is dead and can not be started again. +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala#create-router Now we have a router that is representing all our workers in a single -abstraction. If you paid attention to the code above, you saw that we were using -the ``nrOfWorkers`` variable. This variable and others we have to pass to the -``Master`` actor in its constructor. So now let's create the master actor. We -have to pass in three integer variables: +abstraction. So now let's create the master actor. We pass it three integer variables: - ``nrOfWorkers`` -- defining how many workers we should start up - ``nrOfMessages`` -- defining how many number chunks to send out to the workers @@ -335,7 +320,7 @@ have to pass in three integer variables: Here is the master actor: -.. includecode:: code/tutorials/first/Pi.scala#master +.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala#master :exclude: handle-messages A couple of things are worth explaining further. @@ -343,17 +328,16 @@ A couple of things are worth explaining further. First, we are passing in a ``java.util.concurrent.CountDownLatch`` to the ``Master`` actor. This latch is only used for plumbing (in this specific tutorial), to have a simple way of letting the outside world knowing when the -master can deliver the result and shut down. In more idiomatic Akka code, as we -will see in part two of this tutorial series, we would not use a latch but other -abstractions and functions like ``Channel``, ``Future`` and ``?`` to achieve the -same thing in a non-blocking way. But for simplicity let's stick to a -``CountDownLatch`` for now. +master can deliver the result and shut down. In more idiomatic Akka code +we would not use a latch but other abstractions and functions like ``Future`` +and ``?`` to achieve the same thing in a non-blocking way. +But for simplicity let's stick to a ``CountDownLatch`` for now. Second, we are adding a couple of life-cycle callback methods; ``preStart`` and ``postStop``. In the ``preStart`` callback we are recording the time when the actor is started and in the ``postStop`` callback we are printing out the result (the approximation of Pi) and the time it took to calculate it. In this call we -also invoke ``latch.countDown`` to tell the outside world that we are done. +also invoke ``latch.countDown()`` to tell the outside world that we are done. But we are not done yet. We are missing the message handler for the ``Master`` actor. This message handler needs to be able to react to two different messages: @@ -361,22 +345,19 @@ actor. This message handler needs to be able to react to two different messages: - ``Calculate`` -- which should start the calculation - ``Result`` -- which should aggregate the different results -The ``Calculate`` handler is sending out work to all the ``Worker`` actors and -after doing that it also sends a ``Broadcast(PoisonPill)`` message to the -router, which will send out the ``PoisonPill`` message to all the actors it is -representing (in our case all the ``Worker`` actors). ``PoisonPill`` is a -special kind of message that tells the receiver to shut itself down using the -normal shutdown method; ``self.stop``. We also send a ``PoisonPill`` to the -router itself (since it's also an actor that we want to shut down). 
+The ``Calculate`` handler sends out work to all the ``Worker`` actors via its router.

-The ``Result`` handler is simpler, here we get the value from the ``Result``
-message and aggregate it to our ``pi`` member variable. We also keep track of
-how many results we have received back, and if that matches the number of tasks
-sent out, the ``Master`` actor considers itself done and shuts down.
+The ``Result`` handler gets the value from the ``Result`` message and aggregates it to
+our ``pi`` member variable. We also keep track of how many results we have received back,
+and if that matches the number of tasks sent out, the ``Master`` actor considers itself done and
+invokes the ``self.stop()`` method to stop itself *and* all its supervised actors.
+In this case it has one supervised actor, the router, and this in turn has ``nrOfWorkers`` supervised actors.
+All of them will be stopped automatically as the invocation of any supervisor's ``stop`` method
+will propagate down to all its supervised 'children'.

Let's capture this in code:

-.. includecode:: code/tutorials/first/Pi.scala#master-receive
+.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala#master-receive

Bootstrap the calculation
@@ -391,29 +372,35 @@ The ``Pi`` object is a perfect container module for our actors and messages,
so let's put them all there. We also create a method ``calculate`` in which we
start up the ``Master`` actor and wait for it to finish:

-.. includecode:: code/tutorials/first/Pi.scala#app
+.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala#app
 :exclude: actors-and-messages

+As you can see, the *calculate* method above creates an ActorSystem, and this is the Akka container which
+will contain all actors created in that "context". An example of how to create actors in the container
+is the ``system.actorOf(...)`` line in the ``calculate`` method. In this case we create a top-level actor.
+If you instead were in an actor context, i.e. inside an actor creating other actors, you should use
+``context.actorOf(...)``. This is illustrated in the ``Master`` code above.
+
That's it. Now we are done.

But before we package it up and run it, let's take a look at the full code now,
with package declaration, imports and all:

-.. includecode:: code/tutorials/first/Pi.scala
+.. includecode:: ../../akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala

Run it as a command line application
====================================

-If you have not typed in (or copied) the code for the tutorial as
-``$AKKA_HOME/tutorial/Pi.scala`` then now is the time. When that's done open up
-a shell and step in to the Akka distribution (``cd $AKKA_HOME``).
+If you have not typed in (or copied) the code for the tutorial as in
+``$AKKA_HOME/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala`` then now is the time.
+When that's done open up a shell and step in to the Akka distribution (``cd $AKKA_HOME``).

First we need to compile the source file. That is done with Scala's compiler
``scalac``. Our application depends on the ``akka-actor-2.0-SNAPSHOT.jar`` JAR
file, so let's add that to the compiler classpath when we compile the source::

- $ scalac -cp lib/akka/akka-actor-2.0-SNAPSHOT.jar tutorial/Pi.scala
+ $ scalac -cp lib/akka/akka-actor-2.0-SNAPSHOT.jar Pi.scala

When we have compiled the source file we are ready to run the application.
This is done with ``java`` but yet again we need to add the @@ -426,7 +413,7 @@ compiled ourselves:: akka.tutorial.first.scala.Pi Pi estimate: 3.1435501812459323 - Calculation time: 858 millis + Calculation time: 553 millis Yippee! It is working. @@ -445,7 +432,7 @@ When this in done we can run our application directly inside SBT:: > run ... Pi estimate: 3.1435501812459323 - Calculation time: 942 millis + Calculation time: 531 millis Yippee! It is working. diff --git a/akka-docs/intro/use-cases.rst b/akka-docs/intro/use-cases.rst index fd434e89cc..498ce7a997 100644 --- a/akka-docs/intro/use-cases.rst +++ b/akka-docs/intro/use-cases.rst @@ -1,7 +1,8 @@ Examples of use-cases for Akka ============================== -There is a great discussion on use-cases for Akka with some good write-ups by production users `here `_ +There is a great discussion on use-cases for Akka with some good write-ups by production +users `here `_ Here are some of the areas where Akka is being deployed into production ----------------------------------------------------------------------- diff --git a/akka-docs/intro/what-is-akka.rst b/akka-docs/intro/what-is-akka.rst index 9400da5691..9ba05f5c26 100644 --- a/akka-docs/intro/what-is-akka.rst +++ b/akka-docs/intro/what-is-akka.rst @@ -34,7 +34,7 @@ Actors give you: - Simple and high-level abstractions for concurrency and parallelism. - Asynchronous, non-blocking and highly performant event-driven programming model. - - Very lightweight event-driven processes (create ~6.5 million actors on 4GB RAM). + - Very lightweight event-driven processes (approximately 2.7 million actors per GB RAM). See :ref:`actors-scala` and :ref:`untyped-actors-java` @@ -47,16 +47,11 @@ systems that self-heal. See :ref:`fault-tolerance-scala` and :ref:`fault-tolerance-java` -Software Transactional Memory (STM) ------------------------------------ - -See :ref:`stm-scala` and :ref:`stm-java` - Transactors ----------- -Transactors combine actors and STM into transactional actors. Allows you to -compose atomic message flows with automatic retry and rollback. +Transactors combine actors and STM (Software Transactional Memory) into transactional actors. +It allows you to compose atomic message flows with automatic retry and rollback. See :ref:`transactors-scala` and :ref:`transactors-java` diff --git a/akka-docs/intro/why-akka.rst b/akka-docs/intro/why-akka.rst index c6846c1872..6ad15a9f76 100644 --- a/akka-docs/intro/why-akka.rst +++ b/akka-docs/intro/why-akka.rst @@ -26,10 +26,8 @@ of concurrency-paradigms, allowing for users to choose the right tool for the job. The integration possibilities for Akka Actors are immense through the Apache -Camel integration. We provide Software Transactional Memory concurrency control -through the excellent Multiverse project, and have integrated that with Actors, -creating Transactors for coordinated concurrent transactions. We have Agents and -Dataflow concurrency as well. +Camel integration. We have Transactors for coordinated concurrent transactions, +as well as Agents and Dataflow concurrency. What's a good use-case for Akka? @@ -54,14 +52,12 @@ have a commercial product called Akka Atmos which provides the following features: #. Management through Dashboard, JMX and REST -#. Monitoring through Dashboard, JMX and SNMP #. Dapper-style tracing of messages across components and remote nodes #. A configurable alert system #. Real-time statistics #. Very low overhead monitoring agents (should always be on in production) #. 
Consolidation of statistics and logging information to a single node -#. Data analysis through Hadoop #. Storage of statistics data for later processing -#. Provisioning and rolling upgrades through a dashboard +#. Provisioning and rolling upgrades Read more `here `_. diff --git a/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java b/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java index 577c32fe3e..7c90b2f310 100644 --- a/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java +++ b/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java @@ -92,13 +92,16 @@ public class Pi { private ActorRef router; - public Master(final int nrOfWorkers, int nrOfMessages, int nrOfElements, CountDownLatch latch) { + public Master(final int nrOfWorkers, int nrOfMessages, + int nrOfElements, CountDownLatch latch) { this.nrOfMessages = nrOfMessages; this.nrOfElements = nrOfElements; this.latch = latch; //#create-router - router = this.getContext().actorOf(new Props().withCreator(Worker.class).withRouter(new RoundRobinRouter(nrOfWorkers)), "pi"); + router = this.getContext().actorOf(new Props().withCreator( + Worker.class).withRouter(new RoundRobinRouter(nrOfWorkers)), + "pi"); //#create-router } @@ -135,8 +138,10 @@ public class Pi { //#master //#actors-and-messages - public void calculate(final int nrOfWorkers, final int nrOfElements, final int nrOfMessages) - throws Exception { + public void calculate(final int nrOfWorkers, + final int nrOfElements, + final int nrOfMessages) + throws Exception { // Create an Akka system final ActorSystem system = ActorSystem.create(); diff --git a/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala b/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala index 0a6c0ed04e..10395405f7 100644 --- a/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala +++ b/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala @@ -50,7 +50,8 @@ object Pi extends App { var start: Long = _ //#create-router - val router = context.actorOf(Props(new Worker).withRouter(RoundRobinRouter(nrOfInstances = nrOfWorkers)), "pi") + val router = context.actorOf(Props(new Worker).withRouter( + RoundRobinRouter(nrOfInstances = nrOfWorkers)), "pi") //#create-router //#master-receive @@ -89,7 +90,9 @@ object Pi extends App { val latch = new CountDownLatch(1) // create the master - val master = system.actorOf(Props(new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch)), "master") + val master = system.actorOf(Props(new Master( + nrOfWorkers, nrOfMessages, nrOfElements, latch)), + "master") // start the calculation master ! Calculate From 06a08c5823ecc2c7a5f36f765e4930f481fa0ca5 Mon Sep 17 00:00:00 2001 From: Patrik Nordwall Date: Wed, 14 Dec 2011 12:39:27 +0100 Subject: [PATCH 08/34] Removed STM module. See #1503 * Removed from build. Didn't remove akka-stm directory, contains transactors also. 
* Replaced usage of org.multiverse.api.latches.StandardLatch in some tests with testkit.TestLatch --- .../akka/actor/RestartStrategySpec.scala | 81 +- .../test/scala/akka/actor/SchedulerSpec.scala | 6 +- .../test/scala/akka/dispatch/FutureSpec.scala | 106 +- akka-docs/java/index.rst | 1 - akka-docs/java/transactors.rst | 6 + akka-docs/modules/camel.rst | 2897 +---------------- .../project/migration-guide-0.10.x-1.0.x.rst | 447 --- .../project/migration-guide-0.8.x-0.9.x.rst | 172 - akka-docs/scala/code/StmDocSpec.scala | 27 - akka-docs/scala/index.rst | 1 - akka-docs/scala/transactors.rst | 6 + .../main/scala/akka/testkit/TestLatch.scala | 4 +- project/AkkaBuild.scala | 22 +- 13 files changed, 117 insertions(+), 3659 deletions(-) create mode 100644 akka-docs/java/transactors.rst delete mode 100644 akka-docs/project/migration-guide-0.10.x-1.0.x.rst delete mode 100644 akka-docs/project/migration-guide-0.8.x-0.9.x.rst delete mode 100644 akka-docs/scala/code/StmDocSpec.scala create mode 100644 akka-docs/scala/transactors.rst diff --git a/akka-actor-tests/src/test/scala/akka/actor/RestartStrategySpec.scala b/akka-actor-tests/src/test/scala/akka/actor/RestartStrategySpec.scala index f3f70a09d7..06ab5553a9 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/RestartStrategySpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/RestartStrategySpec.scala @@ -9,9 +9,10 @@ import org.scalatest.BeforeAndAfterAll import akka.testkit.TestEvent._ import akka.testkit.EventFilter import java.util.concurrent.{ TimeUnit, CountDownLatch } -import org.multiverse.api.latches.StandardLatch import akka.testkit.AkkaSpec import akka.testkit.DefaultTimeout +import akka.testkit.TestLatch +import akka.util.duration._ @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class RestartStrategySpec extends AkkaSpec with DefaultTimeout { @@ -28,10 +29,10 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { "ensure that slave stays dead after max restarts within time range" in { val boss = system.actorOf(Props[Supervisor].withFaultHandler(OneForOneStrategy(List(classOf[Throwable]), 2, 1000))) - val restartLatch = new StandardLatch - val secondRestartLatch = new StandardLatch + val restartLatch = new TestLatch + val secondRestartLatch = new TestLatch val countDownLatch = new CountDownLatch(3) - val stopLatch = new StandardLatch + val stopLatch = new TestLatch val slaveProps = Props(new Actor { @@ -42,13 +43,13 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { override def postRestart(reason: Throwable) = { if (!restartLatch.isOpen) - restartLatch.open + restartLatch.open() else - secondRestartLatch.open + secondRestartLatch.open() } override def postStop() = { - stopLatch.open + stopLatch.open() } }) val slave = (boss ? slaveProps).as[ActorRef].get @@ -58,17 +59,17 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { slave ! Ping // test restart and post restart ping - assert(restartLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(restartLatch.await(10 seconds)) // now crash again... should not restart slave ! Crash slave ! Ping - assert(secondRestartLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(secondRestartLatch.await(10 seconds)) assert(countDownLatch.await(10, TimeUnit.SECONDS)) slave ! 
Crash - assert(stopLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(stopLatch.await(10 seconds)) } "ensure that slave is immortal without max restarts and time range" in { @@ -96,11 +97,11 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { "ensure that slave restarts after number of crashes not within time range" in { val boss = system.actorOf(Props[Supervisor].withFaultHandler(OneForOneStrategy(List(classOf[Throwable]), 2, 500))) - val restartLatch = new StandardLatch - val secondRestartLatch = new StandardLatch - val thirdRestartLatch = new StandardLatch - val pingLatch = new StandardLatch - val secondPingLatch = new StandardLatch + val restartLatch = new TestLatch + val secondRestartLatch = new TestLatch + val thirdRestartLatch = new TestLatch + val pingLatch = new TestLatch + val secondPingLatch = new TestLatch val slaveProps = Props(new Actor { @@ -111,16 +112,16 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { } override def postRestart(reason: Throwable) = { if (!restartLatch.isOpen) - restartLatch.open + restartLatch.open() else if (!secondRestartLatch.isOpen) - secondRestartLatch.open + secondRestartLatch.open() else - thirdRestartLatch.open + thirdRestartLatch.open() } override def postStop() = { if (restartLatch.isOpen) { - secondRestartLatch.open + secondRestartLatch.open() } } }) @@ -129,14 +130,14 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { slave ! Ping slave ! Crash - assert(restartLatch.tryAwait(10, TimeUnit.SECONDS)) - assert(pingLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(restartLatch.await(10 seconds)) + assert(pingLatch.await(10 seconds)) slave ! Ping slave ! Crash - assert(secondRestartLatch.tryAwait(10, TimeUnit.SECONDS)) - assert(secondPingLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(secondRestartLatch.await(10 seconds)) + assert(secondPingLatch.await(10 seconds)) // sleep to go out of the restart strategy's time range sleep(700L) @@ -145,7 +146,7 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { slave ! Crash slave ! Ping - assert(thirdRestartLatch.tryAwait(1, TimeUnit.SECONDS)) + assert(thirdRestartLatch.await(1 second)) assert(!slave.isTerminated) } @@ -153,10 +154,10 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { "ensure that slave is not restarted after max retries" in { val boss = system.actorOf(Props[Supervisor].withFaultHandler(OneForOneStrategy(List(classOf[Throwable]), Some(2), None))) - val restartLatch = new StandardLatch - val secondRestartLatch = new StandardLatch + val restartLatch = new TestLatch + val secondRestartLatch = new TestLatch val countDownLatch = new CountDownLatch(3) - val stopLatch = new StandardLatch + val stopLatch = new TestLatch val slaveProps = Props(new Actor { @@ -166,13 +167,13 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { } override def postRestart(reason: Throwable) = { if (!restartLatch.isOpen) - restartLatch.open + restartLatch.open() else - secondRestartLatch.open + secondRestartLatch.open() } override def postStop() = { - stopLatch.open + stopLatch.open() } }) val slave = (boss ? slaveProps).as[ActorRef].get @@ -182,7 +183,7 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { slave ! Ping // test restart and post restart ping - assert(restartLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(restartLatch.await(10 seconds)) assert(!slave.isTerminated) @@ -190,25 +191,25 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { slave ! Crash slave ! 
Ping - assert(secondRestartLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(secondRestartLatch.await(10 seconds)) assert(countDownLatch.await(10, TimeUnit.SECONDS)) sleep(700L) slave ! Crash - assert(stopLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(stopLatch.await(10 seconds)) sleep(500L) assert(slave.isTerminated) } "ensure that slave is not restarted within time range" in { - val restartLatch, stopLatch, maxNoOfRestartsLatch = new StandardLatch + val restartLatch, stopLatch, maxNoOfRestartsLatch = new TestLatch val countDownLatch = new CountDownLatch(2) val boss = system.actorOf(Props(new Actor { def receive = { case p: Props ⇒ sender ! context.watch(context.actorOf(p)) - case t: Terminated ⇒ maxNoOfRestartsLatch.open + case t: Terminated ⇒ maxNoOfRestartsLatch.open() } }).withFaultHandler(OneForOneStrategy(List(classOf[Throwable]), None, Some(1000)))) @@ -220,11 +221,11 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { } override def postRestart(reason: Throwable) = { - restartLatch.open + restartLatch.open() } override def postStop() = { - stopLatch.open + stopLatch.open() } }) val slave = (boss ? slaveProps).as[ActorRef].get @@ -234,7 +235,7 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { slave ! Ping // test restart and post restart ping - assert(restartLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(restartLatch.await(10 seconds)) assert(!slave.isTerminated) @@ -248,9 +249,9 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { // may not be running slave ! Crash - assert(stopLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(stopLatch.await(10 seconds)) - assert(maxNoOfRestartsLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(maxNoOfRestartsLatch.await(10 seconds)) sleep(500L) assert(slave.isTerminated) } diff --git a/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala index ceeb768b6c..e4a46bc8ae 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala @@ -1,12 +1,12 @@ package akka.actor import org.scalatest.BeforeAndAfterEach -import org.multiverse.api.latches.StandardLatch import akka.testkit.AkkaSpec import akka.testkit.EventFilter import akka.util.duration._ import java.util.concurrent.{ CountDownLatch, ConcurrentLinkedQueue, TimeUnit } import akka.testkit.DefaultTimeout +import akka.testkit.TestLatch @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout { @@ -101,7 +101,7 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout object Ping object Crash - val restartLatch = new StandardLatch + val restartLatch = new TestLatch val pingLatch = new CountDownLatch(6) val supervisor = system.actorOf(Props[Supervisor].withFaultHandler(AllForOneStrategy(List(classOf[Exception]), 3, 1000))) @@ -121,7 +121,7 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout collectCancellable(system.scheduler.scheduleOnce(1000 milliseconds, actor, Crash)) } - assert(restartLatch.tryAwait(2, TimeUnit.SECONDS)) + assert(restartLatch.await(2 seconds)) // should be enough time for the ping countdown to recover and reach 6 pings assert(pingLatch.await(4, TimeUnit.SECONDS)) } diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala index 44ddf4f8bc..5a6180b130 100644 
--- a/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala @@ -9,12 +9,12 @@ import org.scalacheck.Gen._ import akka.actor._ import akka.testkit.{ EventFilter, filterEvents, filterException } import akka.util.duration._ -import org.multiverse.api.latches.StandardLatch import java.util.concurrent.{ TimeUnit, CountDownLatch } import akka.testkit.AkkaSpec import org.scalatest.junit.JUnitSuite import java.lang.ArithmeticException import akka.testkit.DefaultTimeout +import akka.testkit.TestLatch object FutureSpec { class TestActor extends Actor { @@ -26,7 +26,7 @@ object FutureSpec { } } - class TestDelayActor(await: StandardLatch) extends Actor { + class TestDelayActor(await: TestLatch) extends Actor { def receive = { case "Hello" ⇒ await.await; sender ! "World" case "NoReply" ⇒ await.await @@ -70,26 +70,26 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa "awaiting a result" that { "is not completed" must { behave like emptyFuture { test ⇒ - val latch = new StandardLatch + val latch = new TestLatch val result = "test value" val future = Future { latch.await result } test(future) - latch.open + latch.open() future.await } } "is completed" must { behave like futureWithResult { test ⇒ - val latch = new StandardLatch + val latch = new TestLatch val result = "test value" val future = Future { latch.await result } - latch.open + latch.open() future.await test(future, result) } @@ -392,10 +392,10 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa } "receiveShouldExecuteOnComplete" in { - val latch = new StandardLatch + val latch = new TestLatch val actor = system.actorOf(Props[TestActor]) - actor ? "Hello" onResult { case "World" ⇒ latch.open } - assert(latch.tryAwait(5, TimeUnit.SECONDS)) + actor ? 
"Hello" onResult { case "World" ⇒ latch.open() } + assert(latch.await(5 seconds)) actor.stop() } @@ -425,12 +425,12 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa f1.await intercept[ThrowableTest] { f1.get } - val latch = new StandardLatch - val f2 = Future { latch.tryAwait(5, TimeUnit.SECONDS); "success" } + val latch = new TestLatch + val f2 = Future { latch.await(5 seconds); "success" } f2 foreach (_ ⇒ throw new ThrowableTest("dispatcher foreach")) f2 onResult { case _ ⇒ throw new ThrowableTest("dispatcher receive") } val f3 = f2 map (s ⇒ s.toUpperCase) - latch.open + latch.open() f2.await assert(f2.get === "success") f2 foreach (_ ⇒ throw new ThrowableTest("current thread foreach")) @@ -441,13 +441,13 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa } "shouldBlockUntilResult" in { - val latch = new StandardLatch + val latch = new TestLatch val f = Future({ latch.await; 5 }) val f2 = Future({ f.get + 5 }) assert(f2.resultOrException === None) - latch.open + latch.open() assert(f2.get === 10) val f3 = Future({ Thread.sleep(10); 5 }, 10 millis) @@ -520,19 +520,19 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa import Future.flow val x, y, z = Promise[Int]() - val ly, lz = new StandardLatch + val ly, lz = new TestLatch val result = flow { y completeWith x - ly.open // not within continuation + ly.open() // not within continuation z << x - lz.open // within continuation, will wait for 'z' to complete + lz.open() // within continuation, will wait for 'z' to complete z() + y() } - assert(ly.tryAwaitUninterruptible(100, TimeUnit.MILLISECONDS)) - assert(!lz.tryAwaitUninterruptible(100, TimeUnit.MILLISECONDS)) + assert(ly.await(100 milliseconds)) + lz.awaitTimeout(100 milliseconds) flow { x << 5 } @@ -557,10 +557,10 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa } "shouldNotAddOrRunCallbacksAfterFailureToBeCompletedBeforeExpiry" in { - val latch = new StandardLatch + val latch = new TestLatch val f = Promise[Int](0) Thread.sleep(25) - f.onComplete(_ ⇒ latch.open) //Shouldn't throw any exception here + f.onComplete(_ ⇒ latch.open()) //Shouldn't throw any exception here assert(f.isExpired) //Should be expired @@ -599,7 +599,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa "futureDataFlowShouldEmulateBlocking2" in { import Future.flow val x1, x2, y1, y2 = Promise[Int](1000 * 60) - val lx, ly, lz = new StandardLatch + val lx, ly, lz = new TestLatch val result = flow { lx.open() x1 << y1 @@ -608,20 +608,20 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa lz.open() x1() + x2() } - assert(lx.tryAwaitUninterruptible(2000, TimeUnit.MILLISECONDS)) + assert(lx.await(2000 milliseconds)) assert(!ly.isOpen) assert(!lz.isOpen) assert(List(x1, x2, y1, y2).forall(_.isCompleted == false)) flow { y1 << 1 } // When this is set, it should cascade down the line - assert(ly.tryAwaitUninterruptible(2000, TimeUnit.MILLISECONDS)) + assert(ly.await(2000 milliseconds)) assert(x1.get === 1) assert(!lz.isOpen) flow { y2 << 9 } // When this is set, it should cascade down the line - assert(lz.tryAwaitUninterruptible(2000, TimeUnit.MILLISECONDS)) + assert(lz.await(2000 milliseconds)) assert(x2.get === 9) assert(List(x1, x2, y1, y2).forall(_.isCompleted == true)) @@ -632,20 +632,20 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa "dataFlowAPIshouldbeSlick" in { import Future.flow - 
val i1, i2, s1, s2 = new StandardLatch + val i1, i2, s1, s2 = new TestLatch - val callService1 = Future { i1.open; s1.awaitUninterruptible; 1 } - val callService2 = Future { i2.open; s2.awaitUninterruptible; 9 } + val callService1 = Future { i1.open(); s1.await; 1 } + val callService2 = Future { i2.open(); s2.await; 9 } val result = flow { callService1() + callService2() } assert(!s1.isOpen) assert(!s2.isOpen) assert(!result.isCompleted) - assert(i1.tryAwaitUninterruptible(2000, TimeUnit.MILLISECONDS)) - assert(i2.tryAwaitUninterruptible(2000, TimeUnit.MILLISECONDS)) - s1.open - s2.open + assert(i1.await(2000 milliseconds)) + assert(i2.await(2000 milliseconds)) + s1.open() + s2.open() assert(result.get === 10) } @@ -654,19 +654,19 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa import Future.flow val x, y, z = Promise[Int]() - val ly, lz = new StandardLatch + val ly, lz = new TestLatch val result = flow { y << x - ly.open + ly.open() val oops = 1 / 0 z << x - lz.open + lz.open() z() + y() + oops } - assert(!ly.tryAwaitUninterruptible(100, TimeUnit.MILLISECONDS)) - assert(!lz.tryAwaitUninterruptible(100, TimeUnit.MILLISECONDS)) + ly.awaitTimeout(100 milliseconds) + lz.awaitTimeout(100 milliseconds) flow { x << 5 } @@ -680,7 +680,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa "futureContinuationsShouldNotBlock" in { import Future.flow - val latch = new StandardLatch + val latch = new TestLatch val future = Future { latch.await "Hello" @@ -692,7 +692,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa assert(!result.isCompleted) - latch.open + latch.open() assert(result.get === Some("Hello")) } @@ -763,39 +763,39 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa } "run callbacks async" in { - val latch = Vector.fill(10)(new StandardLatch) + val latch = Vector.fill(10)(new TestLatch) - val f1 = Future { latch(0).open; latch(1).await; "Hello" } - val f2 = f1 map { s ⇒ latch(2).open; latch(3).await; s.length } - f2 foreach (_ ⇒ latch(4).open) + val f1 = Future { latch(0).open(); latch(1).await; "Hello" } + val f2 = f1 map { s ⇒ latch(2).open(); latch(3).await; s.length } + f2 foreach (_ ⇒ latch(4).open()) latch(0).await f1 must not be ('completed) f2 must not be ('completed) - latch(1).open + latch(1).open() latch(2).await f1 must be('completed) f2 must not be ('completed) - val f3 = f1 map { s ⇒ latch(5).open; latch(6).await; s.length * 2 } - f3 foreach (_ ⇒ latch(3).open) + val f3 = f1 map { s ⇒ latch(5).open(); latch(6).await; s.length * 2 } + f3 foreach (_ ⇒ latch(3).open()) latch(5).await f3 must not be ('completed) - latch(6).open + latch(6).open() latch(4).await f2 must be('completed) f3 must be('completed) val p1 = Promise[String]() - val f4 = p1 map { s ⇒ latch(7).open; latch(8).await; s.length } - f4 foreach (_ ⇒ latch(9).open) + val f4 = p1 map { s ⇒ latch(7).open(); latch(8).await; s.length } + f4 foreach (_ ⇒ latch(9).open()) p1 must not be ('completed) f4 must not be ('completed) @@ -807,7 +807,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa p1 must be('completed) f4 must not be ('completed) - latch(8).open + latch(8).open() latch(9).await f4.await must be('completed) @@ -817,13 +817,13 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa val simple = Future() map (_ ⇒ (Future(()) map (_ ⇒ ())).get) simple.await must be('completed) - val l1, l2 = new StandardLatch + val l1, 
l2 = new TestLatch val complex = Future() map { _ ⇒ Future.blocking() val nested = Future() - nested foreach (_ ⇒ l1.open) + nested foreach (_ ⇒ l1.open()) l1.await // make sure nested is completed - nested foreach (_ ⇒ l2.open) + nested foreach (_ ⇒ l2.open()) l2.await } assert(complex.await.isCompleted) diff --git a/akka-docs/java/index.rst b/akka-docs/java/index.rst index b7db493c09..e864b9d63c 100644 --- a/akka-docs/java/index.rst +++ b/akka-docs/java/index.rst @@ -11,7 +11,6 @@ Java API logging futures dataflow - stm transactors fault-tolerance dispatchers diff --git a/akka-docs/java/transactors.rst b/akka-docs/java/transactors.rst new file mode 100644 index 0000000000..994ad00cb5 --- /dev/null +++ b/akka-docs/java/transactors.rst @@ -0,0 +1,6 @@ +.. _transactors-java: + +Transactors (Java) +================== + +The Akka Transactors module has not been migrated to Akka 2.0-SNAPSHOT yet. \ No newline at end of file diff --git a/akka-docs/modules/camel.rst b/akka-docs/modules/camel.rst index 4aa988d609..51601d5881 100644 --- a/akka-docs/modules/camel.rst +++ b/akka-docs/modules/camel.rst @@ -5,2899 +5,4 @@ Camel ####### -For an introduction to akka-camel, see also the `Appendix E - Akka and Camel`_ -(pdf) of the book `Camel in Action`_. - -.. _Appendix E - Akka and Camel: http://www.manning.com/ibsen/appEsample.pdf -.. _Camel in Action: http://www.manning.com/ibsen/ - -Contents: - -.. contents:: :local: - -Other, more advanced external articles are: - -* `Akka Consumer Actors: New Features and Best Practices `_ -* `Akka Producer Actors: New Features and Best Practices `_ - - -Introduction -============ - -The akka-camel module allows actors, untyped actors, and typed actors to receive -and send messages over a great variety of protocols and APIs. This section gives -a brief overview of the general ideas behind the akka-camel module, the -remaining sections go into the details. In addition to the native Scala and Java -actor API, actors can now exchange messages with other systems over large number -of protocols and APIs such as HTTP, SOAP, TCP, FTP, SMTP or JMS, to mention a -few. At the moment, approximately 80 protocols and APIs are supported. - -The akka-camel module is based on `Apache Camel`_, a powerful and leight-weight -integration framework for the JVM. For an introduction to Apache Camel you may -want to read this `Apache Camel article`_. Camel comes with a -large number of `components`_ that provide bindings to different protocols and -APIs. The `camel-extra`_ project provides further components. - -.. _Apache Camel: http://camel.apache.org/ -.. _Apache Camel article: http://architects.dzone.com/articles/apache-camel-integration -.. _components: http://camel.apache.org/components.html -.. _camel-extra: http://code.google.com/p/camel-extra/ - -Usage of Camel's integration components in Akka is essentially a -one-liner. Here's an example. - -.. code-block:: scala - - import akka.actor.Actor - import akka.actor.Actor._ - import akka.camel.{Message, Consumer} - - class MyActor extends Actor with Consumer { - def endpointUri = "mina:tcp://localhost:6200?textline=true" - - def receive = { - case msg: Message => { /* ... */} - case _ => { /* ... */} - } - } - - // start and expose actor via tcp - val myActor = actorOf(Props[MyActor]) - -The above example exposes an actor over a tcp endpoint on port 6200 via Apache -Camel's `Mina component`_. The actor implements the endpointUri method to define -an endpoint from which it can receive messages. 
After starting the actor, tcp -clients can immediately send messages to and receive responses from that -actor. If the message exchange should go over HTTP (via Camel's `Jetty -component`_), only the actor's endpointUri method must be changed. - -.. _Mina component: http://camel.apache.org/mina.html -.. _Jetty component: http://camel.apache.org/jetty.html - -.. code-block:: scala - - class MyActor extends Actor with Consumer { - def endpointUri = "jetty:http://localhost:8877/example" - - def receive = { - case msg: Message => { /* ... */} - case _ => { /* ... */} - } - } - -Actors can also trigger message exchanges with external systems i.e. produce to -Camel endpoints. - -.. code-block:: scala - - import akka.actor.Actor - import akka.camel.{Producer, Oneway} - - class MyActor extends Actor with Producer with Oneway { - def endpointUri = "jms:queue:example" - } - -In the above example, any message sent to this actor will be added (produced) to -the example JMS queue. Producer actors may choose from the same set of Camel -components as Consumer actors do. - -The number of Camel components is constantly increasing. The akka-camel module -can support these in a plug-and-play manner. Just add them to your application's -classpath, define a component-specific endpoint URI and use it to exchange -messages over the component-specific protocols or APIs. This is possible because -Camel components bind protocol-specific message formats to a Camel-specific -`normalized message format`__. The normalized message format hides -protocol-specific details from Akka and makes it therefore very easy to support -a large number of protocols through a uniform Camel component interface. The -akka-camel module further converts mutable Camel messages into `immutable -representations`__ which are used by Consumer and Producer actors for pattern -matching, transformation, serialization or storage, for example. - -__ https://svn.apache.org/repos/asf/camel/trunk/camel-core/src/main/java/org/apache/camel/Message.java -__ http://github.com/jboner/akka/blob/v0.8/akka-camel/src/main/scala/akka/Message.scala#L17 - - -Dependencies -============ - -Akka's Camel Integration consists of two modules - -* akka-camel - this module depends on akka-actor and camel-core (+ transitive - dependencies) and implements the Camel integration for (untyped) actors - -* akka-camel-typed - this module depends on akka-typed-actor and akka-camel (+ - transitive dependencies) and implements the Camel integration for typed actors - -The akka-camel-typed module is optional. To have both untyped and typed actors -working with Camel, add the following dependencies to your SBT project -definition. - -.. code-block:: scala - - import sbt._ - - class Project(info: ProjectInfo) extends DefaultProject(info) with AkkaProject { - // ... - val akkaCamel = akkaModule("camel") - val akkaCamelTyped = akkaModule("camel-typed") // optional typed actor support - // ... - } - - -.. _camel-consume-messages: - -Consume messages -================ - -Actors (untyped) ----------------- - -For actors (Scala) to receive messages, they must mixin the `Consumer`_ -trait. For example, the following actor class (Consumer1) implements the -endpointUri method, which is declared in the Consumer trait, in order to receive -messages from the ``file:data/input/actor`` Camel endpoint. Untyped actors -(Java) need to extend the abstract UntypedConsumerActor class and implement the -getEndpointUri() and onReceive(Object) methods. - -.. 
_Consumer: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/Consumer.scala - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor - import akka.camel.{Message, Consumer} - - class Consumer1 extends Actor with Consumer { - def endpointUri = "file:data/input/actor" - - def receive = { - case msg: Message => println("received %s" format msg.bodyAs[String]) - } - } - -**Java** - -.. code-block:: java - - import akka.camel.Message; - import akka.camel.UntypedConsumerActor; - - public class Consumer1 extends UntypedConsumerActor { - public String getEndpointUri() { - return "file:data/input/actor"; - } - - public void onReceive(Object message) { - Message msg = (Message)message; - String body = msg.getBodyAs(String.class); - System.out.println(String.format("received %s", body)) - } - } - -Whenever a file is put into the data/input/actor directory, its content is -picked up by the Camel `file component`_ and sent as message to the -actor. Messages consumed by actors from Camel endpoints are of type -`Message`_. These are immutable representations of Camel messages. - -.. _file component: http://camel.apache.org/file2.html -.. _Message: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/Message.scala - -For Message usage examples refer to the unit tests: - -* Message unit tests - `Scala API `_ -* Message unit tests - `Java API `_ - -Here's another example that sets the endpointUri to -``jetty:http://localhost:8877/camel/default``. It causes Camel's `Jetty -component`_ to start an embedded `Jetty`_ server, accepting HTTP connections -from localhost on port 8877. - -.. _Jetty component: http://camel.apache.org/jetty.html -.. _Jetty: http://www.eclipse.org/jetty/ - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor - import akka.camel.{Message, Consumer} - - class Consumer2 extends Actor with Consumer { - def endpointUri = "jetty:http://localhost:8877/camel/default" - - def receive = { - case msg: Message => self.reply("Hello %s" format msg.bodyAs[String]) - } - } - -**Java** - -.. code-block:: java - - import akka.camel.Message; - import akka.camel.UntypedConsumerActor; - - public class Consumer2 extends UntypedConsumerActor { - public String getEndpointUri() { - return "jetty:http://localhost:8877/camel/default"; - } - - public void onReceive(Object message) { - Message msg = (Message)message; - String body = msg.getBodyAs(String.class); - getContext().tryReply(String.format("Hello %s", body)); - } - } - -After starting the actor, clients can send messages to that actor by POSTing to -``http://localhost:8877/camel/default``. The actor sends a response by using the -self.reply method (Scala). For returning a message body and headers to the HTTP -client the response type should be `Message`_. For any other response type, a -new Message object is created by akka-camel with the actor response as message -body. - -.. _Message: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/Message.scala - - -Typed actors ------------- - -Typed actors can also receive messages from Camel endpoints. In contrast to -(untyped) actors, which only implement a single receive or onReceive method, a -typed actor may define several (message processing) methods, each of which can -receive messages from a different Camel endpoint. For a typed actor method to be -exposed as Camel endpoint it must be annotated with the `@consume -annotation`_. 
For example, the following typed consumer actor defines two -methods, foo and bar. - -.. _@consume annotation: http://github.com/jboner/akka/blob/master/akka-camel/src/main/java/akka/camel/consume.java - -**Scala** - -.. code-block:: scala - - import org.apache.camel.{Body, Header} - import akka.actor.TypedActor - import akka.camel.consume - - trait TypedConsumer1 { - @consume("file:data/input/foo") - def foo(body: String): Unit - - @consume("jetty:http://localhost:8877/camel/bar") - def bar(@Body body: String, @Header("X-Whatever") header: String): String - } - - class TypedConsumer1Impl extends TypedActor with TypedConsumer1 { - def foo(body: String) = println("Received message: %s" format body) - def bar(body: String, header: String) = "body=%s header=%s" format (body, header) - } - -**Java** - -.. code-block:: java - - import org.apache.camel.Body; - import org.apache.camel.Header; - import akka.actor.TypedActor; - import akka.camel.consume; - - public interface TypedConsumer1 { - @consume("file:data/input/foo") - public void foo(String body); - - @consume("jetty:http://localhost:8877/camel/bar") - public String bar(@Body String body, @Header("X-Whatever") String header); - } - - public class TypedConsumer1Impl extends TypedActor implements TypedConsumer1 { - public void foo(String body) { - System.out.println(String.format("Received message: ", body)); - } - - public String bar(String body, String header) { - return String.format("body=%s header=%s", body, header); - } - } - -The foo method can be invoked by placing a file in the data/input/foo -directory. Camel picks up the file from this directory and akka-camel invokes -foo with the file content as argument (converted to a String). Camel -automatically tries to convert messages to appropriate types as defined by the -method parameter(s). The conversion rules are described in detail on the -following pages: - -* `Bean integration `_ -* `Bean binding `_ -* `Parameter binding `_ - -The bar method can be invoked by POSTing a message to -http://localhost:8877/camel/bar. Here, parameter binding annotations are used to -tell Camel how to extract data from the HTTP message. The @Body annotation binds -the HTTP request body to the first parameter, the @Header annotation binds the -X-Whatever header to the second parameter. The return value is sent as HTTP -response message body to the client. - -Parameter binding annotations must be placed on the interface, the @consume -annotation can also be placed on the methods in the implementation class. - - -.. _camel-publishing: - -Consumer publishing -------------------- - -Actors (untyped) -^^^^^^^^^^^^^^^^ - -Publishing a consumer actor at its Camel endpoint occurs when the actor is -started. Publication is done asynchronously; setting up an endpoint (more -precisely, the route from that endpoint to the actor) may still be in progress -after the ActorRef method returned. - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor._ - - val actor = actorOf(Props[Consumer1]) // create Consumer actor and activate endpoint in background - -**Java** - -.. code-block:: java - - import static akka.actor.Actors.*; - import akka.actor.ActorRef; - - ActorRef actor = actorOf(new Props(Consumer1.class)); // create Consumer actor and activate endpoint in background - - -Typed actors -^^^^^^^^^^^^ - -Publishing of typed actor methods is done when the typed actor is created with -one of the TypedActor.newInstance(..) methods. Publication is done in the -background here as well i.e. 
it may still be in progress when -TypedActor.newInstance(..) returns. - -**Scala** - -.. code-block:: scala - - import akka.actor.TypedActor - - // create TypedConsumer1 object and activate endpoint(s) in background - val consumer = TypedActor.newInstance(classOf[TypedConsumer1], classOf[TypedConumer1Impl]) - -**Java** - -.. code-block:: java - - import akka.actor.TypedActor; - - // create TypedConsumer1 object and activate endpoint(s) in background - TypedConsumer1 consumer = TypedActor.newInstance(TypedConsumer1.class, TypedConumer1Impl.class); - - -.. _camel-consumers-and-camel-service: - -Consumers and the CamelService ------------------------------- - -Publishing of consumer actors or typed actor methods requires a running -CamelService. The Akka :ref:`microkernel` can start a CamelService automatically -(see :ref:`camel-configuration`). When using Akka in other environments, a -CamelService must be started manually. Applications can do that by calling the -CamelServiceManager.startCamelService method. - -**Scala** - -.. code-block:: scala - - import akka.camel.CamelServiceManager._ - - startCamelService - -**Java** - -.. code-block:: java - - import static akka.camel.CamelServiceManager.*; - - startCamelService(); - -If applications need to wait for a certain number of consumer actors or typed -actor methods to be published they can do so with the -``CamelServiceManager.mandatoryService.awaitEndpointActivation`` method, where -``CamelServiceManager.mandatoryService`` is the current CamelService instance -(or throws an IllegalStateException there's no current CamelService). - -**Scala** - -.. code-block:: scala - - import akka.camel.CamelServiceManager._ - - startCamelService - - // Wait for three conumer endpoints to be activated - mandatoryService.awaitEndpointActivation(3) { - // Start three consumer actors (for example) - // ... - } - - // Communicate with consumer actors via their activated endpoints - // ... - -**Java** - -.. code-block:: java - - import akka.japi.SideEffect; - import static akka.camel.CamelServiceManager.*; - - startCamelService(); - - // Wait for three conumer endpoints to be activated - getMandatoryService().awaitEndpointActivation(3, new SideEffect() { - public void apply() { - // Start three consumer actors (for example) - // ... - } - }); - - // Communicate with consumer actors via their activated endpoints - // ... - -Alternatively, one can also use ``Option[CamelService]`` returned by -``CamelServiceManager.service``. - -**Scala** - -.. code-block:: scala - - import akka.camel.CamelServiceManager._ - - startCamelService - - for(s <- service) s.awaitEndpointActivation(3) { - // ... - } - -**Java** - -.. code-block:: java - - import java.util.concurrent.CountDownLatch; - - import akka.camel.CamelService; - import static akka.camel.CamelServiceManager.*; - - startCamelService(); - - for (CamelService s : getService()) s.awaitEndpointActivation(3, new SideEffect() { - public void apply() { - // ... - } - }); - -:ref:`camel-configuration` additionally describes how a CamelContext, that is -managed by a CamelService, can be cutomized before starting the service. When -the CamelService is no longer needed, it should be stopped. - -**Scala** - -.. code-block:: scala - - import akka.camel.CamelServiceManager._ - - stopCamelService - -**Java** - -.. code-block:: java - - import static akka.camel.CamelServiceManager.*; - - stopCamelService(); - - -.. 
_camel-unpublishing: - -Consumer un-publishing ----------------------- - -Actors (untyped) -^^^^^^^^^^^^^^^^ - -When an actor is stopped, the route from the endpoint to that actor is stopped -as well. For example, stopping an actor that has been previously published at -``http://localhost:8877/camel/test`` will cause a connection failure when trying -to access that endpoint. Stopping the route is done asynchronously; it may be -still in progress after the ``ActorRef.stop`` method returned. - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor._ - - val actor = actorOf(Props[Consumer1]) // create Consumer actor - actor // activate endpoint in background - // ... - actor.stop // deactivate endpoint in background - -**Java** - -.. code-block:: java - - import static akka.actor.Actors.*; - import akka.actor.ActorRef; - - ActorRef actor = actorOf(new Props(Consumer1.class)); // create Consumer actor and activate endpoint in background - // ... - actor.stop(); // deactivate endpoint in background - - -Typed actors -^^^^^^^^^^^^ - -When a typed actor is stopped, routes to @consume annotated methods of this -typed actors are stopped as well. Stopping the routes is done asynchronously; it -may be still in progress after the TypedActor.stop method returned. - -**Scala** - -.. code-block:: scala - - import akka.actor.TypedActor - - // create TypedConsumer1 object and activate endpoint(s) in background - val consumer = TypedActor.newInstance(classOf[TypedConsumer1], classOf[TypedConumer1Impl]) - - // deactivate endpoints in background - TypedActor.stop(consumer) - -**Java** - -.. code-block:: java - - import akka.actor.TypedActor; - - // Create typed consumer actor and activate endpoints in background - TypedConsumer1 consumer = TypedActor.newInstance(TypedConsumer1.class, TypedConumer1Impl.class); - - // Deactivate endpoints in background - TypedActor.stop(consumer); - - -.. _camel-acknowledgements: - -Acknowledgements ----------------- - -Actors (untyped) -^^^^^^^^^^^^^^^^ - -With in-out message exchanges, clients usually know that a message exchange is -done when they receive a reply from a consumer actor. The reply message can be a -Message (or any object which is then internally converted to a Message) on -success, and a Failure message on failure. - -With in-only message exchanges, by default, an exchange is done when a message -is added to the consumer actor's mailbox. Any failure or exception that occurs -during processing of that message by the consumer actor cannot be reported back -to the endpoint in this case. To allow consumer actors to positively or -negatively acknowledge the receipt of a message from an in-only message -exchange, they need to override the ``autoack`` (Scala) or ``isAutoack`` (Java) -method to return false. In this case, consumer actors must reply either with a -special Ack message (positive acknowledgement) or a Failure (negative -acknowledgement). - -**Scala** - -.. code-block:: scala - - import akka.camel.{Ack, Failure} - // ... other imports omitted - - class Consumer3 extends Actor with Consumer { - override def autoack = false - - def endpointUri = "jms:queue:test" - - def receive = { - // ... - self.reply(Ack) // on success - // ... - self.reply(Failure(...)) // on failure - } - } - -**Java** - -.. code-block:: java - - import akka.camel.Failure - import static akka.camel.Ack.ack; - // ... 
other imports omitted - - public class Consumer3 extends UntypedConsumerActor { - - public String getEndpointUri() { - return "jms:queue:test"; - } - - public boolean isAutoack() { - return false; - } - - public void onReceive(Object message) { - // ... - getContext().reply(ack()) // on success - // ... - val e: Exception = ... - getContext().reply(new Failure(e)) // on failure - } - } - - -.. _camel-blocking-exchanges: - -Blocking exchanges ------------------- - -By default, message exchanges between a Camel endpoint and a consumer actor are -non-blocking because, internally, the ! (bang) operator is used to commicate -with the actor. The route to the actor does not block waiting for a reply. The -reply is sent asynchronously (see also :ref:`camel-asynchronous-routing`). -Consumer actors however can be configured to make this interaction blocking. - -**Scala** - -.. code-block:: scala - - class ExampleConsumer extends Actor with Consumer { - override def blocking = true - - def endpointUri = ... - def receive = { - // ... - } - } - -**Java** - -.. code-block:: java - - public class ExampleConsumer extends UntypedConsumerActor { - - public boolean isBlocking() { - return true; - } - - public String getEndpointUri() { - // ... - } - - public void onReceive(Object message) { - // ... - } - } - -In this case, the ``!!`` (bangbang) operator is used internally to communicate -with the actor which blocks a thread until the consumer sends a response or -throws an exception within receive. Although it may decrease scalability, this -setting can simplify error handling (see `this article`_) or allows timeout -configurations on actor-level (see :ref:`camel-timeout`). - -.. _this article: http://krasserm.blogspot.com/2011/02/akka-consumer-actors-new-features-and.html - - -.. _camel-timeout: - -Consumer timeout ----------------- - -Endpoints that support two-way communications need to wait for a response from -an (untyped) actor or typed actor before returning it to the initiating client. -For some endpoint types, timeout values can be defined in an endpoint-specific -way which is described in the documentation of the individual `Camel -components`_. Another option is to configure timeouts on the level of consumer -actors and typed consumer actors. - -.. _Camel components: http://camel.apache.org/components.html - - -Typed actors -^^^^^^^^^^^^ - -For typed actors, timeout values for method calls that return a result can be -set when the typed actor is created. In the following example, the timeout is -set to 20 seconds (default is 5 seconds). - -**Scala** - -.. code-block:: scala - - import akka.actor.TypedActor - - val consumer = TypedActor.newInstance(classOf[TypedConsumer1], classOf[TypedConumer1Impl], 20000 /* 20 seconds */) - -**Java** - -.. code-block:: java - - import akka.actor.TypedActor; - - TypedConsumer1 consumer = TypedActor.newInstance(TypedConsumer1.class, TypedConumer1Impl.class, 20000 /* 20 seconds */); - - -Actors (untyped) -^^^^^^^^^^^^^^^^ - -Two-way communications between a Camel endpoint and an (untyped) actor are -initiated by sending the request message to the actor with the ``!`` (bang) -operator and the actor replies to the endpoint when the response is ready. In -order to support timeouts on actor-level, endpoints need to send the request -message with the ``!!`` (bangbang) operator for which a timeout value is -applicable. This can be achieved by overriding the Consumer.blocking method to -return true. - -**Scala** - -.. 
code-block:: scala - - class Consumer2 extends Actor with Consumer { - self.timeout = 20000 // timeout set to 20 seconds - - override def blocking = true - - def endpointUri = "direct:example" - - def receive = { - // ... - } - } - -**Java** - -.. code-block:: java - - public class Consumer2 extends UntypedConsumerActor { - - public Consumer2() { - getContext().setTimeout(20000); // timeout set to 20 seconds - } - - public String getEndpointUri() { - return "direct:example"; - } - - public boolean isBlocking() { - return true; - } - - public void onReceive(Object message) { - // ... - } - } - -This is a valid approach for all endpoint types that do not "natively" support -asynchronous two-way message exchanges. For all other endpoint types (like -`Jetty`_ endpoints) is it not recommended to switch to blocking mode but rather -to configure timeouts in an endpoint-specific way (see -also :ref:`camel-asynchronous-routing`). - - -Remote consumers ----------------- - -Actors (untyped) -^^^^^^^^^^^^^^^^ - -Publishing of remote consumer actors is always done on the server side, local -proxies are never published. Hence the CamelService must be started on the -remote node. For example, to publish an (untyped) actor on a remote node at -endpoint URI ``jetty:http://localhost:6644/remote-actor-1``, define the -following consumer actor class. - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor - import akka.annotation.consume - import akka.camel.Consumer - - class RemoteActor1 extends Actor with Consumer { - def endpointUri = "jetty:http://localhost:6644/remote-actor-1" - - protected def receive = { - case msg => self.reply("response from remote actor 1") - } - } - -**Java** - -.. code-block:: java - - import akka.camel.UntypedConsumerActor; - - public class RemoteActor1 extends UntypedConsumerActor { - public String getEndpointUri() { - return "jetty:http://localhost:6644/remote-actor-1"; - } - - public void onReceive(Object message) { - getContext().tryReply("response from remote actor 1"); - } - } - -On the remote node, start a `CamelService`_, start a remote server, create the -actor and register it at the remote server. - -.. _CamelService: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/CamelService.scala - -**Scala** - -.. code-block:: scala - - import akka.camel.CamelServiceManager._ - import akka.actor.Actor._ - import akka.actor.ActorRef - - // ... - startCamelService - - val consumer = val consumer = actorOf(Props[RemoteActor1]) - - remote.start("localhost", 7777) - remote.register(consumer) // register and start remote consumer - // ... - -**Java** - -.. code-block:: java - - import akka.camel.CamelServiceManager; - import static akka.actor.Actors.*; - - // ... - CamelServiceManager.startCamelService(); - - ActorRef actor = actorOf(new Props(RemoteActor1.class)); - - remote().start("localhost", 7777); - remote().register(actor); // register and start remote consumer - // ... - -Explicitly starting a CamelService can be omitted when Akka is running in Kernel -mode, for example (see also :ref:`camel-configuration`). - - -Typed actors -^^^^^^^^^^^^ - -Remote typed consumer actors can be registered with one of the -``registerTyped*`` methods on the remote server. The following example registers -the actor with the custom id "123". - -**Scala** - -.. code-block:: scala - - import akka.actor.TypedActor - - // ... 
- val obj = TypedActor.newRemoteInstance( - classOf[SampleRemoteTypedConsumer], - classOf[SampleRemoteTypedConsumerImpl]) - - remote.registerTypedActor("123", obj) - // ... - -**Java** - -.. code-block:: java - - import akka.actor.TypedActor; - - SampleRemoteTypedConsumer obj = (SampleRemoteTypedConsumer)TypedActor.newInstance( - SampleRemoteTypedConsumer.class, - SampleRemoteTypedConsumerImpl.class); - - remote.registerTypedActor("123", obj) - // ... - - -Produce messages -================ - -A minimum pre-requisite for producing messages to Camel endpoints with producer -actors (see below) is an initialized and started CamelContextManager. - -**Scala** - -.. code-block:: scala - - import akka.camel.CamelContextManager - - CamelContextManager.init // optionally takes a CamelContext as argument - CamelContextManager.start // starts the managed CamelContext - -**Java** - -.. code-block:: java - - import akka.camel.CamelContextManager; - - CamelContextManager.init(); // optionally takes a CamelContext as argument - CamelContextManager; // starts the managed CamelContext - -For using producer actors, application may also start a CamelService. This will -not only setup a CamelContextManager behind the scenes but also register -listeners at the actor registry (needed to publish consumer actors). If your -application uses producer actors only and you don't want to have the (very -small) overhead generated by the registry listeners then setting up a -CamelContextManager without starting CamelService is recommended. Otherwise, -just start a CamelService as described for consumer -actors: :ref:`camel-consumers-and-camel-service`. - - -Producer trait --------------- - -Actors (untyped) -^^^^^^^^^^^^^^^^ - -For sending messages to Camel endpoints, actors - -* written in Scala need to mixin the `Producer`_ trait and implement the - endpointUri method. - -* written in Java need to extend the abstract UntypedProducerActor class and - implement the getEndpointUri() method. By extending the UntypedProducerActor - class, untyped actors (Java) inherit the behaviour of the Producer trait. - -.. _Producer: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/Producer.scala - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor - import akka.camel.Producer - - class Producer1 extends Actor with Producer { - def endpointUri = "http://localhost:8080/news" - } - -**Java** - -.. code-block:: java - - import akka.camel.UntypedProducerActor; - - public class Producer1 extends UntypedProducerActor { - public String getEndpointUri() { - return "http://localhost:8080/news"; - } - } - -Producer1 inherits a default implementation of the receive method from the -Producer trait. To customize a producer actor's default behavior it is -recommended to override the Producer.receiveBeforeProduce and -Producer.receiveAfterProduce methods. This is explained later in more detail. -Actors should not override the default Producer.receive method. - -Any message sent to a Producer actor (or UntypedProducerActor) will be sent to -the associated Camel endpoint, in the above example to -``http://localhost:8080/news``. Response messages (if supported by the -configured endpoint) will, by default, be returned to the original sender. The -following example uses the ``?`` operator (Scala) to send a message to a -Producer actor and waits for a response. In Java, the sendRequestReply method is -used. - -**Scala** - -.. 
code-block:: scala - - import akka.actor.Actor._ - import akka.actor.ActorRef - - val producer = actorOf(Props[Producer1]) - val response = (producer ? "akka rocks").get - val body = response.bodyAs[String] - -**Java** - -.. code-block:: java - - import akka.actor.ActorRef; - import static akka.actor.Actors.*; - import akka.camel.Message; - - ActorRef producer = actorOf(new Props(Producer1.class)); - Message response = (Message)producer.sendRequestReply("akka rocks"); - String body = response.getBodyAs(String.class) - -If the message is sent using the ! operator (or the tell method in Java) -then the response message is sent back asynchronously to the original sender. In -the following example, a Sender actor sends a message (a String) to a producer -actor using the ! operator and asynchronously receives a response (of type -Message). - -**Scala** - -.. code-block:: scala - - import akka.actor.{Actor, ActorRef} - import akka.camel.Message - - class Sender(producer: ActorRef) extends Actor { - def receive = { - case request: String => producer ! request - case response: Message => { - /* process response ... */ - } - // ... - } - } - -**Java** - -.. code-block:: java - - // TODO - - -.. _camel-custom-processing: - -Custom Processing -^^^^^^^^^^^^^^^^^ - -Instead of replying to the initial sender, producer actors can implement custom -reponse processing by overriding the receiveAfterProduce method (Scala) or -onReceiveAfterProduce method (Java). In the following example, the reponse -message is forwarded to a target actor instead of being replied to the original -sender. - -**Scala** - -.. code-block:: scala - - import akka.actor.{Actor, ActorRef} - import akka.camel.Producer - - class Producer1(target: ActorRef) extends Actor with Producer { - def endpointUri = "http://localhost:8080/news" - - override protected def receiveAfterProduce = { - // do not reply but forward result to target - case msg => target forward msg - } - } - -**Java** - -.. code-block:: java - - import akka.actor.ActorRef; - import akka.camel.UntypedProducerActor; - - public class Producer1 extends UntypedProducerActor { - private ActorRef target; - - public Producer1(ActorRef target) { - this.target = target; - } - - public String getEndpointUri() { - return "http://localhost:8080/news"; - } - - @Override - public void onReceiveAfterProduce(Object message) { - target.forward((Message)message, getContext()); - } - } - -To create an untyped actor instance with a constructor argument, a factory is -needed (this should be doable without a factory in upcoming Akka versions). - -.. code-block:: java - - import akka.actor.ActorRef; - import akka.actor.UntypedActorFactory; - import akka.actor.UntypedActor; - - public class Producer1Factory implements UntypedActorFactory { - - private ActorRef target; - - public Producer1Factory(ActorRef target) { - this.target = target; - } - - public UntypedActor create() { - return new Producer1(target); - } - } - -The instanitation is done with the Actors.actorOf method and the factory as -argument. - -.. code-block:: java - - import static akka.actor.Actors.*; - import akka.actor.ActorRef; - - ActorRef target = ... - ActorRef producer = actorOf(Props(new Producer1Factory(target))); - producer; - -Before producing messages to endpoints, producer actors can pre-process them by -overriding the receiveBeforeProduce method (Scala) or onReceiveBeforeProduce -method (Java). - -**Scala** - -.. 
code-block:: scala - - import akka.actor.{Actor, ActorRef} - import akka.camel.{Message, Producer} - - class Producer1(target: ActorRef) extends Actor with Producer { - def endpointUri = "http://localhost:8080/news" - - override protected def receiveBeforeProduce = { - case msg: Message => { - // do some pre-processing (e.g. add endpoint-specific message headers) - // ... - - // and return the modified message - msg - } - } - } - -**Java** - -.. code-block:: java - - import akka.actor.ActorRef; - import akka.camel.Message - import akka.camel.UntypedProducerActor; - - public class Producer1 extends UntypedProducerActor { - private ActorRef target; - - public Producer1(ActorRef target) { - this.target = target; - } - - public String getEndpointUri() { - return "http://localhost:8080/news"; - } - - @Override - public Object onReceiveBeforeProduce(Object message) { - Message msg = (Message)message; - // do some pre-processing (e.g. add endpoint-specific message headers) - // ... - - // and return the modified message - return msg - } - } - - -Producer configuration options -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The interaction of producer actors with Camel endpoints can be configured to be -one-way or two-way (by initiating in-only or in-out message exchanges, -respectively). By default, the producer initiates an in-out message exchange -with the endpoint. For initiating an in-only exchange, producer actors - -* written in Scala either have to override the oneway method to return true -* written in Java have to override the isOneway method to return true. - -**Scala** - -.. code-block:: scala - - import akka.camel.Producer - - class Producer2 extends Actor with Producer { - def endpointUri = "jms:queue:test" - override def oneway = true - } - -**Java** - -.. code-block:: java - - import akka.camel.UntypedProducerActor; - - public class SampleUntypedReplyingProducer extends UntypedProducerActor { - public String getEndpointUri() { - return "jms:queue:test"; - } - - @Override - public boolean isOneway() { - return true; - } - } - -Message correlation -^^^^^^^^^^^^^^^^^^^ - -To correlate request with response messages, applications can set the -Message.MessageExchangeId message header. - -**Scala** - -.. code-block:: scala - - import akka.camel.Message - - producer ! Message("bar", Map(Message.MessageExchangeId -> "123")) - -**Java** - -.. code-block:: java - - // TODO - -Responses of type Message or Failure will contain that header as well. When -receiving messages from Camel endpoints this message header is already set (see -:ref:`camel-consume-messages`). - - -Matching responses -^^^^^^^^^^^^^^^^^^ - -The following code snippet shows how to best match responses when sending -messages with the ``?`` operator (Scala) or with the ``ask`` method -(Java). - -**Scala** - -.. code-block:: scala - - val response = (producer ? message).get - - response match { - case Some(Message(body, headers)) => ... - case Some(Failure(exception, headers)) => ... - case _ => ... - } - -**Java** - -.. code-block:: java - - // TODO - - -ProducerTemplate ----------------- - -The `Producer`_ trait (and the abstract UntypedProducerActor class) is a very -convenient way for actors to produce messages to Camel endpoints. (Untyped) -actors and typed actors may also use a Camel `ProducerTemplate`_ for producing -messages to endpoints. For typed actors it's the only way to produce messages to -Camel endpoints. 
- -At the moment, only the Producer trait fully supports asynchronous in-out -message exchanges with Camel endpoints without allocating a thread for the full -duration of the exchange. For example, when using endpoints that support -asynchronous message exchanges (such as Jetty endpoints that internally use -`Jetty's asynchronous HTTP client`_) then usage of the Producer trait is highly -recommended (see also :ref:`camel-asynchronous-routing`). - -.. _Producer: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/Producer.scala -.. _ProducerTemplate: http://camel.apache.org/maven/camel-2.2.0/camel-core/apidocs/index.html -.. _Jetty's asynchronous HTTP client: http://wiki.eclipse.org/Jetty/Tutorial/HttpClient - - -Actors (untyped) -^^^^^^^^^^^^^^^^ - -A managed ProducerTemplate instance can be obtained via -CamelContextManager.mandatoryTemplate. In the following example, an actor uses a -ProducerTemplate to send a one-way message to a ``direct:news`` endpoint. - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor - import akka.camel.CamelContextManager - - class ProducerActor extends Actor { - protected def receive = { - // one-way message exchange with direct:news endpoint - case msg => CamelContextManager.mandatoryTemplate.sendBody("direct:news", msg) - } - } - -**Java** - -.. code-block:: java - - import akka.actor.UntypedActor; - import akka.camel.CamelContextManager; - - public class SampleUntypedActor extends UntypedActor { - public void onReceive(Object msg) { - CamelContextManager.getMandatoryTemplate().sendBody("direct:news", msg); - } - } - -Alternatively, one can also use ``Option[ProducerTemplate]`` returned by -``CamelContextManager.template``. - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor - import akka.camel.CamelContextManager - - class ProducerActor extends Actor { - protected def receive = { - // one-way message exchange with direct:news endpoint - case msg => for(t <- CamelContextManager.template) t.sendBody("direct:news", msg) - } - } - -**Java** - -.. code-block:: java - - import org.apache.camel.ProducerTemplate - - import akka.actor.UntypedActor; - import akka.camel.CamelContextManager; - - public class SampleUntypedActor extends UntypedActor { - public void onReceive(Object msg) { - for (ProducerTemplate t : CamelContextManager.getTemplate()) { - t.sendBody("direct:news", msg); - } - } - } - -For initiating a a two-way message exchange, one of the -``ProducerTemplate.request*`` methods must be used. - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor - import akka.camel.CamelContextManager - - class ProducerActor extends Actor { - protected def receive = { - // two-way message exchange with direct:news endpoint - case msg => self.reply(CamelContextManager.mandatoryTemplate.requestBody("direct:news", msg)) - } - } - -**Java** - -.. code-block:: java - - import akka.actor.UntypedActor; - import akka.camel.CamelContextManager; - - public class SampleUntypedActor extends UntypedActor { - public void onReceive(Object msg) { - getContext().tryReply(CamelContextManager.getMandatoryTemplate().requestBody("direct:news", msg)); - } - } - - -Typed actors -^^^^^^^^^^^^ - -Typed Actors get access to a managed ProducerTemplate in the same way, as shown -in the next example. - -**Scala** - -.. code-block:: scala - - // TODO - -**Java** - -.. 
code-block:: java
-
-  import org.apache.camel.ProducerTemplate;
-
-  import akka.actor.TypedActor;
-  import akka.camel.CamelContextManager;
-
-  public class SampleProducerImpl extends TypedActor implements SampleProducer {
-    public void foo(String msg) {
-      ProducerTemplate template = CamelContextManager.getMandatoryTemplate();
-      template.sendBody("direct:news", msg);
-    }
-  }
-
-
-.. _camel-asynchronous-routing:
-
-Asynchronous routing
-====================
-
-Since Akka 0.10, in-out message exchanges between endpoints and actors are
-designed to be asynchronous. This is the case for both consumer and producer
-actors.
-
-* A consumer endpoint sends request messages to its consumer actor using the ``!``
-  (bang) operator and the actor returns responses with self.reply once they are
-  ready. The sender reference used for the reply is an adapter to Camel's
-  asynchronous routing engine that implements the ActorRef trait.
-
-* A producer actor sends request messages to its endpoint using Camel's
-  asynchronous routing engine. Asynchronous responses are wrapped and added to the
-  producer actor's mailbox for later processing. By default, response messages are
-  returned to the initial sender but this can be overridden by Producer
-  implementations (see also the description of the ``receiveAfterProduce`` method
-  in :ref:`camel-custom-processing`).
-
-However, asynchronous two-way message exchanges, without allocating a thread for
-the full duration of the exchange, cannot be generically supported by Camel's
-asynchronous routing engine alone. This must be supported by the individual
-`Camel components`_ (from which endpoints are created) as well. They must be
-able to suspend any work started for request processing (thereby freeing threads
-to do other work) and resume processing when the response is ready. This is
-currently the case for a `subset of components`_ such as the `Jetty component`_.
-All other Camel components can still be used, of course, but they will cause
-allocation of a thread for the duration of an in-out message exchange. There's
-also a :ref:`camel-async-example` that implements both an asynchronous
-consumer and an asynchronous producer, with the Jetty component.
-
-.. _Camel components: http://camel.apache.org/components.html
-.. _subset of components: http://camel.apache.org/asynchronous-routing-engine.html
-.. _Jetty component: http://camel.apache.org/jetty.html
-
-
-Fault tolerance
-===============
-
-Consumer actors and typed actors can also be managed by supervisors. If a
-consumer is configured to be restarted upon failure, the associated Camel
-endpoint is not restarted. Its behaviour during restart is as follows.
-
-* A one-way (in-only) message exchange will be queued by the consumer and
-  processed once the restart completes.
-
-* A two-way (in-out) message exchange will wait and either succeed after the
-  restart completes or time out when the restart duration exceeds
-  the :ref:`camel-timeout`.
-
-If a consumer is configured to be shut down upon failure, the associated
-endpoint is shut down as well. For details refer to :ref:`camel-unpublishing`.
-
-For examples, tips and tricks on how to implement fault-tolerant consumer and
-producer actors, take a look at these two articles.
-
-* `Akka Consumer Actors: New Features and Best Practices `_
-* `Akka Producer Actors: New Features and Best Practices `_
-
-
-.. _camel-configuration:
-
-CamelService configuration
-==========================
-
-For publishing consumer actors and typed actor methods
-(:ref:`camel-publishing`), applications must start a CamelService.
When starting -Akka in :ref:`microkernel` mode then a CamelService can be started automatically -when camel is added to the enabled-modules list in :ref:`configuration`, for example: - -.. code-block:: none - - akka { - ... - enabled-modules = ["camel"] # Options: ["remote", "camel", "http"] - ... - } - -Applications that do not use the Akka Kernel, such as standalone applications -for example, need to start a CamelService manually, as explained in the -following subsections.When starting a CamelService manually, settings in -:ref:`configuration` are ignored. - - -Standalone applications ------------------------ - -Standalone application should create and start a CamelService in the following way. - -**Scala** - -.. code-block:: scala - - import akka.camel.CamelServiceManager._ - - startCamelService - -**Java** - -.. code-block:: java - - import static akka.camel.CamelServiceManager.*; - - startCamelService(); - -Internally, a CamelService uses the CamelContextManager singleton to manage a -CamelContext. A CamelContext manages the routes from endpoints to consumer -actors and typed actors. These routes are added and removed at runtime (when -(untyped) consumer actors and typed consumer actors are started and stopped). -Applications may additionally want to add their own custom routes or modify the -CamelContext in some other way. This can be done by initializing the -CamelContextManager manually and making modifications to CamelContext **before** -the CamelService is started. - -**Scala** - -.. code-block:: scala - - import org.apache.camel.builder.RouteBuilder - - import akka.camel.CamelContextManager - import akka.camel.CamelServiceManager._ - - CamelContextManager.init - - // add a custom route to the managed CamelContext - CamelContextManager.mandatoryContext.addRoutes(new CustomRouteBuilder) - - startCamelService - - // an application-specific route builder - class CustomRouteBuilder extends RouteBuilder { - def configure { - // ... - } - } - -**Java** - -.. code-block:: java - - import org.apache.camel.builder.RouteBuilder; - - import akka.camel.CamelContextManager; - import static akka.camel.CamelServiceManager.*; - - CamelContextManager.init(); - - // add a custom route to the managed CamelContext - CamelContextManager.getMandatoryContext().addRoutes(new CustomRouteBuilder()); - - startCamelService(); - - // an application-specific route builder - private static class CustomRouteBuilder extends RouteBuilder { - public void configure() { - // ... - } - } - - -Applications may even provide their own CamelContext instance as argument to the -init method call as shown in the following snippet. Here, a DefaultCamelContext -is created using a Spring application context as `registry`_. - -.. _registry: http://camel.apache.org/registry.html - - -**Scala** - -.. code-block:: scala - - import org.apache.camel.impl.DefaultCamelContext - import org.apache.camel.spring.spi.ApplicationContextRegistry - import org.springframework.context.support.ClassPathXmlApplicationContext - - import akka.camel.CamelContextManager - import akka.camel.CamelServiceManager._ - - // create a custom Camel registry backed up by a Spring application context - val context = new ClassPathXmlApplicationContext("/context.xml") - val registry = new ApplicationContextRegistry(context) - - // initialize CamelContextManager with a DefaultCamelContext using the custom registry - CamelContextManager.init(new DefaultCamelContext(registry)) - - // ... - - startCamelService - -**Java** - -.. 
code-block:: java - - import org.apache.camel.impl.DefaultCamelContext - import org.apache.camel.spi.Registry; - import org.apache.camel.spring.spi.ApplicationContextRegistry; - - import org.springframework.context.ApplicationContext; - import org.springframework.context.support.ClassPathXmlApplicationContext; - - import akka.camel.CamelContextManager; - import static akka.camel.CamelServiceManager.*; - - // create a custom Camel registry backed up by a Spring application context - ApplicationContext context = new ClassPathXmlApplicationContext("/context.xml"); - Registry registry = new ApplicationContextRegistry(context); - - // initialize CamelContextManager with a DefaultCamelContext using the custom registry - CamelContextManager.init(new DefaultCamelContext(registry)); - - // ... - - startCamelService(); - - -.. _camel-spring-applications: - -Standalone Spring applications ------------------------------- - -A better approach to configure a Spring application context as registry for the -CamelContext is to use `Camel's Spring support`_. Furthermore, -the :ref:`spring-module` module additionally supports a element -for creating and starting a CamelService. An optional reference to a custom -CamelContext can be defined for as well. Here's an example. - -.. _Camel's Spring support: http://camel.apache.org/spring.html - -.. code-block:: xml - - - - - - - - - - - - - - - - - -Creating a CamelContext this way automatically adds the defining Spring -application context as registry to that CamelContext. The CamelService is -started when the application context is started and stopped when the application -context is closed. A simple usage example is shown in the following snippet. - -**Scala** - -.. code-block:: scala - - import org.springframework.context.support.ClassPathXmlApplicationContext - import akka.camel.CamelContextManager - - // Create and start application context (start CamelService) - val appctx = new ClassPathXmlApplicationContext("/context.xml") - - // Access to CamelContext (SpringCamelContext) - val ctx = CamelContextManager.mandatoryContext - // Access to ProducerTemplate of that CamelContext - val tpl = CamelContextManager.mandatoryTemplate - - // use ctx and tpl ... - - // Close application context (stop CamelService) - appctx.close - -**Java** - -.. code-block:: java - - // TODO - - -If the CamelService doesn't reference a custom CamelContext then a -DefaultCamelContext is created (and accessible via the CamelContextManager). - -.. code-block:: xml - - - - - - - - - -Kernel mode ------------ - -For classes that are loaded by the Kernel or the Initializer, starting the -CamelService can be omitted, as discussed in the previous section. Since these -classes are loaded and instantiated before the CamelService is started (by -Akka), applications can make modifications to a CamelContext here as well (and -even provide their own CamelContext). Assuming there's a boot class -sample.camel.Boot configured in :ref:`configuration`. - -.. code-block:: none - - akka { - ... - boot = ["sample.camel.Boot"] - ... - } - -Modifications to the CamelContext can be done like in the following snippet. - -**Scala** - -.. code-block:: scala - - package sample.camel - - import org.apache.camel.builder.RouteBuilder - - import akka.camel.CamelContextManager - - class Boot { - CamelContextManager.init - - // Customize CamelContext with application-specific routes - CamelContextManager.mandatoryContext.addRoutes(new CustomRouteBuilder) - - // No need to start CamelService here. 
It will be started - // when this classes has been loaded and instantiated. - } - - class CustomRouteBuilder extends RouteBuilder { - def configure { - // ... - } - } - -**Java** - -.. code-block:: java - - // TODO - - -Custom Camel routes -=================== - -In all the examples so far, routes to consumer actors have been automatically -constructed by akka-camel, when the actor was started. Although the default -route construction templates, used by akka-camel internally, are sufficient for -most use cases, some applications may require more specialized routes to actors. -The akka-camel module provides two mechanisms for customizing routes to actors, -which will be explained in this section. These are - -* Usage of :ref:`camel-components` to access (untyped) actor and actors. - Any Camel route can use these components to access Akka actors. - -* :ref:`camel-intercepting-route-construction` to (untyped) actor and actors. - Default routes to consumer actors are extended using predefined extension - points. - - -.. _camel-components: - -Akka Camel components ---------------------- - -Akka actors can be access from Camel routes using the `actor`_ and -`typed-actor`_ Camel components, respectively. These components can be used to -access any Akka actor (not only consumer actors) from Camel routes, as described -in the following sections. - -.. _actor: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala -.. _typed-actor: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/component/TypedActorComponent.scala - - -Access to actors ----------------- - -To access (untyped) actors from custom Camel routes, the `actor`_ Camel -component should be used. It fully supports Camel's `asynchronous routing -engine`_. - -.. _actor: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala -.. _asynchronous routing engine: http://camel.apache.org/asynchronous-routing-engine.html - -This component accepts the following enpoint URI formats: - -* ``actor:[?]`` -* ``actor:id:[][?]`` -* ``actor:uuid:[][?]`` - -where ```` and ```` refer to ``actorRef.id`` and the -String-representation of ``actorRef.uuid``, respectively. The ```` are -name-value pairs separated by ``&`` (i.e. ``name1=value1&name2=value2&...``). - - -URI options -^^^^^^^^^^^ - -The following URI options are supported: - -+----------+---------+---------+-------------------------------------------+ -| Name | Type | Default | Description | -+==========+=========+=========+===========================================+ -| blocking | Boolean | false | If set to true, in-out message exchanges | -| | | | with the target actor will be made with | -| | | | the ``!!`` operator, otherwise with the | -| | | | ``!`` operator. | -| | | | | -| | | | See also :ref:`camel-timeout`. | -+----------+---------+---------+-------------------------------------------+ -| autoack | Boolean | true | If set to true, in-only message exchanges | -| | | | are auto-acknowledged when the message is | -| | | | added to the actor's mailbox. If set to | -| | | | false, actors must acknowledge the | -| | | | receipt of the message. | -| | | | | -| | | | See also :ref:`camel-acknowledgements`. 
| -+----------+---------+---------+-------------------------------------------+ - -Here's an actor endpoint URI example containing an actor uuid:: - - actor:uuid:12345678?blocking=true - -In actor endpoint URIs that contain id: or uuid:, an actor identifier (id or -uuid) is optional. In this case, the in-message of an exchange produced to an -actor endpoint must contain a message header with name CamelActorIdentifier -(which is defined by the ActorComponent.ActorIdentifier field) and a value that -is the target actor's identifier. On the other hand, if the URI contains an -actor identifier, it can be seen as a default actor identifier that can be -overridden by messages containing a CamelActorIdentifier header. - - -Message headers -^^^^^^^^^^^^^^^ - -+----------------------+--------+-------------------------------------------+ -| Name | Type | Description | -+======================+========+===========================================+ -| CamelActorIdentifier | String | Contains the identifier (id or uuid) of | -| | | the actor to route the message to. The | -| | | identifier is interpreted as actor id if | -| | | the URI contains id:, the identifier is | -| | | interpreted as uuid id the URI contains | -| | | uuid:. A uuid value may also be of type | -| | | Uuid (not only String). The header name | -| | | is defined by the | -| | | ActorComponent.ActorIdentifier field. | -+----------------------+--------+-------------------------------------------+ - -Here's another actor endpoint URI example that doesn't define an actor uuid. In -this case the target actor uuid must be defined by the CamelActorIdentifier -message header:: - - actor:uuid: - -In the following example, a custom route to an actor is created, using the -actor's uuid (i.e. actorRef.uuid). The route starts from a `Jetty`_ endpoint and -ends at the target actor. - - -**Scala** - -.. code-block:: scala - - import org.apache.camel.builder.RouteBuilder - - import akka.actor._ - import akka.actor.Actor - import akka.actor.Actor._ - import akka.camel.{Message, CamelContextManager, CamelServiceManager} - - object CustomRouteExample extends Application { - val target = actorOf(Props[CustomRouteTarget]) - - CamelServiceManager.startCamelService - CamelContextManager.mandatoryContext.addRoutes(new CustomRouteBuilder(target.uuid)) - } - - class CustomRouteTarget extends Actor { - def receive = { - case msg: Message => self.reply("Hello %s" format msg.bodyAs[String]) - } - } - - class CustomRouteBuilder(uuid: Uuid) extends RouteBuilder { - def configure { - val actorUri = "actor:uuid:%s" format uuid - from("jetty:http://localhost:8877/camel/custom").to(actorUri) - } - } - - -**Java** - -.. code-block:: java - - import com.eaio.uuid.UUID; - - import org.apache.camel.builder.RouteBuilder; - import static akka.actor.Actors.*; - import akka.actor.ActorRef; - import akka.actor.UntypedActor; - import akka.camel.CamelServiceManager; - import akka.camel.CamelContextManager; - import akka.camel.Message; - - public class CustomRouteExample { - public static void main(String... 
args) throws Exception { - ActorRef target = actorOf(new Props(CustomRouteTarget.class)); - CamelServiceManager.startCamelService(); - CamelContextManager.getMandatoryContext().addRoutes(new CustomRouteBuilder(target.getUuid())); - } - } - - public class CustomRouteTarget extends UntypedActor { - public void onReceive(Object message) { - Message msg = (Message) message; - String body = msg.getBodyAs(String.class); - getContext().tryReply(String.format("Hello %s", body)); - } - } - - public class CustomRouteBuilder extends RouteBuilder { - private UUID uuid; - - public CustomRouteBuilder(UUID uuid) { - this.uuid = uuid; - } - - public void configure() { - String actorUri = String.format("actor:uuid:%s", uuid); - from("jetty:http://localhost:8877/camel/custom").to(actorUri); - } - } - -When the example is started, messages POSTed to -``http://localhost:8877/camel/custom`` are routed to the target actor. - - -Access to typed actors ----------------------- - -To access typed actor methods from custom Camel routes, the `typed-actor`_ Camel -component should be used. It is a specialization of the Camel `bean`_ component. -Applications should use the interface (endpoint URI syntax and options) as -described in the bean component documentation but with the typed-actor schema. -Typed Actors must be added to a `Camel registry`_ for being accessible by the -typed-actor component. - -.. _typed-actor: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/component/TypedActorComponent.scala -.. _bean: http://camel.apache.org/bean.html -.. _Camel registry: http://camel.apache.org/registry.html - - -.. _camel-typed-actors-using-spring: - -Using Spring -^^^^^^^^^^^^ - -The following example shows how to access typed actors in a Spring application -context. For adding typed actors to the application context and for starting -:ref:`camel-spring-applications` the :ref:`spring-module` module is used in the -following example. It offers a ```` element to define typed actor -factory beans and a ```` element to create and start a -CamelService. - -.. code-block:: xml - - - - - - - - - - - - - - - - - -SampleTypedActor is the typed actor interface and SampleTypedActorImpl in the -typed actor implementation class. - -**Scala** - -.. code-block:: scala - - package sample - - import akka.actor.TypedActor - - trait SampleTypedActor { - def foo(s: String): String - } - - class SampleTypedActorImpl extends TypedActor with SampleTypedActor { - def foo(s: String) = "hello %s" format s - } - -**Java** - -.. code-block:: java - - package sample; - - import akka.actor.TypedActor; - - public interface SampleTypedActor { - public String foo(String s); - } - - public class SampleTypedActorImpl extends TypedActor implements SampleTypedActor { - - public String foo(String s) { - return "hello " + s; - } - } - -The SampleRouteBuilder defines a custom route from the direct:test endpoint to -the sample typed actor using a typed-actor endpoint URI. - -**Scala** - -.. code-block:: scala - - package sample - - import org.apache.camel.builder.RouteBuilder - - class SampleRouteBuilder extends RouteBuilder { - def configure = { - // route to typed actor - from("direct:test").to("typed-actor:sample?method=foo") - } - } - -**Java** - -.. 
code-block:: java - - package sample; - - import org.apache.camel.builder.RouteBuilder; - - public class SampleRouteBuilder extends RouteBuilder { - public void configure() { - // route to typed actor - from("direct:test").to("typed-actor:sample?method=foo"); - } - } - -The typed-actor endpoint URI syntax is::: - - typed-actor:?method= - -where ```` is the id of the bean in the Spring application context and -```` is the name of the typed actor method to invoke. - -Usage of the custom route for sending a message to the typed actor is shown in -the following snippet. - -**Scala** - -.. code-block:: scala - - package sample - - import org.springframework.context.support.ClassPathXmlApplicationContext - import akka.camel.CamelContextManager - - // load Spring application context (starts CamelService) - val appctx = new ClassPathXmlApplicationContext("/context-standalone.xml") - - // access 'sample' typed actor via custom route - assert("hello akka" == CamelContextManager.mandatoryTemplate.requestBody("direct:test", "akka")) - - // close Spring application context (stops CamelService) - appctx.close - -**Java** - -.. code-block:: java - - package sample; - - import org.springframework.context.support.ClassPathXmlApplicationContext; - import akka.camel.CamelContextManager; - - // load Spring application context - ClassPathXmlApplicationContext appctx = new ClassPathXmlApplicationContext("/context-standalone.xml"); - - // access 'externally' registered typed actors with typed-actor component - assert("hello akka" == CamelContextManager.getMandatoryTemplate().requestBody("direct:test", "akka")); - - // close Spring application context (stops CamelService) - appctx.close(); - -The application uses a Camel `producer template`_ to access the typed actor via -the ``direct:test`` endpoint. - -.. _producer template: http://camel.apache.org/producertemplate.html - - -Without Spring -^^^^^^^^^^^^^^ - -Usage of :ref:`spring-module` for adding typed actors to the Camel registry and -starting a CamelService is optional. Setting up a Spring-less application for -accessing typed actors is shown in the next example. - -**Scala** - -.. code-block:: scala - - package sample - - import org.apache.camel.impl.{DefaultCamelContext, SimpleRegistry} - import akka.actor.TypedActor - import akka.camel.CamelContextManager - import akka.camel.CamelServiceManager._ - - // register typed actor - val registry = new SimpleRegistry - registry.put("sample", TypedActor.newInstance(classOf[SampleTypedActor], classOf[SampleTypedActorImpl])) - - // customize CamelContext - CamelContextManager.init(new DefaultCamelContext(registry)) - CamelContextManager.mandatoryContext.addRoutes(new SampleRouteBuilder) - - startCamelService - - // access 'sample' typed actor via custom route - assert("hello akka" == CamelContextManager.mandatoryTemplate.requestBody("direct:test", "akka")) - - stopCamelService - -**Java** - -.. 
code-block:: java - - package sample; - - // register typed actor - SimpleRegistry registry = new SimpleRegistry(); - registry.put("sample", TypedActor.newInstance(SampleTypedActor.class, SampleTypedActorImpl.class)); - - // customize CamelContext - CamelContextManager.init(new DefaultCamelContext(registry)); - CamelContextManager.getMandatoryContext().addRoutes(new SampleRouteBuilder()); - - startCamelService(); - - // access 'sample' typed actor via custom route - assert("hello akka" == CamelContextManager.getMandatoryTemplate().requestBody("direct:test", "akka")); - - stopCamelService(); - -Here, `SimpleRegistry`_, a java.util.Map based registry, is used to register -typed actors. The CamelService is started and stopped programmatically. - -.. _SimpleRegistry: https://svn.apache.org/repos/asf/camel/trunk/camel-core/src/main/java/org/apache/camel/impl/SimpleRegistry.java - - -.. _camel-intercepting-route-construction: - -Intercepting route construction -------------------------------- - -The previous section, :ref:`camel-components`, explained how to setup a route to -an (untyped) actor or typed actor manually. It was the application's -responsibility to define the route and add it to the current CamelContext. This -section explains a more conventient way to define custom routes: akka-camel is -still setting up the routes to consumer actors (and adds these routes to the -current CamelContext) but applications can define extensions to these routes. -Extensions can be defined with Camel's `Java DSL`_ or `Scala DSL`_. For example, -an extension could be a custom error handler that redelivers messages from an -endpoint to an actor's bounded mailbox when the mailbox was full. - -.. _Java DSL: http://camel.apache.org/dsl.html -.. _Scala DSL: http://camel.apache.org/scala-dsl.html - -The following examples demonstrate how to extend a route to a consumer actor for -handling exceptions thrown by that actor. To simplify the example, we configure -:ref:`camel-blocking-exchanges` which reports any exception, that is thrown by -receive, directly back to the Camel route. One could also report exceptions -asynchronously using a Failure reply (see also `this article`__) but we'll do it -differently here. - -__ http://krasserm.blogspot.com/2011/02/akka-consumer-actors-new-features-and.html - - -Actors (untyped) -^^^^^^^^^^^^^^^^ - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor - import akka.camel.Consumer - - import org.apache.camel.builder.Builder - import org.apache.camel.model.RouteDefinition - - class ErrorHandlingConsumer extends Actor with Consumer { - def endpointUri = "direct:error-handler-test" - - // Needed to propagate exception back to caller - override def blocking = true - - onRouteDefinition {rd: RouteDefinition => - // Catch any exception and handle it by returning the exception message as response - rd.onException(classOf[Exception]).handled(true).transform(Builder.exceptionMessage).end - } - - protected def receive = { - case msg: Message => throw new Exception("error: %s" format msg.body) - } - } - -**Java** - -.. 
code-block:: java - - import akka.camel.UntypedConsumerActor; - - import org.apache.camel.builder.Builder; - import org.apache.camel.model.ProcessorDefinition; - import org.apache.camel.model.RouteDefinition; - - public class SampleErrorHandlingConsumer extends UntypedConsumerActor { - - public String getEndpointUri() { - return "direct:error-handler-test"; - } - - // Needed to propagate exception back to caller - public boolean isBlocking() { - return true; - } - - public void preStart() { - onRouteDefinition(new RouteDefinitionHandler() { - public ProcessorDefinition onRouteDefinition(RouteDefinition rd) { - // Catch any exception and handle it by returning the exception message as response - return rd.onException(Exception.class).handled(true).transform(Builder.exceptionMessage()).end(); - } - }); - } - - public void onReceive(Object message) throws Exception { - Message msg = (Message)message; - String body = msg.getBodyAs(String.class); - throw new Exception(String.format("error: %s", body)); - } - - } - - - -For (untyped) actors, consumer route extensions are defined by calling the -onRouteDefinition method with a route definition handler. In Scala, this is a -function of type ``RouteDefinition => ProcessorDefinition[_]``, in Java it is an -instance of ``RouteDefinitionHandler`` which is defined as follows. - -.. code-block:: scala - - package akka.camel - - import org.apache.camel.model.RouteDefinition - import org.apache.camel.model.ProcessorDefinition - - trait RouteDefinitionHandler { - def onRouteDefinition(rd: RouteDefinition): ProcessorDefinition[_] - } - -The akka-camel module creates a RouteDefinition instance by calling -from(endpointUri) on a Camel RouteBuilder (where endpointUri is the endpoint URI -of the consumer actor) and passes that instance as argument to the route -definition handler \*). The route definition handler then extends the route and -returns a ProcessorDefinition (in the above example, the ProcessorDefinition -returned by the end method. See the `org.apache.camel.model`__ package for -details). After executing the route definition handler, akka-camel finally calls -a to(actor:uuid:actorUuid) on the returned ProcessorDefinition to complete the -route to the comsumer actor (where actorUuid is the uuid of the consumer actor). - -\*) Before passing the RouteDefinition instance to the route definition handler, -akka-camel may make some further modifications to it. - -__ https://svn.apache.org/repos/asf/camel/trunk/camel-core/src/main/java/org/apache/camel/model/ - - -Typed actors -^^^^^^^^^^^^ - -For typed consumer actors to define a route definition handler, they must -provide a RouteDefinitionHandler implementation class with the @consume -annotation. The implementation class must have a no-arg constructor. Here's an -example (in Java). - -.. code-block:: java - - import org.apache.camel.builder.Builder; - import org.apache.camel.model.ProcessorDefinition; - import org.apache.camel.model.RouteDefinition; - - public class SampleRouteDefinitionHandler implements RouteDefinitionHandler { - public ProcessorDefinition onRouteDefinition(RouteDefinition rd) { - return rd.onException(Exception.class).handled(true).transform(Builder.exceptionMessage()).end(); - } - } - -It can be used as follows. - -**Scala** - -.. code-block:: scala - - trait TestTypedConsumer { - @consume(value="direct:error-handler-test", routeDefinitionHandler=classOf[SampleRouteDefinitionHandler]) - def foo(s: String): String - } - - // implementation class omitted - -**Java** - -.. 
code-block:: java - - public interface SampleErrorHandlingTypedConsumer { - - @consume(value="direct:error-handler-test", routeDefinitionHandler=SampleRouteDefinitionHandler.class) - String foo(String s); - - } - - // implementation class omitted - - -.. _camel-examples: - -Examples -======== - -For all features described so far, there's running sample code in -`akka-sample-camel`_. The examples in `sample.camel.Boot`_ are started during -Kernel startup because this class has been added to the boot :ref:`configuration`. - -.. _akka-sample-camel: http://github.com/jboner/akka/tree/master/akka-samples/akka-sample-camel/ -.. _sample.camel.Boot: http://github.com/jboner/akka/blob/master/akka-samples/akka-sample-camel/src/main/scala/sample/camel/Boot.scala - -.. code-block:: none - - akka { - ... - boot = ["sample.camel.Boot", ...] - ... - } - -If you don't want to have these examples started during Kernel startup, delete -it from the :ref:`configuration`. Other examples are standalone applications (i.e. classes with a -main method) that can be started from `sbt`_. - -.. _sbt: http://code.google.com/p/simple-build-tool/ - -.. code-block:: none - - $ sbt - [info] Building project akka 2.0-SNAPSHOT against Scala 2.9.0 - [info] using AkkaModulesParentProject with sbt 0.7.7 and Scala 2.7.7 - > project akka-sample-camel - Set current project to akka-sample-camel 2.0-SNAPSHOT - > run - ... - Multiple main classes detected, select one to run: - - [1] sample.camel.ClientApplication - [2] sample.camel.ServerApplication - [3] sample.camel.StandaloneSpringApplication - [4] sample.camel.StandaloneApplication - [5] sample.camel.StandaloneFileApplication - [6] sample.camel.StandaloneJmsApplication - - -Some of the examples in `akka-sample-camel`_ are described in more detail in the -following subsections. - - -.. _camel-async-example: - -Asynchronous routing and transformation example ------------------------------------------------ - -This example demonstrates how to implement consumer and producer actors that -support :ref:`camel-asynchronous-routing` with their Camel endpoints. The sample -application transforms the content of the Akka homepage, http://akka.io, by -replacing every occurrence of *Akka* with *AKKA*. After starting -the :ref:`microkernel`, direct the browser to http://localhost:8875 and the -transformed Akka homepage should be displayed. Please note that this example -will probably not work if you're behind an HTTP proxy. - -The following figure gives an overview how the example actors interact with -external systems and with each other. A browser sends a GET request to -http://localhost:8875 which is the published endpoint of the ``HttpConsumer`` -actor. The ``HttpConsumer`` actor forwards the requests to the ``HttpProducer`` -actor which retrieves the Akka homepage from http://akka.io. The retrieved HTML -is then forwarded to the ``HttpTransformer`` actor which replaces all occurences -of *Akka* with *AKKA*. The transformation result is sent back the HttpConsumer -which finally returns it to the browser. - -.. image:: camel-async-interact.png - -Implementing the example actor classes and wiring them together is rather easy -as shown in the following snippet (see also `sample.camel.Boot`_). - -.. 
code-block:: scala - - import org.apache.camel.Exchange - import akka.actor.Actor._ - import akka.actor.{Actor, ActorRef} - import akka.camel.{Producer, Message, Consumer} - - class HttpConsumer(producer: ActorRef) extends Actor with Consumer { - def endpointUri = "jetty:http://0.0.0.0:8875/" - - protected def receive = { - case msg => producer forward msg - } - } - - class HttpProducer(transformer: ActorRef) extends Actor with Producer { - def endpointUri = "jetty://http://akka.io/?bridgeEndpoint=true" - - override protected def receiveBeforeProduce = { - // only keep Exchange.HTTP_PATH message header (which needed by bridge endpoint) - case msg: Message => msg.setHeaders(msg.headers(Set(Exchange.HTTP_PATH))) - } - - override protected def receiveAfterProduce = { - // do not reply but forward result to transformer - case msg => transformer forward msg - } - } - - class HttpTransformer extends Actor { - protected def receive = { - case msg: Message => self.reply(msg.transformBody {body: String => body replaceAll ("Akka ", "AKKA ")}) - case msg: Failure => self.reply(msg) - } - } - - // Wire and start the example actors - val httpTransformer = actorOf(Props(new HttpTransformer)) - val httpProducer = actorOf(Props(new HttpProducer(httpTransformer))) - val httpConsumer = actorOf(Props(new HttpConsumer(httpProducer))) - -The `jetty endpoints`_ of HttpConsumer and HttpProducer support asynchronous -in-out message exchanges and do not allocate threads for the full duration of -the exchange. This is achieved by using `Jetty continuations`_ on the -consumer-side and by using `Jetty's asynchronous HTTP client`_ on the producer -side. The following high-level sequence diagram illustrates that. - -.. _jetty endpoints: http://camel.apache.org/jetty.html -.. _Jetty continuations: http://wiki.eclipse.org/Jetty/Feature/Continuations -.. _Jetty's asynchronous HTTP client: http://wiki.eclipse.org/Jetty/Tutorial/HttpClient - -.. image:: camel-async-sequence.png - - -Custom Camel route example --------------------------- - -This section also demonstrates the combined usage of a ``Producer`` and a -``Consumer`` actor as well as the inclusion of a custom Camel route. The -following figure gives an overview. - -.. image:: camel-custom-route.png - -* A consumer actor receives a message from an HTTP client - -* It forwards the message to another actor that transforms the message (encloses - the original message into hyphens) - -* The transformer actor forwards the transformed message to a producer actor - -* The producer actor sends the message to a custom Camel route beginning at the - ``direct:welcome`` endpoint - -* A processor (transformer) in the custom Camel route prepends "Welcome" to the - original message and creates a result message - -* The producer actor sends the result back to the consumer actor which returns - it to the HTTP client - - -The example is part of `sample.camel.Boot`_. The consumer, transformer and -producer actor implementations are as follows. - -.. 
code-block:: scala - - package sample.camel - - import akka.actor.{Actor, ActorRef} - import akka.camel.{Message, Consumer} - - class Consumer3(transformer: ActorRef) extends Actor with Consumer { - def endpointUri = "jetty:http://0.0.0.0:8877/camel/welcome" - - def receive = { - // Forward a string representation of the message body to transformer - case msg: Message => transformer.forward(msg.setBodyAs[String]) - } - } - - class Transformer(producer: ActorRef) extends Actor { - protected def receive = { - // example: transform message body "foo" to "- foo -" and forward result to producer - case msg: Message => producer.forward(msg.transformBody((body: String) => "- %s -" format body)) - } - } - - class Producer1 extends Actor with Producer { - def endpointUri = "direct:welcome" - } - -The producer actor knows where to reply the message to because the consumer and -transformer actors have forwarded the original sender reference as well. The -application configuration and the route starting from direct:welcome are as -follows. - -.. code-block:: scala - - package sample.camel - - import org.apache.camel.builder.RouteBuilder - import org.apache.camel.{Exchange, Processor} - - import akka.actor.Actor._ - import akka.camel.CamelContextManager - - class Boot { - CamelContextManager.init() - CamelContextManager.mandatoryContext.addRoutes(new CustomRouteBuilder) - - val producer = actorOf(Props[Producer1]) - val mediator = actorOf(Props(new Transformer(producer))) - val consumer = actorOf(Props(new Consumer3(mediator))) - } - - class CustomRouteBuilder extends RouteBuilder { - def configure { - from("direct:welcome").process(new Processor() { - def process(exchange: Exchange) { - // Create a 'welcome' message from the input message - exchange.getOut.setBody("Welcome %s" format exchange.getIn.getBody) - } - }) - } - } - -To run the example, start the :ref:`microkernel` and POST a message to -``http://localhost:8877/camel/welcome``. - -.. code-block:: none - - curl -H "Content-Type: text/plain" -d "Anke" http://localhost:8877/camel/welcome - -The response should be: - -.. code-block:: none - - Welcome - Anke - - - -Publish-subcribe example ------------------------- - -JMS -^^^ - -This section demonstrates how akka-camel can be used to implement -publish/subscribe for actors. The following figure sketches an example for -JMS-based publish/subscribe. - -.. image:: camel-pubsub.png - -A consumer actor receives a message from an HTTP client. It sends the message to -a JMS producer actor (publisher). The JMS producer actor publishes the message -to a JMS topic. Two other actors that subscribed to that topic both receive the -message. The actor classes used in this example are shown in the following -snippet. - -.. code-block:: scala - - package sample.camel - - import akka.actor.{Actor, ActorRef} - import akka.camel.{Producer, Message, Consumer} - - class Subscriber(name:String, uri: String) extends Actor with Consumer { - def endpointUri = uri - - protected def receive = { - case msg: Message => println("%s received: %s" format (name, msg.body)) - } - } - - class Publisher(name: String, uri: String) extends Actor with Producer { - self.id = name - - def endpointUri = uri - - // one-way communication with JMS - override def oneway = true - } - - class PublisherBridge(uri: String, publisher: ActorRef) extends Actor with Consumer { - def endpointUri = uri - - protected def receive = { - case msg: Message => { - publisher ! 
msg.bodyAs[String] - self.reply("message published") - } - } - } - -Wiring these actors to implement the above example is as simple as - -.. code-block:: scala - - package sample.camel - - import org.apache.camel.impl.DefaultCamelContext - import org.apache.camel.spring.spi.ApplicationContextRegistry - import org.springframework.context.support.ClassPathXmlApplicationContext - - import akka.actor.Actor._ - import akka.camel.CamelContextManager - - class Boot { - // Create CamelContext with Spring-based registry and custom route builder - val context = new ClassPathXmlApplicationContext("/context-jms.xml", getClass) - val registry = new ApplicationContextRegistry(context) - CamelContextManager.init(new DefaultCamelContext(registry)) - - // Setup publish/subscribe example - val jmsUri = "jms:topic:test" - val jmsSubscriber1 = actorOf(Props(new Subscriber("jms-subscriber-1", jmsUri))) - val jmsSubscriber2 = actorOf(Props(new Subscriber("jms-subscriber-2", jmsUri))) - val jmsPublisher = actorOf(Props(new Publisher("jms-publisher", jmsUri))) - - val jmsPublisherBridge = actorOf(Props(new PublisherBridge("jetty:http://0.0.0.0:8877/camel/pub/jms", jmsPublisher))) - } - -To publish messages to subscribers one could of course also use the JMS API -directly; there's no need to do that over a JMS producer actor as in this -example. For the example to work, Camel's `jms`_ component needs to be -configured with a JMS connection factory which is done in a Spring application -context XML file (context-jms.xml). - -.. _jms: http://camel.apache.org/jms.html - -.. code-block:: xml - - - - - - - - - - - - - - - - - - - - - - - - - -To run the example, start the :ref:`microkernel` and POST a -message to ``http://localhost:8877/camel/pub/jms``. - -.. code-block:: none - - curl -H "Content-Type: text/plain" -d "Happy hAkking" http://localhost:8877/camel/pub/jms - -The HTTP response body should be - -.. code-block:: none - - message published - -On the console, where you started the Akka Kernel, you should see something like - -.. code-block:: none - - ... - INF [20100622-11:49:57.688] camel: jms-subscriber-2 received: Happy hAkking - INF [20100622-11:49:57.688] camel: jms-subscriber-1 received: Happy hAkking - - -Cometd -^^^^^^ - -Publish/subscribe with `CometD`_ is equally easy using `Camel's cometd -component`_. - -.. _CometD: http://cometd.org/ -.. _Camel's cometd component: http://camel.apache.org/cometd.html - -.. image:: camel-pubsub2.png - -All actor classes from the JMS example can re-used, only the endpoint URIs need -to be changed. - -.. code-block:: scala - - package sample.camel - - import org.apache.camel.impl.DefaultCamelContext - import org.apache.camel.spring.spi.ApplicationContextRegistry - import org.springframework.context.support.ClassPathXmlApplicationContext - - import akka.actor.Actor._ - import akka.camel.CamelContextManager - - class Boot { - // ... - - // Setup publish/subscribe example - val cometdUri = "cometd://localhost:8111/test/abc?resourceBase=target" - val cometdSubscriber = actorOf(Props(new Subscriber("cometd-subscriber", cometdUri))) - val cometdPublisher = actorOf(Props(new Publisher("cometd-publisher", cometdUri))) - - val cometdPublisherBridge = actorOf(Props(new PublisherBridge("jetty:http://0.0.0.0:8877/camel/pub/cometd", cometdPublisher))) - } - - -Quartz Scheduler Example ------------------------- - -Here is an example showing how simple is to implement a cron-style scheduler by -using the Camel Quartz component in Akka. 
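The Quartz endpoint URI has the form ``quartz://<timer name>?cron=<cron expression>``, with ``+`` used in place of spaces between the cron fields (seconds, minutes, hours, day-of-month, month, day-of-week). As a hedged illustration of a different schedule (the timer name and trigger below are made up, not part of the original example), a consumer firing once a day at 06:00 could look like this; the complete every-2-seconds example follows.

.. code-block:: scala

  import akka.actor.Actor
  import akka.camel.Consumer

  // hypothetical variation: fire once a day at 06:00
  class DailyReportActor extends Actor with Consumer {
    def endpointUri = "quartz://dailyReport?cron=0+0+6+*+*+?"

    def receive = {
      case msg => println("daily trigger: " + msg)
    }
  }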
- -The following example creates a "timer" actor which fires a message every 2 -seconds: - -.. code-block:: scala - - package com.dimingo.akka - - import akka.actor.Actor - import akka.actor.Actor.actorOf - - import akka.camel.{Consumer, Message} - import akka.camel.CamelServiceManager._ - - class MyQuartzActor extends Actor with Consumer { - - def endpointUri = "quartz://example?cron=0/2+*+*+*+*+?" - - def receive = { - - case msg => println("==============> received %s " format msg) - - } // end receive - - } // end MyQuartzActor - - object MyQuartzActor { - - def main(str: Array[String]) { - - // start the Camel service - startCamelService - - // create and start a quartz actor - val myActor = actorOf(Props[MyQuartzActor]) - - } // end main - - } // end MyQuartzActor - -The full working example is available for download here: -http://www.dimingo.com/akka/examples/example-akka-quartz.tar.gz - -You can launch it using the maven command: - -.. code-block:: none - - $ mvn scala:run -DmainClass=com.dimingo.akka.MyQuartzActor - -For more information about the Camel Quartz component, see here: -http://camel.apache.org/quartz.html +The Akka Camel module has not been migrated to Akka 2.0-SNAPSHOT yet. \ No newline at end of file diff --git a/akka-docs/project/migration-guide-0.10.x-1.0.x.rst b/akka-docs/project/migration-guide-0.10.x-1.0.x.rst deleted file mode 100644 index 6352e63061..0000000000 --- a/akka-docs/project/migration-guide-0.10.x-1.0.x.rst +++ /dev/null @@ -1,447 +0,0 @@ -Migration Guide 0.10.x to 1.0.x -==================================== - -Akka & Akka Modules separated into two different repositories and distributions -------------------------------------------------------------------------------- - -Akka is split up into two different parts: -* Akka - Reflects all the sections under 'Scala API' and 'Java API' in the navigation bar. -* Akka Modules - Reflects all the sections under 'Add-on modules' in the navigation bar. - -Download the release you need (Akka core or Akka Modules) from ``_ and unzip it. - ----- - -Changed Akka URI ----------------- - -http://akkasource.org changed to http://akka.io - -Reflects XSDs, Maven repositories, ScalaDoc etc. - ----- - -Removed 'se.scalablesolutions' prefix -------------------------------------- - -We have removed some boilerplate by shortening the Akka package from -**se.scalablesolutions.akka** to just **akka** so just do a search-replace in your project, -we apologize for the inconvenience, but we did it for our users. - ----- - -Akka-core is no more --------------------- - -Akka-core has been split into akka-actor, akka-stm, akka-typed-actor & akka-remote this means that you need to update any deps you have on akka-core. - ----- - -Config ------- - -Turning on/off modules -^^^^^^^^^^^^^^^^^^^^^^ - -All the 'service = on' elements for turning modules on and off have been replaced by a top-level list of the enabled services. - -Services available for turning on/off are: -* "remote" -* "http" -* "camel" - -**All** services are **OFF** by default. Enable the ones you are using. - -.. code-block:: ruby - - akka { - enabled-modules = [] # Comma separated list of the enabled modules. Options: ["remote", "camel", "http"] - } - -Renames -^^^^^^^ - -* 'rest' section - has been renamed to 'http' to align with the module name 'akka-http'. -* 'storage' section - has been renamed to 'persistence' to align with the module name 'akka-persistence'. - -.. code-block:: ruby - - akka { - http { - .. - } - - persistence { - .. 
- } - } - ----- - -Important changes from RC2-RC3 ------------------------------- - -**akka.config.SupervisionSupervise** - -**Scala** - -.. code-block:: scala - - def apply(actorRef: ActorRef, lifeCycle: LifeCycle, registerAsRemoteService: Boolean = false) - -- boolean instead of remoteAddress, registers that actor with it's id as service name on the local server - -**akka.actor.Actors now is the API for Java to interact with Actors, Remoting and ActorRegistry:** - -**Java** - -.. code-block:: java - - import static akka.actor.Actors.*; // <-- The important part - - actorOf(); - remote().actorOf(); - registry().actorsFor("foo"); - -***akka.actor.Actor now is the API for Scala to interact with Actors, Remoting and ActorRegistry:*** - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor._ // <-- The important part - - actorOf().method - remote.actorOf() - registry.actorsFor("foo") - -**object UntypedActor has been deleted and replaced with akka.actor.Actors/akka.actor.Actor (Java/Scala)** - -- UntypedActor.actorOf -> Actors.actorOf (Java) or Actor.actorOf (Scala) - -**object ActorRegistry has been deleted and replaced with akka.actor.Actors.registry()/akka.actor.Actor.registry (Java/Scala)** - -- ActorRegistry. -> Actors.registry(). (Java) or Actor.registry. (Scala) - -**object RemoteClient has been deleted and replaced with akka.actor.Actors.remote()/akka.actor.Actor.remote (Java/Scala)** - -- RemoteClient -> Actors.remote() (Java) or Actor.remote (Scala) - -**object RemoteServer has been deleted and replaced with akka.actor.Actors.remote()/akka.actor.Actor.remote (Java/Scala)** - -- RemoteServer - deleted -> Actors.remote() (Java) or Actor.remote (Scala) - -**classes RemoteActor, RemoteUntypedActor and RemoteUntypedConsumerActors has been deleted and replaced with akka.actor.Actors.remote().actorOf(x, host port)/akka.actor.Actor.remote.actorOf(x, host, port)** - -- RemoteActor, RemoteUntypedActor - deleted, use: remote().actorOf(YourActor.class, host, port) (Java) or remote.actorOf(Props[YourActor](host, port) - -**Remoted spring-actors now default to spring id as service-name, use "service-name" attribute on "remote"-tag to override** - -**Listeners for RemoteServer and RemoteClient** are now registered on Actors.remote().addListener (Java) or Actor.remote.addListener (Scala), this means that all listeners get all remote events, both remote server evens and remote client events, **so adjust your code accordingly.** - -**ActorRef.startLinkRemote has been removed since one specified on creation wether the actor is client-managed or not.** - -Important change from RC3 to RC4 --------------------------------- - -The Akka-Spring namespace has changed from akkasource.org and scalablesolutions.se to http://akka.io/schema and http://akka.io/akka-.xsd - -Module akka-actor ------------------ - -The Actor.init callback has been renamed to "preStart" to align with the general callback naming and is more clear about when it's called. - -The Actor.shutdown callback has been renamed to "postStop" to align with the general callback naming and is more clear about when it's called. 
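As a hedged sketch of these two renames (the actor body and log statements are made up for illustration, only the callback names come from the guide):

.. code-block:: scala

  import akka.actor.Actor

  class MyMigratedActor extends Actor {
    // was: init in 0.x
    override def preStart() { println("actor is about to start") }

    // was: shutdown in 0.x
    override def postStop() { println("actor has been stopped") }

    def receive = {
      case msg => println("received: " + msg)
    }
  }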
- -The Actor.initTransactionalState callback has been removed, logic should be moved to preStart and be wrapped in an atomic block - -**se.scalablesolutions.akka.config.ScalaConfig** and **se.scalablesolutions.akka.config.JavaConfig** have been merged into **akka.config.Supervision** - -**RemoteAddress** has moved from **se.scalablesolutions.akka.config.ScalaConfig** to **akka.config** - -The ActorRef.lifeCycle has changed signature from Option[LifeCycle] to LifeCycle, this means you need to change code that looks like this: -**self.lifeCycle = Some(LifeCycle(Permanent))** to **self.lifeCycle = Permanent** - -The equivalent to **self.lifeCycle = None** is **self.lifeCycle = UndefinedLifeCycle** -**LifeCycle(Permanent)** becomes **Permanent** -**new LifeCycle(permanent())** becomes **permanent()** (need to do: import static se.scalablesolutions.akka.config.Supervision.*; first) - -**JavaConfig.Component** and **ScalaConfig.Component** have been consolidated and renamed as **Supervision.SuperviseTypedActor** - -**self.trapExit** has been moved into the FaultHandlingStrategy, and **ActorRef.faultHandler** has switched type from Option[FaultHandlingStrategy] -to FaultHandlingStrategy: - -**Scala** - -.. code-block:: scala - - import akka.config.Supervision._ - - self.faultHandler = OneForOneStrategy(List(classOf[Exception]), 3, 5000) - -**Java** - -.. code-block:: java - - import static akka.Supervision.*; - - getContext().setFaultHandler(new OneForOneStrategy(new Class[] { Exception.class },50,1000)) - -**RestartStrategy, AllForOne, OneForOne** have been replaced with **AllForOneStrategy** and **OneForOneStrategy** in **se.scalablesolutions.akka.config.Supervision** - -**Scala** - -.. code-block:: scala - - import akka.config.Supervision._ - SupervisorConfig( - OneForOneStrategy(List(classOf[Exception]), 3, 5000), - Supervise(pingpong1,Permanent) :: Nil - ) - -**Java** - -.. code-block:: java - - import static akka.Supervision.*; - - new SupervisorConfig( - new OneForOneStrategy(new Class[] { Exception.class },50,1000), - new Server[] { new Supervise(pingpong1, permanent()) } - ) - -***We have removed the following factory methods:*** - -**Actor.actor { case foo => bar }** -**Actor.transactor { case foo => bar }** -**Actor.temporaryActor { case foo => bar }** -**Actor.init {} receive { case foo => bar }** - -They started the actor and no config was possible, it was inconsistent and irreparable. - -replace with your own factories, or: - -**Scala** - -.. code-block:: scala - - actorOf( new Actor { def receive = { case foo => bar } } ).start - actorOf( new Actor { self.lifeCycle = Temporary; def receive = { case foo => bar } } ).start - -ReceiveTimeout is now rescheduled after every message, before there was only an initial timeout. -To stop rescheduling of ReceiveTimeout, set **receiveTimeout = None** - -HotSwap -------- - -HotSwap does no longer use behavior stacking by default, but that is an option to both "become" and HotSwap. - -HotSwap now takes for Scala a Function from ActorRef to a Receive, the ActorRef passed in is the reference to self, so you can do self.reply() etc. - ----- - -Module akka-stm ---------------- - -The STM stuff is now in its own module. This means that there is no support for transactions or transactors in akka-actor. - -Local and global -^^^^^^^^^^^^^^^^ - -The **local/global** distinction has been dropped. This means that if the following general import was being used: - -**Scala** - -.. 
code-block:: scala - - import akka.stm.local._ - -this is now just: - -**Scala** - -.. code-block:: scala - - import akka.stm._ - -Coordinated is the new global -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -There is a new explicit mechanism for coordinated transactions. See the `Scala Transactors `_ and `Java Transactors `_ documentation for more information. Coordinated transactions and transactors are found in the ``akka.transactor`` package now. The usage of transactors has changed. - -Agents -^^^^^^ - -Agent is now in the akka-stm module and has moved to the ``akka.agent`` package. The implementation has been reworked and is now closer to Clojure agents. There is not much difference in general usage, the main changes involve interaction with the STM. - -While updates to Agents are asynchronous, the state of an Agent is always immediately available for reading by any thread. Agents are integrated with the STM - any dispatches made in a transaction are held until that transaction commits, and are discarded if it is retried or aborted. There is a new ``sendOff`` method for long-running or blocking update functions. - ----- - -Module akka-camel ------------------ - -Access to the CamelService managed by CamelServiceManager has changed: - -* Method service renamed to mandatoryService (Scala) -* Method service now returns Option[CamelService] (Scala) -* Introduced method getMandatoryService() (Java) -* Introduced method getService() (Java) - -**Scala** - -.. code-block:: scala - - import se.scalablesolutions.akka.camel.CamelServiceManager._ - import se.scalablesolutions.akka.camel.CamelService - - val o: Option[CamelService] = service - val s: CamelService = mandatoryService - -**Java** - -.. code-block:: java - - import se.scalablesolutions.akka.camel.CamelService; - import se.scalablesolutions.akka.japi.Option; - import static se.scalablesolutions.akka.camel.CamelServiceManager.*; - - Option o = getService(); - CamelService s = getMandatoryService(); - -Access to the CamelContext and ProducerTemplate managed by CamelContextManager has changed: - -* Method context renamed to mandatoryContext (Scala) -* Method template renamed to mandatoryTemplate (Scala) -* Method service now returns Option[CamelContext] (Scala) -* Method template now returns Option[ProducerTemplate] (Scala) -* Introduced method getMandatoryContext() (Java) -* Introduced method getContext() (Java) -* Introduced method getMandatoryTemplate() (Java) -* Introduced method getTemplate() (Java) - -**Scala** - -.. code-block:: scala - - import org.apache.camel.CamelContext - import org.apache.camel.ProducerTemplate - - import se.scalablesolutions.akka.camel.CamelContextManager._ - - val co: Option[CamelContext] = context - val to: Option[ProducerTemplate] = template - - val c: CamelContext = mandatoryContext - val t: ProducerTemplate = mandatoryTemplate - -**Java** - -.. 
code-block:: java - - import org.apache.camel.CamelContext; - import org.apache.camel.ProducerTemplate; - - import se.scalablesolutions.akka.japi.Option; - import static se.scalablesolutions.akka.camel.CamelContextManager.*; - - Option co = getContext(); - Option to = getTemplate(); - - CamelContext c = getMandatoryContext(); - ProducerTemplate t = getMandatoryTemplate(); - -The following methods have been renamed on class se.scalablesolutions.akka.camel.Message: - -* bodyAs(Class) has been renamed to getBodyAs(Class) -* headerAs(String, Class) has been renamed to getHeaderAs(String, Class) - -The API for waiting for consumer endpoint activation and de-activation has been changed - -* CamelService.expectEndpointActivationCount has been removed and replaced by CamelService.awaitEndpointActivation -* CamelService.expectEndpointDeactivationCount has been removed and replaced by CamelService.awaitEndpointDeactivation - -**Scala** - -.. code-block:: scala - - import se.scalablesolutions.akka.actor.Actor - import se.scalablesolutions.akka.camel.CamelServiceManager._ - - val s = startCamelService - val actor = Actor.actorOf(Props[SampleConsumer] - - // wait for 1 consumer being activated - s.awaitEndpointActivation(1) { - actor.start - } - - // wait for 1 consumer being de-activated - s.awaitEndpointDeactivation(1) { - actor.stop - } - - s.stop - -**Java** - -.. code-block:: java - - import java.util.concurrent.TimeUnit; - import se.scalablesolutions.akka.actor.ActorRef; - import se.scalablesolutions.akka.actor.Actors; - import se.scalablesolutions.akka.camel.CamelService; - import se.scalablesolutions.akka.japi.SideEffect; - import static se.scalablesolutions.akka.camel.CamelServiceManager.*; - - CamelService s = startCamelService(); - final ActorRef actor = Actors.actorOf(SampleUntypedConsumer.class); - - // wait for 1 consumer being activated - s.awaitEndpointActivation(1, new SideEffect() { - public void apply() { - actor.start(); - } - }); - - // wait for 1 consumer being de-activated - s.awaitEndpointDeactivation(1, new SideEffect() { - public void apply() { - actor.stop(); - } - }); - - s.stop(); - -Module Akka-Http ----------------- - -Atmosphere support has been removed. If you were using akka.comet.AkkaServlet for Jersey support only, -you can switch that to: akka.http.AkkaRestServlet and it should work just like before. - -Atmosphere has been removed because we have a new async http support in the form of Akka Mist, a very thin bridge -between Servlet3.0/JettyContinuations and Actors, enabling Http-as-messages, read more about it here: -http://doc.akka.io/http#Mist%20-%20Lightweight%20Asynchronous%20HTTP - -If you really need Atmosphere support, you can add it yourself by following the steps listed at the start of: -http://doc.akka.io/comet - -Module akka-spring ------------------- - -The Akka XML schema URI has changed to http://akka.io/schema/akka - -.. code-block:: xml - - - - - - diff --git a/akka-docs/project/migration-guide-0.8.x-0.9.x.rst b/akka-docs/project/migration-guide-0.8.x-0.9.x.rst deleted file mode 100644 index 868879a5b0..0000000000 --- a/akka-docs/project/migration-guide-0.8.x-0.9.x.rst +++ /dev/null @@ -1,172 +0,0 @@ -Migration Guide 0.8.x to 0.9.x -============================== - -**This document describes between the 0.8.x and the 0.9 release.** - -Background for the new ActorRef -------------------------------- - -In the work towards 0.9 release we have now done a major change to how Actors are created. 
In short we have separated identity and value, created an 'ActorRef' that holds the actual Actor instance. This allows us to do many great things such as for example: - -* Create serializable, immutable, network-aware Actor references that can be freely shared across the network. They "remember" their origin and will always work as expected. -* Not only kill and restart the same supervised Actor instance when it has crashed (as we do now), but dereference it, throw it away and make it eligible for garbage collection. -* etc. much more - -These work very much like the 'PID' (process id) in Erlang. - -These changes means that there is no difference in defining Actors. You still use the old Actor trait, all methods are there etc. But you can't just new this Actor up and send messages to it since all its public API methods are gone. They now reside in a new class; 'ActorRef' and use need to use instances of this class to interact with the Actor (sending messages etc.). - -Here is a short migration guide with the things that you have to change. It is a big conceptual change but in practice you don't have to change much. - - - -Creating Actors with default constructor ----------------------------------------- - -From: - -.. code-block:: scala - - val a = new MyActor - a ! msg - -To: - -.. code-block:: scala - - import Actor._ - val a = actorOf(Props[MyActor] - a ! msg - -You can also start it in the same statement: - -.. code-block:: scala - - val a = actorOf(Props[MyActor] - -Creating Actors with non-default constructor --------------------------------------------- - -From: - -.. code-block:: scala - - val a = new MyActor(..) - a ! msg - -To: - -.. code-block:: scala - - import Actor._ - val a = actorOf(Props(new MyActor(..)) - a ! msg - -Use of 'self' ActorRef API --------------------------- - -Where you have used 'this' to refer to the Actor from within itself now use 'self': - -.. code-block:: scala - - self ! MessageToMe - -Now the Actor trait only has the callbacks you can implement: -* receive -* postRestart/preRestart -* init/shutdown - -It has no state at all. - -All API has been moved to ActorRef. The Actor is given its ActorRef through the 'self' member variable. -Here you find functions like: -* !, !!, !!! and forward -* link, unlink, startLink, spawnLink etc -* makeTransactional, makeRemote etc. -* start, stop -* etc. - -Here you also find fields like -* dispatcher = ... -* id = ... -* lifeCycle = ... -* faultHandler = ... -* trapExit = ... -* etc. - -This means that to use them you have to prefix them with 'self', like this: - -.. code-block:: scala - - self ! Message - -However, for convenience you can import these functions and fields like below, which will allow you do drop the 'self' prefix: - -.. code-block:: scala - - class MyActor extends Actor { - import self._ - id = ... - dispatcher = ... - spawnLink[OtherActor] - ... - } - -Serialization -------------- - -If you want to serialize it yourself, here is how to do it: - -.. code-block:: scala - - val actorRef1 = actorOf(Props[MyActor] - - val bytes = actorRef1.toBinary - - val actorRef2 = ActorRef.fromBinary(bytes) - -If you are also using Protobuf then you can use the methods that work with Protobuf's Messages directly. - -.. code-block:: scala - - val actorRef1 = actorOf(Props[MyActor] - - val protobufMessage = actorRef1.toProtocol - - val actorRef2 = ActorRef.fromProtocol(protobufMessage) - -Camel ------ - -Some methods of the se.scalablesolutions.akka.camel.Message class have been deprecated in 0.9. These are - -.. 
code-block:: scala - - package se.scalablesolutions.akka.camel - - case class Message(...) { - // ... - @deprecated def bodyAs[T](clazz: Class[T]): T - @deprecated def setBodyAs[T](clazz: Class[T]): Message - // ... - } - -They will be removed in 1.0. Instead use - -.. code-block:: scala - - package se.scalablesolutions.akka.camel - - case class Message(...) { - // ... - def bodyAs[T](implicit m: Manifest[T]): T = - def setBodyAs[T](implicit m: Manifest[T]): Message - // ... - } - -Usage example: -.. code-block:: scala - - val m = Message(1.4) - val b = m.bodyAs[String] - diff --git a/akka-docs/scala/code/StmDocSpec.scala b/akka-docs/scala/code/StmDocSpec.scala deleted file mode 100644 index 99c2e051ae..0000000000 --- a/akka-docs/scala/code/StmDocSpec.scala +++ /dev/null @@ -1,27 +0,0 @@ -package akka.docs.stm - -import org.scalatest.WordSpec -import org.scalatest.matchers.MustMatchers - -class StmDocSpec extends WordSpec with MustMatchers { - - "simple counter example" in { - //#simple - import akka.stm._ - - val ref = Ref(0) - - def counter = atomic { - ref alter (_ + 1) - } - - counter - // -> 1 - - counter - // -> 2 - //#simple - - ref.get must be === 2 - } -} diff --git a/akka-docs/scala/index.rst b/akka-docs/scala/index.rst index 7e24497f5f..978d9a47bb 100644 --- a/akka-docs/scala/index.rst +++ b/akka-docs/scala/index.rst @@ -12,7 +12,6 @@ Scala API futures dataflow agents - stm transactors fault-tolerance dispatchers diff --git a/akka-docs/scala/transactors.rst b/akka-docs/scala/transactors.rst new file mode 100644 index 0000000000..cdd284ae43 --- /dev/null +++ b/akka-docs/scala/transactors.rst @@ -0,0 +1,6 @@ +.. _transactors-scala: + +Transactors (Scala) +=================== + +The Akka Transactors module has not been migrated to Akka 2.0-SNAPSHOT yet. 
\ No newline at end of file diff --git a/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala b/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala index 1af4785525..13bb9d84ab 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala @@ -29,7 +29,9 @@ class TestLatch(count: Int = 1)(implicit system: ActorSystem) { def countDown() = latch.countDown() - def open() = countDown() + def isOpen: Boolean = latch.getCount == 0 + + def open() = while (!isOpen) countDown() def await(): Boolean = await(TestLatch.DefaultTimeout) diff --git a/project/AkkaBuild.scala b/project/AkkaBuild.scala index cbad5fda90..d1e0299d3c 100644 --- a/project/AkkaBuild.scala +++ b/project/AkkaBuild.scala @@ -30,7 +30,7 @@ object AkkaBuild extends Build { Unidoc.unidocExclude := Seq(samples.id, tutorials.id), Dist.distExclude := Seq(actorTests.id, akkaSbtPlugin.id, docs.id) ), - aggregate = Seq(actor, testkit, actorTests, stm, remote, slf4j, amqp, mailboxes, akkaSbtPlugin, samples, tutorials, docs) + aggregate = Seq(actor, testkit, actorTests, remote, slf4j, amqp, mailboxes, akkaSbtPlugin, samples, tutorials, docs) ) lazy val actor = Project( @@ -66,19 +66,10 @@ object AkkaBuild extends Build { ) ) - lazy val stm = Project( - id = "akka-stm", - base = file("akka-stm"), - dependencies = Seq(actor, testkit % "test->test"), - settings = defaultSettings ++ Seq( - libraryDependencies ++= Dependencies.stm - ) - ) - lazy val remote = Project( id = "akka-remote", base = file("akka-remote"), - dependencies = Seq(stm, actorTests % "test->test", testkit % "test->test"), + dependencies = Seq(actor, actorTests % "test->test", testkit % "test->test"), settings = defaultSettings ++ multiJvmSettings ++ Seq( libraryDependencies ++= Dependencies.cluster, extraOptions in MultiJvm <<= (sourceDirectory in MultiJvm) { src => @@ -256,7 +247,7 @@ object AkkaBuild extends Build { lazy val docs = Project( id = "akka-docs", base = file("akka-docs"), - dependencies = Seq(actor, testkit % "test->test", stm, remote, slf4j, fileMailbox, mongoMailbox, redisMailbox, beanstalkMailbox, zookeeperMailbox), + dependencies = Seq(actor, testkit % "test->test", remote, slf4j, fileMailbox, mongoMailbox, redisMailbox, beanstalkMailbox, zookeeperMailbox), settings = defaultSettings ++ Seq( unmanagedSourceDirectories in Test <<= baseDirectory { _ ** "code" get }, libraryDependencies ++= Dependencies.docs, @@ -359,12 +350,10 @@ object Dependencies { val testkit = Seq(Test.scalatest, Test.junit) val actorTests = Seq( - Test.junit, Test.scalatest, Test.multiverse, Test.commonsMath, Test.mockito, + Test.junit, Test.scalatest, Test.commonsMath, Test.mockito, Test.scalacheck, protobuf, jacksonMapper, sjson ) - val stm = Seq(multiverse, Test.junit, Test.scalatest) - val cluster = Seq( bookkeeper, commonsCodec, commonsIo, guice, h2Lzf, jacksonCore, jacksonMapper, log4j, netty, protobuf, sjson, zkClient, zookeeper, zookeeperLock, Test.junit, Test.scalatest @@ -412,7 +401,6 @@ object Dependency { val Jersey = "1.3" val Jetty = "7.4.0.v20110414" val Logback = "0.9.28" - val Multiverse = "0.6.2" val Netty = "3.2.5.Final" val Protobuf = "2.4.1" val Scalatest = "1.6.1" @@ -439,7 +427,6 @@ object Dependency { val jettyServlet = "org.eclipse.jetty" % "jetty-servlet" % V.Jetty // Eclipse license val log4j = "log4j" % "log4j" % "1.2.15" // ApacheV2 val mongoAsync = "com.mongodb.async" % "mongo-driver_2.9.0-1" % "0.2.9-1" // ApacheV2 - val multiverse = "org.multiverse" % "multiverse-alpha" % 
V.Multiverse // ApacheV2 val netty = "org.jboss.netty" % "netty" % V.Netty // ApacheV2 val osgi = "org.osgi" % "org.osgi.core" % "4.2.0" // ApacheV2 val protobuf = "com.google.protobuf" % "protobuf-java" % V.Protobuf // New BSD @@ -482,7 +469,6 @@ object Dependency { val junit = "junit" % "junit" % "4.5" % "test" // Common Public License 1.0 val logback = "ch.qos.logback" % "logback-classic" % V.Logback % "test" // EPL 1.0 / LGPL 2.1 val mockito = "org.mockito" % "mockito-all" % "1.8.1" % "test" // MIT - val multiverse = "org.multiverse" % "multiverse-alpha" % V.Multiverse % "test" // ApacheV2 val scalatest = "org.scalatest" %% "scalatest" % V.Scalatest % "test" // ApacheV2 val scalacheck = "org.scala-tools.testing" %% "scalacheck" % "1.9" % "test" // New BSD } From 80600abc3373aa35bce85d321d699f7072f3726e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jonas=20Bone=CC=81r?= Date: Wed, 14 Dec 2011 14:05:44 +0100 Subject: [PATCH 09/34] Added 'withRouter[TYPE]' to 'Props'. Added docs (Scala and Java) and (code for the docs) for 'Props'. Renamed UntypedActorTestBase to UntypedActorDocTestBase. MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Jonas Bonér --- .../test/scala/akka/routing/RoutingSpec.scala | 24 ++--- .../src/main/scala/akka/actor/Props.scala | 62 +++++++++--- .../src/main/scala/akka/routing/package.scala | 7 +- .../akka/docs/actor/UntypedActorDocTest.scala | 5 + ...Base.java => UntypedActorDocTestBase.java} | 23 ++++- .../akka/docs/actor/UntypedActorTest.scala | 5 - akka-docs/java/untyped-actors.rst | 99 ++++++++++--------- akka-docs/scala/actors.rst | 14 ++- akka-docs/scala/code/ActorDocSpec.scala | 20 ++++ .../src/main/scala/Pi.scala | 2 +- 10 files changed, 178 insertions(+), 83 deletions(-) create mode 100644 akka-docs/java/code/akka/docs/actor/UntypedActorDocTest.scala rename akka-docs/java/code/akka/docs/actor/{UntypedActorTestBase.java => UntypedActorDocTestBase.java} (90%) delete mode 100644 akka-docs/java/code/akka/docs/actor/UntypedActorTest.scala diff --git a/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala index e6e0f1c898..e4e0e01541 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala @@ -62,7 +62,7 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { "no router" must { "be started when constructed" in { - val routedActor = system.actorOf(Props(new TestActor).withRouter(NoRouter)) + val routedActor = system.actorOf(Props[TestActor].withRouter(NoRouter)) routedActor.isTerminated must be(false) } @@ -90,7 +90,7 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { "round robin router" must { "be started when constructed" in { - val routedActor = system.actorOf(Props(new TestActor).withRouter(RoundRobinRouter(nrOfInstances = 1))) + val routedActor = system.actorOf(Props[TestActor].withRouter(RoundRobinRouter(nrOfInstances = 1))) routedActor.isTerminated must be(false) } @@ -118,7 +118,7 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { actors = actors :+ actor } - val routedActor = system.actorOf(Props(new TestActor).withRouter(RoundRobinRouter(targets = actors))) + val routedActor = system.actorOf(Props[TestActor].withRouter(RoundRobinRouter(targets = actors))) //send messages to the actor. 
for (i ← 0 until iterationCount) { @@ -156,7 +156,7 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { } })) - val routedActor = system.actorOf(Props(new TestActor).withRouter(RoundRobinRouter(targets = List(actor1, actor2)))) + val routedActor = system.actorOf(Props[TestActor].withRouter(RoundRobinRouter(targets = List(actor1, actor2)))) routedActor ! Broadcast(1) routedActor ! Broadcast("end") @@ -171,7 +171,7 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { "random router" must { "be started when constructed" in { - val routedActor = system.actorOf(Props(new TestActor).withRouter(RandomRouter(nrOfInstances = 1))) + val routedActor = system.actorOf(Props[TestActor].withRouter(RandomRouter(nrOfInstances = 1))) routedActor.isTerminated must be(false) } @@ -194,7 +194,7 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { } })) - val routedActor = system.actorOf(Props(new TestActor).withRouter(RandomRouter(targets = List(actor1, actor2)))) + val routedActor = system.actorOf(Props[TestActor].withRouter(RandomRouter(targets = List(actor1, actor2)))) routedActor ! Broadcast(1) routedActor ! Broadcast("end") @@ -208,7 +208,7 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { "broadcast router" must { "be started when constructed" in { - val routedActor = system.actorOf(Props(new TestActor).withRouter(BroadcastRouter(nrOfInstances = 1))) + val routedActor = system.actorOf(Props[TestActor].withRouter(BroadcastRouter(nrOfInstances = 1))) routedActor.isTerminated must be(false) } @@ -231,7 +231,7 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { } })) - val routedActor = system.actorOf(Props(new TestActor).withRouter(BroadcastRouter(targets = List(actor1, actor2)))) + val routedActor = system.actorOf(Props[TestActor].withRouter(BroadcastRouter(targets = List(actor1, actor2)))) routedActor ! 1 routedActor ! "end" @@ -262,7 +262,7 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { } })) - val routedActor = system.actorOf(Props(new TestActor).withRouter(BroadcastRouter(targets = List(actor1, actor2)))) + val routedActor = system.actorOf(Props[TestActor].withRouter(BroadcastRouter(targets = List(actor1, actor2)))) routedActor ? 1 routedActor ! "end" @@ -276,7 +276,7 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { "Scatter-gather router" must { "be started when constructed" in { - val routedActor = system.actorOf(Props(new TestActor).withRouter(ScatterGatherFirstCompletedRouter(targets = List(newActor(0))))) + val routedActor = system.actorOf(Props[TestActor].withRouter(ScatterGatherFirstCompletedRouter(targets = List(newActor(0))))) routedActor.isTerminated must be(false) } @@ -299,7 +299,7 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { } })) - val routedActor = system.actorOf(Props(new TestActor).withRouter(ScatterGatherFirstCompletedRouter(targets = List(actor1, actor2)))) + val routedActor = system.actorOf(Props[TestActor].withRouter(ScatterGatherFirstCompletedRouter(targets = List(actor1, actor2)))) routedActor ! Broadcast(1) routedActor ! 
Broadcast("end") @@ -313,7 +313,7 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { val shutdownLatch = new TestLatch(1) val actor1 = newActor(1, Some(shutdownLatch)) val actor2 = newActor(22, Some(shutdownLatch)) - val routedActor = system.actorOf(Props(new TestActor).withRouter(ScatterGatherFirstCompletedRouter(targets = List(actor1, actor2)))) + val routedActor = system.actorOf(Props[TestActor].withRouter(ScatterGatherFirstCompletedRouter(targets = List(actor1, actor2)))) routedActor ! Broadcast(Stop(Some(1))) shutdownLatch.await diff --git a/akka-actor/src/main/scala/akka/actor/Props.scala b/akka-actor/src/main/scala/akka/actor/Props.scala index 9824803aea..29d1374dbc 100644 --- a/akka-actor/src/main/scala/akka/actor/Props.scala +++ b/akka-actor/src/main/scala/akka/actor/Props.scala @@ -8,11 +8,13 @@ import akka.dispatch._ import akka.japi.Creator import akka.util._ import collection.immutable.Stack -import akka.routing.{ NoRouter, RouterConfig } +import akka.routing._ /** * Factory for Props instances. + * * Props is a ActorRef configuration object, that is thread safe and fully sharable. + * * Used when creating new actors through; ActorSystem.actorOf and ActorContext.actorOf. */ object Props { @@ -47,6 +49,8 @@ object Props { /** * Returns a Props that has default values except for "creator" which will be a function that creates an instance * of the supplied type using the default constructor. + * + * Scala API. */ def apply[T <: Actor: ClassManifest]: Props = default.withCreator(implicitly[ClassManifest[T]].erasure.asInstanceOf[Class[_ <: Actor]].newInstance) @@ -61,6 +65,8 @@ object Props { /** * Returns a Props that has default values except for "creator" which will be a function that creates an instance * using the supplied thunk. + * + * Scala API. */ def apply(creator: ⇒ Actor): Props = default.withCreator(creator) @@ -87,7 +93,17 @@ object Props { * {{{ * val props = Props[MyActor] * val props = Props(new MyActor) + * val props = Props { + * creator = .., + * dispatcher = .., + * timeout = .., + * faultHandler = .., + * routerConfig = .. + * } + * val props = Props().withCreator(new MyActor) * val props = Props[MyActor].withTimeout(timeout) + * val props = Props[MyActor].withRouter[RoundRobinRouter] + * val props = Props[MyActor].withRouter(new RoundRobinRouter(..)) * val props = Props[MyActor].withFaultHandler(OneForOneStrategy { * case e: IllegalStateException ⇒ Resume * }) @@ -103,19 +119,20 @@ object Props { * } * }); * Props props = new Props().withCreator(new UntypedActorFactory() { ... 
}); - * Props props = new Props().withTimeout(timeout); - * Props props = new Props().withFaultHandler(new OneForOneStrategy(...)); + * Props props = new Props(MyActor.class).withTimeout(timeout); + * Props props = new Props(MyActor.class).withFaultHandler(new OneForOneStrategy(...)); + * Props props = new Props(MyActor.class).withRouter(new RoundRobinRouter(..)); * }}} */ -case class Props(creator: () ⇒ Actor = Props.defaultCreator, - @transient dispatcher: MessageDispatcher = Props.defaultDispatcher, - timeout: Timeout = Props.defaultTimeout, - faultHandler: FaultHandlingStrategy = Props.defaultFaultHandler, - routerConfig: RouterConfig = Props.defaultRoutedProps) { +case class Props( + creator: () ⇒ Actor = Props.defaultCreator, + @transient dispatcher: MessageDispatcher = Props.defaultDispatcher, + timeout: Timeout = Props.defaultTimeout, + faultHandler: FaultHandlingStrategy = Props.defaultFaultHandler, + routerConfig: RouterConfig = Props.defaultRoutedProps) { /** * No-args constructor that sets all the default values. - * Java API. */ def this() = this( creator = Props.defaultCreator, @@ -144,43 +161,58 @@ case class Props(creator: () ⇒ Actor = Props.defaultCreator, /** * Returns a new Props with the specified creator set. + * * Scala API. */ def withCreator(c: ⇒ Actor) = copy(creator = () ⇒ c) /** * Returns a new Props with the specified creator set. + * * Java API. */ def withCreator(c: Creator[Actor]) = copy(creator = () ⇒ c.create) /** * Returns a new Props with the specified creator set. + * * Java API. */ def withCreator(c: Class[_ <: Actor]) = copy(creator = () ⇒ c.newInstance) /** * Returns a new Props with the specified dispatcher set. - * Java API. */ def withDispatcher(d: MessageDispatcher) = copy(dispatcher = d) /** - * Returns a new Props with the specified timeout set - * Java API. + * Returns a new Props with the specified timeout set. */ def withTimeout(t: Timeout) = copy(timeout = t) /** * Returns a new Props with the specified faulthandler set. - * Java API. */ def withFaultHandler(f: FaultHandlingStrategy) = copy(faultHandler = f) /** - * Returns a new Props with the specified router config set - * Java API + * Returns a new Props with the specified router config set. */ def withRouter(r: RouterConfig) = copy(routerConfig = r) + + /** + * Returns a new Props with the specified router config set. + * + * Scala API. 
+ */ + def withRouter[T <: RouterConfig: ClassManifest] = { + val routerConfig = implicitly[ClassManifest[T]].erasure.asInstanceOf[Class[_ <: RouterConfig]] match { + case RoundRobinRouterClass ⇒ RoundRobinRouter() + case RandomRouterClass ⇒ RandomRouter() + case BroadcastRouterClass ⇒ BroadcastRouter() + case ScatterGatherRouterClass ⇒ ScatterGatherFirstCompletedRouter() + case unknown ⇒ throw new akka.config.ConfigurationException("Router not supported [" + unknown.getName + "]") + } + copy(routerConfig = routerConfig) + } } diff --git a/akka-actor/src/main/scala/akka/routing/package.scala b/akka-actor/src/main/scala/akka/routing/package.scala index 1f2e343a33..579484493c 100644 --- a/akka-actor/src/main/scala/akka/routing/package.scala +++ b/akka-actor/src/main/scala/akka/routing/package.scala @@ -8,4 +8,9 @@ package object routing { type Route = PartialFunction[(akka.actor.ActorRef, Any), Iterable[Destination]] -} \ No newline at end of file + // To allow pattern matching on the class types + val RoundRobinRouterClass = classOf[RoundRobinRouter] + val RandomRouterClass = classOf[RandomRouter] + val BroadcastRouterClass = classOf[BroadcastRouter] + val ScatterGatherRouterClass = classOf[ScatterGatherFirstCompletedRouter] +} diff --git a/akka-docs/java/code/akka/docs/actor/UntypedActorDocTest.scala b/akka-docs/java/code/akka/docs/actor/UntypedActorDocTest.scala new file mode 100644 index 0000000000..76b3b990fa --- /dev/null +++ b/akka-docs/java/code/akka/docs/actor/UntypedActorDocTest.scala @@ -0,0 +1,5 @@ +package akka.docs.actor + +import org.scalatest.junit.JUnitSuite + +class UntypedActorDocTest extends UntypedActorDocTestBase with JUnitSuite diff --git a/akka-docs/java/code/akka/docs/actor/UntypedActorTestBase.java b/akka-docs/java/code/akka/docs/actor/UntypedActorDocTestBase.java similarity index 90% rename from akka-docs/java/code/akka/docs/actor/UntypedActorTestBase.java rename to akka-docs/java/code/akka/docs/actor/UntypedActorDocTestBase.java index c2a877d962..fd1eb2f630 100644 --- a/akka-docs/java/code/akka/docs/actor/UntypedActorTestBase.java +++ b/akka-docs/java/code/akka/docs/actor/UntypedActorDocTestBase.java @@ -1,5 +1,7 @@ package akka.docs.actor; +import akka.actor.Timeout; + //#imports import akka.actor.ActorRef; import akka.actor.ActorSystem; @@ -33,7 +35,26 @@ import scala.Option; import static org.junit.Assert.*; -public class UntypedActorTestBase { +public class UntypedActorDocTestBase { + + @Test + public void createProps() { + //#creating-props-config + Props props1 = new Props(); + Props props2 = new Props(MyUntypedActor.class); + Props props3 = new Props(new UntypedActorFactory() { + public UntypedActor create() { + return new MyUntypedActor(); + } + }); + Props props4 = props1.withCreator(new UntypedActorFactory() { + public UntypedActor create() { + return new MyUntypedActor(); + } + }); + Props props5 = props4.withTimeout(new Timeout(1000)); + //#creating-props-config + } @Test public void systemActorOf() { diff --git a/akka-docs/java/code/akka/docs/actor/UntypedActorTest.scala b/akka-docs/java/code/akka/docs/actor/UntypedActorTest.scala deleted file mode 100644 index 1747f30f92..0000000000 --- a/akka-docs/java/code/akka/docs/actor/UntypedActorTest.scala +++ /dev/null @@ -1,5 +0,0 @@ -package akka.docs.actor - -import org.scalatest.junit.JUnitSuite - -class UntypedActorTest extends UntypedActorTestBase with JUnitSuite diff --git a/akka-docs/java/untyped-actors.rst b/akka-docs/java/untyped-actors.rst index 4324aadf19..7ed3100fab 100644 --- 
a/akka-docs/java/untyped-actors.rst +++ b/akka-docs/java/untyped-actors.rst @@ -42,7 +42,7 @@ Here is an example: Creating Actors with default constructor ---------------------------------------- -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java :include: imports,system-actorOf The call to :meth:`actorOf` returns an instance of ``ActorRef``. This is a handle to @@ -62,7 +62,7 @@ a top level actor, that is supervised by the system (internal guardian actor). .. includecode:: code/akka/docs/actor/FirstUntypedActor.java#context-actorOf Actors are automatically started asynchronously when created. -When you create the ``UntypedActor`` then it will automatically call the ``preStart`` +When you create the ``UntypedActor`` then it will automatically call the ``preStart`` callback method on the ``UntypedActor`` class. This is an excellent place to add initialization code for the actor. @@ -76,26 +76,35 @@ add initialization code for the actor. Creating Actors with non-default constructor -------------------------------------------- -If your UntypedActor has a constructor that takes parameters then you can't create it using 'actorOf(clazz)'. -Instead you can use a variant of ``actorOf`` that takes an instance of an 'UntypedActorFactory' -in which you can create the Actor in any way you like. If you use this method then you to make sure that -no one can get a reference to the actor instance. If they can get a reference it then they can -touch state directly in bypass the whole actor dispatching mechanism and create race conditions +If your UntypedActor has a constructor that takes parameters then you can't create it using 'actorOf(clazz)'. +Instead you can use a variant of ``actorOf`` that takes an instance of an 'UntypedActorFactory' +in which you can create the Actor in any way you like. If you use this method then you to make sure that +no one can get a reference to the actor instance. If they can get a reference it then they can +touch state directly in bypass the whole actor dispatching mechanism and create race conditions which can lead to corrupt data. Here is an example: -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java#creating-constructor +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#creating-constructor This way of creating the Actor is also great for integrating with Dependency Injection (DI) frameworks like Guice or Spring. + +Props +----- + +``Props`` is a configuration object to specify configuration options for the creation +of actors. Here are some examples on how to create a ``Props`` instance. + +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#creating-props-config + + Creating Actors with Props -------------------------- -``Props`` is a configuration object to specify additional things for the actor to -be created, such as the ``MessageDispatcher``. +Actors are created by passing in the ``Props`` object into the ``actorOf`` factory method. -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java#creating-props +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#creating-props UntypedActor API @@ -119,7 +128,7 @@ In addition, it offers: The remaining visible methods are user-overridable life-cycle hooks which are described in the following: -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java#lifecycle-callbacks +.. 
includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#lifecycle-callbacks The implementations shown above are the defaults provided by the :class:`UntypedActor` class. @@ -162,7 +171,7 @@ processing a message. This restart involves the hooks mentioned above: An actor restart replaces only the actual actor object; the contents of the mailbox and the hotswap stack are unaffected by the restart, so processing of -messages will resume after the :meth:`postRestart` hook returns. The message +messages will resume after the :meth:`postRestart` hook returns. The message that triggered the exception will not be received again. Any message sent to an actor while it is being restarted will be queued to its mailbox as usual. @@ -172,9 +181,9 @@ Stop Hook After stopping an actor, its :meth:`postStop` hook is called, which may be used e.g. for deregistering this actor from other services. This hook is guaranteed -to run after message queuing has been disabled for this actor, i.e. messages -sent to a stopped actor will be redirected to the :obj:`deadLetters` of the -:obj:`ActorSystem`. +to run after message queuing has been disabled for this actor, i.e. messages +sent to a stopped actor will be redirected to the :obj:`deadLetters` of the +:obj:`ActorSystem`. Identifying Actors @@ -188,7 +197,7 @@ Messages and immutability **IMPORTANT**: Messages can be any kind of object but have to be immutable. Akka can’t enforce immutability (yet) so this has to be by -convention. +convention. Here is an example of an immutable message: @@ -207,8 +216,8 @@ Messages are sent to an Actor through one of the following methods. Message ordering is guaranteed on a per-sender basis. -In all these methods you have the option of passing along your own ``ActorRef``. -Make it a practice of doing so because it will allow the receiver actors to be able to respond +In all these methods you have the option of passing along your own ``ActorRef``. +Make it a practice of doing so because it will allow the receiver actors to be able to respond to your message, since the sender reference is sent along with the message. Tell: Fire-forget @@ -229,7 +238,7 @@ to reply to the original sender, by using ``getSender().tell(replyMsg)``. actor.tell("Hello", getSelf()); -If invoked without the sender parameter the sender will be +If invoked without the sender parameter the sender will be :obj:`deadLetters` actor reference in the target actor. Ask: Send-And-Receive-Future @@ -244,13 +253,13 @@ will immediately return a :class:`Future`: Future future = actorRef.ask("Hello", timeoutMillis); The receiving actor should reply to this message, which will complete the -future with the reply message as value; ``getSender.tell(result)``. +future with the reply message as value; ``getSender.tell(result)``. -To complete the future with an exception you need send a Failure message to the sender. -This is not done automatically when an actor throws an exception while processing a -message. +To complete the future with an exception you need send a Failure message to the sender. +This is not done automatically when an actor throws an exception while processing a +message. -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java#reply-exception +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#reply-exception If the actor does not complete the future, it will expire after the timeout period, specified as parameter to the ``ask`` method. @@ -258,16 +267,16 @@ specified as parameter to the ``ask`` method. 
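For reference, a minimal sketch of the Failure-reply pattern described above, assuming the 2.0-SNAPSHOT Java API used on this page and ``akka.actor.Status.Failure`` as the failure message type (``WorkerActor`` and ``operation`` are illustrative names, not part of the patch):

.. code-block:: java

    import akka.actor.Status;
    import akka.actor.UntypedActor;

    public class WorkerActor extends UntypedActor {
        @Override
        public void onReceive(Object message) {
            try {
                // 'operation' stands in for real work that may fail
                String result = operation(message);
                getSender().tell(result);
            } catch (RuntimeException e) {
                // sending Status.Failure completes the Future returned by 'ask' with the exception
                getSender().tell(new Status.Failure(e));
                throw e;
            }
        }

        private String operation(Object message) {
            return message.toString().toUpperCase();
        }
    }

Without the ``Status.Failure`` reply the caller's ``Future`` is only completed when the timeout expires, as noted above.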
See :ref:`futures-java` for more information on how to await or query a future. -The ``onComplete``, ``onResult``, or ``onTimeout`` methods of the ``Future`` can be -used to register a callback to get a notification when the Future completes. +The ``onComplete``, ``onResult``, or ``onTimeout`` methods of the ``Future`` can be +used to register a callback to get a notification when the Future completes. Gives you a way to avoid blocking. .. warning:: When using future callbacks, inside actors you need to carefully avoid closing over - the containing actor’s reference, i.e. do not call methods or access mutable state - on the enclosing actor from within the callback. This would break the actor - encapsulation and may introduce synchronization bugs and race conditions because + the containing actor’s reference, i.e. do not call methods or access mutable state + on the enclosing actor from within the callback. This would break the actor + encapsulation and may introduce synchronization bugs and race conditions because the callback will be scheduled concurrently to the enclosing actor. Unfortunately there is not yet a way to detect these illegal accesses at compile time. See also: :ref:`jmm-shared-state` @@ -278,7 +287,7 @@ even if that entails waiting for it (but keep in mind that waiting inside an actor is prone to dead-locks, e.g. if obtaining the result depends on processing another message on this actor). -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java :include: import-future,using-ask Forward message @@ -297,7 +306,7 @@ You need to pass along your context variable as well. Receive messages ================ -When an actor receives a message it is passed into the ``onReceive`` method, this is +When an actor receives a message it is passed into the ``onReceive`` method, this is an abstract method on the ``UntypedActor`` base class that needs to be defined. Here is an example: @@ -340,17 +349,17 @@ message. Stopping actors =============== -Actors are stopped by invoking the ``stop`` method of the ``ActorRef``. +Actors are stopped by invoking the ``stop`` method of the ``ActorRef``. The actual termination of the actor is performed asynchronously, i.e. -``stop`` may return before the actor is stopped. +``stop`` may return before the actor is stopped. .. code-block:: java actor.stop(); -Processing of the current message, if any, will continue before the actor is stopped, +Processing of the current message, if any, will continue before the actor is stopped, but additional messages in the mailbox will not be processed. By default these -messages are sent to the :obj:`deadLetters` of the :obj:`ActorSystem`, but that +messages are sent to the :obj:`deadLetters` of the :obj:`ActorSystem`, but that depends on the mailbox implementation. When stop is called then a call to the ``def postStop`` callback method will @@ -365,7 +374,7 @@ take place. The ``Actor`` can use this callback to implement shutdown behavior. All Actors are stopped when the ``ActorSystem`` is stopped. Supervised actors are stopped when the supervisor is stopped, i.e. children are stopped -when parent is stopped. +when parent is stopped. PoisonPill @@ -381,7 +390,7 @@ If the ``PoisonPill`` was sent with ``ask``, the ``Future`` will be completed wi Use it like this: -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java :include: import-actors,poison-pill .. 
_UntypedActor.HotSwap: @@ -402,10 +411,10 @@ The hotswapped code is kept in a Stack which can be pushed and popped. To hotswap the Actor using ``getContext().become``: -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java :include: import-procedure,hot-swap-actor -The ``become`` method is useful for many different things, such as to implement +The ``become`` method is useful for many different things, such as to implement a Finite State Machine (FSM). Here is another little cute example of ``become`` and ``unbecome`` in action: @@ -432,7 +441,7 @@ through regular supervisor semantics. Use it like this: -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java :include: import-actors,kill Actors and exceptions @@ -462,9 +471,9 @@ messages on that mailbox, will be there as well. What happens to the actor ------------------------- -If an exception is thrown, the actor instance is discarded and a new instance is +If an exception is thrown, the actor instance is discarded and a new instance is created. This new instance will now be used in the actor references to this actor -(so this is done invisible to the developer). Note that this means that current -state of the failing actor instance is lost if you don't store and restore it in -``preRestart`` and ``postRestart`` callbacks. +(so this is done invisible to the developer). Note that this means that current +state of the failing actor instance is lost if you don't store and restore it in +``preRestart`` and ``postRestart`` callbacks. diff --git a/akka-docs/scala/actors.rst b/akka-docs/scala/actors.rst index 618d2618c9..7f06dbcc9d 100644 --- a/akka-docs/scala/actors.rst +++ b/akka-docs/scala/actors.rst @@ -54,7 +54,7 @@ Creating Actors with default constructor ---------------------------------------- .. includecode:: code/ActorDocSpec.scala -:include: imports2,system-actorOf + :include: imports2,system-actorOf The call to :meth:`actorOf` returns an instance of ``ActorRef``. This is a handle to the ``Actor`` instance which you can use to interact with the ``Actor``. The @@ -95,11 +95,19 @@ Here is an example: .. includecode:: code/ActorDocSpec.scala#creating-constructor +Props +----- + +``Props`` is a configuration object to specify configuration options for the creation +of actors. Here are some examples on how to create a ``Props`` instance. + +.. includecode:: code/ActorDocSpec.scala#creating-props-config + + Creating Actors with Props -------------------------- -``Props`` is a configuration object to specify additional things for the actor to -be created, such as the ``MessageDispatcher``. +Actors are created by passing in the ``Props`` object into the ``actorOf`` factory method. .. 
includecode:: code/ActorDocSpec.scala#creating-props diff --git a/akka-docs/scala/code/ActorDocSpec.scala b/akka-docs/scala/code/ActorDocSpec.scala index 5592572443..81f25af8b3 100644 --- a/akka-docs/scala/code/ActorDocSpec.scala +++ b/akka-docs/scala/code/ActorDocSpec.scala @@ -1,5 +1,7 @@ package akka.docs.actor +import akka.actor.Timeout + //#imports1 import akka.actor.Actor import akka.actor.Props @@ -13,6 +15,7 @@ import akka.actor.ActorSystem import org.scalatest.{ BeforeAndAfterAll, WordSpec } import org.scalatest.matchers.MustMatchers import akka.testkit._ +import akka.util._ import akka.util.duration._ //#my-actor @@ -185,6 +188,23 @@ class ActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { myActor.stop() } + "creating a Props config" in { + val dispatcher = system.dispatcherFactory.lookup("my-dispatcher") + //#creating-props-config + import akka.actor.Props + val props1 = Props() + val props2 = Props[MyActor] + val props3 = Props(new MyActor) + val props4 = Props( + creator = { () ⇒ new MyActor }, + dispatcher = dispatcher, + timeout = Timeout(100)) + val props5 = props1.withCreator(new MyActor) + val props6 = props5.withDispatcher(dispatcher) + val props7 = props6.withTimeout(Timeout(100)) + //#creating-props-config + } + "creating actor with Props" in { //#creating-props import akka.actor.Props diff --git a/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala b/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala index 0a6c0ed04e..4d19f47db4 100644 --- a/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala +++ b/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala @@ -50,7 +50,7 @@ object Pi extends App { var start: Long = _ //#create-router - val router = context.actorOf(Props(new Worker).withRouter(RoundRobinRouter(nrOfInstances = nrOfWorkers)), "pi") + val router = context.actorOf(Props[Worker].withRouter(RoundRobinRouter(nrOfInstances = nrOfWorkers)), "pi") //#create-router //#master-receive From 6045af59497d74e1224a0d0cd0a6fa08d339038a Mon Sep 17 00:00:00 2001 From: Patrik Nordwall Date: Wed, 14 Dec 2011 14:08:30 +0100 Subject: [PATCH 10/34] Updated to config lib 5302c1e --- .../main/java/com/typesafe/config/Config.java | 11 +- .../com/typesafe/config/ConfigException.java | 6 +- .../com/typesafe/config/ConfigFactory.java | 6 +- .../com/typesafe/config/ConfigMergeable.java | 10 +- .../com/typesafe/config/ConfigObject.java | 18 +- .../com/typesafe/config/ConfigOrigin.java | 17 +- .../typesafe/config/ConfigResolveOptions.java | 4 +- .../com/typesafe/config/ConfigSyntax.java | 8 +- .../java/com/typesafe/config/ConfigUtil.java | 8 +- .../typesafe/config/ConfigValueFactory.java | 6 +- .../config/impl/AbstractConfigObject.java | 18 +- .../config/impl/AbstractConfigValue.java | 21 +- .../typesafe/config/impl/ConfigBoolean.java | 5 + .../config/impl/ConfigDelayedMerge.java | 11 + .../config/impl/ConfigDelayedMergeObject.java | 6 +- .../typesafe/config/impl/ConfigDouble.java | 5 + .../com/typesafe/config/impl/ConfigInt.java | 5 + .../com/typesafe/config/impl/ConfigLong.java | 5 + .../com/typesafe/config/impl/ConfigNull.java | 5 + .../typesafe/config/impl/ConfigNumber.java | 2 +- .../typesafe/config/impl/ConfigString.java | 5 + .../config/impl/ConfigSubstitution.java | 4 +- .../java/com/typesafe/config/impl/Parser.java | 304 ++++++++++++------ .../config/impl/SimpleConfigList.java | 13 + .../config/impl/SimpleConfigObject.java | 5 +- .../config/impl/SimpleConfigOrigin.java | 50 ++- .../com/typesafe/config/impl/TokenType.java | 3 +- 
.../com/typesafe/config/impl/Tokenizer.java | 167 +++++----- .../java/com/typesafe/config/impl/Tokens.java | 57 +++- 29 files changed, 545 insertions(+), 240 deletions(-) diff --git a/akka-actor/src/main/java/com/typesafe/config/Config.java b/akka-actor/src/main/java/com/typesafe/config/Config.java index 44eebe1158..d3496c73ef 100644 --- a/akka-actor/src/main/java/com/typesafe/config/Config.java +++ b/akka-actor/src/main/java/com/typesafe/config/Config.java @@ -21,7 +21,7 @@ import java.util.Set; * is a key in a JSON object; it's just a string that's the key in a map. A * "path" is a parseable expression with a syntax and it refers to a series of * keys. Path expressions are described in the spec for + * href="https://github.com/typesafehub/config/blob/master/HOCON.md">spec for * Human-Optimized Config Object Notation. In brief, a path is * period-separated so "a.b.c" looks for key c in object b in object a in the * root object. Sometimes double quotes are needed around special characters in @@ -97,7 +97,7 @@ public interface Config extends ConfigMergeable { /** * Returns a replacement config with all substitutions (the * ${foo.bar} syntax, see the + * href="https://github.com/typesafehub/config/blob/master/HOCON.md">the * spec) resolved. Substitutions are looked up using this * Config as the root object, that is, a substitution * ${foo.bar} will be replaced with the result of @@ -395,7 +395,8 @@ public interface Config extends ConfigMergeable { * Gets a value as a size in bytes (parses special strings like "128M"). If * the value is already a number, then it's left alone; if it's a string, * it's parsed understanding unit suffixes such as "128K", as documented in - * the the + * the the * spec. * * @param path @@ -414,9 +415,9 @@ public interface Config extends ConfigMergeable { * Get value as a duration in milliseconds. If the value is already a * number, then it's left alone; if it's a string, it's parsed understanding * units suffixes like "10m" or "5ns" as documented in the the + * href="https://github.com/typesafehub/config/blob/master/HOCON.md">the * spec. - * + * * @param path * path expression * @return the duration value at the requested path, in milliseconds diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigException.java b/akka-actor/src/main/java/com/typesafe/config/ConfigException.java index 8c23d09533..b8dcb8ca00 100644 --- a/akka-actor/src/main/java/com/typesafe/config/ConfigException.java +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigException.java @@ -5,7 +5,8 @@ package com.typesafe.config; /** - * All exceptions thrown by the library are subclasses of ConfigException. + * All exceptions thrown by the library are subclasses of + * ConfigException. 
*/ public abstract class ConfigException extends RuntimeException { private static final long serialVersionUID = 1L; @@ -338,6 +339,9 @@ public abstract class ConfigException extends RuntimeException { sb.append(p.problem()); sb.append(", "); } + if (sb.length() == 0) + throw new ConfigException.BugOrBroken( + "ValidationFailed must have a non-empty list of problems"); sb.setLength(sb.length() - 2); // chop comma and space return sb.toString(); diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigFactory.java b/akka-actor/src/main/java/com/typesafe/config/ConfigFactory.java index dc851d7f2b..df5e762a5c 100644 --- a/akka-actor/src/main/java/com/typesafe/config/ConfigFactory.java +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigFactory.java @@ -295,18 +295,18 @@ public final class ConfigFactory { /** * Converts a Java {@link java.util.Properties} object to a * {@link ConfigObject} using the rules documented in the HOCON + * href="https://github.com/typesafehub/config/blob/master/HOCON.md">HOCON * spec. The keys in the Properties object are split on the * period character '.' and treated as paths. The values will all end up as * string values. If you have both "a=foo" and "a.b=bar" in your properties * file, so "a" is both the object containing "b" and the string "foo", then * the string value is dropped. - * + * *

* If you want to have System.getProperties() as a * ConfigObject, it's better to use the {@link #systemProperties()} method * which returns a cached global singleton. - * + * * @param properties * a Java Properties object * @param options diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigMergeable.java b/akka-actor/src/main/java/com/typesafe/config/ConfigMergeable.java index c4280e93ea..1214db8c44 100644 --- a/akka-actor/src/main/java/com/typesafe/config/ConfigMergeable.java +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigMergeable.java @@ -24,19 +24,19 @@ public interface ConfigMergeable { * method (they need to merge the fallback keys into themselves). All other * values just return the original value, since they automatically override * any fallback. - * + * *

* The semantics of merging are described in the spec for - * HOCON. - * + * href="https://github.com/typesafehub/config/blob/master/HOCON.md">spec + * for HOCON. + * *

* Note that objects do not merge "across" non-objects; if you write * object.withFallback(nonObject).withFallback(otherObject), * then otherObject will simply be ignored. This is an * intentional part of how merging works. Both non-objects, and any object * which has fallen back to a non-object, block subsequent fallbacks. - * + * * @param other * an object whose keys should be used if the keys are not * present in this one diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigObject.java b/akka-actor/src/main/java/com/typesafe/config/ConfigObject.java index 54cce1c39f..bb4d14da89 100644 --- a/akka-actor/src/main/java/com/typesafe/config/ConfigObject.java +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigObject.java @@ -8,38 +8,38 @@ import java.util.Map; /** * Subtype of {@link ConfigValue} representing an object (dictionary, map) * value, as in JSON's { "a" : 42 } syntax. - * + * *

* {@code ConfigObject} implements {@code java.util.Map} so * you can use it like a regular Java map. Or call {@link #unwrapped()} to * unwrap the map to a map with plain Java values rather than * {@code ConfigValue}. - * + * *

* Like all {@link ConfigValue} subtypes, {@code ConfigObject} is immutable. * This makes it threadsafe and you never have to create "defensive copies." The * mutator methods from {@link java.util.Map} all throw * {@link java.lang.UnsupportedOperationException}. - * + * *

* The {@link ConfigValue#valueType} method on an object returns * {@link ConfigValueType#OBJECT}. - * + * *

* In most cases you want to use the {@link Config} interface rather than this * one. Call {@link #toConfig()} to convert a {@code ConfigObject} to a * {@code Config}. - * + * *

* The API for a {@code ConfigObject} is in terms of keys, while the API for a * {@link Config} is in terms of path expressions. Conceptually, * {@code ConfigObject} is a tree of maps from keys to values, while a - * {@code ConfigObject} is a one-level map from paths to values. - * + * {@code Config} is a one-level map from paths to values. + * *

* Use {@link ConfigUtil#joinPath} and {@link ConfigUtil#splitPath} to convert * between path expressions and individual path elements (keys). - * + * *

* A {@code ConfigObject} may contain null values, which will have * {@link ConfigValue#valueType()} equal to {@link ConfigValueType#NULL}. If @@ -47,7 +47,7 @@ import java.util.Map; * file (or wherever this value tree came from). If {@code get()} returns a * {@link ConfigValue} with type {@code ConfigValueType#NULL} then the key was * set to null explicitly in the config file. - * + * *

* Do not implement {@code ConfigObject}; it should only be implemented * by the config library. Arbitrary implementations will not work because the diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigOrigin.java b/akka-actor/src/main/java/com/typesafe/config/ConfigOrigin.java index 013d91eb9e..fbc1fe17c3 100644 --- a/akka-actor/src/main/java/com/typesafe/config/ConfigOrigin.java +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigOrigin.java @@ -4,6 +4,7 @@ package com.typesafe.config; import java.net.URL; +import java.util.List; /** @@ -12,13 +13,13 @@ import java.net.URL; * with {@link ConfigValue#origin}. Exceptions may have an origin, see * {@link ConfigException#origin}, but be careful because * ConfigException.origin() may return null. - * + * *

* It's best to use this interface only for debugging; its accuracy is * "best effort" rather than guaranteed, and a potentially-noticeable amount of * memory could probably be saved if origins were not kept around, so in the * future there might be some option to discard origins. - * + * *

* Do not implement this interface; it should only be implemented by * the config library. Arbitrary implementations will not work because the @@ -66,4 +67,16 @@ public interface ConfigOrigin { * @return line number or -1 if none is available */ public int lineNumber(); + + /** + * Returns any comments that appeared to "go with" this place in the file. + * Often an empty list, but never null. The details of this are subject to + * change, but at the moment comments that are immediately before an array + * element or object field, with no blank line after the comment, "go with" + * that element or field. + * + * @return any comments that seemed to "go with" this origin, empty list if + * none + */ + public List comments(); } diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigResolveOptions.java b/akka-actor/src/main/java/com/typesafe/config/ConfigResolveOptions.java index 37c7b36d5b..3adb589f1d 100644 --- a/akka-actor/src/main/java/com/typesafe/config/ConfigResolveOptions.java +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigResolveOptions.java @@ -6,11 +6,13 @@ package com.typesafe.config; /** * A set of options related to resolving substitutions. Substitutions use the * ${foo.bar} syntax and are documented in the HOCON spec. + * href="https://github.com/typesafehub/config/blob/master/HOCON.md">HOCON + * spec. *

* This object is immutable, so the "setters" return a new object. *

* Here is an example of creating a custom {@code ConfigResolveOptions}: + * *

  *     ConfigResolveOptions options = ConfigResolveOptions.defaults()
  *         .setUseSystemEnvironment(false)
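The snippet below is a small, self-contained sketch of how such options could be passed when resolving substitutions; it assumes the ``ConfigFactory.parseString`` entry point and the ``Config.resolve(ConfigResolveOptions)`` overload are available in this bundled version, and ``ResolveExample`` is an illustrative class name:

.. code-block:: java

    import com.typesafe.config.Config;
    import com.typesafe.config.ConfigFactory;
    import com.typesafe.config.ConfigResolveOptions;

    public class ResolveExample {
        public static void main(String[] args) {
            Config unresolved = ConfigFactory.parseString("a = 1\nb = ${a}");
            // each "setter" returns a new immutable options object
            ConfigResolveOptions options = ConfigResolveOptions.defaults()
                    .setUseSystemEnvironment(false);
            Config resolved = unresolved.resolve(options);
            System.out.println(resolved.getInt("b")); // prints 1
        }
    }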
diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigSyntax.java b/akka-actor/src/main/java/com/typesafe/config/ConfigSyntax.java
index 58e7fc020b..7626a92e6d 100644
--- a/akka-actor/src/main/java/com/typesafe/config/ConfigSyntax.java
+++ b/akka-actor/src/main/java/com/typesafe/config/ConfigSyntax.java
@@ -5,8 +5,8 @@ package com.typesafe.config;
 
 /**
  * The syntax of a character stream, JSON, HOCON aka
- * ".conf", or HOCON
+ * aka ".conf", or Java properties.
  *
@@ -19,8 +19,8 @@ public enum ConfigSyntax {
     JSON,
     /**
      * The JSON-superset HOCON
-     * format.
+     * href="https://github.com/typesafehub/config/blob/master/HOCON.md"
+     * >HOCON format.
      */
     CONF,
     /**
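As an illustration of the syntaxes listed in this enum, the following sketch forces strict JSON parsing through ``ConfigParseOptions`` instead of the default ".conf" (HOCON) syntax; it assumes the ``setSyntax`` option and the two-argument ``parseString`` overload exist in this bundled version, and ``SyntaxExample`` is an illustrative name:

.. code-block:: java

    import com.typesafe.config.Config;
    import com.typesafe.config.ConfigFactory;
    import com.typesafe.config.ConfigParseOptions;
    import com.typesafe.config.ConfigSyntax;

    public class SyntaxExample {
        public static void main(String[] args) {
            // reject HOCON extensions, accept only strict JSON
            ConfigParseOptions json = ConfigParseOptions.defaults().setSyntax(ConfigSyntax.JSON);
            Config config = ConfigFactory.parseString("{ \"port\" : 2552 }", json);
            System.out.println(config.getInt("port"));
        }
    }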
diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigUtil.java b/akka-actor/src/main/java/com/typesafe/config/ConfigUtil.java
index 1aa463f46c..cc936923fe 100644
--- a/akka-actor/src/main/java/com/typesafe/config/ConfigUtil.java
+++ b/akka-actor/src/main/java/com/typesafe/config/ConfigUtil.java
@@ -4,6 +4,10 @@ import java.util.List;
 
 import com.typesafe.config.impl.ConfigImplUtil;
 
+/**
+ * Contains static utility methods.
+ * 
+ */
 public final class ConfigUtil {
     private ConfigUtil() {
 
@@ -41,7 +45,7 @@ public final class ConfigUtil {
      * elements as needed and then joining them separated by a period. A path
      * expression is usable with a {@link Config}, while individual path
      * elements are usable with a {@link ConfigObject}.
-     * 
+     *
      * @param elements
      *            the keys in the path
      * @return a path expression
@@ -57,7 +61,7 @@ public final class ConfigUtil {
      * and unquoting the individual path elements. A path expression is usable
      * with a {@link Config}, while individual path elements are usable with a
      * {@link ConfigObject}.
-     * 
+     *
      * @param path
      *            a path expression
      * @return the individual keys in the path
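To make the path-expression/key distinction concrete, here is a short sketch using the two helpers documented above (``PathExample`` is an illustrative name; the exact quoting in the printed output depends on the library's escaping rules):

.. code-block:: java

    import java.util.List;

    import com.typesafe.config.ConfigUtil;

    public class PathExample {
        public static void main(String[] args) {
            // a key that itself contains a period must be quoted inside a path expression
            String path = ConfigUtil.joinPath("akka", "actor.provider");
            System.out.println(path); // e.g. akka."actor.provider"

            List<String> keys = ConfigUtil.splitPath(path);
            System.out.println(keys); // e.g. [akka, actor.provider]
        }
    }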
diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigValueFactory.java b/akka-actor/src/main/java/com/typesafe/config/ConfigValueFactory.java
index 2f381f9ad1..14c2bff8f7 100644
--- a/akka-actor/src/main/java/com/typesafe/config/ConfigValueFactory.java
+++ b/akka-actor/src/main/java/com/typesafe/config/ConfigValueFactory.java
@@ -8,9 +8,9 @@ import java.util.Map;
 import com.typesafe.config.impl.ConfigImpl;
 
 /**
- * This class holds some static factory methods for building ConfigValue. See
- * also ConfigFactory which has methods for parsing files and certain in-memory
- * data structures.
+ * This class holds some static factory methods for building {@link ConfigValue}
+ * instances. See also {@link ConfigFactory} which has methods for parsing files
+ * and certain in-memory data structures.
  */
 public final class ConfigValueFactory {
     private ConfigValueFactory() {
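
A minimal sketch of the factory methods the new javadoc refers to (fromAnyRef and fromMap are assumed to be the relevant entry points in this version):

    import java.util.Collections;
    import com.typesafe.config.ConfigObject;
    import com.typesafe.config.ConfigValue;
    import com.typesafe.config.ConfigValueFactory;

    public class ValueFactoryExample {
        public static void main(String[] args) {
            // wrap a plain Java value in a ConfigValue
            ConfigValue v = ConfigValueFactory.fromAnyRef("hello");
            // wrap a Map in a ConfigObject
            ConfigObject o = ConfigValueFactory.fromMap(Collections.singletonMap("answer", 42));
            System.out.println(v.unwrapped() + " / " + o.unwrapped());
        }
    }
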
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigObject.java b/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigObject.java
index 428a7b2b3f..a21ccd81f7 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigObject.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigObject.java
@@ -111,12 +111,12 @@ abstract class AbstractConfigObject extends AbstractConfigValue implements
         return ConfigValueType.OBJECT;
     }
 
-    protected abstract AbstractConfigObject newCopy(ResolveStatus status,
-            boolean ignoresFallbacks);
+    protected abstract AbstractConfigObject newCopy(ResolveStatus status, boolean ignoresFallbacks,
+            ConfigOrigin origin);
 
     @Override
-    protected AbstractConfigObject newCopy(boolean ignoresFallbacks) {
-            return newCopy(resolveStatus(), ignoresFallbacks);
+    protected AbstractConfigObject newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
+        return newCopy(resolveStatus(), ignoresFallbacks, origin);
     }
 
     @Override
@@ -173,7 +173,7 @@ abstract class AbstractConfigObject extends AbstractConfigValue implements
             return new SimpleConfigObject(mergeOrigins(this, fallback), merged, newResolveStatus,
                     newIgnoresFallbacks);
         else if (newResolveStatus != resolveStatus() || newIgnoresFallbacks != ignoresFallbacks())
-            return newCopy(newResolveStatus, newIgnoresFallbacks);
+            return newCopy(newResolveStatus, newIgnoresFallbacks, origin());
         else
             return this;
     }
@@ -234,7 +234,7 @@ abstract class AbstractConfigObject extends AbstractConfigValue implements
             }
         }
         if (changes == null) {
-            return newCopy(newResolveStatus, ignoresFallbacks());
+            return newCopy(newResolveStatus, ignoresFallbacks(), origin());
         } else {
             Map<String, AbstractConfigValue> modified = new HashMap<String, AbstractConfigValue>();
             for (String k : keySet()) {
@@ -306,6 +306,12 @@ abstract class AbstractConfigObject extends AbstractConfigValue implements
                     sb.append("# ");
                     sb.append(v.origin().description());
                     sb.append("\n");
+                    for (String comment : v.origin().comments()) {
+                        indent(sb, indent + 1);
+                        sb.append("# ");
+                        sb.append(comment);
+                        sb.append("\n");
+                    }
                     indent(sb, indent + 1);
                 }
                 v.render(sb, indent + 1, k, formatted);
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java b/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java
index 68ab5cc316..e51f4c6067 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java
@@ -18,14 +18,14 @@ import com.typesafe.config.ConfigValue;
  */
 abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
 
-    final private ConfigOrigin origin;
+    final private SimpleConfigOrigin origin;
 
     AbstractConfigValue(ConfigOrigin origin) {
-        this.origin = origin;
+        this.origin = (SimpleConfigOrigin) origin;
     }
 
     @Override
-    public ConfigOrigin origin() {
+    public SimpleConfigOrigin origin() {
         return this.origin;
     }
 
@@ -76,9 +76,7 @@ abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
         return this;
     }
 
-    protected AbstractConfigValue newCopy(boolean ignoresFallbacks) {
-        return this;
-    }
+    protected abstract AbstractConfigValue newCopy(boolean ignoresFallbacks, ConfigOrigin origin);
 
     // this is virtualized rather than a field because only some subclasses
     // really need to store the boolean, and they may be able to pack it
@@ -105,6 +103,13 @@ abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
         throw badMergeException();
     }
 
+    public AbstractConfigValue withOrigin(ConfigOrigin origin) {
+        if (this.origin == origin)
+            return this;
+        else
+            return newCopy(ignoresFallbacks(), origin);
+    }
+
     @Override
     public AbstractConfigValue withFallback(ConfigMergeable mergeable) {
         if (ignoresFallbacks()) {
@@ -118,7 +123,7 @@ abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
                 AbstractConfigObject fallback = (AbstractConfigObject) other;
                 if (fallback.resolveStatus() == ResolveStatus.RESOLVED && fallback.isEmpty()) {
                     if (fallback.ignoresFallbacks())
-                        return newCopy(true /* ignoresFallbacks */);
+                        return newCopy(true /* ignoresFallbacks */, origin);
                     else
                         return this;
                 } else {
@@ -128,7 +133,7 @@ abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
                 // falling back to a non-object doesn't merge anything, and also
                 // prohibits merging any objects that we fall back to later.
                 // so we have to switch to ignoresFallbacks mode.
-                return newCopy(true /* ignoresFallbacks */);
+                return newCopy(true /* ignoresFallbacks */, origin);
             }
         }
     }
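
The newCopy/withOrigin contract introduced above follows a plain immutable-copy pattern; a stripped-down, hypothetical illustration (ValueSketch is not part of the library):

    // Values are immutable, so attaching a different origin means asking the
    // concrete subclass to rebuild itself via newCopy.
    abstract class ValueSketch {
        private final String origin; // stands in for SimpleConfigOrigin

        ValueSketch(String origin) {
            this.origin = origin;
        }

        String origin() {
            return origin;
        }

        protected abstract ValueSketch newCopy(String newOrigin);

        ValueSketch withOrigin(String newOrigin) {
            // reuse this instance when the origin is unchanged
            return newOrigin.equals(origin) ? this : newCopy(newOrigin);
        }
    }
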
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigBoolean.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigBoolean.java
index d45dbd1326..c926c0c942 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigBoolean.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigBoolean.java
@@ -29,4 +29,9 @@ final class ConfigBoolean extends AbstractConfigValue {
     String transformToString() {
         return value ? "true" : "false";
     }
+
+    @Override
+    protected ConfigBoolean newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
+        return new ConfigBoolean(origin, value);
+    }
 }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMerge.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMerge.java
index 9846cc57f2..4cca7834bd 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMerge.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMerge.java
@@ -107,6 +107,11 @@ final class ConfigDelayedMerge extends AbstractConfigValue implements
         return ignoresFallbacks;
     }
 
+    @Override
+    protected AbstractConfigValue newCopy(boolean newIgnoresFallbacks, ConfigOrigin newOrigin) {
+        return new ConfigDelayedMerge(newOrigin, stack, newIgnoresFallbacks);
+    }
+
     @Override
     protected final ConfigDelayedMerge mergedWithTheUnmergeable(Unmergeable fallback) {
         if (ignoresFallbacks)
@@ -196,6 +201,12 @@ final class ConfigDelayedMerge extends AbstractConfigValue implements
                 i += 1;
                 sb.append(v.origin().description());
                 sb.append("\n");
+                for (String comment : v.origin().comments()) {
+                    indent(sb, indent);
+                    sb.append("# ");
+                    sb.append(comment);
+                    sb.append("\n");
+                }
                 indent(sb, indent);
             }
 
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMergeObject.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMergeObject.java
index 5669f62f34..fe970d59c8 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMergeObject.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMergeObject.java
@@ -49,12 +49,12 @@ class ConfigDelayedMergeObject extends AbstractConfigObject implements
     }
 
     @Override
-    protected ConfigDelayedMergeObject newCopy(ResolveStatus status,
-            boolean ignoresFallbacks) {
+    protected ConfigDelayedMergeObject newCopy(ResolveStatus status, boolean ignoresFallbacks,
+            ConfigOrigin origin) {
         if (status != resolveStatus())
             throw new ConfigException.BugOrBroken(
                     "attempt to create resolved ConfigDelayedMergeObject");
-        return new ConfigDelayedMergeObject(origin(), stack, ignoresFallbacks);
+        return new ConfigDelayedMergeObject(origin, stack, ignoresFallbacks);
     }
 
     @Override
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDouble.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDouble.java
index 3317974453..c26d3cd6a9 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDouble.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDouble.java
@@ -43,4 +43,9 @@ final class ConfigDouble extends ConfigNumber {
     protected double doubleValue() {
         return value;
     }
+
+    @Override
+    protected ConfigDouble newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
+        return new ConfigDouble(origin, value, originalText);
+    }
 }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigInt.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigInt.java
index 4ce4a58545..440b5ae8cf 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigInt.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigInt.java
@@ -43,4 +43,9 @@ final class ConfigInt extends ConfigNumber {
     protected double doubleValue() {
         return value;
     }
+
+    @Override
+    protected ConfigInt newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
+        return new ConfigInt(origin, value, originalText);
+    }
 }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigLong.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigLong.java
index feb3897bb3..6a72bc4cab 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigLong.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigLong.java
@@ -43,4 +43,9 @@ final class ConfigLong extends ConfigNumber {
     protected double doubleValue() {
         return value;
     }
+
+    @Override
+    protected ConfigLong newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
+        return new ConfigLong(origin, value, originalText);
+    }
 }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNull.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNull.java
index a45d2dbc40..fbdc21d7a5 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNull.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNull.java
@@ -39,4 +39,9 @@ final class ConfigNull extends AbstractConfigValue {
     protected void render(StringBuilder sb, int indent, boolean formatted) {
         sb.append("null");
     }
+
+    @Override
+    protected ConfigNull newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
+        return new ConfigNull(origin);
+    }
 }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNumber.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNumber.java
index 3c01d9b950..4a6bbd0b15 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNumber.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNumber.java
@@ -11,7 +11,7 @@ abstract class ConfigNumber extends AbstractConfigValue {
     // a sentence) we always have it exactly as the person typed it into the
     // config file. It's purely cosmetic; equals/hashCode don't consider this
     // for example.
-    final private String originalText;
+    final protected String originalText;
 
     protected ConfigNumber(ConfigOrigin origin, String originalText) {
         super(origin);
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigString.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigString.java
index 0d1bc97920..9b41e7f7ab 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigString.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigString.java
@@ -34,4 +34,9 @@ final class ConfigString extends AbstractConfigValue {
     protected void render(StringBuilder sb, int indent, boolean formatted) {
         sb.append(ConfigImplUtil.renderJsonString(value));
     }
+
+    @Override
+    protected ConfigString newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
+        return new ConfigString(origin, value);
+    }
 }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigSubstitution.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigSubstitution.java
index 9a8590bade..f4441b81a5 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigSubstitution.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigSubstitution.java
@@ -61,8 +61,8 @@ final class ConfigSubstitution extends AbstractConfigValue implements
     }
 
     @Override
-    protected ConfigSubstitution newCopy(boolean ignoresFallbacks) {
-        return new ConfigSubstitution(origin(), pieces, prefixLength, ignoresFallbacks);
+    protected ConfigSubstitution newCopy(boolean ignoresFallbacks, ConfigOrigin newOrigin) {
+        return new ConfigSubstitution(newOrigin, pieces, prefixLength, ignoresFallbacks);
     }
 
     @Override
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Parser.java b/akka-actor/src/main/java/com/typesafe/config/impl/Parser.java
index 6f0de1211c..5df0314fe6 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/Parser.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/Parser.java
@@ -32,9 +32,53 @@ final class Parser {
         return context.parse();
     }
 
+    static private final class TokenWithComments {
+        final Token token;
+        final List<Token> comments;
+
+        TokenWithComments(Token token, List<Token> comments) {
+            this.token = token;
+            this.comments = comments;
+        }
+
+        TokenWithComments(Token token) {
+            this(token, Collections.<Token> emptyList());
+        }
+
+        TokenWithComments prepend(List<Token> earlier) {
+            if (this.comments.isEmpty()) {
+                return new TokenWithComments(token, earlier);
+            } else {
+                List<Token> merged = new ArrayList<Token>();
+                merged.addAll(earlier);
+                merged.addAll(comments);
+                return new TokenWithComments(token, merged);
+            }
+        }
+
+        SimpleConfigOrigin setComments(SimpleConfigOrigin origin) {
+            if (comments.isEmpty()) {
+                return origin;
+            } else {
+                List<String> newComments = new ArrayList<String>();
+                for (Token c : comments) {
+                    newComments.add(Tokens.getCommentText(c));
+                }
+                return origin.setComments(newComments);
+            }
+        }
+
+        @Override
+        public String toString() {
+            // this ends up in user-visible error messages, so we don't want the
+            // comments
+            return token.toString();
+        }
+    }
+
     static private final class ParseContext {
         private int lineNumber;
-        final private Stack<Token> buffer;
+        final private Stack<TokenWithComments> buffer;
         final private Iterator<Token> tokens;
         final private ConfigIncluder includer;
         final private ConfigIncludeContext includeContext;
@@ -50,7 +94,7 @@ final class Parser {
                 Iterator<Token> tokens, ConfigIncluder includer,
                 ConfigIncludeContext includeContext) {
             lineNumber = 1;
-            buffer = new Stack<Token>();
+            buffer = new Stack<TokenWithComments>();
             this.tokens = tokens;
             this.flavor = flavor;
             this.baseOrigin = origin;
@@ -60,14 +104,67 @@ final class Parser {
             this.equalsCount = 0;
         }
 
-        private Token nextToken() {
-            Token t = null;
-            if (buffer.isEmpty()) {
-                t = tokens.next();
-            } else {
-                t = buffer.pop();
+        private void consolidateCommentBlock(Token commentToken) {
+            // a comment block "goes with" the following token
+            // unless it's separated from it by a blank line.
+            // we want to build a list of newline tokens followed
+            // by a non-newline non-comment token; with all comments
+            // associated with that final non-newline non-comment token.
+            List<Token> newlines = new ArrayList<Token>();
+            List<Token> comments = new ArrayList<Token>();
+
+            Token previous = null;
+            Token next = commentToken;
+            while (true) {
+                if (Tokens.isNewline(next)) {
+                    if (previous != null && Tokens.isNewline(previous)) {
+                        // blank line; drop all comments to this point and
+                        // start a new comment block
+                        comments.clear();
+                    }
+                    newlines.add(next);
+                } else if (Tokens.isComment(next)) {
+                    comments.add(next);
+                } else {
+                    // a non-newline non-comment token
+                    break;
+                }
+
+                previous = next;
+                next = tokens.next();
             }
 
+            // put our concluding token in the queue with all the comments
+            // attached
+            buffer.push(new TokenWithComments(next, comments));
+
+            // now put all the newlines back in front of it
+            ListIterator<Token> li = newlines.listIterator(newlines.size());
+            while (li.hasPrevious()) {
+                buffer.push(new TokenWithComments(li.previous()));
+            }
+        }
+
+        private TokenWithComments popToken() {
+            if (buffer.isEmpty()) {
+                Token t = tokens.next();
+                if (Tokens.isComment(t)) {
+                    consolidateCommentBlock(t);
+                    return buffer.pop();
+                } else {
+                    return new TokenWithComments(t);
+                }
+            } else {
+                return buffer.pop();
+            }
+        }
+
+        private TokenWithComments nextToken() {
+            TokenWithComments withComments = null;
+
+            withComments = popToken();
+            Token t = withComments.token;
+
             if (Tokens.isProblem(t)) {
                 ConfigOrigin origin = t.origin();
                 String message = Tokens.getProblemMessage(t);
@@ -79,32 +176,35 @@ final class Parser {
                     message = addKeyName(message);
                 }
                 throw new ConfigException.Parse(origin, message, cause);
-            }
-
-            if (flavor == ConfigSyntax.JSON) {
-                if (Tokens.isUnquotedText(t)) {
-                    throw parseError(addKeyName("Token not allowed in valid JSON: '"
-                            + Tokens.getUnquotedText(t) + "'"));
-                } else if (Tokens.isSubstitution(t)) {
-                    throw parseError(addKeyName("Substitutions (${} syntax) not allowed in JSON"));
+            } else {
+                if (flavor == ConfigSyntax.JSON) {
+                    if (Tokens.isUnquotedText(t)) {
+                        throw parseError(addKeyName("Token not allowed in valid JSON: '"
+                                + Tokens.getUnquotedText(t) + "'"));
+                    } else if (Tokens.isSubstitution(t)) {
+                        throw parseError(addKeyName("Substitutions (${} syntax) not allowed in JSON"));
+                    }
                 }
-            }
 
-            return t;
+                return withComments;
+            }
         }
 
-        private void putBack(Token token) {
+        private void putBack(TokenWithComments token) {
             buffer.push(token);
         }
 
-        private Token nextTokenIgnoringNewline() {
-            Token t = nextToken();
-            while (Tokens.isNewline(t)) {
+        private TokenWithComments nextTokenIgnoringNewline() {
+            TokenWithComments t = nextToken();
+
+            while (Tokens.isNewline(t.token)) {
                 // line number tokens have the line that was _ended_ by the
                 // newline, so we have to add one.
-                lineNumber = t.lineNumber() + 1;
+                lineNumber = t.token.lineNumber() + 1;
+
                 t = nextToken();
             }
+
             return t;
         }
 
@@ -116,8 +216,8 @@ final class Parser {
         // is left just after the comma or the newline.
         private boolean checkElementSeparator() {
             if (flavor == ConfigSyntax.JSON) {
-                Token t = nextTokenIgnoringNewline();
-                if (t == Tokens.COMMA) {
+                TokenWithComments t = nextTokenIgnoringNewline();
+                if (t.token == Tokens.COMMA) {
                     return true;
                 } else {
                     putBack(t);
@@ -125,15 +225,16 @@ final class Parser {
                 }
             } else {
                 boolean sawSeparatorOrNewline = false;
-                Token t = nextToken();
+                TokenWithComments t = nextToken();
                 while (true) {
-                    if (Tokens.isNewline(t)) {
+                    if (Tokens.isNewline(t.token)) {
                         // newline number is the line just ended, so add one
-                        lineNumber = t.lineNumber() + 1;
+                        lineNumber = t.token.lineNumber() + 1;
                         sawSeparatorOrNewline = true;
+
                         // we want to continue to also eat
                         // a comma if there is one.
-                    } else if (t == Tokens.COMMA) {
+                    } else if (t.token == Tokens.COMMA) {
                         return true;
                     } else {
                         // non-newline-or-comma
@@ -154,12 +255,17 @@ final class Parser {
                 return;
 
             List<Token> values = null; // create only if we have value tokens
-            Token t = nextTokenIgnoringNewline(); // ignore a newline up front
-            while (Tokens.isValue(t) || Tokens.isUnquotedText(t)
-                    || Tokens.isSubstitution(t)) {
-                if (values == null)
+            TokenWithComments firstValueWithComments = null;
+            TokenWithComments t = nextTokenIgnoringNewline(); // ignore a
+                                                              // newline up
+                                                              // front
+            while (Tokens.isValue(t.token) || Tokens.isUnquotedText(t.token)
+                    || Tokens.isSubstitution(t.token)) {
+                if (values == null) {
                     values = new ArrayList<Token>();
-                values.add(t);
+                    firstValueWithComments = t;
+                }
+                values.add(t.token);
                 t = nextToken(); // but don't consolidate across a newline
             }
             // the last one wasn't a value token
@@ -168,9 +274,9 @@ final class Parser {
             if (values == null)
                 return;
 
-            if (values.size() == 1 && Tokens.isValue(values.get(0))) {
+            if (values.size() == 1 && Tokens.isValue(firstValueWithComments.token)) {
                 // a single value token requires no consolidation
-                putBack(values.get(0));
+                putBack(firstValueWithComments);
                 return;
             }
 
@@ -235,7 +341,7 @@ final class Parser {
                         firstOrigin, minimized));
             }
 
-            putBack(consolidated);
+            putBack(new TokenWithComments(consolidated, firstValueWithComments.comments));
         }
 
         private ConfigOrigin lineOrigin() {
@@ -309,17 +415,23 @@ final class Parser {
                 return part + ")";
         }
 
-        private AbstractConfigValue parseValue(Token token) {
-            if (Tokens.isValue(token)) {
-                return Tokens.getValue(token);
-            } else if (token == Tokens.OPEN_CURLY) {
-                return parseObject(true);
-            } else if (token == Tokens.OPEN_SQUARE) {
-                return parseArray();
+        private AbstractConfigValue parseValue(TokenWithComments t) {
+            AbstractConfigValue v;
+
+            if (Tokens.isValue(t.token)) {
+                v = Tokens.getValue(t.token);
+            } else if (t.token == Tokens.OPEN_CURLY) {
+                v = parseObject(true);
+            } else if (t.token == Tokens.OPEN_SQUARE) {
+                v = parseArray();
             } else {
-                throw parseError(addQuoteSuggestion(token.toString(),
-                        "Expecting a value but got wrong token: " + token));
+                throw parseError(addQuoteSuggestion(t.token.toString(),
+                        "Expecting a value but got wrong token: " + t.token));
             }
+
+            v = v.withOrigin(t.setComments(v.origin()));
+
+            return v;
         }
 
         private static AbstractConfigObject createValueUnderPath(Path path,
@@ -339,24 +451,29 @@ final class Parser {
                     remaining = remaining.remainder();
                 }
             }
+
+            // the setComments(null) is to ensure comments are only
+            // on the exact leaf node they apply to.
+            // a comment before "foo.bar" applies to the full setting
+            // "foo.bar" not also to "foo"
             ListIterator<String> i = keys.listIterator(keys.size());
             String deepest = i.previous();
-            AbstractConfigObject o = new SimpleConfigObject(value.origin(),
+            AbstractConfigObject o = new SimpleConfigObject(value.origin().setComments(null),
                     Collections.<String, AbstractConfigValue> singletonMap(
                             deepest, value));
             while (i.hasPrevious()) {
                 Map<String, AbstractConfigValue> m = Collections.<String, AbstractConfigValue> singletonMap(
                         i.previous(), o);
-                o = new SimpleConfigObject(value.origin(), m);
+                o = new SimpleConfigObject(value.origin().setComments(null), m);
             }
 
             return o;
         }
 
-        private Path parseKey(Token token) {
+        private Path parseKey(TokenWithComments token) {
             if (flavor == ConfigSyntax.JSON) {
-                if (Tokens.isValueWithType(token, ConfigValueType.STRING)) {
-                    String key = (String) Tokens.getValue(token).unwrapped();
+                if (Tokens.isValueWithType(token.token, ConfigValueType.STRING)) {
+                    String key = (String) Tokens.getValue(token.token).unwrapped();
                     return Path.newKey(key);
                 } else {
                     throw parseError(addKeyName("Expecting close brace } or a field name here, got "
@@ -364,9 +481,9 @@ final class Parser {
                 }
             } else {
                 List<Token> expression = new ArrayList<Token>();
-                Token t = token;
-                while (Tokens.isValue(t) || Tokens.isUnquotedText(t)) {
-                    expression.add(t);
+                TokenWithComments t = token;
+                while (Tokens.isValue(t.token) || Tokens.isUnquotedText(t.token)) {
+                    expression.add(t.token);
                     t = nextToken(); // note: don't cross a newline
                 }
 
@@ -400,13 +517,13 @@ final class Parser {
         }
 
         private void parseInclude(Map<String, AbstractConfigValue> values) {
-            Token t = nextTokenIgnoringNewline();
-            while (isUnquotedWhitespace(t)) {
+            TokenWithComments t = nextTokenIgnoringNewline();
+            while (isUnquotedWhitespace(t.token)) {
                 t = nextTokenIgnoringNewline();
             }
 
-            if (Tokens.isValueWithType(t, ConfigValueType.STRING)) {
-                String name = (String) Tokens.getValue(t).unwrapped();
+            if (Tokens.isValueWithType(t.token, ConfigValueType.STRING)) {
+                String name = (String) Tokens.getValue(t.token).unwrapped();
                 AbstractConfigObject obj = (AbstractConfigObject) includer
                         .include(includeContext, name);
 
@@ -448,8 +565,8 @@ final class Parser {
             boolean lastInsideEquals = false;
 
             while (true) {
-                Token t = nextTokenIgnoringNewline();
-                if (t == Tokens.CLOSE_CURLY) {
+                TokenWithComments t = nextTokenIgnoringNewline();
+                if (t.token == Tokens.CLOSE_CURLY) {
                     if (flavor == ConfigSyntax.JSON && afterComma) {
                         throw parseError(addQuoteSuggestion(t.toString(),
                                 "expecting a field name after a comma, got a close brace } instead"));
@@ -458,45 +575,45 @@ final class Parser {
                                 "unbalanced close brace '}' with no open brace"));
                     }
                     break;
-                } else if (t == Tokens.END && !hadOpenCurly) {
+                } else if (t.token == Tokens.END && !hadOpenCurly) {
                     putBack(t);
                     break;
-                } else if (flavor != ConfigSyntax.JSON && isIncludeKeyword(t)) {
+                } else if (flavor != ConfigSyntax.JSON && isIncludeKeyword(t.token)) {
                     parseInclude(values);
 
                     afterComma = false;
                 } else {
-                    Path path = parseKey(t);
-                    Token afterKey = nextTokenIgnoringNewline();
+                    TokenWithComments keyToken = t;
+                    Path path = parseKey(keyToken);
+                    TokenWithComments afterKey = nextTokenIgnoringNewline();
                     boolean insideEquals = false;
 
                     // path must be on-stack while we parse the value
                     pathStack.push(path);
 
-                    Token valueToken;
+                    TokenWithComments valueToken;
                     AbstractConfigValue newValue;
-                    if (flavor == ConfigSyntax.CONF
-                            && afterKey == Tokens.OPEN_CURLY) {
+                    if (flavor == ConfigSyntax.CONF && afterKey.token == Tokens.OPEN_CURLY) {
                         // can omit the ':' or '=' before an object value
                         valueToken = afterKey;
-                        newValue = parseObject(true);
                     } else {
-                        if (!isKeyValueSeparatorToken(afterKey)) {
+                        if (!isKeyValueSeparatorToken(afterKey.token)) {
                             throw parseError(addQuoteSuggestion(afterKey.toString(),
                                     "Key '" + path.render() + "' may not be followed by token: "
                                             + afterKey));
                         }
 
-                        if (afterKey == Tokens.EQUALS) {
+                        if (afterKey.token == Tokens.EQUALS) {
                             insideEquals = true;
                             equalsCount += 1;
                         }
 
                         consolidateValueTokens();
                         valueToken = nextTokenIgnoringNewline();
-                        newValue = parseValue(valueToken);
                     }
 
+                    newValue = parseValue(valueToken.prepend(keyToken.comments));
+
                     lastPath = pathStack.pop();
                     if (insideEquals) {
                         equalsCount -= 1;
@@ -547,7 +664,7 @@ final class Parser {
                     afterComma = true;
                 } else {
                     t = nextTokenIgnoringNewline();
-                    if (t == Tokens.CLOSE_CURLY) {
+                    if (t.token == Tokens.CLOSE_CURLY) {
                         if (!hadOpenCurly) {
                             throw parseError(addQuoteSuggestion(lastPath, lastInsideEquals,
                                     t.toString(), "unbalanced close brace '}' with no open brace"));
@@ -557,7 +674,7 @@ final class Parser {
                         throw parseError(addQuoteSuggestion(lastPath, lastInsideEquals,
                                 t.toString(), "Expecting close brace } or a comma, got " + t));
                     } else {
-                        if (t == Tokens.END) {
+                        if (t.token == Tokens.END) {
                             putBack(t);
                             break;
                         } else {
@@ -567,6 +684,7 @@ final class Parser {
                     }
                 }
             }
+
             return new SimpleConfigObject(objectOrigin, values);
         }
 
@@ -577,18 +695,15 @@ final class Parser {
 
             consolidateValueTokens();
 
-            Token t = nextTokenIgnoringNewline();
+            TokenWithComments t = nextTokenIgnoringNewline();
 
             // special-case the first element
-            if (t == Tokens.CLOSE_SQUARE) {
+            if (t.token == Tokens.CLOSE_SQUARE) {
                 return new SimpleConfigList(arrayOrigin,
                         Collections.<AbstractConfigValue> emptyList());
-            } else if (Tokens.isValue(t)) {
+            } else if (Tokens.isValue(t.token) || t.token == Tokens.OPEN_CURLY
+                    || t.token == Tokens.OPEN_SQUARE) {
                 values.add(parseValue(t));
-            } else if (t == Tokens.OPEN_CURLY) {
-                values.add(parseObject(true));
-            } else if (t == Tokens.OPEN_SQUARE) {
-                values.add(parseArray());
             } else {
                 throw parseError(addKeyName("List should have ] or a first element after the open [, instead had token: "
                         + t
@@ -604,7 +719,7 @@ final class Parser {
                     // comma (or newline equivalent) consumed
                 } else {
                     t = nextTokenIgnoringNewline();
-                    if (t == Tokens.CLOSE_SQUARE) {
+                    if (t.token == Tokens.CLOSE_SQUARE) {
                         return new SimpleConfigList(arrayOrigin, values);
                     } else {
                         throw parseError(addKeyName("List should have ended with ] or had a comma, instead had token: "
@@ -619,14 +734,10 @@ final class Parser {
                 consolidateValueTokens();
 
                 t = nextTokenIgnoringNewline();
-                if (Tokens.isValue(t)) {
+                if (Tokens.isValue(t.token) || t.token == Tokens.OPEN_CURLY
+                        || t.token == Tokens.OPEN_SQUARE) {
                     values.add(parseValue(t));
-                } else if (t == Tokens.OPEN_CURLY) {
-                    values.add(parseObject(true));
-                } else if (t == Tokens.OPEN_SQUARE) {
-                    values.add(parseArray());
-                } else if (flavor != ConfigSyntax.JSON
-                        && t == Tokens.CLOSE_SQUARE) {
+                } else if (flavor != ConfigSyntax.JSON && t.token == Tokens.CLOSE_SQUARE) {
                     // we allow one trailing comma
                     putBack(t);
                 } else {
@@ -640,8 +751,8 @@ final class Parser {
         }
 
         AbstractConfigValue parse() {
-            Token t = nextTokenIgnoringNewline();
-            if (t == Tokens.START) {
+            TokenWithComments t = nextTokenIgnoringNewline();
+            if (t.token == Tokens.START) {
                 // OK
             } else {
                 throw new ConfigException.BugOrBroken(
@@ -650,13 +761,11 @@ final class Parser {
 
             t = nextTokenIgnoringNewline();
             AbstractConfigValue result = null;
-            if (t == Tokens.OPEN_CURLY) {
-                result = parseObject(true);
-            } else if (t == Tokens.OPEN_SQUARE) {
-                result = parseArray();
+            if (t.token == Tokens.OPEN_CURLY || t.token == Tokens.OPEN_SQUARE) {
+                result = parseValue(t);
             } else {
                 if (flavor == ConfigSyntax.JSON) {
-                    if (t == Tokens.END) {
+                    if (t.token == Tokens.END) {
                         throw parseError("Empty document");
                     } else {
                         throw parseError("Document must have an object or array at root, unexpected token: "
@@ -668,11 +777,14 @@ final class Parser {
                     // of it, so put it back.
                     putBack(t);
                     result = parseObject(false);
+                    // in this case we don't try to use commentsStack comments
+                    // since they would all presumably apply to fields not the
+                    // root object
                 }
             }
 
             t = nextTokenIgnoringNewline();
-            if (t == Tokens.END) {
+            if (t.token == Tokens.END) {
                 return result;
             } else {
                 throw parseError("Document has trailing tokens after first object or array: "
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigList.java b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigList.java
index 6703540040..1921826352 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigList.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigList.java
@@ -145,6 +145,14 @@ final class SimpleConfigList extends AbstractConfigValue implements ConfigList {
                     sb.append("# ");
                     sb.append(v.origin().description());
                     sb.append("\n");
+
+                    for (String comment : v.origin().comments()) {
+                        indent(sb, indent + 1);
+                        sb.append("# ");
+                        sb.append(comment);
+                        sb.append("\n");
+                    }
+
                     indent(sb, indent + 1);
                 }
                 v.render(sb, indent + 1, formatted);
@@ -353,4 +361,9 @@ final class SimpleConfigList extends AbstractConfigValue implements ConfigList {
     public ConfigValue set(int index, ConfigValue element) {
         throw weAreImmutable("set");
     }
+
+    @Override
+    protected SimpleConfigList newCopy(boolean ignoresFallbacks, ConfigOrigin newOrigin) {
+        return new SimpleConfigList(newOrigin, value);
+    }
 }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigObject.java b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigObject.java
index 0c855ba879..953f26491f 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigObject.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigObject.java
@@ -45,8 +45,9 @@ final class SimpleConfigObject extends AbstractConfigObject {
     }
 
     @Override
-    protected SimpleConfigObject newCopy(ResolveStatus newStatus, boolean newIgnoresFallbacks) {
-        return new SimpleConfigObject(origin(), value, newStatus, newIgnoresFallbacks);
+    protected SimpleConfigObject newCopy(ResolveStatus newStatus, boolean newIgnoresFallbacks,
+            ConfigOrigin newOrigin) {
+        return new SimpleConfigObject(newOrigin, value, newStatus, newIgnoresFallbacks);
     }
 
     @Override
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigOrigin.java b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigOrigin.java
index 01d5b6070b..f0a0dbd353 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigOrigin.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigOrigin.java
@@ -8,6 +8,7 @@ import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 
@@ -22,19 +23,21 @@ final class SimpleConfigOrigin implements ConfigOrigin {
     final private int endLineNumber;
     final private OriginType originType;
     final private String urlOrNull;
+    final private List<String> commentsOrNull;
 
     protected SimpleConfigOrigin(String description, int lineNumber, int endLineNumber,
             OriginType originType,
-            String urlOrNull) {
+            String urlOrNull, List<String> commentsOrNull) {
         this.description = description;
         this.lineNumber = lineNumber;
         this.endLineNumber = endLineNumber;
         this.originType = originType;
         this.urlOrNull = urlOrNull;
+        this.commentsOrNull = commentsOrNull;
     }
 
     static SimpleConfigOrigin newSimple(String description) {
-        return new SimpleConfigOrigin(description, -1, -1, OriginType.GENERIC, null);
+        return new SimpleConfigOrigin(description, -1, -1, OriginType.GENERIC, null, null);
     }
 
     static SimpleConfigOrigin newFile(String filename) {
@@ -44,17 +47,17 @@ final class SimpleConfigOrigin implements ConfigOrigin {
         } catch (MalformedURLException e) {
             url = null;
         }
-        return new SimpleConfigOrigin(filename, -1, -1, OriginType.FILE, url);
+        return new SimpleConfigOrigin(filename, -1, -1, OriginType.FILE, url, null);
     }
 
     static SimpleConfigOrigin newURL(URL url) {
         String u = url.toExternalForm();
-        return new SimpleConfigOrigin(u, -1, -1, OriginType.URL, u);
+        return new SimpleConfigOrigin(u, -1, -1, OriginType.URL, u, null);
     }
 
     static SimpleConfigOrigin newResource(String resource, URL url) {
         return new SimpleConfigOrigin(resource, -1, -1, OriginType.RESOURCE,
-                url != null ? url.toExternalForm() : null);
+                url != null ? url.toExternalForm() : null, null);
     }
 
     static SimpleConfigOrigin newResource(String resource) {
@@ -66,13 +69,22 @@ final class SimpleConfigOrigin implements ConfigOrigin {
             return this;
         } else {
             return new SimpleConfigOrigin(this.description, lineNumber, lineNumber,
-                    this.originType, this.urlOrNull);
+                    this.originType, this.urlOrNull, this.commentsOrNull);
         }
     }
 
     SimpleConfigOrigin addURL(URL url) {
-        return new SimpleConfigOrigin(this.description, this.lineNumber, this.endLineNumber, this.originType,
-                url != null ? url.toExternalForm() : null);
+        return new SimpleConfigOrigin(this.description, this.lineNumber, this.endLineNumber,
+                this.originType, url != null ? url.toExternalForm() : null, this.commentsOrNull);
+    }
+
+    SimpleConfigOrigin setComments(List<String> comments) {
+        if (ConfigImplUtil.equalsHandlingNull(comments, this.commentsOrNull)) {
+            return this;
+        } else {
+            return new SimpleConfigOrigin(this.description, this.lineNumber, this.endLineNumber,
+                    this.originType, this.urlOrNull, comments);
+        }
     }
 
     @Override
@@ -172,12 +184,22 @@ final class SimpleConfigOrigin implements ConfigOrigin {
         return lineNumber;
     }
 
+    @Override
+    public List<String> comments() {
+        if (commentsOrNull != null) {
+            return commentsOrNull;
+        } else {
+            return Collections.emptyList();
+        }
+    }
+
     static final String MERGE_OF_PREFIX = "merge of ";
 
     private static SimpleConfigOrigin mergeTwo(SimpleConfigOrigin a, SimpleConfigOrigin b) {
         String mergedDesc;
         int mergedStartLine;
         int mergedEndLine;
+        List<String> mergedComments;
 
         OriginType mergedType;
         if (a.originType == b.originType) {
@@ -233,8 +255,18 @@ final class SimpleConfigOrigin implements ConfigOrigin {
             mergedURL = null;
         }
 
+        if (ConfigImplUtil.equalsHandlingNull(a.commentsOrNull, b.commentsOrNull)) {
+            mergedComments = a.commentsOrNull;
+        } else {
+            mergedComments = new ArrayList<String>();
+            if (a.commentsOrNull != null)
+                mergedComments.addAll(a.commentsOrNull);
+            if (b.commentsOrNull != null)
+                mergedComments.addAll(b.commentsOrNull);
+        }
+
         return new SimpleConfigOrigin(mergedDesc, mergedStartLine, mergedEndLine, mergedType,
-                mergedURL);
+                mergedURL, mergedComments);
     }
 
     private static int similarity(SimpleConfigOrigin a, SimpleConfigOrigin b) {
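
The comment handling in mergeTwo keeps a single list when both origins agree and concatenates otherwise; a simplified standalone sketch of that rule (CommentMergeSketch is hypothetical, not the real class):

    import java.util.ArrayList;
    import java.util.List;

    final class CommentMergeSketch {
        // equal lists (including both null) -> reuse; otherwise concatenate the non-null ones
        static List<String> merge(List<String> a, List<String> b) {
            if (a == null ? b == null : a.equals(b)) {
                return a;
            }
            List<String> merged = new ArrayList<String>();
            if (a != null) merged.addAll(a);
            if (b != null) merged.addAll(b);
            return merged;
        }
    }
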
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/TokenType.java b/akka-actor/src/main/java/com/typesafe/config/impl/TokenType.java
index ace12fa70b..fc617d9ee2 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/TokenType.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/TokenType.java
@@ -17,5 +17,6 @@ enum TokenType {
     NEWLINE,
     UNQUOTED_TEXT,
     SUBSTITUTION,
-    PROBLEM;
+    PROBLEM,
+    COMMENT;
 }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Tokenizer.java b/akka-actor/src/main/java/com/typesafe/config/impl/Tokenizer.java
index 2aeb7184bc..280a028077 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/Tokenizer.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/Tokenizer.java
@@ -168,40 +168,27 @@ final class Tokenizer {
             return c != '\n' && ConfigImplUtil.isWhitespace(c);
         }
 
-        private int slurpComment() {
-            for (;;) {
-                int c = nextCharRaw();
-                if (c == -1 || c == '\n') {
-                    return c;
-                }
-            }
-        }
-
-        // get next char, skipping comments
-        private int nextCharSkippingComments() {
-            for (;;) {
-                int c = nextCharRaw();
-
-                if (c == -1) {
-                    return -1;
-                } else {
-                    if (allowComments) {
-                        if (c == '#') {
-                            return slurpComment();
-                        } else if (c == '/') {
-                            int maybeSecondSlash = nextCharRaw();
-                            if (maybeSecondSlash == '/') {
-                                return slurpComment();
-                            } else {
-                                putBack(maybeSecondSlash);
-                                return c;
-                            }
+        private boolean startOfComment(int c) {
+            if (c == -1) {
+                return false;
+            } else {
+                if (allowComments) {
+                    if (c == '#') {
+                        return true;
+                    } else if (c == '/') {
+                        int maybeSecondSlash = nextCharRaw();
+                        // we want to predictably NOT consume any chars
+                        putBack(maybeSecondSlash);
+                        if (maybeSecondSlash == '/') {
+                            return true;
                         } else {
-                            return c;
+                            return false;
                         }
                     } else {
-                        return c;
+                        return false;
                     }
+                } else {
+                    return false;
                 }
             }
         }
@@ -209,7 +196,7 @@ final class Tokenizer {
         // get next char, skipping non-newline whitespace
         private int nextCharAfterWhitespace(WhitespaceSaver saver) {
             for (;;) {
-                int c = nextCharSkippingComments();
+                int c = nextCharRaw();
 
                 if (c == -1) {
                     return -1;
@@ -269,6 +256,27 @@ final class Tokenizer {
             return ((SimpleConfigOrigin) baseOrigin).setLineNumber(lineNumber);
         }
 
+        // ONE char has always been consumed, either the # or the first /, but
+        // not both slashes
+        private Token pullComment(int firstChar) {
+            if (firstChar == '/') {
+                int discard = nextCharRaw();
+                if (discard != '/')
+                    throw new ConfigException.BugOrBroken("called pullComment but // not seen");
+            }
+
+            StringBuilder sb = new StringBuilder();
+            for (;;) {
+                int c = nextCharRaw();
+                if (c == -1 || c == '\n') {
+                    putBack(c);
+                    return Tokens.newComment(lineOrigin, sb.toString());
+                } else {
+                    sb.appendCodePoint(c);
+                }
+            }
+        }
+
         // chars JSON allows a number to start with
         static final String firstNumberChars = "0123456789-";
         // chars JSON allows to be part of a number
@@ -283,7 +291,7 @@ final class Tokenizer {
         private Token pullUnquotedText() {
             ConfigOrigin origin = lineOrigin;
             StringBuilder sb = new StringBuilder();
-            int c = nextCharSkippingComments();
+            int c = nextCharRaw();
             while (true) {
                 if (c == -1) {
                     break;
@@ -291,6 +299,8 @@ final class Tokenizer {
                     break;
                 } else if (isWhitespace(c)) {
                     break;
+                } else if (startOfComment(c)) {
+                    break;
                 } else {
                     sb.appendCodePoint(c);
                 }
@@ -310,7 +320,7 @@ final class Tokenizer {
                         return Tokens.newBoolean(origin, false);
                 }
 
-                c = nextCharSkippingComments();
+                c = nextCharRaw();
             }
 
             // put back the char that ended the unquoted text
@@ -324,12 +334,12 @@ final class Tokenizer {
             StringBuilder sb = new StringBuilder();
             sb.appendCodePoint(firstChar);
             boolean containedDecimalOrE = false;
-            int c = nextCharSkippingComments();
+            int c = nextCharRaw();
             while (c != -1 && numberChars.indexOf(c) >= 0) {
                 if (c == '.' || c == 'e' || c == 'E')
                     containedDecimalOrE = true;
                 sb.appendCodePoint(c);
-                c = nextCharSkippingComments();
+                c = nextCharRaw();
             }
             // the last character we looked at wasn't part of the number, put it
             // back
@@ -382,7 +392,7 @@ final class Tokenizer {
                 // kind of absurdly slow, but screw it for now
                 char[] a = new char[4];
                 for (int i = 0; i < 4; ++i) {
-                    int c = nextCharSkippingComments();
+                    int c = nextCharRaw();
                     if (c == -1)
                         throw problem("End of input but expecting 4 hex digits for \\uXXXX escape");
                     a[i] = (char) c;
@@ -431,14 +441,14 @@ final class Tokenizer {
         private Token pullSubstitution() throws ProblemException {
             // the initial '$' has already been consumed
             ConfigOrigin origin = lineOrigin;
-            int c = nextCharSkippingComments();
+            int c = nextCharRaw();
             if (c != '{') {
                 throw problem(asString(c), "'$' not followed by {, '" + asString(c)
                         + "' not allowed after '$'", true /* suggestQuotes */);
             }
 
             boolean optional = false;
-            c = nextCharSkippingComments();
+            c = nextCharRaw();
             if (c == '?') {
                 optional = true;
             } else {
@@ -484,45 +494,49 @@ final class Tokenizer {
                 return line;
             } else {
                 Token t = null;
-                switch (c) {
-                case '"':
-                    t = pullQuotedString();
-                    break;
-                case '$':
-                    t = pullSubstitution();
-                    break;
-                case ':':
-                    t = Tokens.COLON;
-                    break;
-                case ',':
-                    t = Tokens.COMMA;
-                    break;
-                case '=':
-                    t = Tokens.EQUALS;
-                    break;
-                case '{':
-                    t = Tokens.OPEN_CURLY;
-                    break;
-                case '}':
-                    t = Tokens.CLOSE_CURLY;
-                    break;
-                case '[':
-                    t = Tokens.OPEN_SQUARE;
-                    break;
-                case ']':
-                    t = Tokens.CLOSE_SQUARE;
-                    break;
-                }
+                if (startOfComment(c)) {
+                    t = pullComment(c);
+                } else {
+                    switch (c) {
+                    case '"':
+                        t = pullQuotedString();
+                        break;
+                    case '$':
+                        t = pullSubstitution();
+                        break;
+                    case ':':
+                        t = Tokens.COLON;
+                        break;
+                    case ',':
+                        t = Tokens.COMMA;
+                        break;
+                    case '=':
+                        t = Tokens.EQUALS;
+                        break;
+                    case '{':
+                        t = Tokens.OPEN_CURLY;
+                        break;
+                    case '}':
+                        t = Tokens.CLOSE_CURLY;
+                        break;
+                    case '[':
+                        t = Tokens.OPEN_SQUARE;
+                        break;
+                    case ']':
+                        t = Tokens.CLOSE_SQUARE;
+                        break;
+                    }
 
-                if (t == null) {
-                    if (firstNumberChars.indexOf(c) >= 0) {
-                        t = pullNumber(c);
-                    } else if (notInUnquotedText.indexOf(c) >= 0) {
-                        throw problem(asString(c), "Reserved character '" + asString(c)
-                                + "' is not allowed outside quotes", true /* suggestQuotes */);
-                    } else {
-                        putBack(c);
-                        t = pullUnquotedText();
+                    if (t == null) {
+                        if (firstNumberChars.indexOf(c) >= 0) {
+                            t = pullNumber(c);
+                        } else if (notInUnquotedText.indexOf(c) >= 0) {
+                            throw problem(asString(c), "Reserved character '" + asString(c)
+                                    + "' is not allowed outside quotes", true /* suggestQuotes */);
+                        } else {
+                            putBack(c);
+                            t = pullUnquotedText();
+                        }
                     }
                 }
 
@@ -548,6 +562,7 @@ final class Tokenizer {
             Token whitespace = whitespaceSaver.check(t, origin, lineNumber);
             if (whitespace != null)
                 tokens.add(whitespace);
+
             tokens.add(t);
         }
 
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Tokens.java b/akka-actor/src/main/java/com/typesafe/config/impl/Tokens.java
index 9f7bd42e7c..d726d83d53 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/Tokens.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/Tokens.java
@@ -52,7 +52,7 @@ final class Tokens {
 
         @Override
         public String toString() {
-            return "'\n'@" + lineNumber();
+            return "'\\n'@" + lineNumber();
         }
 
         @Override
@@ -167,6 +167,45 @@ final class Tokens {
         }
     }
 
+    static private class Comment extends Token {
+        final private String text;
+
+        Comment(ConfigOrigin origin, String text) {
+            super(TokenType.COMMENT, origin);
+            this.text = text;
+        }
+
+        String text() {
+            return text;
+        }
+
+        @Override
+        public String toString() {
+            StringBuilder sb = new StringBuilder();
+            sb.append("'#");
+            sb.append(text);
+            sb.append("' (COMMENT)");
+            return sb.toString();
+        }
+
+        @Override
+        protected boolean canEqual(Object other) {
+            return other instanceof Comment;
+        }
+
+        @Override
+        public boolean equals(Object other) {
+            return super.equals(other) && ((Comment) other).text.equals(text);
+        }
+
+        @Override
+        public int hashCode() {
+            int h = 41 * (41 + super.hashCode());
+            h = 41 * (h + text.hashCode());
+            return h;
+        }
+    }
+
     // This is not a Value, because it requires special processing
     static private class Substitution extends Token {
         final private boolean optional;
@@ -262,6 +301,18 @@ final class Tokens {
         }
     }
 
+    static boolean isComment(Token token) {
+        return token instanceof Comment;
+    }
+
+    static String getCommentText(Token token) {
+        if (token instanceof Comment) {
+            return ((Comment) token).text();
+        } else {
+            throw new ConfigException.BugOrBroken("tried to get comment text from " + token);
+        }
+    }
+
     static boolean isUnquotedText(Token token) {
         return token instanceof UnquotedText;
     }
@@ -316,6 +367,10 @@ final class Tokens {
         return new Problem(origin, what, message, suggestQuotes, cause);
     }
 
+    static Token newComment(ConfigOrigin origin, String text) {
+        return new Comment(origin, text);
+    }
+
     static Token newUnquotedText(ConfigOrigin origin, String s) {
         return new UnquotedText(origin, s);
     }

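A note on the new Comment token above: the class is package private inside ``com.typesafe.config.impl``, so it cannot be used from application code, but the equals/hashCode/canEqual pattern it follows is worth spelling out. The following is a minimal Scala sketch of that contract only; the class name and the omission of the ``Token`` superclass are simplifications for illustration, not the actual implementation.

.. code-block:: scala

  // Illustrative sketch of the equality contract used by the new Comment token
  class CommentToken(val text: String) {

    // canEqual guards against equality with unrelated token types
    def canEqual(other: Any): Boolean = other.isInstanceOf[CommentToken]

    override def equals(other: Any): Boolean = other match {
      case that: CommentToken => that.canEqual(this) && that.text == text
      case _                  => false
    }

    // same 41-prime combining scheme as the Java code above
    override def hashCode: Int = 41 * (41 + text.hashCode)

    override def toString: String = "'#" + text + "' (COMMENT)"
  }
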
From 8289ac2a291278765f1cd40ad6f59296526c6916 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jonas=20Bone=CC=81r?= 
Date: Wed, 14 Dec 2011 15:10:42 +0100
Subject: [PATCH 11/34] Minor doc changes to Props docs

---
 akka-docs/java/untyped-actors.rst | 4 ++--
 akka-docs/scala/actors.rst        | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/akka-docs/java/untyped-actors.rst b/akka-docs/java/untyped-actors.rst
index 7ed3100fab..ab82599450 100644
--- a/akka-docs/java/untyped-actors.rst
+++ b/akka-docs/java/untyped-actors.rst
@@ -93,7 +93,7 @@ This way of creating the Actor is also great for integrating with Dependency Inj
 Props
 -----
 
-``Props`` is a configuration object to specify configuration options for the creation
+``Props`` is a configuration class to specify options for the creation
 of actors. Here are some examples on how to create a ``Props`` instance.
 
 .. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#creating-props-config
@@ -102,7 +102,7 @@ of actors. Here are some examples on how to create a ``Props`` instance.
 Creating Actors with Props
 --------------------------
 
-Actors are created by passing in the ``Props`` object into the ``actorOf`` factory method.
+Actors are created by passing in a ``Props`` instance into the ``actorOf`` factory method.
 
 .. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#creating-props
 
diff --git a/akka-docs/scala/actors.rst b/akka-docs/scala/actors.rst
index 7f06dbcc9d..92bdfe244c 100644
--- a/akka-docs/scala/actors.rst
+++ b/akka-docs/scala/actors.rst
@@ -98,7 +98,7 @@ Here is an example:
 Props
 -----
 
-``Props`` is a configuration object to specify configuration options for the creation
+``Props`` is a configuration class to specify options for the creation
 of actors. Here are some examples on how to create a ``Props`` instance.
 
 .. includecode:: code/ActorDocSpec.scala#creating-props-config
@@ -107,7 +107,7 @@ of actors. Here are some examples on how to create a ``Props`` instance.
 Creating Actors with Props
 --------------------------
 
-Actors are created by passing in the ``Props`` object into the ``actorOf`` factory method.
+Actors are created by passing in a ``Props`` instance into the ``actorOf`` factory method.
 
 .. includecode:: code/ActorDocSpec.scala#creating-props
 

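For context, a minimal Scala sketch of the flow these two doc changes describe: a ``Props`` instance configuring actor creation and being passed to ``actorOf``. The ``Greeter`` actor and system name are hypothetical; the calls assume the 2.0-style ``ActorSystem``/``Props`` API shown in the surrounding docs.

.. code-block:: scala

  import akka.actor.{ Actor, ActorSystem, Props }

  // Hypothetical actor used only for illustration
  class Greeter extends Actor {
    def receive = {
      case name: String => sender ! ("Hello, " + name)
    }
  }

  object PropsExample extends App {
    val system = ActorSystem("PropsExample")

    // Actors are created by passing a Props instance to the actorOf factory method
    val greeter = system.actorOf(Props[Greeter], "greeter")

    greeter ! "Akka"
  }
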
From 8ffa85c5906741dfc51f0d217e760d2fc719cb95 Mon Sep 17 00:00:00 2001
From: Patrik Nordwall 
Date: Wed, 14 Dec 2011 15:12:40 +0100
Subject: [PATCH 12/34] DOC: Rewrite config comments. See #1505

---
 .../test/scala/akka/actor/DeployerSpec.scala  |   1 +
 akka-actor/src/main/resources/reference.conf  | 198 +++++++++++-------
 akka-docs/general/configuration.rst           |  56 ++---
 akka-docs/java/dispatchers.rst                |  15 +-
 akka-docs/java/logging.rst                    |  31 +--
 akka-docs/modules/camel.rst                   |   3 +-
 .../docs/dispatcher/DispatcherDocSpec.scala   |  30 ++-
 akka-docs/scala/dispatchers.rst               |  11 +-
 akka-docs/scala/logging.rst                   |  31 +--
 akka-docs/scala/typed-actors.rst              |   5 +-
 .../src/main/resources/reference.conf         |   2 +-
 .../src/main/resources/reference.conf         |   2 +-
 .../src/main/resources/reference.conf         |  12 +-
 .../src/main/resources/reference.conf         |   2 +-
 .../src/main/resources/reference.conf         |   2 +-
 akka-remote/src/main/resources/reference.conf |  92 ++++----
 akka-stm/src/main/resources/reference.conf    |  18 +-
 .../src/main/resources/reference.conf         |  11 +-
 18 files changed, 308 insertions(+), 214 deletions(-)

diff --git a/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala
index e38ea1c3d4..ff13edb373 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala
@@ -51,6 +51,7 @@ class DeployerSpec extends AkkaSpec(DeployerSpec.deployerConf) {
   "A Deployer" must {
 
     "be able to parse 'akka.actor.deployment._' with all default values" in {
+      println(system.settings.toString)
       val service = "/user/service1"
       val deployment = system.asInstanceOf[ActorSystemImpl].provider.deployer.lookup(service)
       deployment must be('defined)
diff --git a/akka-actor/src/main/resources/reference.conf b/akka-actor/src/main/resources/reference.conf
index f31e61bcbe..8814a65a3b 100644
--- a/akka-actor/src/main/resources/reference.conf
+++ b/akka-actor/src/main/resources/reference.conf
@@ -3,27 +3,37 @@
 ##############################
 
 # This the reference config file has all the default settings.
-# Make your edits/overrides in your akka.conf.
+# Make your edits/overrides in your application.conf.
 
 akka {
-  version = "2.0-SNAPSHOT" # Akka version, checked against the runtime version of Akka.
-  
-  home = ""                # Home directory of Akka, modules in the deploy directory will be loaded
+  # Akka version, checked against the runtime version of Akka.
+  version = "2.0-SNAPSHOT"
 
-  enabled-modules = []     # Comma separated list of the enabled modules. Options: ["cluster", "camel", "http"]
+  # Home directory of Akka, modules in the deploy directory will be loaded
+  home = ""
 
-  event-handlers = ["akka.event.Logging$DefaultLogger"] # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT)
-  loglevel        = "INFO"                              # Options: ERROR, WARNING, INFO, DEBUG
-                                                        # this level is used by the configured loggers (see "event-handlers") as soon
-                                                        # as they have been started; before that, see "stdout-loglevel"
-  stdout-loglevel = "WARNING"                           # Loglevel for the very basic logger activated during AkkaApplication startup
-                                                        # FIXME: Is there any sensible reason why we have 2 different log levels?
+  # Comma separated list of the enabled modules. Options: ["cluster", "camel", "http"]
+  enabled-modules = []
 
-  logConfigOnStart = off                                # Log the complete configuration at INFO level when the actor system is started. 
-                                                        # This is useful when you are uncertain of what configuration is used.
+  # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT)
+  event-handlers = ["akka.event.Logging$DefaultLogger"]
 
-  extensions = []          # List FQCN of extensions which shall be loaded at actor system startup. 
-                           # FIXME: clarify "extensions" here, "Akka Extensions ()" 
+  # Log level used by the configured loggers (see "event-handlers") as soon
+  # as they have been started; before that, see "stdout-loglevel"
+  # Options: ERROR, WARNING, INFO, DEBUG
+  loglevel = "INFO"
+
+  # Log level for the very basic logger activated during AkkaApplication startup
+  # Options: ERROR, WARNING, INFO, DEBUG
+  stdout-loglevel = "WARNING"
+
+  # Log the complete configuration at INFO level when the actor system is started.
+  # This is useful when you are uncertain of what configuration is used.
+  logConfigOnStart = off
+
+  # List FQCN of extensions which shall be loaded at actor system startup.
+  # FIXME: clarify "extensions" here, "Akka Extensions ()"
+  extensions = []
 
   # These boot classes are loaded (and created) automatically when the Akka Microkernel boots up
   #     Can be used to bootstrap your application(s)
@@ -35,88 +45,127 @@ akka {
   boot = []
 
   actor {
+
     provider = "akka.actor.LocalActorRefProvider"
-    creation-timeout = 20s           # Timeout for ActorSystem.actorOf
-    timeout = 5s                     # Default timeout for Future based invocations
-                                     #    - Actor:        ask && ?
-                                     #    - UntypedActor: ask
-                                     #    - TypedActor:   methods with non-void return type
-    serialize-messages = off         # Does a deep clone of (non-primitive) messages to ensure immutability
-    dispatcher-shutdown-timeout = 1s # How long dispatchers by default will wait for new actors until they shut down
+
+    # Timeout for ActorSystem.actorOf
+    creation-timeout = 20s
+
+    # Default timeout for Future based invocations
+    #    - Actor:        ask && ?
+    #    - UntypedActor: ask
+    #    - TypedActor:   methods with non-void return type
+    timeout = 5s
+
+    # Does a deep clone of (non-primitive) messages to ensure immutability
+    serialize-messages = off
+
+    # How long dispatchers by default will wait for new actors until they shut down
+    dispatcher-shutdown-timeout = 1s
 
     deployment {
-    
-      default {                # deployment id pattern, e.g. /app/service-ping
 
-        router = "direct"      # routing (load-balance) scheme to use
-                               #     available: "direct", "round-robin", "random", "scatter-gather"
-                               #     or:        fully qualified class name of the router class
-                               #     default is "direct";
-                               # In case of non-direct routing, the actors to be routed to can be specified
-                               # in several ways:
-                               # - nr-of-instances: will create that many children given the actor factory
-                               #   supplied in the source code (overridable using create-as below)
-                               # - target.paths: will look the paths up using actorFor and route to 
-                               #   them, i.e. will not create children
+      # deployment id pattern, e.g. /user/service-ping
+      default {
 
-        nr-of-instances = 1    # number of children to create in case of a non-direct router; this setting
-                               # is ignored if target.paths is given
 
-        create-as {            # FIXME document 'create-as'
-          class = ""           # fully qualified class name of recipe implementation
+        # routing (load-balance) scheme to use
+        #     available: "direct", "round-robin", "random", "scatter-gather"
+        #     or:        fully qualified class name of the router class
+        #     default is "direct";
+        # In case of non-direct routing, the actors to be routed to can be specified
+        # in several ways:
+        # - nr-of-instances: will create that many children given the actor factory
+        #   supplied in the source code (overridable using create-as below)
+        # - target.paths: will look the paths up using actorFor and route to
+        #   them, i.e. will not create children
+        router = "direct"
+
+        # number of children to create in case of a non-direct router; this setting
+        # is ignored if target.paths is given
+        nr-of-instances = 1
+
+        # FIXME document 'create-as'
+        create-as {
+          # fully qualified class name of recipe implementation
+          class = ""
         }
 
         target {
-          paths = []           # Alternatively to giving nr-of-instances you can specify the full paths of 
-                               # those actors which should be routed to. This setting takes precedence over
-                               # nr-of-instances
+          # Alternatively to giving nr-of-instances you can specify the full paths of
+          # those actors which should be routed to. This setting takes precedence over
+          # nr-of-instances
+          paths = []
         }
-        
+
       }
     }
 
     default-dispatcher {
-      type = "Dispatcher"              # Must be one of the following
-                                       # Dispatcher, (BalancingDispatcher, only valid when all actors using it are of the same type),
-                                       # A FQCN to a class inheriting MessageDispatcherConfigurator with a no-arg visible constructor
-      name = "DefaultDispatcher"       # Name used in log messages and thread names.
-      daemonic = off                   # Toggles whether the threads created by this dispatcher should be daemons or not
-      keep-alive-time = 60s            # Keep alive time for threads
-      core-pool-size-min = 8           # minimum number of threads to cap factor-based core number to
-      core-pool-size-factor = 8.0      # No of core threads ... ceil(available processors * factor)
-      core-pool-size-max = 4096        # maximum number of threads to cap factor-based number to
-                                       # Hint: max-pool-size is only used for bounded task queues
-      max-pool-size-min = 8            # minimum number of threads to cap factor-based max number to
-      max-pool-size-factor  = 8.0      # Max no of threads ... ceil(available processors * factor)
-      max-pool-size-max = 4096         # maximum number of threads to cap factor-based max number to
-      task-queue-size = -1             # Specifies the bounded capacity of the task queue (< 1 == unbounded)
-      task-queue-type = "linked"       # Specifies which type of task queue will be used, can be "array" or "linked" (default)
-      allow-core-timeout = on          # Allow core threads to time out
-      throughput = 5                   # Throughput defines the number of messages that are processed in a batch before the
-                                       # thread is returned to the pool. Set to 1 for as fair as possible.
-      throughput-deadline-time =  0ms  # Throughput deadline for Dispatcher, set to 0 or negative for no deadline
-      mailbox-capacity = -1            # If negative (or zero) then an unbounded mailbox is used (default)
-                                       # If positive then a bounded mailbox is used and the capacity is set using the property
-                                       # NOTE: setting a mailbox to 'blocking' can be a bit dangerous, could lead to deadlock, use with care
-                                       # The following are only used for Dispatcher and only if mailbox-capacity > 0
-      mailbox-push-timeout-time = 10s  # Specifies the timeout to add a new message to a mailbox that is full - negative number means infinite timeout
+      # Must be one of the following
+      # Dispatcher, (BalancingDispatcher, only valid when all actors using it are of the same type),
+      # A FQCN to a class inheriting MessageDispatcherConfigurator with a no-arg visible constructor
+      type = "Dispatcher"
+      # Name used in log messages and thread names.
+      name = "DefaultDispatcher"
+      # Toggles whether the threads created by this dispatcher should be daemons or not
+      daemonic = off
+      # Keep alive time for threads
+      keep-alive-time = 60s
+      # minimum number of threads to cap factor-based core number to
+      core-pool-size-min = 8
+      # No of core threads ... ceil(available processors * factor)
+      core-pool-size-factor = 8.0
+      # maximum number of threads to cap factor-based number to
+      core-pool-size-max = 4096
+      # Hint: max-pool-size is only used for bounded task queues
+      # minimum number of threads to cap factor-based max number to
+      max-pool-size-min = 8
+      # Max no of threads ... ceil(available processors * factor)
+      max-pool-size-factor  = 8.0
+      # maximum number of threads to cap factor-based max number to
+      max-pool-size-max = 4096
+      # Specifies the bounded capacity of the task queue (< 1 == unbounded)
+      task-queue-size = -1
+      # Specifies which type of task queue will be used, can be "array" or "linked" (default)
+      task-queue-type = "linked"
+      # Allow core threads to time out
+      allow-core-timeout = on
+      # Throughput defines the number of messages that are processed in a batch before the
+      # thread is returned to the pool. Set to 1 for as fair as possible.
+      throughput = 5
+      # Throughput deadline for Dispatcher, set to 0 or negative for no deadline
+      throughput-deadline-time = 0ms
+      # If negative (or zero) then an unbounded mailbox is used (default)
+      # If positive then a bounded mailbox is used and the capacity is set using the property
+      # NOTE: setting a mailbox to 'blocking' can be a bit dangerous, could lead to deadlock, use with care
+      # The following are only used for Dispatcher and only if mailbox-capacity > 0
+      mailbox-capacity = -1
+      # Specifies the timeout to add a new message to a mailbox that is full -
+      # negative number means infinite timeout
+      mailbox-push-timeout-time = 10s
     }
 
     debug {
-      receive = off        # enable function of Actor.loggable(), which is to log any received message at DEBUG level
-      autoreceive = off    # enable DEBUG logging of all AutoReceiveMessages (Kill, PoisonPill and the like)
-      lifecycle = off      # enable DEBUG logging of actor lifecycle changes
-      fsm = off            # enable DEBUG logging of all LoggingFSMs for events, transitions and timers
-      event-stream = off   # enable DEBUG logging of subscription changes on the eventStream
+      # enable function of Actor.loggable(), which is to log any received message at DEBUG level
+      receive = off
+      # enable DEBUG logging of all AutoReceiveMessages (Kill, PoisonPill and the like)
+      autoreceive = off
+      # enable DEBUG logging of actor lifecycle changes
+      lifecycle = off
+      # enable DEBUG logging of all LoggingFSMs for events, transitions and timers
+      fsm = off
+      # enable DEBUG logging of subscription changes on the eventStream
+      event-stream = off
     }
-    
+
     # Entries for pluggable serializers and their bindings. If a binding for a specific class is not found,
     # then the default serializer (Java serialization) is used.
-    #
     serializers {
       # java = "akka.serialization.JavaSerializer"
       # proto = "akka.testing.ProtobufSerializer"
       # sjson = "akka.testing.SJSONSerializer"
+
       default = "akka.serialization.JavaSerializer"
     }
 
@@ -137,7 +186,6 @@ akka {
   #
   scheduler {
     # The HashedWheelTimer (HWT) implementation from Netty is used as the default scheduler in the system.
-    #
     # HWT does not execute the scheduled tasks on exact time.
     # It will, on every tick, check if there are any tasks behind the schedule and execute them.
     # You can increase or decrease the accuracy of the execution timing by specifying smaller or larger tick duration.
@@ -146,5 +194,5 @@ akka {
     tickDuration = 100ms
     ticksPerWheel = 512
   }
-  
+
 }
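
Since this reference.conf rewrite only moves descriptions into leading comments, the settings themselves are unchanged. A minimal Scala sketch of reading a couple of them through the config API used in this tree (``ConfigFactory.load`` merges ``application.conf`` over the reference defaults); the object name is arbitrary.

.. code-block:: scala

  import com.typesafe.config.ConfigFactory

  object ConfigReadExample extends App {
    // application.conf (if present) layered over the reference.conf defaults
    val config = ConfigFactory.load()

    // two of the akka-actor settings documented above
    val loglevel   = config.getString("akka.loglevel")                          // "INFO" by default
    val throughput = config.getInt("akka.actor.default-dispatcher.throughput")  // 5 by default

    println("loglevel=" + loglevel + ", throughput=" + throughput)
  }
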
diff --git a/akka-docs/general/configuration.rst b/akka-docs/general/configuration.rst
index 6f00cae81f..5bbb012a1d 100644
--- a/akka-docs/general/configuration.rst
+++ b/akka-docs/general/configuration.rst
@@ -11,26 +11,26 @@ Configuration
 Specifying the configuration file
 ---------------------------------
 
-If you don't specify a configuration file then Akka uses default values, corresponding to the reference 
-configuration files that you see below. You can specify your own configuration file to override any 
-property in the reference config. You only have to define the properties that differ from the default 
+If you don't specify a configuration file then Akka uses default values, corresponding to the reference
+configuration files that you see below. You can specify your own configuration file to override any
+property in the reference config. You only have to define the properties that differ from the default
 configuration.
 
-By default the ``ConfigFactory.load`` method is used, which will load all ``application.conf`` (and 
+By default the ``ConfigFactory.load`` method is used, which will load all ``application.conf`` (and
 ``application.json`` and ``application.properties``) from the root of the classpath, if they exists.
-It uses ``ConfigFactory.defaultOverrides``, i.e. system properties, before falling back to 
+It uses ``ConfigFactory.defaultOverrides``, i.e. system properties, before falling back to
 application and reference configuration.
 
 Note that *all* ``application.{conf,json,properties}`` classpath resources, from all directories and
-jar files, are loaded and merged. Therefore it is a good practice to define separate sub-trees in the 
+jar files, are loaded and merged. Therefore it is a good practice to define separate sub-trees in the
 configuration for each actor system, and grab the specific configuration when instantiating the ActorSystem.
 
 ::
-  
-  myapp1 {  
+
+  myapp1 {
     akka.loglevel = WARNING
   }
-  myapp2 {  
+  myapp2 {
     akka.loglevel = ERROR
   }
 
@@ -44,7 +44,7 @@ classpath resource, file, or URL specified in those properties will be used rath
 ``application.{conf,json,properties}`` classpath resources. Note that classpath resource names start
 with ``/``. ``-Dconfig.resource=/dev.conf`` will load the ``dev.conf`` from the root of the classpath.
 
-You may also specify and parse the configuration programmatically in other ways when instantiating 
+You may also specify and parse the configuration programmatically in other ways when instantiating
 the ``ActorSystem``.
 
 .. includecode:: code/akka/docs/config/ConfigDocSpec.scala
@@ -66,7 +66,7 @@ Each Akka module has a reference configuration file with the default values.
 
 .. literalinclude:: ../../akka-remote/src/main/resources/reference.conf
    :language: none
-   
+
 *akka-testkit:*
 
 .. literalinclude:: ../../akka-testkit/src/main/resources/reference.conf
@@ -103,30 +103,30 @@ A custom ``application.conf`` might look like this::
   # Copy in parts of the reference files and modify as you please.
 
   akka {
+
+    # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT)
     event-handlers = ["akka.event.slf4j.Slf4jEventHandler"]
-    loglevel        = DEBUG  # Options: ERROR, WARNING, INFO, DEBUG
-                             # this level is used by the configured loggers (see "event-handlers") as soon
-                             # as they have been started; before that, see "stdout-loglevel"
-    stdout-loglevel = DEBUG  # Loglevel for the very basic logger activated during AkkaApplication startup
 
-    # Comma separated list of the enabled modules.
-    enabled-modules = ["camel", "remote"]
+    # Log level used by the configured loggers (see "event-handlers") as soon
+    # as they have been started; before that, see "stdout-loglevel"
+    # Options: ERROR, WARNING, INFO, DEBUG
+    loglevel = DEBUG
 
-    # These boot classes are loaded (and created) automatically when the Akka Microkernel boots up
-    #     Can be used to bootstrap your application(s)
-    #     Should be the FQN (Fully Qualified Name) of the boot class which needs to have a default constructor
-    boot = ["sample.camel.Boot",
-            "sample.myservice.Boot"]
+    # Log level for the very basic logger activated during AkkaApplication startup
+    # Options: ERROR, WARNING, INFO, DEBUG
+    stdout-loglevel = DEBUG
 
     actor {
       default-dispatcher {
-        throughput = 10  # Throughput for default Dispatcher, set to 1 for as fair as possible
+        # Throughput for default Dispatcher, set to 1 for as fair as possible
+        throughput = 10
       }
     }
 
     remote {
       server {
-        port = 2562    # The port clients should connect to. Default is 2552 (AKKA)
+        # The port clients should connect to. Default is 2552 (AKKA)
+        port = 2562
       }
     }
   }
@@ -136,7 +136,7 @@ Config file format
 ------------------
 
 The configuration file syntax is described in the `HOCON `_
-specification. Note that it supports three formats; conf, json, and properties. 
+specification. Note that it supports three formats: conf, json, and properties.
 
 
 Including files
@@ -145,7 +145,7 @@ Including files
 Sometimes it can be useful to include another configuration file, for example if you have one ``application.conf`` with all
 environment independent settings and then override some settings for specific environments.
 
-Specifying system property with ``-Dconfig.resource=/dev.conf`` will load the ``dev.conf`` file, which includes the ``application.conf``  
+Specifying the system property ``-Dconfig.resource=/dev.conf`` will load the ``dev.conf`` file, which includes the ``application.conf``.
 
 dev.conf:
 
@@ -166,6 +166,6 @@ specification.
 Logging of Configuration
 ------------------------
 
-If the system or config property ``akka.logConfigOnStart`` is set to ``on``, then the 
-complete configuration at INFO level when the actor system is started. This is useful 
+If the system or config property ``akka.logConfigOnStart`` is set to ``on``, then the
+complete configuration is logged at INFO level when the actor system is started. This is useful
 when you are uncertain of what configuration is used.
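
As a companion to the ``myapp1``/``myapp2`` sub-tree advice above, a hedged Scala sketch of grabbing one sub-tree and falling back to the full configuration when instantiating the ``ActorSystem``; the system and sub-tree names are the hypothetical ones from the example.

.. code-block:: scala

  import akka.actor.ActorSystem
  import com.typesafe.config.ConfigFactory

  object PerSystemConfigExample extends App {
    val root = ConfigFactory.load()

    // use the myapp1 sub-tree, falling back to the merged config for everything else
    val myapp1Config = root.getConfig("myapp1").withFallback(root)

    val system = ActorSystem("myapp1", myapp1Config)
    println(system.settings)
  }
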
diff --git a/akka-docs/java/dispatchers.rst b/akka-docs/java/dispatchers.rst
index 79397b7c66..d053501d78 100644
--- a/akka-docs/java/dispatchers.rst
+++ b/akka-docs/java/dispatchers.rst
@@ -6,7 +6,7 @@ Dispatchers (Java)
 .. sidebar:: Contents
 
    .. contents:: :local:
-   
+
 The Dispatcher is an important piece that allows you to configure the right semantics and parameters for optimal performance, throughput and scalability. Different Actors have different needs.
 
 Akka supports dispatchers for both event-driven lightweight threads, allowing creation of millions of threads on a single workstation, and thread-based Actors, where each dispatcher is bound to a dedicated OS thread.
@@ -44,7 +44,7 @@ There are 4 different types of message dispatchers:
 
 It is recommended to define the dispatcher in :ref:`configuration` to allow for tuning for different environments.
 
-Example of a custom event-based dispatcher, which can be fetched with ``system.dispatcherFactory().lookup("my-dispatcher")`` 
+Example of a custom event-based dispatcher, which can be fetched with ``system.dispatcherFactory().lookup("my-dispatcher")``
 as in the example above:
 
 .. includecode:: ../scala/code/akka/docs/dispatcher/DispatcherDocSpec.scala#my-dispatcher-config
@@ -115,7 +115,7 @@ Priority event-based
 ^^^^^^^^^^^^^^^^^^^^
 
 Sometimes it's useful to be able to specify priority order of messages, that is done by using Dispatcher and supply
-an UnboundedPriorityMailbox or BoundedPriorityMailbox with a ``java.util.Comparator[Envelope]`` or use a 
+an UnboundedPriorityMailbox or BoundedPriorityMailbox with a ``java.util.Comparator[Envelope]`` or use a
 ``akka.dispatch.PriorityGenerator`` (recommended).
 
 Creating a Dispatcher using PriorityGenerator:
@@ -129,9 +129,9 @@ Work-sharing event-based
 
 The ``BalancingDispatcher`` is a variation of the ``Dispatcher`` in which Actors of the same type can be set up to
 share this dispatcher and during execution time the different actors will steal messages from other actors if they
-have less messages to process. 
+have fewer messages to process.
 Although the technique used in this implementation is commonly known as "work stealing", the actual implementation is probably
-best described as "work donating" because the actor of which work is being stolen takes the initiative. 
+best described as "work donating" because the actor of which work is being stolen takes the initiative.
 This can be a great way to improve throughput at the cost of a little higher latency.
 
 .. includecode:: ../scala/code/akka/docs/dispatcher/DispatcherDocSpec.scala#my-balancing-config
@@ -154,8 +154,9 @@ if not specified otherwise.
   akka {
     actor {
       default-dispatcher {
-        task-queue-size = 1000   # If negative (or zero) then an unbounded mailbox is used (default)
-                                 # If positive then a bounded mailbox is used and the capacity is set to the number specified
+        # If negative (or zero) then an unbounded mailbox is used (default)
+        # If positive then a bounded mailbox is used and the capacity is set to the number specified
+        task-queue-size = 1000
       }
     }
   }
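
To make the ``my-dispatcher`` example above concrete, here is a hedged Scala sketch that defines the dispatcher in a config string and fetches it with the lookup call the text mentions (the Scala accessor is assumed to be ``system.dispatcherFactory.lookup``, i.e. the Java call above without parentheses); the config values mirror the includecode sample.

.. code-block:: scala

  import akka.actor.ActorSystem
  import com.typesafe.config.ConfigFactory

  object DispatcherLookupExample extends App {
    // my-dispatcher definition as in the documentation sample above
    val config = ConfigFactory.parseString("""
      my-dispatcher {
        type = Dispatcher
        core-pool-size-min = 2
        core-pool-size-factor = 2.0
        core-pool-size-max = 10
        throughput = 100
      }
    """).withFallback(ConfigFactory.load())

    val system = ActorSystem("DispatcherDoc", config)

    // fetch the custom dispatcher by its configuration id
    val myDispatcher = system.dispatcherFactory.lookup("my-dispatcher")
    println(myDispatcher)
  }
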
diff --git a/akka-docs/java/logging.rst b/akka-docs/java/logging.rst
index c9ad9256fc..20920d940b 100644
--- a/akka-docs/java/logging.rst
+++ b/akka-docs/java/logging.rst
@@ -25,14 +25,14 @@ The source object is translated to a String according to the following rules:
   * in case of a class an approximation of its simpleName
   * and in all other cases the simpleName of its class
 
-The log message may contain argument placeholders ``{}``, which will be substituted if the log level 
+The log message may contain argument placeholders ``{}``, which will be substituted if the log level
 is enabled.
 
 Event Handler
 =============
 
-Logging is performed asynchronously through an event bus. You can configure which event handlers that should 
-subscribe to the logging events. That is done using the 'event-handlers' element in the :ref:`configuration`. 
+Logging is performed asynchronously through an event bus. You can configure which event handlers should
+subscribe to the logging events. That is done using the 'event-handlers' element in the :ref:`configuration`.
 Here you can also define the log level.
 
 .. code-block:: ruby
@@ -40,16 +40,17 @@ Here you can also define the log level.
   akka {
     # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT)
     event-handlers = ["akka.event.Logging$DefaultLogger"]
-    loglevel = "DEBUG" # Options: ERROR, WARNING, INFO, DEBUG
+    # Options: ERROR, WARNING, INFO, DEBUG
+    loglevel = "DEBUG"
   }
 
-The default one logs to STDOUT and is registered by default. It is not intended to be used for production. There is also an :ref:`slf4j-java` 
+The default one logs to STDOUT and is registered by default. It is not intended to be used for production. There is also an :ref:`slf4j-java`
 event handler available in the 'akka-slf4j' module.
 
 Example of creating a listener:
 
 .. includecode:: code/akka/docs/event/LoggingDocTestBase.java
-   :include: imports,imports-listener,my-event-listener 
+   :include: imports,imports-listener,my-event-listener
 
 
 .. _slf4j-java:
@@ -57,7 +58,7 @@ Example of creating a listener:
 SLF4J
 =====
 
-Akka provides an event handler for `SL4FJ `_. This module is available in the 'akka-slf4j.jar'. 
+Akka provides an event handler for `SLF4J `_. This module is available in the 'akka-slf4j.jar'.
 It has one single dependency; the slf4j-api jar. In runtime you also need a SLF4J backend, we recommend `Logback `_:
 
   .. code-block:: xml
@@ -69,10 +70,10 @@ It has one single dependency; the slf4j-api jar. In runtime you also need a SLF4
        runtime
      
 
-You need to enable the Slf4jEventHandler in the 'event-handlers' element in 
-the :ref:`configuration`. Here you can also define the log level of the event bus. 
+You need to enable the Slf4jEventHandler in the 'event-handlers' element in
+the :ref:`configuration`. Here you can also define the log level of the event bus.
 More fine grained log levels can be defined in the configuration of the SLF4J backend
-(e.g. logback.xml). The String representation of the source object that is used when 
+(e.g. logback.xml). The String representation of the source object that is used when
+creating the ``LoggingAdapter`` corresponds to the name of the SLF4J logger.
 
 .. code-block:: ruby
@@ -89,9 +90,9 @@ Since the logging is done asynchronously the thread in which the logging was per
 Mapped Diagnostic Context (MDC) with attribute name ``sourceThread``.
 With Logback the thread name is available with ``%X{sourceThread}`` specifier within the pattern layout configuration::
 
-   
-     
-      %date{ISO8601} %-5level %logger{36} %X{sourceThread} - %msg%n 
-     
-   
+  
+    
+      %date{ISO8601} %-5level %logger{36} %X{sourceThread} - %msg%n
+    
+  
 
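Since this page covers both the event-handler configuration and the ``{}`` placeholder substitution, a small Scala sketch of the corresponding ``LoggingAdapter`` usage may help; the ``Worker`` actor is hypothetical, and obtaining the adapter via ``Logging(context.system, this)`` is assumed from the Akka 2.0 logging API described here.

.. code-block:: scala

  import akka.actor.{ Actor, ActorSystem, Props }
  import akka.event.Logging

  // Hypothetical actor used only to show placeholder logging
  class Worker extends Actor {
    val log = Logging(context.system, this)

    def receive = {
      // the {} placeholder is substituted only if DEBUG is enabled
      case msg => log.debug("received message {}", msg)
    }
  }

  object LoggingExample extends App {
    val system = ActorSystem("LoggingExample")
    system.actorOf(Props[Worker]) ! "hello"
  }
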
diff --git a/akka-docs/modules/camel.rst b/akka-docs/modules/camel.rst
index 4aa988d609..f556914853 100644
--- a/akka-docs/modules/camel.rst
+++ b/akka-docs/modules/camel.rst
@@ -1528,7 +1528,8 @@ when camel is added to the enabled-modules list in :ref:`configuration`, for exa
 
    akka {
      ...
-     enabled-modules = ["camel"] # Options: ["remote", "camel", "http"]
+     # Options: ["remote", "camel", "http"]
+     enabled-modules = ["camel"]
      ...
    }
 
diff --git a/akka-docs/scala/code/akka/docs/dispatcher/DispatcherDocSpec.scala b/akka-docs/scala/code/akka/docs/dispatcher/DispatcherDocSpec.scala
index 27d3995c1c..ffe0e4ed4b 100644
--- a/akka-docs/scala/code/akka/docs/dispatcher/DispatcherDocSpec.scala
+++ b/akka-docs/scala/code/akka/docs/dispatcher/DispatcherDocSpec.scala
@@ -16,27 +16,35 @@ object DispatcherDocSpec {
   val config = """
     //#my-dispatcher-config
     my-dispatcher {
-      type = Dispatcher             # Dispatcher is the name of the event-based dispatcher
-      daemonic = off                # Toggles whether the threads created by this dispatcher should be daemons or not
-      core-pool-size-min = 2        # minimum number of threads to cap factor-based core number to
-      core-pool-size-factor = 2.0   # No of core threads ... ceil(available processors * factor)
-      core-pool-size-max = 10       # maximum number of threads to cap factor-based number to
-      throughput = 100              # Throughput defines the number of messages that are processed in a batch before the
-                                    # thread is returned to the pool. Set to 1 for as fair as possible.
+      # Dispatcher is the name of the event-based dispatcher
+      type = Dispatcher
+      # Toggles whether the threads created by this dispatcher should be daemons or not
+      daemonic = off
+      # minimum number of threads to cap factor-based core number to
+      core-pool-size-min = 2
+      # No of core threads ... ceil(available processors * factor)
+      core-pool-size-factor = 2.0
+      # maximum number of threads to cap factor-based number to
+      core-pool-size-max = 10
+      # Throughput defines the number of messages that are processed in a batch before the
+      # thread is returned to the pool. Set to 1 for as fair as possible.
+      throughput = 100
     }
     //#my-dispatcher-config
-    
+
     //#my-bounded-config
     my-dispatcher-bounded-queue {
       type = Dispatcher
       core-pool-size-factor = 8.0
       max-pool-size-factor  = 16.0
-      task-queue-size = 100         # Specifies the bounded capacity of the task queue
-      task-queue-type = "array"     # Specifies which type of task queue will be used, can be "array" or "linked" (default)
+      # Specifies the bounded capacity of the task queue
+      task-queue-size = 100
+      # Specifies which type of task queue will be used, can be "array" or "linked" (default)
+      task-queue-type = "array"
       throughput = 3
     }
     //#my-bounded-config
-    
+
     //#my-balancing-config
     my-balancing-dispatcher {
       type = BalancingDispatcher
diff --git a/akka-docs/scala/dispatchers.rst b/akka-docs/scala/dispatchers.rst
index dc3dd50e12..a5a4453e89 100644
--- a/akka-docs/scala/dispatchers.rst
+++ b/akka-docs/scala/dispatchers.rst
@@ -6,7 +6,7 @@ Dispatchers (Scala)
 .. sidebar:: Contents
 
    .. contents:: :local:
-   
+
 The Dispatcher is an important piece that allows you to configure the right semantics and parameters for optimal performance, throughput and scalability. Different Actors have different needs.
 
 Akka supports dispatchers for both event-driven lightweight threads, allowing creation of millions of threads on a single workstation, and thread-based Actors, where each dispatcher is bound to a dedicated OS thread.
@@ -127,9 +127,9 @@ Work-sharing event-based
 
 The ``BalancingDispatcher`` is a variation of the ``Dispatcher`` in which Actors of the same type can be set up to
 share this dispatcher and during execution time the different actors will steal messages from other actors if they
-have less messages to process. 
+have fewer messages to process.
 Although the technique used in this implementation is commonly known as "work stealing", the actual implementation is probably
-best described as "work donating" because the actor of which work is being stolen takes the initiative. 
+best described as "work donating" because the actor of which work is being stolen takes the initiative.
 This can be a great way to improve throughput at the cost of a little higher latency.
 
 .. includecode:: code/akka/docs/dispatcher/DispatcherDocSpec.scala#my-balancing-config
@@ -152,8 +152,9 @@ if not specified otherwise.
   akka {
     actor {
       default-dispatcher {
-        task-queue-size = 1000   # If negative (or zero) then an unbounded mailbox is used (default)
-                                 # If positive then a bounded mailbox is used and the capacity is set to the number specified
+        # If negative (or zero) then an unbounded mailbox is used (default)
+        # If positive then a bounded mailbox is used and the capacity is set to the number specified
+        task-queue-size = 1000
       }
     }
   }
diff --git a/akka-docs/scala/logging.rst b/akka-docs/scala/logging.rst
index e5cc7597a9..35f4e838ff 100644
--- a/akka-docs/scala/logging.rst
+++ b/akka-docs/scala/logging.rst
@@ -21,7 +21,7 @@ For convenience you can mixin the ``log`` member into actors, instead of definin
 
 .. code-block:: scala
 
-  class MyActor extends Actor with akka.actor.ActorLogging { 
+  class MyActor extends Actor with akka.actor.ActorLogging {
 
 The second parameter to the ``Logging`` is the source of this logging channel.
 The source object is translated to a String according to the following rules:
@@ -31,14 +31,14 @@ The source object is translated to a String according to the following rules:
   * in case of a class an approximation of its simpleName
   * and in all other cases the simpleName of its class
 
-The log message may contain argument placeholders ``{}``, which will be substituted if the log level 
+The log message may contain argument placeholders ``{}``, which will be substituted if the log level
 is enabled.
 
 Event Handler
 =============
 
-Logging is performed asynchronously through an event bus. You can configure which event handlers that should 
-subscribe to the logging events. That is done using the 'event-handlers' element in the :ref:`configuration`. 
+Logging is performed asynchronously through an event bus. You can configure which event handlers should
+subscribe to the logging events. That is done using the 'event-handlers' element in the :ref:`configuration`.
 Here you can also define the log level.
 
 .. code-block:: ruby
@@ -46,10 +46,11 @@ Here you can also define the log level.
   akka {
     # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT)
     event-handlers = ["akka.event.Logging$DefaultLogger"]
-    loglevel = "DEBUG" # Options: ERROR, WARNING, INFO, DEBUG
+    # Options: ERROR, WARNING, INFO, DEBUG
+    loglevel = "DEBUG"
   }
 
-The default one logs to STDOUT and is registered by default. It is not intended to be used for production. There is also an :ref:`slf4j-scala` 
+The default one logs to STDOUT and is registered by default. It is not intended to be used for production. There is also an :ref:`slf4j-scala`
 event handler available in the 'akka-slf4j' module.
 
 Example of creating a listener:
@@ -63,7 +64,7 @@ Example of creating a listener:
 SLF4J
 =====
 
-Akka provides an event handler for `SL4FJ `_. This module is available in the 'akka-slf4j.jar'. 
+Akka provides an event handler for `SLF4J `_. This module is available in the 'akka-slf4j.jar'.
 It has one single dependency; the slf4j-api jar. In runtime you also need a SLF4J backend, we recommend `Logback `_:
 
   .. code-block:: scala
@@ -71,10 +72,10 @@ It has one single dependency; the slf4j-api jar. In runtime you also need a SLF4
      lazy val logback = "ch.qos.logback" % "logback-classic" % "1.0.0" % "runtime"
 
 
-You need to enable the Slf4jEventHandler in the 'event-handlers' element in 
-the :ref:`configuration`. Here you can also define the log level of the event bus. 
+You need to enable the Slf4jEventHandler in the 'event-handlers' element in
+the :ref:`configuration`. Here you can also define the log level of the event bus.
 More fine grained log levels can be defined in the configuration of the SLF4J backend
-(e.g. logback.xml). The String representation of the source object that is used when 
+(e.g. logback.xml). The String representation of the source object that is used when
+creating the ``LoggingAdapter`` corresponds to the name of the SLF4J logger.
 
 .. code-block:: ruby
@@ -91,9 +92,9 @@ Since the logging is done asynchronously the thread in which the logging was per
 Mapped Diagnostic Context (MDC) with attribute name ``sourceThread``.
 With Logback the thread name is available with ``%X{sourceThread}`` specifier within the pattern layout configuration::
 
-   
-     
-      %date{ISO8601} %-5level %logger{36} %X{sourceThread} - %msg%n 
-     
-   
+  
+    
+      %date{ISO8601} %-5level %logger{36} %X{sourceThread} - %msg%n
+    
+  
 
diff --git a/akka-docs/scala/typed-actors.rst b/akka-docs/scala/typed-actors.rst
index 0e70acd282..5f6920138a 100644
--- a/akka-docs/scala/typed-actors.rst
+++ b/akka-docs/scala/typed-actors.rst
@@ -4,7 +4,7 @@ Typed Actors (Scala)
 .. sidebar:: Contents
 
    .. contents:: :local:
-   
+
 The Typed Actors are implemented through `Typed Actors `_. It uses AOP through `AspectWerkz `_ to turn regular POJOs into asynchronous non-blocking Actors with semantics of the Actor Model. Each method dispatch is turned into a message that is put on a queue to be processed by the Typed Actor sequentially one by one.
 
 If you are using the `Spring Framework `_ then take a look at Akka's `Spring integration `_.
@@ -182,7 +182,8 @@ Akka can help you in this regard. It allows you to turn on an option for seriali
 
   akka {
     actor {
-      serialize-messages = on  # does a deep clone of messages to ensure immutability
+      # does a deep clone of messages to ensure immutability
+      serialize-messages = on
     }
   }
 
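A quick Scala sketch of turning the ``serialize-messages`` option above on for a test system, layering ``ConfigFactory.parseString`` over the defaults; the system name is arbitrary and the setting is meant for testing, not production.

.. code-block:: scala

  import akka.actor.ActorSystem
  import com.typesafe.config.ConfigFactory

  object SerializeMessagesExample extends App {
    // deep-clone messages to verify they are immutable/serializable (testing only)
    val config = ConfigFactory.parseString("akka.actor.serialize-messages = on")
      .withFallback(ConfigFactory.load())

    val system = ActorSystem("SerializeCheck", config)
    println(system.settings)
  }
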
diff --git a/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/resources/reference.conf b/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/resources/reference.conf
index 3e6b914bf7..82beeeddd8 100644
--- a/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/resources/reference.conf
+++ b/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/resources/reference.conf
@@ -3,7 +3,7 @@
 ##################################################
 
 # This the reference config file has all the default settings.
-# Make your edits/overrides in your akka.conf.
+# Make your edits/overrides in your application.conf.
 
 akka {
   actor {
diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/resources/reference.conf b/akka-durable-mailboxes/akka-file-mailbox/src/main/resources/reference.conf
index f81f8995f9..93ee52fcc7 100644
--- a/akka-durable-mailboxes/akka-file-mailbox/src/main/resources/reference.conf
+++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/resources/reference.conf
@@ -3,7 +3,7 @@
 #############################################
 
 # This the reference config file has all the default settings.
-# Make your edits/overrides in your akka.conf.
+# Make your edits/overrides in your application.conf.
 
 akka {
   actor {
diff --git a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/resources/reference.conf b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/resources/reference.conf
index 09a0c316ec..991f638053 100644
--- a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/resources/reference.conf
+++ b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/resources/reference.conf
@@ -3,19 +3,23 @@
 ################################################
 
 # This the reference config file has all the default settings.
-# Make your edits/overrides in your akka.conf.
+# Make your edits/overrides in your application.conf.
 
 akka {
   actor {
     mailbox {
       mongodb {
+
         # Any specified collection name will be used as a prefix for collections that use durable mongo mailboxes
-        uri = "mongodb://localhost/akka.mailbox" # Follow Mongo URI Spec - http://www.mongodb.org/display/DOCS/Connections
+        # Follow Mongo URI Spec - http://www.mongodb.org/display/DOCS/Connections
+        uri = "mongodb://localhost/akka.mailbox"
 
         # Configurable timeouts for certain ops
         timeout {
-          read = 3000ms  # time to wait for a read to succeed before timing out the future
-          write = 3000ms # time to wait for a write to succeed before timing out the future
+          # time to wait for a read to succeed before timing out the future
+          read = 3000ms
+          # time to wait for a write to succeed before timing out the future
+          write = 3000ms
         }
       }
     }
diff --git a/akka-durable-mailboxes/akka-redis-mailbox/src/main/resources/reference.conf b/akka-durable-mailboxes/akka-redis-mailbox/src/main/resources/reference.conf
index 20f1d03abd..7b12dc24b2 100644
--- a/akka-durable-mailboxes/akka-redis-mailbox/src/main/resources/reference.conf
+++ b/akka-durable-mailboxes/akka-redis-mailbox/src/main/resources/reference.conf
@@ -3,7 +3,7 @@
 ##############################################
 
 # This the reference config file has all the default settings.
-# Make your edits/overrides in your akka.conf.
+# Make your edits/overrides in your application.conf.
 
 akka {
   actor {
diff --git a/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/resources/reference.conf b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/resources/reference.conf
index b31de45f76..3dfea7a944 100644
--- a/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/resources/reference.conf
+++ b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/resources/reference.conf
@@ -3,7 +3,7 @@
 ##################################################
 
 # This the reference config file has all the default settings.
-# Make your edits/overrides in your akka.conf.
+# Make your edits/overrides in your application.conf.
 
 akka {
   actor {
diff --git a/akka-remote/src/main/resources/reference.conf b/akka-remote/src/main/resources/reference.conf
index 4083a64ea2..1838ae47fa 100644
--- a/akka-remote/src/main/resources/reference.conf
+++ b/akka-remote/src/main/resources/reference.conf
@@ -3,7 +3,7 @@
 #####################################
 
 # This the reference config file has all the default settings.
-# Make your edits/overrides in your akka.conf.
+# Make your edits/overrides in your application.conf.
 
 akka {
 
@@ -13,18 +13,22 @@ akka {
 
       default {
 
-        remote = ""            # if this is set to a valid remote address, the named actor will be deployed at that node
-                               # e.g. "akka://sys@host:port"
+        # if this is set to a valid remote address, the named actor will be deployed at that node
+        # e.g. "akka://sys@host:port"
+        remote = ""
 
         target {
-          nodes = []           # A list of hostnames and ports for instantiating the children of a non-direct router
-                               #   The format should be on "akka://sys@host:port", where:
-                               #    - sys is the remote actor system name
-                               #    - hostname can be either hostname or IP address the remote actor should connect to
-                               #    - port should be the port for the remote server on the other node
-                               # The number of actor instances to be spawned is still taken from the nr-of-instances
-                               # setting as for local routers; the instances will be distributed round-robin among the
-                               # given nodes.
+
+          # A list of hostnames and ports for instantiating the children of a non-direct router
+          #   The format should be "akka://sys@host:port", where:
+          #    - sys is the remote actor system name
+          #    - hostname can be either hostname or IP address the remote actor should connect to
+          #    - port should be the port for the remote server on the other node
+          # The number of actor instances to be spawned is still taken from the nr-of-instances
+          # setting as for local routers; the instances will be distributed round-robin among the
+          # given nodes.
+          nodes = []
+
         }
       }
     }
@@ -35,50 +39,68 @@ akka {
 
     use-compression = off
 
-    secure-cookie = ""                            # Generate your own with '$AKKA_HOME/scripts/generate_config_with_secure_cookie.sh'
-                                                  #     or using 'akka.util.Crypt.generateSecureCookie'
+    # Generate your own with '$AKKA_HOME/scripts/generate_config_with_secure_cookie.sh'
+    #     or using 'akka.util.Crypt.generateSecureCookie'
+    secure-cookie = ""
 
-    remote-daemon-ack-timeout = 30s               # Timeout for ACK of cluster operations, lik checking actor out etc.
+    # Timeout for ACK of cluster operations, like checking actor out etc.
+    remote-daemon-ack-timeout = 30s
 
-    use-passive-connections = on                  # Reuse inbound connections for outbound messages
+    # Reuse inbound connections for outbound messages
+    use-passive-connections = on
 
-    failure-detector {                            # accrual failure detection config
-      threshold = 8                               # defines the failure detector threshold
-                                                  #     A low threshold is prone to generate many wrong suspicions but ensures a
-                                                  #     quick detection in the event of a real crash. Conversely, a high threshold
-                                                  #     generates fewer mistakes but needs more time to detect actual crashes
+    # accrual failure detection config
+    failure-detector {
+      # defines the failure detector threshold
+      #     A low threshold is prone to generate many wrong suspicions but ensures a
+      #     quick detection in the event of a real crash. Conversely, a high threshold
+      #     generates fewer mistakes but needs more time to detect actual crashes
+      threshold = 8
       max-sample-size = 1000
     }
-    
+
     gossip {
       initialDelay = 5s
       frequency = 1s
     }
-    
-    compute-grid-dispatcher {                     # The dispatcher used for remote system messages 
-      name = ComputeGridDispatcher                # defaults to same settings as default-dispatcher
+
+    # The dispatcher used for remote system messages
+    compute-grid-dispatcher {
+      # defaults to same settings as default-dispatcher
+      name = ComputeGridDispatcher
     }
 
     server {
-      hostname = ""                               # The hostname or ip to bind the remoting to, InetAddress.getLocalHost.getHostAddress is used if empty
-      port = 2552                                 # The default remote server port clients should connect to. Default is 2552 (AKKA)
-      message-frame-size = 1 MiB                  # Increase this if you want to be able to send messages with large payloads
-      connection-timeout = 120s                   # Timeout duration
-      require-cookie = off                        # Should the remote server require that it peers share the same secure-cookie (defined in the 'remote' section)?
-      untrusted-mode = off                        # Enable untrusted mode for full security of server managed actors, allows untrusted clients to connect.
-      backlog = 4096                              # Sets the size of the connection backlog
+      # The hostname or ip to bind the remoting to, InetAddress.getLocalHost.getHostAddress is used if empty
+      hostname = ""
+      # The default remote server port clients should connect to. Default is 2552 (AKKA)
+      port = 2552
+      # Increase this if you want to be able to send messages with large payloads
+      message-frame-size = 1 MiB
+      # Timeout duration
+      connection-timeout = 120s
+      # Should the remote server require that its peers share the same secure-cookie (defined in the 'remote' section)?
+      require-cookie = off
+      # Enable untrusted mode for full security of server managed actors, allows untrusted clients to connect.
+      untrusted-mode = off
+      # Sets the size of the connection backlog
+      backlog = 4096
     }
 
     client {
       buffering {
-        retry-message-send-on-failure = off       # Should message buffering on remote client error be used (buffer flushed on successful reconnect)
-        capacity = -1                             # If negative (or zero) then an unbounded mailbox is used (default)
-                                                  #     If positive then a bounded mailbox is used and the capacity is set using the property
+        # Should message buffering on remote client error be used (buffer flushed on successful reconnect)
+        retry-message-send-on-failure = off
+        # If negative (or zero) then an unbounded mailbox is used (default)
+        #     If positive then a bounded mailbox is used and the capacity is set using the property
+        capacity = -1
+
       }
       reconnect-delay = 5s
       read-timeout = 3600s
       message-frame-size = 1 MiB
-      reconnection-time-window = 600s             # Maximum time window that a client should try to reconnect for
+      # Maximum time window that a client should try to reconnect for
+      reconnection-time-window = 600s
     }
   }
 
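To illustrate the deployment format documented above (a ``remote`` address for direct deployment, or ``target.nodes`` for a routed actor), here is a hedged sketch that only parses such a configuration; the deployment ids, host names and ports are hypothetical, and nothing here claims how the deployer resolves them at runtime.

.. code-block:: scala

  import com.typesafe.config.ConfigFactory

  object RemoteDeployConfigExample extends App {
    // hypothetical deployment entries following the commented format above
    val config = ConfigFactory.parseString("""
      akka.actor.deployment {
        "/service-ping" {
          remote = "akka://sys@host1:2552"
        }
        "/service-pool" {
          router = "round-robin"
          nr-of-instances = 4
          target.nodes = ["akka://sys@host1:2552", "akka://sys@host2:2552"]
        }
      }
    """).withFallback(ConfigFactory.load())

    println(config.getConfig("akka.actor.deployment"))
  }
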
diff --git a/akka-stm/src/main/resources/reference.conf b/akka-stm/src/main/resources/reference.conf
index 98a3e70d5d..05aa9b433c 100644
--- a/akka-stm/src/main/resources/reference.conf
+++ b/akka-stm/src/main/resources/reference.conf
@@ -3,19 +3,21 @@
 ##################################
 
 # This the reference config file has all the default settings.
-# Make your edits/overrides in your akka.conf.
+# Make your edits/overrides in your application.conf.
 
 akka {
 
   stm {
-    fair             = on     # Should global transactions be fair or non-fair (non fair yield better performance)
+    # Should global transactions be fair or non-fair (non-fair yields better performance)
+    fair             = on
     max-retries      = 1000
-    timeout          = 5s     # Default timeout for blocking transactions and transaction set
-    write-skew       = on  
-    blocking-allowed = off  
-    interruptible    = off  
-    speculative      = on  
-    quick-release    = on  
+    # Default timeout for blocking transactions and transaction set
+    timeout          = 5s
+    write-skew       = on
+    blocking-allowed = off
+    interruptible    = off
+    speculative      = on
+    quick-release    = on
     propagation      = "requires"
     trace-level      = "none"
   }
diff --git a/akka-testkit/src/main/resources/reference.conf b/akka-testkit/src/main/resources/reference.conf
index 0aa150e4b5..d2a4859c30 100644
--- a/akka-testkit/src/main/resources/reference.conf
+++ b/akka-testkit/src/main/resources/reference.conf
@@ -3,12 +3,15 @@
 ######################################
 
 # This the reference config file has all the default settings.
-# Make your edits/overrides in your akka.conf.
+# Make your edits/overrides in your application.conf.
 
 akka {
   test {
-    timefactor =  1.0           # factor by which to scale timeouts during tests, e.g. to account for shared build system load
-    filter-leeway = 3s          # duration of EventFilter.intercept waits after the block is finished until all required messages are received
-    single-expect-default = 3s  # duration to wait in expectMsg and friends outside of within() block by default
+    # factor by which to scale timeouts during tests, e.g. to account for shared build system load
+    timefactor =  1.0
+    # duration of EventFilter.intercept waits after the block is finished until all required messages are received
+    filter-leeway = 3s
+    # duration to wait in expectMsg and friends outside of within() block by default
+    single-expect-default = 3s
   }
 }

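Note on the reference.conf changes above: the comments now direct overrides to application.conf rather than akka.conf. The sketch below is a hedged illustration of supplying such overrides programmatically; it assumes the Typesafe config API (ConfigFactory.parseString / load / withFallback) and the 2.0-era ActorSystem(name, config) factory overload, and the key values chosen are purely illustrative.

    import akka.actor.ActorSystem
    import com.typesafe.config.ConfigFactory

    object ConfigOverrideSketch extends App {
      // Illustrative overrides of two settings documented in the reference.conf hunks above.
      val overrides = ConfigFactory.parseString("""
        akka.test.timefactor = 2.0        # e.g. a slow, shared build machine
        akka.remote.server.port = 2553    # avoid clashing with the default 2552
      """)
      // application.conf (picked up by load()) still applies; explicit overrides win.
      val config = overrides.withFallback(ConfigFactory.load())
      val system = ActorSystem("ConfigDemo", config)
    }
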
From f2e36f060d99e1339f60bd91ec1b65912f9b8150 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jonas=20Bone=CC=81r?= 
Date: Wed, 14 Dec 2011 15:20:17 +0100
Subject: [PATCH 13/34] Fix minor issue in the untyped actor docs

---
 akka-docs/java/untyped-actors.rst | 12 +++++-------
 1 file changed, 5 insertions(+), 7 deletions(-)

diff --git a/akka-docs/java/untyped-actors.rst b/akka-docs/java/untyped-actors.rst
index ab82599450..eddcadc9dc 100644
--- a/akka-docs/java/untyped-actors.rst
+++ b/akka-docs/java/untyped-actors.rst
@@ -76,18 +76,16 @@ add initialization code for the actor.
 Creating Actors with non-default constructor
 --------------------------------------------
 
-If your UntypedActor has a constructor that takes parameters then you can't create it using 'actorOf(clazz)'.
-Instead you can use a variant of ``actorOf`` that takes an instance of an 'UntypedActorFactory'
-in which you can create the Actor in any way you like. If you use this method then you to make sure that
-no one can get a reference to the actor instance. If they can get a reference it then they can
-touch state directly in bypass the whole actor dispatching mechanism and create race conditions
-which can lead to corrupt data.
+If your UntypedActor has a constructor that takes parameters then you can't create it using
+'actorOf(new Props(clazz))'. Then you can instead pass in 'new Props(new UntypedActorFactory() {..})'
+in which you can create the Actor in any way you like.
 
 Here is an example:
 
 .. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#creating-constructor
 
-This way of creating the Actor is also great for integrating with Dependency Injection (DI) frameworks like Guice or Spring.
+This way of creating the Actor is also great for integrating with Dependency Injection
+(DI) frameworks like Guice or Spring.
 
 
 Props

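The doc fix above tells Java users to pass a Props built from an UntypedActorFactory when the actor's constructor takes parameters. A minimal Scala counterpart of the same idea is sketched below, using the by-name Props(new ...) creator shown in the Props scaladoc; the Greeter class and system name are illustrative.

    import akka.actor.{ Actor, ActorSystem, Props }

    class Greeter(greeting: String) extends Actor {
      def receive = {
        case name: String => sender ! (greeting + ", " + name)
      }
    }

    object NonDefaultCtorSketch extends App {
      val system = ActorSystem("Demo")
      // Props(new Greeter(...)) captures construction as a creator closure,
      // so the actor instance itself never escapes to calling code.
      val greeter = system.actorOf(Props(new Greeter("Hello")), "greeter")
      greeter ! "Akka"
    }
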
From 7f93f560489f1047a67893a8fc48cf7b4f34a10f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jonas=20Bone=CC=81r?= 
Date: Wed, 14 Dec 2011 15:22:04 +0100
Subject: [PATCH 14/34] Rearranged ordering of sections in untyped actor docs

---
 akka-docs/java/untyped-actors.rst | 34 +++++++++++++++----------------
 1 file changed, 17 insertions(+), 17 deletions(-)

diff --git a/akka-docs/java/untyped-actors.rst b/akka-docs/java/untyped-actors.rst
index eddcadc9dc..f6af777acd 100644
--- a/akka-docs/java/untyped-actors.rst
+++ b/akka-docs/java/untyped-actors.rst
@@ -39,6 +39,23 @@ Here is an example:
 
 .. includecode:: code/akka/docs/actor/MyUntypedActor.java#my-untyped-actor
 
+Props
+-----
+
+``Props`` is a configuration class to specify options for the creation
+of actors. Here are some examples on how to create a ``Props`` instance.
+
+.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#creating-props-config
+
+
+Creating Actors with Props
+--------------------------
+
+Actors are created by passing in a ``Props`` instance into the ``actorOf`` factory method.
+
+.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#creating-props
+
+
 Creating Actors with default constructor
 ----------------------------------------
 
@@ -88,23 +105,6 @@ This way of creating the Actor is also great for integrating with Dependency Inj
 (DI) frameworks like Guice or Spring.
 
 
-Props
------
-
-``Props`` is a configuration class to specify options for the creation
-of actors. Here are some examples on how to create a ``Props`` instance.
-
-.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#creating-props-config
-
-
-Creating Actors with Props
---------------------------
-
-Actors are created by passing in a ``Props`` instance into the ``actorOf`` factory method.
-
-.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#creating-props
-
-
 UntypedActor API
 ================
 

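The reordering above moves the Props section ahead of the actor-creation sections, since Props is the configuration object everything else builds on. A small hedged sketch of the two construction styles named in the moved text follows; the class and object names are illustrative.

    import akka.actor.{ Actor, Props }

    class SomeActor extends Actor {
      def receive = { case _ => () }
    }

    object PropsConfigSketch extends App {
      // Props is an immutable case class: each withXxx call returns a new copy,
      // so a Props value can be built up step by step and then handed to actorOf.
      val fromType    = Props[SomeActor]
      val fromCreator = Props().withCreator(new SomeActor)
      println("configured: " + fromType + " and " + fromCreator)
    }
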
From 328d62d16b2e856f76ee9ce66a48c4aa597e4902 Mon Sep 17 00:00:00 2001
From: Patrik Nordwall 
Date: Wed, 14 Dec 2011 16:00:50 +0100
Subject: [PATCH 15/34] Minor review comment fix

---
 akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala
index 5a6180b130..61ac34005e 100644
--- a/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala
@@ -608,7 +608,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
           lz.open()
           x1() + x2()
         }
-        assert(lx.await(2000 milliseconds))
+        assert(lx.await(2 seconds))
         assert(!ly.isOpen)
         assert(!lz.isOpen)
         assert(List(x1, x2, y1, y2).forall(_.isCompleted == false))

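The one-liner above swaps "2000 milliseconds" for the equivalent "2 seconds", both built with the duration DSL from akka.util.duration._. A minimal sketch of that equivalence, with an illustrative object name:

    import akka.util.Duration
    import akka.util.duration._

    object DurationDslSketch extends App {
      val a: Duration = 2 seconds
      val b: Duration = 2000 milliseconds
      // Same length expressed in different units.
      println(a.toMillis == b.toMillis)   // true: both are 2000 ms
    }
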
From 2fd43bc4b1eb4ee83910adb3824138c650569294 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jonas=20Bone=CC=81r?= 
Date: Wed, 14 Dec 2011 16:31:26 +0100
Subject: [PATCH 16/34] Removed withRouter[TYPE] method and cleaned up some
 docs.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: Jonas Bonér 
---
 .../src/main/scala/akka/actor/Props.scala     | 23 +++----------------
 .../src/main/scala/akka/routing/package.scala |  7 ------
 .../src/main/scala/Pi.scala                   |  2 +-
 3 files changed, 4 insertions(+), 28 deletions(-)

diff --git a/akka-actor/src/main/scala/akka/actor/Props.scala b/akka-actor/src/main/scala/akka/actor/Props.scala
index 29d1374dbc..e96a5b37c9 100644
--- a/akka-actor/src/main/scala/akka/actor/Props.scala
+++ b/akka-actor/src/main/scala/akka/actor/Props.scala
@@ -93,17 +93,16 @@ object Props {
  * {{{
  *  val props = Props[MyActor]
  *  val props = Props(new MyActor)
- *  val props = Props {
+ *  val props = Props(
  *    creator = ..,
  *    dispatcher = ..,
  *    timeout = ..,
  *    faultHandler = ..,
  *    routerConfig = ..
- *  }
+ *  )
  *  val props = Props().withCreator(new MyActor)
  *  val props = Props[MyActor].withTimeout(timeout)
- *  val props = Props[MyActor].withRouter[RoundRobinRouter]
- *  val props = Props[MyActor].withRouter(new RoundRobinRouter(..))
+ *  val props = Props[MyActor].withRouter(RoundRobinRouter(..))
  *  val props = Props[MyActor].withFaultHandler(OneForOneStrategy {
  *    case e: IllegalStateException ⇒ Resume
  *  })
@@ -199,20 +198,4 @@ case class Props(
    * Returns a new Props with the specified router config set.
    */
   def withRouter(r: RouterConfig) = copy(routerConfig = r)
-
-  /**
-   * Returns a new Props with the specified router config set.
-   *
-   * Scala API.
-   */
-  def withRouter[T <: RouterConfig: ClassManifest] = {
-    val routerConfig = implicitly[ClassManifest[T]].erasure.asInstanceOf[Class[_ <: RouterConfig]] match {
-      case RoundRobinRouterClass    ⇒ RoundRobinRouter()
-      case RandomRouterClass        ⇒ RandomRouter()
-      case BroadcastRouterClass     ⇒ BroadcastRouter()
-      case ScatterGatherRouterClass ⇒ ScatterGatherFirstCompletedRouter()
-      case unknown                  ⇒ throw new akka.config.ConfigurationException("Router not supported [" + unknown.getName + "]")
-    }
-    copy(routerConfig = routerConfig)
-  }
 }
diff --git a/akka-actor/src/main/scala/akka/routing/package.scala b/akka-actor/src/main/scala/akka/routing/package.scala
index 579484493c..44662856b1 100644
--- a/akka-actor/src/main/scala/akka/routing/package.scala
+++ b/akka-actor/src/main/scala/akka/routing/package.scala
@@ -5,12 +5,5 @@
 package akka
 
 package object routing {
-
   type Route = PartialFunction[(akka.actor.ActorRef, Any), Iterable[Destination]]
-
-  // To allow pattern matching on the class types
-  val RoundRobinRouterClass = classOf[RoundRobinRouter]
-  val RandomRouterClass = classOf[RandomRouter]
-  val BroadcastRouterClass = classOf[BroadcastRouter]
-  val ScatterGatherRouterClass = classOf[ScatterGatherFirstCompletedRouter]
 }
diff --git a/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala b/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala
index 4d19f47db4..b46ed3f1f8 100644
--- a/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala
+++ b/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala
@@ -50,7 +50,7 @@ object Pi extends App {
     var start: Long = _
 
     //#create-router
-    val router = context.actorOf(Props[Worker].withRouter(RoundRobinRouter(nrOfInstances = nrOfWorkers)), "pi")
+    val router = context.actorOf(Props[Worker].withRouter(RoundRobinRouter(nrOfWorkers)), "pi")
     //#create-router
 
     //#master-receive

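With the withRouter[TYPE] variant removed above, routers are attached by passing a RouterConfig instance, as the updated Props scaladoc and the Pi tutorial change show. A short hedged sketch of the surviving API; Worker and the counts are illustrative.

    import akka.actor.{ Actor, ActorSystem, Props }
    import akka.routing.RoundRobinRouter

    class Worker extends Actor {
      def receive = { case n: Int => sender ! (n * n) }
    }

    object RouterSketch extends App {
      val system = ActorSystem("RouterDemo")
      // withRouter[RoundRobinRouter] no longer compiles; configure explicitly instead.
      val router = system.actorOf(Props[Worker].withRouter(RoundRobinRouter(4)), "workers")
      (1 to 8) foreach (i => router ! i)
    }
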
From 04cd2adee9746ab573d0090f0f3998bebb73e37b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jonas=20Bone=CC=81r?= 
Date: Wed, 14 Dec 2011 17:30:54 +0100
Subject: [PATCH 17/34] Moved Timeout classes from akka.actor._ to akka.util._.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: Jonas Bonér 
---
 _mb/mailbox_user__b                           | Bin 0 -> 111 bytes
 _mb/mailbox_user_myactor                      | Bin 0 -> 143 bytes
 .../java/akka/dispatch/JavaFutureTests.java   |   4 +-
 .../test/scala/akka/actor/ActorRefSpec.scala  |   1 +
 .../scala/akka/actor/ActorTimeoutSpec.scala   |   1 +
 .../actor/LocalActorRefProviderSpec.scala     |   1 +
 .../scala/akka/actor/TypedActorSpec.scala     |   1 +
 .../akka/actor/dispatch/ActorModelSpec.scala  |   3 +-
 .../akka/dispatch/PromiseStreamSpec.scala     |   2 +-
 .../src/main/scala/akka/actor/Actor.scala     |  26 ---------
 .../scala/akka/actor/ActorRefProvider.scala   |   4 +-
 .../main/scala/akka/actor/ActorSystem.scala   |   6 +-
 .../main/scala/akka/actor/TypedActor.scala    |   4 +-
 .../src/main/scala/akka/dispatch/Future.scala |   2 +-
 .../scala/akka/dispatch/PromiseStream.scala   |   2 +-
 .../scala/akka/dispatch/japi/Future.scala     |   2 +-
 .../src/main/scala/akka/event/Logging.scala   |   2 +-
 .../src/main/scala/akka/routing/Routing.scala |   1 +
 .../src/main/scala/akka/util/Duration.scala   |  26 +++++++++
 .../main/scala/akka/util/cps/package.scala    |   2 +-
 .../akka/camel/TypedConsumerJavaTestBase.java |   2 +-
 akka-docs/scala/actors.rst                    |  52 +++++++++---------
 akka-docs/scala/futures.rst                   |   6 +-
 .../akka/remote/RemoteActorRefProvider.scala  |   1 +
 .../src/main/scala/akka/agent/Agent.scala     |   1 +
 .../scala/akka/agent/test/AgentSpec.scala     |   2 +-
 .../test/CoordinatedIncrementSpec.scala       |   1 +
 .../transactor/test/FickleFriendsSpec.scala   |   1 +
 .../akka/transactor/test/TransactorSpec.scala |   1 +
 .../java/akka/tutorial/first/java/Pi.java     |   5 +-
 30 files changed, 89 insertions(+), 73 deletions(-)
 create mode 100644 _mb/mailbox_user__b
 create mode 100644 _mb/mailbox_user_myactor

diff --git a/_mb/mailbox_user__b b/_mb/mailbox_user__b
new file mode 100644
index 0000000000000000000000000000000000000000..c28d1e14467b20ab423315731de6d43bbfaa3f75
GIT binary patch
literal 111
zcmZQdVPIg8T$^ZX!pHyzT#{VkiP_nSR{HwBmBE$8C8@dkrNya5`YK66@?6|&-ZHS3
rFtC)Q7MBPKWF?j*>g6QnrRxQk6lLb6D~WN5pzF;m&BN{bSca#DR0Gjo#iD_ruEg9}oV^-GIW
zi}Z6V6O&8wi-Z)oc-FjSU@c)_%}CA3$rlpHN-Rs%%Sp^j*9$Hw%FIhwQsz>`Yg}Gw
KP7Wg*$a(-4>M8F4

literal 0
HcmV?d00001

diff --git a/akka-actor-tests/src/test/java/akka/dispatch/JavaFutureTests.java b/akka-actor-tests/src/test/java/akka/dispatch/JavaFutureTests.java
index b3c82bc957..4f718b036c 100644
--- a/akka-actor-tests/src/test/java/akka/dispatch/JavaFutureTests.java
+++ b/akka-actor-tests/src/test/java/akka/dispatch/JavaFutureTests.java
@@ -1,6 +1,6 @@
 package akka.dispatch;
 
-import akka.actor.Timeout;
+import akka.util.Timeout;
 import akka.actor.ActorSystem;
 
 import akka.japi.*;
@@ -21,7 +21,7 @@ public class JavaFutureTests {
 
   private static ActorSystem system;
   private static Timeout t;
-    
+
   private final Duration timeout = Duration.create(5, TimeUnit.SECONDS);
 
   @BeforeClass
diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala
index 4e42c6d9d0..206de8498f 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala
@@ -8,6 +8,7 @@ import org.scalatest.WordSpec
 import org.scalatest.matchers.MustMatchers
 
 import akka.testkit._
+import akka.util.Timeout
 import akka.util.duration._
 import java.lang.IllegalStateException
 import akka.util.ReflectiveAccess
diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorTimeoutSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorTimeoutSpec.scala
index 0d77a75e56..36874ef7dd 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/ActorTimeoutSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/ActorTimeoutSpec.scala
@@ -9,6 +9,7 @@ import akka.testkit.AkkaSpec
 import akka.testkit.DefaultTimeout
 import java.util.concurrent.TimeoutException
 import akka.dispatch.Await
+import akka.util.Timeout
 
 @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
 class ActorTimeoutSpec extends AkkaSpec with BeforeAndAfterAll with DefaultTimeout {
diff --git a/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala
index 1abc6896f9..de59320fd8 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala
@@ -6,6 +6,7 @@ package akka.actor
 
 import akka.testkit._
 import akka.util.duration._
+import akka.util.Timeout
 import akka.dispatch.{ Await, Future }
 
 @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
diff --git a/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala
index 1637354b7f..426794f72a 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala
@@ -6,6 +6,7 @@ package akka.actor
 
 import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach }
 import akka.util.Duration
+import akka.util.Timeout
 import akka.util.duration._
 import akka.serialization.Serialization
 import java.util.concurrent.atomic.AtomicReference
diff --git a/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala
index 82cabd800b..0ebe03ffac 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala
@@ -6,6 +6,7 @@ package akka.actor.dispatch
 import org.scalatest.Assertions._
 import akka.testkit._
 import akka.dispatch._
+import akka.util.Timeout
 import java.util.concurrent.atomic.AtomicLong
 import java.util.concurrent.{ ConcurrentHashMap, CountDownLatch, TimeUnit }
 import akka.util.Switch
@@ -493,4 +494,4 @@ class BalancingDispatcherModelSpec extends ActorModelSpec {
       assertRefDefaultZero(b)(registers = 1, unregisters = 1, msgsReceived = 1, msgsProcessed = 1)
     }
   }
-}
\ No newline at end of file
+}
diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/PromiseStreamSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/PromiseStreamSpec.scala
index e41dc9c4cd..b3ce0108dd 100644
--- a/akka-actor-tests/src/test/scala/akka/dispatch/PromiseStreamSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/dispatch/PromiseStreamSpec.scala
@@ -2,7 +2,7 @@ package akka.dispatch
 
 import Future.flow
 import akka.util.cps._
-import akka.actor.Timeout
+import akka.util.Timeout
 import akka.util.duration._
 import akka.testkit.AkkaSpec
 import akka.testkit.DefaultTimeout
diff --git a/akka-actor/src/main/scala/akka/actor/Actor.scala b/akka-actor/src/main/scala/akka/actor/Actor.scala
index df1bb0e6a5..283fc05601 100644
--- a/akka-actor/src/main/scala/akka/actor/Actor.scala
+++ b/akka-actor/src/main/scala/akka/actor/Actor.scala
@@ -115,32 +115,6 @@ object Status {
   case class Failure(cause: Throwable) extends Status
 }
 
-case class Timeout(duration: Duration) {
-  def this(timeout: Long) = this(Duration(timeout, TimeUnit.MILLISECONDS))
-  def this(length: Long, unit: TimeUnit) = this(Duration(length, unit))
-}
-
-object Timeout {
-  /**
-   * A timeout with zero duration, will cause most requests to always timeout.
-   */
-  val zero = new Timeout(Duration.Zero)
-
-  /**
-   * A Timeout with infinite duration. Will never timeout. Use extreme caution with this
-   * as it may cause memory leaks, blocked threads, or may not even be supported by
-   * the receiver, which would result in an exception.
-   */
-  val never = new Timeout(Duration.Inf)
-
-  def apply(timeout: Long) = new Timeout(timeout)
-  def apply(length: Long, unit: TimeUnit) = new Timeout(length, unit)
-
-  implicit def durationToTimeout(duration: Duration) = new Timeout(duration)
-  implicit def intToTimeout(timeout: Int) = new Timeout(timeout)
-  implicit def longToTimeout(timeout: Long) = new Timeout(timeout)
-}
-
 trait ActorLogging { this: Actor ⇒
   val log = akka.event.Logging(context.system.eventStream, context.self)
 }
diff --git a/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala b/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala
index c8fdc70e08..26f33b3056 100644
--- a/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala
+++ b/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala
@@ -8,10 +8,12 @@ import java.util.concurrent.atomic.AtomicLong
 import java.util.concurrent.{ ConcurrentHashMap, TimeUnit }
 import scala.annotation.tailrec
 import org.jboss.netty.akka.util.{ TimerTask, HashedWheelTimer }
-import akka.actor.Timeout.intToTimeout
+import akka.util.Timeout
+import akka.util.Timeout.intToTimeout
 import akka.config.ConfigurationException
 import akka.dispatch._
 import akka.routing._
+import akka.util.Timeout
 import akka.AkkaException
 import com.eaio.uuid.UUID
 import akka.util.{ Duration, Switch, Helpers }
diff --git a/akka-actor/src/main/scala/akka/actor/ActorSystem.scala b/akka-actor/src/main/scala/akka/actor/ActorSystem.scala
index b0b69122e6..a078a61a14 100644
--- a/akka-actor/src/main/scala/akka/actor/ActorSystem.scala
+++ b/akka-actor/src/main/scala/akka/actor/ActorSystem.scala
@@ -8,6 +8,8 @@ import akka.actor._
 import akka.event._
 import akka.dispatch._
 import akka.util.duration._
+import akka.util.Timeout
+import akka.util.Timeout._
 import org.jboss.netty.akka.util.HashedWheelTimer
 import java.util.concurrent.TimeUnit.MILLISECONDS
 import java.util.concurrent.TimeUnit.NANOSECONDS
@@ -464,8 +466,8 @@ class ActorSystemImpl(val name: String, applicationConfig: Config) extends Actor
   }
 
   /*
-   * This is called after the last actor has signaled its termination, i.e. 
-   * after the last dispatcher has had its chance to schedule its shutdown 
+   * This is called after the last actor has signaled its termination, i.e.
+   * after the last dispatcher has had its chance to schedule its shutdown
    * action.
    */
   protected def stopScheduler(): Unit = scheduler match {
diff --git a/akka-actor/src/main/scala/akka/actor/TypedActor.scala b/akka-actor/src/main/scala/akka/actor/TypedActor.scala
index 5777f84277..2172351093 100644
--- a/akka-actor/src/main/scala/akka/actor/TypedActor.scala
+++ b/akka-actor/src/main/scala/akka/actor/TypedActor.scala
@@ -6,7 +6,7 @@ package akka.actor
 
 import akka.japi.{ Creator, Option ⇒ JOption }
 import java.lang.reflect.{ InvocationTargetException, Method, InvocationHandler, Proxy }
-import akka.util.{ Duration }
+import akka.util.{ Duration, Timeout }
 import java.util.concurrent.atomic.{ AtomicReference ⇒ AtomVar }
 import akka.serialization.{ Serializer, Serialization }
 import akka.dispatch._
@@ -481,4 +481,4 @@ class TypedActorExtension(system: ActorSystemImpl) extends TypedActorFactory wit
       }
     }
     else null
-}
\ No newline at end of file
+}
diff --git a/akka-actor/src/main/scala/akka/dispatch/Future.scala b/akka-actor/src/main/scala/akka/dispatch/Future.scala
index 2bbc1fcb15..bd436d79c9 100644
--- a/akka-actor/src/main/scala/akka/dispatch/Future.scala
+++ b/akka-actor/src/main/scala/akka/dispatch/Future.scala
@@ -7,7 +7,7 @@ package akka.dispatch
 
 import akka.AkkaException
 import akka.event.Logging.Error
-import akka.actor.Timeout
+import akka.util.Timeout
 import scala.Option
 import akka.japi.{ Procedure, Function ⇒ JFunc, Option ⇒ JOption }
 
diff --git a/akka-actor/src/main/scala/akka/dispatch/PromiseStream.scala b/akka-actor/src/main/scala/akka/dispatch/PromiseStream.scala
index 4ec0aaf300..b6305701b2 100644
--- a/akka-actor/src/main/scala/akka/dispatch/PromiseStream.scala
+++ b/akka-actor/src/main/scala/akka/dispatch/PromiseStream.scala
@@ -7,7 +7,7 @@ package akka.dispatch
 import java.util.concurrent.atomic.AtomicReference
 import scala.util.continuations._
 import scala.annotation.{ tailrec }
-import akka.actor.Timeout
+import akka.util.Timeout
 
 object PromiseStream {
   def apply[A]()(implicit dispatcher: MessageDispatcher, timeout: Timeout): PromiseStream[A] = new PromiseStream[A]
diff --git a/akka-actor/src/main/scala/akka/dispatch/japi/Future.scala b/akka-actor/src/main/scala/akka/dispatch/japi/Future.scala
index 64852912fe..789cf1bf21 100644
--- a/akka-actor/src/main/scala/akka/dispatch/japi/Future.scala
+++ b/akka-actor/src/main/scala/akka/dispatch/japi/Future.scala
@@ -3,7 +3,7 @@
  */
 package akka.dispatch.japi
 
-import akka.actor.Timeout
+import akka.util.Timeout
 import akka.japi.{ Procedure2, Procedure, Function ⇒ JFunc, Option ⇒ JOption }
 
 /* Java API */
diff --git a/akka-actor/src/main/scala/akka/event/Logging.scala b/akka-actor/src/main/scala/akka/event/Logging.scala
index 2bcd6b762b..a1008e80e8 100644
--- a/akka-actor/src/main/scala/akka/event/Logging.scala
+++ b/akka-actor/src/main/scala/akka/event/Logging.scala
@@ -10,7 +10,7 @@ import akka.util.ReflectiveAccess
 import akka.config.ConfigurationException
 import akka.util.ReentrantGuard
 import akka.util.duration._
-import akka.actor.Timeout
+import akka.util.Timeout
 import java.util.concurrent.atomic.AtomicInteger
 import akka.actor.ActorRefProvider
 import scala.util.control.NoStackTrace
diff --git a/akka-actor/src/main/scala/akka/routing/Routing.scala b/akka-actor/src/main/scala/akka/routing/Routing.scala
index a71f206959..8196dcfa8f 100644
--- a/akka-actor/src/main/scala/akka/routing/Routing.scala
+++ b/akka-actor/src/main/scala/akka/routing/Routing.scala
@@ -6,6 +6,7 @@ package akka.routing
 
 import akka.AkkaException
 import akka.actor._
+import akka.util.Timeout
 import akka.config.ConfigurationException
 import akka.dispatch.{ Future, MessageDispatcher }
 import akka.util.{ ReflectiveAccess, Duration }
diff --git a/akka-actor/src/main/scala/akka/util/Duration.scala b/akka-actor/src/main/scala/akka/util/Duration.scala
index 6e9310e5d8..3012f87486 100644
--- a/akka-actor/src/main/scala/akka/util/Duration.scala
+++ b/akka-actor/src/main/scala/akka/util/Duration.scala
@@ -545,3 +545,29 @@ class DurationDouble(d: Double) {
   def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, DAYS))
 }
 
+case class Timeout(duration: Duration) {
+  def this(timeout: Long) = this(Duration(timeout, TimeUnit.MILLISECONDS))
+  def this(length: Long, unit: TimeUnit) = this(Duration(length, unit))
+}
+
+object Timeout {
+  /**
+   * A timeout with zero duration, will cause most requests to always timeout.
+   */
+  val zero = new Timeout(Duration.Zero)
+
+  /**
+   * A Timeout with infinite duration. Will never timeout. Use extreme caution with this
+   * as it may cause memory leaks, blocked threads, or may not even be supported by
+   * the receiver, which would result in an exception.
+   */
+  val never = new Timeout(Duration.Inf)
+
+  def apply(timeout: Long) = new Timeout(timeout)
+  def apply(length: Long, unit: TimeUnit) = new Timeout(length, unit)
+
+  implicit def durationToTimeout(duration: Duration) = new Timeout(duration)
+  implicit def intToTimeout(timeout: Int) = new Timeout(timeout)
+  implicit def longToTimeout(timeout: Long) = new Timeout(timeout)
+}
+
diff --git a/akka-actor/src/main/scala/akka/util/cps/package.scala b/akka-actor/src/main/scala/akka/util/cps/package.scala
index 7cbf60aaf2..2182d412e3 100644
--- a/akka-actor/src/main/scala/akka/util/cps/package.scala
+++ b/akka-actor/src/main/scala/akka/util/cps/package.scala
@@ -2,7 +2,7 @@ package akka.util
 
 import scala.util.continuations._
 import akka.dispatch.MessageDispatcher
-import akka.actor.Timeout
+import akka.util.Timeout
 
 package object cps {
   def matchC[A, B, C, D](in: A)(pf: PartialFunction[A, B @cpsParam[C, D]]): B @cpsParam[C, D] = pf(in)
diff --git a/akka-camel-typed/src/test/java/akka/camel/TypedConsumerJavaTestBase.java b/akka-camel-typed/src/test/java/akka/camel/TypedConsumerJavaTestBase.java
index 232ef0d2df..d6ea1da9f4 100644
--- a/akka-camel-typed/src/test/java/akka/camel/TypedConsumerJavaTestBase.java
+++ b/akka-camel-typed/src/test/java/akka/camel/TypedConsumerJavaTestBase.java
@@ -3,7 +3,7 @@ package akka.camel;
 import akka.actor.Actor;
 import akka.actor.TypedActor;
 import akka.actor.Props;
-import akka.actor.Timeout;
+import akka.util.Timeout;
 import akka.dispatch.Dispatchers;
 import akka.japi.SideEffect;
 import akka.util.FiniteDuration;
diff --git a/akka-docs/scala/actors.rst b/akka-docs/scala/actors.rst
index 8a9a9ecfb7..eefd725711 100644
--- a/akka-docs/scala/actors.rst
+++ b/akka-docs/scala/actors.rst
@@ -73,7 +73,7 @@ a top level actor, that is supervised by the system (internal guardian actor).
 .. includecode:: code/ActorDocSpec.scala#context-actorOf
 
 Actors are automatically started asynchronously when created.
-When you create the ``Actor`` then it will automatically call the ``preStart`` 
+When you create the ``Actor`` then it will automatically call the ``preStart``
 callback method on the ``Actor`` trait. This is an excellent place to
 add initialization code for the actor.
 
@@ -98,7 +98,7 @@ Here is an example:
 Creating Actors with Props
 --------------------------
 
-``Props`` is a configuration object to specify additional things for the actor to 
+``Props`` is a configuration object to specify additional things for the actor to
 be created, such as the ``MessageDispatcher``.
 
 .. includecode:: code/ActorDocSpec.scala#creating-props
@@ -128,7 +128,7 @@ Actor API
 The :class:`Actor` trait defines only one abstract method, the above mentioned
 :meth:`receive`, which implements the behavior of the actor.
 
-If the current actor behavior does not match a received message, :meth:`unhandled` 
+If the current actor behavior does not match a received message, :meth:`unhandled`
 is called, which by default throws an :class:`UnhandledMessageException`.
 
 In addition, it offers:
@@ -145,7 +145,7 @@ In addition, it offers:
 
 You can import the members in the :obj:`context` to avoid prefixing access with ``context.``
 
-.. includecode:: code/ActorDocSpec.scala#import-context 
+.. includecode:: code/ActorDocSpec.scala#import-context
 
 The remaining visible methods are user-overridable life-cycle hooks which are
 described in the following::
@@ -195,7 +195,7 @@ processing a message. This restart involves the hooks mentioned above:
 
 An actor restart replaces only the actual actor object; the contents of the
 mailbox and the hotswap stack are unaffected by the restart, so processing of
-messages will resume after the :meth:`postRestart` hook returns. The message 
+messages will resume after the :meth:`postRestart` hook returns. The message
 that triggered the exception will not be received again. Any message
 sent to an actor while it is being restarted will be queued to its mailbox as
 usual.
@@ -205,9 +205,9 @@ Stop Hook
 
 After stopping an actor, its :meth:`postStop` hook is called, which may be used
 e.g. for deregistering this actor from other services. This hook is guaranteed
-to run after message queuing has been disabled for this actor, i.e. messages 
-sent to a stopped actor will be redirected to the :obj:`deadLetters` of the 
-:obj:`ActorSystem`. 
+to run after message queuing has been disabled for this actor, i.e. messages
+sent to a stopped actor will be redirected to the :obj:`deadLetters` of the
+:obj:`ActorSystem`.
 
 
 Identifying Actors
@@ -267,7 +267,7 @@ implicitly passed along with the message and available to the receiving Actor
 in its ``sender: ActorRef`` member field. The target actor can use this
 to reply to the original sender, by using ``sender ! replyMsg``.
 
-If invoked from an instance that is **not** an Actor the sender will be 
+If invoked from an instance that is **not** an Actor the sender will be
 :obj:`deadLetters` actor reference by default.
 
 Ask: Send-And-Receive-Future
@@ -281,11 +281,11 @@ will immediately return a :class:`Future`:
   val future = actor ? "hello"
 
 The receiving actor should reply to this message, which will complete the
-future with the reply message as value; ``sender ! result``. 
+future with the reply message as value; ``sender ! result``.
 
-To complete the future with an exception you need send a Failure message to the sender. 
-This is not done automatically when an actor throws an exception while processing a 
-message. 
+To complete the future with an exception you need send a Failure message to the sender.
+This is not done automatically when an actor throws an exception while processing a
+message.
 
 .. includecode:: code/ActorDocSpec.scala#reply-exception
 
@@ -293,11 +293,11 @@ If the actor does not complete the future, it will expire after the timeout peri
 which is taken from one of the following locations in order of precedence:
 
 #. explicitly given timeout as in ``actor.?("hello")(timeout = 12 millis)``
-#. implicit argument of type :class:`akka.actor.Timeout`, e.g.
+#. implicit argument of type :class:`akka.util.Timeout`, e.g.
 
    ::
-   
-     import akka.actor.Timeout
+
+     import akka.util.Timeout
      import akka.util.duration._
 
      implicit val timeout = Timeout(12 millis)
@@ -306,8 +306,8 @@ which is taken from one of the following locations in order of precedence:
 See :ref:`futures-scala` for more information on how to await or query a
 future.
 
-The ``onComplete``, ``onResult``, or ``onTimeout`` methods of the ``Future`` can be 
-used to register a callback to get a notification when the Future completes. 
+The ``onComplete``, ``onResult``, or ``onTimeout`` methods of the ``Future`` can be
+used to register a callback to get a notification when the Future completes.
 Gives you a way to avoid blocking.
 
 .. warning::
@@ -404,17 +404,17 @@ object.
 Stopping actors
 ===============
 
-Actors are stopped by invoking the ``stop`` method of the ``ActorRef``. 
+Actors are stopped by invoking the ``stop`` method of the ``ActorRef``.
 The actual termination of the actor is performed asynchronously, i.e.
-``stop`` may return before the actor is stopped. 
+``stop`` may return before the actor is stopped.
 
 .. code-block:: scala
 
   actor.stop()
 
-Processing of the current message, if any, will continue before the actor is stopped, 
+Processing of the current message, if any, will continue before the actor is stopped,
 but additional messages in the mailbox will not be processed. By default these
-messages are sent to the :obj:`deadLetters` of the :obj:`ActorSystem`, but that 
+messages are sent to the :obj:`deadLetters` of the :obj:`ActorSystem`, but that
 depends on the mailbox implementation.
 
 When stop is called then a call to the ``def postStop`` callback method will
@@ -541,11 +541,11 @@ messages on that mailbox, will be there as well.
 What happens to the actor
 -------------------------
 
-If an exception is thrown, the actor instance is discarded and a new instance is 
+If an exception is thrown, the actor instance is discarded and a new instance is
 created. This new instance will now be used in the actor references to this actor
-(so this is done invisible to the developer). Note that this means that current 
-state of the failing actor instance is lost if you don't store and restore it in 
-``preRestart`` and ``postRestart`` callbacks. 
+(so this is done invisible to the developer). Note that this means that current
+state of the failing actor instance is lost if you don't store and restore it in
+``preRestart`` and ``postRestart`` callbacks.
 
 
 Extending Actors using PartialFunction chaining
diff --git a/akka-docs/scala/futures.rst b/akka-docs/scala/futures.rst
index 623a24730a..442009beb6 100644
--- a/akka-docs/scala/futures.rst
+++ b/akka-docs/scala/futures.rst
@@ -198,7 +198,7 @@ Then there's a method that's called ``fold`` that takes a start-value, a sequenc
 .. code-block:: scala
 
   val futures = for(i <- 1 to 1000) yield Future(i * 2) // Create a sequence of Futures
-  
+
   val futureSum = Future.fold(0)(futures)(_ + _)
 
 That's all it takes!
@@ -244,7 +244,7 @@ In this example, if an ``ArithmeticException`` was thrown while the ``Actor`` pr
 Timeouts
 --------
 
-Waiting forever for a ``Future`` to be completed can be dangerous. It could cause your program to block indefinitly or produce a memory leak. ``Future`` has support for a timeout already builtin with a default of 5 seconds (taken from :ref:`configuration`). A timeout is an instance of ``akka.actor.Timeout`` which contains an ``akka.util.Duration``. A ``Duration`` can be finite, which needs a length and unit type, or infinite. An infinite ``Timeout`` can be dangerous since it will never actually expire.
+Waiting forever for a ``Future`` to be completed can be dangerous. It could cause your program to block indefinitly or produce a memory leak. ``Future`` has support for a timeout already builtin with a default of 5 seconds (taken from :ref:`configuration`). A timeout is an instance of ``akka.util.Timeout`` which contains an ``akka.util.Duration``. A ``Duration`` can be finite, which needs a length and unit type, or infinite. An infinite ``Timeout`` can be dangerous since it will never actually expire.
 
 A different ``Timeout`` can be supplied either explicitly or implicitly when a ``Future`` is created. An implicit ``Timeout`` has the benefit of being usable by a for-comprehension as well as being picked up by any methods looking for an implicit ``Timeout``, while an explicit ``Timeout`` can be used in a more controlled manner.
 
@@ -262,7 +262,7 @@ Implicit ``Timeout`` example:
 
 .. code-block:: scala
 
-  import akka.actor.Timeout
+  import akka.util.Timeout
   import akka.util.duration._
 
   implicit val longTimeout = Timeout(1 minute)
diff --git a/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala b/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala
index 63721395a0..619aedfda9 100644
--- a/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala
+++ b/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala
@@ -11,6 +11,7 @@ import akka.actor.Status._
 import akka.routing._
 import akka.dispatch._
 import akka.util.duration._
+import akka.util.Timeout
 import akka.config.ConfigurationException
 import akka.event.{ DeathWatch, Logging }
 import akka.serialization.Compression.LZF
diff --git a/akka-stm/src/main/scala/akka/agent/Agent.scala b/akka-stm/src/main/scala/akka/agent/Agent.scala
index cbda6b16f7..cededbca84 100644
--- a/akka-stm/src/main/scala/akka/agent/Agent.scala
+++ b/akka-stm/src/main/scala/akka/agent/Agent.scala
@@ -9,6 +9,7 @@ import akka.actor._
 import akka.stm._
 import akka.japi.{ Function ⇒ JFunc, Procedure ⇒ JProc }
 import akka.dispatch._
+import akka.util.Timeout
 
 /**
  * Used internally to send functions.
diff --git a/akka-stm/src/test/scala/akka/agent/test/AgentSpec.scala b/akka-stm/src/test/scala/akka/agent/test/AgentSpec.scala
index 901e45cd8a..b834489e6e 100644
--- a/akka-stm/src/test/scala/akka/agent/test/AgentSpec.scala
+++ b/akka-stm/src/test/scala/akka/agent/test/AgentSpec.scala
@@ -3,7 +3,7 @@ package akka.agent.test
 import org.scalatest.WordSpec
 import org.scalatest.matchers.MustMatchers
 import akka.actor.ActorSystem
-import akka.actor.Timeout
+import akka.util.Timeout
 import akka.agent.Agent
 import akka.stm._
 import akka.util.Duration
diff --git a/akka-stm/src/test/scala/akka/transactor/test/CoordinatedIncrementSpec.scala b/akka-stm/src/test/scala/akka/transactor/test/CoordinatedIncrementSpec.scala
index 26ed0f1034..560b286d66 100644
--- a/akka-stm/src/test/scala/akka/transactor/test/CoordinatedIncrementSpec.scala
+++ b/akka-stm/src/test/scala/akka/transactor/test/CoordinatedIncrementSpec.scala
@@ -6,6 +6,7 @@ import akka.actor.ActorSystem
 import akka.actor._
 import akka.stm.{ Ref, TransactionFactory }
 import akka.util.duration._
+import akka.util.Timeout
 import akka.testkit._
 import akka.dispatch.Await
 
diff --git a/akka-stm/src/test/scala/akka/transactor/test/FickleFriendsSpec.scala b/akka-stm/src/test/scala/akka/transactor/test/FickleFriendsSpec.scala
index c7774920da..6adafec808 100644
--- a/akka-stm/src/test/scala/akka/transactor/test/FickleFriendsSpec.scala
+++ b/akka-stm/src/test/scala/akka/transactor/test/FickleFriendsSpec.scala
@@ -5,6 +5,7 @@ import org.scalatest.matchers.MustMatchers
 import org.scalatest.BeforeAndAfterAll
 import akka.actor.ActorSystem
 import akka.actor._
+import akka.util.Timeout
 import akka.stm._
 import akka.util.duration._
 import akka.testkit._
diff --git a/akka-stm/src/test/scala/akka/transactor/test/TransactorSpec.scala b/akka-stm/src/test/scala/akka/transactor/test/TransactorSpec.scala
index 9ad8fabad4..8c51d73bca 100644
--- a/akka-stm/src/test/scala/akka/transactor/test/TransactorSpec.scala
+++ b/akka-stm/src/test/scala/akka/transactor/test/TransactorSpec.scala
@@ -5,6 +5,7 @@ import org.scalatest.matchers.MustMatchers
 
 import akka.actor.ActorSystem
 import akka.actor._
+import akka.util.Timeout
 import akka.stm._
 import akka.util.duration._
 import akka.testkit._
diff --git a/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java b/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java
index d4d75c34b4..f6d5fb696d 100644
--- a/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java
+++ b/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java
@@ -11,6 +11,7 @@ import akka.actor.UntypedActor;
 import akka.actor.UntypedActorFactory;
 import akka.japi.Creator;
 import akka.routing.*;
+import akka.util.Timeout;
 
 import java.util.LinkedList;
 import java.util.concurrent.CountDownLatch;
@@ -107,7 +108,7 @@ public class Pi {
             this.latch = latch;
             Creator routerCreator = new Creator() {
                 public Router create() {
-                    return new RoundRobinRouter(getContext().dispatcher(), new akka.actor.Timeout(-1));
+                    return new RoundRobinRouter(getContext().dispatcher(), new Timeout(-1));
                 }
             };
             LinkedList actors = new LinkedList() {
@@ -116,7 +117,7 @@ public class Pi {
                 }
             };
                         // FIXME routers are intended to be used like this
-            RoutedProps props = new RoutedProps(routerCreator, new LocalConnectionManager(actors), new akka.actor.Timeout(-1), true);
+            RoutedProps props = new RoutedProps(routerCreator, new LocalConnectionManager(actors), new Timeout(-1), true);
             router = new RoutedActorRef(getContext().system(), props, (InternalActorRef) getSelf(), "pi");
         }
 

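After this commit the ask pattern is unchanged, only the import moves from akka.actor.Timeout to akka.util.Timeout, as the updated actors.rst shows. A hedged sketch of a call site after the move; Echo and the system name are illustrative.

    import akka.actor.{ Actor, ActorSystem, Props }
    import akka.dispatch.Await
    import akka.util.Timeout              // was: import akka.actor.Timeout
    import akka.util.duration._

    class Echo extends Actor {
      def receive = { case msg => sender ! msg }
    }

    object TimeoutSketch extends App {
      val system = ActorSystem("TimeoutDemo")
      val echo = system.actorOf(Props[Echo], "echo")
      implicit val timeout = Timeout(5 seconds)   // picked up implicitly by ?
      val future = echo ? "hello"
      println(Await.result(future, timeout.duration))
    }
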
From b243374ebd8541c9c7891411c208d9c09eb1e1bb Mon Sep 17 00:00:00 2001
From: Patrik Nordwall 
Date: Wed, 14 Dec 2011 17:14:57 +0100
Subject: [PATCH 18/34] Review comments. Config lib v0.2.0.

---
 .../test/scala/akka/actor/DeployerSpec.scala  |  1 -
 akka-actor/src/main/resources/reference.conf  | 24 +++++++++++++++++--
 akka-remote/src/main/resources/reference.conf | 10 +++++++-
 3 files changed, 31 insertions(+), 4 deletions(-)

diff --git a/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala
index c39600a1c5..83837012aa 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala
@@ -51,7 +51,6 @@ class DeployerSpec extends AkkaSpec(DeployerSpec.deployerConf) {
   "A Deployer" must {
 
     "be able to parse 'akka.actor.deployment._' with all default values" in {
-      println(system.settings.toString)
       val service = "/user/service1"
       val deployment = system.asInstanceOf[ActorSystemImpl].provider.deployer.lookup(service)
       deployment must be('defined)
diff --git a/akka-actor/src/main/resources/reference.conf b/akka-actor/src/main/resources/reference.conf
index cb1f3aeb7b..5fb4ba507d 100644
--- a/akka-actor/src/main/resources/reference.conf
+++ b/akka-actor/src/main/resources/reference.conf
@@ -50,7 +50,7 @@ akka {
 
     # Timeout for ActorSystem.actorOf
     creation-timeout = 20s
-    
+
     # frequency with which stopping actors are prodded in case they had to be removed from their parents
     reaper-interval = 5s
 
@@ -88,7 +88,7 @@ akka {
         # is ignored if target.paths is given
         nr-of-instances = 1
 
-        # FIXME document 'create-as'
+        # FIXME document 'create-as', ticket 1511
         create-as {
           # fully qualified class name of recipe implementation
           class = ""
@@ -109,41 +109,57 @@ akka {
       # Dispatcher, (BalancingDispatcher, only valid when all actors using it are of the same type),
       # A FQCN to a class inheriting MessageDispatcherConfigurator with a no-arg visible constructor
       type = "Dispatcher"
+
       # Name used in log messages and thread names.
       name = "DefaultDispatcher"
+
       # Toggles whether the threads created by this dispatcher should be daemons or not
       daemonic = off
+
       # Keep alive time for threads
       keep-alive-time = 60s
+
       # minimum number of threads to cap factor-based core number to
       core-pool-size-min = 8
+
       # No of core threads ... ceil(available processors * factor)
       core-pool-size-factor = 8.0
+
       # maximum number of threads to cap factor-based number to
       core-pool-size-max = 4096
+
       # Hint: max-pool-size is only used for bounded task queues
       # minimum number of threads to cap factor-based max number to
       max-pool-size-min = 8
+
       # Max no of threads ... ceil(available processors * factor)
       max-pool-size-factor  = 8.0
+
       # maximum number of threads to cap factor-based max number to
       max-pool-size-max = 4096
+
       # Specifies the bounded capacity of the task queue (< 1 == unbounded)
       task-queue-size = -1
+
       # Specifies which type of task queue will be used, can be "array" or "linked" (default)
       task-queue-type = "linked"
+
       # Allow core threads to time out
       allow-core-timeout = on
+
       # Throughput defines the number of messages that are processed in a batch before the
       # thread is returned to the pool. Set to 1 for as fair as possible.
       throughput = 5
+
       # Throughput deadline for Dispatcher, set to 0 or negative for no deadline
       throughput-deadline-time = 0ms
+
       # If negative (or zero) then an unbounded mailbox is used (default)
       # If positive then a bounded mailbox is used and the capacity is set using the property
       # NOTE: setting a mailbox to 'blocking' can be a bit dangerous, could lead to deadlock, use with care
       # The following are only used for Dispatcher and only if mailbox-capacity > 0
       mailbox-capacity = -1
+
       # Specifies the timeout to add a new message to a mailbox that is full -
       # negative number means infinite timeout
       mailbox-push-timeout-time = 10s
@@ -152,12 +168,16 @@ akka {
     debug {
       # enable function of Actor.loggable(), which is to log any received message at DEBUG level
       receive = off
+
       # enable DEBUG logging of all AutoReceiveMessages (Kill, PoisonPill and the like)
       autoreceive = off
+
       # enable DEBUG logging of actor lifecycle changes
       lifecycle = off
+
       # enable DEBUG logging of all LoggingFSMs for events, transitions and timers
       fsm = off
+
       # enable DEBUG logging of subscription changes on the eventStream
       event-stream = off
     }
diff --git a/akka-remote/src/main/resources/reference.conf b/akka-remote/src/main/resources/reference.conf
index 1838ae47fa..0ffd461a25 100644
--- a/akka-remote/src/main/resources/reference.conf
+++ b/akka-remote/src/main/resources/reference.conf
@@ -51,11 +51,13 @@ akka {
 
     # accrual failure detection config
     failure-detector {
+
       # defines the failure detector threshold
       #     A low threshold is prone to generate many wrong suspicions but ensures a
       #     quick detection in the event of a real crash. Conversely, a high threshold
       #     generates fewer mistakes but needs more time to detect actual crashes
       threshold = 8
+
       max-sample-size = 1000
     }
 
@@ -73,16 +75,22 @@ akka {
     server {
       # The hostname or ip to bind the remoting to, InetAddress.getLocalHost.getHostAddress is used if empty
       hostname = ""
+
       # The default remote server port clients should connect to. Default is 2552 (AKKA)
       port = 2552
+
       # Increase this if you want to be able to send messages with large payloads
       message-frame-size = 1 MiB
+
       # Timeout duration
       connection-timeout = 120s
+
       # Should the remote server require that it peers share the same secure-cookie (defined in the 'remote' section)?
       require-cookie = off
+
       # Enable untrusted mode for full security of server managed actors, allows untrusted clients to connect.
       untrusted-mode = off
+
       # Sets the size of the connection backlog
       backlog = 4096
     }
@@ -91,10 +99,10 @@ akka {
       buffering {
         # Should message buffering on remote client error be used (buffer flushed on successful reconnect)
         retry-message-send-on-failure = off
+
         # If negative (or zero) then an unbounded mailbox is used (default)
         #     If positive then a bounded mailbox is used and the capacity is set using the property
         capacity = -1
-
       }
       reconnect-delay = 5s
       read-timeout = 3600s

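The dispatcher settings documented above (type, pool sizes, throughput, mailbox capacity) also apply to user-defined dispatcher blocks. The sketch below is a hedged illustration only: it assumes a "my-dispatcher" block placed in application.conf with the same keys, and the 2.0-era dispatcherFactory lookup and Props.withDispatcher(MessageDispatcher) seen elsewhere in this patch series.

    // Assumed application.conf fragment (illustrative):
    //   my-dispatcher {
    //     type = "Dispatcher"
    //     core-pool-size-factor = 2.0
    //     throughput = 10
    //   }
    import akka.actor.{ Actor, ActorSystem, Props }

    class Cruncher extends Actor {
      def receive = { case work: Int => sender ! (work + 1) }
    }

    object DispatcherSketch extends App {
      val system = ActorSystem("DispatcherDemo")
      val myDispatcher = system.dispatcherFactory.lookup("my-dispatcher")
      val ref = system.actorOf(Props[Cruncher].withDispatcher(myDispatcher), "cruncher")
      ref ! 41
    }
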
From e96db77fe56e091ae4d064bc9d9c0077d08e5641 Mon Sep 17 00:00:00 2001
From: Roland 
Date: Wed, 14 Dec 2011 18:11:12 +0100
Subject: [PATCH 19/34] make infinite durations compare true to themselves, see
 #1514

---
 .../src/test/scala/akka/util/DurationSpec.scala        | 10 ++++++++++
 akka-actor/src/main/scala/akka/util/Duration.scala     |  6 ++----
 2 files changed, 12 insertions(+), 4 deletions(-)

diff --git a/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala b/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala
index 6a291872b8..4a04a648bf 100644
--- a/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala
@@ -38,6 +38,16 @@ class DurationSpec extends WordSpec with MustMatchers {
       (inf - minf) must be(inf)
       (minf - inf) must be(minf)
       (minf + minf) must be(minf)
+      assert(inf == inf)
+      assert(minf == minf)
+      inf.compareTo(inf) must be(0)
+      inf.compareTo(one) must be(1)
+      minf.compareTo(minf) must be(0)
+      minf.compareTo(one) must be(-1)
+      assert(inf != minf)
+      assert(minf != inf)
+      assert(one != inf)
+      assert(minf != one)
     }
 
     "support fromNow" in {
diff --git a/akka-actor/src/main/scala/akka/util/Duration.scala b/akka-actor/src/main/scala/akka/util/Duration.scala
index 6e9310e5d8..fba61c8a48 100644
--- a/akka-actor/src/main/scala/akka/util/Duration.scala
+++ b/akka-actor/src/main/scala/akka/util/Duration.scala
@@ -148,8 +148,6 @@ object Duration {
   trait Infinite {
     this: Duration ⇒
 
-    override def equals(other: Any) = false
-
     def +(other: Duration): Duration =
       other match {
         case _: this.type ⇒ this
@@ -192,7 +190,7 @@ object Duration {
    */
   val Inf: Duration = new Duration with Infinite {
     override def toString = "Duration.Inf"
-    def compare(other: Duration) = 1
+    def compare(other: Duration) = if (other eq this) 0 else 1
     def unary_- : Duration = MinusInf
   }
 
@@ -202,7 +200,7 @@ object Duration {
    */
   val MinusInf: Duration = new Duration with Infinite {
     override def toString = "Duration.MinusInf"
-    def compare(other: Duration) = -1
+    def compare(other: Duration) = if (other eq this) 0 else -1
     def unary_- : Duration = Inf
   }
 

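The fix above makes the infinite durations equal and compare as 0 against themselves instead of unconditionally returning false/1/-1. A small sketch mirroring the assertions added to DurationSpec; the object name is illustrative.

    import akka.util.Duration

    object InfiniteDurationSketch extends App {
      val inf  = Duration.Inf
      val minf = Duration.MinusInf
      println(inf == inf)           // true  (Infinite.equals used to be always false)
      println(inf.compareTo(inf))   // 0     (compare used to be always 1)
      println(minf.compareTo(minf)) // 0     (compare used to be always -1)
      println(inf == minf)          // false, as before
    }
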
From 34252c592c02cbaa1de603c2ebd2a3ebd15c6574 Mon Sep 17 00:00:00 2001
From: Patrik Nordwall 
Date: Wed, 14 Dec 2011 19:00:22 +0100
Subject: [PATCH 20/34] A few more 2000 milliseconds

---
 .../src/test/scala/akka/dispatch/FutureSpec.scala         | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala
index 441f404b27..08b6a766ab 100644
--- a/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala
@@ -595,13 +595,13 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
 
         flow { y1 << 1 } // When this is set, it should cascade down the line
 
-        assert(ly.await(2000 milliseconds))
+        assert(ly.await(2 seconds))
         assert(Await.result(x1, 1 minute) === 1)
         assert(!lz.isOpen)
 
         flow { y2 << 9 } // When this is set, it should cascade down the line
 
-        assert(lz.await(2000 milliseconds))
+        assert(lz.await(2 seconds))
         assert(Await.result(x2, 1 minute) === 9)
 
         assert(List(x1, x2, y1, y2).forall(_.isCompleted))
@@ -622,8 +622,8 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
         assert(!s1.isOpen)
         assert(!s2.isOpen)
         assert(!result.isCompleted)
-        assert(i1.await(2000 milliseconds))
-        assert(i2.await(2000 milliseconds))
+        assert(i1.await(2 seconds))
+        assert(i2.await(2 seconds))
         s1.open()
         s2.open()
         assert(Await.result(result, timeout.duration) === 10)

From 6bbbceaf6c21976907315f0322c79027722a5a72 Mon Sep 17 00:00:00 2001
From: Patrik Nordwall 
Date: Wed, 14 Dec 2011 19:25:32 +0100
Subject: [PATCH 21/34] DOC: Updated preRestart

---
 .../akka/docs/actor/UntypedActorTestBase.java |  5 +-
 akka-docs/java/untyped-actors.rst             | 72 +++++++++----------
 akka-docs/scala/actors.rst                    |  9 ++-
 3 files changed, 45 insertions(+), 41 deletions(-)

diff --git a/akka-docs/java/code/akka/docs/actor/UntypedActorTestBase.java b/akka-docs/java/code/akka/docs/actor/UntypedActorTestBase.java
index 6039286598..3cc234176b 100644
--- a/akka-docs/java/code/akka/docs/actor/UntypedActorTestBase.java
+++ b/akka-docs/java/code/akka/docs/actor/UntypedActorTestBase.java
@@ -78,8 +78,7 @@ public class UntypedActorTestBase {
     ActorSystem system = ActorSystem.create("MySystem");
     //#creating-props
     MessageDispatcher dispatcher = system.dispatcherFactory().lookup("my-dispatcher");
-    ActorRef myActor = system.actorOf(
-        new Props().withCreator(MyUntypedActor.class).withDispatcher(dispatcher),
+    ActorRef myActor = system.actorOf(new Props().withCreator(MyUntypedActor.class).withDispatcher(dispatcher),
         "myactor");
     //#creating-props
     myActor.tell("test");
@@ -166,6 +165,8 @@ public class UntypedActorTestBase {
     }
 
     public void preRestart(Throwable reason, Option message) {
+      for (ActorRef each : getContext().getChildren())
+        getContext().stop(each);
       postStop();
     }
 
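The Java test base above now demonstrates the documented default for preRestart: stop all children, then call postStop. A hedged Scala counterpart follows, assuming context.children mirrors getContext().getChildren(); the class name and log line are illustrative.

    import akka.actor.Actor

    class CleaningActor extends Actor {
      def receive = { case _ => () }

      override def preRestart(reason: Throwable, message: Option[Any]) {
        // illustrative extra cleanup before reproducing the documented default
        println("restarting due to: " + reason.getMessage)
        context.children foreach (context.stop(_))   // stop all children
        postStop()                                   // then run the stop hook
      }
    }
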
diff --git a/akka-docs/java/untyped-actors.rst b/akka-docs/java/untyped-actors.rst
index 4324aadf19..c680865b6b 100644
--- a/akka-docs/java/untyped-actors.rst
+++ b/akka-docs/java/untyped-actors.rst
@@ -62,7 +62,7 @@ a top level actor, that is supervised by the system (internal guardian actor).
 .. includecode:: code/akka/docs/actor/FirstUntypedActor.java#context-actorOf
 
 Actors are automatically started asynchronously when created.
-When you create the ``UntypedActor`` then it will automatically call the ``preStart`` 
+When you create the ``UntypedActor`` then it will automatically call the ``preStart``
 callback method on the ``UntypedActor`` class. This is an excellent place to
 add initialization code for the actor.
 
@@ -76,11 +76,11 @@ add initialization code for the actor.
 Creating Actors with non-default constructor
 --------------------------------------------
 
-If your UntypedActor has a constructor that takes parameters then you can't create it using 'actorOf(clazz)'. 
-Instead you can use a variant of ``actorOf`` that takes an instance of an 'UntypedActorFactory' 
-in which you can create the Actor in any way you like. If you use this method then you to make sure that 
-no one can get a reference to the actor instance. If they can get a reference it then they can 
-touch state directly in bypass the whole actor dispatching mechanism and create race conditions 
+If your UntypedActor has a constructor that takes parameters then you can't create it using 'actorOf(clazz)'.
+Instead you can use a variant of ``actorOf`` that takes an instance of an 'UntypedActorFactory'
+in which you can create the Actor in any way you like. If you use this method then you to make sure that
+no one can get a reference to the actor instance. If they can get a reference it then they can
+touch state directly in bypass the whole actor dispatching mechanism and create race conditions
 which can lead to corrupt data.
 
 Here is an example:
@@ -92,7 +92,7 @@ This way of creating the Actor is also great for integrating with Dependency Inj
 Creating Actors with Props
 --------------------------
 
-``Props`` is a configuration object to specify additional things for the actor to 
+``Props`` is a configuration object to specify additional things for the actor to
 be created, such as the ``MessageDispatcher``.
 
 .. includecode:: code/akka/docs/actor/UntypedActorTestBase.java#creating-props
@@ -152,7 +152,7 @@ processing a message. This restart involves the hooks mentioned above:
    message, e.g. when a supervisor does not trap the exception and is restarted
    in turn by its supervisor. This method is the best place for cleaning up,
    preparing hand-over to the fresh actor instance, etc.
-   By default it calls :meth:`postStop`.
+   By default it stops all children and calls :meth:`postStop`.
 2. The initial factory from the ``actorOf`` call is used
    to produce the fresh instance.
 3. The new actor’s :meth:`postRestart` method is invoked with the exception
@@ -162,7 +162,7 @@ processing a message. This restart involves the hooks mentioned above:
 
 An actor restart replaces only the actual actor object; the contents of the
 mailbox and the hotswap stack are unaffected by the restart, so processing of
-messages will resume after the :meth:`postRestart` hook returns. The message 
+messages will resume after the :meth:`postRestart` hook returns. The message
 that triggered the exception will not be received again. Any message
 sent to an actor while it is being restarted will be queued to its mailbox as
 usual.
@@ -172,9 +172,9 @@ Stop Hook
 
 After stopping an actor, its :meth:`postStop` hook is called, which may be used
 e.g. for deregistering this actor from other services. This hook is guaranteed
-to run after message queuing has been disabled for this actor, i.e. messages 
-sent to a stopped actor will be redirected to the :obj:`deadLetters` of the 
-:obj:`ActorSystem`. 
+to run after message queuing has been disabled for this actor, i.e. messages
+sent to a stopped actor will be redirected to the :obj:`deadLetters` of the
+:obj:`ActorSystem`.
 
 
 Identifying Actors
@@ -188,7 +188,7 @@ Messages and immutability
 
 **IMPORTANT**: Messages can be any kind of object but have to be
 immutable. Akka can’t enforce immutability (yet) so this has to be by
-convention. 
+convention.
 
 Here is an example of an immutable message:
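
A hypothetical immutable message in Scala, for instance, is simply a case class with immutable fields:

.. code-block:: scala

   // immutable by construction: only vals, no mutable collections
   case class RegisterUser(username: String, age: Int)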
 
@@ -207,8 +207,8 @@ Messages are sent to an Actor through one of the following methods.
 
 Message ordering is guaranteed on a per-sender basis.
 
-In all these methods you have the option of passing along your own ``ActorRef``. 
-Make it a practice of doing so because it will allow the receiver actors to be able to respond 
+In all these methods you have the option of passing along your own ``ActorRef``.
+Make it a practice to do so, because it allows the receiver actors to respond
 to your message, since the sender reference is sent along with the message.
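
For example, a minimal Scala sketch (``greeter`` is a hypothetical ``ActorRef``) of passing the sender reference along from inside an actor:

.. code-block:: scala

   import akka.actor.{ Actor, ActorRef }

   class Frontend(greeter: ActorRef) extends Actor {
     def receive = {
       case "start" =>
         // explicit sender: the receiver can reply straight back to this actor
         greeter.tell("Hello", self)
         // the ! operator picks up the implicit sender automatically
         greeter ! "Hello"
     }
   }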
 
 Tell: Fire-forget
@@ -229,7 +229,7 @@ to reply to the original sender, by using ``getSender().tell(replyMsg)``.
 
   actor.tell("Hello", getSelf());
 
-If invoked without the sender parameter the sender will be 
+If invoked without the sender parameter, the sender will be the
 :obj:`deadLetters` actor reference in the target actor.
 
 Ask: Send-And-Receive-Future
@@ -244,11 +244,11 @@ will immediately return a :class:`Future`:
   Future future = actorRef.ask("Hello", timeoutMillis);
 
 The receiving actor should reply to this message, which will complete the
-future with the reply message as value; ``getSender.tell(result)``. 
+future with the reply message as its value: ``getSender().tell(result)``.
 
-To complete the future with an exception you need send a Failure message to the sender. 
-This is not done automatically when an actor throws an exception while processing a 
-message. 
+To complete the future with an exception you need to send a Failure message to the sender.
+This is not done automatically when an actor throws an exception while processing a
+message.
 
 .. includecode:: code/akka/docs/actor/UntypedActorTestBase.java#reply-exception
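
Spelled out as a self-contained Scala sketch (hypothetical actor; assuming ``akka.actor.Status.Failure`` is the failure envelope in this version):

.. code-block:: scala

   import akka.actor.{ Actor, Status }

   class Validator extends Actor {
     def receive = {
       case text: String =>
         try {
           if (text.isEmpty) throw new IllegalArgumentException("empty request")
           sender ! text.toUpperCase          // completes the ask Future with a value
         } catch {
           case e: Exception =>
             sender ! Status.Failure(e)       // completes the ask Future with an exception
         }
     }
   }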
 
@@ -258,16 +258,16 @@ specified as parameter to the ``ask`` method.
 See :ref:`futures-java` for more information on how to await or query a
 future.
 
-The ``onComplete``, ``onResult``, or ``onTimeout`` methods of the ``Future`` can be 
-used to register a callback to get a notification when the Future completes. 
+The ``onComplete``, ``onResult``, or ``onTimeout`` methods of the ``Future`` can be
+used to register a callback to get a notification when the Future completes.
 Gives you a way to avoid blocking.
 
 .. warning::
 
   When using future callbacks, inside actors you need to carefully avoid closing over
-  the containing actor’s reference, i.e. do not call methods or access mutable state 
-  on the enclosing actor from within the callback. This would break the actor 
-  encapsulation and may introduce synchronization bugs and race conditions because 
+  the containing actor’s reference, i.e. do not call methods or access mutable state
+  on the enclosing actor from within the callback. This would break the actor
+  encapsulation and may introduce synchronization bugs and race conditions because
   the callback will be scheduled concurrently to the enclosing actor. Unfortunately
   there is not yet a way to detect these illegal accesses at compile time. See also:
   :ref:`jmm-shared-state`
@@ -297,7 +297,7 @@ You need to pass along your context variable as well.
 Receive messages
 ================
 
-When an actor receives a message it is passed into the ``onReceive`` method, this is 
+When an actor receives a message it is passed into the ``onReceive`` method, which is
 an abstract method on the ``UntypedActor`` base class that needs to be defined.
 
 Here is an example:
@@ -340,17 +340,17 @@ message.
 Stopping actors
 ===============
 
-Actors are stopped by invoking the ``stop`` method of the ``ActorRef``. 
+Actors are stopped by invoking the ``stop`` method of the ``ActorRef``.
 The actual termination of the actor is performed asynchronously, i.e.
-``stop`` may return before the actor is stopped. 
+``stop`` may return before the actor is stopped.
 
 .. code-block:: java
 
   actor.stop();
 
-Processing of the current message, if any, will continue before the actor is stopped, 
+Processing of the current message, if any, will continue before the actor is stopped,
 but additional messages in the mailbox will not be processed. By default these
-messages are sent to the :obj:`deadLetters` of the :obj:`ActorSystem`, but that 
+messages are sent to the :obj:`deadLetters` of the :obj:`ActorSystem`, but that
 depends on the mailbox implementation.
 
 When stop is called then a call to the ``def postStop`` callback method will
@@ -365,7 +365,7 @@ take place. The ``Actor`` can use this callback to implement shutdown behavior.
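
For instance, a minimal Scala sketch (hypothetical registry actor) of such shutdown behavior:

.. code-block:: scala

   import akka.actor.{ Actor, ActorRef }

   class Subscriber(registry: ActorRef) extends Actor {
     override def preStart() { registry ! "subscribe" }

     // guaranteed to run after message queuing has been disabled for this actor
     override def postStop() { registry ! "unsubscribe" }

     def receive = {
       case event => println("got event: " + event)
     }
   }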
 
 All Actors are stopped when the ``ActorSystem`` is stopped.
 Supervised actors are stopped when the supervisor is stopped, i.e. children are stopped
-when parent is stopped. 
+when parent is stopped.
 
 
 PoisonPill
@@ -405,7 +405,7 @@ To hotswap the Actor using ``getContext().become``:
 .. includecode:: code/akka/docs/actor/UntypedActorTestBase.java
    :include: import-procedure,hot-swap-actor
 
-The ``become`` method is useful for many different things, such as to implement 
+The ``become`` method is useful for many different things, such as implementing
 a Finite State Machine (FSM).
 
 Here is another little cute example of ``become`` and ``unbecome`` in action:
@@ -462,9 +462,9 @@ messages on that mailbox, will be there as well.
 What happens to the actor
 -------------------------
 
-If an exception is thrown, the actor instance is discarded and a new instance is 
+If an exception is thrown, the actor instance is discarded and a new instance is
 created. This new instance will now be used in the actor references to this actor
-(so this is done invisible to the developer). Note that this means that current 
-state of the failing actor instance is lost if you don't store and restore it in 
-``preRestart`` and ``postRestart`` callbacks. 
+(so this is done invisibly to the developer). Note that this means that the current
+state of the failing actor instance is lost if you don't store and restore it in the
+``preRestart`` and ``postRestart`` callbacks.
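
One way to do that, sketched in Scala for brevity (hypothetical counter state; it relies on the fact that messages sent during a restart are queued to the mailbox as usual):

.. code-block:: scala

   import akka.actor.Actor

   class Counter extends Actor {
     var count = 0

     override def preRestart(reason: Throwable, message: Option[Any]) {
       self ! count                      // hand the state over to the fresh instance
       super.preRestart(reason, message) // keep the default behavior as well
     }

     def receive = {
       case "increment" => count += 1
       case saved: Int  => count = saved // restore the handed-over state after restart
       case "fail"      => throw new RuntimeException("simulated failure")
     }
   }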
 
diff --git a/akka-docs/scala/actors.rst b/akka-docs/scala/actors.rst
index 2d11ea4894..39f4647b90 100644
--- a/akka-docs/scala/actors.rst
+++ b/akka-docs/scala/actors.rst
@@ -54,7 +54,7 @@ Creating Actors with default constructor
 ----------------------------------------
 
 .. includecode:: code/ActorDocSpec.scala
-:include: imports2,system-actorOf
+   :include: imports2,system-actorOf
 
 The call to :meth:`actorOf` returns an instance of ``ActorRef``. This is a handle to
 the ``Actor`` instance which you can use to interact with the ``Actor``. The
@@ -151,7 +151,10 @@ The remaining visible methods are user-overridable life-cycle hooks which are
 described in the following::
 
   def preStart() {}
-  def preRestart(reason: Throwable, message: Option[Any]) { postStop() }
+  def preRestart(reason: Throwable, message: Option[Any]) {
+    context.children foreach (context.stop(_))
+    postStop()
+  }
   def postRestart(reason: Throwable) { preStart() }
   def postStop() {}
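
If the children of an actor should survive its restart, the hook can be overridden to skip the child-stopping step, for example (a minimal sketch with hypothetical actors):

.. code-block:: scala

   import akka.actor.{ Actor, Props }

   class Worker extends Actor {
     def receive = { case job => println("processing " + job) }
   }

   class Manager extends Actor {
     val worker = context.actorOf(Props[Worker], "worker")

     // keep the child alive across restarts: only run the postStop part of the default
     override def preRestart(reason: Throwable, message: Option[Any]) {
       postStop()
     }

     def receive = {
       case job => worker ! job
     }
   }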
 
@@ -185,7 +188,7 @@ processing a message. This restart involves the hooks mentioned above:
    message, e.g. when a supervisor does not trap the exception and is restarted
    in turn by its supervisor. This method is the best place for cleaning up,
    preparing hand-over to the fresh actor instance, etc.
-   By default it calls :meth:`postStop`.
+   By default it stops all children and calls :meth:`postStop`.
 2. The initial factory from the ``actorOf`` call is used
    to produce the fresh instance.
 3. The new actor’s :meth:`postRestart` method is invoked with the exception

From cdff927e0e7d0950c54f7d042d143c6957a9336b Mon Sep 17 00:00:00 2001
From: Roland 
Date: Wed, 14 Dec 2011 19:34:08 +0100
Subject: [PATCH 22/34] remove non-user API from ActorContext, see #1516

- handleChildTerminated/handleFailure: no discussion
- currentMessage was (ab)used by IO, fixed by down-casting which IO
  already does for writing to currentMessage (ewww)
- Actor.apply() could now as well be moved to ActorCell, leaving Actor
  as user-API-only, which would be nice but not for M1
---
 .../src/main/scala/akka/actor/Actor.scala     |  3 +-
 .../src/main/scala/akka/actor/ActorCell.scala | 16 +----
 .../src/main/scala/akka/actor/ActorRef.scala  | 69 ++++++++++++-------
 akka-actor/src/main/scala/akka/actor/IO.scala |  8 +--
 4 files changed, 53 insertions(+), 43 deletions(-)

diff --git a/akka-actor/src/main/scala/akka/actor/Actor.scala b/akka-actor/src/main/scala/akka/actor/Actor.scala
index 72c4ecabc3..ad95d28238 100644
--- a/akka-actor/src/main/scala/akka/actor/Actor.scala
+++ b/akka-actor/src/main/scala/akka/actor/Actor.scala
@@ -307,7 +307,8 @@ trait Actor {
   // =========================================
 
   private[akka] final def apply(msg: Any) = {
-    val behaviorStack = context.hotswap
+    // FIXME this should all go into ActorCell
+    val behaviorStack = context.asInstanceOf[ActorCell].hotswap
     msg match {
       case msg if behaviorStack.nonEmpty && behaviorStack.head.isDefinedAt(msg) ⇒ behaviorStack.head.apply(msg)
       case msg if behaviorStack.isEmpty && processingBehavior.isDefinedAt(msg) ⇒ processingBehavior.apply(msg)
diff --git a/akka-actor/src/main/scala/akka/actor/ActorCell.scala b/akka-actor/src/main/scala/akka/actor/ActorCell.scala
index be6bf2d1f4..d689be07c1 100644
--- a/akka-actor/src/main/scala/akka/actor/ActorCell.scala
+++ b/akka-actor/src/main/scala/akka/actor/ActorCell.scala
@@ -67,7 +67,7 @@ trait ActorContext extends ActorRefFactory {
   def setReceiveTimeout(timeout: Duration): Unit
 
   /**
-   * Resets the current receive timeout.
+   * Clears the receive timeout, i.e. deactivates this feature.
    */
   def resetReceiveTimeout(): Unit
 
@@ -83,16 +83,6 @@ trait ActorContext extends ActorRefFactory {
    */
   def unbecome(): Unit
 
-  /**
-   * Returns the current message envelope.
-   */
-  def currentMessage: Envelope
-
-  /**
-   * Returns a stack with the hotswapped behaviors (as Scala PartialFunction).
-   */
-  def hotswap: Stack[PartialFunction[Any, Unit]]
-
   /**
    * Returns the sender 'ActorRef' of the current message.
    */
@@ -109,10 +99,6 @@ trait ActorContext extends ActorRefFactory {
    */
   implicit def dispatcher: MessageDispatcher
 
-  def handleFailure(child: ActorRef, cause: Throwable): Unit
-
-  def handleChildTerminated(child: ActorRef): Unit
-
   /**
    * The system that the actor belongs to.
    * Importing this member will place a implicit MessageDispatcher in scope.
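
User code that previously inspected ``hotswap`` directly can rely on the ``become``/``unbecome`` pair that stays on ``ActorContext``, for example (a minimal sketch):

.. code-block:: scala

   import akka.actor.Actor

   class Toggle extends Actor {
     def receive: PartialFunction[Any, Unit] = {
       case "enable" => context.become(enabled)
     }

     def enabled: PartialFunction[Any, Unit] = {
       case "disable" => context.unbecome() // falls back to the original receive
     }
   }
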
diff --git a/akka-actor/src/main/scala/akka/actor/ActorRef.scala b/akka-actor/src/main/scala/akka/actor/ActorRef.scala
index 7eab12c41b..1bdb9ae8ce 100644
--- a/akka-actor/src/main/scala/akka/actor/ActorRef.scala
+++ b/akka-actor/src/main/scala/akka/actor/ActorRef.scala
@@ -18,34 +18,57 @@ import akka.event.LoggingAdapter
 import java.util.concurrent.atomic.AtomicBoolean
 
 /**
- * ActorRef is an immutable and serializable handle to an Actor.
- *
- * Create an ActorRef for an Actor by using the factory method on the Actor object.
- *
- * Here is an example on how to create an actor with a default constructor.
- *
- *   import Actor._
+ * Immutable and serializable handle to an actor, which may or may not reside
+ * on the local host or inside the same [[akka.actor.ActorSystem]]. An ActorRef
+ * can be obtained from an [[akka.actor.ActorRefFactory]], an interface which
+ * is implemented by ActorSystem and [[akka.actor.ActorContext]]. This means
+ * actors can be created top-level in the ActorSystem or as children of an
+ * existing actor, but only from within that actor.
  *
- *   val actor = actorOf(Props[MyActor]
- *   actor ! message
- *   actor.stop()
- * 
+ * ActorRefs can be freely shared among actors by message passing. Message
+ * passing conversely is their only purpose, as demonstrated in the following
+ * examples:
 *
- * You can also create and start actors like this:
- *
- *   val actor = actorOf(Props[MyActor]
- * 
+ * Scala:
+ * {{{
+ * class ExampleActor extends Actor {
+ *   val other = context.actorOf(Props[OtherActor], "childName") // will be destroyed and re-created upon restart by default
 *
- * Here is an example on how to create an actor with a non-default constructor.
- *
- *   import Actor._
+ *   def receive = {
+ *     case Request1(msg) => other ! refine(msg)     // uses this actor as sender reference, reply goes to us
+ *     case Request2(msg) => other.tell(msg, sender) // forward sender reference, enabling direct reply
+ *     case Request3(msg) => sender ! (other ? msg)  // will reply with a Future for holding other’s reply (implicit timeout from "akka.actor.timeout")
+ *   }
+ * }
+ * }}}
  *
- *   val actor = actorOf(Props(new MyActor(...))
- *   actor ! message
- *   actor.stop()
- * 
+ * Java: + * {{{ + * public class ExampleActor Extends UntypedActor { + * // this child will be destroyed and re-created upon restart by default + * final ActorRef other = getContext().actorOf(new Props(OtherActor.class), "childName"); * - * The natural ordering of ActorRef is defined in terms of its [[akka.actor.ActorPath]]. + * @Override + * public void onReceive(Object o) { + * if (o instanceof Request1) { + * val msg = ((Request1) o).getMsg(); + * other.tell(msg); // uses this actor as sender reference, reply goes to us + * + * } else if (o instanceof Request2) { + * val msg = ((Request2) o).getMsg(); + * other.tell(msg, getSender()); // forward sender reference, enabling direct reply + * + * } else if (o instanceof Request3) { + * val msg = ((Request3) o).getMsg(); + * getSender().tell(other.ask(msg, 5000)); // reply with Future for holding the other’s reply (timeout 5 seconds) + * + * } + * } + * } + * }}} + * + * ActorRef does not have a method for terminating the actor it points to, use + * [[akka.actor.ActorRefFactory]]`.stop(child)` for this purpose. */ abstract class ActorRef extends java.lang.Comparable[ActorRef] with Serializable { scalaRef: InternalActorRef ⇒ diff --git a/akka-actor/src/main/scala/akka/actor/IO.scala b/akka-actor/src/main/scala/akka/actor/IO.scala index 1551eef2ec..28bad4f85e 100644 --- a/akka-actor/src/main/scala/akka/actor/IO.scala +++ b/akka-actor/src/main/scala/akka/actor/IO.scala @@ -46,15 +46,15 @@ object IO { override def asReadable = this def read(len: Int)(implicit actor: Actor with IO): ByteString @cps[IOSuspendable[Any]] = shift { cont: (ByteString ⇒ IOSuspendable[Any]) ⇒ - ByteStringLength(cont, this, actor.context.currentMessage, len) + ByteStringLength(cont, this, actor.context.asInstanceOf[ActorCell].currentMessage, len) } def read()(implicit actor: Actor with IO): ByteString @cps[IOSuspendable[Any]] = shift { cont: (ByteString ⇒ IOSuspendable[Any]) ⇒ - ByteStringAny(cont, this, actor.context.currentMessage) + ByteStringAny(cont, this, actor.context.asInstanceOf[ActorCell].currentMessage) } def read(delimiter: ByteString, inclusive: Boolean = false)(implicit actor: Actor with IO): ByteString @cps[IOSuspendable[Any]] = shift { cont: (ByteString ⇒ IOSuspendable[Any]) ⇒ - ByteStringDelimited(cont, this, actor.context.currentMessage, delimiter, inclusive, 0) + ByteStringDelimited(cont, this, actor.context.asInstanceOf[ActorCell].currentMessage, delimiter, inclusive, 0) } } @@ -158,7 +158,7 @@ trait IO { } run() case msg if _next ne Idle ⇒ - _messages enqueue context.currentMessage + _messages enqueue context.asInstanceOf[ActorCell].currentMessage case msg if _receiveIO.isDefinedAt(msg) ⇒ _next = reset { _receiveIO(msg); Idle } run() From ab1c4c683340463fb36ed336d740574c93e656d6 Mon Sep 17 00:00:00 2001 From: Patrik Nordwall Date: Wed, 14 Dec 2011 19:39:51 +0100 Subject: [PATCH 23/34] DOC: Updated stop description --- akka-docs/java/untyped-actors.rst | 7 ++++--- akka-docs/scala/actors.rst | 7 ++++--- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/akka-docs/java/untyped-actors.rst b/akka-docs/java/untyped-actors.rst index c680865b6b..1b32481901 100644 --- a/akka-docs/java/untyped-actors.rst +++ b/akka-docs/java/untyped-actors.rst @@ -340,13 +340,14 @@ message. Stopping actors =============== -Actors are stopped by invoking the ``stop`` method of the ``ActorRef``. -The actual termination of the actor is performed asynchronously, i.e. 
+Actors are stopped by invoking the ``stop`` method of the ``ActorContext`` +for child actors or ``stop`` method of the ``ActorSystem`` for top level +actors. The actual termination of the actor is performed asynchronously, i.e. ``stop`` may return before the actor is stopped. .. code-block:: java - actor.stop(); + getContext().stop(childActorRef); Processing of the current message, if any, will continue before the actor is stopped, but additional messages in the mailbox will not be processed. By default these diff --git a/akka-docs/scala/actors.rst b/akka-docs/scala/actors.rst index 39f4647b90..663c07c70a 100644 --- a/akka-docs/scala/actors.rst +++ b/akka-docs/scala/actors.rst @@ -407,13 +407,14 @@ object. Stopping actors =============== -Actors are stopped by invoking the ``stop`` method of the ``ActorRef``. -The actual termination of the actor is performed asynchronously, i.e. +Actors are stopped by invoking the ``stop`` method of the ``ActorContext`` +for child actors or ``stop`` method of the ``ActorSystem`` for top level +actors. The actual termination of the actor is performed asynchronously, i.e. ``stop`` may return before the actor is stopped. .. code-block:: scala - actor.stop() + context.stop(childActorRef) Processing of the current message, if any, will continue before the actor is stopped, but additional messages in the mailbox will not be processed. By default these From 7b2349c0d9292a90f3ec2aa1605426b5b9c42bec Mon Sep 17 00:00:00 2001 From: Patrik Nordwall Date: Wed, 14 Dec 2011 20:26:27 +0100 Subject: [PATCH 24/34] DOC: Correction of stop description --- akka-docs/java/untyped-actors.rst | 15 +++++++-------- akka-docs/scala/actors.rst | 15 +++++++-------- 2 files changed, 14 insertions(+), 16 deletions(-) diff --git a/akka-docs/java/untyped-actors.rst b/akka-docs/java/untyped-actors.rst index 1b32481901..ca1005defb 100644 --- a/akka-docs/java/untyped-actors.rst +++ b/akka-docs/java/untyped-actors.rst @@ -340,14 +340,13 @@ message. Stopping actors =============== -Actors are stopped by invoking the ``stop`` method of the ``ActorContext`` -for child actors or ``stop`` method of the ``ActorSystem`` for top level -actors. The actual termination of the actor is performed asynchronously, i.e. -``stop`` may return before the actor is stopped. - -.. code-block:: java - - getContext().stop(childActorRef); +Actors are stopped by invoking the :meth:`stop` method of a ``ActorRefFactory``, +i.e. ``ActorContext`` or ``ActorSystem``. Typically the context is used for stopping +child actors and the system for stopping top level actors. When using the context +to stop an actor the actual termination of the actor is performed asynchronously, +i.e. :meth:`stop` may return before the actor is stopped. When using the system to +stop an actor the :meth:`stop` method will block until the actor is stopped, or +timeout occurs (``akka.actor.creation-timeout`` :ref:`configuration` property). Processing of the current message, if any, will continue before the actor is stopped, but additional messages in the mailbox will not be processed. By default these diff --git a/akka-docs/scala/actors.rst b/akka-docs/scala/actors.rst index 663c07c70a..526eed201a 100644 --- a/akka-docs/scala/actors.rst +++ b/akka-docs/scala/actors.rst @@ -407,14 +407,13 @@ object. Stopping actors =============== -Actors are stopped by invoking the ``stop`` method of the ``ActorContext`` -for child actors or ``stop`` method of the ``ActorSystem`` for top level -actors. 
The actual termination of the actor is performed asynchronously, i.e. -``stop`` may return before the actor is stopped. - -.. code-block:: scala - - context.stop(childActorRef) +Actors are stopped by invoking the :meth:`stop` method of a ``ActorRefFactory``, +i.e. ``ActorContext`` or ``ActorSystem``. Typically the context is used for stopping +child actors and the system for stopping top level actors. When using the context +to stop an actor the actual termination of the actor is performed asynchronously, +i.e. :meth:`stop` may return before the actor is stopped. When using the system to +stop an actor the :meth:`stop` method will block until the actor is stopped, or +timeout occurs (``akka.actor.creation-timeout`` :ref:`configuration` property). Processing of the current message, if any, will continue before the actor is stopped, but additional messages in the mailbox will not be processed. By default these From c57b2732e79bfe6c0f72b344c38272b71430762b Mon Sep 17 00:00:00 2001 From: Patrik Nordwall Date: Wed, 14 Dec 2011 20:40:01 +0100 Subject: [PATCH 25/34] DOC: Another correction of stop description --- akka-docs/java/untyped-actors.rst | 8 +++----- akka-docs/scala/actors.rst | 8 +++----- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/akka-docs/java/untyped-actors.rst b/akka-docs/java/untyped-actors.rst index ca1005defb..e1fad292fc 100644 --- a/akka-docs/java/untyped-actors.rst +++ b/akka-docs/java/untyped-actors.rst @@ -342,11 +342,9 @@ Stopping actors Actors are stopped by invoking the :meth:`stop` method of a ``ActorRefFactory``, i.e. ``ActorContext`` or ``ActorSystem``. Typically the context is used for stopping -child actors and the system for stopping top level actors. When using the context -to stop an actor the actual termination of the actor is performed asynchronously, -i.e. :meth:`stop` may return before the actor is stopped. When using the system to -stop an actor the :meth:`stop` method will block until the actor is stopped, or -timeout occurs (``akka.actor.creation-timeout`` :ref:`configuration` property). +child actors and the system for stopping top level actors. The actual termination of +the actor is performed asynchronously, i.e. :meth:`stop` may return before the actor is +stopped. Processing of the current message, if any, will continue before the actor is stopped, but additional messages in the mailbox will not be processed. By default these diff --git a/akka-docs/scala/actors.rst b/akka-docs/scala/actors.rst index 526eed201a..04b13f3f92 100644 --- a/akka-docs/scala/actors.rst +++ b/akka-docs/scala/actors.rst @@ -409,11 +409,9 @@ Stopping actors Actors are stopped by invoking the :meth:`stop` method of a ``ActorRefFactory``, i.e. ``ActorContext`` or ``ActorSystem``. Typically the context is used for stopping -child actors and the system for stopping top level actors. When using the context -to stop an actor the actual termination of the actor is performed asynchronously, -i.e. :meth:`stop` may return before the actor is stopped. When using the system to -stop an actor the :meth:`stop` method will block until the actor is stopped, or -timeout occurs (``akka.actor.creation-timeout`` :ref:`configuration` property). +child actors and the system for stopping top level actors. The actual termination of +the actor is performed asynchronously, i.e. :meth:`stop` may return before the actor is +stopped. Processing of the current message, if any, will continue before the actor is stopped, but additional messages in the mailbox will not be processed. 
By default these From fabe475f640c86219a89a393e77f900f9aba71e9 Mon Sep 17 00:00:00 2001 From: Patrik Nordwall Date: Wed, 14 Dec 2011 21:52:39 +0100 Subject: [PATCH 26/34] DOC: Improved scheduler doc. Split into Java/Scala samples --- akka-docs/common/index.rst | 1 - ...ActorTest.scala => SchedulerDocTest.scala} | 2 +- .../akka/docs/actor/SchedulerDocTestBase.java | 85 +++++++++++++++++++ .../akka/docs/actor/UntypedActorDocTest.scala | 5 ++ ...Base.java => UntypedActorDocTestBase.java} | 2 +- .../dispatcher/DispatcherDocTestBase.java | 2 +- akka-docs/java/index.rst | 1 + akka-docs/java/scheduler.rst | 53 ++++++++++++ akka-docs/java/untyped-actors.rst | 18 ++-- akka-docs/scala/actors.rst | 30 +++---- .../{ => akka/docs/actor}/ActorDocSpec.scala | 0 .../akka/docs/actor}/SchedulerDocSpec.scala | 20 +---- .../docs/actor}/UnnestedReceives.scala | 0 akka-docs/scala/index.rst | 1 + akka-docs/{common => scala}/scheduler.rst | 25 +++--- 15 files changed, 188 insertions(+), 57 deletions(-) rename akka-docs/java/code/akka/docs/actor/{UntypedActorTest.scala => SchedulerDocTest.scala} (51%) create mode 100644 akka-docs/java/code/akka/docs/actor/SchedulerDocTestBase.java create mode 100644 akka-docs/java/code/akka/docs/actor/UntypedActorDocTest.scala rename akka-docs/java/code/akka/docs/actor/{UntypedActorTestBase.java => UntypedActorDocTestBase.java} (99%) create mode 100644 akka-docs/java/scheduler.rst rename akka-docs/scala/code/{ => akka/docs/actor}/ActorDocSpec.scala (100%) rename akka-docs/{common/code => scala/code/akka/docs/actor}/SchedulerDocSpec.scala (72%) rename akka-docs/scala/code/{ => akka/docs/actor}/UnnestedReceives.scala (100%) rename akka-docs/{common => scala}/scheduler.rst (72%) diff --git a/akka-docs/common/index.rst b/akka-docs/common/index.rst index f3ed26aa73..4e19d1a1aa 100644 --- a/akka-docs/common/index.rst +++ b/akka-docs/common/index.rst @@ -4,5 +4,4 @@ Common utilities .. 
toctree:: :maxdepth: 2 - scheduler duration diff --git a/akka-docs/java/code/akka/docs/actor/UntypedActorTest.scala b/akka-docs/java/code/akka/docs/actor/SchedulerDocTest.scala similarity index 51% rename from akka-docs/java/code/akka/docs/actor/UntypedActorTest.scala rename to akka-docs/java/code/akka/docs/actor/SchedulerDocTest.scala index 1747f30f92..b522a142d8 100644 --- a/akka-docs/java/code/akka/docs/actor/UntypedActorTest.scala +++ b/akka-docs/java/code/akka/docs/actor/SchedulerDocTest.scala @@ -2,4 +2,4 @@ package akka.docs.actor import org.scalatest.junit.JUnitSuite -class UntypedActorTest extends UntypedActorTestBase with JUnitSuite +class SchedulerDocTest extends SchedulerDocTestBase with JUnitSuite diff --git a/akka-docs/java/code/akka/docs/actor/SchedulerDocTestBase.java b/akka-docs/java/code/akka/docs/actor/SchedulerDocTestBase.java new file mode 100644 index 0000000000..bbcec2f4e5 --- /dev/null +++ b/akka-docs/java/code/akka/docs/actor/SchedulerDocTestBase.java @@ -0,0 +1,85 @@ +package akka.docs.actor; + +//#imports1 +import akka.actor.Props; +import akka.util.Duration; +import java.util.concurrent.TimeUnit; + +//#imports1 + +//#imports2 +import akka.actor.UntypedActor; +import akka.actor.UntypedActorFactory; +import akka.actor.Cancellable; + +//#imports2 + +import akka.actor.ActorRef; +import akka.actor.ActorSystem; +import akka.testkit.AkkaSpec; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import static org.junit.Assert.*; + +public class SchedulerDocTestBase { + + ActorSystem system; + ActorRef testActor; + + @Before + public void setUp() { + system = ActorSystem.create("MySystem", AkkaSpec.testConf()); + testActor = system.actorOf(new Props().withCreator(MyUntypedActor.class)); + } + + @After + public void tearDown() { + system.shutdown(); + } + + @Test + public void scheduleOneOffTask() { + //#schedule-one-off-message + //Schedules to send the "foo"-message to the testActor after 50ms + system.scheduler().scheduleOnce(Duration.create(50, TimeUnit.MILLISECONDS), testActor, "foo"); + //#schedule-one-off-message + + //#schedule-one-off-thunk + //Schedules a Runnable to be executed (send the current time) to the testActor after 50ms + system.scheduler().scheduleOnce(Duration.create(50, TimeUnit.MILLISECONDS), new Runnable() { + @Override + public void run() { + testActor.tell(System.currentTimeMillis()); + } + }); + //#schedule-one-off-thunk + } + + @Test + public void scheduleRecurringTask() { + //#schedule-recurring + ActorRef tickActor = system.actorOf(new Props().withCreator(new UntypedActorFactory() { + public UntypedActor create() { + return new UntypedActor() { + public void onReceive(Object message) { + if (message.equals("Tick")) { + // Do someting + } + } + }; + } + })); + + //This will schedule to send the Tick-message + //to the tickActor after 0ms repeating every 50ms + Cancellable cancellable = system.scheduler().schedule(Duration.Zero(), Duration.create(50, TimeUnit.MILLISECONDS), + tickActor, "Tick"); + + //This cancels further Ticks to be sent + cancellable.cancel(); + //#schedule-recurring + system.stop(tickActor); + } +} diff --git a/akka-docs/java/code/akka/docs/actor/UntypedActorDocTest.scala b/akka-docs/java/code/akka/docs/actor/UntypedActorDocTest.scala new file mode 100644 index 0000000000..76b3b990fa --- /dev/null +++ b/akka-docs/java/code/akka/docs/actor/UntypedActorDocTest.scala @@ -0,0 +1,5 @@ +package akka.docs.actor + +import org.scalatest.junit.JUnitSuite + +class UntypedActorDocTest extends 
UntypedActorDocTestBase with JUnitSuite diff --git a/akka-docs/java/code/akka/docs/actor/UntypedActorTestBase.java b/akka-docs/java/code/akka/docs/actor/UntypedActorDocTestBase.java similarity index 99% rename from akka-docs/java/code/akka/docs/actor/UntypedActorTestBase.java rename to akka-docs/java/code/akka/docs/actor/UntypedActorDocTestBase.java index 3cc234176b..1093f58caf 100644 --- a/akka-docs/java/code/akka/docs/actor/UntypedActorTestBase.java +++ b/akka-docs/java/code/akka/docs/actor/UntypedActorDocTestBase.java @@ -36,7 +36,7 @@ import java.util.concurrent.TimeUnit; import static org.junit.Assert.*; -public class UntypedActorTestBase { +public class UntypedActorDocTestBase { @Test public void systemActorOf() { diff --git a/akka-docs/java/code/akka/docs/dispatcher/DispatcherDocTestBase.java b/akka-docs/java/code/akka/docs/dispatcher/DispatcherDocTestBase.java index 2da942fdef..28c0ad4477 100644 --- a/akka-docs/java/code/akka/docs/dispatcher/DispatcherDocTestBase.java +++ b/akka-docs/java/code/akka/docs/dispatcher/DispatcherDocTestBase.java @@ -28,7 +28,7 @@ import com.typesafe.config.ConfigFactory; import akka.actor.ActorSystem; import akka.docs.actor.MyUntypedActor; -import akka.docs.actor.UntypedActorTestBase.MyActor; +import akka.docs.actor.UntypedActorDocTestBase.MyActor; import akka.testkit.AkkaSpec; public class DispatcherDocTestBase { diff --git a/akka-docs/java/index.rst b/akka-docs/java/index.rst index e864b9d63c..c04e5bc259 100644 --- a/akka-docs/java/index.rst +++ b/akka-docs/java/index.rst @@ -9,6 +9,7 @@ Java API untyped-actors typed-actors logging + scheduler futures dataflow transactors diff --git a/akka-docs/java/scheduler.rst b/akka-docs/java/scheduler.rst new file mode 100644 index 0000000000..3dde1345a6 --- /dev/null +++ b/akka-docs/java/scheduler.rst @@ -0,0 +1,53 @@ + +.. _scheduler-java: + +################## + Scheduler (Java) +################## + +Sometimes the need for making things happen in the future arises, and where do you go look then? +Look no further than ``ActorSystem``! There you find the :meth:`scheduler` method that returns an instance +of akka.actor.Scheduler, this instance is unique per ActorSystem and is used internally for scheduling things +to happen at specific points in time. Please note that the scheduled tasks are executed by the default +``MessageDispatcher`` of the ``ActorSystem``. + +You can schedule sending of messages to actors and execution of tasks (functions or Runnable). +You will get a ``Cancellable`` back that you can call :meth:`cancel` on to cancel the execution of the +scheduled operation. + +Some examples +------------- + +.. includecode:: code/akka/docs/actor/SchedulerDocTestBase.java + :include: imports1,schedule-one-off-message + +.. includecode:: code/akka/docs/actor/SchedulerDocTestBase.java + :include: schedule-one-off-thunk + +.. includecode:: code/akka/docs/actor/SchedulerDocTestBase.java + :include: imports1,imports2,schedule-recurring + +From ``akka.actor.ActorSystem`` +------------------------------- + +.. includecode:: ../../akka-actor/src/main/scala/akka/actor/ActorSystem.scala + :include: scheduler + + +The Scheduler interface +----------------------- + +.. includecode:: ../../akka-actor/src/main/scala/akka/actor/Scheduler.scala + :include: scheduler + +The Cancellable interface +------------------------- + +This allows you to ``cancel`` something that has been scheduled for execution. + +.. warning:: + This does not abort the execution of the task, if it had already been started. + +.. 
includecode:: ../../akka-actor/src/main/scala/akka/actor/Scheduler.scala + :include: cancellable + diff --git a/akka-docs/java/untyped-actors.rst b/akka-docs/java/untyped-actors.rst index e1fad292fc..8ad7a7f7b2 100644 --- a/akka-docs/java/untyped-actors.rst +++ b/akka-docs/java/untyped-actors.rst @@ -42,7 +42,7 @@ Here is an example: Creating Actors with default constructor ---------------------------------------- -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java :include: imports,system-actorOf The call to :meth:`actorOf` returns an instance of ``ActorRef``. This is a handle to @@ -85,7 +85,7 @@ which can lead to corrupt data. Here is an example: -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java#creating-constructor +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#creating-constructor This way of creating the Actor is also great for integrating with Dependency Injection (DI) frameworks like Guice or Spring. @@ -95,7 +95,7 @@ Creating Actors with Props ``Props`` is a configuration object to specify additional things for the actor to be created, such as the ``MessageDispatcher``. -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java#creating-props +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#creating-props UntypedActor API @@ -119,7 +119,7 @@ In addition, it offers: The remaining visible methods are user-overridable life-cycle hooks which are described in the following: -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java#lifecycle-callbacks +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#lifecycle-callbacks The implementations shown above are the defaults provided by the :class:`UntypedActor` class. @@ -250,7 +250,7 @@ To complete the future with an exception you need send a Failure message to the This is not done automatically when an actor throws an exception while processing a message. -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java#reply-exception +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#reply-exception If the actor does not complete the future, it will expire after the timeout period, specified as parameter to the ``ask`` method. @@ -278,7 +278,7 @@ even if that entails waiting for it (but keep in mind that waiting inside an actor is prone to dead-locks, e.g. if obtaining the result depends on processing another message on this actor). -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java :include: import-future,using-ask Forward message @@ -379,7 +379,7 @@ If the ``PoisonPill`` was sent with ``ask``, the ``Future`` will be completed wi Use it like this: -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java :include: import-actors,poison-pill .. _UntypedActor.HotSwap: @@ -400,7 +400,7 @@ The hotswapped code is kept in a Stack which can be pushed and popped. To hotswap the Actor using ``getContext().become``: -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java :include: import-procedure,hot-swap-actor The ``become`` method is useful for many different things, such as to implement @@ -430,7 +430,7 @@ through regular supervisor semantics. Use it like this: -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java +.. 
includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java :include: import-actors,kill Actors and exceptions diff --git a/akka-docs/scala/actors.rst b/akka-docs/scala/actors.rst index 04b13f3f92..dcb7ed2795 100644 --- a/akka-docs/scala/actors.rst +++ b/akka-docs/scala/actors.rst @@ -40,7 +40,7 @@ along with the implementation of how the messages should be processed. Here is an example: -.. includecode:: code/ActorDocSpec.scala +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala :include: imports1,my-actor Please note that the Akka Actor ``receive`` message loop is exhaustive, which is @@ -53,7 +53,7 @@ thrown and the actor is restarted when an unknown message is received. Creating Actors with default constructor ---------------------------------------- -.. includecode:: code/ActorDocSpec.scala +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala :include: imports2,system-actorOf The call to :meth:`actorOf` returns an instance of ``ActorRef``. This is a handle to @@ -70,7 +70,7 @@ how the supervisor hierarchy is arranged. When using the context the current act will be supervisor of the created child actor. When using the system it will be a top level actor, that is supervised by the system (internal guardian actor). -.. includecode:: code/ActorDocSpec.scala#context-actorOf +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#context-actorOf Actors are automatically started asynchronously when created. When you create the ``Actor`` then it will automatically call the ``preStart`` @@ -92,7 +92,7 @@ a call-by-name block in which you can create the Actor in any way you like. Here is an example: -.. includecode:: code/ActorDocSpec.scala#creating-constructor +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#creating-constructor Creating Actors with Props @@ -101,7 +101,7 @@ Creating Actors with Props ``Props`` is a configuration object to specify additional things for the actor to be created, such as the ``MessageDispatcher``. -.. includecode:: code/ActorDocSpec.scala#creating-props +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#creating-props Creating Actors using anonymous classes @@ -109,7 +109,7 @@ Creating Actors using anonymous classes When spawning actors for specific sub-tasks from within an actor, it may be convenient to include the code to be executed directly in place, using an anonymous class. -.. includecode:: code/ActorDocSpec.scala#anonymous-actor +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#anonymous-actor .. warning:: @@ -145,7 +145,7 @@ In addition, it offers: You can import the members in the :obj:`context` to avoid prefixing access with ``context.`` -.. includecode:: code/ActorDocSpec.scala#import-context +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#import-context The remaining visible methods are user-overridable life-cycle hooks which are described in the following:: @@ -290,7 +290,7 @@ To complete the future with an exception you need send a Failure message to the This is not done automatically when an actor throws an exception while processing a message. -.. includecode:: code/ActorDocSpec.scala#reply-exception +.. 
includecode:: code/akka/docs/actor/ActorDocSpec.scala#reply-exception If the actor does not complete the future, it will expire after the timeout period, which is taken from one of the following locations in order of precedence: @@ -339,7 +339,7 @@ type, it will throw the exception or a :class:`ClassCastException` (if you want to get :obj:`None` in the latter case, use :meth:`Future.asSilently[T]`). In case of a timeout, :obj:`None` is returned. -.. includecode:: code/ActorDocSpec.scala#using-ask +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#using-ask Forward message --------------- @@ -371,7 +371,7 @@ This method should return a ``PartialFunction``, e.g. a ‘match/case’ clause which the message can be matched against the different case clauses using Scala pattern matching. Here is an example: -.. includecode:: code/ActorDocSpec.scala +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala :include: imports1,my-actor @@ -401,7 +401,7 @@ received within a certain time. To receive this timeout you have to set the ``receiveTimeout`` property and declare a case handing the ReceiveTimeout object. -.. includecode:: code/ActorDocSpec.scala#receive-timeout +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#receive-timeout Stopping actors @@ -465,7 +465,7 @@ pushed and popped. To hotswap the Actor behavior using ``become``: -.. includecode:: code/ActorDocSpec.scala#hot-swap-actor +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#hot-swap-actor The ``become`` method is useful for many different things, but a particular nice example of it is in example where it is used to implement a Finite State Machine @@ -475,12 +475,12 @@ example of it is in example where it is used to implement a Finite State Machine Here is another little cute example of ``become`` and ``unbecome`` in action: -.. includecode:: code/ActorDocSpec.scala#swapper +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#swapper Encoding Scala Actors nested receives without accidentally leaking memory ------------------------------------------------------------------------- -See this `Unnested receive example `_. +See this `Unnested receive example `_. Downgrade @@ -556,4 +556,4 @@ A bit advanced but very useful way of defining a base message handler and then extend that, either through inheritance or delegation, is to use ``PartialFunction.orElse`` chaining. -.. includecode:: code/ActorDocSpec.scala#receive-orElse +.. 
includecode:: code/akka/docs/actor/ActorDocSpec.scala#receive-orElse diff --git a/akka-docs/scala/code/ActorDocSpec.scala b/akka-docs/scala/code/akka/docs/actor/ActorDocSpec.scala similarity index 100% rename from akka-docs/scala/code/ActorDocSpec.scala rename to akka-docs/scala/code/akka/docs/actor/ActorDocSpec.scala diff --git a/akka-docs/common/code/SchedulerDocSpec.scala b/akka-docs/scala/code/akka/docs/actor/SchedulerDocSpec.scala similarity index 72% rename from akka-docs/common/code/SchedulerDocSpec.scala rename to akka-docs/scala/code/akka/docs/actor/SchedulerDocSpec.scala index 5c4635b864..3192c67e06 100644 --- a/akka-docs/common/code/SchedulerDocSpec.scala +++ b/akka-docs/scala/code/akka/docs/actor/SchedulerDocSpec.scala @@ -1,4 +1,4 @@ -package akka.scheduler.actor +package akka.docs.actor //#imports1 import akka.actor.Actor @@ -10,7 +10,6 @@ import akka.util.duration._ import org.scalatest.{ BeforeAndAfterAll, WordSpec } import org.scalatest.matchers.MustMatchers import akka.testkit._ -import akka.util.duration._ class SchedulerDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { "schedule a one-off task" in { @@ -22,25 +21,12 @@ class SchedulerDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { expectMsg(1 second, "foo") //#schedule-one-off-thunk - //Schedules to send the "foo"-message to the testActor after 50ms + //Schedules a function to be executed (send the current time) to the testActor after 50ms system.scheduler.scheduleOnce(50 milliseconds) { - testActor ! "foo" + testActor ! System.currentTimeMillis } //#schedule-one-off-thunk - expectMsg(1 second, "foo") - - //#schedule-one-off-runnable - //Schedules to send the "foo"-message to the testActor after 50ms - system.scheduler.scheduleOnce( - 50 milliseconds, - new Runnable { - def run = testActor ! "foo" - }) - - //#schedule-one-off-runnable - - expectMsg(1 second, "foo") } "schedule a recurring task" in { diff --git a/akka-docs/scala/code/UnnestedReceives.scala b/akka-docs/scala/code/akka/docs/actor/UnnestedReceives.scala similarity index 100% rename from akka-docs/scala/code/UnnestedReceives.scala rename to akka-docs/scala/code/akka/docs/actor/UnnestedReceives.scala diff --git a/akka-docs/scala/index.rst b/akka-docs/scala/index.rst index 978d9a47bb..a36fe9513f 100644 --- a/akka-docs/scala/index.rst +++ b/akka-docs/scala/index.rst @@ -9,6 +9,7 @@ Scala API actors typed-actors logging + scheduler futures dataflow agents diff --git a/akka-docs/common/scheduler.rst b/akka-docs/scala/scheduler.rst similarity index 72% rename from akka-docs/common/scheduler.rst rename to akka-docs/scala/scheduler.rst index d05cea60aa..6089630625 100644 --- a/akka-docs/common/scheduler.rst +++ b/akka-docs/scala/scheduler.rst @@ -1,30 +1,31 @@ -Scheduler -========= + +.. _scheduler-scala: + +################### + Scheduler (Scala) +################### Sometimes the need for making things happen in the future arises, and where do you go look then? -Look no further than ``ActorSystem``! There you find the :meth:``scheduler`` method that returns an instance +Look no further than ``ActorSystem``! There you find the :meth:`scheduler` method that returns an instance of akka.actor.Scheduler, this instance is unique per ActorSystem and is used internally for scheduling things to happen at specific points in time. Please note that the scheduled tasks are executed by the default ``MessageDispatcher`` of the ``ActorSystem``. You can schedule sending of messages to actors and execution of tasks (functions or Runnable). 
-You will get a ``Cancellable`` back that you can call :meth:``cancel`` on to cancel the execution of the +You will get a ``Cancellable`` back that you can call :meth:`cancel` on to cancel the execution of the scheduled operation. Some examples ------------- -.. includecode:: code/SchedulerDocSpec.scala +.. includecode:: code/akka/docs/actor/SchedulerDocSpec.scala :include: imports1,schedule-one-off-message -.. includecode:: code/SchedulerDocSpec.scala - :include: imports1,schedule-one-off-thunk +.. includecode:: code/akka/docs/actor/SchedulerDocSpec.scala + :include: schedule-one-off-thunk -.. includecode:: code/SchedulerDocSpec.scala - :include: imports1,schedule-one-off-runnable - -.. includecode:: code/SchedulerDocSpec.scala - :include: imports1,schedule-recurring +.. includecode:: code/akka/docs/actor/SchedulerDocSpec.scala + :include: schedule-recurring From ``akka.actor.ActorSystem`` ------------------------------- From ad8a050d051f38a0327a09c88c3898054a1b6c62 Mon Sep 17 00:00:00 2001 From: Peter Vlugter Date: Thu, 15 Dec 2011 11:42:06 +1300 Subject: [PATCH 27/34] Updated microkernel - no config files used by microkernel - boot classes are specified as main arguments - actor system creation is left to user in Bootable - added on-out-of-memory handler to java args - updated docs --- akka-docs/modules/microkernel.rst | 30 ++++- akka-kernel/src/main/resources/reference.conf | 19 --- .../src/main/scala/akka/kernel/Main.scala | 126 ++++++++++++------ akka-kernel/src/main/scripts/akka | 24 ++++ .../src/main/scripts}/akka.bat | 5 +- .../test/scala/akka/kernel/ConfigSpec.scala | 23 ---- .../src/main/config/akka.conf | 8 -- .../sample/kernel/hello/HelloKernel.scala | 8 +- config/{akka.conf => application.conf} | 0 project/Dist.scala | 6 +- scripts/microkernel/akka | 18 --- 11 files changed, 151 insertions(+), 116 deletions(-) delete mode 100644 akka-kernel/src/main/resources/reference.conf create mode 100755 akka-kernel/src/main/scripts/akka rename {scripts/microkernel => akka-kernel/src/main/scripts}/akka.bat (70%) delete mode 100644 akka-kernel/src/test/scala/akka/kernel/ConfigSpec.scala delete mode 100644 akka-samples/akka-sample-hello-kernel/src/main/config/akka.conf rename config/{akka.conf => application.conf} (100%) delete mode 100755 scripts/microkernel/akka diff --git a/akka-docs/modules/microkernel.rst b/akka-docs/modules/microkernel.rst index 266d888b6c..7686dcb2e5 100644 --- a/akka-docs/modules/microkernel.rst +++ b/akka-docs/modules/microkernel.rst @@ -5,4 +5,32 @@ Microkernel ############# -The Akka Spring module has not been migrated to Akka 2.0-SNAPSHOT yet. +The Akka Microkernel is included in the Akka download found at `downloads`_. + +.. _downloads: http://akka.io/downloads + +To run an application with the microkernel you need to create a Bootable class +that handles the startup and shutdown the application. An example is included below. + +Put your application jar in the ``deploy`` directory to have it automatically +loaded. + +To start the kernel use the scripts in the ``bin`` directory, passing the boot +classes for your application. + +There is a simple example of an application setup for running with the +microkernel included in the akka download. This can be run with the following +command (on a unix-based system): + +.. code-block:: none + + bin/akka sample.kernel.hello.HelloKernel + +Use Ctrl-C to interrupt and exit the microkernel. + +On a Windows machine you can also use the bin/akka.bat script. 
+ +The code for the Hello Kernel example (see the HelloKernel class for an example +of creating a Bootable): + +.. includecode:: ../../akka-samples/akka-sample-hello-kernel/src/main/scala/sample/kernel/hello/HelloKernel.scala diff --git a/akka-kernel/src/main/resources/reference.conf b/akka-kernel/src/main/resources/reference.conf deleted file mode 100644 index 3c27d985df..0000000000 --- a/akka-kernel/src/main/resources/reference.conf +++ /dev/null @@ -1,19 +0,0 @@ -##################################### -# Akka Kernel Reference Config File # -##################################### - -# This reference config file has all the default settings -# Make your edits/overrides in your akka.conf - - -akka { - - kernel { - # The name of the actor system created by the Akka Microkernel - system.name = "default" - - # Boot classes are loaded and created automatically when the Akka Microkernel starts up - # A list of FQNs (Fully Qualified Names) of classes that implement akka.kernel.Bootable - boot = [] - } -} diff --git a/akka-kernel/src/main/scala/akka/kernel/Main.scala b/akka-kernel/src/main/scala/akka/kernel/Main.scala index 915847c7c7..3b89cf4ec0 100644 --- a/akka-kernel/src/main/scala/akka/kernel/Main.scala +++ b/akka-kernel/src/main/scala/akka/kernel/Main.scala @@ -5,92 +5,134 @@ package akka.kernel import akka.actor.ActorSystem -import com.typesafe.config.ConfigFactory import java.io.File import java.lang.Boolean.getBoolean import java.net.{ URL, URLClassLoader } import java.util.jar.JarFile import scala.collection.JavaConverters._ +/** + * To use the microkernel at least one 'boot class' needs to be specified. + * A boot class implements this interface ([[akka.kernel.Bootable]]) and + * must have an empty default constructor. + * + * ActorSystems can be created within the boot class. + * + * An example of a simple boot class: + * {{{ + * class BootApp extends Bootable { + * val system = ActorSystem("app") + * + * def startup = { + * system.actorOf(Props[FirstActor]) ! FirstMessage + * } + * + * def shutdown = { + * system.shutdown() + * } + * } + * }}} + * + * Boot classes are specified as main arguments to the microkernel. + * + * For example, using the akka script an application can be started with + * the following at the command line: + * {{{ + * bin/akka org.app.BootApp + * }}} + */ trait Bootable { - def startup(system: ActorSystem): Unit - def shutdown(system: ActorSystem): Unit + /** + * Callback run on microkernel startup. + * Create initial actors and messages here. + */ + def startup(): Unit + + /** + * Callback run on microkernel shutdown. + * Shutdown actor systems here. + */ + def shutdown(): Unit } +/** + * Main class for running the microkernel. 
+ */ object Main { val quiet = getBoolean("akka.kernel.quiet") def log(s: String) = if (!quiet) println(s) def main(args: Array[String]) = { + if (args.isEmpty) { + log("[error] No boot classes specified") + System.exit(1) + } + log(banner) log("Starting Akka...") log("Running Akka " + ActorSystem.Version) - val config = ConfigFactory.load("akka.conf") - val name = config.getString("akka.kernel.system.name") - val system = ActorSystem(name, config) - val classLoader = deployJars(system) - - log("Created actor system '%s'" format name) + val classLoader = createClassLoader() Thread.currentThread.setContextClassLoader(classLoader) - val bootClasses: Seq[String] = system.settings.config.getStringList("akka.kernel.boot").asScala + val bootClasses: Seq[String] = args.toSeq val bootables: Seq[Bootable] = bootClasses map { c ⇒ classLoader.loadClass(c).newInstance.asInstanceOf[Bootable] } for (bootable ← bootables) { log("Starting up " + bootable.getClass.getName) - bootable.startup(system) + bootable.startup() } - addShutdownHook(system, bootables) + addShutdownHook(bootables) log("Successfully started Akka") } - def deployJars(system: ActorSystem): ClassLoader = { - if (system.settings.Home.isEmpty) { - log("Akka home is not defined") - System.exit(1) - Thread.currentThread.getContextClassLoader - } else { - val home = system.settings.Home.get + def createClassLoader(): ClassLoader = { + if (ActorSystem.GlobalHome.isDefined) { + val home = ActorSystem.GlobalHome.get val deploy = new File(home, "deploy") - - if (!deploy.exists) { - log("No deploy dir found at " + deploy) - log("Please check that akka home is defined correctly") - System.exit(1) + if (deploy.exists) { + loadDeployJars(deploy) + } else { + log("[warning] No deploy dir found at " + deploy) + Thread.currentThread.getContextClassLoader } - - val jars = deploy.listFiles.filter(_.getName.endsWith(".jar")) - - val nestedJars = jars flatMap { jar ⇒ - val jarFile = new JarFile(jar) - val jarEntries = jarFile.entries.asScala.toArray.filter(_.getName.endsWith(".jar")) - jarEntries map { entry ⇒ new File("jar:file:%s!/%s" format (jarFile.getName, entry.getName)) } - } - - val urls = (jars ++ nestedJars) map { _.toURI.toURL } - - urls foreach { url ⇒ log("Deploying " + url) } - - new URLClassLoader(urls, Thread.currentThread.getContextClassLoader) + } else { + log("[warning] Akka home is not defined") + Thread.currentThread.getContextClassLoader } } - def addShutdownHook(system: ActorSystem, bootables: Seq[Bootable]): Unit = { + def loadDeployJars(deploy: File): ClassLoader = { + val jars = deploy.listFiles.filter(_.getName.endsWith(".jar")) + + val nestedJars = jars flatMap { jar ⇒ + val jarFile = new JarFile(jar) + val jarEntries = jarFile.entries.asScala.toArray.filter(_.getName.endsWith(".jar")) + jarEntries map { entry ⇒ new File("jar:file:%s!/%s" format (jarFile.getName, entry.getName)) } + } + + val urls = (jars ++ nestedJars) map { _.toURI.toURL } + + urls foreach { url ⇒ log("Deploying " + url) } + + new URLClassLoader(urls, Thread.currentThread.getContextClassLoader) + } + + def addShutdownHook(bootables: Seq[Bootable]): Unit = { Runtime.getRuntime.addShutdownHook(new Thread(new Runnable { def run = { log("") - log("Received signal to stop") log("Shutting down Akka...") + for (bootable ← bootables) { log("Shutting down " + bootable.getClass.getName) - bootable.shutdown(system) + bootable.shutdown() } - system.stop() + log("Successfully shut down Akka") } })) diff --git a/akka-kernel/src/main/scripts/akka 
b/akka-kernel/src/main/scripts/akka new file mode 100755 index 0000000000..595bc6e34c --- /dev/null +++ b/akka-kernel/src/main/scripts/akka @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +declare quiet="false" + +while true; do + case "$1" in + -q | --quiet ) quiet="true"; shift ;; + * ) break ;; + esac +done + +[[ "$@" ]] || { + echo "No boot classes specified" + echo "Usage: bin/akka org.somewhere.BootClass" + exit 1 +} + +declare AKKA_HOME="$(cd "$(cd "$(dirname "$0")"; pwd -P)"/..; pwd)" + +[ -n "$JAVA_OPTS" ] || JAVA_OPTS="-Xmx1024M -Xms1024M -Xss1M -XX:MaxPermSize=256M -XX:+UseParallelGC -XX:OnOutOfMemoryError=\"kill -9 %p\"" + +[ -n "$AKKA_CLASSPATH" ] || AKKA_CLASSPATH="$AKKA_HOME/lib/scala-library.jar:$AKKA_HOME/lib/akka/*:$AKKA_HOME/config" + +java "$JAVA_OPTS" -cp "$AKKA_CLASSPATH" -Dakka.home="$AKKA_HOME" -Dakka.kernel.quiet=$quiet akka.kernel.Main "$@" diff --git a/scripts/microkernel/akka.bat b/akka-kernel/src/main/scripts/akka.bat similarity index 70% rename from scripts/microkernel/akka.bat rename to akka-kernel/src/main/scripts/akka.bat index 59d1a91a48..b6c2f8628a 100644 --- a/scripts/microkernel/akka.bat +++ b/akka-kernel/src/main/scripts/akka.bat @@ -1,6 +1,7 @@ @echo off + set AKKA_HOME=%~dp0.. -set JAVA_OPTS=-Xms1024M -Xmx1024M -Xss1M -XX:MaxPermSize=256M -XX:+UseParallelGC +set JAVA_OPTS=-Xmx1024M -Xms1024M -Xss1M -XX:MaxPermSize=256M -XX:+UseParallelGC set AKKA_CLASSPATH=%AKKA_HOME%\lib\scala-library.jar;%AKKA_HOME%\config;%AKKA_HOME%\lib\akka\* -java %JAVA_OPTS% -cp "%AKKA_CLASSPATH%" -Dakka.home="%AKKA_HOME%" akka.kernel.Main +java %JAVA_OPTS% -cp "%AKKA_CLASSPATH%" -Dakka.home="%AKKA_HOME%" akka.kernel.Main %* diff --git a/akka-kernel/src/test/scala/akka/kernel/ConfigSpec.scala b/akka-kernel/src/test/scala/akka/kernel/ConfigSpec.scala deleted file mode 100644 index 938ed34b6b..0000000000 --- a/akka-kernel/src/test/scala/akka/kernel/ConfigSpec.scala +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - */ - -package akka.kernel - -import akka.testkit.AkkaSpec -import com.typesafe.config.ConfigFactory -import scala.collection.JavaConverters._ - -@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) -class ConfigSpec extends AkkaSpec(ConfigFactory.defaultReference) { - - "The default configuration file (i.e. 
reference.conf)" must { - "contain correct defaults for akka-kernel" in { - - val config = system.settings.config - - config.getString("akka.kernel.system.name") must be === "default" - config.getList("akka.kernel.boot").asScala.toList must be === Nil - } - } -} diff --git a/akka-samples/akka-sample-hello-kernel/src/main/config/akka.conf b/akka-samples/akka-sample-hello-kernel/src/main/config/akka.conf deleted file mode 100644 index 181b1e10b1..0000000000 --- a/akka-samples/akka-sample-hello-kernel/src/main/config/akka.conf +++ /dev/null @@ -1,8 +0,0 @@ -# Config for the Hello Kernel sample - -akka { - kernel { - system.name = "hellokernel" - boot = ["sample.kernel.hello.HelloKernel"] - } -} diff --git a/akka-samples/akka-sample-hello-kernel/src/main/scala/sample/kernel/hello/HelloKernel.scala b/akka-samples/akka-sample-hello-kernel/src/main/scala/sample/kernel/hello/HelloKernel.scala index bea62f6176..abf526484c 100644 --- a/akka-samples/akka-sample-hello-kernel/src/main/scala/sample/kernel/hello/HelloKernel.scala +++ b/akka-samples/akka-sample-hello-kernel/src/main/scala/sample/kernel/hello/HelloKernel.scala @@ -25,9 +25,13 @@ class WorldActor extends Actor { } class HelloKernel extends Bootable { - def startup(system: ActorSystem) = { + val system = ActorSystem("hellokernel") + + def startup = { system.actorOf(Props[HelloActor]) ! Start } - def shutdown(system: ActorSystem) = {} + def shutdown = { + system.shutdown() + } } diff --git a/config/akka.conf b/config/application.conf similarity index 100% rename from config/akka.conf rename to config/application.conf diff --git a/project/Dist.scala b/project/Dist.scala index 291962229d..002ae63fc1 100644 --- a/project/Dist.scala +++ b/project/Dist.scala @@ -52,10 +52,12 @@ object Dist { (baseDirectory, distSources, distUnzipped, version, distFile, streams) map { (projectBase, allSources, unzipped, version, zipFile, s) => { val base = unzipped / ("akka-" + version) - val scripts = (projectBase / "scripts" / "microkernel" * "*").get + val scripts = (projectBase / "akka-kernel" / "src" / "main" / "scripts" * "*").get val bin = base / "bin" val configSources = projectBase / "config" val config = base / "config" + val deploy = base / "deploy" + val deployReadme = deploy / "readme" val doc = base / "doc" / "akka" val api = doc / "api" val docs = doc / "docs" @@ -68,6 +70,8 @@ object Dist { IO.delete(unzipped) copyFilesTo(scripts, bin, setExecutable = true) IO.copyDirectory(configSources, config) + IO.createDirectory(deploy) + IO.write(deployReadme, "Place application jars in this directory") IO.copyDirectory(allSources.api, api) IO.copyDirectory(allSources.docs, docs) copyFilesTo(allSources.docJars, docJars) diff --git a/scripts/microkernel/akka b/scripts/microkernel/akka deleted file mode 100755 index 013fdea25a..0000000000 --- a/scripts/microkernel/akka +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env bash - -declare quiet="false" - -while true; do - case "$1" in - -q | --quiet ) quiet="true"; shift ;; - * ) break ;; - esac -done - -declare AKKA_HOME="$(cd "$(cd "$(dirname "$0")"; pwd -P)"/..; pwd)" - -[ -n "$JAVA_OPTS" ] || JAVA_OPTS="-Xms1536M -Xmx1536M -Xss1M -XX:MaxPermSize=256M -XX:+UseParallelGC" - -[ -n "$AKKA_CLASSPATH" ] || AKKA_CLASSPATH="$AKKA_HOME/lib/scala-library.jar:$AKKA_HOME/lib/akka/*:$AKKA_HOME/config" - -java $JAVA_OPTS -cp "$AKKA_CLASSPATH" -Dakka.home="$AKKA_HOME" -Dakka.kernel.quiet=$quiet akka.kernel.Main From 05461cd53170d7e0fffb5848c40beeeb4b3cd938 Mon Sep 17 00:00:00 2001 From: Roland Date: Wed, 14 Dec 2011 21:42:00 +0100 
Subject: [PATCH 28/34] fix log statement in ActorModelSpec --- .../src/test/scala/akka/actor/dispatch/ActorModelSpec.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala index ced2b0d158..03a9405a8f 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala @@ -204,7 +204,7 @@ object ActorModelSpec { await(deadline)(stats.restarts.get() == restarts) } catch { case e ⇒ - system.eventStream.publish(Error(e, dispatcher.toString, "actual: " + stats + ", required: InterceptorStats(susp=" + suspensions + + system.eventStream.publish(Error(e, Option(dispatcher).toString, "actual: " + stats + ", required: InterceptorStats(susp=" + suspensions + ",res=" + resumes + ",reg=" + registers + ",unreg=" + unregisters + ",recv=" + msgsReceived + ",proc=" + msgsProcessed + ",restart=" + restarts)) throw e From 1ef5145dc9b46bada744e915435d937310b02c3b Mon Sep 17 00:00:00 2001 From: Roland Date: Thu, 15 Dec 2011 01:34:17 +0100 Subject: [PATCH 29/34] fix hideous and well-hidden oversight MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Thank you, Jenkins and CallingThreadDispatcher, for without you both I would probably not have found this bug before the milestone release. So, what happened? Well, ActorModelSpec for the CallingThreadDispatcher tries to have an actor kill itself twice with InterruptedException (of all things!) and wants to verify that everything still works. Given the right timing (which was rare) the semantics of the CallingThreadDispatcher would mean that the actor would suspend itself (first failure), inform its supervisor (“/user”, not on CTD) and return to the spec. If the supervisor was too slow to finish the restart which includes a resume which will process all messages enqueued meanwhile from the test BEFORE the test enqueued the second failure, the failure would actually happen on the supervisor’s thread. Now InterruptedException is the ONLY exception which always gets passed up, and thus it is the only Exception which can ever occur genuinely within a top-level supervisor which will trigger its restart: if it’s an unhandled one, the actor system will stop, and if it’s a handled one from a child there will be no problem. So, having these very special circumstances combined just so, the “guardian” would restart, and after today’s changes that meant killing all its children. Boom. The fix is of course trivial, as always. And the bug is extremely obvious, once found. Cost me five hours. --- akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala b/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala index 4b081f6ee8..dfc0252b27 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala @@ -352,6 +352,9 @@ class LocalActorRefProvider( case StopChild(child) ⇒ context.stop(child); sender ! "ok" case m ⇒ deadLetters ! DeadLetter(m, sender, self) } + + // guardian MUST NOT lose its children during restart + override def preRestart(cause: Throwable, msg: Option[Any]) {} } /* @@ -369,6 +372,9 @@ class LocalActorRefProvider( case StopChild(child) ⇒ context.stop(child); sender ! 
"ok" case m ⇒ deadLetters ! DeadLetter(m, sender, self) } + + // guardian MUST NOT lose its children during restart + override def preRestart(cause: Throwable, msg: Option[Any]) {} } private val guardianFaultHandlingStrategy = { From 37efb72f2230791f183f0ba9929eb378913a52b3 Mon Sep 17 00:00:00 2001 From: Peter Vlugter Date: Thu, 15 Dec 2011 15:39:30 +1300 Subject: [PATCH 30/34] Some documentation fixes - microkernel information - repository links - akka download description --- akka-docs/dev/building-akka.rst | 10 +--- akka-docs/intro/deployment-scenarios.rst | 12 +++- .../intro/getting-started-first-java.rst | 57 +++++++++++-------- .../getting-started-first-scala-eclipse.rst | 48 +++++++++------- .../intro/getting-started-first-scala.rst | 49 +++++++++------- akka-docs/intro/getting-started.rst | 30 +++++----- akka-docs/intro/what-is-akka.rst | 3 +- akka-docs/project/links.rst | 51 +++++++++-------- 8 files changed, 142 insertions(+), 118 deletions(-) diff --git a/akka-docs/dev/building-akka.rst b/akka-docs/dev/building-akka.rst index a9db8fe7d2..d5e94107d1 100644 --- a/akka-docs/dev/building-akka.rst +++ b/akka-docs/dev/building-akka.rst @@ -84,14 +84,6 @@ to use from an sbt project) use the ``publish-local`` command:: sbt publish-local -Publish to local Maven repository ---------------------------------- - -If you want to deploy the artifacts to your local Maven repository use:: - - sbt publish - - sbt interactive mode -------------------- @@ -129,6 +121,6 @@ Dependencies You can look at the Ivy dependency resolution information that is created on ``sbt update`` and found in ``~/.ivy2/cache``. For example, the ``~/.ivy2/cache/com.typesafe.akka-akka-remote-compile.xml`` file contains -the resolution information for the akka-cluster module compile dependencies. If +the resolution information for the akka-remote module compile dependencies. If you open this file in a web browser you will get an easy to navigate view of dependencies. diff --git a/akka-docs/intro/deployment-scenarios.rst b/akka-docs/intro/deployment-scenarios.rst index bf4cd26f02..c76284d62c 100644 --- a/akka-docs/intro/deployment-scenarios.rst +++ b/akka-docs/intro/deployment-scenarios.rst @@ -8,12 +8,13 @@ How can I use and deploy Akka? ============================== -Akka can be used in two different ways: +Akka can be used in different ways: - As a library: used as a regular JAR on the classpath and/or in a web app, to be put into ``WEB-INF/lib`` -- As a stand alone application by instantiating ActorSystem +- As a stand alone application by instantiating ActorSystem in a main class or + using the :ref:`microkernel` Using Akka as library @@ -34,3 +35,10 @@ Actors as regular services referenced from your Web application. You should also be able to use the Remoting service, e.g. be able to make certain Actors remote on other hosts. Please note that remoting service does not speak HTTP over port 80, but a custom protocol over the port is specified in :ref:`configuration`. + + +Using Akka as a stand alone microkernel +---------------------------------------- + +Akka can also be run as a stand-alone microkernel. See :ref:`microkernel` for +more information. 
diff --git a/akka-docs/intro/getting-started-first-java.rst b/akka-docs/intro/getting-started-first-java.rst index 34a41c7c94..3fe706b2bd 100644 --- a/akka-docs/intro/getting-started-first-java.rst +++ b/akka-docs/intro/getting-started-first-java.rst @@ -63,10 +63,10 @@ Downloading and installing Akka ------------------------------- To build and run the tutorial sample from the command line, you have to download -Akka. If you prefer to use SBT to build and run the sample then you can skip -this section and jump to the next one. +Akka. If you prefer to use SBT to build and run the sample then you can skip this +section and jump to the next one. -Let's get the ``akka-actors-2.0-SNAPSHOT.zip`` distribution of Akka from +Let's get the ``akka-2.0-SNAPSHOT.zip`` distribution of Akka from http://akka.io/downloads/ which includes everything we need for this tutorial. Once you have downloaded the distribution unzip it in the folder you would like to have Akka installed in. In my case I choose to install it in @@ -77,46 +77,55 @@ You need to do one more thing in order to install Akka properly: set the I'm opening up a shell, navigating down to the distribution, and setting the ``AKKA_HOME`` variable:: - $ cd /Users/jboner/tools/akka-actors-2.0-SNAPSHOT + $ cd /Users/jboner/tools/akka-2.0-SNAPSHOT $ export AKKA_HOME=`pwd` $ echo $AKKA_HOME - /Users/jboner/tools/akka-actors-2.0-SNAPSHOT + /Users/jboner/tools/akka-2.0-SNAPSHOT The distribution looks like this:: $ ls -1 + bin config + deploy doc lib src +- In the ``bin`` directory we have scripts for starting the Akka Microkernel. - In the ``config`` directory we have the Akka conf files. -- In the ``doc`` directory we have the documentation, API, doc JARs, and also - the source files for the tutorials. +- In the ``deploy`` directory we can place applications to be run with the microkernel. +- In the ``doc`` directory we have the documentation, API, and doc JARs. - In the ``lib`` directory we have the Scala and Akka JARs. - In the ``src`` directory we have the source JARs for Akka. - The only JAR we will need for this tutorial (apart from the ``scala-library.jar`` JAR) is the ``akka-actor-2.0-SNAPSHOT.jar`` JAR in the ``lib/akka`` directory. This is a self-contained JAR with zero dependencies and contains everything we need to write a system using Actors. -Akka is very modular and has many JARs for containing different features. The core distribution has seven modules: +Akka is very modular and has many JARs for containing different features. The +modules are: -- ``akka-actor-2.0-SNAPSHOT.jar`` -- Standard Actors -- ``akka-typed-actor-2.0-SNAPSHOT.jar`` -- Typed Actors -- ``akka-remote-2.0-SNAPSHOT.jar`` -- Remote Actors -- ``akka-stm-2.0-SNAPSHOT.jar`` -- STM (Software Transactional Memory), transactors and transactional datastructures -- ``akka-slf4j-2.0-SNAPSHOT.jar`` -- SLF4J Event Handler Listener for logging with SLF4J -- ``akka-testkit-2.0-SNAPSHOT.jar`` -- Toolkit for testing Actors +- ``akka-actor`` -- Actors -The Akka Microkernel distribution also includes these jars: +- ``akka-remote`` -- Remote Actors + +- ``akka-slf4j`` -- SLF4J Event Handler Listener for logging with SLF4J + +- ``akka-testkit`` -- Toolkit for testing Actors + +- ``akka-kernel`` -- Akka microkernel for running a bare-bones mini application server + +- ``akka-durable-mailboxes`` -- Durable mailboxes: file-based, MongoDB, Redis, Zookeeper + +- ``akka-amqp`` -- AMQP integration + +.. 
- ``akka-stm-2.0-SNAPSHOT.jar`` -- STM (Software Transactional Memory), transactors and transactional datastructures +.. - ``akka-camel-2.0-SNAPSHOT.jar`` -- Apache Camel Actors integration (it's the best way to have your Akka application communicate with the rest of the world) +.. - ``akka-camel-typed-2.0-SNAPSHOT.jar`` -- Apache Camel Typed Actors integration +.. - ``akka-spring-2.0-SNAPSHOT.jar`` -- Spring framework integration -- ``akka-kernel-2.0-SNAPSHOT.jar`` -- Akka microkernel for running a bare-bones mini application server (embeds Jetty etc.) -- ``akka-camel-2.0-SNAPSHOT.jar`` -- Apache Camel Actors integration (it's the best way to have your Akka application communicate with the rest of the world) -- ``akka-camel-typed-2.0-SNAPSHOT.jar`` -- Apache Camel Typed Actors integration -- ``akka-spring-2.0-SNAPSHOT.jar`` -- Spring framework integration Downloading and installing Maven @@ -158,9 +167,11 @@ Here is the layout that Maven created:: As you can see we already have a Java source file called ``App.java``, let's now rename it to ``Pi.java``. -We also need to edit the ``pom.xml`` build file. Let's add the dependency we need as well as the Maven repository it should download it from. The Akka Maven repository can be found at ``_ -and Typesafe provides ``_ that proxies several other repositories, including akka.io. -It should now look something like this: +We also need to edit the ``pom.xml`` build file. Let's add the dependency we +need as well as the Maven repository it should download it from. The Akka Maven +repository can be found at http://akka.io/releases/ and Typesafe provides +http://repo.typesafe.com/typesafe/releases/ that proxies several other +repositories, including akka.io. It should now look something like this: .. code-block:: xml diff --git a/akka-docs/intro/getting-started-first-scala-eclipse.rst b/akka-docs/intro/getting-started-first-scala-eclipse.rst index 88b7c50b77..44878e89b9 100644 --- a/akka-docs/intro/getting-started-first-scala-eclipse.rst +++ b/akka-docs/intro/getting-started-first-scala-eclipse.rst @@ -85,10 +85,10 @@ Downloading and installing Akka ------------------------------- To build and run the tutorial sample from the command line, you have to download -Akka. If you prefer to use SBT to build and run the sample then you can skip -this section and jump to the next one. +Akka. If you prefer to use SBT to build and run the sample then you can skip this +section and jump to the next one. -Let's get the ``akka-actors-2.0-SNAPSHOT.zip`` distribution of Akka from +Let's get the ``akka-2.0-SNAPSHOT.zip`` distribution of Akka from http://akka.io/downloads/ which includes everything we need for this tutorial. Once you have downloaded the distribution unzip it in the folder you would like to have Akka installed in. In my case I choose to install it in @@ -99,46 +99,54 @@ You need to do one more thing in order to install Akka properly: set the I'm opening up a shell, navigating down to the distribution, and setting the ``AKKA_HOME`` variable:: - $ cd /Users/jboner/tools/akka-actors-2.0-SNAPSHOT + $ cd /Users/jboner/tools/akka-2.0-SNAPSHOT $ export AKKA_HOME=`pwd` $ echo $AKKA_HOME - /Users/jboner/tools/akka-actors-2.0-SNAPSHOT + /Users/jboner/tools/akka-2.0-SNAPSHOT The distribution looks like this:: $ ls -1 + bin config + deploy doc lib src +- In the ``bin`` directory we have scripts for starting the Akka Microkernel. - In the ``config`` directory we have the Akka conf files. 
-- In the ``doc`` directory we have the documentation, API, doc JARs, and also - the source files for the tutorials. +- In the ``deploy`` directory we can place applications to be run with the microkernel. +- In the ``doc`` directory we have the documentation, API, and doc JARs. - In the ``lib`` directory we have the Scala and Akka JARs. - In the ``src`` directory we have the source JARs for Akka. - The only JAR we will need for this tutorial (apart from the ``scala-library.jar`` JAR) is the ``akka-actor-2.0-SNAPSHOT.jar`` JAR in the ``lib/akka`` directory. This is a self-contained JAR with zero dependencies and contains everything we need to write a system using Actors. -Akka is very modular and has many JARs for containing different features. The core distribution has seven modules: +Akka is very modular and has many JARs for containing different features. The +modules are: -- ``akka-actor-2.0-SNAPSHOT.jar`` -- Standard Actors -- ``akka-typed-actor-2.0-SNAPSHOT.jar`` -- Typed Actors -- ``akka-remote-2.0-SNAPSHOT.jar`` -- Remote Actors -- ``akka-stm-2.0-SNAPSHOT.jar`` -- STM (Software Transactional Memory), transactors and transactional datastructures -- ``akka-slf4j-2.0-SNAPSHOT.jar`` -- SLF4J Event Handler Listener for logging with SLF4J -- ``akka-testkit-2.0-SNAPSHOT.jar`` -- Toolkit for testing Actors +- ``akka-actor`` -- Actors -The Akka Microkernel distribution also includes these jars: +- ``akka-remote`` -- Remote Actors -- ``akka-kernel-2.0-SNAPSHOT.jar`` -- Akka microkernel for running a bare-bones mini application server (embeds Jetty etc.) -- ``akka-camel-2.0-SNAPSHOT.jar`` -- Apache Camel Actors integration (it's the best way to have your Akka application communicate with the rest of the world) -- ``akka-camel-typed-2.0-SNAPSHOT.jar`` -- Apache Camel Typed Actors integration -- ``akka-spring-2.0-SNAPSHOT.jar`` -- Spring framework integration +- ``akka-slf4j`` -- SLF4J Event Handler Listener for logging with SLF4J + +- ``akka-testkit`` -- Toolkit for testing Actors + +- ``akka-kernel`` -- Akka microkernel for running a bare-bones mini application server + +- ``akka-durable-mailboxes`` -- Durable mailboxes: file-based, MongoDB, Redis, Zookeeper + +- ``akka-amqp`` -- AMQP integration + +.. - ``akka-stm-2.0-SNAPSHOT.jar`` -- STM (Software Transactional Memory), transactors and transactional datastructures +.. - ``akka-camel-2.0-SNAPSHOT.jar`` -- Apache Camel Actors integration (it's the best way to have your Akka application communicate with the rest of the world) +.. - ``akka-camel-typed-2.0-SNAPSHOT.jar`` -- Apache Camel Typed Actors integration +.. - ``akka-spring-2.0-SNAPSHOT.jar`` -- Spring framework integration Downloading and installing the Scala IDE for Eclipse diff --git a/akka-docs/intro/getting-started-first-scala.rst b/akka-docs/intro/getting-started-first-scala.rst index 5f0b9f2c0d..8759343a2e 100644 --- a/akka-docs/intro/getting-started-first-scala.rst +++ b/akka-docs/intro/getting-started-first-scala.rst @@ -86,10 +86,10 @@ Downloading and installing Akka =============================== To build and run the tutorial sample from the command line, you have to download -Akka. If you prefer to use SBT to build and run the sample then you can skipthis +Akka. If you prefer to use SBT to build and run the sample then you can skip this section and jump to the next one. 
-Let's get the ``akka-actors-2.0-SNAPSHOT.zip`` distribution of Akka from +Let's get the ``akka-2.0-SNAPSHOT.zip`` distribution of Akka from http://akka.io/downloads/ which includes everything we need for this tutorial. Once you have downloaded the distribution unzip it in the folder you would like to have Akka installed in. In my case I choose to install it in @@ -100,47 +100,54 @@ You need to do one more thing in order to install Akka properly: set the I'm opening up a shell, navigating down to the distribution, and setting the ``AKKA_HOME`` variable:: - $ cd /Users/jboner/tools/akka-actors-2.0-SNAPSHOT + $ cd /Users/jboner/tools/akka-2.0-SNAPSHOT $ export AKKA_HOME=`pwd` $ echo $AKKA_HOME - /Users/jboner/tools/akka-actors-2.0-SNAPSHOT + /Users/jboner/tools/akka-2.0-SNAPSHOT The distribution looks like this:: $ ls -1 + bin config + deploy doc lib src +- In the ``bin`` directory we have scripts for starting the Akka Microkernel. - In the ``config`` directory we have the Akka conf files. -- In the ``doc`` directory we have the documentation, API, doc JARs, and also - the source files for the tutorials. +- In the ``deploy`` directory we can place applications to be run with the microkernel. +- In the ``doc`` directory we have the documentation, API, and doc JARs. - In the ``lib`` directory we have the Scala and Akka JARs. - In the ``src`` directory we have the source JARs for Akka. - The only JAR we will need for this tutorial (apart from the ``scala-library.jar`` JAR) is the ``akka-actor-2.0-SNAPSHOT.jar`` JAR in the ``lib/akka`` directory. This is a self-contained JAR with zero dependencies and contains everything we need to write a system using Actors. Akka is very modular and has many JARs for containing different features. The -core distribution has seven modules: +modules are: -- ``akka-actor-2.0-SNAPSHOT.jar`` -- Standard Actors -- ``akka-typed-actor-2.0-SNAPSHOT.jar`` -- Typed Actors -- ``akka-remote-2.0-SNAPSHOT.jar`` -- Remote Actors -- ``akka-stm-2.0-SNAPSHOT.jar`` -- STM (Software Transactional Memory), transactors and transactional datastructures -- ``akka-slf4j-2.0-SNAPSHOT.jar`` -- SLF4J Event Handler Listener for logging with SLF4J -- ``akka-testkit-2.0-SNAPSHOT.jar`` -- Toolkit for testing Actors +- ``akka-actor`` -- Actors -The Akka Microkernel distribution also includes these jars: +- ``akka-remote`` -- Remote Actors -- ``akka-kernel-2.0-SNAPSHOT.jar`` -- Akka microkernel for running a bare-bones mini application server (embeds Jetty etc.) -- ``akka-camel-2.0-SNAPSHOT.jar`` -- Apache Camel Actors integration (it's the best way to have your Akka application communicate with the rest of the world) -- ``akka-camel-typed-2.0-SNAPSHOT.jar`` -- Apache Camel Typed Actors integration -- ``akka-spring-2.0-SNAPSHOT.jar`` -- Spring framework integration +- ``akka-slf4j`` -- SLF4J Event Handler Listener for logging with SLF4J + +- ``akka-testkit`` -- Toolkit for testing Actors + +- ``akka-kernel`` -- Akka microkernel for running a bare-bones mini application server + +- ``akka-durable-mailboxes`` -- Durable mailboxes: file-based, MongoDB, Redis, Zookeeper + +- ``akka-amqp`` -- AMQP integration + +.. - ``akka-stm-2.0-SNAPSHOT.jar`` -- STM (Software Transactional Memory), transactors and transactional datastructures +.. - ``akka-camel-2.0-SNAPSHOT.jar`` -- Apache Camel Actors integration (it's the best way to have your Akka application communicate with the rest of the world) +.. - ``akka-camel-typed-2.0-SNAPSHOT.jar`` -- Apache Camel Typed Actors integration +.. 
- ``akka-spring-2.0-SNAPSHOT.jar`` -- Spring framework integration Downloading and installing Scala @@ -217,14 +224,12 @@ files. Not needed in this tutorial, but if you would like to use additional Akka modules beyond ``akka-actor``, you can add these as ``libraryDependencies`` in ``build.sbt``. Note that there must be a blank line between each. Here is an -example adding ``akka-remote`` and ``akka-stm``:: +example adding ``akka-remote``:: libraryDependencies += "com.typesafe.akka" % "akka-actor" % "2.0-SNAPSHOT" libraryDependencies += "com.typesafe.akka" % "akka-remote" % "2.0-SNAPSHOT" - libraryDependencies += "com.typesafe.akka" % "akka-stm" % "2.0-SNAPSHOT" - So, now we are all set. SBT itself needs a whole bunch of dependencies but our project will only need diff --git a/akka-docs/intro/getting-started.rst b/akka-docs/intro/getting-started.rst index 5eeec4a12a..59e7e1d7fc 100644 --- a/akka-docs/intro/getting-started.rst +++ b/akka-docs/intro/getting-started.rst @@ -36,8 +36,7 @@ Download -------- There are several ways to download Akka. You can download the full distribution -with microkernel, which includes all modules. You can download just the core -distribution or just the actors distribution. Or you can use a build tool like +with microkernel, which includes all modules. Or you can use a build tool like Maven or sbt to download dependencies from the Akka Maven repository. Modules @@ -54,7 +53,7 @@ Akka is very modular and has many JARs for containing different features. - ``akka-camel-2.0-SNAPSHOT.jar`` -- Apache Camel Actors integration (it's the best way to have your Akka application communicate with the rest of the world) - ``akka-camel-typed-2.0-SNAPSHOT.jar`` -- Apache Camel Typed Actors integration - ``akka-spring-2.0-SNAPSHOT.jar`` -- Spring framework integration -- ``akka-kernel-2.0-SNAPSHOT.jar`` -- Akka microkernel for running a bare-bones mini application server (embeds Jetty etc.) +- ``akka-kernel-2.0-SNAPSHOT.jar`` -- Akka microkernel for running a bare-bones mini application server How to see the JARs dependencies of each Akka module is described in the :ref:`dependencies` section. Worth noting is that ``akka-actor`` has zero @@ -69,18 +68,18 @@ http://akka.io/downloads and unzip it. Using a snapshot version ------------------------ -The Akka nightly snapshots are published to -http://repo.typesafe.com/typesafe/maven-timestamps/ and are versioned with a -timestamp. You need to choose a timestamped version to work with and can decide -when to update to a newer version. +The Akka nightly snapshots are published to http://akka.io/snapshots/ and are +versioned with both ``SNAPSHOT`` and timestamps. You can choose a timestamped +version to work with and can decide when to update to a newer version. The Akka +snapshots repository is also proxied through http://repo.typesafe.com/typesafe/snapshots/ +which includes proxies for several other repositories that Akka modules depend on. Microkernel -^^^^^^^^^^^ +----------- -The Akka Modules distribution includes the microkernel. To run the microkernel: - -* Set the AKKA_HOME environment variable to the root of the Akka distribution. -* To start the kernel use the scripts in the ``bin`` directory and deploy all samples applications from ``./deploy`` dir. +The Akka distribution includes the microkernel. To run the microkernel put your +application jar in the ``deploy`` directory and use the scripts in the ``bin`` +directory. More information is available in the documentation of the :ref:`microkernel`. 
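For orientation, the application jar dropped into the ``deploy`` directory is expected to contain a boot class; a minimal sketch (class and system names here are made up) following the same shape as the ``HelloKernel`` change earlier in this patch series, started with something like ``bin/akka sample.kernel.EchoKernel``::

   package sample.kernel

   import akka.actor.{ Actor, ActorSystem, Props }
   import akka.kernel.Bootable

   class EchoActor extends Actor {
     def receive = {
       case msg ⇒ sender ! msg   // simply echo every message back
     }
   }

   // The boot class instantiated by the kernel's Main
   class EchoKernel extends Bootable {
     val system = ActorSystem("echokernel")

     def startup = {
       system.actorOf(Props[EchoActor], "echo")
     }

     def shutdown = {
       system.shutdown()
     }
   }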
@@ -88,7 +87,7 @@ Using a build tool ------------------ Akka can be used with build tools that support Maven repositories. The Akka -Maven repository can be found at http://akka.io/repository/ and Typesafe provides +Maven repository can be found at http://akka.io/realeses/ and Typesafe provides http://repo.typesafe.com/typesafe/releases/ that proxies several other repositories, including akka.io. @@ -121,7 +120,7 @@ Summary of the essential parts for using Akka with Maven: 2.0-SNAPSHOT -**Note**: for snapshot versions akka uses specific timestamped versions. +**Note**: for snapshot versions both ``SNAPSHOT`` and timestamped versions are published. Using Akka with SBT @@ -153,7 +152,7 @@ Using Akka with Eclipse Information about how to use Akka with Eclipse, including how to create an Akka Eclipse project from scratch, can be found in the :ref:`getting-started-first-scala-eclipse`. -Setup SBT project and then use `sbteclipse `_ to generate Eclipse project. +Setup SBT project and then use `sbteclipse `_ to generate Eclipse project. Using Akka with IntelliJ IDEA ----------------------------- @@ -166,7 +165,6 @@ Build from sources Akka uses Git and is hosted at `Github `_. * Akka: clone the Akka repository from ``_ -* Akka Modules: clone the Akka Modules repository from ``_ Continue reading the page on :ref:`building-akka` diff --git a/akka-docs/intro/what-is-akka.rst b/akka-docs/intro/what-is-akka.rst index 9ba05f5c26..607faf8173 100644 --- a/akka-docs/intro/what-is-akka.rst +++ b/akka-docs/intro/what-is-akka.rst @@ -76,7 +76,6 @@ Akka can be used in two different ways - As a library: used by a web app, to be put into ‘WEB-INF/lib’ or as a regular JAR on your classpath. -- As a microkernel: stand-alone kernel, embedding a servlet container and all - the other modules. +- As a microkernel: stand-alone kernel to drop your application into. See the :ref:`deployment-scenarios` for details. diff --git a/akka-docs/project/links.rst b/akka-docs/project/links.rst index aba41acd3e..ae76c25234 100644 --- a/akka-docs/project/links.rst +++ b/akka-docs/project/links.rst @@ -27,41 +27,44 @@ Akka uses Git and is hosted at `Github `_. * Akka: clone the Akka repository from ``_ -`Maven Repository `_ -================================================ +`Releases Repository `_ +================================================= -The Akka Maven repository can be found at ``_. +The Akka Maven repository can be found at http://akka.io/releases/. -Typesafe provides ``_ that proxies several other repositories, including akka.io. -It is convenient to use the Typesafe repository, since it includes all external dependencies of Akka. -It is a "best-effort" service, and if it is unavailable you may need to use the underlying repositories -directly. +Typesafe provides http://repo.typesafe.com/typesafe/releases/ that proxies +several other repositories, including akka.io. It is convenient to use the +Typesafe repository, since it includes all external dependencies of Akka. It is +a "best-effort" service, and if it is unavailable you may need to use the +underlying repositories directly. 
-* http://akka.io/repository -* http://repository.codehaus.org +* http://akka.io/releases/ +* http://repository.codehaus.org/ * http://guiceyfruit.googlecode.com/svn/repo/releases/ * http://repository.jboss.org/nexus/content/groups/public/ -* http://download.java.net/maven/2 -* http://oss.sonatype.org/content/repositories/releases -* http://download.java.net/maven/glassfish -* http://databinder.net/repo +* http://download.java.net/maven/2/ +* http://oss.sonatype.org/content/repositories/releases/ +* http://download.java.net/maven/glassfish/ +* http://databinder.net/repo/ -SNAPSHOT Versions -================= -Nightly builds are available in ``_ repository as -timestamped snapshot versions. Pick a timestamp from -``_. +`Snapshots Repository `_ +=================================================== + +Nightly builds are available in http://akka.io/snapshots/ and proxied through +http://repo.typesafe.com/typesafe/snapshots/ as both ``SNAPSHOT`` and +timestamped versions. + +For timestamped versions, pick a timestamp from +http://repo.typesafe.com/typesafe/akka-snapshots/com/typesafe/akka/akka-actor/. All Akka modules that belong to the same build have the same timestamp. Make sure that you add the repository to the sbt resolvers or maven repositories:: - - resolvers += "Typesafe Timestamp Repo" at "http://repo.typesafe.com/typesafe/akka-snapshots/" - -Define the library dependencies with the timestamp as version:: - libraryDependencies += "com.typesafe.akka" % "akka-actor" % "2.0-20111118-000627" + resolvers += "Typesafe Snapshots" at "http://repo.typesafe.com/typesafe/snapshots/" - libraryDependencies += "com.typesafe.akka" % "akka-remote" % "2.0-20111118-000627" +Define the library dependencies with the timestamp as version. For example:: + libraryDependencies += "com.typesafe.akka" % "akka-actor" % "2.0-20111215-000549" + libraryDependencies += "com.typesafe.akka" % "akka-remote" % "2.0-20111215-000549" From 1d8bd1b67f4dd13a46ad2bce9e87dcfcba5552cc Mon Sep 17 00:00:00 2001 From: Peter Vlugter Date: Thu, 15 Dec 2011 17:23:40 +1300 Subject: [PATCH 31/34] Update akka sbt plugin - compatible with both 1.x and 2.x microkernels - fix sbt deprecation warnings --- .../src/main/scala/AkkaKernelPlugin.scala | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/akka-sbt-plugin/src/main/scala/AkkaKernelPlugin.scala b/akka-sbt-plugin/src/main/scala/AkkaKernelPlugin.scala index 7ad323aaa6..ad6d2f13ea 100644 --- a/akka-sbt-plugin/src/main/scala/AkkaKernelPlugin.scala +++ b/akka-sbt-plugin/src/main/scala/AkkaKernelPlugin.scala @@ -95,7 +95,9 @@ object AkkaKernelPlugin extends Plugin { } def isKernelProject(dependencies: Seq[ModuleID]): Boolean = { - dependencies.exists(moduleId ⇒ moduleId.organization == "se.scalablesolutions.akka" && moduleId.name == "akka-kernel") + dependencies.exists { d ⇒ + (d.organization == "com.typesafe.akka" || d.organization == "se.scalablesolutions.akka") && d.name == "akka-kernel" + } } private def defaultConfigSourceDirs = (sourceDirectory, unmanagedResourceDirectories) map { (src, resources) ⇒ @@ -128,7 +130,7 @@ object AkkaKernelPlugin extends Plugin { |AKKA_CLASSPATH="$AKKA_HOME/lib/*:$AKKA_HOME/config" |JAVA_OPTS="%s" | - |java $JAVA_OPTS -cp "$AKKA_CLASSPATH" -Dakka.home="$AKKA_HOME" %s + |java $JAVA_OPTS -cp "$AKKA_CLASSPATH" -Dakka.home="$AKKA_HOME" %s "$@" |""".stripMargin.format(jvmOptions, mainClass) private def distBatScript = @@ -137,7 +139,7 @@ object AkkaKernelPlugin extends Plugin { |set 
AKKA_CLASSPATH=%%AKKA_HOME%%\lib\*;%%AKKA_HOME%%\config |set JAVA_OPTS=%s | - |java %%JAVA_OPTS%% -cp "%%AKKA_CLASSPATH%%" -Dakka.home="%%AKKA_HOME%%" %s + |java %%JAVA_OPTS%% -cp "%%AKKA_CLASSPATH%%" -Dakka.home="%%AKKA_HOME%%" %s %%* |""".stripMargin.format(jvmOptions, mainClass) private def setExecutable(target: File, executable: Boolean): Option[String] = { @@ -194,9 +196,9 @@ object AkkaKernelPlugin extends Plugin { private def projectInfo(projectRef: ProjectRef, project: ResolvedProject, buildStruct: BuildStructure, state: State, allProjects: Map[ProjectRef, ResolvedProject]): SubProjectInfo = { - def optionalSetting[A](key: ScopedSetting[A]) = key in projectRef get buildStruct.data + def optionalSetting[A](key: SettingKey[A]) = key in projectRef get buildStruct.data - def setting[A](key: ScopedSetting[A], errorMessage: ⇒ String) = { + def setting[A](key: SettingKey[A], errorMessage: ⇒ String) = { optionalSetting(key) getOrElse { logger(state).error(errorMessage); throw new IllegalArgumentException() @@ -204,7 +206,7 @@ object AkkaKernelPlugin extends Plugin { } def evaluateTask[T](taskKey: sbt.Project.ScopedKey[sbt.Task[T]]) = { - EvaluateTask.evaluateTask(buildStruct, taskKey, state, projectRef, false, EvaluateTask.SystemProcessors) + EvaluateTask(buildStruct, taskKey, state, projectRef).map(_._2) } val projDeps: Seq[ModuleID] = evaluateTask(Keys.projectDependencies) match { From 44a82be86f5957c3e413f0d60e3fafc0df85c8d2 Mon Sep 17 00:00:00 2001 From: Patrik Nordwall Date: Thu, 15 Dec 2011 08:36:00 +0100 Subject: [PATCH 32/34] DOC: Disabled agents chapter, since it's in the akka-stm module. See #1488 --- akka-docs/disabled/agents.rst | 147 ++++++++++++++++++++++++++++++++++ akka-docs/scala/agents.rst | 145 +-------------------------------- 2 files changed, 148 insertions(+), 144 deletions(-) create mode 100644 akka-docs/disabled/agents.rst diff --git a/akka-docs/disabled/agents.rst b/akka-docs/disabled/agents.rst new file mode 100644 index 0000000000..b12fc1643c --- /dev/null +++ b/akka-docs/disabled/agents.rst @@ -0,0 +1,147 @@ +Agents (Scala) +============== + +.. sidebar:: Contents + + .. contents:: :local: + +Agents in Akka were inspired by `agents in Clojure `_. + +Agents provide asynchronous change of individual locations. Agents are bound to a single storage location for their lifetime, and only allow mutation of that location (to a new state) to occur as a result of an action. Update actions are functions that are asynchronously applied to the Agent's state and whose return value becomes the Agent's new state. The state of an Agent should be immutable. + +While updates to Agents are asynchronous, the state of an Agent is always immediately available for reading by any thread (using ``get`` or ``apply``) without any messages. + +Agents are reactive. The update actions of all Agents get interleaved amongst threads in a thread pool. At any point in time, at most one ``send`` action for each Agent is being executed. Actions dispatched to an agent from another thread will occur in the order they were sent, potentially interleaved with actions dispatched to the same agent from other sources. + +If an Agent is used within an enclosing transaction, then it will participate in that transaction. Agents are integrated with the STM - any dispatches made in a transaction are held until that transaction commits, and are discarded if it is retried or aborted. 
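As a quick orientation before the detailed sections below, a condensed sketch of the basic Agent lifecycle (it only combines the operations that are documented one by one in the following sections)::

   import akka.agent.Agent

   val counter = Agent(0)     // create an Agent holding an immutable value

   counter send 7             // replace the value asynchronously
   counter send (_ + 1)       // or send a function that transforms it

   val current = counter()    // reads are synchronous and immediate

   counter.close()            // stop the Agent when it is no longer needed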
+ +Creating and stopping Agents +---------------------------- + +Agents are created by invoking ``Agent(value)`` passing in the Agent's initial value. + +.. code-block:: scala + + val agent = Agent(5) + +An Agent will be running until you invoke ``close`` on it. Then it will be eligible for garbage collection (unless you hold on to it in some way). + +.. code-block:: scala + + agent.close() + +Updating Agents +--------------- + +You update an Agent by sending a function that transforms the current value or by sending just a new value. The Agent will apply the new value or function atomically and asynchronously. The update is done in a fire-forget manner and you are only guaranteed that it will be applied. There is no guarantee of when the update will be applied but dispatches to an Agent from a single thread will occur in order. You apply a value or a function by invoking the ``send`` function. + +.. code-block:: scala + + // send a value + agent send 7 + + // send a function + agent send (_ + 1) + agent send (_ * 2) + +You can also dispatch a function to update the internal state but on its own thread. This does not use the reactive thread pool and can be used for long-running or blocking operations. You do this with the ``sendOff`` method. Dispatches using either ``sendOff`` or ``send`` will still be executed in order. + +.. code-block:: scala + + // sendOff a function + agent sendOff (longRunningOrBlockingFunction) + +Reading an Agent's value +------------------------ + +Agents can be dereferenced, e.g. you can get an Agent's value, by invoking the Agent with parenthesis like this: + +.. code-block:: scala + + val result = agent() + +Or by using the get method. + +.. code-block:: scala + + val result = agent.get + +Reading an Agent's current value does not involve any message passing and happens immediately. So while updates to an Agent are asynchronous, reading the state of an Agent is synchronous. + +Awaiting an Agent's value +------------------------- + +It is also possible to read the value after all currently queued ``send``\s have completed. You can do this with ``await``: + +.. code-block:: scala + + val result = agent.await + +You can also get a ``Future`` to this value, that will be completed after the currently queued updates have completed: + +.. code-block:: scala + + val future = agent.future + // ... + val result = future.await.result.get + +Transactional Agents +-------------------- + +If an Agent is used within an enclosing transaction, then it will participate in that transaction. If you send to an Agent within a transaction then the dispatch to the Agent will be held until that transaction commits, and discarded if the transaction is aborted. + +.. code-block:: scala + + import akka.agent.Agent + import akka.stm._ + + def transfer(from: Agent[Int], to: Agent[Int], amount: Int): Boolean = { + atomic { + if (from.get < amount) false + else { + from send (_ - amount) + to send (_ + amount) + true + } + } + } + + val from = Agent(100) + val to = Agent(20) + val ok = transfer(from, to, 50) + + from() // -> 50 + to() // -> 70 + +Monadic usage +------------- + +Agents are also monadic, allowing you to compose operations using for-comprehensions. In a monadic usage, new Agents are created leaving the original Agents untouched. So the old values (Agents) are still available as-is. They are so-called 'persistent'. + +Example of a monadic usage: + +.. 
code-block:: scala + + val agent1 = Agent(3) + val agent2 = Agent(5) + + // uses foreach + var result = 0 + for (value <- agent1) { + result = value + 1 + } + + // uses map + val agent3 = + for (value <- agent1) yield value + 1 + + // uses flatMap + val agent4 = for { + value1 <- agent1 + value2 <- agent2 + } yield value1 + value2 + + agent1.close() + agent2.close() + agent3.close() + agent4.close() diff --git a/akka-docs/scala/agents.rst b/akka-docs/scala/agents.rst index b12fc1643c..bb65c4fabf 100644 --- a/akka-docs/scala/agents.rst +++ b/akka-docs/scala/agents.rst @@ -1,147 +1,4 @@ Agents (Scala) ============== -.. sidebar:: Contents - - .. contents:: :local: - -Agents in Akka were inspired by `agents in Clojure `_. - -Agents provide asynchronous change of individual locations. Agents are bound to a single storage location for their lifetime, and only allow mutation of that location (to a new state) to occur as a result of an action. Update actions are functions that are asynchronously applied to the Agent's state and whose return value becomes the Agent's new state. The state of an Agent should be immutable. - -While updates to Agents are asynchronous, the state of an Agent is always immediately available for reading by any thread (using ``get`` or ``apply``) without any messages. - -Agents are reactive. The update actions of all Agents get interleaved amongst threads in a thread pool. At any point in time, at most one ``send`` action for each Agent is being executed. Actions dispatched to an agent from another thread will occur in the order they were sent, potentially interleaved with actions dispatched to the same agent from other sources. - -If an Agent is used within an enclosing transaction, then it will participate in that transaction. Agents are integrated with the STM - any dispatches made in a transaction are held until that transaction commits, and are discarded if it is retried or aborted. - -Creating and stopping Agents ----------------------------- - -Agents are created by invoking ``Agent(value)`` passing in the Agent's initial value. - -.. code-block:: scala - - val agent = Agent(5) - -An Agent will be running until you invoke ``close`` on it. Then it will be eligible for garbage collection (unless you hold on to it in some way). - -.. code-block:: scala - - agent.close() - -Updating Agents ---------------- - -You update an Agent by sending a function that transforms the current value or by sending just a new value. The Agent will apply the new value or function atomically and asynchronously. The update is done in a fire-forget manner and you are only guaranteed that it will be applied. There is no guarantee of when the update will be applied but dispatches to an Agent from a single thread will occur in order. You apply a value or a function by invoking the ``send`` function. - -.. code-block:: scala - - // send a value - agent send 7 - - // send a function - agent send (_ + 1) - agent send (_ * 2) - -You can also dispatch a function to update the internal state but on its own thread. This does not use the reactive thread pool and can be used for long-running or blocking operations. You do this with the ``sendOff`` method. Dispatches using either ``sendOff`` or ``send`` will still be executed in order. - -.. code-block:: scala - - // sendOff a function - agent sendOff (longRunningOrBlockingFunction) - -Reading an Agent's value ------------------------- - -Agents can be dereferenced, e.g. 
you can get an Agent's value, by invoking the Agent with parenthesis like this: - -.. code-block:: scala - - val result = agent() - -Or by using the get method. - -.. code-block:: scala - - val result = agent.get - -Reading an Agent's current value does not involve any message passing and happens immediately. So while updates to an Agent are asynchronous, reading the state of an Agent is synchronous. - -Awaiting an Agent's value -------------------------- - -It is also possible to read the value after all currently queued ``send``\s have completed. You can do this with ``await``: - -.. code-block:: scala - - val result = agent.await - -You can also get a ``Future`` to this value, that will be completed after the currently queued updates have completed: - -.. code-block:: scala - - val future = agent.future - // ... - val result = future.await.result.get - -Transactional Agents --------------------- - -If an Agent is used within an enclosing transaction, then it will participate in that transaction. If you send to an Agent within a transaction then the dispatch to the Agent will be held until that transaction commits, and discarded if the transaction is aborted. - -.. code-block:: scala - - import akka.agent.Agent - import akka.stm._ - - def transfer(from: Agent[Int], to: Agent[Int], amount: Int): Boolean = { - atomic { - if (from.get < amount) false - else { - from send (_ - amount) - to send (_ + amount) - true - } - } - } - - val from = Agent(100) - val to = Agent(20) - val ok = transfer(from, to, 50) - - from() // -> 50 - to() // -> 70 - -Monadic usage -------------- - -Agents are also monadic, allowing you to compose operations using for-comprehensions. In a monadic usage, new Agents are created leaving the original Agents untouched. So the old values (Agents) are still available as-is. They are so-called 'persistent'. - -Example of a monadic usage: - -.. code-block:: scala - - val agent1 = Agent(3) - val agent2 = Agent(5) - - // uses foreach - var result = 0 - for (value <- agent1) { - result = value + 1 - } - - // uses map - val agent3 = - for (value <- agent1) yield value + 1 - - // uses flatMap - val agent4 = for { - value1 <- agent1 - value2 <- agent2 - } yield value1 + value2 - - agent1.close() - agent2.close() - agent3.close() - agent4.close() +The Akka Agents module has not been migrated to Akka 2.0-SNAPSHOT yet. 
\ No newline at end of file From 866e47c97ccd4b79109ce3d10b238ca084838df1 Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Thu, 15 Dec 2011 01:27:59 +0100 Subject: [PATCH 33/34] Adding Scala documentation for Akka Extensions --- .../docs/extension/ExtensionDocSpec.scala | 74 +++++++++++++++++++ akka-docs/dev/extending-akka.rst | 71 ++++++++++++++++++ akka-docs/dev/index.rst | 1 + akka-docs/dev/team.rst | 6 +- 4 files changed, 150 insertions(+), 2 deletions(-) create mode 100644 akka-docs/dev/code/akka/docs/extension/ExtensionDocSpec.scala create mode 100644 akka-docs/dev/extending-akka.rst diff --git a/akka-docs/dev/code/akka/docs/extension/ExtensionDocSpec.scala b/akka-docs/dev/code/akka/docs/extension/ExtensionDocSpec.scala new file mode 100644 index 0000000000..58f9639559 --- /dev/null +++ b/akka-docs/dev/code/akka/docs/extension/ExtensionDocSpec.scala @@ -0,0 +1,74 @@ +package akka.docs.extension + +import org.scalatest.WordSpec +import org.scalatest.matchers.MustMatchers + +//#imports +import akka.actor._ +import java.util.concurrent.atomic.AtomicLong + +//#imports + +//#extension +class CountExtensionImpl extends Extension { + //Since this Extension is a shared instance + // per ActorSystem we need to be threadsafe + private val counter = new AtomicLong(0) + + //This is the operation this Extension provides + def increment() = counter.incrementAndGet() +} +//#extension + +//#extensionid +object CountExtension + extends ExtensionId[CountExtensionImpl] + with ExtensionIdProvider { + //The lookup method is required by ExtensionIdProvider, + // so we return ourselves here, this allows us + // to configure our extension to be loaded when + // the ActorSystem starts up + override def lookup = CountExtension + + //This method will be called by Akka + // to instantiate our Extension + override def createExtension(system: ActorSystemImpl) = new CountExtensionImpl +} +//#extensionid + +//#extension-usage-actor +import akka.actor.Actor + +class MyActor extends Actor { + def receive = { + case someMessage ⇒ + CountExtension(context.system).increment() + } +} +//#extension-usage-actor + +//#extension-usage-actor-trait +import akka.actor.Actor + +trait Counting { self: Actor ⇒ + def increment() = CountExtension(context.system).increment() +} +class MyCounterActor extends Actor with Counting { + def receive = { + case someMessage ⇒ increment() + } +} +//#extension-usage-actor-trait + +class ExtensionDocSpec extends WordSpec with MustMatchers { + + "demonstrate how to create an extension in Scala" in { + val system: ActorSystem = null + intercept[Exception] { + //#extension-usage + CountExtension(system).increment + //#extension-usage + } + } + +} diff --git a/akka-docs/dev/extending-akka.rst b/akka-docs/dev/extending-akka.rst new file mode 100644 index 0000000000..8af8e43b7f --- /dev/null +++ b/akka-docs/dev/extending-akka.rst @@ -0,0 +1,71 @@ +.. _extending-akka: + +Akka Extensions +=============== + +.. sidebar:: Contents + + .. contents:: :local: + +If you want to add features to Akka, there is a very elegant, but powerful mechanism for doing so. +It's called Akka Extensions and is comprised of 2 basic components: an ``Extension`` and an ``ExtensionId``. + +Extensions will only be loaded once per ``ActorSystem``, which will be managed by Akka. +You can choose to have your Extension loaded on-demand or at ``ActorSystem`` creation time through the Akka configuration. +Details on how to make that happens are below, in the "Loading from Configuration" section. + +.. 
warning:: + + Since an extension is a way to hook into Akka itself, the implementor of the extension needs to + ensure the thread safety of his/her extension. + +Loading from Configuration +-------------------------- + +To be able to load extensions from your Akka configuration you must add FQCNs of implementations of either ``ExtensionId`` or ``ExtensionIdProvider`` +in the "akka.extensions" section of the config you provide to your ``ActorSystem``. + +Building an Extension (Java) +---------------------------- + +So let's create a sample extension that just lets us count the number of times something has happened. + +FIXME + +Building an Extension (Scala) +----------------------------- + +So let's create a sample extension that just lets us count the number of times something has happened. + +First, we define what our ``Extension`` should do: + +.. includecode:: code/akka/docs/extension/ExtensionDocSpec.scala + :include: imports,extension + +Then we need to create an ``ExtensionId`` for our extension so we can grab ahold of it. + +.. includecode:: code/akka/docs/extension/ExtensionDocSpec.scala + :include: imports,extensionid + +Wicked! Now all we need to do is to actually use it: + +.. includecode:: code/akka/docs/extension/ExtensionDocSpec.scala + :include: extension-usage + +Or from inside of an Akka Actor: + +.. includecode:: code/akka/docs/extension/ExtensionDocSpec.scala + :include: extension-usage-actor + +You can also hide extension behind traits: + +.. includecode:: code/akka/docs/extension/ExtensionDocSpec.scala + :include: extension-usage-actor-trait + +That's all there is to it! + +Applicability +------------- + +The sky is the limit! +By the way, did you know that Akka's ``Typed Actors``, ``Serialization`` and other features are implemented as Akka Extensions? \ No newline at end of file diff --git a/akka-docs/dev/index.rst b/akka-docs/dev/index.rst index 17fcb2427c..8ea96ee473 100644 --- a/akka-docs/dev/index.rst +++ b/akka-docs/dev/index.rst @@ -5,6 +5,7 @@ Information for Developers :maxdepth: 2 building-akka + extending-akka multi-jvm-testing developer-guidelines documentation diff --git a/akka-docs/dev/team.rst b/akka-docs/dev/team.rst index 36e0cd1339..8c70e016d2 100644 --- a/akka-docs/dev/team.rst +++ b/akka-docs/dev/team.rst @@ -1,7 +1,9 @@ + .. 
_team: -Team -===== +###### + Team +###### =================== ========================== ==================================== Name Role Email From 73b79d6e3eb63f7b28a535667abbf37ac2bdcf8e Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Thu, 15 Dec 2011 13:46:37 +0100 Subject: [PATCH 34/34] Adding a Scala and a Java guide to Akka Extensions --- akka-docs/dev/index.rst | 1 - .../docs/extension/ExtensionDocTest.scala | 5 ++ .../docs/extension/ExtensionDocTestBase.java | 70 +++++++++++++++++++ akka-docs/{dev => java}/extending-akka.rst | 68 ++++++++---------- akka-docs/java/index.rst | 1 + .../docs/extension/ExtensionDocSpec.scala | 0 akka-docs/scala/extending-akka.rst | 53 ++++++++++++++ akka-docs/scala/index.rst | 1 + 8 files changed, 160 insertions(+), 39 deletions(-) create mode 100644 akka-docs/java/code/akka/docs/extension/ExtensionDocTest.scala create mode 100644 akka-docs/java/code/akka/docs/extension/ExtensionDocTestBase.java rename akka-docs/{dev => java}/extending-akka.rst (73%) rename akka-docs/{dev => scala}/code/akka/docs/extension/ExtensionDocSpec.scala (100%) create mode 100644 akka-docs/scala/extending-akka.rst diff --git a/akka-docs/dev/index.rst b/akka-docs/dev/index.rst index 8ea96ee473..17fcb2427c 100644 --- a/akka-docs/dev/index.rst +++ b/akka-docs/dev/index.rst @@ -5,7 +5,6 @@ Information for Developers :maxdepth: 2 building-akka - extending-akka multi-jvm-testing developer-guidelines documentation diff --git a/akka-docs/java/code/akka/docs/extension/ExtensionDocTest.scala b/akka-docs/java/code/akka/docs/extension/ExtensionDocTest.scala new file mode 100644 index 0000000000..4cd5014863 --- /dev/null +++ b/akka-docs/java/code/akka/docs/extension/ExtensionDocTest.scala @@ -0,0 +1,5 @@ +package akka.docs.extension + +import org.scalatest.junit.JUnitSuite + +class ExtensionDocTest extends ExtensionDocTestBase with JUnitSuite diff --git a/akka-docs/java/code/akka/docs/extension/ExtensionDocTestBase.java b/akka-docs/java/code/akka/docs/extension/ExtensionDocTestBase.java new file mode 100644 index 0000000000..2d8f914902 --- /dev/null +++ b/akka-docs/java/code/akka/docs/extension/ExtensionDocTestBase.java @@ -0,0 +1,70 @@ +package akka.docs.extension; + +//#imports +import akka.actor.*; +import java.util.concurrent.atomic.AtomicLong; + +//#imports + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import static org.junit.Assert.*; + +public class ExtensionDocTestBase { + + //#extension + public static class CountExtensionImpl implements Extension { + //Since this Extension is a shared instance + // per ActorSystem we need to be threadsafe + private final AtomicLong counter = new AtomicLong(0); + + //This is the operation this Extension provides + public long increment() { + return counter.incrementAndGet(); + } + } + //#extension + + //#extensionid + static class CountExtensionId extends AbstractExtensionId { + //This method will be called by Akka + // to instantiate our Extension + public CountExtensionImpl createExtension(ActorSystemImpl i) { + return new CountExtensionImpl(); + } + } + + //This will be the identifier of our CountExtension + public final static CountExtensionId CountExtension = new CountExtensionId(); + //#extensionid + + //#extensionid-provider + static class CountExtensionIdProvider implements ExtensionIdProvider { + public CountExtensionId lookup() { + return CountExtension; //The public static final + } + } + //#extensionid-provider + + //#extension-usage-actor + static class MyActor extends UntypedActor { + public void 
onReceive(Object msg) { + CountExtension.get(getContext().system()).increment(); + } + } + //#extension-usage-actor + + + @Test public void demonstrateHowToCreateAndUseAnAkkaExtensionInJava() { + final ActorSystem system = null; + try { + //#extension-usage + CountExtension.get(system).increment(); + //#extension-usage + } catch(Exception e) { + //do nothing + } + } + +} diff --git a/akka-docs/dev/extending-akka.rst b/akka-docs/java/extending-akka.rst similarity index 73% rename from akka-docs/dev/extending-akka.rst rename to akka-docs/java/extending-akka.rst index 8af8e43b7f..7cc46cc93d 100644 --- a/akka-docs/dev/extending-akka.rst +++ b/akka-docs/java/extending-akka.rst @@ -19,50 +19,42 @@ Details on how to make that happens are below, in the "Loading from Configuratio Since an extension is a way to hook into Akka itself, the implementor of the extension needs to ensure the thread safety of his/her extension. + +Building an Extension +--------------------- + +So let's create a sample extension that just lets us count the number of times something has happened. + +First, we define what our ``Extension`` should do: + +.. includecode:: code/akka/docs/extension/ExtensionDocTestBase.java + :include: imports,extension + +Then we need to create an ``ExtensionId`` for our extension so we can grab ahold of it. + +.. includecode:: code/akka/docs/extension/ExtensionDocTestBase.java + :include: imports,extensionid + +Wicked! Now all we need to do is to actually use it: + +.. includecode:: code/akka/docs/extension/ExtensionDocTestBase.java + :include: extension-usage + +Or from inside of an Akka Actor: + +.. includecode:: code/akka/docs/extension/ExtensionDocTestBase.java + :include: extension-usage-actor + +That's all there is to it! + Loading from Configuration -------------------------- To be able to load extensions from your Akka configuration you must add FQCNs of implementations of either ``ExtensionId`` or ``ExtensionIdProvider`` in the "akka.extensions" section of the config you provide to your ``ActorSystem``. -Building an Extension (Java) ----------------------------- - -So let's create a sample extension that just lets us count the number of times something has happened. - -FIXME - -Building an Extension (Scala) ------------------------------ - -So let's create a sample extension that just lets us count the number of times something has happened. - -First, we define what our ``Extension`` should do: - -.. includecode:: code/akka/docs/extension/ExtensionDocSpec.scala - :include: imports,extension - -Then we need to create an ``ExtensionId`` for our extension so we can grab ahold of it. - -.. includecode:: code/akka/docs/extension/ExtensionDocSpec.scala - :include: imports,extensionid - -Wicked! Now all we need to do is to actually use it: - -.. includecode:: code/akka/docs/extension/ExtensionDocSpec.scala - :include: extension-usage - -Or from inside of an Akka Actor: - -.. includecode:: code/akka/docs/extension/ExtensionDocSpec.scala - :include: extension-usage-actor - -You can also hide extension behind traits: - -.. includecode:: code/akka/docs/extension/ExtensionDocSpec.scala - :include: extension-usage-actor-trait - -That's all there is to it! +.. 
includecode:: code/akka/docs/extension/ExtensionDocTestBase.java + :include: extensionid-provider Applicability ------------- diff --git a/akka-docs/java/index.rst b/akka-docs/java/index.rst index c04e5bc259..05273b84b5 100644 --- a/akka-docs/java/index.rst +++ b/akka-docs/java/index.rst @@ -17,3 +17,4 @@ Java API dispatchers routing guice-integration + extending-akka diff --git a/akka-docs/dev/code/akka/docs/extension/ExtensionDocSpec.scala b/akka-docs/scala/code/akka/docs/extension/ExtensionDocSpec.scala similarity index 100% rename from akka-docs/dev/code/akka/docs/extension/ExtensionDocSpec.scala rename to akka-docs/scala/code/akka/docs/extension/ExtensionDocSpec.scala diff --git a/akka-docs/scala/extending-akka.rst b/akka-docs/scala/extending-akka.rst new file mode 100644 index 0000000000..a13326b626 --- /dev/null +++ b/akka-docs/scala/extending-akka.rst @@ -0,0 +1,53 @@ +.. _extending-akka: + +Akka Extensions +=============== + +.. sidebar:: Contents + + .. contents:: :local: + + +Building an Extension +--------------------- + +So let's create a sample extension that just lets us count the number of times something has happened. + +First, we define what our ``Extension`` should do: + +.. includecode:: code/akka/docs/extension/ExtensionDocSpec.scala + :include: imports,extension + +Then we need to create an ``ExtensionId`` for our extension so we can grab ahold of it. + +.. includecode:: code/akka/docs/extension/ExtensionDocSpec.scala + :include: imports,extensionid + +Wicked! Now all we need to do is to actually use it: + +.. includecode:: code/akka/docs/extension/ExtensionDocSpec.scala + :include: extension-usage + +Or from inside of an Akka Actor: + +.. includecode:: code/akka/docs/extension/ExtensionDocSpec.scala + :include: extension-usage-actor + +You can also hide extension behind traits: + +.. includecode:: code/akka/docs/extension/ExtensionDocSpec.scala + :include: extension-usage-actor-trait + +That's all there is to it! + +Loading from Configuration +-------------------------- + +To be able to load extensions from your Akka configuration you must add FQCNs of implementations of either ``ExtensionId`` or ``ExtensionIdProvider`` +in the "akka.extensions" section of the config you provide to your ``ActorSystem``. + +Applicability +------------- + +The sky is the limit! +By the way, did you know that Akka's ``Typed Actors``, ``Serialization`` and other features are implemented as Akka Extensions? \ No newline at end of file diff --git a/akka-docs/scala/index.rst b/akka-docs/scala/index.rst index a36fe9513f..5f08468e39 100644 --- a/akka-docs/scala/index.rst +++ b/akka-docs/scala/index.rst @@ -19,3 +19,4 @@ Scala API routing fsm testing + extending-akka
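To round off the extension documentation, a small self-contained driver (the object name is hypothetical; it assumes the ``CountExtension`` and ``MyCounterActor`` definitions from ``ExtensionDocSpec.scala`` above are on the classpath) showing the extension used both from inside an actor and directly::

   import akka.actor.{ ActorSystem, Props }
   import akka.docs.extension.{ CountExtension, MyCounterActor }

   object CountExtensionDemo extends App {
     val system = ActorSystem("demo")

     // Inside an actor: MyCounterActor increments the extension on every message
     val counter = system.actorOf(Props[MyCounterActor])
     counter ! "tick"
     counter ! "tick"

     Thread.sleep(1000)  // crude: give the actor a moment to process the ticks

     // Direct use: the same shared CountExtensionImpl instance is returned
     // for this ActorSystem, so all increments end up in one counter
     println("count is now " + CountExtension(system).increment())

     system.shutdown()
   }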