diff --git a/akka-actor-tests/src/test/java/akka/actor/JavaAPI.java b/akka-actor-tests/src/test/java/akka/actor/JavaAPI.java index 9678cbc76d..7070e8bf67 100644 --- a/akka-actor-tests/src/test/java/akka/actor/JavaAPI.java +++ b/akka-actor-tests/src/test/java/akka/actor/JavaAPI.java @@ -20,7 +20,7 @@ public class JavaAPI { @AfterClass public static void afterAll() { - system.stop(); + system.shutdown(); system = null; } diff --git a/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java b/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java index 0a994b93d6..e7597309c4 100644 --- a/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java +++ b/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java @@ -58,7 +58,7 @@ public class JavaExtension { @AfterClass public static void afterAll() { - system.stop(); + system.shutdown(); system = null; } diff --git a/akka-actor-tests/src/test/java/akka/dispatch/JavaFutureTests.java b/akka-actor-tests/src/test/java/akka/dispatch/JavaFutureTests.java index d534d87103..cb9b99fadc 100644 --- a/akka-actor-tests/src/test/java/akka/dispatch/JavaFutureTests.java +++ b/akka-actor-tests/src/test/java/akka/dispatch/JavaFutureTests.java @@ -3,6 +3,8 @@ package akka.dispatch; import akka.actor.Timeout; import akka.actor.ActorSystem; +import akka.japi.*; +import akka.util.Duration; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -13,124 +15,100 @@ import java.lang.Iterable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import akka.japi.Function; -import akka.japi.Function2; -import akka.japi.Procedure; -import akka.japi.Option; import akka.testkit.AkkaSpec; public class JavaFutureTests { private static ActorSystem system; - private static FutureFactory ff; private static Timeout t; + + private final Duration timeout = Duration.create(5, TimeUnit.SECONDS); @BeforeClass public static void beforeAll() { system = ActorSystem.create("JavaFutureTests", 
AkkaSpec.testConf()); t = system.settings().ActorTimeout(); - ff = new FutureFactory(system.dispatcher(), t); } @AfterClass public static void afterAll() { - system.stop(); + system.shutdown(); system = null; } @Test public void mustBeAbleToMapAFuture() { - Future f1 = ff.future(new Callable() { + Future f1 = Futures.future(new Callable() { public String call() { return "Hello"; } - }); + }, system.dispatcher()); Future f2 = f1.map(new Function() { public String apply(String s) { return s + " World"; } - }, t); + }); - assertEquals("Hello World", f2.get()); + assertEquals("Hello World", Await.result(f2, timeout)); } @Test public void mustBeAbleToExecuteAnOnResultCallback() throws Throwable { final CountDownLatch latch = new CountDownLatch(1); - Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, system - .dispatcherFactory().defaultGlobalDispatcher()); + Promise cf = Futures.promise(system.dispatcher()); Future f = cf; - f.onResult(new Procedure() { - public void apply(String result) { - if (result.equals("foo")) - latch.countDown(); - } + f.onSuccess(new Procedure() { + public void apply(String result) { + if (result.equals("foo")) + latch.countDown(); + } }); - cf.completeWithResult("foo"); + cf.success("foo"); assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); - assertEquals(f.get(), "foo"); + assertEquals(Await.result(f, timeout), "foo"); } @Test public void mustBeAbleToExecuteAnOnExceptionCallback() throws Throwable { final CountDownLatch latch = new CountDownLatch(1); - Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, system - .dispatcherFactory().defaultGlobalDispatcher()); + Promise cf = Futures.promise(system.dispatcher()); Future f = cf; - f.onException(new Procedure() { - public void apply(Throwable t) { - if (t instanceof NullPointerException) - latch.countDown(); - } + f.onFailure(new Procedure() { + public void apply(Throwable t) { + if (t instanceof NullPointerException) + latch.countDown(); + } }); 
Throwable exception = new NullPointerException(); - cf.completeWithException(exception); + cf.failure(exception); assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); - assertEquals(f.exception().get(), exception); - } - - @Test - public void mustBeAbleToExecuteAnOnTimeoutCallback() throws Throwable { - final CountDownLatch latch = new CountDownLatch(1); - Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, system - .dispatcherFactory().defaultGlobalDispatcher()); - Future f = cf; - f.onTimeout(new Procedure>() { - public void apply(Future future) { - latch.countDown(); - } - }); - - assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); - assertTrue(f.value().isEmpty()); + assertEquals(f.value().get().left().get(), exception); } @Test public void mustBeAbleToExecuteAnOnCompleteCallback() throws Throwable { final CountDownLatch latch = new CountDownLatch(1); - Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, system - .dispatcherFactory().defaultGlobalDispatcher()); + Promise cf = Futures.promise(system.dispatcher()); Future f = cf; - f.onComplete(new Procedure>() { - public void apply(akka.dispatch.Future future) { + f.onComplete(new Procedure2() { + public void apply(Throwable t, String r) { latch.countDown(); } }); - cf.completeWithResult("foo"); + cf.success("foo"); assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); - assertEquals(f.get(), "foo"); + assertEquals(Await.result(f, timeout), "foo"); } @Test public void mustBeAbleToForeachAFuture() throws Throwable { final CountDownLatch latch = new CountDownLatch(1); - Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, system - .dispatcherFactory().defaultGlobalDispatcher()); + Promise cf = Futures.promise(system.dispatcher()); Future f = cf; f.foreach(new Procedure() { public void apply(String future) { @@ -138,50 +116,47 @@ public class JavaFutureTests { } }); - cf.completeWithResult("foo"); + cf.success("foo"); 
assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); - assertEquals(f.get(), "foo"); + assertEquals(Await.result(f, timeout), "foo"); } @Test public void mustBeAbleToFlatMapAFuture() throws Throwable { final CountDownLatch latch = new CountDownLatch(1); - Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, system - .dispatcherFactory().defaultGlobalDispatcher()); - cf.completeWithResult("1000"); + Promise cf = Futures.promise(system.dispatcher()); + cf.success("1000"); Future f = cf; Future r = f.flatMap(new Function>() { public Future apply(String r) { latch.countDown(); - Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, system - .dispatcherFactory().defaultGlobalDispatcher()); - cf.completeWithResult(Integer.parseInt(r)); + Promise cf = Futures.promise(system.dispatcher()); + cf.success(Integer.parseInt(r)); return cf; } - }, t); + }); - assertEquals(f.get(), "1000"); - assertEquals(r.get().intValue(), 1000); + assertEquals(Await.result(f, timeout), "1000"); + assertEquals(Await.result(r, timeout).intValue(), 1000); assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); } @Test public void mustBeAbleToFilterAFuture() throws Throwable { final CountDownLatch latch = new CountDownLatch(1); - Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, system - .dispatcherFactory().defaultGlobalDispatcher()); + Promise cf = Futures.promise(system.dispatcher()); Future f = cf; Future r = f.filter(new Function() { public Boolean apply(String r) { latch.countDown(); return r.equals("foo"); } - }, t); + }); - cf.completeWithResult("foo"); + cf.success("foo"); assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); - assertEquals(f.get(), "foo"); - assertEquals(r.get(), "foo"); + assertEquals(Await.result(f, timeout), "foo"); + assertEquals(Await.result(r, timeout), "foo"); } // TODO: Improve this test, perhaps with an Actor @@ -192,16 +167,16 @@ public class JavaFutureTests { for (int i = 0; i < 10; i++) { 
listExpected.add("test"); - listFutures.add(ff.future(new Callable() { + listFutures.add(Futures.future(new Callable() { public String call() { return "test"; } - })); + }, system.dispatcher())); } - Future> futureList = ff.sequence(listFutures, t); + Future> futureList = Futures.sequence(listFutures, system.dispatcher()); - assertEquals(futureList.get(), listExpected); + assertEquals(Await.result(futureList, timeout), listExpected); } // TODO: Improve this test, perhaps with an Actor @@ -212,20 +187,20 @@ public class JavaFutureTests { for (int i = 0; i < 10; i++) { expected.append("test"); - listFutures.add(ff.future(new Callable() { + listFutures.add(Futures.future(new Callable() { public String call() { return "test"; } - })); + }, system.dispatcher())); } - Future result = ff.fold("", 15000, listFutures, new Function2() { + Future result = Futures.fold("", listFutures, new Function2() { public String apply(String r, String t) { return r + t; } - }); + }, system.dispatcher()); - assertEquals(result.get(), expected.toString()); + assertEquals(Await.result(result, timeout), expected.toString()); } @Test @@ -235,20 +210,20 @@ public class JavaFutureTests { for (int i = 0; i < 10; i++) { expected.append("test"); - listFutures.add(ff.future(new Callable() { + listFutures.add(Futures.future(new Callable() { public String call() { return "test"; } - })); + }, system.dispatcher())); } - Future result = ff.reduce(listFutures, 15000, new Function2() { + Future result = Futures.reduce(listFutures, new Function2() { public String apply(String r, String t) { return r + t; } - }); + }, system.dispatcher()); - assertEquals(result.get(), expected.toString()); + assertEquals(Await.result(result, timeout), expected.toString()); } @Test @@ -261,17 +236,17 @@ public class JavaFutureTests { listStrings.add("test"); } - Future> result = ff.traverse(listStrings, t, new Function>() { + Future> result = Futures.traverse(listStrings, new Function>() { public Future apply(final String r) 
{ - return ff.future(new Callable() { + return Futures.future(new Callable() { public String call() { return r.toUpperCase(); } - }); + }, system.dispatcher()); } - }); + }, system.dispatcher()); - assertEquals(result.get(), expectedStrings); + assertEquals(Await.result(result, timeout), expectedStrings); } @Test @@ -279,20 +254,28 @@ public class JavaFutureTests { LinkedList> listFutures = new LinkedList>(); for (int i = 0; i < 10; i++) { final Integer fi = i; - listFutures.add(ff.future(new Callable() { + listFutures.add(Futures.future(new Callable() { public Integer call() { return fi; } - })); + }, system.dispatcher())); } final Integer expect = 5; - Future> f = ff.find(listFutures, new Function() { + Future> f = Futures.find(listFutures, new Function() { public Boolean apply(Integer i) { return i == 5; } - }, t); + }, system.dispatcher()); - final Integer got = f.get().get(); - assertEquals(expect, got); + assertEquals(expect, Await.result(f, timeout)); + } + + @Test + public void BlockMustBeCallable() { + Promise p = Futures.promise(system.dispatcher()); + Duration d = Duration.create(1, TimeUnit.SECONDS); + p.success("foo"); + Await.ready(p, d); + assertEquals(Await.result(p, d), "foo"); } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorFireForgetRequestReplySpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorFireForgetRequestReplySpec.scala index aa7d76d3dc..c059497259 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorFireForgetRequestReplySpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorFireForgetRequestReplySpec.scala @@ -7,7 +7,7 @@ package akka.actor import akka.testkit._ import org.scalatest.BeforeAndAfterEach import akka.util.duration._ -import akka.dispatch.Dispatchers +import akka.dispatch.Await object ActorFireForgetRequestReplySpec { @@ -81,13 +81,13 @@ class ActorFireForgetRequestReplySpec extends AkkaSpec with BeforeAndAfterEach w "should shutdown crashed temporary actor" in { 
filterEvents(EventFilter[Exception]("Expected exception")) { val supervisor = system.actorOf(Props[Supervisor].withFaultHandler(OneForOneStrategy(List(classOf[Exception]), Some(0)))) - val actor = (supervisor ? Props[CrashingActor]).as[ActorRef].get + val actor = Await.result((supervisor ? Props[CrashingActor]).mapTo[ActorRef], timeout.duration) actor.isTerminated must be(false) actor ! "Die" state.finished.await 1.second.dilated.sleep() actor.isTerminated must be(true) - supervisor.stop() + system.stop(supervisor) } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorLifeCycleSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorLifeCycleSpec.scala index 8f3a58e5e5..b203ff256f 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorLifeCycleSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorLifeCycleSpec.scala @@ -11,6 +11,7 @@ import akka.actor.Actor._ import akka.testkit._ import akka.util.duration._ import java.util.concurrent.atomic._ +import akka.dispatch.Await object ActorLifeCycleSpec { @@ -40,7 +41,7 @@ class ActorLifeCycleSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitS override def preRestart(reason: Throwable, message: Option[Any]) { report("preRestart") } override def postRestart(reason: Throwable) { report("postRestart") } }) - val restarter = (supervisor ? restarterProps).as[ActorRef].get + val restarter = Await.result((supervisor ? restarterProps).mapTo[ActorRef], timeout.duration) expectMsg(("preStart", id, 0)) restarter ! Kill @@ -61,7 +62,7 @@ class ActorLifeCycleSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitS restarter ! 
Kill expectMsg(("postStop", id, 3)) expectNoMsg(1 seconds) - supervisor.stop + system.stop(supervisor) } } @@ -71,7 +72,7 @@ class ActorLifeCycleSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitS val supervisor = system.actorOf(Props[Supervisor].withFaultHandler(OneForOneStrategy(List(classOf[Exception]), Some(3)))) val gen = new AtomicInteger(0) val restarterProps = Props(new LifeCycleTestActor(testActor, id, gen)) - val restarter = (supervisor ? restarterProps).as[ActorRef].get + val restarter = Await.result((supervisor ? restarterProps).mapTo[ActorRef], timeout.duration) expectMsg(("preStart", id, 0)) restarter ! Kill @@ -92,7 +93,7 @@ class ActorLifeCycleSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitS restarter ! Kill expectMsg(("postStop", id, 3)) expectNoMsg(1 seconds) - supervisor.stop + system.stop(supervisor) } } @@ -101,14 +102,14 @@ class ActorLifeCycleSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitS val supervisor = system.actorOf(Props[Supervisor].withFaultHandler(OneForOneStrategy(List(classOf[Exception]), Some(3)))) val gen = new AtomicInteger(0) val props = Props(new LifeCycleTestActor(testActor, id, gen)) - val a = (supervisor ? props).as[ActorRef].get + val a = Await.result((supervisor ? props).mapTo[ActorRef], timeout.duration) expectMsg(("preStart", id, 0)) a ! 
"status" expectMsg(("OK", id, 0)) - a.stop + system.stop(a) expectMsg(("postStop", id, 0)) expectNoMsg(1 seconds) - supervisor.stop + system.stop(supervisor) } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorLookupSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorLookupSpec.scala index e2ec943789..6ef64df1b2 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorLookupSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorLookupSpec.scala @@ -5,6 +5,7 @@ package akka.actor import akka.testkit._ import akka.util.duration._ +import akka.dispatch.Await object ActorLookupSpec { @@ -36,7 +37,7 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout { val c1 = system.actorOf(p, "c1") val c2 = system.actorOf(p, "c2") - val c21 = (c2 ? Create("c21")).as[ActorRef].get + val c21 = Await.result((c2 ? Create("c21")).mapTo[ActorRef], timeout.duration) val user = system.asInstanceOf[ActorSystemImpl].guardian val syst = system.asInstanceOf[ActorSystemImpl].systemGuardian @@ -122,7 +123,7 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout { f.isCompleted must be === false a ! 42 f.isCompleted must be === true - f.get must be === 42 + Await.result(f, timeout.duration) must be === 42 // clean-up is run as onComplete callback, i.e. dispatched on another thread awaitCond(system.actorFor(a.path) == system.deadLetters, 1 second) } @@ -135,7 +136,7 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout { "find actors by looking up their path" in { def check(looker: ActorRef, pathOf: ActorRef, result: ActorRef) { - (looker ? LookupPath(pathOf.path)).get must be === result + Await.result(looker ? LookupPath(pathOf.path), timeout.duration) must be === result } for { looker ← all @@ -145,8 +146,8 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout { "find actors by looking up their string representation" in { def check(looker: ActorRef, pathOf: ActorRef, result: ActorRef) { - (looker ? 
LookupString(pathOf.path.toString)).get must be === result - (looker ? LookupString(pathOf.path.toString + "/")).get must be === result + Await.result(looker ? LookupString(pathOf.path.toString), timeout.duration) must be === result + Await.result(looker ? LookupString(pathOf.path.toString + "/"), timeout.duration) must be === result } for { looker ← all @@ -156,8 +157,8 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout { "find actors by looking up their root-anchored relative path" in { def check(looker: ActorRef, pathOf: ActorRef, result: ActorRef) { - (looker ? LookupString(pathOf.path.elements.mkString("/", "/", ""))).get must be === result - (looker ? LookupString(pathOf.path.elements.mkString("/", "/", "/"))).get must be === result + Await.result(looker ? LookupString(pathOf.path.elements.mkString("/", "/", "")), timeout.duration) must be === result + Await.result(looker ? LookupString(pathOf.path.elements.mkString("/", "/", "/")), timeout.duration) must be === result } for { looker ← all @@ -167,9 +168,9 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout { "find actors by looking up their relative path" in { def check(looker: ActorRef, result: ActorRef, elems: String*) { - (looker ? LookupElems(elems)).get must be === result - (looker ? LookupString(elems mkString "/")).get must be === result - (looker ? LookupString(elems mkString ("", "/", "/"))).get must be === result + Await.result(looker ? LookupElems(elems), timeout.duration) must be === result + Await.result(looker ? LookupString(elems mkString "/"), timeout.duration) must be === result + Await.result(looker ? LookupString(elems mkString ("", "/", "/")), timeout.duration) must be === result } check(c1, user, "..") for { @@ -184,11 +185,11 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout { "find system-generated actors" in { def check(target: ActorRef) { for (looker ← all) { - (looker ? LookupPath(target.path)).get must be === target - (looker ? 
LookupString(target.path.toString)).get must be === target - (looker ? LookupString(target.path.toString + "/")).get must be === target - (looker ? LookupString(target.path.elements.mkString("/", "/", ""))).get must be === target - if (target != root) (looker ? LookupString(target.path.elements.mkString("/", "/", "/"))).get must be === target + Await.result(looker ? LookupPath(target.path), timeout.duration) must be === target + Await.result(looker ? LookupString(target.path.toString), timeout.duration) must be === target + Await.result(looker ? LookupString(target.path.toString + "/"), timeout.duration) must be === target + Await.result(looker ? LookupString(target.path.elements.mkString("/", "/", "")), timeout.duration) must be === target + if (target != root) Await.result(looker ? LookupString(target.path.elements.mkString("/", "/", "/")), timeout.duration) must be === target } } for (target ← Seq(root, syst, user, system.deadLetters)) check(target) @@ -198,7 +199,7 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout { import scala.collection.JavaConverters._ def checkOne(looker: ActorRef, query: Query) { - (looker ? query).get must be === system.deadLetters + Await.result(looker ? query, timeout.duration) must be === system.deadLetters } def check(looker: ActorRef) { Seq(LookupString("a/b/c"), @@ -217,21 +218,21 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout { val f = c1 ? GetSender(testActor) val a = expectMsgType[ActorRef] a.path.elements.head must be === "temp" - (c2 ? LookupPath(a.path)).get must be === a - (c2 ? LookupString(a.path.toString)).get must be === a - (c2 ? LookupString(a.path.elements.mkString("/", "/", ""))).get must be === a - (c2 ? LookupString("../../" + a.path.elements.mkString("/"))).get must be === a - (c2 ? LookupString(a.path.toString + "/")).get must be === a - (c2 ? LookupString(a.path.elements.mkString("/", "/", "") + "/")).get must be === a - (c2 ? 
LookupString("../../" + a.path.elements.mkString("/") + "/")).get must be === a - (c2 ? LookupElems(Seq("..", "..") ++ a.path.elements)).get must be === a - (c2 ? LookupElems(Seq("..", "..") ++ a.path.elements :+ "")).get must be === a + Await.result(c2 ? LookupPath(a.path), timeout.duration) must be === a + Await.result(c2 ? LookupString(a.path.toString), timeout.duration) must be === a + Await.result(c2 ? LookupString(a.path.elements.mkString("/", "/", "")), timeout.duration) must be === a + Await.result(c2 ? LookupString("../../" + a.path.elements.mkString("/")), timeout.duration) must be === a + Await.result(c2 ? LookupString(a.path.toString + "/"), timeout.duration) must be === a + Await.result(c2 ? LookupString(a.path.elements.mkString("/", "/", "") + "/"), timeout.duration) must be === a + Await.result(c2 ? LookupString("../../" + a.path.elements.mkString("/") + "/"), timeout.duration) must be === a + Await.result(c2 ? LookupElems(Seq("..", "..") ++ a.path.elements), timeout.duration) must be === a + Await.result(c2 ? LookupElems(Seq("..", "..") ++ a.path.elements :+ ""), timeout.duration) must be === a f.isCompleted must be === false a ! 42 f.isCompleted must be === true - f.get must be === 42 + Await.result(f, timeout.duration) must be === 42 // clean-up is run as onComplete callback, i.e. dispatched on another thread - awaitCond((c2 ? LookupPath(a.path)).get == system.deadLetters, 1 second) + awaitCond(Await.result(c2 ? 
LookupPath(a.path), timeout.duration) == system.deadLetters, 1 second) } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala index f1cca42011..248b665b63 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala @@ -11,9 +11,9 @@ import akka.testkit._ import akka.util.duration._ import java.lang.IllegalStateException import akka.util.ReflectiveAccess -import akka.dispatch.{ DefaultPromise, Promise, Future } import akka.serialization.Serialization import java.util.concurrent.{ CountDownLatch, TimeUnit } +import akka.dispatch.{ Await, DefaultPromise, Promise, Future } object ActorRefSpec { @@ -42,7 +42,7 @@ object ActorRefSpec { case "work" ⇒ { work sender ! "workDone" - self.stop() + context.stop(self) } case ReplyTo(replyTo) ⇒ { work @@ -117,18 +117,18 @@ class ActorRefSpec extends AkkaSpec with DefaultTimeout { def promiseIntercept(f: ⇒ Actor)(to: Promise[Actor]): Actor = try { val r = f - to.completeWithResult(r) + to.success(r) r } catch { case e ⇒ - to.completeWithException(e) + to.failure(e) throw e } def wrap[T](f: Promise[Actor] ⇒ T): T = { - val result = new DefaultPromise[Actor](10 * 60 * 1000) + val result = Promise[Actor]() val r = f(result) - result.get + Await.result(result, 1 minute) r } @@ -306,7 +306,7 @@ class ActorRefSpec extends AkkaSpec with DefaultTimeout { def receive = { case _ ⇒ sender ! nested } })) - val nested = (a ? "any").as[ActorRef].get + val nested = Await.result((a ? "any").mapTo[ActorRef], timeout.duration) a must not be null nested must not be null (a ne nested) must be === true @@ -314,13 +314,13 @@ class ActorRefSpec extends AkkaSpec with DefaultTimeout { "support advanced nested actorOfs" in { val a = system.actorOf(Props(new OuterActor(system.actorOf(Props(new InnerActor))))) - val inner = (a ? "innerself").as[Any].get + val inner = Await.result(a ? 
"innerself", timeout.duration) - (a ? a).as[ActorRef].get must be(a) - (a ? "self").as[ActorRef].get must be(a) + Await.result(a ? a, timeout.duration) must be(a) + Await.result(a ? "self", timeout.duration) must be(a) inner must not be a - (a ? "msg").as[String] must be === Some("msg") + Await.result(a ? "msg", timeout.duration) must be === "msg" } "support reply via sender" in { @@ -344,8 +344,8 @@ class ActorRefSpec extends AkkaSpec with DefaultTimeout { latch.await - clientRef.stop() - serverRef.stop() + system.stop(clientRef) + system.stop(serverRef) } "stop when sent a poison pill" in { @@ -361,8 +361,8 @@ class ActorRefSpec extends AkkaSpec with DefaultTimeout { val fnull = (ref ? (null, timeout)).mapTo[String] ref ! PoisonPill - ffive.get must be("five") - fnull.get must be("null") + Await.result(ffive, timeout.duration) must be("five") + Await.result(fnull, timeout.duration) must be("null") awaitCond(ref.isTerminated, 2000 millis) } diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorTimeoutSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorTimeoutSpec.scala index 735867bc97..4df3b0d5e9 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorTimeoutSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorTimeoutSpec.scala @@ -4,10 +4,11 @@ package akka.actor import org.scalatest.BeforeAndAfterAll -import akka.dispatch.FutureTimeoutException import akka.util.duration._ import akka.testkit.AkkaSpec import akka.testkit.DefaultTimeout +import java.util.concurrent.TimeoutException +import akka.dispatch.Await @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class ActorTimeoutSpec extends AkkaSpec with BeforeAndAfterAll with DefaultTimeout { @@ -28,8 +29,8 @@ class ActorTimeoutSpec extends AkkaSpec with BeforeAndAfterAll with DefaultTimeo val echo = actorWithTimeout(Timeout(12)) try { val f = echo ? 
"hallo" - intercept[FutureTimeoutException] { f.await } - } finally { echo.stop } + intercept[TimeoutException] { Await.ready(f, system.settings.ActorTimeout.duration) } + } finally { system.stop(echo) } } } @@ -39,16 +40,20 @@ class ActorTimeoutSpec extends AkkaSpec with BeforeAndAfterAll with DefaultTimeo val echo = actorWithTimeout(Props.defaultTimeout) try { val f = (echo ? "hallo").mapTo[String] - intercept[FutureTimeoutException] { f.await } + intercept[TimeoutException] { Await.ready(f, timeout.duration) } f.value must be(None) - } finally { echo.stop } + } finally { system.stop(echo) } } } "use explicitly supplied timeout" in { within(testTimeout - 100.millis, testTimeout + 300.millis) { val echo = actorWithTimeout(Props.defaultTimeout) - try { (echo.?("hallo", testTimeout)).as[String] must be(None) } finally { echo.stop } + val f = echo.?("hallo", testTimeout) + try { + intercept[TimeoutException] { Await.ready(f, testTimeout) } + f.value must be === None + } finally { system.stop(echo) } } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/Bench.scala b/akka-actor-tests/src/test/scala/akka/actor/Bench.scala index 4ef5a94b12..2432cc113d 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/Bench.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/Bench.scala @@ -85,7 +85,7 @@ object Chameneos { sumMeetings += i if (numFaded == numChameneos) { Chameneos.end = System.currentTimeMillis - self.stop() + context.stop(self) } case msg @ Meet(a, c) ⇒ @@ -107,10 +107,11 @@ object Chameneos { def run { // System.setProperty("akka.config", "akka.conf") Chameneos.start = System.currentTimeMillis - val system = ActorSystem().actorOf(Props(new Mall(1000000, 4))) + val system = ActorSystem() + val actor = system.actorOf(Props(new Mall(1000000, 4))) Thread.sleep(10000) println("Elapsed: " + (end - start)) - system.stop() + system.shutdown() } def main(args: Array[String]): Unit = run diff --git 
a/akka-actor-tests/src/test/scala/akka/actor/ConsistencySpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ConsistencySpec.scala index 1118daff1c..1638cd9e4b 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ConsistencySpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ConsistencySpec.scala @@ -26,7 +26,7 @@ object ConsistencySpec { } lastStep = step - case "done" ⇒ sender ! "done"; self.stop() + case "done" ⇒ sender ! "done"; context.stop(self) } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/DeathWatchSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/DeathWatchSpec.scala index 9aba8979c1..5abf768c22 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/DeathWatchSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/DeathWatchSpec.scala @@ -8,6 +8,7 @@ import org.scalatest.BeforeAndAfterEach import akka.testkit._ import akka.util.duration._ import java.util.concurrent.atomic._ +import akka.dispatch.Await @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class DeathWatchSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitSender with DefaultTimeout { @@ -43,9 +44,9 @@ class DeathWatchSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitSende expectTerminationOf(terminal) expectTerminationOf(terminal) - monitor1.stop() - monitor2.stop() - monitor3.stop() + system.stop(monitor1) + system.stop(monitor2) + system.stop(monitor3) } "notify with _current_ monitors with one Terminated message when an Actor is stopped" in { @@ -69,28 +70,28 @@ class DeathWatchSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitSende expectTerminationOf(terminal) expectTerminationOf(terminal) - monitor1.stop() - monitor2.stop() - monitor3.stop() + system.stop(monitor1) + system.stop(monitor2) + system.stop(monitor3) } "notify with a Terminated message once when an Actor is stopped but not when restarted" in { filterException[ActorKilledException] { val supervisor = 
system.actorOf(Props[Supervisor].withFaultHandler(OneForOneStrategy(List(classOf[Exception]), Some(2)))) val terminalProps = Props(context ⇒ { case x ⇒ context.sender ! x }) - val terminal = (supervisor ? terminalProps).as[ActorRef].get + val terminal = Await.result((supervisor ? terminalProps).mapTo[ActorRef], timeout.duration) val monitor = startWatching(terminal) terminal ! Kill terminal ! Kill - (terminal ? "foo").as[String] must be === Some("foo") + Await.result(terminal ? "foo", timeout.duration) must be === "foo" terminal ! Kill expectTerminationOf(terminal) terminal.isTerminated must be === true - supervisor.stop() + system.stop(supervisor) } } @@ -99,17 +100,17 @@ class DeathWatchSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitSende case class FF(fail: Failed) val supervisor = system.actorOf(Props[Supervisor] .withFaultHandler(new OneForOneStrategy(FaultHandlingStrategy.makeDecider(List(classOf[Exception])), Some(0)) { - override def handleFailure(child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[ChildRestartStats]) = { + override def handleFailure(context: ActorContext, child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[ChildRestartStats]) = { testActor.tell(FF(Failed(cause)), child) - super.handleFailure(child, cause, stats, children) + super.handleFailure(context, child, cause, stats, children) } })) - val failed = (supervisor ? Props.empty).as[ActorRef].get - val brother = (supervisor ? Props(new Actor { + val failed = Await.result((supervisor ? Props.empty).mapTo[ActorRef], timeout.duration) + val brother = Await.result((supervisor ? 
Props(new Actor { context.watch(failed) def receive = Actor.emptyBehavior - })).as[ActorRef].get + })).mapTo[ActorRef], timeout.duration) startWatching(brother) diff --git a/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala index e38ea1c3d4..83837012aa 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala @@ -95,7 +95,7 @@ class DeployerSpec extends AkkaSpec(DeployerSpec.deployerConf) { } """, ConfigParseOptions.defaults).withFallback(AkkaSpec.testConf) - ActorSystem("invalid", invalidDeployerConf).stop() + ActorSystem("invalid", invalidDeployerConf).shutdown() } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala index e4a30e10e0..a856c045c1 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala @@ -187,7 +187,7 @@ class FSMActorSpec extends AkkaSpec(Map("akka.actor.debug.fsm" -> true)) with Im } val ref = system.actorOf(Props(fsm)) started.await - ref.stop() + system.stop(ref) expectMsg(1 second, fsm.StopEvent(Shutdown, 1, null)) } @@ -233,7 +233,7 @@ class FSMActorSpec extends AkkaSpec(Map("akka.actor.debug.fsm" -> true)) with Im } } } finally { - fsmEventSystem.stop() + fsmEventSystem.shutdown() } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/FSMTransitionSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/FSMTransitionSpec.scala index 1b1f90e5b3..9db408770c 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/FSMTransitionSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/FSMTransitionSpec.scala @@ -78,7 +78,7 @@ class FSMTransitionSpec extends AkkaSpec with ImplicitSender { within(300 millis) { fsm ! 
SubscribeTransitionCallBack(forward) expectMsg(CurrentState(fsm, 0)) - forward.stop() + system.stop(forward) fsm ! "tick" expectNoMsg } diff --git a/akka-actor-tests/src/test/scala/akka/actor/ForwardActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ForwardActorSpec.scala index aa2cd4c9ff..12b0c796f6 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ForwardActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ForwardActorSpec.scala @@ -8,6 +8,7 @@ import akka.testkit._ import akka.util.duration._ import Actor._ import akka.util.Duration +import akka.dispatch.Await object ForwardActorSpec { val ExpectedMessage = "FOO" @@ -32,20 +33,21 @@ class ForwardActorSpec extends AkkaSpec { "A Forward Actor" must { - "forward actor reference when invoking forward on bang" in { + "forward actor reference when invoking forward on tell" in { val latch = new TestLatch(1) - val replyTo = system.actorOf(Props(new Actor { def receive = { case ExpectedMessage ⇒ latch.countDown() } })) + val replyTo = system.actorOf(Props(new Actor { def receive = { case ExpectedMessage ⇒ testActor ! ExpectedMessage } })) val chain = createForwardingChain(system) chain.tell(ExpectedMessage, replyTo) - latch.await(Duration(5, "s")) must be === true + expectMsg(5 seconds, ExpectedMessage) } - "forward actor reference when invoking forward on bang bang" in { + "forward actor reference when invoking forward on ask" in { val chain = createForwardingChain(system) - chain.ask(ExpectedMessage, 5000).get must be === ExpectedMessage + chain.ask(ExpectedMessage, 5000) onSuccess { case ExpectedMessage ⇒ testActor ! 
ExpectedMessage } + expectMsg(5 seconds, ExpectedMessage) } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/IOActor.scala b/akka-actor-tests/src/test/scala/akka/actor/IOActor.scala index f2127d92bc..9b1ba99459 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/IOActor.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/IOActor.scala @@ -8,9 +8,9 @@ import org.scalatest.BeforeAndAfterEach import akka.util.ByteString import akka.util.cps._ -import akka.dispatch.Future import scala.util.continuations._ import akka.testkit._ +import akka.dispatch.{ Await, Future } object IOActorSpec { import IO._ @@ -193,12 +193,12 @@ class IOActorSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout { val f1 = client ? ByteString("Hello World!1") val f2 = client ? ByteString("Hello World!2") val f3 = client ? ByteString("Hello World!3") - f1.get must equal(ByteString("Hello World!1")) - f2.get must equal(ByteString("Hello World!2")) - f3.get must equal(ByteString("Hello World!3")) - client.stop - server.stop - ioManager.stop + Await.result(f1, timeout.duration) must equal(ByteString("Hello World!1")) + Await.result(f2, timeout.duration) must equal(ByteString("Hello World!2")) + Await.result(f3, timeout.duration) must equal(ByteString("Hello World!3")) + system.stop(client) + system.stop(server) + system.stop(ioManager) } "run echo server under high load" in { @@ -209,10 +209,10 @@ class IOActorSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout { val client = system.actorOf(Props(new SimpleEchoClient("localhost", 8065, ioManager))) val list = List.range(0, 1000) val f = Future.traverse(list)(i ⇒ client ? 
ByteString(i.toString)) - assert(f.get.size === 1000) - client.stop - server.stop - ioManager.stop + assert(Await.result(f, timeout.duration).size === 1000) + system.stop(client) + system.stop(server) + system.stop(ioManager) } "run echo server under high load with small buffer" in { @@ -223,10 +223,10 @@ class IOActorSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout { val client = system.actorOf(Props(new SimpleEchoClient("localhost", 8066, ioManager))) val list = List.range(0, 1000) val f = Future.traverse(list)(i ⇒ client ? ByteString(i.toString)) - assert(f.get.size === 1000) - client.stop - server.stop - ioManager.stop + assert(Await.result(f, timeout.duration).size === 1000) + system.stop(client) + system.stop(server) + system.stop(ioManager) } "run key-value store" in { @@ -239,21 +239,21 @@ class IOActorSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout { val f1 = client1 ? (('set, "hello", ByteString("World"))) val f2 = client1 ? (('set, "test", ByteString("No one will read me"))) val f3 = client1 ? (('get, "hello")) - f2.await + Await.ready(f2, timeout.duration) val f4 = client2 ? (('set, "test", ByteString("I'm a test!"))) - f4.await + Await.ready(f4, timeout.duration) val f5 = client1 ? (('get, "test")) val f6 = client2 ? 
'getall - f1.get must equal("OK") - f2.get must equal("OK") - f3.get must equal(ByteString("World")) - f4.get must equal("OK") - f5.get must equal(ByteString("I'm a test!")) - f6.get must equal(Map("hello" -> ByteString("World"), "test" -> ByteString("I'm a test!"))) - client1.stop - client2.stop - server.stop - ioManager.stop + Await.result(f1, timeout.duration) must equal("OK") + Await.result(f2, timeout.duration) must equal("OK") + Await.result(f3, timeout.duration) must equal(ByteString("World")) + Await.result(f4, timeout.duration) must equal("OK") + Await.result(f5, timeout.duration) must equal(ByteString("I'm a test!")) + Await.result(f6, timeout.duration) must equal(Map("hello" -> ByteString("World"), "test" -> ByteString("I'm a test!"))) + system.stop(client1) + system.stop(client2) + system.stop(server) + system.stop(ioManager) } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala index ad92865124..bb3bf4c58e 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala @@ -6,7 +6,7 @@ package akka.actor import akka.testkit._ import akka.util.duration._ -import akka.dispatch.Future +import akka.dispatch.{ Await, Future } @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class LocalActorRefProviderSpec extends AkkaSpec { @@ -32,7 +32,7 @@ class LocalActorRefProviderSpec extends AkkaSpec { val address = "new-actor" + i implicit val timeout = Timeout(5 seconds) val actors = for (j ← 1 to 4) yield Future(system.actorOf(Props(c ⇒ { case _ ⇒ }), address)) - val set = Set() ++ actors.map(_.await.value match { + val set = Set() ++ actors.map(a ⇒ Await.ready(a, timeout.duration).value match { case Some(Right(a: ActorRef)) ⇒ 1 case Some(Left(ex: InvalidActorNameException)) ⇒ 2 case x ⇒ x diff --git 
a/akka-actor-tests/src/test/scala/akka/actor/ReceiveTimeoutSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ReceiveTimeoutSpec.scala index 02b5aab8c1..9706a77d9f 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ReceiveTimeoutSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ReceiveTimeoutSpec.scala @@ -26,7 +26,7 @@ class ReceiveTimeoutSpec extends AkkaSpec { })) timeoutLatch.await - timeoutActor.stop() + system.stop(timeoutActor) } "reschedule timeout after regular receive" in { @@ -45,7 +45,7 @@ class ReceiveTimeoutSpec extends AkkaSpec { timeoutActor ! Tick timeoutLatch.await - timeoutActor.stop() + system.stop(timeoutActor) } "be able to turn off timeout if desired" in { @@ -69,7 +69,7 @@ class ReceiveTimeoutSpec extends AkkaSpec { timeoutLatch.await count.get must be(1) - timeoutActor.stop() + system.stop(timeoutActor) } "not receive timeout message when not specified" in { @@ -82,7 +82,7 @@ class ReceiveTimeoutSpec extends AkkaSpec { })) timeoutLatch.awaitTimeout(1 second) // timeout expected - timeoutActor.stop() + system.stop(timeoutActor) } "have ReceiveTimeout eq to Actors ReceiveTimeout" in { diff --git a/akka-actor-tests/src/test/scala/akka/actor/RestartStrategySpec.scala b/akka-actor-tests/src/test/scala/akka/actor/RestartStrategySpec.scala index f3f70a09d7..789bf12e68 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/RestartStrategySpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/RestartStrategySpec.scala @@ -6,12 +6,14 @@ package akka.actor import java.lang.Thread.sleep import org.scalatest.BeforeAndAfterAll +import akka.dispatch.Await import akka.testkit.TestEvent._ import akka.testkit.EventFilter import java.util.concurrent.{ TimeUnit, CountDownLatch } -import org.multiverse.api.latches.StandardLatch import akka.testkit.AkkaSpec import akka.testkit.DefaultTimeout +import akka.testkit.TestLatch +import akka.util.duration._ @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class 
RestartStrategySpec extends AkkaSpec with DefaultTimeout { @@ -28,10 +30,10 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { "ensure that slave stays dead after max restarts within time range" in { val boss = system.actorOf(Props[Supervisor].withFaultHandler(OneForOneStrategy(List(classOf[Throwable]), 2, 1000))) - val restartLatch = new StandardLatch - val secondRestartLatch = new StandardLatch + val restartLatch = new TestLatch + val secondRestartLatch = new TestLatch val countDownLatch = new CountDownLatch(3) - val stopLatch = new StandardLatch + val stopLatch = new TestLatch val slaveProps = Props(new Actor { @@ -42,33 +44,33 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { override def postRestart(reason: Throwable) = { if (!restartLatch.isOpen) - restartLatch.open + restartLatch.open() else - secondRestartLatch.open + secondRestartLatch.open() } override def postStop() = { - stopLatch.open + stopLatch.open() } }) - val slave = (boss ? slaveProps).as[ActorRef].get + val slave = Await.result((boss ? slaveProps).mapTo[ActorRef], timeout.duration) slave ! Ping slave ! Crash slave ! Ping // test restart and post restart ping - assert(restartLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(restartLatch.await(10 seconds)) // now crash again... should not restart slave ! Crash slave ! Ping - assert(secondRestartLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(secondRestartLatch.await(10 seconds)) assert(countDownLatch.await(10, TimeUnit.SECONDS)) slave ! Crash - assert(stopLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(stopLatch.await(10 seconds)) } "ensure that slave is immortal without max restarts and time range" in { @@ -86,7 +88,7 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { countDownLatch.countDown() } }) - val slave = (boss ? slaveProps).as[ActorRef].get + val slave = Await.result((boss ? slaveProps).mapTo[ActorRef], timeout.duration) (1 to 100) foreach { _ ⇒ slave ! 
Crash } assert(countDownLatch.await(120, TimeUnit.SECONDS)) @@ -96,11 +98,11 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { "ensure that slave restarts after number of crashes not within time range" in { val boss = system.actorOf(Props[Supervisor].withFaultHandler(OneForOneStrategy(List(classOf[Throwable]), 2, 500))) - val restartLatch = new StandardLatch - val secondRestartLatch = new StandardLatch - val thirdRestartLatch = new StandardLatch - val pingLatch = new StandardLatch - val secondPingLatch = new StandardLatch + val restartLatch = new TestLatch + val secondRestartLatch = new TestLatch + val thirdRestartLatch = new TestLatch + val pingLatch = new TestLatch + val secondPingLatch = new TestLatch val slaveProps = Props(new Actor { @@ -111,32 +113,32 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { } override def postRestart(reason: Throwable) = { if (!restartLatch.isOpen) - restartLatch.open + restartLatch.open() else if (!secondRestartLatch.isOpen) - secondRestartLatch.open + secondRestartLatch.open() else - thirdRestartLatch.open + thirdRestartLatch.open() } override def postStop() = { if (restartLatch.isOpen) { - secondRestartLatch.open + secondRestartLatch.open() } } }) - val slave = (boss ? slaveProps).as[ActorRef].get + val slave = Await.result((boss ? slaveProps).mapTo[ActorRef], timeout.duration) slave ! Ping slave ! Crash - assert(restartLatch.tryAwait(10, TimeUnit.SECONDS)) - assert(pingLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(restartLatch.await(10 seconds)) + assert(pingLatch.await(10 seconds)) slave ! Ping slave ! Crash - assert(secondRestartLatch.tryAwait(10, TimeUnit.SECONDS)) - assert(secondPingLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(secondRestartLatch.await(10 seconds)) + assert(secondPingLatch.await(10 seconds)) // sleep to go out of the restart strategy's time range sleep(700L) @@ -145,7 +147,7 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { slave ! Crash slave ! 
Ping - assert(thirdRestartLatch.tryAwait(1, TimeUnit.SECONDS)) + assert(thirdRestartLatch.await(1 second)) assert(!slave.isTerminated) } @@ -153,10 +155,10 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { "ensure that slave is not restarted after max retries" in { val boss = system.actorOf(Props[Supervisor].withFaultHandler(OneForOneStrategy(List(classOf[Throwable]), Some(2), None))) - val restartLatch = new StandardLatch - val secondRestartLatch = new StandardLatch + val restartLatch = new TestLatch + val secondRestartLatch = new TestLatch val countDownLatch = new CountDownLatch(3) - val stopLatch = new StandardLatch + val stopLatch = new TestLatch val slaveProps = Props(new Actor { @@ -166,23 +168,23 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { } override def postRestart(reason: Throwable) = { if (!restartLatch.isOpen) - restartLatch.open + restartLatch.open() else - secondRestartLatch.open + secondRestartLatch.open() } override def postStop() = { - stopLatch.open + stopLatch.open() } }) - val slave = (boss ? slaveProps).as[ActorRef].get + val slave = Await.result((boss ? slaveProps).mapTo[ActorRef], timeout.duration) slave ! Ping slave ! Crash slave ! Ping // test restart and post restart ping - assert(restartLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(restartLatch.await(10 seconds)) assert(!slave.isTerminated) @@ -190,25 +192,25 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { slave ! Crash slave ! Ping - assert(secondRestartLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(secondRestartLatch.await(10 seconds)) assert(countDownLatch.await(10, TimeUnit.SECONDS)) sleep(700L) slave ! 
Crash - assert(stopLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(stopLatch.await(10 seconds)) sleep(500L) assert(slave.isTerminated) } "ensure that slave is not restarted within time range" in { - val restartLatch, stopLatch, maxNoOfRestartsLatch = new StandardLatch + val restartLatch, stopLatch, maxNoOfRestartsLatch = new TestLatch val countDownLatch = new CountDownLatch(2) val boss = system.actorOf(Props(new Actor { def receive = { case p: Props ⇒ sender ! context.watch(context.actorOf(p)) - case t: Terminated ⇒ maxNoOfRestartsLatch.open + case t: Terminated ⇒ maxNoOfRestartsLatch.open() } }).withFaultHandler(OneForOneStrategy(List(classOf[Throwable]), None, Some(1000)))) @@ -220,21 +222,21 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { } override def postRestart(reason: Throwable) = { - restartLatch.open + restartLatch.open() } override def postStop() = { - stopLatch.open + stopLatch.open() } }) - val slave = (boss ? slaveProps).as[ActorRef].get + val slave = Await.result((boss ? slaveProps).mapTo[ActorRef], timeout.duration) slave ! Ping slave ! Crash slave ! Ping // test restart and post restart ping - assert(restartLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(restartLatch.await(10 seconds)) assert(!slave.isTerminated) @@ -248,9 +250,9 @@ class RestartStrategySpec extends AkkaSpec with DefaultTimeout { // may not be running slave ! 
Crash - assert(stopLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(stopLatch.await(10 seconds)) - assert(maxNoOfRestartsLatch.tryAwait(10, TimeUnit.SECONDS)) + assert(maxNoOfRestartsLatch.await(10 seconds)) sleep(500L) assert(slave.isTerminated) } diff --git a/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala index ceeb768b6c..7c0aefb2fe 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala @@ -1,12 +1,13 @@ package akka.actor import org.scalatest.BeforeAndAfterEach -import org.multiverse.api.latches.StandardLatch import akka.testkit.AkkaSpec import akka.testkit.EventFilter import akka.util.duration._ import java.util.concurrent.{ CountDownLatch, ConcurrentLinkedQueue, TimeUnit } import akka.testkit.DefaultTimeout +import akka.testkit.TestLatch +import akka.dispatch.Await @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout { @@ -101,7 +102,7 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout object Ping object Crash - val restartLatch = new StandardLatch + val restartLatch = new TestLatch val pingLatch = new CountDownLatch(6) val supervisor = system.actorOf(Props[Supervisor].withFaultHandler(AllForOneStrategy(List(classOf[Exception]), 3, 1000))) @@ -113,7 +114,7 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout override def postRestart(reason: Throwable) = restartLatch.open }) - val actor = (supervisor ? props).as[ActorRef].get + val actor = Await.result((supervisor ? 
props).mapTo[ActorRef], timeout.duration) collectCancellable(system.scheduler.schedule(500 milliseconds, 500 milliseconds, actor, Ping)) // appx 2 pings before crash @@ -121,7 +122,7 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout collectCancellable(system.scheduler.scheduleOnce(1000 milliseconds, actor, Crash)) } - assert(restartLatch.tryAwait(2, TimeUnit.SECONDS)) + assert(restartLatch.await(2 seconds)) // should be enough time for the ping countdown to recover and reach 6 pings assert(pingLatch.await(4, TimeUnit.SECONDS)) } diff --git a/akka-actor-tests/src/test/scala/akka/actor/Supervisor.scala b/akka-actor-tests/src/test/scala/akka/actor/Supervisor.scala index 6c438f1776..174939915d 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/Supervisor.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/Supervisor.scala @@ -7,4 +7,6 @@ class Supervisor extends Actor { def receive = { case x: Props ⇒ sender ! context.actorOf(x) } + // need to override the default of stopping all children upon restart, tests rely on keeping them around + override def preRestart(cause: Throwable, msg: Option[Any]) {} } diff --git a/akka-actor-tests/src/test/scala/akka/actor/SupervisorHierarchySpec.scala b/akka-actor-tests/src/test/scala/akka/actor/SupervisorHierarchySpec.scala index dc45d012fd..fb34e2345b 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/SupervisorHierarchySpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/SupervisorHierarchySpec.scala @@ -7,6 +7,7 @@ package akka.actor import akka.testkit._ import java.util.concurrent.{ TimeUnit, CountDownLatch } +import akka.dispatch.Await object SupervisorHierarchySpec { class FireWorkerException(msg: String) extends Exception(msg) @@ -15,6 +16,8 @@ object SupervisorHierarchySpec { protected def receive = { case p: Props ⇒ sender ! 
context.actorOf(p) } + // test relies on keeping children around during restart + override def preRestart(cause: Throwable, msg: Option[Any]) {} override def postRestart(reason: Throwable) = { countDown.countDown() } @@ -33,10 +36,10 @@ class SupervisorHierarchySpec extends AkkaSpec with DefaultTimeout { val boss = system.actorOf(Props[Supervisor].withFaultHandler(OneForOneStrategy(List(classOf[Exception]), None, None))) val managerProps = Props(new CountDownActor(countDown)).withFaultHandler(AllForOneStrategy(List(), None, None)) - val manager = (boss ? managerProps).as[ActorRef].get + val manager = Await.result((boss ? managerProps).mapTo[ActorRef], timeout.duration) val workerProps = Props(new CountDownActor(countDown)) - val workerOne, workerTwo, workerThree = (manager ? workerProps).as[ActorRef].get + val workerOne, workerTwo, workerThree = Await.result((manager ? workerProps).mapTo[ActorRef], timeout.duration) filterException[ActorKilledException] { workerOne ! Kill diff --git a/akka-actor-tests/src/test/scala/akka/actor/SupervisorMiscSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/SupervisorMiscSpec.scala index 6438d6eee3..63c3065231 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/SupervisorMiscSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/SupervisorMiscSpec.scala @@ -4,7 +4,7 @@ package akka.actor import akka.testkit.{ filterEvents, EventFilter } -import akka.dispatch.{ PinnedDispatcher, Dispatchers } +import akka.dispatch.{ PinnedDispatcher, Dispatchers, Await } import java.util.concurrent.{ TimeUnit, CountDownLatch } import akka.testkit.AkkaSpec import akka.testkit.DefaultTimeout @@ -24,17 +24,15 @@ class SupervisorMiscSpec extends AkkaSpec with DefaultTimeout { override def postRestart(cause: Throwable) { countDownLatch.countDown() } protected def receive = { case "status" ⇒ this.sender ! "OK" - case _ ⇒ this.self.stop() + case _ ⇒ this.context.stop(self) } }) - val actor1 = (supervisor ? 
workerProps.withDispatcher(system.dispatcherFactory.newPinnedDispatcher("pinned"))).as[ActorRef].get + val actor1, actor2 = Await.result((supervisor ? workerProps.withDispatcher(system.dispatcherFactory.newPinnedDispatcher("pinned"))).mapTo[ActorRef], timeout.duration) - val actor2 = (supervisor ? workerProps.withDispatcher(system.dispatcherFactory.newPinnedDispatcher("pinned"))).as[ActorRef].get + val actor3 = Await.result((supervisor ? workerProps.withDispatcher(system.dispatcherFactory.newDispatcher("test").build)).mapTo[ActorRef], timeout.duration) - val actor3 = (supervisor ? workerProps.withDispatcher(system.dispatcherFactory.newDispatcher("test").build)).as[ActorRef].get - - val actor4 = (supervisor ? workerProps.withDispatcher(system.dispatcherFactory.newPinnedDispatcher("pinned"))).as[ActorRef].get + val actor4 = Await.result((supervisor ? workerProps.withDispatcher(system.dispatcherFactory.newPinnedDispatcher("pinned"))).mapTo[ActorRef], timeout.duration) actor1 ! Kill actor2 ! Kill @@ -42,10 +40,10 @@ class SupervisorMiscSpec extends AkkaSpec with DefaultTimeout { actor4 ! Kill countDownLatch.await(10, TimeUnit.SECONDS) - assert((actor1 ? "status").as[String].get == "OK", "actor1 is shutdown") - assert((actor2 ? "status").as[String].get == "OK", "actor2 is shutdown") - assert((actor3 ? "status").as[String].get == "OK", "actor3 is shutdown") - assert((actor4 ? "status").as[String].get == "OK", "actor4 is shutdown") + assert(Await.result(actor1 ? "status", timeout.duration) == "OK", "actor1 is shutdown") + assert(Await.result(actor2 ? "status", timeout.duration) == "OK", "actor2 is shutdown") + assert(Await.result(actor3 ? "status", timeout.duration) == "OK", "actor3 is shutdown") + assert(Await.result(actor4 ? 
"status", timeout.duration) == "OK", "actor4 is shutdown") } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/SupervisorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/SupervisorSpec.scala index fdd87a2ba4..e68e6f3906 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/SupervisorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/SupervisorSpec.scala @@ -7,11 +7,10 @@ package akka.actor import org.scalatest.BeforeAndAfterEach import akka.util.duration._ import akka.{ Die, Ping } -import akka.actor.Actor._ import akka.testkit.TestEvent._ import akka.testkit._ import java.util.concurrent.atomic.AtomicInteger -import java.util.concurrent.LinkedBlockingQueue +import akka.dispatch.Await object SupervisorSpec { val Timeout = 5 seconds @@ -73,7 +72,7 @@ class SupervisorSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitSende // Creating actors and supervisors // ===================================================== - private def child(supervisor: ActorRef, props: Props): ActorRef = (supervisor ? props).as[ActorRef].get + private def child(supervisor: ActorRef, props: Props): ActorRef = Await.result((supervisor ? props).mapTo[ActorRef], props.timeout.duration) def temporaryActorAllForOne = { val supervisor = system.actorOf(Props[Supervisor].withFaultHandler(AllForOneStrategy(List(classOf[Exception]), Some(0)))) @@ -129,14 +128,14 @@ class SupervisorSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitSende } def ping(pingPongActor: ActorRef) = { - (pingPongActor.?(Ping, TimeoutMillis)).as[String] must be === Some(PongMessage) + Await.result(pingPongActor.?(Ping, TimeoutMillis), TimeoutMillis millis) must be === PongMessage expectMsg(Timeout, PingMessage) } def kill(pingPongActor: ActorRef) = { val result = (pingPongActor ? 
(DieReply, TimeoutMillis)) expectMsg(Timeout, ExceptionMessage) - intercept[RuntimeException] { result.get } + intercept[RuntimeException] { Await.result(result, TimeoutMillis millis) } } "A supervisor" must { @@ -152,7 +151,7 @@ class SupervisorSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitSende "not restart temporary actor" in { val (temporaryActor, _) = temporaryActorAllForOne - intercept[RuntimeException] { (temporaryActor.?(DieReply, TimeoutMillis)).get } + intercept[RuntimeException] { Await.result(temporaryActor.?(DieReply, TimeoutMillis), TimeoutMillis millis) } expectNoMsg(1 second) } @@ -293,20 +292,20 @@ class SupervisorSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitSende throw e } }) - val dyingActor = (supervisor ? dyingProps).as[ActorRef].get + val dyingActor = Await.result((supervisor ? dyingProps).mapTo[ActorRef], timeout.duration) filterEvents(EventFilter[RuntimeException]("Expected", occurrences = 1), EventFilter[IllegalStateException]("error while creating actor", occurrences = 1)) { intercept[RuntimeException] { - (dyingActor.?(DieReply, TimeoutMillis)).get + Await.result(dyingActor.?(DieReply, TimeoutMillis), TimeoutMillis millis) } } - (dyingActor.?(Ping, TimeoutMillis)).as[String] must be === Some(PongMessage) + Await.result(dyingActor.?(Ping, TimeoutMillis), TimeoutMillis millis) must be === PongMessage inits.get must be(3) - supervisor.stop() + system.stop(supervisor) } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/SupervisorTreeSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/SupervisorTreeSpec.scala index 9ed84ca2b6..3985f6ea48 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/SupervisorTreeSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/SupervisorTreeSpec.scala @@ -6,12 +6,12 @@ package akka.actor import org.scalatest.WordSpec import org.scalatest.matchers.MustMatchers import akka.util.duration._ -import akka.dispatch.Dispatchers import akka.actor.Actor._ import 
akka.testkit.{ TestKit, EventFilter, filterEvents, filterException } import akka.testkit.AkkaSpec import akka.testkit.ImplicitSender import akka.testkit.DefaultTimeout +import akka.dispatch.{ Await, Dispatchers } @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class SupervisorTreeSpec extends AkkaSpec with ImplicitSender with DefaultTimeout { @@ -28,14 +28,14 @@ class SupervisorTreeSpec extends AkkaSpec with ImplicitSender with DefaultTimeou override def preRestart(cause: Throwable, msg: Option[Any]) { testActor ! self.path } }).withFaultHandler(OneForOneStrategy(List(classOf[Exception]), 3, 1000)) val headActor = system.actorOf(p) - val middleActor = (headActor ? p).as[ActorRef].get - val lastActor = (middleActor ? p).as[ActorRef].get + val middleActor = Await.result((headActor ? p).mapTo[ActorRef], timeout.duration) + val lastActor = Await.result((middleActor ? p).mapTo[ActorRef], timeout.duration) middleActor ! Kill expectMsg(middleActor.path) expectMsg(lastActor.path) expectNoMsg(2 seconds) - headActor.stop() + system.stop(headActor) } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/Ticket669Spec.scala b/akka-actor-tests/src/test/scala/akka/actor/Ticket669Spec.scala index 154ba58fcd..3b5f29c950 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/Ticket669Spec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/Ticket669Spec.scala @@ -10,6 +10,7 @@ import akka.testkit.{ TestKit, filterEvents, EventFilter } import akka.testkit.AkkaSpec import akka.testkit.ImplicitSender import akka.testkit.DefaultTimeout +import akka.dispatch.Await @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class Ticket669Spec extends AkkaSpec with BeforeAndAfterAll with ImplicitSender with DefaultTimeout { @@ -24,22 +25,22 @@ class Ticket669Spec extends AkkaSpec with BeforeAndAfterAll with ImplicitSender "be able to reply on failure during preRestart" in { filterEvents(EventFilter[Exception]("test", occurrences = 1)) { val 
supervisor = system.actorOf(Props[Supervisor].withFaultHandler(AllForOneStrategy(List(classOf[Exception]), 5, 10000))) - val supervised = (supervisor ? Props[Supervised]).as[ActorRef].get + val supervised = Await.result((supervisor ? Props[Supervised]).mapTo[ActorRef], timeout.duration) supervised.!("test")(testActor) expectMsg("failure1") - supervisor.stop() + system.stop(supervisor) } } "be able to reply on failure during postStop" in { filterEvents(EventFilter[Exception]("test", occurrences = 1)) { val supervisor = system.actorOf(Props[Supervisor].withFaultHandler(AllForOneStrategy(List(classOf[Exception]), Some(0), None))) - val supervised = (supervisor ? Props[Supervised]).as[ActorRef].get + val supervised = Await.result((supervisor ? Props[Supervised]).mapTo[ActorRef], timeout.duration) supervised.!("test")(testActor) expectMsg("failure2") - supervisor.stop() + system.stop(supervisor) } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala index b2265367c7..f50b9bd381 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala @@ -7,7 +7,6 @@ package akka.actor import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach } import akka.util.Duration import akka.util.duration._ -import akka.dispatch.{ Dispatchers, Future, KeptPromise } import akka.serialization.Serialization import java.util.concurrent.atomic.AtomicReference import annotation.tailrec @@ -17,6 +16,7 @@ import akka.actor.TypedActor.{ PostRestart, PreRestart, PostStop, PreStart } import java.util.concurrent.{ TimeUnit, CountDownLatch } import akka.japi.{ Creator, Option ⇒ JOption } import akka.testkit.DefaultTimeout +import akka.dispatch.{ Await, Dispatchers, Future, Promise } object TypedActorSpec { @@ -85,7 +85,7 @@ object TypedActorSpec { def pigdog = "Pigdog" - def futurePigdog(): Future[String] = new 
KeptPromise(Right(pigdog)) + def futurePigdog(): Future[String] = Promise.successful(pigdog) def futurePigdog(delay: Long): Future[String] = { Thread.sleep(delay) @@ -94,7 +94,7 @@ object TypedActorSpec { def futurePigdog(delay: Long, numbered: Int): Future[String] = { Thread.sleep(delay) - new KeptPromise(Right(pigdog + numbered)) + Promise.successful(pigdog + numbered) } def futureComposePigdogFrom(foo: Foo): Future[String] = { @@ -247,7 +247,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte val t = newFooBar val f = t.futurePigdog(200) f.isCompleted must be(false) - f.get must be("Pigdog") + Await.result(f, timeout.duration) must be("Pigdog") mustStop(t) } @@ -255,7 +255,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte val t = newFooBar val futures = for (i ← 1 to 20) yield (i, t.futurePigdog(20, i)) for ((i, f) ← futures) { - f.get must be("Pigdog" + i) + Await.result(f, timeout.duration) must be("Pigdog" + i) } mustStop(t) } @@ -278,7 +278,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte val t, t2 = newFooBar(Duration(2, "s")) val f = t.futureComposePigdogFrom(t2) f.isCompleted must be(false) - f.get must equal("PIGDOG") + Await.result(f, timeout.duration) must equal("PIGDOG") mustStop(t) mustStop(t2) } @@ -290,13 +290,13 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte }).withFaultHandler(OneForOneStrategy { case e: IllegalStateException if e.getMessage == "expected" ⇒ FaultHandlingStrategy.Resume })) - val t = (boss ? Props().withTimeout(2 seconds)).as[Foo].get + val t = Await.result((boss ? 
Props().withTimeout(2 seconds)).mapTo[Foo], timeout.duration) t.incr() t.failingPigdog() t.read() must be(1) //Make sure state is not reset after failure - t.failingFuturePigdog.await.exception.get.getMessage must be("expected") + intercept[IllegalStateException] { Await.result(t.failingFuturePigdog, 2 seconds) }.getMessage must be("expected") t.read() must be(1) //Make sure state is not reset after failure (intercept[IllegalStateException] { t.failingJOptionPigdog }).getMessage must be("expected") @@ -323,7 +323,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte val f2 = t.futurePigdog(0) f2.isCompleted must be(false) f.isCompleted must be(false) - f.get must equal(f2.get) + Await.result(f, timeout.duration) must equal(Await.result(f2, timeout.duration)) mustStop(t) } @@ -348,7 +348,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte val results = for (i ← 1 to 120) yield (i, iterator.next.futurePigdog(200L, i)) - for ((i, r) ← results) r.get must be("Pigdog" + i) + for ((i, r) ← results) Await.result(r, timeout.duration) must be("Pigdog" + i) for (t ← thais) mustStop(t) } diff --git a/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala index bffa5bac82..03a9405a8f 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala @@ -31,7 +31,7 @@ object ActorModelSpec { case class Increment(counter: AtomicLong) extends ActorModelMessage - case class Await(latch: CountDownLatch) extends ActorModelMessage + case class AwaitLatch(latch: CountDownLatch) extends ActorModelMessage case class Meet(acknowledge: CountDownLatch, waitFor: CountDownLatch) extends ActorModelMessage @@ -68,7 +68,7 @@ object ActorModelSpec { } def receive = { - case Await(latch) ⇒ ack; latch.await(); busy.switchOff() + case 
AwaitLatch(latch) ⇒ ack; latch.await(); busy.switchOff() case Meet(sign, wait) ⇒ ack; sign.countDown(); wait.await(); busy.switchOff() case Wait(time) ⇒ ack; Thread.sleep(time); busy.switchOff() case WaitAck(time, l) ⇒ ack; Thread.sleep(time); l.countDown(); busy.switchOff() @@ -77,7 +77,7 @@ object ActorModelSpec { case Forward(to, msg) ⇒ ack; to.forward(msg); busy.switchOff() case CountDown(latch) ⇒ ack; latch.countDown(); busy.switchOff() case Increment(count) ⇒ ack; count.incrementAndGet(); busy.switchOff() - case CountDownNStop(l) ⇒ ack; l.countDown(); self.stop(); busy.switchOff() + case CountDownNStop(l) ⇒ ack; l.countDown(); context.stop(self); busy.switchOff() case Restart ⇒ ack; busy.switchOff(); throw new Exception("Restart requested") case Interrupt ⇒ ack; sender ! Status.Failure(new ActorInterruptedException(new InterruptedException("Ping!"))); busy.switchOff(); throw new InterruptedException("Ping!") case ThrowException(e: Throwable) ⇒ ack; busy.switchOff(); throw e @@ -204,7 +204,7 @@ object ActorModelSpec { await(deadline)(stats.restarts.get() == restarts) } catch { case e ⇒ - system.eventStream.publish(Error(e, dispatcher.toString, "actual: " + stats + ", required: InterceptorStats(susp=" + suspensions + + system.eventStream.publish(Error(e, Option(dispatcher).toString, "actual: " + stats + ", required: InterceptorStats(susp=" + suspensions + ",res=" + resumes + ",reg=" + registers + ",unreg=" + unregisters + ",recv=" + msgsReceived + ",proc=" + msgsProcessed + ",restart=" + restarts)) throw e @@ -239,7 +239,7 @@ abstract class ActorModelSpec extends AkkaSpec with DefaultTimeout { assertDispatcher(dispatcher)(stops = 0) val a = newTestActor(dispatcher) assertDispatcher(dispatcher)(stops = 0) - a.stop() + system.stop(a) assertDispatcher(dispatcher)(stops = 1) assertRef(a, dispatcher)( suspensions = 0, @@ -260,7 +260,7 @@ abstract class ActorModelSpec extends AkkaSpec with DefaultTimeout { assertDispatcher(dispatcher)(stops = 2) - a2.stop + 
system.stop(a2) assertDispatcher(dispatcher)(stops = 3) } @@ -279,7 +279,7 @@ abstract class ActorModelSpec extends AkkaSpec with DefaultTimeout { assertCountDown(oneAtATime, (1.5 seconds).dilated.toMillis, "Processed message when allowed") assertRefDefaultZero(a)(registers = 1, msgsReceived = 3, msgsProcessed = 3) - a.stop() + system.stop(a) assertRefDefaultZero(a)(registers = 1, unregisters = 1, msgsReceived = 3, msgsProcessed = 3) } @@ -298,7 +298,7 @@ abstract class ActorModelSpec extends AkkaSpec with DefaultTimeout { assertCountDown(counter, 3.seconds.dilated.toMillis, "Should process 200 messages") assertRefDefaultZero(a)(registers = 1, msgsReceived = 200, msgsProcessed = 200) - a.stop() + system.stop(a) } def spawn(f: ⇒ Unit) { @@ -328,7 +328,7 @@ abstract class ActorModelSpec extends AkkaSpec with DefaultTimeout { assertRefDefaultZero(a)(registers = 1, msgsReceived = 1, msgsProcessed = 1, suspensions = 1, resumes = 1) - a.stop() + system.stop(a) assertRefDefaultZero(a)(registers = 1, unregisters = 1, msgsReceived = 1, msgsProcessed = 1, suspensions = 1, resumes = 1) } @@ -370,7 +370,7 @@ abstract class ActorModelSpec extends AkkaSpec with DefaultTimeout { throw e } assertCountDown(stopLatch, waitTime, "Expected all children to stop") - boss.stop() + system.stop(boss) } for (run ← 1 to 3) { flood(50000) @@ -385,17 +385,17 @@ abstract class ActorModelSpec extends AkkaSpec with DefaultTimeout { val a = newTestActor(dispatcher) val f1 = a ? Reply("foo") val f2 = a ? Reply("bar") - val f3 = try { a ? Interrupt } catch { case ie: InterruptedException ⇒ new KeptPromise(Left(ActorInterruptedException(ie))) } + val f3 = try { a ? Interrupt } catch { case ie: InterruptedException ⇒ Promise.failed(ActorInterruptedException(ie)) } val f4 = a ? Reply("foo2") - val f5 = try { a ? Interrupt } catch { case ie: InterruptedException ⇒ new KeptPromise(Left(ActorInterruptedException(ie))) } + val f5 = try { a ? 
Interrupt } catch { case ie: InterruptedException ⇒ Promise.failed(ActorInterruptedException(ie)) } val f6 = a ? Reply("bar2") - assert(f1.get === "foo") - assert(f2.get === "bar") - assert(f4.get === "foo2") - assert(intercept[ActorInterruptedException](f3.get).getMessage === "Ping!") - assert(f6.get === "bar2") - assert(intercept[ActorInterruptedException](f5.get).getMessage === "Ping!") + assert(Await.result(f1, timeout.duration) === "foo") + assert(Await.result(f2, timeout.duration) === "bar") + assert(Await.result(f4, timeout.duration) === "foo2") + assert(intercept[ActorInterruptedException](Await.result(f3, timeout.duration)).getMessage === "Ping!") + assert(Await.result(f6, timeout.duration) === "bar2") + assert(intercept[ActorInterruptedException](Await.result(f5, timeout.duration)).getMessage === "Ping!") } } @@ -410,12 +410,12 @@ abstract class ActorModelSpec extends AkkaSpec with DefaultTimeout { val f5 = a ? ThrowException(new RemoteException("RemoteException")) val f6 = a ? 
Reply("bar2") - assert(f1.get === "foo") - assert(f2.get === "bar") - assert(f4.get === "foo2") - assert(f6.get === "bar2") - assert(f3.result === None) - assert(f5.result === None) + assert(Await.result(f1, timeout.duration) === "foo") + assert(Await.result(f2, timeout.duration) === "bar") + assert(Await.result(f4, timeout.duration) === "foo2") + assert(Await.result(f6, timeout.duration) === "bar2") + assert(f3.value.isEmpty) + assert(f5.value.isEmpty) } } } @@ -447,8 +447,8 @@ class DispatcherModelSpec extends ActorModelSpec { aStop.countDown() - a.stop - b.stop + system.stop(a) + system.stop(b) while (!a.isTerminated && !b.isTerminated) {} //Busy wait for termination @@ -484,8 +484,8 @@ class BalancingDispatcherModelSpec extends ActorModelSpec { aStop.countDown() - a.stop - b.stop + system.stop(a) + system.stop(b) while (!a.isTerminated && !b.isTerminated) {} //Busy wait for termination diff --git a/akka-actor-tests/src/test/scala/akka/actor/dispatch/BalancingDispatcherSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/dispatch/BalancingDispatcherSpec.scala index 6ebc81409e..8c7054721d 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/dispatch/BalancingDispatcherSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/dispatch/BalancingDispatcherSpec.scala @@ -74,8 +74,8 @@ class BalancingDispatcherSpec extends AkkaSpec { fast.underlying.actor.asInstanceOf[DelayableActor].invocationCount must be > sentToFast fast.underlying.actor.asInstanceOf[DelayableActor].invocationCount must be > (slow.underlying.actor.asInstanceOf[DelayableActor].invocationCount) - slow.stop() - fast.stop() + system.stop(slow) + system.stop(fast) } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatcherActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatcherActorSpec.scala index c6e04c6cf7..e559d63f4c 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatcherActorSpec.scala +++ 
b/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatcherActorSpec.scala @@ -3,11 +3,11 @@ package akka.actor.dispatch import java.util.concurrent.{ CountDownLatch, TimeUnit } import java.util.concurrent.atomic.{ AtomicBoolean, AtomicInteger } import akka.testkit.{ filterEvents, EventFilter, AkkaSpec } -import akka.dispatch.{ PinnedDispatcher, Dispatchers, Dispatcher } import akka.actor.{ Props, Actor } import akka.util.Duration import akka.util.duration._ import akka.testkit.DefaultTimeout +import akka.dispatch.{ Await, PinnedDispatcher, Dispatchers, Dispatcher } object DispatcherActorSpec { class TestActor extends Actor { @@ -39,14 +39,13 @@ class DispatcherActorSpec extends AkkaSpec with DefaultTimeout { val actor = system.actorOf(Props[OneWayTestActor].withDispatcher(system.dispatcherFactory.newDispatcher("test").build)) val result = actor ! "OneWay" assert(OneWayTestActor.oneWay.await(1, TimeUnit.SECONDS)) - actor.stop() + system.stop(actor) } "support ask/reply" in { val actor = system.actorOf(Props[TestActor].withDispatcher(system.dispatcherFactory.newDispatcher("test").build)) - val result = (actor ? "Hello").as[String] - assert("World" === result.get) - actor.stop() + assert("World" === Await.result(actor ? "Hello", timeout.duration)) + system.stop(actor) } "respect the throughput setting" in { @@ -67,13 +66,13 @@ class DispatcherActorSpec extends AkkaSpec with DefaultTimeout { case "ping" ⇒ if (works.get) latch.countDown() }).withDispatcher(throughputDispatcher)) - assert((slowOne ? "hogexecutor").get === "OK") + assert(Await.result(slowOne ? "hogexecutor", timeout.duration) === "OK") (1 to 100) foreach { _ ⇒ slowOne ! "ping" } fastOne ! 
"sabotage" start.countDown() latch.await(10, TimeUnit.SECONDS) - fastOne.stop() - slowOne.stop() + system.stop(fastOne) + system.stop(slowOne) assert(latch.getCount() === 0) } @@ -90,13 +89,13 @@ class DispatcherActorSpec extends AkkaSpec with DefaultTimeout { val fastOne = system.actorOf( Props(context ⇒ { - case "ping" ⇒ if (works.get) latch.countDown(); context.self.stop() + case "ping" ⇒ if (works.get) latch.countDown(); context.stop(context.self) }).withDispatcher(throughputDispatcher)) val slowOne = system.actorOf( Props(context ⇒ { case "hogexecutor" ⇒ ready.countDown(); start.await - case "ping" ⇒ works.set(false); context.self.stop() + case "ping" ⇒ works.set(false); context.stop(context.self) }).withDispatcher(throughputDispatcher)) slowOne ! "hogexecutor" diff --git a/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatcherActorsSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatcherActorsSpec.scala index 8ad5bc641d..d054d15e83 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatcherActorsSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatcherActorsSpec.scala @@ -49,8 +49,8 @@ class DispatcherActorsSpec extends AkkaSpec { assert(sFinished.getCount > 0) sFinished.await assert(sFinished.getCount === 0) - f.stop() - s.stop() + system.stop(f) + system.stop(s) } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/dispatch/PinnedActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/dispatch/PinnedActorSpec.scala index c4750a4691..a194fb35b3 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/dispatch/PinnedActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/dispatch/PinnedActorSpec.scala @@ -3,10 +3,10 @@ package akka.actor.dispatch import java.util.concurrent.{ CountDownLatch, TimeUnit } import akka.testkit._ -import akka.dispatch.{ PinnedDispatcher, Dispatchers } import akka.actor.{ Props, Actor } import akka.testkit.AkkaSpec import 
org.scalatest.BeforeAndAfterEach +import akka.dispatch.{ Await, PinnedDispatcher, Dispatchers } object PinnedActorSpec { class TestActor extends Actor { @@ -30,14 +30,13 @@ class PinnedActorSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeo val actor = system.actorOf(Props(self ⇒ { case "OneWay" ⇒ oneWay.countDown() }).withDispatcher(system.dispatcherFactory.newPinnedDispatcher("test"))) val result = actor ! "OneWay" assert(oneWay.await(1, TimeUnit.SECONDS)) - actor.stop() + system.stop(actor) } "support ask/reply" in { val actor = system.actorOf(Props[TestActor].withDispatcher(system.dispatcherFactory.newPinnedDispatcher("test"))) - val result = (actor ? "Hello").as[String] - assert("World" === result.get) - actor.stop() + assert("World" === Await.result(actor ? "Hello", timeout.duration)) + system.stop(actor) } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/routing/ListenerSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/routing/ListenerSpec.scala index ab149216a7..38a57fda10 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/routing/ListenerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/routing/ListenerSpec.scala @@ -50,7 +50,7 @@ class ListenerSpec extends AkkaSpec { fooLatch.await - for (a ← List(broadcast, a1, a2, a3)) a.stop() + for (a ← List(broadcast, a1, a2, a3)) system.stop(a) } } } diff --git a/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala b/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala index b3a625e7b4..b0529e19cf 100644 --- a/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala @@ -20,7 +20,6 @@ class ConfigSpec extends AkkaSpec(ConfigFactory.defaultReference) { val config = settings.config import config._ - getList("akka.boot").asScala.toSeq must equal(Nil) getString("akka.version") must equal("2.0-SNAPSHOT") settings.ConfigVersion must equal("2.0-SNAPSHOT") diff --git 
a/akka-actor-tests/src/test/scala/akka/dataflow/Future2Actor.scala b/akka-actor-tests/src/test/scala/akka/dataflow/Future2Actor.scala index d684474b16..5d24b9678f 100644 --- a/akka-actor-tests/src/test/scala/akka/dataflow/Future2Actor.scala +++ b/akka-actor-tests/src/test/scala/akka/dataflow/Future2Actor.scala @@ -4,7 +4,7 @@ package akka.dataflow import akka.actor.{ Actor, Props } -import akka.dispatch.Future +import akka.dispatch.{ Future, Await } import akka.actor.future2actor import akka.util.duration._ import akka.testkit.AkkaSpec @@ -26,9 +26,9 @@ class Future2ActorSpec extends AkkaSpec with DefaultTimeout { case "ex" ⇒ Future(throw new AssertionError) pipeTo context.sender } })) - (actor ? "do").as[Int] must be(Some(31)) + Await.result(actor ? "do", timeout.duration) must be(31) intercept[AssertionError] { - (actor ? "ex").get + Await.result(actor ? "ex", timeout.duration) } } } diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala index 44ddf4f8bc..08b6a766ab 100644 --- a/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala @@ -9,12 +9,12 @@ import org.scalacheck.Gen._ import akka.actor._ import akka.testkit.{ EventFilter, filterEvents, filterException } import akka.util.duration._ -import org.multiverse.api.latches.StandardLatch -import java.util.concurrent.{ TimeUnit, CountDownLatch } import akka.testkit.AkkaSpec import org.scalatest.junit.JUnitSuite import java.lang.ArithmeticException import akka.testkit.DefaultTimeout +import akka.testkit.TestLatch +import java.util.concurrent.{ TimeoutException, TimeUnit, CountDownLatch } object FutureSpec { class TestActor extends Actor { @@ -26,7 +26,7 @@ object FutureSpec { } } - class TestDelayActor(await: StandardLatch) extends Actor { + class TestDelayActor(await: TestLatch) extends Actor { def receive = { case "Hello" ⇒ await.await; sender ! 
"World" case "NoReply" ⇒ await.await @@ -37,6 +37,7 @@ object FutureSpec { } } +@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class JavaFutureSpec extends JavaFutureTests with JUnitSuite @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) @@ -47,8 +48,9 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa "never completed" must { behave like emptyFuture(_(Promise())) "return supplied value on timeout" in { - val promise = Promise[String](100) orElse "Timedout" - promise.get must be("Timedout") + val timedOut = Promise.successful[String]("Timedout") + val promise = Promise[String]() orElse timedOut + Await.result(promise, timeout.duration) must be("Timedout") } } "completed with a result" must { @@ -61,36 +63,33 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa val future = Promise[String]().complete(Left(new RuntimeException(message))) behave like futureWithException[RuntimeException](_(future, message)) } - "expired" must { - behave like expiredFuture(_(Promise(0))) - } } "A Future" when { "awaiting a result" that { "is not completed" must { behave like emptyFuture { test ⇒ - val latch = new StandardLatch + val latch = new TestLatch val result = "test value" val future = Future { latch.await result } test(future) - latch.open - future.await + latch.open() + Await.ready(future, timeout.duration) } } "is completed" must { behave like futureWithResult { test ⇒ - val latch = new StandardLatch + val latch = new TestLatch val result = "test value" val future = Future { latch.await result } - latch.open - future.await + latch.open() + Await.ready(future, timeout.duration) test(future, result) } } @@ -99,8 +98,8 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa filterException[ArithmeticException] { check({ (future: Future[Int], actions: List[FutureAction]) ⇒ val result = (future /: actions)(_ /: _) - val expected = 
(future.await.value.get /: actions)(_ /: _) - ((result.await.value.get, expected) match { + val expected = (Await.ready(future, timeout.duration).value.get /: actions)(_ /: _) + ((Await.ready(result, timeout.duration).value.get, expected) match { case (Right(a), Right(b)) ⇒ a == b case (Left(a), Left(b)) if a.toString == b.toString ⇒ true case (Left(a), Left(b)) if a.getStackTrace.isEmpty || b.getStackTrace.isEmpty ⇒ @@ -118,9 +117,9 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa behave like futureWithResult { test ⇒ val actor = system.actorOf(Props[TestActor]) val future = actor ? "Hello" - future.await + Await.ready(future, timeout.duration) test(future, "World") - actor.stop() + system.stop(actor) } } "throws an exception" must { @@ -128,9 +127,9 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa filterException[RuntimeException] { val actor = system.actorOf(Props[TestActor]) val future = actor ? "Failure" - future.await + Await.ready(future, timeout.duration) test(future, "Expected exception; to test fault-tolerance") - actor.stop() + system.stop(actor) } } } @@ -142,10 +141,10 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa val actor1 = system.actorOf(Props[TestActor]) val actor2 = system.actorOf(Props(new Actor { def receive = { case s: String ⇒ sender ! s.toUpperCase } })) val future = actor1 ? "Hello" flatMap { case s: String ⇒ actor2 ? s } - future.await + Await.ready(future, timeout.duration) test(future, "WORLD") - actor1.stop() - actor2.stop() + system.stop(actor1) + system.stop(actor2) } } "will throw an exception" must { @@ -154,10 +153,10 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa val actor1 = system.actorOf(Props[TestActor]) val actor2 = system.actorOf(Props(new Actor { def receive = { case s: String ⇒ sender ! Status.Failure(new ArithmeticException("/ by zero")) } })) val future = actor1 ? 
"Hello" flatMap { case s: String ⇒ actor2 ? s } - future.await + Await.ready(future, timeout.duration) test(future, "/ by zero") - actor1.stop() - actor2.stop() + system.stop(actor1) + system.stop(actor2) } } } @@ -167,10 +166,10 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa val actor1 = system.actorOf(Props[TestActor]) val actor2 = system.actorOf(Props(new Actor { def receive = { case s: String ⇒ sender ! s.toUpperCase } })) val future = actor1 ? "Hello" flatMap { case i: Int ⇒ actor2 ? i } - future.await + Await.ready(future, timeout.duration) test(future, "World (of class java.lang.String)") - actor1.stop() - actor2.stop() + system.stop(actor1) + system.stop(actor2) } } } @@ -201,10 +200,10 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa c ← (actor ? 7).mapTo[String] } yield b + "-" + c - future1.get must be("10-14") + Await.result(future1, timeout.duration) must be("10-14") assert(checkType(future1, manifest[String])) - intercept[ClassCastException] { future2.get } - actor.stop() + intercept[ClassCastException] { Await.result(future2, timeout.duration) } + system.stop(actor) } } @@ -231,9 +230,9 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa Res(c: Int) ← actor ? Req(7) } yield b + "-" + c - future1.get must be("10-14") - intercept[MatchError] { future2.get } - actor.stop() + Await.result(future1, timeout.duration) must be("10-14") + intercept[MatchError] { Await.result(future2, timeout.duration) } + system.stop(actor) } } @@ -268,34 +267,34 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa } val future11 = actor ? "Failure" recover { case _ ⇒ "Oops!" 
} - future1.get must be(5) - intercept[ArithmeticException] { future2.get } - intercept[ArithmeticException] { future3.get } - future4.get must be("5") - future5.get must be("0") - intercept[ArithmeticException] { future6.get } - future7.get must be("You got ERROR") - intercept[RuntimeException] { future8.get } - future9.get must be("FAIL!") - future10.get must be("World") - future11.get must be("Oops!") + Await.result(future1, timeout.duration) must be(5) + intercept[ArithmeticException] { Await.result(future2, timeout.duration) } + intercept[ArithmeticException] { Await.result(future3, timeout.duration) } + Await.result(future4, timeout.duration) must be("5") + Await.result(future5, timeout.duration) must be("0") + intercept[ArithmeticException] { Await.result(future6, timeout.duration) } + Await.result(future7, timeout.duration) must be("You got ERROR") + intercept[RuntimeException] { Await.result(future8, timeout.duration) } + Await.result(future9, timeout.duration) must be("FAIL!") + Await.result(future10, timeout.duration) must be("World") + Await.result(future11, timeout.duration) must be("Oops!") - actor.stop() + system.stop(actor) } } "firstCompletedOf" in { - val futures = Vector.fill[Future[Int]](10)(new DefaultPromise[Int]()) :+ new KeptPromise[Int](Right(5)) - Future.firstCompletedOf(futures).get must be(5) + val futures = Vector.fill[Future[Int]](10)(Promise[Int]()) :+ Promise.successful[Int](5) + Await.result(Future.firstCompletedOf(futures), timeout.duration) must be(5) } "find" in { val futures = for (i ← 1 to 10) yield Future { i } val result = Future.find[Int](futures)(_ == 3) - result.get must be(Some(3)) + Await.result(result, timeout.duration) must be(Some(3)) val notFound = Future.find[Int](futures)(_ == 11) - notFound.get must be(None) + Await.result(notFound, timeout.duration) must be(None) } "fold" in { @@ -306,7 +305,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa } val timeout = 10000 def futures = 
actors.zipWithIndex map { case (actor: ActorRef, idx: Int) ⇒ actor.?((idx, idx * 200), timeout).mapTo[Int] } - Future.fold(futures, timeout)(0)(_ + _).get must be(45) + Await.result(Future.fold(futures)(0)(_ + _), timeout millis) must be(45) } "fold by composing" in { @@ -316,7 +315,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa })) } def futures = actors.zipWithIndex map { case (actor: ActorRef, idx: Int) ⇒ actor.?((idx, idx * 200), 10000).mapTo[Int] } - futures.foldLeft(Future(0))((fr, fa) ⇒ for (r ← fr; a ← fa) yield (r + a)).get must be(45) + Await.result(futures.foldLeft(Future(0))((fr, fa) ⇒ for (r ← fr; a ← fa) yield (r + a)), timeout.duration) must be(45) } "fold with an exception" in { @@ -333,18 +332,19 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa } val timeout = 10000 def futures = actors.zipWithIndex map { case (actor: ActorRef, idx: Int) ⇒ actor.?((idx, idx * 100), timeout).mapTo[Int] } - Future.fold(futures, timeout)(0)(_ + _).await.exception.get.getMessage must be("shouldFoldResultsWithException: expected") + intercept[Throwable] { Await.result(Future.fold(futures)(0)(_ + _), timeout millis) }.getMessage must be("shouldFoldResultsWithException: expected") } } "fold mutable zeroes safely" in { import scala.collection.mutable.ArrayBuffer def test(testNumber: Int) { - val fs = (0 to 1000) map (i ⇒ Future(i, 10000)) - val result = Future.fold(fs, 10000)(ArrayBuffer.empty[AnyRef]) { + val fs = (0 to 1000) map (i ⇒ Future(i)) + val f = Future.fold(fs)(ArrayBuffer.empty[AnyRef]) { case (l, i) if i % 2 == 0 ⇒ l += i.asInstanceOf[AnyRef] case (l, _) ⇒ l - }.get.asInstanceOf[ArrayBuffer[Int]].sum + } + val result = Await.result(f.mapTo[ArrayBuffer[Int]], 10000 millis).sum assert(result === 250500) } @@ -353,7 +353,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa } "return zero value if folding empty list" in { - 
Future.fold(List[Future[Int]]())(0)(_ + _).get must be(0) + Await.result(Future.fold(List[Future[Int]]())(0)(_ + _), timeout.duration) must be(0) } "shouldReduceResults" in { @@ -364,7 +364,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa } val timeout = 10000 def futures = actors.zipWithIndex map { case (actor: ActorRef, idx: Int) ⇒ actor.?((idx, idx * 200), timeout).mapTo[Int] } - assert(Future.reduce(futures, timeout)(_ + _).get === 45) + assert(Await.result(Future.reduce(futures)(_ + _), timeout millis) === 45) } "shouldReduceResultsWithException" in { @@ -381,22 +381,22 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa } val timeout = 10000 def futures = actors.zipWithIndex map { case (actor: ActorRef, idx: Int) ⇒ actor.?((idx, idx * 100), timeout).mapTo[Int] } - assert(Future.reduce(futures, timeout)(_ + _).await.exception.get.getMessage === "shouldFoldResultsWithException: expected") + intercept[Throwable] { Await.result(Future.reduce(futures)(_ + _), timeout millis) }.getMessage must be === "shouldFoldResultsWithException: expected" } } "shouldReduceThrowIAEOnEmptyInput" in { filterException[IllegalArgumentException] { - intercept[UnsupportedOperationException] { Future.reduce(List[Future[Int]]())(_ + _).get } + intercept[java.util.NoSuchElementException] { Await.result(Future.reduce(List[Future[Int]]())(_ + _), timeout.duration) } } } "receiveShouldExecuteOnComplete" in { - val latch = new StandardLatch + val latch = new TestLatch val actor = system.actorOf(Props[TestActor]) - actor ? "Hello" onResult { case "World" ⇒ latch.open } - assert(latch.tryAwait(5, TimeUnit.SECONDS)) - actor.stop() + actor ? "Hello" onSuccess { case "World" ⇒ latch.open() } + assert(latch.await(5 seconds)) + system.stop(actor) } "shouldTraverseFutures" in { @@ -410,52 +410,46 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa })) val oddFutures = List.fill(100)(oddActor ? 
'GetNext mapTo manifest[Int]) - assert(Future.sequence(oddFutures).get.sum === 10000) - oddActor.stop() + + assert(Await.result(Future.sequence(oddFutures), timeout.duration).sum === 10000) + system.stop(oddActor) val list = (1 to 100).toList - assert(Future.traverse(list)(x ⇒ Future(x * 2 - 1)).get.sum === 10000) + assert(Await.result(Future.traverse(list)(x ⇒ Future(x * 2 - 1)), timeout.duration).sum === 10000) } "shouldHandleThrowables" in { class ThrowableTest(m: String) extends Throwable(m) filterException[ThrowableTest] { - val f1 = Future { throw new ThrowableTest("test") } - f1.await - intercept[ThrowableTest] { f1.get } + val f1 = Future[Any] { throw new ThrowableTest("test") } + intercept[ThrowableTest] { Await.result(f1, timeout.duration) } - val latch = new StandardLatch - val f2 = Future { latch.tryAwait(5, TimeUnit.SECONDS); "success" } + val latch = new TestLatch + val f2 = Future { latch.await(5 seconds); "success" } f2 foreach (_ ⇒ throw new ThrowableTest("dispatcher foreach")) - f2 onResult { case _ ⇒ throw new ThrowableTest("dispatcher receive") } + f2 onSuccess { case _ ⇒ throw new ThrowableTest("dispatcher receive") } val f3 = f2 map (s ⇒ s.toUpperCase) - latch.open - f2.await - assert(f2.get === "success") + latch.open() + assert(Await.result(f2, timeout.duration) === "success") f2 foreach (_ ⇒ throw new ThrowableTest("current thread foreach")) - f2 onResult { case _ ⇒ throw new ThrowableTest("current thread receive") } - f3.await - assert(f3.get === "SUCCESS") + f2 onSuccess { case _ ⇒ throw new ThrowableTest("current thread receive") } + assert(Await.result(f3, timeout.duration) === "SUCCESS") } } "shouldBlockUntilResult" in { - val latch = new StandardLatch + val latch = new TestLatch - val f = Future({ latch.await; 5 }) - val f2 = Future({ f.get + 5 }) + val f = Future { latch.await; 5 } + val f2 = Future { Await.result(f, timeout.duration) + 5 } - assert(f2.resultOrException === None) - latch.open - assert(f2.get === 10) + 
intercept[TimeoutException](Await.ready(f2, 100 millis)) + latch.open() + assert(Await.result(f2, timeout.duration) === 10) - val f3 = Future({ Thread.sleep(10); 5 }, 10 millis) - filterException[FutureTimeoutException] { - intercept[FutureTimeoutException] { - f3.get - } - } + val f3 = Future { Thread.sleep(100); 5 } + filterException[TimeoutException] { intercept[TimeoutException] { Await.ready(f3, 0 millis) } } } "futureComposingWithContinuations" in { @@ -468,9 +462,9 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa val r = flow(x() + " " + y() + "!") - assert(r.get === "Hello World!") + assert(Await.result(r, timeout.duration) === "Hello World!") - actor.stop + system.stop(actor) } "futureComposingWithContinuationsFailureDivideZero" in { @@ -482,7 +476,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa val r = flow(x() + " " + y.map(_ / 0).map(_.toString).apply, 100) - intercept[java.lang.ArithmeticException](r.get) + intercept[java.lang.ArithmeticException](Await.result(r, timeout.duration)) } } @@ -497,7 +491,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa val r = flow(x() + y(), 100) - intercept[ClassCastException](r.get) + intercept[ClassCastException](Await.result(r, timeout.duration)) } } @@ -512,7 +506,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa val r = flow(x() + y()) - intercept[ClassCastException](r.get) + intercept[ClassCastException](Await.result(r, timeout.duration)) } } @@ -520,60 +514,46 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa import Future.flow val x, y, z = Promise[Int]() - val ly, lz = new StandardLatch + val ly, lz = new TestLatch val result = flow { y completeWith x - ly.open // not within continuation + ly.open() // not within continuation z << x - lz.open // within continuation, will wait for 'z' to complete + lz.open() // within continuation, will 
wait for 'z' to complete z() + y() } - assert(ly.tryAwaitUninterruptible(100, TimeUnit.MILLISECONDS)) - assert(!lz.tryAwaitUninterruptible(100, TimeUnit.MILLISECONDS)) + assert(ly.await(100 milliseconds)) + lz.awaitTimeout(100 milliseconds) flow { x << 5 } - assert(y.get === 5) - assert(z.get === 5) + assert(Await.result(y, timeout.duration) === 5) + assert(Await.result(z, timeout.duration) === 5) assert(lz.isOpen) - assert(result.get === 10) + assert(Await.result(result, timeout.duration) === 10) val a, b, c = Promise[Int]() val result2 = flow { - val n = (a << c).result.get + 10 + val n = (a << c).value.get.right.get + 10 b << (c() - 2) a() + n * b() } c completeWith Future(5) - assert(a.get === 5) - assert(b.get === 3) - assert(result2.get === 50) - } - - "shouldNotAddOrRunCallbacksAfterFailureToBeCompletedBeforeExpiry" in { - val latch = new StandardLatch - val f = Promise[Int](0) - Thread.sleep(25) - f.onComplete(_ ⇒ latch.open) //Shouldn't throw any exception here - - assert(f.isExpired) //Should be expired - - f.complete(Right(1)) //Shouldn't complete the Future since it is expired - - assert(f.value.isEmpty) //Shouldn't be completed - assert(!latch.isOpen) //Shouldn't run the listener + assert(Await.result(a, timeout.duration) === 5) + assert(Await.result(b, timeout.duration) === 3) + assert(Await.result(result2, timeout.duration) === 50) } "futureDataFlowShouldEmulateBlocking1" in { import Future.flow - val one, two = Promise[Int](1000 * 60) + val one, two = Promise[Int]() val simpleResult = flow { one() + two() } @@ -582,24 +562,24 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa flow { one << 1 } - one.await + Await.ready(one, 1 minute) assert(one.isCompleted) assert(List(two, simpleResult).forall(_.isCompleted == false)) flow { two << 9 } - two.await + Await.ready(two, 1 minute) assert(List(one, two).forall(_.isCompleted == true)) - assert(simpleResult.get === 10) + assert(Await.result(simpleResult, timeout.duration) 
=== 10) } "futureDataFlowShouldEmulateBlocking2" in { import Future.flow - val x1, x2, y1, y2 = Promise[Int](1000 * 60) - val lx, ly, lz = new StandardLatch + val x1, x2, y1, y2 = Promise[Int]() + val lx, ly, lz = new TestLatch val result = flow { lx.open() x1 << y1 @@ -608,45 +588,45 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa lz.open() x1() + x2() } - assert(lx.tryAwaitUninterruptible(2000, TimeUnit.MILLISECONDS)) + assert(lx.await(2 seconds)) assert(!ly.isOpen) assert(!lz.isOpen) assert(List(x1, x2, y1, y2).forall(_.isCompleted == false)) flow { y1 << 1 } // When this is set, it should cascade down the line - assert(ly.tryAwaitUninterruptible(2000, TimeUnit.MILLISECONDS)) - assert(x1.get === 1) + assert(ly.await(2 seconds)) + assert(Await.result(x1, 1 minute) === 1) assert(!lz.isOpen) flow { y2 << 9 } // When this is set, it should cascade down the line - assert(lz.tryAwaitUninterruptible(2000, TimeUnit.MILLISECONDS)) - assert(x2.get === 9) + assert(lz.await(2 seconds)) + assert(Await.result(x2, 1 minute) === 9) - assert(List(x1, x2, y1, y2).forall(_.isCompleted == true)) + assert(List(x1, x2, y1, y2).forall(_.isCompleted)) - assert(result.get === 10) + assert(Await.result(result, 1 minute) === 10) } "dataFlowAPIshouldbeSlick" in { import Future.flow - val i1, i2, s1, s2 = new StandardLatch + val i1, i2, s1, s2 = new TestLatch - val callService1 = Future { i1.open; s1.awaitUninterruptible; 1 } - val callService2 = Future { i2.open; s2.awaitUninterruptible; 9 } + val callService1 = Future { i1.open(); s1.await; 1 } + val callService2 = Future { i2.open(); s2.await; 9 } val result = flow { callService1() + callService2() } assert(!s1.isOpen) assert(!s2.isOpen) assert(!result.isCompleted) - assert(i1.tryAwaitUninterruptible(2000, TimeUnit.MILLISECONDS)) - assert(i2.tryAwaitUninterruptible(2000, TimeUnit.MILLISECONDS)) - s1.open - s2.open - assert(result.get === 10) + assert(i1.await(2 seconds)) + assert(i2.await(2 seconds)) + 
s1.open() + s2.open() + assert(Await.result(result, timeout.duration) === 10) } "futureCompletingWithContinuationsFailure" in { @@ -654,24 +634,24 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa import Future.flow val x, y, z = Promise[Int]() - val ly, lz = new StandardLatch + val ly, lz = new TestLatch val result = flow { y << x - ly.open + ly.open() val oops = 1 / 0 z << x - lz.open + lz.open() z() + y() + oops } - assert(!ly.tryAwaitUninterruptible(100, TimeUnit.MILLISECONDS)) - assert(!lz.tryAwaitUninterruptible(100, TimeUnit.MILLISECONDS)) + ly.awaitTimeout(100 milliseconds) + lz.awaitTimeout(100 milliseconds) flow { x << 5 } - assert(y.get === 5) - intercept[java.lang.ArithmeticException](result.get) + assert(Await.result(y, timeout.duration) === 5) + intercept[java.lang.ArithmeticException](Await.result(result, timeout.duration)) assert(z.value === None) assert(!lz.isOpen) } @@ -680,7 +660,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa "futureContinuationsShouldNotBlock" in { import Future.flow - val latch = new StandardLatch + val latch = new TestLatch val future = Future { latch.await "Hello" @@ -692,9 +672,9 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa assert(!result.isCompleted) - latch.open + latch.open() - assert(result.get === Some("Hello")) + assert(Await.result(result, timeout.duration) === Some("Hello")) } "futureFlowShouldBeTypeSafe" in { @@ -717,8 +697,8 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa assert(!checkType(rInt, manifest[Nothing])) assert(!checkType(rInt, manifest[Any])) - rString.await - rInt.await + Await.result(rString, timeout.duration) + Await.result(rInt, timeout.duration) } "futureFlowSimpleAssign" in { @@ -732,7 +712,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa flow { x << 40 } flow { y << 2 } - assert(z.get === 42) + 
assert(Await.result(z, timeout.duration) === 42) } "futureFlowLoops" in { @@ -754,7 +734,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa var i = 0 promises foreach { p ⇒ - assert(p.get === i) + assert(Await.result(p, timeout.duration) === i) i += 1 } @@ -763,39 +743,39 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa } "run callbacks async" in { - val latch = Vector.fill(10)(new StandardLatch) + val latch = Vector.fill(10)(new TestLatch) - val f1 = Future { latch(0).open; latch(1).await; "Hello" } - val f2 = f1 map { s ⇒ latch(2).open; latch(3).await; s.length } - f2 foreach (_ ⇒ latch(4).open) + val f1 = Future { latch(0).open(); latch(1).await; "Hello" } + val f2 = f1 map { s ⇒ latch(2).open(); latch(3).await; s.length } + f2 foreach (_ ⇒ latch(4).open()) latch(0).await f1 must not be ('completed) f2 must not be ('completed) - latch(1).open + latch(1).open() latch(2).await f1 must be('completed) f2 must not be ('completed) - val f3 = f1 map { s ⇒ latch(5).open; latch(6).await; s.length * 2 } - f3 foreach (_ ⇒ latch(3).open) + val f3 = f1 map { s ⇒ latch(5).open(); latch(6).await; s.length * 2 } + f3 foreach (_ ⇒ latch(3).open()) latch(5).await f3 must not be ('completed) - latch(6).open + latch(6).open() latch(4).await f2 must be('completed) f3 must be('completed) val p1 = Promise[String]() - val f4 = p1 map { s ⇒ latch(7).open; latch(8).await; s.length } - f4 foreach (_ ⇒ latch(9).open) + val f4 = p1 map { s ⇒ latch(7).open(); latch(8).await; s.length } + f4 foreach (_ ⇒ latch(9).open()) p1 must not be ('completed) f4 must not be ('completed) @@ -807,91 +787,105 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa p1 must be('completed) f4 must not be ('completed) - latch(8).open + latch(8).open() latch(9).await - f4.await must be('completed) + Await.ready(f4, timeout.duration) must be('completed) } "should not deadlock with nested await (ticket 1313)" in { - 
val simple = Future() map (_ ⇒ (Future(()) map (_ ⇒ ())).get) - simple.await must be('completed) + val simple = Future() map (_ ⇒ Await.result((Future(()) map (_ ⇒ ())), timeout.duration)) + Await.ready(simple, timeout.duration) must be('completed) - val l1, l2 = new StandardLatch + val l1, l2 = new TestLatch val complex = Future() map { _ ⇒ - Future.blocking() - val nested = Future() - nested foreach (_ ⇒ l1.open) + Future.blocking(system.dispatcher) + val nested = Future(()) + nested foreach (_ ⇒ l1.open()) l1.await // make sure nested is completed - nested foreach (_ ⇒ l2.open) + nested foreach (_ ⇒ l2.open()) l2.await } - assert(complex.await.isCompleted) + Await.ready(complex, timeout.duration) must be('completed) } } } def emptyFuture(f: (Future[Any] ⇒ Unit) ⇒ Unit) { "not be completed" in { f(_ must not be ('completed)) } - "not be expired" in { f(_ must not be ('expired)) } "not contain a value" in { f(_.value must be(None)) } - "not contain a result" in { f(_.result must be(None)) } - "not contain an exception" in { f(_.exception must be(None)) } } def futureWithResult(f: ((Future[Any], Any) ⇒ Unit) ⇒ Unit) { "be completed" in { f((future, _) ⇒ future must be('completed)) } - "not be expired" in { f((future, _) ⇒ future must not be ('expired)) } "contain a value" in { f((future, result) ⇒ future.value must be(Some(Right(result)))) } - "contain a result" in { f((future, result) ⇒ future.result must be(Some(result))) } - "not contain an exception" in { f((future, _) ⇒ future.exception must be(None)) } - "return result with 'get'" in { f((future, result) ⇒ future.get must be(result)) } - "return result with 'resultOrException'" in { f((future, result) ⇒ future.resultOrException must be(Some(result))) } - "not timeout" in { f((future, _) ⇒ future.await) } + "return result with 'get'" in { f((future, result) ⇒ Await.result(future, timeout.duration) must be(result)) } + "return result with 'Await.sync'" in { f((future, result) ⇒ Await.result(future, 
timeout.duration) must be(result)) } + "not timeout" in { f((future, _) ⇒ Await.ready(future, 0 millis)) } "filter result" in { f { (future, result) ⇒ - (future filter (_ ⇒ true)).get must be(result) - (evaluating { (future filter (_ ⇒ false)).get } must produce[MatchError]).getMessage must startWith(result.toString) + Await.result((future filter (_ ⇒ true)), timeout.duration) must be(result) + (evaluating { Await.result((future filter (_ ⇒ false)), timeout.duration) } must produce[MatchError]).getMessage must startWith(result.toString) } } - "transform result with map" in { f((future, result) ⇒ (future map (_.toString.length)).get must be(result.toString.length)) } - "compose result with flatMap" is pending - "perform action with foreach" is pending - "match result with collect" is pending - "not recover from exception" is pending - "perform action on result" is pending + "transform result with map" in { f((future, result) ⇒ Await.result((future map (_.toString.length)), timeout.duration) must be(result.toString.length)) } + "compose result with flatMap" in { + f { (future, result) ⇒ + val r = for (r ← future; p ← Promise.successful("foo")) yield r.toString + p + Await.result(r, timeout.duration) must be(result.toString + "foo") + } + } + "perform action with foreach" in { + f { (future, result) ⇒ + val p = Promise[Any]() + future foreach p.success + Await.result(p, timeout.duration) must be(result) + } + } + "not recover from exception" in { f((future, result) ⇒ Await.result(future.recover({ case _ ⇒ "pigdog" }), timeout.duration) must be(result)) } + "perform action on result" in { + f { (future, result) ⇒ + val p = Promise[Any]() + future.onSuccess { case x ⇒ p.success(x) } + Await.result(p, timeout.duration) must be(result) + } + } + "not project a failure" in { f((future, result) ⇒ (evaluating { Await.result(future.failed, timeout.duration) } must produce[NoSuchElementException]).getMessage must be("Future.failed not completed with a throwable. 
Instead completed with: " + result)) } "not perform action on exception" is pending - "cast using mapTo" is pending + "cast using mapTo" in { f((future, result) ⇒ Await.result(future.mapTo[Boolean].recover({ case _: ClassCastException ⇒ false }), timeout.duration) must be(false)) } } def futureWithException[E <: Throwable: Manifest](f: ((Future[Any], String) ⇒ Unit) ⇒ Unit) { "be completed" in { f((future, _) ⇒ future must be('completed)) } - "not be expired" in { f((future, _) ⇒ future must not be ('expired)) } - "contain a value" in { f((future, _) ⇒ future.value must be('defined)) } - "not contain a result" in { f((future, _) ⇒ future.result must be(None)) } - "contain an exception" in { f((future, message) ⇒ future.exception.get.getMessage must be(message)) } - "throw exception with 'get'" in { f((future, message) ⇒ (evaluating { future.get } must produce[E]).getMessage must be(message)) } - "throw exception with 'resultOrException'" in { f((future, message) ⇒ (evaluating { future.resultOrException } must produce[E]).getMessage must be(message)) } - "not timeout" in { f((future, _) ⇒ future.await) } + "contain a value" in { + f((future, message) ⇒ { + future.value must be('defined) + future.value.get must be('left) + future.value.get.left.get.getMessage must be(message) + }) + } + "throw exception with 'get'" in { f((future, message) ⇒ (evaluating { Await.result(future, timeout.duration) } must produce[E]).getMessage must be(message)) } + "throw exception with 'Await.sync'" in { f((future, message) ⇒ (evaluating { Await.result(future, timeout.duration) } must produce[E]).getMessage must be(message)) } "retain exception with filter" in { f { (future, message) ⇒ - (evaluating { (future filter (_ ⇒ true)).get } must produce[E]).getMessage must be(message) - (evaluating { (future filter (_ ⇒ false)).get } must produce[E]).getMessage must be(message) + (evaluating { Await.result(future filter (_ ⇒ true), timeout.duration) } must produce[E]).getMessage must 
be(message) + (evaluating { Await.result(future filter (_ ⇒ false), timeout.duration) } must produce[E]).getMessage must be(message) } } - "retain exception with map" in { f((future, message) ⇒ (evaluating { (future map (_.toString.length)).get } must produce[E]).getMessage must be(message)) } - "retain exception with flatMap" is pending + "retain exception with map" in { f((future, message) ⇒ (evaluating { Await.result(future map (_.toString.length), timeout.duration) } must produce[E]).getMessage must be(message)) } + "retain exception with flatMap" in { f((future, message) ⇒ (evaluating { Await.result(future flatMap (_ ⇒ Promise.successful[Any]("foo")), timeout.duration) } must produce[E]).getMessage must be(message)) } "not perform action with foreach" is pending - "retain exception with collect" is pending - "recover from exception" is pending + "recover from exception" in { f((future, message) ⇒ Await.result(future.recover({ case e if e.getMessage == message ⇒ "pigdog" }), timeout.duration) must be("pigdog")) } "not perform action on result" is pending - "perform action on exception" is pending - "always cast successfully using mapTo" is pending - } - - def expiredFuture(f: (Future[Any] ⇒ Unit) ⇒ Unit) { - "not be completed" in { f(_ must not be ('completed)) } - "be expired" in { f(_ must be('expired)) } + "project a failure" in { f((future, message) ⇒ Await.result(future.failed, timeout.duration).getMessage must be(message)) } + "perform action on exception" in { + f { (future, message) ⇒ + val p = Promise[Any]() + future.onFailure { case _ ⇒ p.success(message) } + Await.result(p, timeout.duration) must be(message) + } + } + "always cast successfully using mapTo" in { f((future, message) ⇒ (evaluating { Await.result(future.mapTo[java.lang.Thread], timeout.duration) } must produce[E]).getMessage must be(message)) } } sealed trait IntAction { def apply(that: Int): Int } diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/MailboxConfigSpec.scala 
b/akka-actor-tests/src/test/scala/akka/dispatch/MailboxConfigSpec.scala index 7af8f057d8..d0c2053243 100644 --- a/akka-actor-tests/src/test/scala/akka/dispatch/MailboxConfigSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/dispatch/MailboxConfigSpec.scala @@ -17,13 +17,9 @@ abstract class MailboxSpec extends AkkaSpec with BeforeAndAfterAll with BeforeAn val q = factory(config) ensureInitialMailboxState(config, q) - implicit val within = 1 second + val f = spawn { q.dequeue } - val f = spawn { - q.dequeue - } - - f.await.resultOrException must be === Some(null) + Await.result(f, 1 second) must be(null) } "create a bounded mailbox with 10 capacity and with push timeout" in { @@ -61,13 +57,13 @@ abstract class MailboxSpec extends AkkaSpec with BeforeAndAfterAll with BeforeAn } //CANDIDATE FOR TESTKIT - def spawn[T <: AnyRef](fun: ⇒ T)(implicit within: Duration): Future[T] = { - val result = new DefaultPromise[T](within.length, within.unit) + def spawn[T <: AnyRef](fun: ⇒ T): Future[T] = { + val result = Promise[T]() val t = new Thread(new Runnable { def run = try { - result.completeWithResult(fun) + result.success(fun) } catch { - case e: Throwable ⇒ result.completeWithException(e) + case e: Throwable ⇒ result.failure(e) } }) t.start @@ -119,8 +115,8 @@ abstract class MailboxSpec extends AkkaSpec with BeforeAndAfterAll with BeforeAn val consumers = for (i ← (1 to 4).toList) yield createConsumer - val ps = producers.map(_.await.resultOrException.get) - val cs = consumers.map(_.await.resultOrException.get) + val ps = producers.map(Await.result(_, within)) + val cs = consumers.map(Await.result(_, within)) ps.map(_.size).sum must be === totalMessages //Must have produced 1000 messages cs.map(_.size).sum must be === totalMessages //Must have consumed all produced messages diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/PriorityDispatcherSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/PriorityDispatcherSpec.scala index f332f18030..ccc632c6be 100644 
--- a/akka-actor-tests/src/test/scala/akka/dispatch/PriorityDispatcherSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/dispatch/PriorityDispatcherSpec.scala @@ -43,7 +43,7 @@ class PriorityDispatcherSpec extends AkkaSpec with DefaultTimeout { actor.resume //Signal the actor to start treating it's message backlog - actor.?('Result).as[List[Int]].get must be === (msgs.reverse) + Await.result(actor.?('Result).mapTo[List[Int]], timeout.duration) must be === msgs.reverse } } diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/PromiseStreamSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/PromiseStreamSpec.scala index 724beba6bb..e41dc9c4cd 100644 --- a/akka-actor-tests/src/test/scala/akka/dispatch/PromiseStreamSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/dispatch/PromiseStreamSpec.scala @@ -21,9 +21,9 @@ class PromiseStreamSpec extends AkkaSpec with DefaultTimeout { b << q c << q() } - assert(a.get === 1) - assert(b.get === 2) - assert(c.get === 3) + assert(Await.result(a, timeout.duration) === 1) + assert(Await.result(b, timeout.duration) === 2) + assert(Await.result(c, timeout.duration) === 3) } "pend" in { @@ -35,43 +35,9 @@ class PromiseStreamSpec extends AkkaSpec with DefaultTimeout { c << q } flow { q <<< List(1, 2, 3) } - assert(a.get === 1) - assert(b.get === 2) - assert(c.get === 3) - } - - "timeout" in { - val a, c = Promise[Int]() - val b = Promise[Int](0) - val q = PromiseStream[Int](1000) - flow { - a << q() - b << q() - c << q() - } - Thread.sleep(10) - flow { - q << (1, 2) - q << 3 - } - assert(a.get === 1) - intercept[FutureTimeoutException] { b.get } - assert(c.get === 3) - } - - "timeout again" in { - val q = PromiseStream[Int](500) - val a = q.dequeue() - val b = q.dequeue() - q += 1 - Thread.sleep(500) - q += (2, 3) - val c = q.dequeue() - val d = q.dequeue() - assert(a.get === 1) - intercept[FutureTimeoutException] { b.get } - assert(c.get === 2) - assert(d.get === 3) + assert(Await.result(a, timeout.duration) === 1) + 
assert(Await.result(b, timeout.duration) === 2) + assert(Await.result(c, timeout.duration) === 3) } "pend again" in { @@ -88,10 +54,10 @@ class PromiseStreamSpec extends AkkaSpec with DefaultTimeout { c << q1 d << q1 } - assert(a.get === 1) - assert(b.get === 2) - assert(c.get === 3) - assert(d.get === 4) + assert(Await.result(a, timeout.duration) === 1) + assert(Await.result(b, timeout.duration) === 2) + assert(Await.result(c, timeout.duration) === 3) + assert(Await.result(d, timeout.duration) === 4) } "enque" in { @@ -105,10 +71,10 @@ class PromiseStreamSpec extends AkkaSpec with DefaultTimeout { } q ++= List(1, 2, 3, 4) - assert(a.get === 1) - assert(b.get === 2) - assert(c.get === 3) - assert(d.get === 4) + assert(Await.result(a, timeout.duration) === 1) + assert(Await.result(b, timeout.duration) === 2) + assert(Await.result(c, timeout.duration) === 3) + assert(Await.result(d, timeout.duration) === 4) } "map" in { @@ -124,9 +90,9 @@ class PromiseStreamSpec extends AkkaSpec with DefaultTimeout { flow { qs << ("Hello", "World!", "Test") } - assert(a.get === 5) - assert(b.get === "World!") - assert(c.get === 4) + assert(Await.result(a, timeout.duration) === 5) + assert(Await.result(b, timeout.duration) === "World!") + assert(Await.result(c, timeout.duration) === 4) } "not fail under concurrent stress" in { @@ -162,8 +128,7 @@ class PromiseStreamSpec extends AkkaSpec with DefaultTimeout { } } - val result = future.get - assert(result === (1L to 100000L).sum) + assert(Await.result(future, timeout.duration) === (1L to 100000L).sum) } } } diff --git a/akka-actor-tests/src/test/scala/akka/event/EventBusSpec.scala b/akka-actor-tests/src/test/scala/akka/event/EventBusSpec.scala index 5923e84305..fd7fd4c1b0 100644 --- a/akka-actor-tests/src/test/scala/akka/event/EventBusSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/event/EventBusSpec.scala @@ -8,7 +8,7 @@ import org.scalatest.BeforeAndAfterEach import akka.testkit._ import akka.util.duration._ import 
java.util.concurrent.atomic._ -import akka.actor.{ Props, Actor, ActorRef } +import akka.actor.{ Props, Actor, ActorRef, ActorSystem } import java.util.Comparator import akka.japi.{ Procedure, Function } @@ -33,7 +33,7 @@ abstract class EventBusSpec(busName: String) extends AkkaSpec with BeforeAndAfte def classifierFor(event: BusType#Event): BusType#Classifier - def disposeSubscriber(subscriber: BusType#Subscriber): Unit + def disposeSubscriber(system: ActorSystem, subscriber: BusType#Subscriber): Unit busName must { @@ -58,7 +58,7 @@ abstract class EventBusSpec(busName: String) extends AkkaSpec with BeforeAndAfte "not allow to unsubscribe non-existing subscriber" in { val sub = createNewSubscriber() bus.unsubscribe(sub, classifier) must be === false - disposeSubscriber(sub) + disposeSubscriber(system, sub) } "not allow for the same subscriber to subscribe to the same channel twice" in { @@ -80,7 +80,7 @@ abstract class EventBusSpec(busName: String) extends AkkaSpec with BeforeAndAfte subscribers.zip(classifiers) forall { case (s, c) ⇒ bus.subscribe(s, c) } must be === true subscribers.zip(classifiers) forall { case (s, c) ⇒ bus.unsubscribe(s, c) } must be === true - subscribers foreach disposeSubscriber + subscribers foreach (disposeSubscriber(system, _)) } "publishing events without any subscribers shouldn't be a problem" in { @@ -113,7 +113,7 @@ abstract class EventBusSpec(busName: String) extends AkkaSpec with BeforeAndAfte subscribers foreach { s ⇒ bus.subscribe(s, classifier) must be === true } bus.publish(event) range foreach { _ ⇒ expectMsg(event) } - subscribers foreach { s ⇒ bus.unsubscribe(s, classifier) must be === true; disposeSubscriber(s) } + subscribers foreach { s ⇒ bus.unsubscribe(s, classifier) must be === true; disposeSubscriber(system, s) } } "not publish the given event to any other subscribers than the intended ones" in { @@ -136,7 +136,7 @@ abstract class EventBusSpec(busName: String) extends AkkaSpec with BeforeAndAfte } "cleanup 
subscriber" in { - disposeSubscriber(subscriber) + disposeSubscriber(system, subscriber) } } } @@ -165,7 +165,7 @@ class ActorEventBusSpec extends EventBusSpec("ActorEventBus") { def classifierFor(event: BusType#Event) = event.toString - def disposeSubscriber(subscriber: BusType#Subscriber): Unit = subscriber.stop() + def disposeSubscriber(system: ActorSystem, subscriber: BusType#Subscriber): Unit = system.stop(subscriber) } object ScanningEventBusSpec { @@ -194,7 +194,7 @@ class ScanningEventBusSpec extends EventBusSpec("ScanningEventBus") { def classifierFor(event: BusType#Event) = event.toString - def disposeSubscriber(subscriber: BusType#Subscriber): Unit = () + def disposeSubscriber(system: ActorSystem, subscriber: BusType#Subscriber): Unit = () } object LookupEventBusSpec { @@ -219,5 +219,5 @@ class LookupEventBusSpec extends EventBusSpec("LookupEventBus") { def classifierFor(event: BusType#Event) = event.toString - def disposeSubscriber(subscriber: BusType#Subscriber): Unit = () + def disposeSubscriber(system: ActorSystem, subscriber: BusType#Subscriber): Unit = () } diff --git a/akka-actor-tests/src/test/scala/akka/event/LoggingReceiveSpec.scala b/akka-actor-tests/src/test/scala/akka/event/LoggingReceiveSpec.scala index 6427997b78..77a815f455 100644 --- a/akka-actor-tests/src/test/scala/akka/event/LoggingReceiveSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/event/LoggingReceiveSpec.scala @@ -52,9 +52,9 @@ class LoggingReceiveSpec extends WordSpec with BeforeAndAfterEach with BeforeAnd } override def afterAll { - appLogging.stop() - appAuto.stop() - appLifecycle.stop() + appLogging.shutdown() + appAuto.shutdown() + appLifecycle.shutdown() } "A LoggingReceive" must { @@ -201,7 +201,7 @@ class LoggingReceiveSpec extends WordSpec with BeforeAndAfterEach with BeforeAnd assert(set == Set(1, 2, 3), set + " was not Set(1, 2, 3)") } - supervisor.stop() + system.stop(supervisor) expectMsg(Logging.Debug(sname, "stopping")) expectMsg(Logging.Debug(aname, 
"stopped")) expectMsg(Logging.Debug(sname, "stopped")) diff --git a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellLatencyPerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellLatencyPerformanceSpec.scala index c1de7702e3..ace20bb662 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellLatencyPerformanceSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellLatencyPerformanceSpec.scala @@ -75,7 +75,7 @@ class TellLatencyPerformanceSpec extends PerformanceSpec { ok must be(true) logMeasurement(numberOfClients, durationNs, stat) } - clients.foreach(_.stop()) + clients.foreach(system.stop(_)) } } diff --git a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughput10000PerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughput10000PerformanceSpec.scala index 29109f8472..4541c093ca 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughput10000PerformanceSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughput10000PerformanceSpec.scala @@ -173,8 +173,8 @@ class TellThroughput10000PerformanceSpec extends PerformanceSpec { ok must be(true) logMeasurement(numberOfClients, durationNs, repeat) } - clients.foreach(_.stop()) - destinations.foreach(_.stop()) + clients.foreach(system.stop(_)) + destinations.foreach(system.stop(_)) } } diff --git a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputComputationPerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputComputationPerformanceSpec.scala index 6a20f982dd..d9f6988231 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputComputationPerformanceSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputComputationPerformanceSpec.scala @@ -147,8 +147,8 @@ class 
TellThroughputComputationPerformanceSpec extends PerformanceSpec { ok must be(true) logMeasurement(numberOfClients, durationNs, repeat) } - clients.foreach(_.stop()) - destinations.foreach(_.stop()) + clients.foreach(system.stop(_)) + destinations.foreach(system.stop(_)) } } diff --git a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputPerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputPerformanceSpec.scala index 111cc8fc6a..a1c9d1c271 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputPerformanceSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputPerformanceSpec.scala @@ -78,8 +78,8 @@ class TellThroughputPerformanceSpec extends PerformanceSpec { ok must be(true) logMeasurement(numberOfClients, durationNs, repeat) } - clients.foreach(_.stop()) - destinations.foreach(_.stop()) + clients.foreach(system.stop(_)) + destinations.foreach(system.stop(_)) } } diff --git a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputSeparateDispatchersPerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputSeparateDispatchersPerformanceSpec.scala index ca471b2222..41a969badc 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputSeparateDispatchersPerformanceSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputSeparateDispatchersPerformanceSpec.scala @@ -159,8 +159,8 @@ class TellThroughputSeparateDispatchersPerformanceSpec extends PerformanceSpec { ok must be(true) logMeasurement(numberOfClients, durationNs, repeat) } - clients.foreach(_.stop()) - destinations.foreach(_.stop()) + clients.foreach(system.stop(_)) + destinations.foreach(system.stop(_)) } } diff --git a/akka-actor-tests/src/test/scala/akka/performance/trading/system/MatchingEngine.scala 
b/akka-actor-tests/src/test/scala/akka/performance/trading/system/MatchingEngine.scala index cabf890488..cf5142f0ba 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/trading/system/MatchingEngine.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/trading/system/MatchingEngine.scala @@ -2,9 +2,6 @@ package akka.performance.trading.system import akka.performance.trading.domain._ import akka.actor._ -import akka.dispatch.Future -import akka.dispatch.FutureTimeoutException -import akka.dispatch.MessageDispatcher trait MatchingEngine { val meId: String diff --git a/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingLatencyPerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingLatencyPerformanceSpec.scala index 6470f6c0ba..f86987270a 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingLatencyPerformanceSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingLatencyPerformanceSpec.scala @@ -108,7 +108,7 @@ class TradingLatencyPerformanceSpec extends PerformanceSpec { } logMeasurement(numberOfClients, durationNs, stat) } - clients.foreach(_.stop()) + clients.foreach(system.stop(_)) } } diff --git a/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingThroughputPerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingThroughputPerformanceSpec.scala index aca85b8d3d..88a9ce21a0 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingThroughputPerformanceSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingThroughputPerformanceSpec.scala @@ -105,7 +105,7 @@ class TradingThroughputPerformanceSpec extends PerformanceSpec { } logMeasurement(numberOfClients, durationNs, totalNumberOfOrders) } - clients.foreach(_.stop()) + clients.foreach(system.stop(_)) } } diff --git 
a/akka-actor-tests/src/test/scala/akka/routing/ActorPoolSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/ActorPoolSpec.scala index 1893732686..c87e4aeb89 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/ActorPoolSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/ActorPoolSpec.scala @@ -1,11 +1,11 @@ package akka.routing -import akka.dispatch.{ KeptPromise, Future } import akka.actor._ import akka.testkit._ import akka.util.duration._ import java.util.concurrent.atomic.{ AtomicBoolean, AtomicInteger } import akka.testkit.AkkaSpec +import akka.dispatch.{ Await, Promise, Future } object ActorPoolSpec { @@ -17,7 +17,7 @@ object ActorPoolSpec { import TypedActor.dispatcher def sq(x: Int, sleep: Long): Future[Int] = { if (sleep > 0) Thread.sleep(sleep) - new KeptPromise(Right(x * x)) + Promise.successful(x * x) } } @@ -47,7 +47,7 @@ class TypedActorPoolSpec extends AkkaSpec with DefaultTimeout { val results = for (i ← 1 to 100) yield (i, pool.sq(i, 0)) for ((i, r) ← results) - r.get must equal(i * i) + Await.result(r, timeout.duration) must equal(i * i) ta.stop(pool) } @@ -97,9 +97,9 @@ class ActorPoolSpec extends AkkaSpec with DefaultTimeout { count.get must be(2) - (pool ? ActorPool.Stat).as[ActorPool.Stats].get.size must be(2) + Await.result((pool ? ActorPool.Stat).mapTo[ActorPool.Stats], timeout.duration).size must be(2) - pool.stop() + system.stop(pool) } "pass ticket #705" in { @@ -125,11 +125,11 @@ class ActorPoolSpec extends AkkaSpec with DefaultTimeout { }).withFaultHandler(faultHandler)) try { - (for (count ← 1 to 500) yield pool.?("Test", 20000)) foreach { - _.await.resultOrException.get must be("Response") + (for (count ← 1 to 500) yield pool.?("Test", 20 seconds)) foreach { + Await.result(_, 20 seconds) must be("Response") } } finally { - pool.stop() + system.stop(pool) } } @@ -163,7 +163,7 @@ class ActorPoolSpec extends AkkaSpec with DefaultTimeout { pool ! 1 - (pool ? 
ActorPool.Stat).as[ActorPool.Stats].get.size must be(2) + Await.result((pool ? ActorPool.Stat).mapTo[ActorPool.Stats], timeout.duration).size must be(2) var loops = 0 def loop(t: Int) = { @@ -183,7 +183,7 @@ class ActorPoolSpec extends AkkaSpec with DefaultTimeout { latch.await count.get must be(loops) - (pool ? ActorPool.Stat).as[ActorPool.Stats].get.size must be(2) + Await.result((pool ? ActorPool.Stat).mapTo[ActorPool.Stats], timeout.duration).size must be(2) // a whole bunch should max it out @@ -192,9 +192,9 @@ class ActorPoolSpec extends AkkaSpec with DefaultTimeout { latch.await count.get must be(loops) - (pool ? ActorPool.Stat).as[ActorPool.Stats].get.size must be(4) + Await.result((pool ? ActorPool.Stat).mapTo[ActorPool.Stats], timeout.duration).size must be(4) - pool.stop() + system.stop(pool) } "grow as needed under mailbox pressure" in { @@ -239,7 +239,7 @@ class ActorPoolSpec extends AkkaSpec with DefaultTimeout { latch.await count.get must be(loops) - (pool ? ActorPool.Stat).as[ActorPool.Stats].get.size must be(2) + Await.result((pool ? ActorPool.Stat).mapTo[ActorPool.Stats], timeout.duration).size must be(2) // send a bunch over the threshold and observe an increment loops = 15 @@ -248,9 +248,9 @@ class ActorPoolSpec extends AkkaSpec with DefaultTimeout { latch.await(10 seconds) count.get must be(loops) - (pool ? ActorPool.Stat).as[ActorPool.Stats].get.size must be >= (3) + Await.result((pool ? 
ActorPool.Stat).mapTo[ActorPool.Stats], timeout.duration).size must be >= (3) - pool.stop() + system.stop(pool) } "round robin" in { @@ -281,7 +281,7 @@ class ActorPoolSpec extends AkkaSpec with DefaultTimeout { latch1.await delegates.size must be(1) - pool1.stop() + system.stop(pool1) val latch2 = TestLatch(2) delegates.clear() @@ -309,7 +309,7 @@ class ActorPoolSpec extends AkkaSpec with DefaultTimeout { latch2.await delegates.size must be(2) - pool2.stop() + system.stop(pool2) } "backoff" in { @@ -342,7 +342,7 @@ class ActorPoolSpec extends AkkaSpec with DefaultTimeout { (5 millis).dilated.sleep - val z = (pool ? ActorPool.Stat).as[ActorPool.Stats].get.size + val z = Await.result((pool ? ActorPool.Stat).mapTo[ActorPool.Stats], timeout.duration).size z must be >= (2) @@ -353,9 +353,9 @@ class ActorPoolSpec extends AkkaSpec with DefaultTimeout { (500 millis).dilated.sleep } - (pool ? ActorPool.Stat).as[ActorPool.Stats].get.size must be <= (z) + Await.result((pool ? ActorPool.Stat).mapTo[ActorPool.Stats], timeout.duration).size must be <= (z) - pool.stop() + system.stop(pool) } } } diff --git a/akka-actor-tests/src/test/scala/akka/routing/ConfiguredLocalRoutingSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/ConfiguredLocalRoutingSpec.scala index 0b6cdae645..45e86fc387 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/ConfiguredLocalRoutingSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/ConfiguredLocalRoutingSpec.scala @@ -6,6 +6,7 @@ import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.{ CountDownLatch, TimeUnit } import akka.testkit._ import akka.util.duration._ +import akka.dispatch.Await @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class ConfiguredLocalRoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { @@ -49,7 +50,7 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec with DefaultTimeout with Impli actor ! 
"hello" helloLatch.await(5, TimeUnit.SECONDS) must be(true) - actor.stop() + system.stop(actor) stopLatch.await(5, TimeUnit.SECONDS) must be(true) } @@ -74,7 +75,7 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec with DefaultTimeout with Impli for (i ← 0 until iterationCount) { for (k ← 0 until connectionCount) { - val id = (actor ? "hit").as[Int].getOrElse(fail("No id returned by actor")) + val id = Await.result((actor ? "hit").mapTo[Int], timeout.duration) replies = replies + (id -> (replies(id) + 1)) } } @@ -104,7 +105,7 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec with DefaultTimeout with Impli actor ! Broadcast("hello") helloLatch.await(5, TimeUnit.SECONDS) must be(true) - actor.stop() + system.stop(actor) stopLatch.await(5, TimeUnit.SECONDS) must be(true) } } @@ -134,7 +135,7 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec with DefaultTimeout with Impli for (i ← 1 to 5) expectMsg("world") } - actor.stop() + system.stop(actor) stopLatch.await(5, TimeUnit.SECONDS) must be(true) } @@ -159,7 +160,7 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec with DefaultTimeout with Impli for (i ← 0 until iterationCount) { for (k ← 0 until connectionCount) { - val id = (actor ? "hit").as[Int].getOrElse(fail("No id returned by actor")) + val id = Await.result((actor ? "hit").mapTo[Int], timeout.duration) replies = replies + (id -> (replies(id) + 1)) } } @@ -190,7 +191,7 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec with DefaultTimeout with Impli actor ! 
Broadcast("hello") helloLatch.await(5, TimeUnit.SECONDS) must be(true) - actor.stop() + system.stop(actor) stopLatch.await(5, TimeUnit.SECONDS) must be(true) } } diff --git a/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala index e6e0f1c898..6f08ba6dcf 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala @@ -9,6 +9,7 @@ import collection.mutable.LinkedList import java.util.concurrent.{ CountDownLatch, TimeUnit } import akka.testkit._ import akka.util.duration._ +import akka.dispatch.Await object RoutingSpec { @@ -43,7 +44,7 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { val c1, c2 = expectMsgType[ActorRef] watch(router) watch(c2) - c2.stop() + system.stop(c2) expectMsg(Terminated(c2)) // it might take a while until the Router has actually processed the Terminated message awaitCond { @@ -54,7 +55,7 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { } res == Seq(c1, c1) } - c1.stop() + system.stop(c1) expectMsg(Terminated(router)) } @@ -317,15 +318,15 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender { routedActor ! Broadcast(Stop(Some(1))) shutdownLatch.await - (routedActor ? Broadcast(0)).as[Int].get must be(22) + Await.result(routedActor ? 
Broadcast(0), timeout.duration) must be(22) } case class Stop(id: Option[Int] = None) def newActor(id: Int, shudownLatch: Option[TestLatch] = None) = system.actorOf(Props(new Actor { def receive = { - case Stop(None) ⇒ self.stop() - case Stop(Some(_id)) if (_id == id) ⇒ self.stop() + case Stop(None) ⇒ context.stop(self) + case Stop(Some(_id)) if (_id == id) ⇒ context.stop(self) case _id: Int if (_id == id) ⇒ case x ⇒ { Thread sleep 100 * id diff --git a/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala b/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala index 3b0b6ea5bc..cceb608452 100644 --- a/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala @@ -106,7 +106,7 @@ class SerializeSpec extends AkkaSpec(SerializeSpec.serializationConf) { })) a ! new ObjectOutputStream(new ByteArrayOutputStream()) expectMsg("pass") - a.stop() + system.stop(a) } "serialize DeadLetterActorRef" in { @@ -124,7 +124,7 @@ class SerializeSpec extends AkkaSpec(SerializeSpec.serializationConf) { (deadLetters eq a.deadLetters) must be(true) } } finally { - a.stop() + a.shutdown() } } } diff --git a/akka-actor-tests/src/test/scala/akka/ticket/Ticket703Spec.scala b/akka-actor-tests/src/test/scala/akka/ticket/Ticket703Spec.scala index 09b5f5e24c..f51beb7617 100644 --- a/akka-actor-tests/src/test/scala/akka/ticket/Ticket703Spec.scala +++ b/akka-actor-tests/src/test/scala/akka/ticket/Ticket703Spec.scala @@ -3,6 +3,8 @@ package akka.ticket import akka.actor._ import akka.routing._ import akka.testkit.AkkaSpec +import akka.dispatch.Await +import akka.util.duration._ @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class Ticket703Spec extends AkkaSpec { @@ -26,7 +28,7 @@ class Ticket703Spec extends AkkaSpec { } })) }).withFaultHandler(OneForOneStrategy(List(classOf[Exception]), 5, 1000))) - (actorPool.?("Ping", 10000)).await.result must be === 
Some("Response") + Await.result(actorPool.?("Ping", 10000), 10 seconds) must be === "Response" } } } diff --git a/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala b/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala index 6a291872b8..4a04a648bf 100644 --- a/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala @@ -38,6 +38,16 @@ class DurationSpec extends WordSpec with MustMatchers { (inf - minf) must be(inf) (minf - inf) must be(minf) (minf + minf) must be(minf) + assert(inf == inf) + assert(minf == minf) + inf.compareTo(inf) must be(0) + inf.compareTo(one) must be(1) + minf.compareTo(minf) must be(0) + minf.compareTo(one) must be(-1) + assert(inf != minf) + assert(minf != inf) + assert(one != inf) + assert(minf != one) } "support fromNow" in { diff --git a/akka-actor-tests/src/test/scala/akka/util/IndexSpec.scala b/akka-actor-tests/src/test/scala/akka/util/IndexSpec.scala index 271f8a9ac0..9f869fe907 100644 --- a/akka-actor-tests/src/test/scala/akka/util/IndexSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/util/IndexSpec.scala @@ -4,7 +4,7 @@ package akka.util import org.scalatest.matchers.MustMatchers -import akka.dispatch.Future +import akka.dispatch.{ Future, Await } import akka.testkit.AkkaSpec import scala.util.Random import akka.testkit.DefaultTimeout @@ -125,8 +125,7 @@ class IndexSpec extends AkkaSpec with MustMatchers with DefaultTimeout { val tasks = List.fill(nrOfTasks)(executeRandomTask) - tasks.foreach(_.await) - tasks.foreach(_.exception.map(throw _)) + tasks.foreach(Await.result(_, timeout.duration)) } } } \ No newline at end of file diff --git a/akka-actor/src/main/java/com/typesafe/config/Config.java b/akka-actor/src/main/java/com/typesafe/config/Config.java index 44eebe1158..d3496c73ef 100644 --- a/akka-actor/src/main/java/com/typesafe/config/Config.java +++ b/akka-actor/src/main/java/com/typesafe/config/Config.java @@ -21,7 +21,7 @@ import 
java.util.Set; * is a key in a JSON object; it's just a string that's the key in a map. A * "path" is a parseable expression with a syntax and it refers to a series of * keys. Path expressions are described in the spec for + * href="https://github.com/typesafehub/config/blob/master/HOCON.md">spec for * Human-Optimized Config Object Notation. In brief, a path is * period-separated so "a.b.c" looks for key c in object b in object a in the * root object. Sometimes double quotes are needed around special characters in @@ -97,7 +97,7 @@ public interface Config extends ConfigMergeable { /** * Returns a replacement config with all substitutions (the * ${foo.bar} syntax, see the + * href="https://github.com/typesafehub/config/blob/master/HOCON.md">the * spec) resolved. Substitutions are looked up using this * Config as the root object, that is, a substitution * ${foo.bar} will be replaced with the result of @@ -395,7 +395,8 @@ public interface Config extends ConfigMergeable { * Gets a value as a size in bytes (parses special strings like "128M"). If * the value is already a number, then it's left alone; if it's a string, * it's parsed understanding unit suffixes such as "128K", as documented in - * the the + * the the * spec. * * @param path @@ -414,9 +415,9 @@ public interface Config extends ConfigMergeable { * Get value as a duration in milliseconds. If the value is already a * number, then it's left alone; if it's a string, it's parsed understanding * units suffixes like "10m" or "5ns" as documented in the the + * href="https://github.com/typesafehub/config/blob/master/HOCON.md">the * spec. 
- * + * * @param path * path expression * @return the duration value at the requested path, in milliseconds diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigException.java b/akka-actor/src/main/java/com/typesafe/config/ConfigException.java index 8c23d09533..b8dcb8ca00 100644 --- a/akka-actor/src/main/java/com/typesafe/config/ConfigException.java +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigException.java @@ -5,7 +5,8 @@ package com.typesafe.config; /** - * All exceptions thrown by the library are subclasses of ConfigException. + * All exceptions thrown by the library are subclasses of + * ConfigException. */ public abstract class ConfigException extends RuntimeException { private static final long serialVersionUID = 1L; @@ -338,6 +339,9 @@ public abstract class ConfigException extends RuntimeException { sb.append(p.problem()); sb.append(", "); } + if (sb.length() == 0) + throw new ConfigException.BugOrBroken( + "ValidationFailed must have a non-empty list of problems"); sb.setLength(sb.length() - 2); // chop comma and space return sb.toString(); diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigFactory.java b/akka-actor/src/main/java/com/typesafe/config/ConfigFactory.java index dc851d7f2b..df5e762a5c 100644 --- a/akka-actor/src/main/java/com/typesafe/config/ConfigFactory.java +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigFactory.java @@ -295,18 +295,18 @@ public final class ConfigFactory { /** * Converts a Java {@link java.util.Properties} object to a * {@link ConfigObject} using the rules documented in the HOCON + * href="https://github.com/typesafehub/config/blob/master/HOCON.md">HOCON * spec. The keys in the Properties object are split on the * period character '.' and treated as paths. The values will all end up as * string values. If you have both "a=foo" and "a.b=bar" in your properties * file, so "a" is both the object containing "b" and the string "foo", then * the string value is dropped. - * + * *

* If you want to have System.getProperties() as a * ConfigObject, it's better to use the {@link #systemProperties()} method * which returns a cached global singleton. - * + * * @param properties * a Java Properties object * @param options diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigMergeable.java b/akka-actor/src/main/java/com/typesafe/config/ConfigMergeable.java index c4280e93ea..1214db8c44 100644 --- a/akka-actor/src/main/java/com/typesafe/config/ConfigMergeable.java +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigMergeable.java @@ -24,19 +24,19 @@ public interface ConfigMergeable { * method (they need to merge the fallback keys into themselves). All other * values just return the original value, since they automatically override * any fallback. - * + * *

* The semantics of merging are described in the spec for - * HOCON. - * + * href="https://github.com/typesafehub/config/blob/master/HOCON.md">spec + * for HOCON. + * *

* Note that objects do not merge "across" non-objects; if you write * object.withFallback(nonObject).withFallback(otherObject), * then otherObject will simply be ignored. This is an * intentional part of how merging works. Both non-objects, and any object * which has fallen back to a non-object, block subsequent fallbacks. - * + * * @param other * an object whose keys should be used if the keys are not * present in this one diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigObject.java b/akka-actor/src/main/java/com/typesafe/config/ConfigObject.java index 54cce1c39f..bb4d14da89 100644 --- a/akka-actor/src/main/java/com/typesafe/config/ConfigObject.java +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigObject.java @@ -8,38 +8,38 @@ import java.util.Map; /** * Subtype of {@link ConfigValue} representing an object (dictionary, map) * value, as in JSON's { "a" : 42 } syntax. - * + * *

* {@code ConfigObject} implements {@code java.util.Map} so * you can use it like a regular Java map. Or call {@link #unwrapped()} to * unwrap the map to a map with plain Java values rather than * {@code ConfigValue}. - * + * *

* Like all {@link ConfigValue} subtypes, {@code ConfigObject} is immutable. * This makes it threadsafe and you never have to create "defensive copies." The * mutator methods from {@link java.util.Map} all throw * {@link java.lang.UnsupportedOperationException}. - * + * *

* The {@link ConfigValue#valueType} method on an object returns * {@link ConfigValueType#OBJECT}. - * + * *

* In most cases you want to use the {@link Config} interface rather than this * one. Call {@link #toConfig()} to convert a {@code ConfigObject} to a * {@code Config}. - * + * *

* The API for a {@code ConfigObject} is in terms of keys, while the API for a * {@link Config} is in terms of path expressions. Conceptually, * {@code ConfigObject} is a tree of maps from keys to values, while a - * {@code ConfigObject} is a one-level map from paths to values. - * + * {@code Config} is a one-level map from paths to values. + * *

* Use {@link ConfigUtil#joinPath} and {@link ConfigUtil#splitPath} to convert * between path expressions and individual path elements (keys). - * + * *

* A {@code ConfigObject} may contain null values, which will have * {@link ConfigValue#valueType()} equal to {@link ConfigValueType#NULL}. If @@ -47,7 +47,7 @@ import java.util.Map; * file (or wherever this value tree came from). If {@code get()} returns a * {@link ConfigValue} with type {@code ConfigValueType#NULL} then the key was * set to null explicitly in the config file. - * + * *

* Do not implement {@code ConfigObject}; it should only be implemented * by the config library. Arbitrary implementations will not work because the diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigOrigin.java b/akka-actor/src/main/java/com/typesafe/config/ConfigOrigin.java index 013d91eb9e..fbc1fe17c3 100644 --- a/akka-actor/src/main/java/com/typesafe/config/ConfigOrigin.java +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigOrigin.java @@ -4,6 +4,7 @@ package com.typesafe.config; import java.net.URL; +import java.util.List; /** @@ -12,13 +13,13 @@ import java.net.URL; * with {@link ConfigValue#origin}. Exceptions may have an origin, see * {@link ConfigException#origin}, but be careful because * ConfigException.origin() may return null. - * + * *

* It's best to use this interface only for debugging; its accuracy is * "best effort" rather than guaranteed, and a potentially-noticeable amount of * memory could probably be saved if origins were not kept around, so in the * future there might be some option to discard origins. - * + * *

* Do not implement this interface; it should only be implemented by * the config library. Arbitrary implementations will not work because the @@ -66,4 +67,16 @@ public interface ConfigOrigin { * @return line number or -1 if none is available */ public int lineNumber(); + + /** + * Returns any comments that appeared to "go with" this place in the file. + * Often an empty list, but never null. The details of this are subject to + * change, but at the moment comments that are immediately before an array + * element or object field, with no blank line after the comment, "go with" + * that element or field. + * + * @return any comments that seemed to "go with" this origin, empty list if + * none + */ + public List<String> comments(); } diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigResolveOptions.java b/akka-actor/src/main/java/com/typesafe/config/ConfigResolveOptions.java index 37c7b36d5b..3adb589f1d 100644 --- a/akka-actor/src/main/java/com/typesafe/config/ConfigResolveOptions.java +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigResolveOptions.java @@ -6,11 +6,13 @@ package com.typesafe.config; /** * A set of options related to resolving substitutions. Substitutions use the * ${foo.bar} syntax and are documented in the HOCON spec. + * href="https://github.com/typesafehub/config/blob/master/HOCON.md">HOCON + * spec. *

* This object is immutable, so the "setters" return a new object. *

* Here is an example of creating a custom {@code ConfigResolveOptions}: + * *

  *     ConfigResolveOptions options = ConfigResolveOptions.defaults()
  *         .setUseSystemEnvironment(false)
diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigSyntax.java b/akka-actor/src/main/java/com/typesafe/config/ConfigSyntax.java
index 58e7fc020b..7626a92e6d 100644
--- a/akka-actor/src/main/java/com/typesafe/config/ConfigSyntax.java
+++ b/akka-actor/src/main/java/com/typesafe/config/ConfigSyntax.java
@@ -5,8 +5,8 @@ package com.typesafe.config;
 
 /**
  * The syntax of a character stream, JSON, HOCON aka
- * ".conf", or HOCON
+ * aka ".conf", or Java properties.
  *
@@ -19,8 +19,8 @@ public enum ConfigSyntax {
     JSON,
     /**
      * The JSON-superset HOCON
-     * format.
+     * href="https://github.com/typesafehub/config/blob/master/HOCON.md"
+     * >HOCON format.
      */
     CONF,
     /**
diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigUtil.java b/akka-actor/src/main/java/com/typesafe/config/ConfigUtil.java
index 1aa463f46c..cc936923fe 100644
--- a/akka-actor/src/main/java/com/typesafe/config/ConfigUtil.java
+++ b/akka-actor/src/main/java/com/typesafe/config/ConfigUtil.java
@@ -4,6 +4,10 @@ import java.util.List;
 
 import com.typesafe.config.impl.ConfigImplUtil;
 
+/**
+ * Contains static utility methods.
+ * 
+ */
 public final class ConfigUtil {
     private ConfigUtil() {
 
@@ -41,7 +45,7 @@ public final class ConfigUtil {
      * elements as needed and then joining them separated by a period. A path
      * expression is usable with a {@link Config}, while individual path
      * elements are usable with a {@link ConfigObject}.
-     * 
+     *
      * @param elements
      *            the keys in the path
      * @return a path expression
@@ -57,7 +61,7 @@ public final class ConfigUtil {
      * and unquoting the individual path elements. A path expression is usable
      * with a {@link Config}, while individual path elements are usable with a
      * {@link ConfigObject}.
-     * 
+     *
      * @param path
      *            a path expression
      * @return the individual keys in the path
diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigValueFactory.java b/akka-actor/src/main/java/com/typesafe/config/ConfigValueFactory.java
index 2f381f9ad1..14c2bff8f7 100644
--- a/akka-actor/src/main/java/com/typesafe/config/ConfigValueFactory.java
+++ b/akka-actor/src/main/java/com/typesafe/config/ConfigValueFactory.java
@@ -8,9 +8,9 @@ import java.util.Map;
 import com.typesafe.config.impl.ConfigImpl;
 
 /**
- * This class holds some static factory methods for building ConfigValue. See
- * also ConfigFactory which has methods for parsing files and certain in-memory
- * data structures.
+ * This class holds some static factory methods for building {@link ConfigValue}
+ * instances. See also {@link ConfigFactory} which has methods for parsing files
+ * and certain in-memory data structures.
  */
 public final class ConfigValueFactory {
     private ConfigValueFactory() {
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigObject.java b/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigObject.java
index 428a7b2b3f..a21ccd81f7 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigObject.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigObject.java
@@ -111,12 +111,12 @@ abstract class AbstractConfigObject extends AbstractConfigValue implements
         return ConfigValueType.OBJECT;
     }
 
-    protected abstract AbstractConfigObject newCopy(ResolveStatus status,
-            boolean ignoresFallbacks);
+    protected abstract AbstractConfigObject newCopy(ResolveStatus status, boolean ignoresFallbacks,
+            ConfigOrigin origin);
 
     @Override
-    protected AbstractConfigObject newCopy(boolean ignoresFallbacks) {
-            return newCopy(resolveStatus(), ignoresFallbacks);
+    protected AbstractConfigObject newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
+        return newCopy(resolveStatus(), ignoresFallbacks, origin);
     }
 
     @Override
@@ -173,7 +173,7 @@ abstract class AbstractConfigObject extends AbstractConfigValue implements
             return new SimpleConfigObject(mergeOrigins(this, fallback), merged, newResolveStatus,
                     newIgnoresFallbacks);
         else if (newResolveStatus != resolveStatus() || newIgnoresFallbacks != ignoresFallbacks())
-            return newCopy(newResolveStatus, newIgnoresFallbacks);
+            return newCopy(newResolveStatus, newIgnoresFallbacks, origin());
         else
             return this;
     }
@@ -234,7 +234,7 @@ abstract class AbstractConfigObject extends AbstractConfigValue implements
             }
         }
         if (changes == null) {
-            return newCopy(newResolveStatus, ignoresFallbacks());
+            return newCopy(newResolveStatus, ignoresFallbacks(), origin());
         } else {
            Map<String, AbstractConfigValue> modified = new HashMap<String, AbstractConfigValue>();
             for (String k : keySet()) {
@@ -306,6 +306,12 @@ abstract class AbstractConfigObject extends AbstractConfigValue implements
                     sb.append("# ");
                     sb.append(v.origin().description());
                     sb.append("\n");
+                    for (String comment : v.origin().comments()) {
+                        indent(sb, indent + 1);
+                        sb.append("# ");
+                        sb.append(comment);
+                        sb.append("\n");
+                    }
                     indent(sb, indent + 1);
                 }
                 v.render(sb, indent + 1, k, formatted);
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java b/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java
index 68ab5cc316..e51f4c6067 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java
@@ -18,14 +18,14 @@ import com.typesafe.config.ConfigValue;
  */
 abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
 
-    final private ConfigOrigin origin;
+    final private SimpleConfigOrigin origin;
 
     AbstractConfigValue(ConfigOrigin origin) {
-        this.origin = origin;
+        this.origin = (SimpleConfigOrigin) origin;
     }
 
     @Override
-    public ConfigOrigin origin() {
+    public SimpleConfigOrigin origin() {
         return this.origin;
     }
 
@@ -76,9 +76,7 @@ abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
         return this;
     }
 
-    protected AbstractConfigValue newCopy(boolean ignoresFallbacks) {
-        return this;
-    }
+    protected abstract AbstractConfigValue newCopy(boolean ignoresFallbacks, ConfigOrigin origin);
 
     // this is virtualized rather than a field because only some subclasses
     // really need to store the boolean, and they may be able to pack it
@@ -105,6 +103,13 @@ abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
         throw badMergeException();
     }
 
+    public AbstractConfigValue withOrigin(ConfigOrigin origin) {
+        if (this.origin == origin)
+            return this;
+        else
+            return newCopy(ignoresFallbacks(), origin);
+    }
+
     @Override
     public AbstractConfigValue withFallback(ConfigMergeable mergeable) {
         if (ignoresFallbacks()) {
@@ -118,7 +123,7 @@ abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
                 AbstractConfigObject fallback = (AbstractConfigObject) other;
                 if (fallback.resolveStatus() == ResolveStatus.RESOLVED && fallback.isEmpty()) {
                     if (fallback.ignoresFallbacks())
-                        return newCopy(true /* ignoresFallbacks */);
+                        return newCopy(true /* ignoresFallbacks */, origin);
                     else
                         return this;
                 } else {
@@ -128,7 +133,7 @@ abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
                 // falling back to a non-object doesn't merge anything, and also
                 // prohibits merging any objects that we fall back to later.
                 // so we have to switch to ignoresFallbacks mode.
-                return newCopy(true /* ignoresFallbacks */);
+                return newCopy(true /* ignoresFallbacks */, origin);
             }
         }
     }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigBoolean.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigBoolean.java
index d45dbd1326..c926c0c942 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigBoolean.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigBoolean.java
@@ -29,4 +29,9 @@ final class ConfigBoolean extends AbstractConfigValue {
     String transformToString() {
         return value ? "true" : "false";
     }
+
+    @Override
+    protected ConfigBoolean newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
+        return new ConfigBoolean(origin, value);
+    }
 }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMerge.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMerge.java
index 9846cc57f2..4cca7834bd 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMerge.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMerge.java
@@ -107,6 +107,11 @@ final class ConfigDelayedMerge extends AbstractConfigValue implements
         return ignoresFallbacks;
     }
 
+    @Override
+    protected AbstractConfigValue newCopy(boolean newIgnoresFallbacks, ConfigOrigin newOrigin) {
+        return new ConfigDelayedMerge(newOrigin, stack, newIgnoresFallbacks);
+    }
+
     @Override
     protected final ConfigDelayedMerge mergedWithTheUnmergeable(Unmergeable fallback) {
         if (ignoresFallbacks)
@@ -196,6 +201,12 @@ final class ConfigDelayedMerge extends AbstractConfigValue implements
                 i += 1;
                 sb.append(v.origin().description());
                 sb.append("\n");
+                for (String comment : v.origin().comments()) {
+                    indent(sb, indent);
+                    sb.append("# ");
+                    sb.append(comment);
+                    sb.append("\n");
+                }
                 indent(sb, indent);
             }
 
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMergeObject.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMergeObject.java
index 5669f62f34..fe970d59c8 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMergeObject.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMergeObject.java
@@ -49,12 +49,12 @@ class ConfigDelayedMergeObject extends AbstractConfigObject implements
     }
 
     @Override
-    protected ConfigDelayedMergeObject newCopy(ResolveStatus status,
-            boolean ignoresFallbacks) {
+    protected ConfigDelayedMergeObject newCopy(ResolveStatus status, boolean ignoresFallbacks,
+            ConfigOrigin origin) {
         if (status != resolveStatus())
             throw new ConfigException.BugOrBroken(
                     "attempt to create resolved ConfigDelayedMergeObject");
-        return new ConfigDelayedMergeObject(origin(), stack, ignoresFallbacks);
+        return new ConfigDelayedMergeObject(origin, stack, ignoresFallbacks);
     }
 
     @Override
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDouble.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDouble.java
index 3317974453..c26d3cd6a9 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDouble.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDouble.java
@@ -43,4 +43,9 @@ final class ConfigDouble extends ConfigNumber {
     protected double doubleValue() {
         return value;
     }
+
+    @Override
+    protected ConfigDouble newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
+        return new ConfigDouble(origin, value, originalText);
+    }
 }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigInt.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigInt.java
index 4ce4a58545..440b5ae8cf 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigInt.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigInt.java
@@ -43,4 +43,9 @@ final class ConfigInt extends ConfigNumber {
     protected double doubleValue() {
         return value;
     }
+
+    @Override
+    protected ConfigInt newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
+        return new ConfigInt(origin, value, originalText);
+    }
 }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigLong.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigLong.java
index feb3897bb3..6a72bc4cab 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigLong.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigLong.java
@@ -43,4 +43,9 @@ final class ConfigLong extends ConfigNumber {
     protected double doubleValue() {
         return value;
     }
+
+    @Override
+    protected ConfigLong newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
+        return new ConfigLong(origin, value, originalText);
+    }
 }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNull.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNull.java
index a45d2dbc40..fbdc21d7a5 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNull.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNull.java
@@ -39,4 +39,9 @@ final class ConfigNull extends AbstractConfigValue {
     protected void render(StringBuilder sb, int indent, boolean formatted) {
         sb.append("null");
     }
+
+    @Override
+    protected ConfigNull newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
+        return new ConfigNull(origin);
+    }
 }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNumber.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNumber.java
index 3c01d9b950..4a6bbd0b15 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNumber.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNumber.java
@@ -11,7 +11,7 @@ abstract class ConfigNumber extends AbstractConfigValue {
     // a sentence) we always have it exactly as the person typed it into the
     // config file. It's purely cosmetic; equals/hashCode don't consider this
     // for example.
-    final private String originalText;
+    final protected String originalText;
 
     protected ConfigNumber(ConfigOrigin origin, String originalText) {
         super(origin);
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigString.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigString.java
index 0d1bc97920..9b41e7f7ab 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigString.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigString.java
@@ -34,4 +34,9 @@ final class ConfigString extends AbstractConfigValue {
     protected void render(StringBuilder sb, int indent, boolean formatted) {
         sb.append(ConfigImplUtil.renderJsonString(value));
     }
+
+    @Override
+    protected ConfigString newCopy(boolean ignoresFallbacks, ConfigOrigin origin) {
+        return new ConfigString(origin, value);
+    }
 }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigSubstitution.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigSubstitution.java
index 9a8590bade..f4441b81a5 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigSubstitution.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigSubstitution.java
@@ -61,8 +61,8 @@ final class ConfigSubstitution extends AbstractConfigValue implements
     }
 
     @Override
-    protected ConfigSubstitution newCopy(boolean ignoresFallbacks) {
-        return new ConfigSubstitution(origin(), pieces, prefixLength, ignoresFallbacks);
+    protected ConfigSubstitution newCopy(boolean ignoresFallbacks, ConfigOrigin newOrigin) {
+        return new ConfigSubstitution(newOrigin, pieces, prefixLength, ignoresFallbacks);
     }
 
     @Override
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Parser.java b/akka-actor/src/main/java/com/typesafe/config/impl/Parser.java
index 6f0de1211c..5df0314fe6 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/Parser.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/Parser.java
@@ -32,9 +32,53 @@ final class Parser {
         return context.parse();
     }
 
+    static private final class TokenWithComments {
+        final Token token;
+        final List<Token> comments;
+
+        TokenWithComments(Token token, List<Token> comments) {
+            this.token = token;
+            this.comments = comments;
+        }
+
+        TokenWithComments(Token token) {
+            this(token, Collections.<Token> emptyList());
+        }
+
+        TokenWithComments prepend(List<Token> earlier) {
+            if (this.comments.isEmpty()) {
+                return new TokenWithComments(token, earlier);
+            } else {
+                List<Token> merged = new ArrayList<Token>();
+                merged.addAll(earlier);
+                merged.addAll(comments);
+                return new TokenWithComments(token, merged);
+            }
+        }
+
+        SimpleConfigOrigin setComments(SimpleConfigOrigin origin) {
+            if (comments.isEmpty()) {
+                return origin;
+            } else {
+                List<String> newComments = new ArrayList<String>();
+                for (Token c : comments) {
+                    newComments.add(Tokens.getCommentText(c));
+                }
+                return origin.setComments(newComments);
+            }
+        }
+
+        @Override
+        public String toString() {
+            // this ends up in user-visible error messages, so we don't want the
+            // comments
+            return token.toString();
+        }
+    }
+
     static private final class ParseContext {
         private int lineNumber;
-        final private Stack<Token> buffer;
+        final private Stack<TokenWithComments> buffer;
         final private Iterator<Token> tokens;
         final private ConfigIncluder includer;
         final private ConfigIncludeContext includeContext;
@@ -50,7 +94,7 @@ final class Parser {
                 Iterator<Token> tokens, ConfigIncluder includer,
                 ConfigIncludeContext includeContext) {
             lineNumber = 1;
-            buffer = new Stack<Token>();
+            buffer = new Stack<TokenWithComments>();
             this.tokens = tokens;
             this.flavor = flavor;
             this.baseOrigin = origin;
@@ -60,14 +104,67 @@ final class Parser {
             this.equalsCount = 0;
         }
 
-        private Token nextToken() {
-            Token t = null;
-            if (buffer.isEmpty()) {
-                t = tokens.next();
-            } else {
-                t = buffer.pop();
+        private void consolidateCommentBlock(Token commentToken) {
+            // a comment block "goes with" the following token
+            // unless it's separated from it by a blank line.
+            // we want to build a list of newline tokens followed
+            // by a non-newline non-comment token; with all comments
+            // associated with that final non-newline non-comment token.
+            List<Token> newlines = new ArrayList<Token>();
+            List<Token> comments = new ArrayList<Token>();
+
+            Token previous = null;
+            Token next = commentToken;
+            while (true) {
+                if (Tokens.isNewline(next)) {
+                    if (previous != null && Tokens.isNewline(previous)) {
+                        // blank line; drop all comments to this point and
+                        // start a new comment block
+                        comments.clear();
+                    }
+                    newlines.add(next);
+                } else if (Tokens.isComment(next)) {
+                    comments.add(next);
+                } else {
+                    // a non-newline non-comment token
+                    break;
+                }
+
+                previous = next;
+                next = tokens.next();
             }
 
+            // put our concluding token in the queue with all the comments
+            // attached
+            buffer.push(new TokenWithComments(next, comments));
+
+            // now put all the newlines back in front of it
+            ListIterator<Token> li = newlines.listIterator(newlines.size());
+            while (li.hasPrevious()) {
+                buffer.push(new TokenWithComments(li.previous()));
+            }
+        }
+
+        private TokenWithComments popToken() {
+            if (buffer.isEmpty()) {
+                Token t = tokens.next();
+                if (Tokens.isComment(t)) {
+                    consolidateCommentBlock(t);
+                    return buffer.pop();
+                } else {
+                    return new TokenWithComments(t);
+                }
+            } else {
+                return buffer.pop();
+            }
+        }
+
+        private TokenWithComments nextToken() {
+            TokenWithComments withComments = null;
+
+            withComments = popToken();
+            Token t = withComments.token;
+
             if (Tokens.isProblem(t)) {
                 ConfigOrigin origin = t.origin();
                 String message = Tokens.getProblemMessage(t);
@@ -79,32 +176,35 @@ final class Parser {
                     message = addKeyName(message);
                 }
                 throw new ConfigException.Parse(origin, message, cause);
-            }
-
-            if (flavor == ConfigSyntax.JSON) {
-                if (Tokens.isUnquotedText(t)) {
-                    throw parseError(addKeyName("Token not allowed in valid JSON: '"
-                            + Tokens.getUnquotedText(t) + "'"));
-                } else if (Tokens.isSubstitution(t)) {
-                    throw parseError(addKeyName("Substitutions (${} syntax) not allowed in JSON"));
+            } else {
+                if (flavor == ConfigSyntax.JSON) {
+                    if (Tokens.isUnquotedText(t)) {
+                        throw parseError(addKeyName("Token not allowed in valid JSON: '"
+                                + Tokens.getUnquotedText(t) + "'"));
+                    } else if (Tokens.isSubstitution(t)) {
+                        throw parseError(addKeyName("Substitutions (${} syntax) not allowed in JSON"));
+                    }
                 }
-            }
 
-            return t;
+                return withComments;
+            }
         }
 
-        private void putBack(Token token) {
+        private void putBack(TokenWithComments token) {
             buffer.push(token);
         }
 
-        private Token nextTokenIgnoringNewline() {
-            Token t = nextToken();
-            while (Tokens.isNewline(t)) {
+        private TokenWithComments nextTokenIgnoringNewline() {
+            TokenWithComments t = nextToken();
+
+            while (Tokens.isNewline(t.token)) {
                 // line number tokens have the line that was _ended_ by the
                 // newline, so we have to add one.
-                lineNumber = t.lineNumber() + 1;
+                lineNumber = t.token.lineNumber() + 1;
+
                 t = nextToken();
             }
+
             return t;
         }
 
@@ -116,8 +216,8 @@ final class Parser {
         // is left just after the comma or the newline.
         private boolean checkElementSeparator() {
             if (flavor == ConfigSyntax.JSON) {
-                Token t = nextTokenIgnoringNewline();
-                if (t == Tokens.COMMA) {
+                TokenWithComments t = nextTokenIgnoringNewline();
+                if (t.token == Tokens.COMMA) {
                     return true;
                 } else {
                     putBack(t);
@@ -125,15 +225,16 @@ final class Parser {
                 }
             } else {
                 boolean sawSeparatorOrNewline = false;
-                Token t = nextToken();
+                TokenWithComments t = nextToken();
                 while (true) {
-                    if (Tokens.isNewline(t)) {
+                    if (Tokens.isNewline(t.token)) {
                         // newline number is the line just ended, so add one
-                        lineNumber = t.lineNumber() + 1;
+                        lineNumber = t.token.lineNumber() + 1;
                         sawSeparatorOrNewline = true;
+
                         // we want to continue to also eat
                         // a comma if there is one.
-                    } else if (t == Tokens.COMMA) {
+                    } else if (t.token == Tokens.COMMA) {
                         return true;
                     } else {
                         // non-newline-or-comma
@@ -154,12 +255,17 @@ final class Parser {
                 return;
 
             List<Token> values = null; // create only if we have value tokens
-            Token t = nextTokenIgnoringNewline(); // ignore a newline up front
-            while (Tokens.isValue(t) || Tokens.isUnquotedText(t)
-                    || Tokens.isSubstitution(t)) {
-                if (values == null)
+            TokenWithComments firstValueWithComments = null;
+            TokenWithComments t = nextTokenIgnoringNewline(); // ignore a
+                                                              // newline up
+                                                              // front
+            while (Tokens.isValue(t.token) || Tokens.isUnquotedText(t.token)
+                    || Tokens.isSubstitution(t.token)) {
+                if (values == null) {
+                    values = new ArrayList<Token>();
-                values.add(t);
+                    firstValueWithComments = t;
+                }
+                values.add(t.token);
                 t = nextToken(); // but don't consolidate across a newline
             }
             // the last one wasn't a value token
@@ -168,9 +274,9 @@ final class Parser {
             if (values == null)
                 return;
 
-            if (values.size() == 1 && Tokens.isValue(values.get(0))) {
+            if (values.size() == 1 && Tokens.isValue(firstValueWithComments.token)) {
                 // a single value token requires no consolidation
-                putBack(values.get(0));
+                putBack(firstValueWithComments);
                 return;
             }
 
@@ -235,7 +341,7 @@ final class Parser {
                         firstOrigin, minimized));
             }
 
-            putBack(consolidated);
+            putBack(new TokenWithComments(consolidated, firstValueWithComments.comments));
         }
 
         private ConfigOrigin lineOrigin() {
@@ -309,17 +415,23 @@ final class Parser {
                 return part + ")";
         }
 
-        private AbstractConfigValue parseValue(Token token) {
-            if (Tokens.isValue(token)) {
-                return Tokens.getValue(token);
-            } else if (token == Tokens.OPEN_CURLY) {
-                return parseObject(true);
-            } else if (token == Tokens.OPEN_SQUARE) {
-                return parseArray();
+        private AbstractConfigValue parseValue(TokenWithComments t) {
+            AbstractConfigValue v;
+
+            if (Tokens.isValue(t.token)) {
+                v = Tokens.getValue(t.token);
+            } else if (t.token == Tokens.OPEN_CURLY) {
+                v = parseObject(true);
+            } else if (t.token == Tokens.OPEN_SQUARE) {
+                v = parseArray();
             } else {
-                throw parseError(addQuoteSuggestion(token.toString(),
-                        "Expecting a value but got wrong token: " + token));
+                throw parseError(addQuoteSuggestion(t.token.toString(),
+                        "Expecting a value but got wrong token: " + t.token));
             }
+
+            v = v.withOrigin(t.setComments(v.origin()));
+
+            return v;
         }
 
         private static AbstractConfigObject createValueUnderPath(Path path,
@@ -339,24 +451,29 @@ final class Parser {
                     remaining = remaining.remainder();
                 }
             }
+
+            // the setComments(null) is to ensure comments are only
+            // on the exact leaf node they apply to.
+            // a comment before "foo.bar" applies to the full setting
+            // "foo.bar" not also to "foo"
             ListIterator<String> i = keys.listIterator(keys.size());
             String deepest = i.previous();
-            AbstractConfigObject o = new SimpleConfigObject(value.origin(),
+            AbstractConfigObject o = new SimpleConfigObject(value.origin().setComments(null),
                     Collections.<String, AbstractConfigValue> singletonMap(
                             deepest, value));
             while (i.hasPrevious()) {
                 Map<String, AbstractConfigValue> m = Collections.<String, AbstractConfigValue> singletonMap(
                         i.previous(), o);
-                o = new SimpleConfigObject(value.origin(), m);
+                o = new SimpleConfigObject(value.origin().setComments(null), m);
             }
 
             return o;
         }
 
-        private Path parseKey(Token token) {
+        private Path parseKey(TokenWithComments token) {
             if (flavor == ConfigSyntax.JSON) {
-                if (Tokens.isValueWithType(token, ConfigValueType.STRING)) {
-                    String key = (String) Tokens.getValue(token).unwrapped();
+                if (Tokens.isValueWithType(token.token, ConfigValueType.STRING)) {
+                    String key = (String) Tokens.getValue(token.token).unwrapped();
                     return Path.newKey(key);
                 } else {
                     throw parseError(addKeyName("Expecting close brace } or a field name here, got "
@@ -364,9 +481,9 @@ final class Parser {
                 }
             } else {
                 List<Token> expression = new ArrayList<Token>();
-                Token t = token;
-                while (Tokens.isValue(t) || Tokens.isUnquotedText(t)) {
-                    expression.add(t);
+                TokenWithComments t = token;
+                while (Tokens.isValue(t.token) || Tokens.isUnquotedText(t.token)) {
+                    expression.add(t.token);
                     t = nextToken(); // note: don't cross a newline
                 }
 
@@ -400,13 +517,13 @@ final class Parser {
         }
 
         private void parseInclude(Map<String, AbstractConfigValue> values) {
-            Token t = nextTokenIgnoringNewline();
-            while (isUnquotedWhitespace(t)) {
+            TokenWithComments t = nextTokenIgnoringNewline();
+            while (isUnquotedWhitespace(t.token)) {
                 t = nextTokenIgnoringNewline();
             }
 
-            if (Tokens.isValueWithType(t, ConfigValueType.STRING)) {
-                String name = (String) Tokens.getValue(t).unwrapped();
+            if (Tokens.isValueWithType(t.token, ConfigValueType.STRING)) {
+                String name = (String) Tokens.getValue(t.token).unwrapped();
                 AbstractConfigObject obj = (AbstractConfigObject) includer
                         .include(includeContext, name);
 
@@ -448,8 +565,8 @@ final class Parser {
             boolean lastInsideEquals = false;
 
             while (true) {
-                Token t = nextTokenIgnoringNewline();
-                if (t == Tokens.CLOSE_CURLY) {
+                TokenWithComments t = nextTokenIgnoringNewline();
+                if (t.token == Tokens.CLOSE_CURLY) {
                     if (flavor == ConfigSyntax.JSON && afterComma) {
                         throw parseError(addQuoteSuggestion(t.toString(),
                                 "expecting a field name after a comma, got a close brace } instead"));
@@ -458,45 +575,45 @@ final class Parser {
                                 "unbalanced close brace '}' with no open brace"));
                     }
                     break;
-                } else if (t == Tokens.END && !hadOpenCurly) {
+                } else if (t.token == Tokens.END && !hadOpenCurly) {
                     putBack(t);
                     break;
-                } else if (flavor != ConfigSyntax.JSON && isIncludeKeyword(t)) {
+                } else if (flavor != ConfigSyntax.JSON && isIncludeKeyword(t.token)) {
                     parseInclude(values);
 
                     afterComma = false;
                 } else {
-                    Path path = parseKey(t);
-                    Token afterKey = nextTokenIgnoringNewline();
+                    TokenWithComments keyToken = t;
+                    Path path = parseKey(keyToken);
+                    TokenWithComments afterKey = nextTokenIgnoringNewline();
                     boolean insideEquals = false;
 
                     // path must be on-stack while we parse the value
                     pathStack.push(path);
 
-                    Token valueToken;
+                    TokenWithComments valueToken;
                     AbstractConfigValue newValue;
-                    if (flavor == ConfigSyntax.CONF
-                            && afterKey == Tokens.OPEN_CURLY) {
+                    if (flavor == ConfigSyntax.CONF && afterKey.token == Tokens.OPEN_CURLY) {
                         // can omit the ':' or '=' before an object value
                         valueToken = afterKey;
-                        newValue = parseObject(true);
                     } else {
-                        if (!isKeyValueSeparatorToken(afterKey)) {
+                        if (!isKeyValueSeparatorToken(afterKey.token)) {
                             throw parseError(addQuoteSuggestion(afterKey.toString(),
                                     "Key '" + path.render() + "' may not be followed by token: "
                                             + afterKey));
                         }
 
-                        if (afterKey == Tokens.EQUALS) {
+                        if (afterKey.token == Tokens.EQUALS) {
                             insideEquals = true;
                             equalsCount += 1;
                         }
 
                         consolidateValueTokens();
                         valueToken = nextTokenIgnoringNewline();
-                        newValue = parseValue(valueToken);
                     }
 
+                    newValue = parseValue(valueToken.prepend(keyToken.comments));
+
                     lastPath = pathStack.pop();
                     if (insideEquals) {
                         equalsCount -= 1;
@@ -547,7 +664,7 @@ final class Parser {
                     afterComma = true;
                 } else {
                     t = nextTokenIgnoringNewline();
-                    if (t == Tokens.CLOSE_CURLY) {
+                    if (t.token == Tokens.CLOSE_CURLY) {
                         if (!hadOpenCurly) {
                             throw parseError(addQuoteSuggestion(lastPath, lastInsideEquals,
                                     t.toString(), "unbalanced close brace '}' with no open brace"));
@@ -557,7 +674,7 @@ final class Parser {
                         throw parseError(addQuoteSuggestion(lastPath, lastInsideEquals,
                                 t.toString(), "Expecting close brace } or a comma, got " + t));
                     } else {
-                        if (t == Tokens.END) {
+                        if (t.token == Tokens.END) {
                             putBack(t);
                             break;
                         } else {
@@ -567,6 +684,7 @@ final class Parser {
                     }
                 }
             }
+
             return new SimpleConfigObject(objectOrigin, values);
         }
 
@@ -577,18 +695,15 @@ final class Parser {
 
             consolidateValueTokens();
 
-            Token t = nextTokenIgnoringNewline();
+            TokenWithComments t = nextTokenIgnoringNewline();
 
             // special-case the first element
-            if (t == Tokens.CLOSE_SQUARE) {
+            if (t.token == Tokens.CLOSE_SQUARE) {
                 return new SimpleConfigList(arrayOrigin,
                         Collections.<AbstractConfigValue> emptyList());
-            } else if (Tokens.isValue(t)) {
+            } else if (Tokens.isValue(t.token) || t.token == Tokens.OPEN_CURLY
+                    || t.token == Tokens.OPEN_SQUARE) {
                 values.add(parseValue(t));
-            } else if (t == Tokens.OPEN_CURLY) {
-                values.add(parseObject(true));
-            } else if (t == Tokens.OPEN_SQUARE) {
-                values.add(parseArray());
             } else {
                 throw parseError(addKeyName("List should have ] or a first element after the open [, instead had token: "
                         + t
@@ -604,7 +719,7 @@ final class Parser {
                     // comma (or newline equivalent) consumed
                 } else {
                     t = nextTokenIgnoringNewline();
-                    if (t == Tokens.CLOSE_SQUARE) {
+                    if (t.token == Tokens.CLOSE_SQUARE) {
                         return new SimpleConfigList(arrayOrigin, values);
                     } else {
                         throw parseError(addKeyName("List should have ended with ] or had a comma, instead had token: "
@@ -619,14 +734,10 @@ final class Parser {
                 consolidateValueTokens();
 
                 t = nextTokenIgnoringNewline();
-                if (Tokens.isValue(t)) {
+                if (Tokens.isValue(t.token) || t.token == Tokens.OPEN_CURLY
+                        || t.token == Tokens.OPEN_SQUARE) {
                     values.add(parseValue(t));
-                } else if (t == Tokens.OPEN_CURLY) {
-                    values.add(parseObject(true));
-                } else if (t == Tokens.OPEN_SQUARE) {
-                    values.add(parseArray());
-                } else if (flavor != ConfigSyntax.JSON
-                        && t == Tokens.CLOSE_SQUARE) {
+                } else if (flavor != ConfigSyntax.JSON && t.token == Tokens.CLOSE_SQUARE) {
                     // we allow one trailing comma
                     putBack(t);
                 } else {
@@ -640,8 +751,8 @@ final class Parser {
         }
 
         AbstractConfigValue parse() {
-            Token t = nextTokenIgnoringNewline();
-            if (t == Tokens.START) {
+            TokenWithComments t = nextTokenIgnoringNewline();
+            if (t.token == Tokens.START) {
                 // OK
             } else {
                 throw new ConfigException.BugOrBroken(
@@ -650,13 +761,11 @@ final class Parser {
 
             t = nextTokenIgnoringNewline();
             AbstractConfigValue result = null;
-            if (t == Tokens.OPEN_CURLY) {
-                result = parseObject(true);
-            } else if (t == Tokens.OPEN_SQUARE) {
-                result = parseArray();
+            if (t.token == Tokens.OPEN_CURLY || t.token == Tokens.OPEN_SQUARE) {
+                result = parseValue(t);
             } else {
                 if (flavor == ConfigSyntax.JSON) {
-                    if (t == Tokens.END) {
+                    if (t.token == Tokens.END) {
                         throw parseError("Empty document");
                     } else {
                         throw parseError("Document must have an object or array at root, unexpected token: "
@@ -668,11 +777,14 @@ final class Parser {
                     // of it, so put it back.
                     putBack(t);
                     result = parseObject(false);
+                    // in this case we don't try to use commentsStack comments
+                    // since they would all presumably apply to fields not the
+                    // root object
                 }
             }
 
             t = nextTokenIgnoringNewline();
-            if (t == Tokens.END) {
+            if (t.token == Tokens.END) {
                 return result;
             } else {
                 throw parseError("Document has trailing tokens after first object or array: "
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigList.java b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigList.java
index 6703540040..1921826352 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigList.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigList.java
@@ -145,6 +145,14 @@ final class SimpleConfigList extends AbstractConfigValue implements ConfigList {
                     sb.append("# ");
                     sb.append(v.origin().description());
                     sb.append("\n");
+
+                    for (String comment : v.origin().comments()) {
+                        indent(sb, indent + 1);
+                        sb.append("# ");
+                        sb.append(comment);
+                        sb.append("\n");
+                    }
+
                     indent(sb, indent + 1);
                 }
                 v.render(sb, indent + 1, formatted);
@@ -353,4 +361,9 @@ final class SimpleConfigList extends AbstractConfigValue implements ConfigList {
     public ConfigValue set(int index, ConfigValue element) {
         throw weAreImmutable("set");
     }
+
+    @Override
+    protected SimpleConfigList newCopy(boolean ignoresFallbacks, ConfigOrigin newOrigin) {
+        return new SimpleConfigList(newOrigin, value);
+    }
 }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigObject.java b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigObject.java
index 0c855ba879..953f26491f 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigObject.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigObject.java
@@ -45,8 +45,9 @@ final class SimpleConfigObject extends AbstractConfigObject {
     }
 
     @Override
-    protected SimpleConfigObject newCopy(ResolveStatus newStatus, boolean newIgnoresFallbacks) {
-        return new SimpleConfigObject(origin(), value, newStatus, newIgnoresFallbacks);
+    protected SimpleConfigObject newCopy(ResolveStatus newStatus, boolean newIgnoresFallbacks,
+            ConfigOrigin newOrigin) {
+        return new SimpleConfigObject(newOrigin, value, newStatus, newIgnoresFallbacks);
     }
 
     @Override
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigOrigin.java b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigOrigin.java
index 01d5b6070b..f0a0dbd353 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigOrigin.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigOrigin.java
@@ -8,6 +8,7 @@ import java.net.MalformedURLException;
 import java.net.URL;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 
@@ -22,19 +23,21 @@ final class SimpleConfigOrigin implements ConfigOrigin {
     final private int endLineNumber;
     final private OriginType originType;
     final private String urlOrNull;
+    final private List<String> commentsOrNull;
 
     protected SimpleConfigOrigin(String description, int lineNumber, int endLineNumber,
             OriginType originType,
-            String urlOrNull) {
+            String urlOrNull, List<String> commentsOrNull) {
         this.description = description;
         this.lineNumber = lineNumber;
         this.endLineNumber = endLineNumber;
         this.originType = originType;
         this.urlOrNull = urlOrNull;
+        this.commentsOrNull = commentsOrNull;
     }
 
     static SimpleConfigOrigin newSimple(String description) {
-        return new SimpleConfigOrigin(description, -1, -1, OriginType.GENERIC, null);
+        return new SimpleConfigOrigin(description, -1, -1, OriginType.GENERIC, null, null);
     }
 
     static SimpleConfigOrigin newFile(String filename) {
@@ -44,17 +47,17 @@ final class SimpleConfigOrigin implements ConfigOrigin {
         } catch (MalformedURLException e) {
             url = null;
         }
-        return new SimpleConfigOrigin(filename, -1, -1, OriginType.FILE, url);
+        return new SimpleConfigOrigin(filename, -1, -1, OriginType.FILE, url, null);
     }
 
     static SimpleConfigOrigin newURL(URL url) {
         String u = url.toExternalForm();
-        return new SimpleConfigOrigin(u, -1, -1, OriginType.URL, u);
+        return new SimpleConfigOrigin(u, -1, -1, OriginType.URL, u, null);
     }
 
     static SimpleConfigOrigin newResource(String resource, URL url) {
         return new SimpleConfigOrigin(resource, -1, -1, OriginType.RESOURCE,
-                url != null ? url.toExternalForm() : null);
+                url != null ? url.toExternalForm() : null, null);
     }
 
     static SimpleConfigOrigin newResource(String resource) {
@@ -66,13 +69,22 @@ final class SimpleConfigOrigin implements ConfigOrigin {
             return this;
         } else {
             return new SimpleConfigOrigin(this.description, lineNumber, lineNumber,
-                    this.originType, this.urlOrNull);
+                    this.originType, this.urlOrNull, this.commentsOrNull);
         }
     }
 
     SimpleConfigOrigin addURL(URL url) {
-        return new SimpleConfigOrigin(this.description, this.lineNumber, this.endLineNumber, this.originType,
-                url != null ? url.toExternalForm() : null);
+        return new SimpleConfigOrigin(this.description, this.lineNumber, this.endLineNumber,
+                this.originType, url != null ? url.toExternalForm() : null, this.commentsOrNull);
+    }
+
+    SimpleConfigOrigin setComments(List<String> comments) {
+        if (ConfigImplUtil.equalsHandlingNull(comments, this.commentsOrNull)) {
+            return this;
+        } else {
+            return new SimpleConfigOrigin(this.description, this.lineNumber, this.endLineNumber,
+                    this.originType, this.urlOrNull, comments);
+        }
     }
 
     @Override
@@ -172,12 +184,22 @@ final class SimpleConfigOrigin implements ConfigOrigin {
         return lineNumber;
     }
 
+    @Override
+    public List<String> comments() {
+        if (commentsOrNull != null) {
+            return commentsOrNull;
+        } else {
+            return Collections.emptyList();
+        }
+    }
+
     static final String MERGE_OF_PREFIX = "merge of ";
 
     private static SimpleConfigOrigin mergeTwo(SimpleConfigOrigin a, SimpleConfigOrigin b) {
         String mergedDesc;
         int mergedStartLine;
         int mergedEndLine;
+        List<String> mergedComments;
 
         OriginType mergedType;
         if (a.originType == b.originType) {
@@ -233,8 +255,18 @@ final class SimpleConfigOrigin implements ConfigOrigin {
             mergedURL = null;
         }
 
+        if (ConfigImplUtil.equalsHandlingNull(a.commentsOrNull, b.commentsOrNull)) {
+            mergedComments = a.commentsOrNull;
+        } else {
+            mergedComments = new ArrayList<String>();
+            if (a.commentsOrNull != null)
+                mergedComments.addAll(a.commentsOrNull);
+            if (b.commentsOrNull != null)
+                mergedComments.addAll(b.commentsOrNull);
+        }
+
         return new SimpleConfigOrigin(mergedDesc, mergedStartLine, mergedEndLine, mergedType,
-                mergedURL);
+                mergedURL, mergedComments);
     }
 
     private static int similarity(SimpleConfigOrigin a, SimpleConfigOrigin b) {
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/TokenType.java b/akka-actor/src/main/java/com/typesafe/config/impl/TokenType.java
index ace12fa70b..fc617d9ee2 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/TokenType.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/TokenType.java
@@ -17,5 +17,6 @@ enum TokenType {
     NEWLINE,
     UNQUOTED_TEXT,
     SUBSTITUTION,
-    PROBLEM;
+    PROBLEM,
+    COMMENT;
 }
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Tokenizer.java b/akka-actor/src/main/java/com/typesafe/config/impl/Tokenizer.java
index 2aeb7184bc..280a028077 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/Tokenizer.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/Tokenizer.java
@@ -168,40 +168,27 @@ final class Tokenizer {
             return c != '\n' && ConfigImplUtil.isWhitespace(c);
         }
 
-        private int slurpComment() {
-            for (;;) {
-                int c = nextCharRaw();
-                if (c == -1 || c == '\n') {
-                    return c;
-                }
-            }
-        }
-
-        // get next char, skipping comments
-        private int nextCharSkippingComments() {
-            for (;;) {
-                int c = nextCharRaw();
-
-                if (c == -1) {
-                    return -1;
-                } else {
-                    if (allowComments) {
-                        if (c == '#') {
-                            return slurpComment();
-                        } else if (c == '/') {
-                            int maybeSecondSlash = nextCharRaw();
-                            if (maybeSecondSlash == '/') {
-                                return slurpComment();
-                            } else {
-                                putBack(maybeSecondSlash);
-                                return c;
-                            }
+        private boolean startOfComment(int c) {
+            if (c == -1) {
+                return false;
+            } else {
+                if (allowComments) {
+                    if (c == '#') {
+                        return true;
+                    } else if (c == '/') {
+                        int maybeSecondSlash = nextCharRaw();
+                        // we want to predictably NOT consume any chars
+                        putBack(maybeSecondSlash);
+                        if (maybeSecondSlash == '/') {
+                            return true;
                         } else {
-                            return c;
+                            return false;
                         }
                     } else {
-                        return c;
+                        return false;
                     }
+                } else {
+                    return false;
                 }
             }
         }
@@ -209,7 +196,7 @@ final class Tokenizer {
         // get next char, skipping non-newline whitespace
         private int nextCharAfterWhitespace(WhitespaceSaver saver) {
             for (;;) {
-                int c = nextCharSkippingComments();
+                int c = nextCharRaw();
 
                 if (c == -1) {
                     return -1;
@@ -269,6 +256,27 @@ final class Tokenizer {
             return ((SimpleConfigOrigin) baseOrigin).setLineNumber(lineNumber);
         }
 
+        // ONE char has always been consumed, either the # or the first /, but
+        // not both slashes
+        private Token pullComment(int firstChar) {
+            if (firstChar == '/') {
+                int discard = nextCharRaw();
+                if (discard != '/')
+                    throw new ConfigException.BugOrBroken("called pullComment but // not seen");
+            }
+
+            StringBuilder sb = new StringBuilder();
+            for (;;) {
+                int c = nextCharRaw();
+                if (c == -1 || c == '\n') {
+                    putBack(c);
+                    return Tokens.newComment(lineOrigin, sb.toString());
+                } else {
+                    sb.appendCodePoint(c);
+                }
+            }
+        }
+
         // chars JSON allows a number to start with
         static final String firstNumberChars = "0123456789-";
         // chars JSON allows to be part of a number
@@ -283,7 +291,7 @@ final class Tokenizer {
         private Token pullUnquotedText() {
             ConfigOrigin origin = lineOrigin;
             StringBuilder sb = new StringBuilder();
-            int c = nextCharSkippingComments();
+            int c = nextCharRaw();
             while (true) {
                 if (c == -1) {
                     break;
@@ -291,6 +299,8 @@ final class Tokenizer {
                     break;
                 } else if (isWhitespace(c)) {
                     break;
+                } else if (startOfComment(c)) {
+                    break;
                 } else {
                     sb.appendCodePoint(c);
                 }
@@ -310,7 +320,7 @@ final class Tokenizer {
                         return Tokens.newBoolean(origin, false);
                 }
 
-                c = nextCharSkippingComments();
+                c = nextCharRaw();
             }
 
             // put back the char that ended the unquoted text
@@ -324,12 +334,12 @@ final class Tokenizer {
             StringBuilder sb = new StringBuilder();
             sb.appendCodePoint(firstChar);
             boolean containedDecimalOrE = false;
-            int c = nextCharSkippingComments();
+            int c = nextCharRaw();
             while (c != -1 && numberChars.indexOf(c) >= 0) {
                 if (c == '.' || c == 'e' || c == 'E')
                     containedDecimalOrE = true;
                 sb.appendCodePoint(c);
-                c = nextCharSkippingComments();
+                c = nextCharRaw();
             }
             // the last character we looked at wasn't part of the number, put it
             // back
@@ -382,7 +392,7 @@ final class Tokenizer {
                 // kind of absurdly slow, but screw it for now
                 char[] a = new char[4];
                 for (int i = 0; i < 4; ++i) {
-                    int c = nextCharSkippingComments();
+                    int c = nextCharRaw();
                     if (c == -1)
                         throw problem("End of input but expecting 4 hex digits for \\uXXXX escape");
                     a[i] = (char) c;
@@ -431,14 +441,14 @@ final class Tokenizer {
         private Token pullSubstitution() throws ProblemException {
             // the initial '$' has already been consumed
             ConfigOrigin origin = lineOrigin;
-            int c = nextCharSkippingComments();
+            int c = nextCharRaw();
             if (c != '{') {
                 throw problem(asString(c), "'$' not followed by {, '" + asString(c)
                         + "' not allowed after '$'", true /* suggestQuotes */);
             }
 
             boolean optional = false;
-            c = nextCharSkippingComments();
+            c = nextCharRaw();
             if (c == '?') {
                 optional = true;
             } else {
@@ -484,45 +494,49 @@ final class Tokenizer {
                 return line;
             } else {
                 Token t = null;
-                switch (c) {
-                case '"':
-                    t = pullQuotedString();
-                    break;
-                case '$':
-                    t = pullSubstitution();
-                    break;
-                case ':':
-                    t = Tokens.COLON;
-                    break;
-                case ',':
-                    t = Tokens.COMMA;
-                    break;
-                case '=':
-                    t = Tokens.EQUALS;
-                    break;
-                case '{':
-                    t = Tokens.OPEN_CURLY;
-                    break;
-                case '}':
-                    t = Tokens.CLOSE_CURLY;
-                    break;
-                case '[':
-                    t = Tokens.OPEN_SQUARE;
-                    break;
-                case ']':
-                    t = Tokens.CLOSE_SQUARE;
-                    break;
-                }
+                if (startOfComment(c)) {
+                    t = pullComment(c);
+                } else {
+                    switch (c) {
+                    case '"':
+                        t = pullQuotedString();
+                        break;
+                    case '$':
+                        t = pullSubstitution();
+                        break;
+                    case ':':
+                        t = Tokens.COLON;
+                        break;
+                    case ',':
+                        t = Tokens.COMMA;
+                        break;
+                    case '=':
+                        t = Tokens.EQUALS;
+                        break;
+                    case '{':
+                        t = Tokens.OPEN_CURLY;
+                        break;
+                    case '}':
+                        t = Tokens.CLOSE_CURLY;
+                        break;
+                    case '[':
+                        t = Tokens.OPEN_SQUARE;
+                        break;
+                    case ']':
+                        t = Tokens.CLOSE_SQUARE;
+                        break;
+                    }
 
-                if (t == null) {
-                    if (firstNumberChars.indexOf(c) >= 0) {
-                        t = pullNumber(c);
-                    } else if (notInUnquotedText.indexOf(c) >= 0) {
-                        throw problem(asString(c), "Reserved character '" + asString(c)
-                                + "' is not allowed outside quotes", true /* suggestQuotes */);
-                    } else {
-                        putBack(c);
-                        t = pullUnquotedText();
+                    if (t == null) {
+                        if (firstNumberChars.indexOf(c) >= 0) {
+                            t = pullNumber(c);
+                        } else if (notInUnquotedText.indexOf(c) >= 0) {
+                            throw problem(asString(c), "Reserved character '" + asString(c)
+                                    + "' is not allowed outside quotes", true /* suggestQuotes */);
+                        } else {
+                            putBack(c);
+                            t = pullUnquotedText();
+                        }
                     }
                 }
 
@@ -548,6 +562,7 @@ final class Tokenizer {
             Token whitespace = whitespaceSaver.check(t, origin, lineNumber);
             if (whitespace != null)
                 tokens.add(whitespace);
+
             tokens.add(t);
         }
 
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Tokens.java b/akka-actor/src/main/java/com/typesafe/config/impl/Tokens.java
index 9f7bd42e7c..d726d83d53 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/Tokens.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/Tokens.java
@@ -52,7 +52,7 @@ final class Tokens {
 
         @Override
         public String toString() {
-            return "'\n'@" + lineNumber();
+            return "'\\n'@" + lineNumber();
         }
 
         @Override
@@ -167,6 +167,45 @@ final class Tokens {
         }
     }
 
+    static private class Comment extends Token {
+        final private String text;
+
+        Comment(ConfigOrigin origin, String text) {
+            super(TokenType.COMMENT, origin);
+            this.text = text;
+        }
+
+        String text() {
+            return text;
+        }
+
+        @Override
+        public String toString() {
+            StringBuilder sb = new StringBuilder();
+            sb.append("'#");
+            sb.append(text);
+            sb.append("' (COMMENT)");
+            return sb.toString();
+        }
+
+        @Override
+        protected boolean canEqual(Object other) {
+            return other instanceof Comment;
+        }
+
+        @Override
+        public boolean equals(Object other) {
+            return super.equals(other) && ((Comment) other).text.equals(text);
+        }
+
+        @Override
+        public int hashCode() {
+            int h = 41 * (41 + super.hashCode());
+            h = 41 * (h + text.hashCode());
+            return h;
+        }
+    }
+
     // This is not a Value, because it requires special processing
     static private class Substitution extends Token {
         final private boolean optional;
@@ -262,6 +301,18 @@ final class Tokens {
         }
     }
 
+    static boolean isComment(Token token) {
+        return token instanceof Comment;
+    }
+
+    static String getCommentText(Token token) {
+        if (token instanceof Comment) {
+            return ((Comment) token).text();
+        } else {
+            throw new ConfigException.BugOrBroken("tried to get comment text from " + token);
+        }
+    }
+
     static boolean isUnquotedText(Token token) {
         return token instanceof UnquotedText;
     }
@@ -316,6 +367,10 @@ final class Tokens {
         return new Problem(origin, what, message, suggestQuotes, cause);
     }
 
+    static Token newComment(ConfigOrigin origin, String text) {
+        return new Comment(origin, text);
+    }
+
     static Token newUnquotedText(ConfigOrigin origin, String s) {
         return new UnquotedText(origin, s);
     }
diff --git a/akka-actor/src/main/resources/reference.conf b/akka-actor/src/main/resources/reference.conf
index f31e61bcbe..f4fe972735 100644
--- a/akka-actor/src/main/resources/reference.conf
+++ b/akka-actor/src/main/resources/reference.conf
@@ -3,120 +3,180 @@
 ##############################
 
 # This the reference config file has all the default settings.
-# Make your edits/overrides in your akka.conf.
+# Make your edits/overrides in your application.conf.
 
 akka {
-  version = "2.0-SNAPSHOT" # Akka version, checked against the runtime version of Akka.
-  
-  home = ""                # Home directory of Akka, modules in the deploy directory will be loaded
+  # Akka version, checked against the runtime version of Akka.
+  version = "2.0-SNAPSHOT"
 
-  enabled-modules = []     # Comma separated list of the enabled modules. Options: ["cluster", "camel", "http"]
+  # Home directory of Akka, modules in the deploy directory will be loaded
+  home = ""
 
-  event-handlers = ["akka.event.Logging$DefaultLogger"] # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT)
-  loglevel        = "INFO"                              # Options: ERROR, WARNING, INFO, DEBUG
-                                                        # this level is used by the configured loggers (see "event-handlers") as soon
-                                                        # as they have been started; before that, see "stdout-loglevel"
-  stdout-loglevel = "WARNING"                           # Loglevel for the very basic logger activated during AkkaApplication startup
-                                                        # FIXME: Is there any sensible reason why we have 2 different log levels?
+  # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT)
+  event-handlers = ["akka.event.Logging$DefaultLogger"]
 
-  logConfigOnStart = off                                # Log the complete configuration at INFO level when the actor system is started. 
-                                                        # This is useful when you are uncertain of what configuration is used.
+  # Log level used by the configured loggers (see "event-handlers") as soon
+  # as they have been started; before that, see "stdout-loglevel"
+  # Options: ERROR, WARNING, INFO, DEBUG
+  loglevel = "INFO"
 
-  extensions = []          # List FQCN of extensions which shall be loaded at actor system startup. 
-                           # FIXME: clarify "extensions" here, "Akka Extensions ()" 
+  # Log level for the very basic logger activated during AkkaApplication startup
+  # Options: ERROR, WARNING, INFO, DEBUG
+  stdout-loglevel = "WARNING"
 
-  # These boot classes are loaded (and created) automatically when the Akka Microkernel boots up
-  #     Can be used to bootstrap your application(s)
-  #     Should be the FQN (Fully Qualified Name) of the boot class which needs to have a default constructor
-  # boot = ["sample.camel.Boot",
-  #         "sample.rest.java.Boot",
-  #         "sample.rest.scala.Boot",
-  #         "sample.security.Boot"]
-  boot = []
+  # Log the complete configuration at INFO level when the actor system is started.
+  # This is useful when you are uncertain of what configuration is used.
+  logConfigOnStart = off
+
+  # List FQCN of extensions which shall be loaded at actor system startup.
+  # FIXME: clarify "extensions" here, "Akka Extensions ()"
+  extensions = []
 
   actor {
+
     provider = "akka.actor.LocalActorRefProvider"
-    creation-timeout = 20s           # Timeout for ActorSystem.actorOf
-    timeout = 5s                     # Default timeout for Future based invocations
-                                     #    - Actor:        ask && ?
-                                     #    - UntypedActor: ask
-                                     #    - TypedActor:   methods with non-void return type
-    serialize-messages = off         # Does a deep clone of (non-primitive) messages to ensure immutability
-    dispatcher-shutdown-timeout = 1s # How long dispatchers by default will wait for new actors until they shut down
+
+    # Timeout for ActorSystem.actorOf
+    creation-timeout = 20s
+
+    # frequency with which stopping actors are prodded in case they had to be removed from their parents
+    reaper-interval = 5s
+
+    # Default timeout for Future based invocations
+    #    - Actor:        ask && ?
+    #    - UntypedActor: ask
+    #    - TypedActor:   methods with non-void return type
+    timeout = 5s
+
+    # Does a deep clone of (non-primitive) messages to ensure immutability
+    serialize-messages = off
+
+    # How long dispatchers by default will wait for new actors until they shut down
+    dispatcher-shutdown-timeout = 1s
 
     deployment {
-    
-      default {                # deployment id pattern, e.g. /app/service-ping
 
-        router = "direct"      # routing (load-balance) scheme to use
-                               #     available: "direct", "round-robin", "random", "scatter-gather"
-                               #     or:        fully qualified class name of the router class
-                               #     default is "direct";
-                               # In case of non-direct routing, the actors to be routed to can be specified
-                               # in several ways:
-                               # - nr-of-instances: will create that many children given the actor factory
-                               #   supplied in the source code (overridable using create-as below)
-                               # - target.paths: will look the paths up using actorFor and route to 
-                               #   them, i.e. will not create children
+      # deployment id pattern, e.g. /user/service-ping
+      default {
 
-        nr-of-instances = 1    # number of children to create in case of a non-direct router; this setting
-                               # is ignored if target.paths is given
 
-        create-as {            # FIXME document 'create-as'
-          class = ""           # fully qualified class name of recipe implementation
+        # routing (load-balance) scheme to use
+        #     available: "direct", "round-robin", "random", "scatter-gather"
+        #     or:        fully qualified class name of the router class
+        #     default is "direct";
+        # In case of non-direct routing, the actors to be routed to can be specified
+        # in several ways:
+        # - nr-of-instances: will create that many children given the actor factory
+        #   supplied in the source code (overridable using create-as below)
+        # - target.paths: will look the paths up using actorFor and route to
+        #   them, i.e. will not create children
+        router = "direct"
+
+        # number of children to create in case of a non-direct router; this setting
+        # is ignored if target.paths is given
+        nr-of-instances = 1
+
+        # FIXME document 'create-as', ticket 1511
+        create-as {
+          # fully qualified class name of recipe implementation
+          class = ""
         }
 
         target {
-          paths = []           # Alternatively to giving nr-of-instances you can specify the full paths of 
-                               # those actors which should be routed to. This setting takes precedence over
-                               # nr-of-instances
+          # Alternatively to giving nr-of-instances you can specify the full paths of
+          # those actors which should be routed to. This setting takes precedence over
+          # nr-of-instances
+          paths = []
         }
-        
+
       }
     }
 
     default-dispatcher {
-      type = "Dispatcher"              # Must be one of the following
-                                       # Dispatcher, (BalancingDispatcher, only valid when all actors using it are of the same type),
-                                       # A FQCN to a class inheriting MessageDispatcherConfigurator with a no-arg visible constructor
-      name = "DefaultDispatcher"       # Name used in log messages and thread names.
-      daemonic = off                   # Toggles whether the threads created by this dispatcher should be daemons or not
-      keep-alive-time = 60s            # Keep alive time for threads
-      core-pool-size-min = 8           # minimum number of threads to cap factor-based core number to
-      core-pool-size-factor = 8.0      # No of core threads ... ceil(available processors * factor)
-      core-pool-size-max = 4096        # maximum number of threads to cap factor-based number to
-                                       # Hint: max-pool-size is only used for bounded task queues
-      max-pool-size-min = 8            # minimum number of threads to cap factor-based max number to
-      max-pool-size-factor  = 8.0      # Max no of threads ... ceil(available processors * factor)
-      max-pool-size-max = 4096         # maximum number of threads to cap factor-based max number to
-      task-queue-size = -1             # Specifies the bounded capacity of the task queue (< 1 == unbounded)
-      task-queue-type = "linked"       # Specifies which type of task queue will be used, can be "array" or "linked" (default)
-      allow-core-timeout = on          # Allow core threads to time out
-      throughput = 5                   # Throughput defines the number of messages that are processed in a batch before the
-                                       # thread is returned to the pool. Set to 1 for as fair as possible.
-      throughput-deadline-time =  0ms  # Throughput deadline for Dispatcher, set to 0 or negative for no deadline
-      mailbox-capacity = -1            # If negative (or zero) then an unbounded mailbox is used (default)
-                                       # If positive then a bounded mailbox is used and the capacity is set using the property
-                                       # NOTE: setting a mailbox to 'blocking' can be a bit dangerous, could lead to deadlock, use with care
-                                       # The following are only used for Dispatcher and only if mailbox-capacity > 0
-      mailbox-push-timeout-time = 10s  # Specifies the timeout to add a new message to a mailbox that is full - negative number means infinite timeout
+      # Must be one of the following
+      # Dispatcher, (BalancingDispatcher, only valid when all actors using it are of the same type),
+      # A FQCN to a class inheriting MessageDispatcherConfigurator with a no-arg visible constructor
+      type = "Dispatcher"
+
+      # Name used in log messages and thread names.
+      name = "DefaultDispatcher"
+
+      # Toggles whether the threads created by this dispatcher should be daemons or not
+      daemonic = off
+
+      # Keep alive time for threads
+      keep-alive-time = 60s
+
+      # minimum number of threads to cap factor-based core number to
+      core-pool-size-min = 8
+
+      # No of core threads ... ceil(available processors * factor)
+      core-pool-size-factor = 8.0
+
+      # maximum number of threads to cap factor-based number to
+      core-pool-size-max = 4096
+
+      # Hint: max-pool-size is only used for bounded task queues
+      # minimum number of threads to cap factor-based max number to
+      max-pool-size-min = 8
+
+      # Max no of threads ... ceil(available processors * factor)
+      max-pool-size-factor  = 8.0
+
+      # maximum number of threads to cap factor-based max number to
+      max-pool-size-max = 4096
+
+      # Specifies the bounded capacity of the task queue (< 1 == unbounded)
+      task-queue-size = -1
+
+      # Specifies which type of task queue will be used, can be "array" or "linked" (default)
+      task-queue-type = "linked"
+
+      # Allow core threads to time out
+      allow-core-timeout = on
+
+      # Throughput defines the number of messages that are processed in a batch before the
+      # thread is returned to the pool. Set to 1 for as fair as possible.
+      throughput = 5
+
+      # Throughput deadline for Dispatcher, set to 0 or negative for no deadline
+      throughput-deadline-time = 0ms
+
+      # If negative (or zero) then an unbounded mailbox is used (default)
+      # If positive then a bounded mailbox is used and the capacity is set using the property
+      # NOTE: setting a mailbox to 'blocking' can be a bit dangerous, could lead to deadlock, use with care
+      # The following are only used for Dispatcher and only if mailbox-capacity > 0
+      mailbox-capacity = -1
+
+      # Specifies the timeout to add a new message to a mailbox that is full -
+      # negative number means infinite timeout
+      mailbox-push-timeout-time = 10s
     }
 
     debug {
-      receive = off        # enable function of Actor.loggable(), which is to log any received message at DEBUG level
-      autoreceive = off    # enable DEBUG logging of all AutoReceiveMessages (Kill, PoisonPill and the like)
-      lifecycle = off      # enable DEBUG logging of actor lifecycle changes
-      fsm = off            # enable DEBUG logging of all LoggingFSMs for events, transitions and timers
-      event-stream = off   # enable DEBUG logging of subscription changes on the eventStream
+      # enable function of Actor.loggable(), which is to log any received message at DEBUG level
+      receive = off
+
+      # enable DEBUG logging of all AutoReceiveMessages (Kill, PoisonPill and the like)
+      autoreceive = off
+
+      # enable DEBUG logging of actor lifecycle changes
+      lifecycle = off
+
+      # enable DEBUG logging of all LoggingFSMs for events, transitions and timers
+      fsm = off
+
+      # enable DEBUG logging of subscription changes on the eventStream
+      event-stream = off
     }
-    
+
     # Entries for pluggable serializers and their bindings. If a binding for a specific class is not found,
     # then the default serializer (Java serialization) is used.
-    #
     serializers {
       # java = "akka.serialization.JavaSerializer"
       # proto = "akka.testing.ProtobufSerializer"
       # sjson = "akka.testing.SJSONSerializer"
+
       default = "akka.serialization.JavaSerializer"
     }
 
@@ -137,7 +197,6 @@ akka {
   #
   scheduler {
     # The HashedWheelTimer (HWT) implementation from Netty is used as the default scheduler in the system.
-    #
     # HWT does not execute the scheduled tasks on exact time.
     # It will, on every tick, check if there are any tasks behind the schedule and execute them.
     # You can increase or decrease the accuracy of the execution timing by specifying smaller or larger tick duration.
@@ -146,5 +205,5 @@ akka {
     tickDuration = 100ms
     ticksPerWheel = 512
   }
-  
+
 }
diff --git a/akka-actor/src/main/scala/akka/actor/Actor.scala b/akka-actor/src/main/scala/akka/actor/Actor.scala
index ffb941408a..ad95d28238 100644
--- a/akka-actor/src/main/scala/akka/actor/Actor.scala
+++ b/akka-actor/src/main/scala/akka/actor/Actor.scala
@@ -156,15 +156,44 @@ object Actor {
 /**
  * Actor base trait that should be extended by or mixed to create an Actor with the semantics of the 'Actor Model':
  * http://en.wikipedia.org/wiki/Actor_model
- * 

- * An actor has a well-defined (non-cyclic) life-cycle. - *

- * => RUNNING (created and started actor) - can receive messages
- * => SHUTDOWN (when 'stop' or 'exit' is invoked) - can't do anything
- * 
* - *

- * The Actor's own ActorRef is available in the 'self' member variable. + * An actor has a well-defined (non-cyclic) life-cycle. + * - ''RUNNING'' (created and started actor) - can receive messages + * - ''SHUTDOWN'' (when 'stop' or 'exit' is invoked) - can't do anything + * + * The Actor's own [[akka.actor.ActorRef]] is available as `self`, the current + * message’s sender as `sender` and the [[akka.actor.ActorContext]] as + * `context`. The only abstract method is `receive` which shall return the + * initial behavior of the actor as a partial function (behavior can be changed + * using `context.become` and `context.unbecome`). + * + * {{{ + * class ExampleActor extends Actor { + * def receive = { + * // directly calculated reply + * case Request(r) => sender ! calculate(r) + * + * // just to demonstrate how to stop yourself + * case Shutdown => context.stop(self) + * + * // error kernel with child replying directly to “customer” + * case Dangerous(r) => context.actorOf(Props[ReplyToOriginWorker]).tell(PerformWork(r), sender) + * + * // error kernel with reply going through us + * case OtherJob(r) => context.actorOf(Props[ReplyToMeWorker]) ! JobRequest(r, sender) + * case JobReply(result, orig_s) => orig_s ! result + * } + * } + * }}} + * + * The last line demonstrates the essence of the error kernel design: spawn + * one-off actors which terminate after doing their job, pass on `sender` to + * allow direct reply if that is what makes sense, or round-trip the sender + * as shown with the fictitious JobRequest/JobReply message pair. + * + * If you don’t like writing `context` you can always `import context._` to get + * direct access to `actorOf`, `stop` etc. This is not default in order to keep + * the name-space clean. */ trait Actor { @@ -218,25 +247,8 @@ trait Actor { final def sender: ActorRef = context.sender /** - * User overridable callback/setting. - *

- * Partial function implementing the actor logic. - * To be implemented by concrete actor class. - *

- * Example code: - *

-   *   def receive = {
-   *     case Ping =>
-   *       println("got a 'Ping' message")
-   *       sender ! "pong"
-   *
-   *     case OneWay =>
-   *       println("got a 'OneWay' message")
-   *
-   *     case unknown =>
-   *       println("unknown message: " + unknown)
-   * }
-   * 
+ * This defines the initial actor behavior, it must return a partial function + * with the actor logic. */ protected def receive: Receive @@ -258,19 +270,20 @@ trait Actor { def postStop() {} /** - * User overridable callback. + * User overridable callback: '''By default it disposes of all children and then calls `postStop()`.''' *

* Is called on a crashed Actor right BEFORE it is restarted to allow clean * up of resources before Actor is terminated. - * By default it calls postStop() */ - def preRestart(reason: Throwable, message: Option[Any]) { postStop() } + def preRestart(reason: Throwable, message: Option[Any]) { + context.children foreach (context.stop(_)) + postStop() + } /** - * User overridable callback. + * User overridable callback: By default it calls `preStart()`. *

* Is called right AFTER restart on the newly created Actor to allow reinitialization after an Actor crash. - * By default it calls preStart() */ def postRestart(reason: Throwable) { preStart() } @@ -278,7 +291,9 @@ trait Actor { * User overridable callback. *

* Is called when a message isn't handled by the current behavior of the actor - * by default it does: EventHandler.warning(self, message) + * by default it fails with either a [[akka.actor.DeathPactException]] (in + * case of an unhandled [[akka.actor.Terminated]] message) or a + * [[akka.actor.UnhandledMessageException]]. */ def unhandled(message: Any) { message match { @@ -292,7 +307,8 @@ trait Actor { // ========================================= private[akka] final def apply(msg: Any) = { - val behaviorStack = context.hotswap + // FIXME this should all go into ActorCell + val behaviorStack = context.asInstanceOf[ActorCell].hotswap msg match { case msg if behaviorStack.nonEmpty && behaviorStack.head.isDefinedAt(msg) ⇒ behaviorStack.head.apply(msg) case msg if behaviorStack.isEmpty && processingBehavior.isDefinedAt(msg) ⇒ processingBehavior.apply(msg) diff --git a/akka-actor/src/main/scala/akka/actor/ActorCell.scala b/akka-actor/src/main/scala/akka/actor/ActorCell.scala index c4053081cd..d689be07c1 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorCell.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorCell.scala @@ -67,7 +67,7 @@ trait ActorContext extends ActorRefFactory { def setReceiveTimeout(timeout: Duration): Unit /** - * Resets the current receive timeout. + * Clears the receive timeout, i.e. deactivates this feature. */ def resetReceiveTimeout(): Unit @@ -83,16 +83,6 @@ trait ActorContext extends ActorRefFactory { */ def unbecome(): Unit - /** - * Returns the current message envelope. - */ - def currentMessage: Envelope - - /** - * Returns a stack with the hotswapped behaviors (as Scala PartialFunction). - */ - def hotswap: Stack[PartialFunction[Any, Unit]] - /** * Returns the sender 'ActorRef' of the current message. 
*/ @@ -109,10 +99,6 @@ trait ActorContext extends ActorRefFactory { */ implicit def dispatcher: MessageDispatcher - def handleFailure(child: ActorRef, cause: Throwable): Unit - - def handleChildTerminated(child: ActorRef): Unit - /** * The system that the actor belongs to. * Importing this member will place a implicit MessageDispatcher in scope. @@ -185,7 +171,7 @@ private[akka] class ActorCell( val system: ActorSystemImpl, val self: InternalActorRef, val props: Props, - val parent: InternalActorRef, + @volatile var parent: InternalActorRef, /*no member*/ _receiveTimeout: Option[Duration], var hotswap: Stack[PartialFunction[Any, Unit]]) extends UntypedActorContext { @@ -242,6 +228,16 @@ private[akka] class ActorCell( _actorOf(props, name) } + final def stop(actor: ActorRef): Unit = { + val a = actor.asInstanceOf[InternalActorRef] + if (childrenRefs contains actor.path.name) { + system.locker ! a + childrenRefs -= actor.path.name + handleChildTerminated(actor) + } + a.stop() + } + final var currentMessage: Envelope = null final var actor: Actor = _ @@ -405,7 +401,8 @@ private[akka] class ActorCell( // do not process normal messages while waiting for all children to terminate dispatcher suspend this if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.path.toString, "stopping")) - for (child ← c) child.stop() + // do not use stop(child) because that would dissociate the children from us, but we still want to wait for them + for (child ← c) child.asInstanceOf[InternalActorRef].stop() stopping = true } } @@ -550,15 +547,17 @@ private[akka] class ActorCell( } final def handleFailure(child: ActorRef, cause: Throwable): Unit = childrenRefs.get(child.path.name) match { - case Some(stats) if stats.child == child ⇒ if (!props.faultHandler.handleFailure(child, cause, stats, childrenRefs.values)) throw cause + case Some(stats) if stats.child == child ⇒ if (!props.faultHandler.handleFailure(this, child, cause, stats, childrenRefs.values)) throw cause case 
Some(stats) ⇒ system.eventStream.publish(Warning(self.path.toString, "dropping Failed(" + cause + ") from unknown child " + child + " matching names but not the same, was: " + stats.child)) case None ⇒ system.eventStream.publish(Warning(self.path.toString, "dropping Failed(" + cause + ") from unknown child " + child)) } final def handleChildTerminated(child: ActorRef): Unit = { - childrenRefs -= child.path.name - props.faultHandler.handleChildTerminated(child, children) - if (stopping && childrenRefs.isEmpty) doTerminate() + if (childrenRefs contains child.path.name) { + childrenRefs -= child.path.name + props.faultHandler.handleChildTerminated(this, child, children) + if (stopping && childrenRefs.isEmpty) doTerminate() + } else system.locker ! ChildTerminated(child) } // ➡➡➡ NEVER SEND THE SAME SYSTEM MESSAGE OBJECT TO TWO ACTORS ⬅⬅⬅ diff --git a/akka-actor/src/main/scala/akka/actor/ActorRef.scala b/akka-actor/src/main/scala/akka/actor/ActorRef.scala index 3fc3574e75..1bdb9ae8ce 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorRef.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorRef.scala @@ -15,36 +15,60 @@ import akka.event.DeathWatch import scala.annotation.tailrec import java.util.concurrent.ConcurrentHashMap import akka.event.LoggingAdapter +import java.util.concurrent.atomic.AtomicBoolean /** - * ActorRef is an immutable and serializable handle to an Actor. - *

- * Create an ActorRef for an Actor by using the factory method on the Actor object. - *

- * Here is an example on how to create an actor with a default constructor. - *

- *   import Actor._
+ * Immutable and serializable handle to an actor, which may or may not reside
+ * on the local host or inside the same [[akka.actor.ActorSystem]]. An ActorRef
+ * can be obtained from an [[akka.actor.ActorRefFactory]], an interface which
+ * is implemented by ActorSystem and [[akka.actor.ActorContext]]. This means
+ * actors can be created top-level in the ActorSystem or as children of an
+ * existing actor, but only from within that actor.
  *
- *   val actor = actorOf(Props[MyActor]
- *   actor ! message
- *   actor.stop()
- * 
+ * ActorRefs can be freely shared among actors by message passing. Message + * passing conversely is their only purpose, as demonstrated in the following + * examples: * - * You can also create and start actors like this: - *
- *   val actor = actorOf(Props[MyActor]
- * 
+ * Scala: + * {{{ + * class ExampleActor extends Actor { + * val other = context.actorOf(Props[OtherActor], "childName") // will be destroyed and re-created upon restart by default * - * Here is an example on how to create an actor with a non-default constructor. - *
- *   import Actor._
+ *   def receive = {
+ *     case Request1(msg) => other ! refine(msg)     // uses this actor as sender reference, reply goes to us
+ *     case Request2(msg) => other.tell(msg, sender) // forward sender reference, enabling direct reply
+ *     case Request3(msg) => sender ! (other ? msg)  // will reply with a Future for holding other’s reply (implicit timeout from "akka.actor.timeout")
+ *   }
+ * }
+ * }}}
  *
- *   val actor = actorOf(Props(new MyActor(...))
- *   actor ! message
- *   actor.stop()
- * 
+ * Java: + * {{{ + * public class ExampleActor Extends UntypedActor { + * // this child will be destroyed and re-created upon restart by default + * final ActorRef other = getContext().actorOf(new Props(OtherActor.class), "childName"); * - * The natural ordering of ActorRef is defined in terms of its [[akka.actor.ActorPath]]. + * @Override + * public void onReceive(Object o) { + * if (o instanceof Request1) { + * val msg = ((Request1) o).getMsg(); + * other.tell(msg); // uses this actor as sender reference, reply goes to us + * + * } else if (o instanceof Request2) { + * val msg = ((Request2) o).getMsg(); + * other.tell(msg, getSender()); // forward sender reference, enabling direct reply + * + * } else if (o instanceof Request3) { + * val msg = ((Request3) o).getMsg(); + * getSender().tell(other.ask(msg, 5000)); // reply with Future for holding the other’s reply (timeout 5 seconds) + * + * } + * } + * } + * }}} + * + * ActorRef does not have a method for terminating the actor it points to, use + * [[akka.actor.ActorRefFactory]]`.stop(child)` for this purpose. */ abstract class ActorRef extends java.lang.Comparable[ActorRef] with Serializable { scalaRef: InternalActorRef ⇒ @@ -110,11 +134,6 @@ abstract class ActorRef extends java.lang.Comparable[ActorRef] with Serializable */ def forward(message: Any)(implicit context: ActorContext) = tell(message, context.sender) - /** - * Shuts down the actor its dispatcher and message queue. - */ - def stop(): Unit - /** * Is the actor shut down? 
*/ @@ -192,6 +211,7 @@ private[akka] abstract class InternalActorRef extends ActorRef with ScalaActorRe def resume(): Unit def suspend(): Unit def restart(cause: Throwable): Unit + def stop(): Unit def sendSystemMessage(message: SystemMessage): Unit def getParent: InternalActorRef /** @@ -325,7 +345,7 @@ private[akka] class LocalActorRef private[akka] ( a.result case None ⇒ this.!(message)(null) - new DefaultPromise[Any](0)(actorCell.system.dispatcher) + Promise[Any]()(actorCell.system.dispatcher) } } @@ -411,7 +431,7 @@ class DeadLetterActorRef(val eventStream: EventStream) extends MinimalActorRef { private[akka] def init(dispatcher: MessageDispatcher, rootPath: ActorPath) { _path = rootPath / "deadLetters" - brokenPromise = new KeptPromise[Any](Left(new ActorKilledException("In DeadLetterActorRef - promises are always broken.")))(dispatcher) + brokenPromise = Promise.failed(new ActorKilledException("In DeadLetterActorRef - promises are always broken."))(dispatcher) } override def isTerminated(): Boolean = true @@ -470,24 +490,16 @@ class VirtualPathContainer(val path: ActorPath, override val getParent: Internal class AskActorRef( val path: ActorPath, override val getParent: InternalActorRef, - deathWatch: DeathWatch, - timeout: Timeout, - val dispatcher: MessageDispatcher) extends MinimalActorRef { + val dispatcher: MessageDispatcher, + val deathWatch: DeathWatch) extends MinimalActorRef { - final val result = new DefaultPromise[Any](timeout)(dispatcher) + final val running = new AtomicBoolean(true) + final val result = Promise[Any]()(dispatcher) - { - val callback: Future[Any] ⇒ Unit = { _ ⇒ deathWatch.publish(Terminated(AskActorRef.this)); whenDone() } - result onComplete callback - result onTimeout callback - } - - protected def whenDone(): Unit = () - - override def !(message: Any)(implicit sender: ActorRef = null): Unit = message match { - case Status.Success(r) ⇒ result.completeWithResult(r) - case Status.Failure(f) ⇒ result.completeWithException(f) - case 
other ⇒ result.completeWithResult(other) + override def !(message: Any)(implicit sender: ActorRef = null): Unit = if (running.get) message match { + case Status.Success(r) ⇒ result.success(r) + case Status.Failure(f) ⇒ result.failure(f) + case other ⇒ result.success(other) } override def sendSystemMessage(message: SystemMessage): Unit = message match { @@ -496,11 +508,13 @@ class AskActorRef( } override def ?(message: Any)(implicit timeout: Timeout): Future[Any] = - new KeptPromise[Any](Left(new UnsupportedOperationException("Ask/? is not supported for [%s]".format(getClass.getName))))(dispatcher) + Promise.failed(new UnsupportedOperationException("Ask/? is not supported for %s".format(getClass.getName)))(dispatcher) - override def isTerminated = result.isCompleted || result.isExpired + override def isTerminated = result.isCompleted - override def stop(): Unit = if (!isTerminated) result.completeWithException(new ActorKilledException("Stopped")) + override def stop(): Unit = if (running.getAndSet(false)) { + deathWatch.publish(Terminated(this)) + } @throws(classOf[java.io.ObjectStreamException]) private def writeReplace(): AnyRef = SerializedActorRef(path.toString) diff --git a/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala b/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala index 74762f170b..dfc0252b27 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala @@ -234,6 +234,18 @@ trait ActorRefFactory { * replies in order to resolve the matching set of actors. */ def actorSelection(path: String): ActorSelection = ActorSelection(lookupRoot, path) + + /** + * Stop the actor pointed to by the given [[akka.actor.ActorRef]]; this is + * an asynchronous operation, i.e. involves a message send, but if invoked + * on an [[akka.actor.ActorContext]] if operating on a child of that + * context it will free up the name for immediate reuse. 
+ * + * When invoked on [[akka.actor.ActorSystem]] for a top-level actor, this + * method sends a message to the guardian actor and blocks waiting for a reply, + * see `akka.actor.creation-timeout` in the `reference.conf`. + */ + def stop(actor: ActorRef): Unit } class ActorRefProviderException(message: String) extends AkkaException(message) @@ -248,6 +260,11 @@ private[akka] case class CreateChild(props: Props, name: String) */ private[akka] case class CreateRandomNameChild(props: Props) +/** + * Internal Akka use only, used in implementation of system.stop(child). + */ +private[akka] case class StopChild(child: ActorRef) + /** * Local ActorRef provider. */ @@ -309,7 +326,7 @@ class LocalActorRefProvider( override def isTerminated = stopped.isOn override def !(message: Any)(implicit sender: ActorRef = null): Unit = stopped.ifOff(message match { - case Failed(ex) if sender ne null ⇒ causeOfTermination = Some(ex); sender.stop() + case Failed(ex) if sender ne null ⇒ causeOfTermination = Some(ex); sender.asInstanceOf[InternalActorRef].stop() case _ ⇒ log.error(this + " received unexpected message [" + message + "]") }) @@ -329,11 +346,15 @@ class LocalActorRefProvider( */ private class Guardian extends Actor { def receive = { - case Terminated(_) ⇒ context.self.stop() + case Terminated(_) ⇒ context.stop(self) case CreateChild(child, name) ⇒ sender ! (try context.actorOf(child, name) catch { case e: Exception ⇒ e }) case CreateRandomNameChild(child) ⇒ sender ! (try context.actorOf(child) catch { case e: Exception ⇒ e }) + case StopChild(child) ⇒ context.stop(child); sender ! "ok" case m ⇒ deadLetters ! DeadLetter(m, sender, self) } + + // guardian MUST NOT lose its children during restart + override def preRestart(cause: Throwable, msg: Option[Any]) {} } /* @@ -345,11 +366,15 @@ class LocalActorRefProvider( def receive = { case Terminated(_) ⇒ eventStream.stopDefaultLoggers() - context.self.stop() + context.stop(self) case CreateChild(child, name) ⇒ sender ! 
(try context.actorOf(child, name) catch { case e: Exception ⇒ e }) case CreateRandomNameChild(child) ⇒ sender ! (try context.actorOf(child) catch { case e: Exception ⇒ e }) + case StopChild(child) ⇒ context.stop(child); sender ! "ok" case m ⇒ deadLetters ! DeadLetter(m, sender, self) } + + // guardian MUST NOT lose its children during restart + override def preRestart(cause: Throwable, msg: Option[Any]) {} } private val guardianFaultHandlingStrategy = { @@ -374,7 +399,7 @@ class LocalActorRefProvider( def dispatcher: MessageDispatcher = system.dispatcher - lazy val terminationFuture: DefaultPromise[Unit] = new DefaultPromise[Unit](Timeout.never)(dispatcher) + lazy val terminationFuture: Promise[Unit] = Promise[Unit]()(dispatcher) @volatile private var extraNames: Map[String, InternalActorRef] = Map() @@ -412,7 +437,7 @@ class LocalActorRefProvider( lazy val tempContainer = new VirtualPathContainer(tempNode, rootGuardian, log) - val deathWatch = new LocalDeathWatch + val deathWatch = new LocalDeathWatch(1024) //TODO make configrable def init(_system: ActorSystemImpl) { system = _system @@ -461,20 +486,20 @@ class LocalActorRefProvider( case t ⇒ val path = tempPath() val name = path.name - val a = new AskActorRef(path, tempContainer, deathWatch, t, dispatcher) { - override def whenDone() { - tempContainer.removeChild(name) - } - } + val a = new AskActorRef(path, tempContainer, dispatcher, deathWatch) tempContainer.addChild(name, a) + val f = dispatcher.prerequisites.scheduler.scheduleOnce(t.duration) { tempContainer.removeChild(name); a.stop() } + a.result onComplete { _ ⇒ + try { a.stop(); f.cancel() } + finally { tempContainer.removeChild(name) } + } + Some(a) } } } -class LocalDeathWatch extends DeathWatch with ActorClassification { - - def mapSize = 1024 +class LocalDeathWatch(val mapSize: Int) extends DeathWatch with ActorClassification { override def publish(event: Event): Unit = { val monitors = dissociate(classify(event)) @@ -508,6 +533,9 @@ class 
DefaultScheduler(hashedWheelTimer: HashedWheelTimer, log: LoggingAdapter, def schedule(initialDelay: Duration, delay: Duration)(f: ⇒ Unit): Cancellable = new DefaultCancellable(hashedWheelTimer.newTimeout(createContinuousTask(delay, f), initialDelay)) + def schedule(initialDelay: Duration, delay: Duration, runnable: Runnable): Cancellable = + new DefaultCancellable(hashedWheelTimer.newTimeout(createContinuousTask(delay, runnable), initialDelay)) + def scheduleOnce(delay: Duration, runnable: Runnable): Cancellable = new DefaultCancellable(hashedWheelTimer.newTimeout(createSingleTask(runnable), delay)) @@ -565,6 +593,17 @@ class DefaultScheduler(hashedWheelTimer: HashedWheelTimer, log: LoggingAdapter, } } + private def createContinuousTask(delay: Duration, runnable: Runnable): TimerTask = { + new TimerTask { + def run(timeout: org.jboss.netty.akka.util.Timeout) { + dispatcher.dispatchTask(() ⇒ runnable.run()) + try timeout.getTimer.newTimeout(this, delay) catch { + case _: IllegalStateException ⇒ // stop recurring if timer is stopped + } + } + } + } + private def execDirectly(t: HWTimeout): Unit = { try t.getTask.run(t) catch { case e: InterruptedException ⇒ throw e diff --git a/akka-actor/src/main/scala/akka/actor/ActorSystem.scala b/akka-actor/src/main/scala/akka/actor/ActorSystem.scala index af0ec81d7b..ec647ae132 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorSystem.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorSystem.scala @@ -73,6 +73,7 @@ object ActorSystem { val ProviderClass = getString("akka.actor.provider") val CreationTimeout = Timeout(Duration(getMilliseconds("akka.actor.creation-timeout"), MILLISECONDS)) + val ReaperInterval = Duration(getMilliseconds("akka.actor.reaper-interval"), MILLISECONDS) val ActorTimeout = Timeout(Duration(getMilliseconds("akka.actor.timeout"), MILLISECONDS)) val SerializeAllMessages = getBoolean("akka.actor.serialize-messages") @@ -96,9 +97,6 @@ object ActorSystem { case "" ⇒ None case x ⇒ Some(x) } - val 
BootClasses: Seq[String] = getStringList("akka.boot").asScala - - val EnabledModules: Seq[String] = getStringList("akka.enabled-modules").asScala val SchedulerTickDuration = Duration(getMilliseconds("akka.scheduler.tickDuration"), MILLISECONDS) val SchedulerTicksPerWheel = getInt("akka.scheduler.ticksPerWheel") @@ -300,7 +298,7 @@ abstract class ActorSystem extends ActorRefFactory { * (below which the logging actors reside) and the execute all registered * termination handlers (see [[ActorSystem.registerOnTermination]]). */ - def stop() + def shutdown() /** * Registers the provided extension and creates its payload, if this extension isn't already registered @@ -339,7 +337,7 @@ class ActorSystemImpl(val name: String, applicationConfig: Config) extends Actor private[akka] def systemActorOf(props: Props, name: String): ActorRef = { implicit val timeout = settings.CreationTimeout - (systemGuardian ? CreateChild(props, name)).get match { + Await.result(systemGuardian ? CreateChild(props, name), timeout.duration) match { case ref: ActorRef ⇒ ref case ex: Exception ⇒ throw ex } @@ -347,7 +345,7 @@ class ActorSystemImpl(val name: String, applicationConfig: Config) extends Actor def actorOf(props: Props, name: String): ActorRef = { implicit val timeout = settings.CreationTimeout - (guardian ? CreateChild(props, name)).get match { + Await.result(guardian ? CreateChild(props, name), timeout.duration) match { case ref: ActorRef ⇒ ref case ex: Exception ⇒ throw ex } @@ -355,12 +353,24 @@ class ActorSystemImpl(val name: String, applicationConfig: Config) extends Actor def actorOf(props: Props): ActorRef = { implicit val timeout = settings.CreationTimeout - (guardian ? CreateRandomNameChild(props)).get match { + Await.result(guardian ? 
CreateRandomNameChild(props), timeout.duration) match { case ref: ActorRef ⇒ ref case ex: Exception ⇒ throw ex } } + def stop(actor: ActorRef): Unit = { + implicit val timeout = settings.CreationTimeout + val path = actor.path + val guard = guardian.path + val sys = systemGuardian.path + path.parent match { + case `guard` ⇒ Await.result(guardian ? StopChild(actor), timeout.duration) + case `sys` ⇒ Await.result(systemGuardian ? StopChild(actor), timeout.duration) + case _ ⇒ actor.asInstanceOf[InternalActorRef].stop() + } + } + import settings._ // this provides basic logging (to stdout) until .start() is called below @@ -423,18 +433,23 @@ class ActorSystemImpl(val name: String, applicationConfig: Config) extends Actor deadLetters.init(dispatcher, provider.rootPath) // this starts the reaper actor and the user-configured logging subscribers, which are also actors registerOnTermination(stopScheduler()) + _locker = new Locker(scheduler, ReaperInterval, lookupRoot.path / "locker", deathWatch) loadExtensions() if (LogConfigOnStart) logConfiguration() this } + @volatile + private var _locker: Locker = _ // initialized in start() + def locker = _locker + def start() = _start def registerOnTermination[T](code: ⇒ T) { terminationFuture onComplete (_ ⇒ code) } def registerOnTermination(code: Runnable) { terminationFuture onComplete (_ ⇒ code.run) } - def stop() { - guardian.stop() + def shutdown() { + stop(guardian) } /** diff --git a/akka-actor/src/main/scala/akka/actor/BootableActorLoaderService.scala b/akka-actor/src/main/scala/akka/actor/BootableActorLoaderService.scala deleted file mode 100644 index 47c2cd86c7..0000000000 --- a/akka-actor/src/main/scala/akka/actor/BootableActorLoaderService.scala +++ /dev/null @@ -1,66 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. 
- */ - -package akka.actor - -import java.io.File -import java.net.{ URL, URLClassLoader } -import java.util.jar.JarFile -import akka.util.Bootable - -/** - * Handles all modules in the deploy directory (load and unload) - */ -trait BootableActorLoaderService extends Bootable { - - def system: ActorSystem - - val BOOT_CLASSES = system.settings.BootClasses - lazy val applicationLoader = createApplicationClassLoader() - - protected def createApplicationClassLoader(): Option[ClassLoader] = Some({ - if (system.settings.Home.isDefined) { - val DEPLOY = system.settings.Home.get + "/deploy" - val DEPLOY_DIR = new File(DEPLOY) - if (!DEPLOY_DIR.exists) { - System.exit(-1) - } - val filesToDeploy = DEPLOY_DIR.listFiles.toArray.toList - .asInstanceOf[List[File]].filter(_.getName.endsWith(".jar")) - var dependencyJars: List[URL] = Nil - filesToDeploy.map { file ⇒ - val jarFile = new JarFile(file) - val en = jarFile.entries - while (en.hasMoreElements) { - val name = en.nextElement.getName - if (name.endsWith(".jar")) dependencyJars ::= new File( - String.format("jar:file:%s!/%s", jarFile.getName, name)).toURI.toURL - } - } - val toDeploy = filesToDeploy.map(_.toURI.toURL) - val allJars = toDeploy ::: dependencyJars - - new URLClassLoader(allJars.toArray, Thread.currentThread.getContextClassLoader) - } else Thread.currentThread.getContextClassLoader - }) - - abstract override def onLoad() = { - super.onLoad() - - applicationLoader foreach Thread.currentThread.setContextClassLoader - - for (loader ← applicationLoader; clazz ← BOOT_CLASSES) { - loader.loadClass(clazz).newInstance - } - } - - abstract override def onUnload() = { - super.onUnload() - } -} - -/** - * Java API for the default JAX-RS/Mist Initializer - */ -class DefaultBootableActorLoaderService(val system: ActorSystem) extends BootableActorLoaderService diff --git a/akka-actor/src/main/scala/akka/actor/FSM.scala b/akka-actor/src/main/scala/akka/actor/FSM.scala index ce7e7f8318..1f8f9cba70 100644 --- 
a/akka-actor/src/main/scala/akka/actor/FSM.scala +++ b/akka-actor/src/main/scala/akka/actor/FSM.scala @@ -511,7 +511,7 @@ trait FSM[S, D] extends ListenerManagement { case _ ⇒ nextState.replies.reverse foreach { r ⇒ sender ! r } terminate(nextState) - self.stop() + context.stop(self) } } diff --git a/akka-actor/src/main/scala/akka/actor/FaultHandling.scala b/akka-actor/src/main/scala/akka/actor/FaultHandling.scala index 87e65002fe..e4e2ee856a 100644 --- a/akka-actor/src/main/scala/akka/actor/FaultHandling.scala +++ b/akka-actor/src/main/scala/akka/actor/FaultHandling.scala @@ -119,12 +119,12 @@ abstract class FaultHandlingStrategy { /** * This method is called after the child has been removed from the set of children. */ - def handleChildTerminated(child: ActorRef, children: Iterable[ActorRef]): Unit + def handleChildTerminated(context: ActorContext, child: ActorRef, children: Iterable[ActorRef]): Unit /** * This method is called to act on the failure of a child: restart if the flag is true, stop otherwise. 
*/ - def processFailure(restart: Boolean, child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[ChildRestartStats]): Unit + def processFailure(context: ActorContext, restart: Boolean, child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[ChildRestartStats]): Unit def handleSupervisorFailing(supervisor: ActorRef, children: Iterable[ActorRef]): Unit = { if (children.nonEmpty) @@ -139,12 +139,12 @@ abstract class FaultHandlingStrategy { /** * Returns whether it processed the failure or not */ - def handleFailure(child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[ChildRestartStats]): Boolean = { + def handleFailure(context: ActorContext, child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[ChildRestartStats]): Boolean = { val action = if (decider.isDefinedAt(cause)) decider(cause) else Escalate action match { case Resume ⇒ child.asInstanceOf[InternalActorRef].resume(); true - case Restart ⇒ processFailure(true, child, cause, stats, children); true - case Stop ⇒ processFailure(false, child, cause, stats, children); true + case Restart ⇒ processFailure(context, true, child, cause, stats, children); true + case Stop ⇒ processFailure(context, false, child, cause, stats, children); true case Escalate ⇒ false } } @@ -192,17 +192,17 @@ case class AllForOneStrategy(decider: FaultHandlingStrategy.Decider, */ val retriesWindow = (maxNrOfRetries, withinTimeRange) - def handleChildTerminated(child: ActorRef, children: Iterable[ActorRef]): Unit = { - children foreach (_.stop()) + def handleChildTerminated(context: ActorContext, child: ActorRef, children: Iterable[ActorRef]): Unit = { + children foreach (context.stop(_)) //TODO optimization to drop all children here already? 
} - def processFailure(restart: Boolean, child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[ChildRestartStats]): Unit = { + def processFailure(context: ActorContext, restart: Boolean, child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[ChildRestartStats]): Unit = { if (children.nonEmpty) { if (restart && children.forall(_.requestRestartPermission(retriesWindow))) children.foreach(_.child.asInstanceOf[InternalActorRef].restart(cause)) else - children.foreach(_.child.stop()) + for (c ← children) context.stop(c.child) } } } @@ -249,13 +249,13 @@ case class OneForOneStrategy(decider: FaultHandlingStrategy.Decider, */ val retriesWindow = (maxNrOfRetries, withinTimeRange) - def handleChildTerminated(child: ActorRef, children: Iterable[ActorRef]): Unit = {} + def handleChildTerminated(context: ActorContext, child: ActorRef, children: Iterable[ActorRef]): Unit = {} - def processFailure(restart: Boolean, child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[ChildRestartStats]): Unit = { + def processFailure(context: ActorContext, restart: Boolean, child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[ChildRestartStats]): Unit = { if (restart && stats.requestRestartPermission(retriesWindow)) child.asInstanceOf[InternalActorRef].restart(cause) else - child.stop() //TODO optimization to drop child here already? + context.stop(child) //TODO optimization to drop child here already? 
} } diff --git a/akka-actor/src/main/scala/akka/actor/IO.scala b/akka-actor/src/main/scala/akka/actor/IO.scala index 1551eef2ec..28bad4f85e 100644 --- a/akka-actor/src/main/scala/akka/actor/IO.scala +++ b/akka-actor/src/main/scala/akka/actor/IO.scala @@ -46,15 +46,15 @@ object IO { override def asReadable = this def read(len: Int)(implicit actor: Actor with IO): ByteString @cps[IOSuspendable[Any]] = shift { cont: (ByteString ⇒ IOSuspendable[Any]) ⇒ - ByteStringLength(cont, this, actor.context.currentMessage, len) + ByteStringLength(cont, this, actor.context.asInstanceOf[ActorCell].currentMessage, len) } def read()(implicit actor: Actor with IO): ByteString @cps[IOSuspendable[Any]] = shift { cont: (ByteString ⇒ IOSuspendable[Any]) ⇒ - ByteStringAny(cont, this, actor.context.currentMessage) + ByteStringAny(cont, this, actor.context.asInstanceOf[ActorCell].currentMessage) } def read(delimiter: ByteString, inclusive: Boolean = false)(implicit actor: Actor with IO): ByteString @cps[IOSuspendable[Any]] = shift { cont: (ByteString ⇒ IOSuspendable[Any]) ⇒ - ByteStringDelimited(cont, this, actor.context.currentMessage, delimiter, inclusive, 0) + ByteStringDelimited(cont, this, actor.context.asInstanceOf[ActorCell].currentMessage, delimiter, inclusive, 0) } } @@ -158,7 +158,7 @@ trait IO { } run() case msg if _next ne Idle ⇒ - _messages enqueue context.currentMessage + _messages enqueue context.asInstanceOf[ActorCell].currentMessage case msg if _receiveIO.isDefinedAt(msg) ⇒ _next = reset { _receiveIO(msg); Idle } run() diff --git a/akka-actor/src/main/scala/akka/actor/Locker.scala b/akka-actor/src/main/scala/akka/actor/Locker.scala new file mode 100644 index 0000000000..8bbcdd15e6 --- /dev/null +++ b/akka-actor/src/main/scala/akka/actor/Locker.scala @@ -0,0 +1,50 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. 
+ */ +package akka.actor + +import akka.dispatch._ +import akka.util.Duration +import akka.util.duration._ +import java.util.concurrent.ConcurrentHashMap +import akka.event.DeathWatch + +class Locker(scheduler: Scheduler, period: Duration, val path: ActorPath, val deathWatch: DeathWatch) extends MinimalActorRef { + + class DavyJones extends Runnable { + def run = { + val iter = heap.entrySet.iterator + while (iter.hasNext) { + val soul = iter.next() + deathWatch.subscribe(Locker.this, soul.getKey) // in case Terminated got lost somewhere + soul.getKey match { + case _: LocalActorRef ⇒ // nothing to do, they know what they signed up for + case nonlocal ⇒ nonlocal.stop() // try again in case it was due to a communications failure + } + } + } + } + + private val heap = new ConcurrentHashMap[InternalActorRef, Long] + + scheduler.schedule(period, period, new DavyJones) + + override def sendSystemMessage(msg: SystemMessage): Unit = this.!(msg) + + override def !(msg: Any)(implicit sender: ActorRef = null): Unit = msg match { + case Terminated(soul) ⇒ heap.remove(soul) + case ChildTerminated(soul) ⇒ heap.remove(soul) + case soul: InternalActorRef ⇒ + heap.put(soul, 0l) // wanted to put System.nanoTime and do something intelligent, but forgot what that was + deathWatch.subscribe(this, soul) + // now re-bind the soul so that it does not drown its parent + soul match { + case local: LocalActorRef ⇒ + val cell = local.underlying + cell.parent = this + case _ ⇒ + } + case _ ⇒ // ignore + } + +} diff --git a/akka-actor/src/main/scala/akka/actor/Scheduler.scala b/akka-actor/src/main/scala/akka/actor/Scheduler.scala index 7075ee0a8a..e1d502f5b4 100644 --- a/akka-actor/src/main/scala/akka/actor/Scheduler.scala +++ b/akka-actor/src/main/scala/akka/actor/Scheduler.scala @@ -42,6 +42,15 @@ trait Scheduler { */ def schedule(initialDelay: Duration, frequency: Duration)(f: ⇒ Unit): Cancellable + /** + * Schedules a function to be run repeatedly with an initial delay and a frequency. 
+ * E.g. if you would like the function to be run after 2 seconds and thereafter every 100ms you would set + * delay = Duration(2, TimeUnit.SECONDS) and frequency = Duration(100, TimeUnit.MILLISECONDS) + * + * Java API + */ + def schedule(initialDelay: Duration, frequency: Duration, runnable: Runnable): Cancellable + /** * Schedules a Runnable to be run once with a delay, i.e. a time period that has to pass before the runnable is executed. * diff --git a/akka-actor/src/main/scala/akka/actor/TypedActor.scala b/akka-actor/src/main/scala/akka/actor/TypedActor.scala index e16d027fd8..daf8f075d8 100644 --- a/akka-actor/src/main/scala/akka/actor/TypedActor.scala +++ b/akka-actor/src/main/scala/akka/actor/TypedActor.scala @@ -11,6 +11,7 @@ import java.util.concurrent.atomic.{ AtomicReference ⇒ AtomVar } import akka.serialization.{ Serializer, Serialization } import akka.dispatch._ import akka.serialization.SerializationExtension +import java.util.concurrent.TimeoutException trait TypedActorFactory { @@ -24,7 +25,7 @@ trait TypedActorFactory { */ def stop(proxy: AnyRef): Boolean = getActorRefFor(proxy) match { case null ⇒ false - case ref ⇒ ref.stop; true + case ref ⇒ ref.asInstanceOf[InternalActorRef].stop; true } /** @@ -338,10 +339,8 @@ object TypedActor extends ExtensionId[TypedActorExtension] with ExtensionIdProvi if (m.returnsFuture_?) { val s = sender m(me).asInstanceOf[Future[Any]] onComplete { - _.value.get match { - case Left(f) ⇒ s ! Status.Failure(f) - case Right(r) ⇒ s ! r - } + case Left(f) ⇒ s ! Status.Failure(f) + case Right(r) ⇒ s ! r } } else { sender ! m(me) @@ -418,12 +417,12 @@ object TypedActor extends ExtensionId[TypedActorExtension] with ExtensionIdProvi case m if m.returnsFuture_? ⇒ actor.?(m, timeout) case m if m.returnsJOption_? || m.returnsOption_? 
⇒ val f = actor.?(m, timeout) - (try { f.await.value } catch { case _: FutureTimeoutException ⇒ None }) match { + (try { Await.ready(f, timeout.duration).value } catch { case _: TimeoutException ⇒ None }) match { case None | Some(Right(null)) ⇒ if (m.returnsJOption_?) JOption.none[Any] else None case Some(Right(joption: AnyRef)) ⇒ joption case Some(Left(ex)) ⇒ throw ex } - case m ⇒ (actor.?(m, timeout)).get.asInstanceOf[AnyRef] + case m ⇒ Await.result(actor.?(m, timeout), timeout.duration).asInstanceOf[AnyRef] } } } diff --git a/akka-actor/src/main/scala/akka/actor/UntypedActor.scala b/akka-actor/src/main/scala/akka/actor/UntypedActor.scala index 1692396a8f..ccac32f82f 100644 --- a/akka-actor/src/main/scala/akka/actor/UntypedActor.scala +++ b/akka-actor/src/main/scala/akka/actor/UntypedActor.scala @@ -8,46 +8,67 @@ import akka.japi.{ Creator, Procedure } import akka.dispatch.{ MessageDispatcher, Promise } /** + * Actor base trait that should be extended by or mixed to create an Actor with the semantics of the 'Actor Model': + * http://en.wikipedia.org/wiki/Actor_model + * + * This class is the Java cousin to the [[akka.actor.Actor]] Scala interface. * Subclass this abstract class to create a MDB-style untyped actor. - *

- * This class is meant to be used from Java. - *

+ * + * An actor has a well-defined (non-cyclic) life-cycle. + * - ''RUNNING'' (created and started actor) - can receive messages + * - ''SHUTDOWN'' (when 'stop' or 'exit' is invoked) - can't do anything + * + * The Actor's own [[akka.actor.ActorRef]] is available as `getSelf()`, the current + * message’s sender as `getSender()` and the [[akka.actor.UntypedActorContext]] as + * `getContext()`. The only abstract method is `onReceive()` which is invoked for + * each processed message unless dynamically overridden using `getContext().become()`. + * * Here is an example on how to create and use an UntypedActor: - *

+ *
+ * {{{
  *  public class SampleUntypedActor extends UntypedActor {
+ *
+ *    public class Reply {
+ *      final public ActorRef sender;
+ *      final public Result result;
+ *      Reply(ActorRef sender, Result result) {
+ *        this.sender = sender;
+ *        this.result = result;
+ *      }
+ *    }
+ *
  *    public void onReceive(Object message) throws Exception {
  *      if (message instanceof String) {
  *        String msg = (String)message;
  *
- *        if (msg.equals("UseReply")) {
- *          // Reply to original sender of message using the 'reply' method
- *          getContext().getSender().tell(msg + ":" + getSelf().getAddress());
- *
- *        } else if (msg.equals("UseSender") && getSender().isDefined()) {
- *          // Reply to original sender of message using the sender reference
- *          // also passing along my own reference (the self)
- *          getSender().get().tell(msg, getSelf());
+ *        if (msg.equals("UseSender")) {
+ *          // Reply to original sender of message
+ *          getSender().tell(msg + ":" + getSelf());
  *
  *        } else if (msg.equals("SendToSelf")) {
  *          // Send message to the actor itself recursively
- *          getSelf().tell(msg)
+ *          getSelf().tell("SomeOtherMessage");
  *
- *        } else if (msg.equals("ForwardMessage")) {
- *          // Retreive an actor from the ActorRegistry by ID and get an ActorRef back
- *          ActorRef actorRef = Actor.registry.local.actorsFor("some-actor-id").head();
+ *        } else if (msg.equals("ErrorKernelWithDirectReply")) {
+ *          // Send work to one-off child which will reply directly to original sender
+ *          getContext().actorOf(new Props(Worker.class)).tell("DoSomeDangerousWork", getSender());
+ *
+ *        } else if (msg.equals("ErrorKernelWithReplyHere")) {
+ *          // Send work to one-off child and collect the answer, reply handled further down
+ *          getContext().actorOf(new Props(Worker.class)).tell("DoWorkAndReplyToMe");
  *
  *        } else throw new IllegalArgumentException("Unknown message: " + message);
+ *
+ *      } else if (message instanceof Reply) {
+ *
+ *        final Reply reply = (Reply) message;
+ *        // might want to do some processing/book-keeping here
+ *        reply.sender.tell(reply.result);
+ *
  *      } else throw new IllegalArgumentException("Unknown message: " + message);
  *    }
- *
- *    public static void main(String[] args) {
- *      ActorSystem system = ActorSystem.create("Sample");
- *      ActorRef actor = system.actorOf(SampleUntypedActor.class);
- *      actor.tell("SendToSelf");
- *      actor.stop();
- *    }
  *  }
- * 
+ * }}} */ abstract class UntypedActor extends Actor { @@ -65,8 +86,9 @@ abstract class UntypedActor extends Actor { def getSelf(): ActorRef = self /** - * The reference sender Actor of the last received message. - * Is defined if the message was sent from another Actor, else None. + * The reference sender Actor of the currently processed message. This is + * always a legal destination to send to, even if there is no logical recipient + * for the reply, in which case it will be sent to the dead letter mailbox. */ def getSender(): ActorRef = sender @@ -77,7 +99,7 @@ abstract class UntypedActor extends Actor { * Actor are automatically started asynchronously when created. * Empty default implementation. */ - override def preStart() {} + override def preStart(): Unit = super.preStart() /** * User overridable callback. @@ -85,24 +107,22 @@ abstract class UntypedActor extends Actor { * Is called asynchronously after 'actor.stop()' is invoked. * Empty default implementation. */ - override def postStop() {} + override def postStop(): Unit = super.postStop() /** - * User overridable callback. + * User overridable callback: '''By default it disposes of all children and then calls `postStop()`.''' *

* Is called on a crashed Actor right BEFORE it is restarted to allow clean * up of resources before Actor is terminated. - * By default it calls postStop() */ - override def preRestart(reason: Throwable, message: Option[Any]) { postStop() } + override def preRestart(reason: Throwable, message: Option[Any]): Unit = super.preRestart(reason, message) /** - * User overridable callback. + * User overridable callback: By default it calls `preStart()`. *

* Is called right AFTER restart on the newly created Actor to allow reinitialization after an Actor crash. - * By default it calls preStart() */ - override def postRestart(reason: Throwable) { preStart() } + override def postRestart(reason: Throwable): Unit = super.postRestart(reason) /** * User overridable callback. diff --git a/akka-actor/src/main/scala/akka/actor/package.scala b/akka-actor/src/main/scala/akka/actor/package.scala index 569c66f03e..cfe5bc1b0d 100644 --- a/akka-actor/src/main/scala/akka/actor/package.scala +++ b/akka-actor/src/main/scala/akka/actor/package.scala @@ -30,8 +30,10 @@ package object actor { implicit def future2actor[T](f: akka.dispatch.Future[T]) = new { def pipeTo(actor: ActorRef): this.type = { - def send(f: akka.dispatch.Future[T]) { f.value.get.fold(f ⇒ actor ! Status.Failure(f), r ⇒ actor ! r) } - if (f.isCompleted) send(f) else f onComplete send + f onComplete { + case Right(r) ⇒ actor ! r + case Left(f) ⇒ actor ! Status.Failure(f) + } this } } diff --git a/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala b/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala index cdcb056372..21f2dbd26f 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala @@ -97,28 +97,6 @@ class Dispatchers(val settings: ActorSystem.Settings, val prerequisites: Dispatc } } - /** - * Creates an thread based dispatcher serving a single actor through the same single thread. - * Uses the default timeout - *

- * E.g. each actor consumes its own thread. - */ - def newPinnedDispatcher(actor: LocalActorRef) = actor match { - case null ⇒ new PinnedDispatcher(prerequisites, null, "anon", MailboxType, settings.DispatcherDefaultShutdown) - case some ⇒ new PinnedDispatcher(prerequisites, some.underlying, some.path.toString, MailboxType, settings.DispatcherDefaultShutdown) - } - - /** - * Creates an thread based dispatcher serving a single actor through the same single thread. - * If capacity is negative, it's Integer.MAX_VALUE - *

- * E.g. each actor consumes its own thread. - */ - def newPinnedDispatcher(actor: LocalActorRef, mailboxType: MailboxType) = actor match { - case null ⇒ new PinnedDispatcher(prerequisites, null, "anon", mailboxType, settings.DispatcherDefaultShutdown) - case some ⇒ new PinnedDispatcher(prerequisites, some.underlying, some.path.toString, mailboxType, settings.DispatcherDefaultShutdown) - } - /** * Creates an thread based dispatcher serving a single actor through the same single thread. *

diff --git a/akka-actor/src/main/scala/akka/dispatch/Future.scala b/akka-actor/src/main/scala/akka/dispatch/Future.scala index a09f28f6a9..2bbc1fcb15 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Future.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Future.scala @@ -13,7 +13,6 @@ import akka.japi.{ Procedure, Function ⇒ JFunc, Option ⇒ JOption } import scala.util.continuations._ -import java.util.concurrent.{ ConcurrentLinkedQueue, TimeUnit, Callable } import java.util.concurrent.TimeUnit.{ NANOSECONDS, MILLISECONDS } import java.lang.{ Iterable ⇒ JIterable } import java.util.{ LinkedList ⇒ JLinkedList } @@ -22,68 +21,56 @@ import scala.annotation.tailrec import scala.collection.mutable.Stack import akka.util.{ Switch, Duration, BoxedType } import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicBoolean } +import java.util.concurrent.{ TimeoutException, ConcurrentLinkedQueue, TimeUnit, Callable } +import akka.dispatch.Await.CanAwait -class FutureTimeoutException(message: String, cause: Throwable = null) extends AkkaException(message, cause) { - def this(message: String) = this(message, null) +object Await { + sealed trait CanAwait + + trait Awaitable[+T] { + /** + * Should throw java.util.concurrent.TimeoutException if times out + */ + def ready(atMost: Duration)(implicit permit: CanAwait): this.type + + /** + * Throws exceptions if cannot produce a T within the specified time + */ + def result(atMost: Duration)(implicit permit: CanAwait): T + } + + private implicit val permit = new CanAwait {} + + def ready[T <: Awaitable[_]](awaitable: T, atMost: Duration): T = awaitable.ready(atMost) + def result[T](awaitable: Awaitable[T], atMost: Duration): T = awaitable.result(atMost) } -class FutureFactory()(implicit dispatcher: MessageDispatcher, timeout: Timeout) { +object Futures { /** * Java API, equivalent to Future.apply */ - def future[T](body: Callable[T]): Future[T] = - Future(body.call, timeout) + def future[T](body: 
Callable[T], dispatcher: MessageDispatcher): Future[T] = Future(body.call)(dispatcher) /** - * Java API, equivalent to Future.apply + * Java API, equivalent to Promise.apply */ - def future[T](body: Callable[T], timeout: Timeout): Future[T] = - Future(body.call, timeout) - - /** - * Java API, equivalent to Future.apply - */ - def future[T](body: Callable[T], timeout: Long): Future[T] = - Future(body.call, timeout) - - /** - * Java API, equivalent to Future.apply - */ - def future[T](body: Callable[T], dispatcher: MessageDispatcher): Future[T] = - Future(body.call)(dispatcher, timeout) - - /** - * Java API, equivalent to Future.apply - */ - def future[T](body: Callable[T], timeout: Timeout, dispatcher: MessageDispatcher): Future[T] = - Future(body.call)(dispatcher, timeout) - - /** - * Java API, equivalent to Future.apply - */ - def future[T](body: Callable[T], timeout: Long, dispatcher: MessageDispatcher): Future[T] = - Future(body.call)(dispatcher, timeout) + def promise[T](dispatcher: MessageDispatcher): Promise[T] = Promise[T]()(dispatcher) /** * Java API. * Returns a Future that will hold the optional result of the first Future with a result that matches the predicate */ - def find[T <: AnyRef](futures: JIterable[Future[T]], predicate: JFunc[T, java.lang.Boolean], timeout: Timeout): Future[JOption[T]] = { - val pred: T ⇒ Boolean = predicate.apply(_) - Future.find[T]((scala.collection.JavaConversions.iterableAsScalaIterable(futures)), timeout)(pred).map(JOption.fromScalaOption(_))(timeout) + def find[T <: AnyRef](futures: JIterable[Future[T]], predicate: JFunc[T, java.lang.Boolean], dispatcher: MessageDispatcher): Future[JOption[T]] = { + Future.find[T]((scala.collection.JavaConversions.iterableAsScalaIterable(futures)))(predicate.apply(_))(dispatcher).map(JOption.fromScalaOption(_)) } - def find[T <: AnyRef](futures: JIterable[Future[T]], predicate: JFunc[T, java.lang.Boolean]): Future[JOption[T]] = find(futures, predicate, timeout) - /** * Java API. 
* Returns a Future to the result of the first future in the list that is completed */ - def firstCompletedOf[T <: AnyRef](futures: JIterable[Future[T]], timeout: Timeout): Future[T] = - Future.firstCompletedOf(scala.collection.JavaConversions.iterableAsScalaIterable(futures), timeout) - - def firstCompletedOf[T <: AnyRef](futures: JIterable[Future[T]]): Future[T] = firstCompletedOf(futures, timeout) + def firstCompletedOf[T <: AnyRef](futures: JIterable[Future[T]], dispatcher: MessageDispatcher): Future[T] = + Future.firstCompletedOf(scala.collection.JavaConversions.iterableAsScalaIterable(futures))(dispatcher) /** * Java API @@ -92,31 +79,23 @@ class FutureFactory()(implicit dispatcher: MessageDispatcher, timeout: Timeout) * the result will be the first failure of any of the futures, or any failure in the actual fold, * or the result of the fold. */ - def fold[T <: AnyRef, R <: AnyRef](zero: R, timeout: Timeout, futures: java.lang.Iterable[Future[T]], fun: akka.japi.Function2[R, T, R]): Future[R] = - Future.fold(scala.collection.JavaConversions.iterableAsScalaIterable(futures), timeout)(zero)(fun.apply _) - - def fold[T <: AnyRef, R <: AnyRef](zero: R, timeout: Long, futures: java.lang.Iterable[Future[T]], fun: akka.japi.Function2[R, T, R]): Future[R] = fold(zero, timeout: Timeout, futures, fun) - - def fold[T <: AnyRef, R <: AnyRef](zero: R, futures: java.lang.Iterable[Future[T]], fun: akka.japi.Function2[R, T, R]): Future[R] = fold(zero, timeout, futures, fun) + def fold[T <: AnyRef, R <: AnyRef](zero: R, futures: JIterable[Future[T]], fun: akka.japi.Function2[R, T, R], dispatcher: MessageDispatcher): Future[R] = + Future.fold(scala.collection.JavaConversions.iterableAsScalaIterable(futures))(zero)(fun.apply _)(dispatcher) /** * Java API. 
* Initiates a fold over the supplied futures where the fold-zero is the result value of the Future that's completed first */ - def reduce[T <: AnyRef, R >: T](futures: java.lang.Iterable[Future[T]], timeout: Timeout, fun: akka.japi.Function2[R, T, T]): Future[R] = - Future.reduce(scala.collection.JavaConversions.iterableAsScalaIterable(futures), timeout)(fun.apply _) - - def reduce[T <: AnyRef, R >: T](futures: java.lang.Iterable[Future[T]], timeout: Long, fun: akka.japi.Function2[R, T, T]): Future[R] = reduce(futures, timeout: Timeout, fun) - - def reduce[T <: AnyRef, R >: T](futures: java.lang.Iterable[Future[T]], fun: akka.japi.Function2[R, T, T]): Future[R] = reduce(futures, timeout, fun) + def reduce[T <: AnyRef, R >: T](futures: JIterable[Future[T]], fun: akka.japi.Function2[R, T, T], dispatcher: MessageDispatcher): Future[R] = + Future.reduce(scala.collection.JavaConversions.iterableAsScalaIterable(futures))(fun.apply _)(dispatcher) /** * Java API. - * Simple version of Future.traverse. Transforms a java.lang.Iterable[Future[A]] into a Future[java.lang.Iterable[A]]. + * Simple version of Future.traverse. Transforms a JIterable[Future[A]] into a Future[JIterable[A]]. * Useful for reducing many Futures into a single Future. */ - def sequence[A](in: JIterable[Future[A]], timeout: Timeout): Future[JIterable[A]] = { - implicit val t = timeout + def sequence[A](in: JIterable[Future[A]], dispatcher: MessageDispatcher): Future[JIterable[A]] = { + implicit val d = dispatcher scala.collection.JavaConversions.iterableAsScalaIterable(in).foldLeft(Future(new JLinkedList[A]()))((fr, fa) ⇒ for (r ← fr; a ← fa) yield { r add a @@ -124,27 +103,19 @@ class FutureFactory()(implicit dispatcher: MessageDispatcher, timeout: Timeout) }) } - def sequence[A](in: JIterable[Future[A]]): Future[JIterable[A]] = sequence(in, timeout) - /** * Java API. - * Transforms a java.lang.Iterable[A] into a Future[java.lang.Iterable[B]] using the provided Function A ⇒ Future[B]. 
+ * Transforms a JIterable[A] into a Future[JIterable[B]] using the provided Function A ⇒ Future[B]. * This is useful for performing a parallel map. For example, to apply a function to all items of a list * in parallel. */ - def traverse[A, B](in: JIterable[A], timeout: Timeout, fn: JFunc[A, Future[B]]): Future[JIterable[B]] = { - implicit val t = timeout + def traverse[A, B](in: JIterable[A], fn: JFunc[A, Future[B]], dispatcher: MessageDispatcher): Future[JIterable[B]] = { + implicit val d = dispatcher scala.collection.JavaConversions.iterableAsScalaIterable(in).foldLeft(Future(new JLinkedList[B]())) { (fr, a) ⇒ val fb = fn(a) - for (r ← fr; b ← fb) yield { - r add b - r - } + for (r ← fr; b ← fb) yield { r add b; r } } } - - def traverse[A, B](in: JIterable[A], fn: JFunc[A, Future[B]]): Future[JIterable[B]] = traverse(in, timeout, fn) - } object Future { @@ -153,8 +124,8 @@ object Future { * This method constructs and returns a Future that will eventually hold the result of the execution of the supplied body * The execution is performed by the specified Dispatcher. 
*/ - def apply[T](body: ⇒ T)(implicit dispatcher: MessageDispatcher, timeout: Timeout): Future[T] = { - val promise = new DefaultPromise[T](timeout) + def apply[T](body: ⇒ T)(implicit dispatcher: MessageDispatcher): Future[T] = { + val promise = Promise[T]() dispatcher dispatchTask { () ⇒ promise complete { try { @@ -168,15 +139,6 @@ object Future { promise } - def apply[T](body: ⇒ T, timeout: Timeout)(implicit dispatcher: MessageDispatcher): Future[T] = - apply(body)(dispatcher, timeout) - - def apply[T](body: ⇒ T, timeout: Duration)(implicit dispatcher: MessageDispatcher): Future[T] = - apply(body)(dispatcher, timeout) - - def apply[T](body: ⇒ T, timeout: Long)(implicit dispatcher: MessageDispatcher): Future[T] = - apply(body)(dispatcher, timeout) - import scala.collection.mutable.Builder import scala.collection.generic.CanBuildFrom @@ -184,50 +146,45 @@ object Future { * Simple version of Futures.traverse. Transforms a Traversable[Future[A]] into a Future[Traversable[A]]. * Useful for reducing many Futures into a single Future. 
*/ - def sequence[A, M[_] <: Traversable[_]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], timeout: Timeout, dispatcher: MessageDispatcher): Future[M[A]] = - in.foldLeft(new KeptPromise(Right(cbf(in))): Future[Builder[A, M[A]]])((fr, fa) ⇒ for (r ← fr; a ← fa.asInstanceOf[Future[A]]) yield (r += a)).map(_.result) - - def sequence[A, M[_] <: Traversable[_]](in: M[Future[A]], timeout: Timeout)(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], dispatcher: MessageDispatcher): Future[M[A]] = - sequence(in)(cbf, timeout, dispatcher) + def sequence[A, M[_] <: Traversable[_]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], dispatcher: MessageDispatcher): Future[M[A]] = + in.foldLeft(Promise.successful(cbf(in)): Future[Builder[A, M[A]]])((fr, fa) ⇒ for (r ← fr; a ← fa.asInstanceOf[Future[A]]) yield (r += a)).map(_.result) /** * Returns a Future to the result of the first future in the list that is completed */ - def firstCompletedOf[T](futures: Iterable[Future[T]])(implicit dispatcher: MessageDispatcher, timeout: Timeout): Future[T] = { - val futureResult = new DefaultPromise[T](timeout) + def firstCompletedOf[T](futures: Traversable[Future[T]])(implicit dispatcher: MessageDispatcher): Future[T] = { + val futureResult = Promise[T]() - val completeFirst: Future[T] ⇒ Unit = _.value.foreach(futureResult complete _) + val completeFirst: Either[Throwable, T] ⇒ Unit = futureResult complete _ futures.foreach(_ onComplete completeFirst) futureResult } - def firstCompletedOf[T](futures: Iterable[Future[T]], timeout: Timeout)(implicit dispatcher: MessageDispatcher): Future[T] = - firstCompletedOf(futures)(dispatcher, timeout) - /** * Returns a Future that will hold the optional result of the first Future with a result that matches the predicate */ - def find[T](futures: Iterable[Future[T]])(predicate: T ⇒ Boolean)(implicit dispatcher: MessageDispatcher, timeout: Timeout): Future[Option[T]] = { - if (futures.isEmpty) new 
KeptPromise[Option[T]](Right(None)) + def find[T](futures: Traversable[Future[T]])(predicate: T ⇒ Boolean)(implicit dispatcher: MessageDispatcher): Future[Option[T]] = { + if (futures.isEmpty) Promise.successful[Option[T]](None) else { - val result = new DefaultPromise[Option[T]](timeout) + val result = Promise[Option[T]]() val ref = new AtomicInteger(futures.size) - val search: Future[T] ⇒ Unit = f ⇒ try { - f.result.filter(predicate).foreach(r ⇒ result completeWithResult Some(r)) + val search: Either[Throwable, T] ⇒ Unit = v ⇒ try { + v match { + case Right(r) ⇒ if (predicate(r)) result success Some(r) + case _ ⇒ + } } finally { if (ref.decrementAndGet == 0) - result completeWithResult None + result success None } + futures.foreach(_ onComplete search) result } } - def find[T](futures: Iterable[Future[T]], timeout: Timeout)(predicate: T ⇒ Boolean)(implicit dispatcher: MessageDispatcher): Future[Option[T]] = - find(futures)(predicate)(dispatcher, timeout) - /** * A non-blocking fold over the specified futures. * The fold is performed on the thread where the last future is completed, @@ -235,83 +192,25 @@ object Future { * or the result of the fold. * Example: *

-   *   val result = Futures.fold(0)(futures)(_ + _).await.result
+   *   val result = Await.result(Futures.fold(0)(futures)(_ + _), 5 seconds)
    * 
*/ - def fold[T, R](futures: Iterable[Future[T]])(zero: R)(foldFun: (R, T) ⇒ R)(implicit dispatcher: MessageDispatcher, timeout: Timeout): Future[R] = { - if (futures.isEmpty) { - new KeptPromise[R](Right(zero)) - } else { - val result = new DefaultPromise[R](timeout) - val results = new ConcurrentLinkedQueue[T]() - val done = new Switch(false) - val allDone = futures.size - - val aggregate: Future[T] ⇒ Unit = f ⇒ if (done.isOff && !result.isCompleted) { //TODO: This is an optimization, is it premature? - f.value.get match { - case Right(value) ⇒ - val added = results add value - if (added && results.size == allDone) { //Only one thread can get here - if (done.switchOn) { - try { - val i = results.iterator - var currentValue = zero - while (i.hasNext) { currentValue = foldFun(currentValue, i.next) } - result completeWithResult currentValue - } catch { - case e: Exception ⇒ - dispatcher.prerequisites.eventStream.publish(Error(e, "Future.fold", e.getMessage)) - result completeWithException e - } finally { - results.clear - } - } - } - case Left(exception) ⇒ - if (done.switchOn) { - result completeWithException exception - results.clear - } - } - } - - futures foreach { _ onComplete aggregate } - result - } + def fold[T, R](futures: Traversable[Future[T]])(zero: R)(foldFun: (R, T) ⇒ R)(implicit dispatcher: MessageDispatcher): Future[R] = { + if (futures.isEmpty) Promise.successful(zero) + else sequence(futures).map(_.foldLeft(zero)(foldFun)) } - def fold[T, R](futures: Iterable[Future[T]], timeout: Timeout)(zero: R)(foldFun: (R, T) ⇒ R)(implicit dispatcher: MessageDispatcher): Future[R] = - fold(futures)(zero)(foldFun)(dispatcher, timeout) - /** * Initiates a fold over the supplied futures where the fold-zero is the result value of the Future that's completed first * Example: *
-   *   val result = Futures.reduce(futures)(_ + _).await.result
+   *   val result = Await.result(Futures.reduce(futures)(_ + _), 5 seconds)
    * 
*/ - def reduce[T, R >: T](futures: Iterable[Future[T]])(op: (R, T) ⇒ T)(implicit dispatcher: MessageDispatcher, timeout: Timeout): Future[R] = { - if (futures.isEmpty) - new KeptPromise[R](Left(new UnsupportedOperationException("empty reduce left"))) - else { - val result = new DefaultPromise[R](timeout) - val seedFound = new AtomicBoolean(false) - val seedFold: Future[T] ⇒ Unit = f ⇒ { - if (seedFound.compareAndSet(false, true)) { //Only the first completed should trigger the fold - f.value.get match { - case Right(value) ⇒ result.completeWith(fold(futures.filterNot(_ eq f))(value)(op)) - case Left(exception) ⇒ result.completeWithException(exception) - } - } - } - for (f ← futures) f onComplete seedFold //Attach the listener to the Futures - result - } + def reduce[T, R >: T](futures: Traversable[Future[T]])(op: (R, T) ⇒ T)(implicit dispatcher: MessageDispatcher): Future[R] = { + if (futures.isEmpty) Promise[R].failure(new NoSuchElementException("reduce attempted on empty collection")) + else sequence(futures).map(_ reduce op) } - - def reduce[T, R >: T](futures: Iterable[Future[T]], timeout: Timeout)(op: (R, T) ⇒ T)(implicit dispatcher: MessageDispatcher): Future[R] = - reduce(futures)(op)(dispatcher, timeout) - /** * Transforms a Traversable[A] into a Future[Traversable[B]] using the provided Function A ⇒ Future[B]. * This is useful for performing a parallel map. For example, to apply a function to all items of a list @@ -320,15 +219,12 @@ object Future { * val myFutureList = Futures.traverse(myList)(x ⇒ Future(myFunc(x))) *
*/ - def traverse[A, B, M[_] <: Traversable[_]](in: M[A])(fn: A ⇒ Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], timeout: Timeout, dispatcher: MessageDispatcher): Future[M[B]] = - in.foldLeft(new KeptPromise(Right(cbf(in))): Future[Builder[B, M[B]]]) { (fr, a) ⇒ + def traverse[A, B, M[_] <: Traversable[_]](in: M[A])(fn: A ⇒ Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], dispatcher: MessageDispatcher): Future[M[B]] = + in.foldLeft(Promise.successful(cbf(in)): Future[Builder[B, M[B]]]) { (fr, a) ⇒ val fb = fn(a.asInstanceOf[A]) for (r ← fr; b ← fb) yield (r += b) }.map(_.result) - def traverse[A, B, M[_] <: Traversable[_]](in: M[A], timeout: Timeout)(fn: A ⇒ Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], dispatcher: MessageDispatcher): Future[M[B]] = - traverse(in)(fn)(cbf, timeout, dispatcher) - /** * Captures a block that will be transformed into 'Continuation Passing Style' using Scala's Delimited * Continuations plugin. @@ -345,18 +241,16 @@ object Future { * * The Delimited Continuations compiler plugin must be enabled in order to use this method. 
*/ - def flow[A](body: ⇒ A @cps[Future[Any]])(implicit dispatcher: MessageDispatcher, timeout: Timeout): Future[A] = { - val future = Promise[A](timeout) + def flow[A](body: ⇒ A @cps[Future[Any]])(implicit dispatcher: MessageDispatcher): Future[A] = { + val future = Promise[A] dispatchTask({ () ⇒ - (reify(body) foreachFull (future completeWithResult, future completeWithException): Future[Any]) onException { - case e: Exception ⇒ future completeWithException e + (reify(body) foreachFull (future success, future failure): Future[Any]) onFailure { + case e: Exception ⇒ future failure e } }, true) future } - // TODO make variant of flow(timeout)(body) which does NOT break type inference - /** * Assures that any Future tasks initiated in the current thread will be * executed asynchronously, including any tasks currently queued to be @@ -364,7 +258,7 @@ object Future { * block, causing delays in executing the remaining tasks which in some * cases may cause a deadlock. * - * Note: Calling 'Future.await' will automatically trigger this method. + * Note: Calling 'Await.result(future)' or 'Await.ready(future)' will automatically trigger this method. * * For example, in the following block of code the call to 'latch.open' * might not be executed until after the call to 'latch.await', causing @@ -381,7 +275,7 @@ object Future { * } * */ - def blocking()(implicit dispatcher: MessageDispatcher): Unit = + def blocking(implicit dispatcher: MessageDispatcher): Unit = _taskStack.get match { case Some(taskStack) if taskStack.nonEmpty ⇒ val tasks = taskStack.elems @@ -419,7 +313,7 @@ object Future { } } -sealed trait Future[+T] extends japi.Future[T] { +sealed trait Future[+T] extends japi.Future[T] with Await.Awaitable[T] { implicit def dispatcher: MessageDispatcher @@ -429,92 +323,13 @@ sealed trait Future[+T] extends japi.Future[T] { * Returns the result of this Future without blocking, by suspending execution and storing it as a * continuation until the result is available. 
*/ - def apply()(implicit timeout: Timeout): T @cps[Future[Any]] = shift(this flatMap (_: T ⇒ Future[Any])) - - /** - * Blocks awaiting completion of this Future, then returns the resulting value, - * or throws the completed exception - * - * Scala & Java API - * - * throws FutureTimeoutException if this Future times out when waiting for completion - */ - def get: T = this.await.resultOrException.get - - /** - * Blocks the current thread until the Future has been completed or the - * timeout has expired. In the case of the timeout expiring a - * FutureTimeoutException will be thrown. - */ - def await: Future[T] - - /** - * Blocks the current thread until the Future has been completed or the - * timeout has expired, additionally bounding the waiting period according to - * the atMost parameter. The timeout will be the lesser value of - * 'atMost' and the timeout supplied at the constructuion of this Future. In - * the case of the timeout expiring a FutureTimeoutException will be thrown. - * Other callers of this method are not affected by the additional bound - * imposed by atMost. - */ - def await(atMost: Duration): Future[T] - - /** - * Await completion of this Future and return its value if it conforms to A's - * erased type. Will throw a ClassCastException if the value does not - * conform, or any exception the Future was completed with. Will return None - * in case of a timeout. 
- */ - def as[A](implicit m: Manifest[A]): Option[A] = { - try await catch { case _: FutureTimeoutException ⇒ } - value match { - case None ⇒ None - case Some(Left(ex)) ⇒ throw ex - case Some(Right(v)) ⇒ - try { Some(BoxedType(m.erasure).cast(v).asInstanceOf[A]) } catch { - case c: ClassCastException ⇒ - if (v.asInstanceOf[AnyRef] eq null) throw new ClassCastException("null cannot be cast to " + m.erasure) - else throw new ClassCastException("'" + v + "' of class " + v.asInstanceOf[AnyRef].getClass + " cannot be cast to " + m.erasure) - } - } - } - - /** - * Await completion of this Future and return its value if it conforms to A's - * erased type, None otherwise. Will throw any exception the Future was - * completed with. Will return None in case of a timeout. - */ - def asSilently[A](implicit m: Manifest[A]): Option[A] = { - try await catch { case _: FutureTimeoutException ⇒ } - value match { - case None ⇒ None - case Some(Left(ex)) ⇒ throw ex - case Some(Right(v)) ⇒ - try Some(BoxedType(m.erasure).cast(v).asInstanceOf[A]) - catch { case _: ClassCastException ⇒ None } - } - } + def apply(): T @cps[Future[Any]] = shift(this flatMap (_: T ⇒ Future[Any])) /** * Tests whether this Future has been completed. */ final def isCompleted: Boolean = value.isDefined - /** - * Tests whether this Future's timeout has expired. - * - * Note that an expired Future may still contain a value, or it may be - * completed with a value. - */ - def isExpired: Boolean - - def timeout: Timeout - - /** - * This Future's timeout in nanoseconds. - */ - def timeoutInNanos = if (timeout.duration.isFinite) timeout.duration.toNanos else Long.MaxValue - /** * The contained value of this Future. Before this Future is completed * the value will be None. After completion the value will be Some(Right(t)) @@ -523,68 +338,64 @@ sealed trait Future[+T] extends japi.Future[T] { */ def value: Option[Either[Throwable, T]] - /** - * Returns the successful result of this Future if it exists. 
- */ - final def result: Option[T] = value match { - case Some(Right(r)) ⇒ Some(r) - case _ ⇒ None - } - - /** - * Returns the contained exception of this Future if it exists. - */ - final def exception: Option[Throwable] = value match { - case Some(Left(e)) ⇒ Some(e) - case _ ⇒ None - } - /** * When this Future is completed, apply the provided function to the * Future. If the Future has already been completed, this will apply - * immediately. Will not be called in case of a timeout, which also holds if - * corresponding Promise is attempted to complete after expiry. Multiple + * immediately. Multiple * callbacks may be registered; there is no guarantee that they will be * executed in a particular order. */ - def onComplete(func: Future[T] ⇒ Unit): this.type + def onComplete(func: Either[Throwable, T] ⇒ Unit): this.type /** * When the future is completed with a valid result, apply the provided * PartialFunction to the result. See `onComplete` for more details. *
-   *   future onResult {
+   *   future onSuccess {
    *     case Foo ⇒ target ! "foo"
    *     case Bar ⇒ target ! "bar"
    *   }
    * 
*/ - final def onResult(pf: PartialFunction[T, Unit]): this.type = onComplete { - _.value match { - case Some(Right(r)) if pf isDefinedAt r ⇒ pf(r) - case _ ⇒ - } + final def onSuccess[U](pf: PartialFunction[T, U]): this.type = onComplete { + case Right(r) if pf isDefinedAt r ⇒ pf(r) + case _ ⇒ } /** * When the future is completed with an exception, apply the provided * PartialFunction to the exception. See `onComplete` for more details. *
-   *   future onException {
+   *   future onFailure {
    *     case NumberFormatException ⇒ target ! "wrong format"
    *   }
    * 
*/ - final def onException(pf: PartialFunction[Throwable, Unit]): this.type = onComplete { - _.value match { - case Some(Left(ex)) if pf isDefinedAt ex ⇒ pf(ex) - case _ ⇒ - } + final def onFailure[U](pf: PartialFunction[Throwable, U]): this.type = onComplete { + case Left(ex) if pf isDefinedAt ex ⇒ pf(ex) + case _ ⇒ } - def onTimeout(func: Future[T] ⇒ Unit): this.type + /** + * Returns a failure projection of this Future + * If `this` becomes completed with a failure, that failure will be the success of the returned Future + * If `this` becomes completed with a result, then the returned future will fail with a NoSuchElementException + */ + final def failed: Future[Throwable] = { + val p = Promise[Throwable]() + this.onComplete { + case Left(t) ⇒ p success t + case Right(r) ⇒ p failure new NoSuchElementException("Future.failed not completed with a throwable. Instead completed with: " + r) + } + p + } - def orElse[A >: T](fallback: ⇒ A): Future[A] + /** + * Creates a Future that will be the result of the first completed Future of this and the Future that was passed into this. 
+ * This is semantically the same as: Future.firstCompletedOf(Seq(this, that)) + */ + //FIXME implement as The result of any of the Futures, or if oth failed, the first failure + def orElse[A >: T](that: Future[A]): Future[A] = Future.firstCompletedOf(List(this, that)) //TODO Optimize /** * Creates a new Future that will handle any matching Throwable that this @@ -597,13 +408,11 @@ sealed trait Future[+T] extends japi.Future[T] { * Future(6 / 2) recover { case e: ArithmeticException ⇒ 0 } // result: 3 * */ - final def recover[A >: T](pf: PartialFunction[Throwable, A])(implicit timeout: Timeout): Future[A] = { - val future = new DefaultPromise[A](timeout) + final def recover[A >: T](pf: PartialFunction[Throwable, A]): Future[A] = { + val future = Promise[A]() onComplete { - _.value.get match { - case Left(e) if pf isDefinedAt e ⇒ future.complete(try { Right(pf(e)) } catch { case x: Exception ⇒ Left(x) }) - case otherwise ⇒ future complete otherwise - } + case Left(e) if pf isDefinedAt e ⇒ future.complete(try { Right(pf(e)) } catch { case x: Exception ⇒ Left(x) }) + case otherwise ⇒ future complete otherwise } future } @@ -621,20 +430,18 @@ sealed trait Future[+T] extends japi.Future[T] { * } yield b + "-" + c * */ - final def map[A](f: T ⇒ A)(implicit timeout: Timeout): Future[A] = { - val future = new DefaultPromise[A](timeout) + final def map[A](f: T ⇒ A): Future[A] = { + val future = Promise[A]() onComplete { - _.value.get match { - case l: Left[_, _] ⇒ future complete l.asInstanceOf[Either[Throwable, A]] - case Right(res) ⇒ - future complete (try { - Right(f(res)) - } catch { - case e: Exception ⇒ - dispatcher.prerequisites.eventStream.publish(Error(e, "Future.map", e.getMessage)) - Left(e) - }) - } + case l: Left[_, _] ⇒ future complete l.asInstanceOf[Either[Throwable, A]] + case Right(res) ⇒ + future complete (try { + Right(f(res)) + } catch { + case e: Exception ⇒ + dispatcher.prerequisites.eventStream.publish(Error(e, "Future.map", e.getMessage)) + Left(e) + 
}) } future } @@ -643,18 +450,16 @@ sealed trait Future[+T] extends japi.Future[T] { * Creates a new Future[A] which is completed with this Future's result if * that conforms to A's erased type or a ClassCastException otherwise. */ - final def mapTo[A](implicit m: Manifest[A], timeout: Timeout = this.timeout): Future[A] = { - val fa = new DefaultPromise[A](timeout) - onComplete { ft ⇒ - fa complete (ft.value.get match { - case l: Left[_, _] ⇒ l.asInstanceOf[Either[Throwable, A]] - case Right(t) ⇒ - try { - Right(BoxedType(m.erasure).cast(t).asInstanceOf[A]) - } catch { - case e: ClassCastException ⇒ Left(e) - } - }) + final def mapTo[A](implicit m: Manifest[A]): Future[A] = { + val fa = Promise[A]() + onComplete { + case l: Left[_, _] ⇒ fa complete l.asInstanceOf[Either[Throwable, A]] + case Right(t) ⇒ + fa complete (try { + Right(BoxedType(m.erasure).cast(t).asInstanceOf[A]) + } catch { + case e: ClassCastException ⇒ Left(e) + }) } fa } @@ -673,137 +478,143 @@ sealed trait Future[+T] extends japi.Future[T] { * } yield b + "-" + c * */ - final def flatMap[A](f: T ⇒ Future[A])(implicit timeout: Timeout): Future[A] = { - val future = new DefaultPromise[A](timeout) + final def flatMap[A](f: T ⇒ Future[A]): Future[A] = { + val p = Promise[A]() onComplete { - _.value.get match { - case l: Left[_, _] ⇒ future complete l.asInstanceOf[Either[Throwable, A]] - case Right(r) ⇒ try { - future.completeWith(f(r)) + case l: Left[_, _] ⇒ p complete l.asInstanceOf[Either[Throwable, A]] + case Right(r) ⇒ + try { + p completeWith f(r) } catch { case e: Exception ⇒ + p complete Left(e) dispatcher.prerequisites.eventStream.publish(Error(e, "Future.flatMap", e.getMessage)) - future complete Left(e) } - } } - future + p } final def foreach(f: T ⇒ Unit): Unit = onComplete { - _.value.get match { - case Right(r) ⇒ f(r) - case _ ⇒ - } + case Right(r) ⇒ f(r) + case _ ⇒ } - final def withFilter(p: T ⇒ Boolean)(implicit timeout: Timeout) = new FutureWithFilter[T](this, p) + final def 
withFilter(p: T ⇒ Boolean) = new FutureWithFilter[T](this, p) - final class FutureWithFilter[+A](self: Future[A], p: A ⇒ Boolean)(implicit timeout: Timeout) { + final class FutureWithFilter[+A](self: Future[A], p: A ⇒ Boolean) { def foreach(f: A ⇒ Unit): Unit = self filter p foreach f def map[B](f: A ⇒ B): Future[B] = self filter p map f def flatMap[B](f: A ⇒ Future[B]): Future[B] = self filter p flatMap f def withFilter(q: A ⇒ Boolean): FutureWithFilter[A] = new FutureWithFilter[A](self, x ⇒ p(x) && q(x)) } - final def filter(p: T ⇒ Boolean)(implicit timeout: Timeout): Future[T] = { - val future = new DefaultPromise[T](timeout) + final def filter(pred: T ⇒ Boolean): Future[T] = { + val p = Promise[T]() onComplete { - _.value.get match { - case l: Left[_, _] ⇒ future complete l.asInstanceOf[Either[Throwable, T]] - case r @ Right(res) ⇒ future complete (try { - if (p(res)) r else Left(new MatchError(res)) - } catch { - case e: Exception ⇒ - dispatcher.prerequisites.eventStream.publish(Error(e, "Future.filter", e.getMessage)) - Left(e) - }) - } + case l: Left[_, _] ⇒ p complete l.asInstanceOf[Either[Throwable, T]] + case r @ Right(res) ⇒ p complete (try { + if (pred(res)) r else Left(new MatchError(res)) + } catch { + case e: Exception ⇒ + dispatcher.prerequisites.eventStream.publish(Error(e, "Future.filter", e.getMessage)) + Left(e) + }) } - future - } - - /** - * Returns the current result, throws the exception if one has been raised, else returns None - */ - final def resultOrException: Option[T] = value match { - case Some(Left(e)) ⇒ throw e - case Some(Right(r)) ⇒ Some(r) - case _ ⇒ None + p } } object Promise { + /** + * Creates a non-completed Promise + * + * Scala API + */ + def apply[A]()(implicit dispatcher: MessageDispatcher): Promise[A] = new DefaultPromise[A]() /** - * Creates a non-completed, new, Promise with the supplied timeout in milliseconds + * Creates an already completed Promise with the specified exception */ - def apply[A](timeout: 
Timeout)(implicit dispatcher: MessageDispatcher): Promise[A] = new DefaultPromise[A](timeout) + def failed[T](exception: Throwable)(implicit dispatcher: MessageDispatcher): Promise[T] = new KeptPromise[T](Left(exception)) /** - * Creates a non-completed, new, Promise with the default timeout (akka.actor.timeout in conf) + * Creates an already completed Promise with the specified result */ - def apply[A]()(implicit dispatcher: MessageDispatcher, timeout: Timeout): Promise[A] = apply(timeout) + def successful[T](result: T)(implicit dispatcher: MessageDispatcher): Promise[T] = new KeptPromise[T](Right(result)) } /** * Essentially this is the Promise (or write-side) of a Future (read-side). */ trait Promise[T] extends Future[T] { - /** - * Completes this Future with the specified result, if not already completed. - * @return this - */ - def complete(value: Either[Throwable, T]): this.type /** - * Completes this Future with the specified result, if not already completed. - * @return this + * Returns the Future associated with this Promise */ - final def completeWithResult(result: T): this.type = complete(Right(result)) + def future: Future[T] = this /** - * Completes this Future with the specified exception, if not already completed. - * @return this + * Completes this Promise with the specified result, if not already completed. + * @return whether this call completed the Promise */ - final def completeWithException(exception: Throwable): this.type = complete(Left(exception)) + def tryComplete(value: Either[Throwable, T]): Boolean /** - * Completes this Future with the specified other Future, when that Future is completed, - * unless this Future has already been completed. + * Completes this Promise with the specified result, if not already completed. + * @return this + */ + final def complete(value: Either[Throwable, T]): this.type = { tryComplete(value); this } + + /** + * Completes this Promise with the specified result, if not already completed. 
+ * @return this + */ + final def success(result: T): this.type = complete(Right(result)) + + /** + * Completes this Promise with the specified exception, if not already completed. + * @return this + */ + final def failure(exception: Throwable): this.type = complete(Left(exception)) + + /** + * Completes this Promise with the specified other Future, when that Future is completed, + * unless this Promise has already been completed. * @return this. */ final def completeWith(other: Future[T]): this.type = { - other onComplete { f ⇒ complete(f.value.get) } + other onComplete { complete(_) } this } final def <<(value: T): Future[T] @cps[Future[Any]] = shift { cont: (Future[T] ⇒ Future[Any]) ⇒ cont(complete(Right(value))) } final def <<(other: Future[T]): Future[T] @cps[Future[Any]] = shift { cont: (Future[T] ⇒ Future[Any]) ⇒ - val fr = new DefaultPromise[Any](this.timeout) - this completeWith other onComplete { f ⇒ + val fr = Promise[Any]() + val thisPromise = this + thisPromise completeWith other onComplete { v ⇒ try { - fr completeWith cont(f) + fr completeWith cont(thisPromise) } catch { case e: Exception ⇒ dispatcher.prerequisites.eventStream.publish(Error(e, "Promise.completeWith", e.getMessage)) - fr completeWithException e + fr failure e } } fr } final def <<(stream: PromiseStreamOut[T]): Future[T] @cps[Future[Any]] = shift { cont: (Future[T] ⇒ Future[Any]) ⇒ - val fr = new DefaultPromise[Any](this.timeout) - stream.dequeue(this).onComplete { f ⇒ + val fr = Promise[Any]() + val f = stream.dequeue(this) + f.onComplete { _ ⇒ try { fr completeWith cont(f) } catch { case e: Exception ⇒ dispatcher.prerequisites.eventStream.publish(Error(e, "Promise.completeWith", e.getMessage)) - fr completeWithException e + fr failure e } } fr @@ -819,7 +630,7 @@ private[dispatch] object DefaultPromise { */ sealed trait FState[+T] { def value: Option[Either[Throwable, T]] } - case class Pending[T](listeners: List[Future[T] ⇒ Unit] = Nil) extends FState[T] { + case class 
Pending[T](listeners: List[Either[Throwable, T] ⇒ Unit] = Nil) extends FState[T] { def value: Option[Either[Throwable, T]] = None } case class Success[T](value: Option[Either[Throwable, T]] = None) extends FState[T] { @@ -828,94 +639,68 @@ private[dispatch] object DefaultPromise { case class Failure[T](value: Option[Either[Throwable, T]] = None) extends FState[T] { def exception: Throwable = value.get.left.get } - case object Expired extends FState[Nothing] { - def value: Option[Either[Throwable, Nothing]] = None - } private val emptyPendingValue = Pending[Nothing](Nil) } /** * The default concrete Future implementation. */ -class DefaultPromise[T](val timeout: Timeout)(implicit val dispatcher: MessageDispatcher) extends AbstractPromise with Promise[T] { +class DefaultPromise[T](implicit val dispatcher: MessageDispatcher) extends AbstractPromise with Promise[T] { self ⇒ - import DefaultPromise.{ FState, Success, Failure, Pending, Expired } + import DefaultPromise.{ FState, Success, Failure, Pending } - def this()(implicit dispatcher: MessageDispatcher, timeout: Timeout) = this(timeout) + protected final def tryAwait(atMost: Duration): Boolean = { + Future.blocking - def this(timeout: Long)(implicit dispatcher: MessageDispatcher) = this(Timeout(timeout)) + @tailrec + def awaitUnsafe(waitTimeNanos: Long): Boolean = { + if (value.isEmpty && waitTimeNanos > 0) { + val ms = NANOSECONDS.toMillis(waitTimeNanos) + val ns = (waitTimeNanos % 1000000l).toInt //As per object.wait spec + val start = System.nanoTime() + try { synchronized { if (value.isEmpty) wait(ms, ns) } } catch { case e: InterruptedException ⇒ } - def this(timeout: Long, timeunit: TimeUnit)(implicit dispatcher: MessageDispatcher) = this(Timeout(timeout, timeunit)) - - private val _startTimeInNanos = currentTimeInNanos - - @tailrec - private def awaitUnsafe(waitTimeNanos: Long): Boolean = { - if (value.isEmpty && waitTimeNanos > 0) { - val ms = NANOSECONDS.toMillis(waitTimeNanos) - val ns = (waitTimeNanos % 
1000000l).toInt //As per object.wait spec - val start = currentTimeInNanos - try { synchronized { if (value.isEmpty) wait(ms, ns) } } catch { case e: InterruptedException ⇒ } - - awaitUnsafe(waitTimeNanos - (currentTimeInNanos - start)) - } else { - value.isDefined + awaitUnsafe(waitTimeNanos - (System.nanoTime() - start)) + } else + value.isDefined } + awaitUnsafe(if (atMost.isFinite) atMost.toNanos else Long.MaxValue) } - def await(atMost: Duration): this.type = if (value.isDefined) this else { - Future.blocking() + def ready(atMost: Duration)(implicit permit: CanAwait): this.type = + if (value.isDefined || tryAwait(atMost)) this + else throw new TimeoutException("Futures timed out after [" + atMost.toMillis + "] milliseconds") - val waitNanos = - if (timeout.duration.isFinite && atMost.isFinite) - atMost.toNanos min timeLeft() - else if (atMost.isFinite) - atMost.toNanos - else if (timeout.duration.isFinite) - timeLeft() - else Long.MaxValue //If both are infinite, use Long.MaxValue - - if (awaitUnsafe(waitNanos)) this - else throw new FutureTimeoutException("Futures timed out after [" + NANOSECONDS.toMillis(waitNanos) + "] milliseconds") - } - - def await = await(timeout.duration) - - def isExpired: Boolean = if (timeout.duration.isFinite) timeLeft() <= 0 else false + def result(atMost: Duration)(implicit permit: CanAwait): T = + ready(atMost).value.get match { + case Left(e) ⇒ throw e + case Right(r) ⇒ r + } def value: Option[Either[Throwable, T]] = getState.value @inline - protected final def updateState(oldState: FState[T], newState: FState[T]): Boolean = - AbstractPromise.updater.asInstanceOf[AtomicReferenceFieldUpdater[AbstractPromise, FState[T]]].compareAndSet(this, oldState, newState) + private[this] final def updater = AbstractPromise.updater.asInstanceOf[AtomicReferenceFieldUpdater[AbstractPromise, FState[T]]] @inline - protected final def getState: FState[T] = { + protected final def updateState(oldState: FState[T], newState: FState[T]): Boolean = 
updater.compareAndSet(this, oldState, newState) - @tailrec - def read(): FState[T] = { - val cur = AbstractPromise.updater.asInstanceOf[AtomicReferenceFieldUpdater[AbstractPromise, FState[T]]].get(this) - if (cur.isInstanceOf[Pending[_]] && isExpired) { - if (updateState(cur, Expired)) Expired else read() - } else cur - } + @inline + protected final def getState: FState[T] = updater.get(this) - read() - } - - def complete(value: Either[Throwable, T]): this.type = { - val callbacks = { + def tryComplete(value: Either[Throwable, T]): Boolean = { + val callbacks: List[Either[Throwable, T] ⇒ Unit] = { try { @tailrec - def tryComplete: List[Future[T] ⇒ Unit] = { + def tryComplete: List[Either[Throwable, T] ⇒ Unit] = { val cur = getState cur match { case Pending(listeners) ⇒ if (updateState(cur, if (value.isLeft) Failure(Some(value)) else Success(Some(value)))) listeners else tryComplete - case _ ⇒ Nil + case _ ⇒ null } } tryComplete @@ -924,120 +709,55 @@ class DefaultPromise[T](val timeout: Timeout)(implicit val dispatcher: MessageDi } } - if (callbacks.nonEmpty) Future.dispatchTask(() ⇒ callbacks foreach notifyCompleted) - - this + callbacks match { + case null ⇒ false + case cs if cs.isEmpty ⇒ true + case cs ⇒ Future.dispatchTask(() ⇒ cs.foreach(f ⇒ notifyCompleted(f, value))); true + } } - def onComplete(func: Future[T] ⇒ Unit): this.type = { + def onComplete(func: Either[Throwable, T] ⇒ Unit): this.type = { @tailrec //Returns whether the future has already been completed or not def tryAddCallback(): Boolean = { val cur = getState cur match { case _: Success[_] | _: Failure[_] ⇒ true - case Expired ⇒ false case p: Pending[_] ⇒ val pt = p.asInstanceOf[Pending[T]] - if (updateState(pt, pt.copy(listeners = func :: pt.listeners))) false - else tryAddCallback() + if (updateState(pt, pt.copy(listeners = func :: pt.listeners))) false else tryAddCallback() } } - if (tryAddCallback()) Future.dispatchTask(() ⇒ notifyCompleted(func)) + if (tryAddCallback()) { + val result = 
value.get + Future.dispatchTask(() ⇒ notifyCompleted(func, result)) + } this } - def onTimeout(func: Future[T] ⇒ Unit): this.type = { - val runNow = - if (!timeout.duration.isFinite) false //Not possible - else if (value.isEmpty) { - if (!isExpired) { - val runnable = new Runnable { - def run() { - if (!isCompleted) { - if (!isExpired) - try dispatcher.prerequisites.scheduler.scheduleOnce(Duration(timeLeftNoinline(), TimeUnit.NANOSECONDS), this) - catch { - case _: IllegalStateException ⇒ func(DefaultPromise.this) - } - else func(DefaultPromise.this) - } - } - } - val timeoutFuture = dispatcher.prerequisites.scheduler.scheduleOnce(Duration(timeLeft(), NANOSECONDS), runnable) - onComplete(_ ⇒ timeoutFuture.cancel()) - false - } else true - } else false - - if (runNow) Future.dispatchTask(() ⇒ notifyCompleted(func)) - - this + private final def notifyCompleted(func: Either[Throwable, T] ⇒ Unit, result: Either[Throwable, T]) { + try { func(result) } catch { case e ⇒ dispatcher.prerequisites.eventStream.publish(Error(e, "Future", "Future onComplete-callback raised an exception")) } } - - final def orElse[A >: T](fallback: ⇒ A): Future[A] = - if (timeout.duration.isFinite) { - getState match { - case _: Success[_] | _: Failure[_] ⇒ this - case Expired ⇒ Future[A](fallback, timeout) - case _: Pending[_] ⇒ - val promise = new DefaultPromise[A](Timeout.never) //TODO FIXME We can't have infinite timeout here, doesn't make sense. 
- promise completeWith this - val runnable = new Runnable { - def run() { - if (!isCompleted) { - val done = - if (!isExpired) - try { - dispatcher.prerequisites.scheduler.scheduleOnce(Duration(timeLeftNoinline(), TimeUnit.NANOSECONDS), this) - true - } catch { - case _: IllegalStateException ⇒ false - } - else false - if (!done) - promise complete (try { Right(fallback) } catch { case e ⇒ Left(e) }) // FIXME catching all and continue isn't good for OOME, ticket #1418 - } - } - } - dispatcher.prerequisites.scheduler.scheduleOnce(Duration(timeLeft(), NANOSECONDS), runnable) - promise - } - } else this - - private def notifyCompleted(func: Future[T] ⇒ Unit) { - // FIXME catching all and continue isn't good for OOME, ticket #1418 - try { func(this) } catch { case e ⇒ dispatcher.prerequisites.eventStream.publish(Error(e, "Future", "Future onComplete-callback raised an exception")) } //TODO catch, everything? Really? - } - - @inline - private def currentTimeInNanos: Long = MILLISECONDS.toNanos(System.currentTimeMillis) //TODO Switch to math.abs(System.nanoTime)? - //TODO: the danger of Math.abs is that it could break the ordering of time. So I would not recommend an abs. - @inline - private def timeLeft(): Long = timeoutInNanos - (currentTimeInNanos - _startTimeInNanos) - - private def timeLeftNoinline(): Long = timeLeft() } /** * An already completed Future is seeded with it's result at creation, is useful for when you are participating in * a Future-composition but you already have a value to contribute. 
*/ -sealed class KeptPromise[T](suppliedValue: Either[Throwable, T])(implicit val dispatcher: MessageDispatcher) extends Promise[T] { +final class KeptPromise[T](suppliedValue: Either[Throwable, T])(implicit val dispatcher: MessageDispatcher) extends Promise[T] { val value = Some(suppliedValue) - def complete(value: Either[Throwable, T]): this.type = this - def onComplete(func: Future[T] ⇒ Unit): this.type = { - Future dispatchTask (() ⇒ func(this)) + def tryComplete(value: Either[Throwable, T]): Boolean = true + def onComplete(func: Either[Throwable, T] ⇒ Unit): this.type = { + val completedAs = value.get + Future dispatchTask (() ⇒ func(completedAs)) this } - def await(atMost: Duration): this.type = this - def await: this.type = this - def isExpired: Boolean = true - def timeout: Timeout = Timeout.zero - - final def onTimeout(func: Future[T] ⇒ Unit): this.type = this - final def orElse[A >: T](fallback: ⇒ A): Future[A] = this + def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this + def result(atMost: Duration)(implicit permit: CanAwait): T = value.get match { + case Left(e) ⇒ throw e + case Right(r) ⇒ r + } } diff --git a/akka-actor/src/main/scala/akka/dispatch/PromiseStream.scala b/akka-actor/src/main/scala/akka/dispatch/PromiseStream.scala index 4356cbaff3..4ec0aaf300 100644 --- a/akka-actor/src/main/scala/akka/dispatch/PromiseStream.scala +++ b/akka-actor/src/main/scala/akka/dispatch/PromiseStream.scala @@ -166,7 +166,7 @@ class PromiseStream[A](implicit val dispatcher: MessageDispatcher, val timeout: } else enqueue(elem) } else { if (_pendOut.compareAndSet(po, po.tail)) { - po.head completeWithResult elem + po.head success elem if (!po.head.isCompleted) enqueue(elem) } else enqueue(elem) } @@ -183,11 +183,11 @@ class PromiseStream[A](implicit val dispatcher: MessageDispatcher, val timeout: if (eo eq null) dequeue() else { if (eo.nonEmpty) { - if (_elemOut.compareAndSet(eo, eo.tail)) new KeptPromise(Right(eo.head)) + if 
(_elemOut.compareAndSet(eo, eo.tail)) Promise.successful(eo.head) else dequeue() - } else dequeue(Promise[A](timeout)) + } else dequeue(Promise[A]) } - } else dequeue(Promise[A](timeout)) + } else dequeue(Promise[A]) @tailrec final def dequeue(promise: Promise[A]): Future[A] = _state.get match { @@ -227,7 +227,7 @@ class PromiseStream[A](implicit val dispatcher: MessageDispatcher, val timeout: } else dequeue(promise) } else { if (_elemOut.compareAndSet(eo, eo.tail)) { - promise completeWithResult eo.head + promise success eo.head } else dequeue(promise) } } diff --git a/akka-actor/src/main/scala/akka/dispatch/japi/Future.scala b/akka-actor/src/main/scala/akka/dispatch/japi/Future.scala index e923dd6c18..64852912fe 100644 --- a/akka-actor/src/main/scala/akka/dispatch/japi/Future.scala +++ b/akka-actor/src/main/scala/akka/dispatch/japi/Future.scala @@ -3,27 +3,52 @@ */ package akka.dispatch.japi -import akka.japi.{ Procedure, Function ⇒ JFunc, Option ⇒ JOption } import akka.actor.Timeout +import akka.japi.{ Procedure2, Procedure, Function ⇒ JFunc, Option ⇒ JOption } /* Java API */ trait Future[+T] { self: akka.dispatch.Future[T] ⇒ - private[japi] final def onTimeout[A >: T](proc: Procedure[akka.dispatch.Future[A]]): this.type = self.onTimeout(proc(_)) - private[japi] final def onResult[A >: T](proc: Procedure[A]): this.type = self.onResult({ case r ⇒ proc(r.asInstanceOf[A]) }: PartialFunction[T, Unit]) - private[japi] final def onException(proc: Procedure[Throwable]): this.type = self.onException({ case t: Throwable ⇒ proc(t) }: PartialFunction[Throwable, Unit]) - private[japi] final def onComplete[A >: T](proc: Procedure[akka.dispatch.Future[A]]): this.type = self.onComplete(proc(_)) - private[japi] final def map[A >: T, B](f: JFunc[A, B], timeout: Timeout): akka.dispatch.Future[B] = { - implicit val t = timeout - self.map(f(_)) - } - private[japi] final def flatMap[A >: T, B](f: JFunc[A, akka.dispatch.Future[B]], timeout: Timeout): akka.dispatch.Future[B] = { - 
implicit val t = timeout - self.flatMap(f(_)) - } + /** + * Asynchronously called when this Future gets a successful result + */ + private[japi] final def onSuccess[A >: T](proc: Procedure[A]): this.type = self.onSuccess({ case r ⇒ proc(r.asInstanceOf[A]) }: PartialFunction[T, Unit]) + + /** + * Asynchronously called when this Future gets a failed result + */ + private[japi] final def onFailure(proc: Procedure[Throwable]): this.type = self.onFailure({ case t: Throwable ⇒ proc(t) }: PartialFunction[Throwable, Unit]) + + /** + * Asynchronously called when this future is completed with either a failed or a successful result + * In case of a success, the first parameter (Throwable) will be null + * In case of a failure, the second parameter (T) will be null + * For no reason will both be null or neither be null + */ + private[japi] final def onComplete[A >: T](proc: Procedure2[Throwable, A]): this.type = self.onComplete(_.fold(t ⇒ proc(t, null.asInstanceOf[T]), r ⇒ proc(null, r))) + + /** + * Asynchronously applies the provided function to the (if any) successful result of this Future + * Any failure of this Future will be propagated to the Future returned by this method. + */ + private[japi] final def map[A >: T, B](f: JFunc[A, B]): akka.dispatch.Future[B] = self.map(f(_)) + + /** + * Asynchronously applies the provided function to the (if any) successful result of this Future and flattens it. + * Any failure of this Future will be propagated to the Future returned by this method. 
+ */ + private[japi] final def flatMap[A >: T, B](f: JFunc[A, akka.dispatch.Future[B]]): akka.dispatch.Future[B] = self.flatMap(f(_)) + + /** + * Asynchronously applies the provided Procedure to the (if any) successful result of this Future + * Provided Procedure will not be called in case of no-result or in case of failed result + */ private[japi] final def foreach[A >: T](proc: Procedure[A]): Unit = self.foreach(proc(_)) - private[japi] final def filter[A >: T](p: JFunc[A, java.lang.Boolean], timeout: Timeout): akka.dispatch.Future[A] = { - implicit val t = timeout + + /** + * Returns a new Future whose successful result will be the successful result of this Future if that result conforms to the provided predicate + * Any failure of this Future will be propagated to the Future returned by this method. + */ + private[japi] final def filter[A >: T](p: JFunc[A, java.lang.Boolean]): akka.dispatch.Future[A] = self.filter((a: Any) ⇒ p(a.asInstanceOf[A])).asInstanceOf[akka.dispatch.Future[A]] - } } diff --git a/akka-actor/src/main/scala/akka/event/Logging.scala b/akka-actor/src/main/scala/akka/event/Logging.scala index 0425b4c661..016d977d40 100644 --- a/akka-actor/src/main/scala/akka/event/Logging.scala +++ b/akka-actor/src/main/scala/akka/event/Logging.scala @@ -11,10 +11,11 @@ import akka.config.ConfigurationException import akka.util.ReentrantGuard import akka.util.duration._ import akka.actor.Timeout -import akka.dispatch.FutureTimeoutException import java.util.concurrent.atomic.AtomicInteger import akka.actor.ActorRefProvider import scala.util.control.NoStackTrace +import java.util.concurrent.TimeoutException +import akka.dispatch.Await object LoggingBus { implicit def fromActorSystem(system: ActorSystem): LoggingBus = system.eventStream @@ -137,7 +138,10 @@ trait LoggingBus extends ActorEventBus { } { // this is very necessary, else you get infinite loop with DeadLetter unsubscribe(logger) - logger.stop() + logger match { + case ref: InternalActorRef ⇒ ref.stop() 
+ case _ ⇒ + } } publish(Debug(simpleName(this), "all default loggers stopped")) } @@ -146,8 +150,8 @@ trait LoggingBus extends ActorEventBus { val name = "log" + Extension(system).id() + "-" + simpleName(clazz) val actor = system.systemActorOf(Props(clazz), name) implicit val timeout = Timeout(3 seconds) - val response = try actor ? InitializeLogger(this) get catch { - case _: FutureTimeoutException ⇒ + val response = try Await.result(actor ? InitializeLogger(this), timeout.duration) catch { + case _: TimeoutException ⇒ publish(Warning(simpleName(this), "Logger " + name + " did not respond within " + timeout + " to InitializeLogger(bus)")) } if (response != LoggerInitialized) diff --git a/akka-actor/src/main/scala/akka/routing/ConnectionManager.scala b/akka-actor/src/main/scala/akka/routing/ConnectionManager.scala index a417c75bac..135546ad2b 100644 --- a/akka-actor/src/main/scala/akka/routing/ConnectionManager.scala +++ b/akka-actor/src/main/scala/akka/routing/ConnectionManager.scala @@ -66,49 +66,3 @@ trait ConnectionManager { */ def remove(deadRef: ActorRef) } - -/** - * Manages local connections for a router, e.g. local actors. 
- */ -class LocalConnectionManager(initialConnections: Iterable[ActorRef]) extends ConnectionManager { - - def this(iterable: java.lang.Iterable[ActorRef]) { - this(JavaConverters.iterableAsScalaIterableConverter(iterable).asScala) - } - - case class State(version: Long, connections: Iterable[ActorRef]) extends VersionedIterable[ActorRef] { - def iterable = connections - } - - private val state: AtomicReference[State] = new AtomicReference[State](newState()) - - private def newState() = State(Long.MinValue, initialConnections) - - def version: Long = state.get.version - - def size: Int = state.get.connections.size - - def isEmpty: Boolean = state.get.connections.isEmpty - - def connections = state.get - - def shutdown() { - state.get.connections foreach (_.stop()) - } - - @tailrec - final def remove(ref: ActorRef) = { - val oldState = state.get - - //remote the ref from the connections. - var newList = oldState.connections.filter(currentActorRef ⇒ currentActorRef ne ref) - - if (newList.size != oldState.connections.size) { - //one or more occurrences of the actorRef were removed, so we need to update the state. - - val newState = State(oldState.version + 1, newList) - //if we are not able to update the state, we just try again. 
- if (!state.compareAndSet(oldState, newState)) remove(ref) - } - } -} diff --git a/akka-actor/src/main/scala/akka/routing/Routing.scala b/akka-actor/src/main/scala/akka/routing/Routing.scala index 7df6a388cb..a321bb8983 100644 --- a/akka-actor/src/main/scala/akka/routing/Routing.scala +++ b/akka-actor/src/main/scala/akka/routing/Routing.scala @@ -132,7 +132,7 @@ trait Router extends Actor { case Terminated(child) ⇒ ref._routees = ref._routees filterNot (_ == child) - if (ref.routees.isEmpty) self.stop() + if (ref.routees.isEmpty) context.stop(self) }: Receive) orElse routerReceive diff --git a/akka-actor/src/main/scala/akka/util/AkkaLoader.scala b/akka-actor/src/main/scala/akka/util/AkkaLoader.scala deleted file mode 100644 index f2bf63c137..0000000000 --- a/akka-actor/src/main/scala/akka/util/AkkaLoader.scala +++ /dev/null @@ -1,91 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - */ - -package akka.util -import akka.actor.ActorSystem - -/* - * This class is responsible for booting up a stack of bundles and then shutting them down - */ -class AkkaLoader(system: ActorSystem) { - private val hasBooted = new Switch(false) - - @volatile - private var _bundles: Option[Bootable] = None - - def bundles = _bundles; - - /* - * Boot initializes the specified bundles - */ - def boot(withBanner: Boolean, b: Bootable): Unit = hasBooted switchOn { - if (withBanner) printBanner() - println("Starting Akka...") - b.onLoad() - Thread.currentThread.setContextClassLoader(getClass.getClassLoader) - _bundles = Some(b) - println("Akka started successfully") - } - - /* - * Shutdown, well, shuts down the bundles used in boot - */ - def shutdown() { - hasBooted switchOff { - println("Shutting down Akka...") - _bundles.foreach(_.onUnload()) - _bundles = None - println("Akka succesfully shut down") - } - } - - private def printBanner() { - println(""" -============================================================================== - - ZZ: - ZZZZ - ZZZZZZ - ZZZ' ZZZ - ~7 7ZZ' ZZZ - 
:ZZZ: IZZ' ZZZ - ,OZZZZ.~ZZ? ZZZ - ZZZZ' 'ZZZ$ ZZZ - . $ZZZ ~ZZ$ ZZZ - .=Z?. .ZZZO ~ZZ7 OZZ - .ZZZZ7..:ZZZ~ 7ZZZ ZZZ~ - .$ZZZ$Z+.ZZZZ ZZZ: ZZZ$ - .,ZZZZ?' =ZZO= .OZZ 'ZZZ - .$ZZZZ+ .ZZZZ IZZZ ZZZ$ - .ZZZZZ' .ZZZZ' .ZZZ$ ?ZZZ - .ZZZZZZ' .OZZZ? ?ZZZ 'ZZZ$ - .?ZZZZZZ' .ZZZZ? .ZZZ? 'ZZZO - .+ZZZZZZ?' .7ZZZZ' .ZZZZ :ZZZZ - .ZZZZZZ$' .?ZZZZZ' .~ZZZZ 'ZZZZ. - - - NNNNN $NNNN+ - NNNNN $NNNN+ - NNNNN $NNNN+ - NNNNN $NNNN+ - NNNNN $NNNN+ - =NNNNNNNNND$ NNNNN DDDDDD: $NNNN+ DDDDDN NDDNNNNNNNN, - NNNNNNNNNNNNND NNNNN DNNNNN $NNNN+ 8NNNNN= :NNNNNNNNNNNNNN - NNNNN$ DNNNNN NNNNN $NNNNN~ $NNNN+ NNNNNN NNNNN, :NNNNN+ - ?DN~ NNNNN NNNNN MNNNNN $NNNN+:NNNNN7 $ND =NNNNN - DNNNNN NNNNNDNNNN$ $NNNNDNNNNN :DNNNNN - ZNDNNNNNNNNND NNNNNNNNNND, $NNNNNNNNNNN DNDNNNNNNNNNN - NNNNNNNDDINNNNN NNNNNNNNNNND $NNNNNNNNNNND ONNNNNNND8+NNNNN - :NNNND NNNNN NNNNNN DNNNN, $NNNNNO 7NNNND NNNNNO :NNNNN - DNNNN NNNNN NNNNN DNNNN $NNNN+ 8NNNNN NNNNN $NNNNN - DNNNNO NNNNNN NNNNN NNNNN $NNNN+ NNNNN$ NNNND, ,NNNNND - NNNNNNDDNNNNNNNN NNNNN =NNNNN $NNNN+ DNNNN? DNNNNNNDNNNNNNNND - NNNNNNNNN NNNN$ NNNNN 8NNNND $NNNN+ NNNNN= ,DNNNNNNND NNNNN$ - -============================================================================== - Running version %s -============================================================================== -""".format(ActorSystem.Version)) - } -} diff --git a/akka-actor/src/main/scala/akka/util/Bootable.scala b/akka-actor/src/main/scala/akka/util/Bootable.scala deleted file mode 100644 index a7a55f58e7..0000000000 --- a/akka-actor/src/main/scala/akka/util/Bootable.scala +++ /dev/null @@ -1,11 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. 
- */ - -package akka.util -import akka.actor.ActorSystem - -trait Bootable { - def onLoad() {} - def onUnload() {} -} diff --git a/akka-actor/src/main/scala/akka/util/Duration.scala b/akka-actor/src/main/scala/akka/util/Duration.scala index 6e9310e5d8..fba61c8a48 100644 --- a/akka-actor/src/main/scala/akka/util/Duration.scala +++ b/akka-actor/src/main/scala/akka/util/Duration.scala @@ -148,8 +148,6 @@ object Duration { trait Infinite { this: Duration ⇒ - override def equals(other: Any) = false - def +(other: Duration): Duration = other match { case _: this.type ⇒ this @@ -192,7 +190,7 @@ object Duration { */ val Inf: Duration = new Duration with Infinite { override def toString = "Duration.Inf" - def compare(other: Duration) = 1 + def compare(other: Duration) = if (other eq this) 0 else 1 def unary_- : Duration = MinusInf } @@ -202,7 +200,7 @@ object Duration { */ val MinusInf: Duration = new Duration with Infinite { override def toString = "Duration.MinusInf" - def compare(other: Duration) = -1 + def compare(other: Duration) = if (other eq this) 0 else -1 def unary_- : Duration = Inf } diff --git a/akka-actor/src/main/scala/akka/util/ListenerManagement.scala b/akka-actor/src/main/scala/akka/util/ListenerManagement.scala index 3efbcbc902..1d6df328d5 100644 --- a/akka-actor/src/main/scala/akka/util/ListenerManagement.scala +++ b/akka-actor/src/main/scala/akka/util/ListenerManagement.scala @@ -4,13 +4,15 @@ package akka.util +import akka.actor.Actor + import java.util.concurrent.ConcurrentSkipListSet import akka.actor.{ ActorInitializationException, ActorRef } /** * A manager for listener actors. Intended for mixin by observables. 
*/ -trait ListenerManagement { +trait ListenerManagement { this: Actor ⇒ private val listeners = new ConcurrentSkipListSet[ActorRef] @@ -33,7 +35,7 @@ trait ListenerManagement { */ def removeListener(listener: ActorRef) { listeners remove listener - if (manageLifeCycleOfListeners) listener.stop() + if (manageLifeCycleOfListeners) context.stop(listener) } /* diff --git a/akka-actor/src/main/scala/akka/util/cps/package.scala b/akka-actor/src/main/scala/akka/util/cps/package.scala index 6e88ff9cfe..7cbf60aaf2 100644 --- a/akka-actor/src/main/scala/akka/util/cps/package.scala +++ b/akka-actor/src/main/scala/akka/util/cps/package.scala @@ -42,7 +42,7 @@ package cps { if (test) Future(reify(block) flatMap (_ ⇒ reify(whileC(test)(block))) foreach c) else - Promise() completeWithResult (shiftUnitR[Unit, Future[Any]](()) foreach c) + Promise() success (shiftUnitR[Unit, Future[Any]](()) foreach c) } def repeatC[U](times: Int)(block: ⇒ U @cps[Future[Any]])(implicit dispatcher: MessageDispatcher, timeout: Timeout): Unit @cps[Future[Any]] = @@ -50,7 +50,7 @@ package cps { if (times > 0) Future(reify(block) flatMap (_ ⇒ reify(repeatC(times - 1)(block))) foreach c) else - Promise() completeWithResult (shiftUnitR[Unit, Future[Any]](()) foreach c) + Promise() success (shiftUnitR[Unit, Future[Any]](()) foreach c) } } diff --git a/akka-camel-typed/src/test/scala/akka/camel/TypedConsumerPublishRequestorTest.scala b/akka-camel-typed/src/test/scala/akka/camel/TypedConsumerPublishRequestorTest.scala index 358bedc070..322a0412c5 100644 --- a/akka-camel-typed/src/test/scala/akka/camel/TypedConsumerPublishRequestorTest.scala +++ b/akka-camel-typed/src/test/scala/akka/camel/TypedConsumerPublishRequestorTest.scala @@ -4,10 +4,11 @@ import java.util.concurrent.{ CountDownLatch, TimeUnit } import org.junit.{ Before, After, Test } import org.scalatest.junit.JUnitSuite - +import akka.util.duration._ import akka.actor._ import akka.actor.Actor._ import akka.camel.TypedCamelTestSupport.{ 
SetExpectedMessageCount ⇒ SetExpectedTestMessageCount, _ } +import akka.dispatch.Await class TypedConsumerPublishRequestorTest extends JUnitSuite { import TypedConsumerPublishRequestorTest._ @@ -39,10 +40,10 @@ class TypedConsumerPublishRequestorTest extends JUnitSuite { @Test def shouldReceiveOneConsumerMethodRegisteredEvent = { Actor.registry.addListener(requestor) - val latch = (publisher ? SetExpectedTestMessageCount(1)).as[CountDownLatch].get + val latch = Await.result((publisher ? SetExpectedTestMessageCount(1)).mapTo[CountDownLatch], 3 seconds) val obj = TypedActor.typedActorOf(classOf[SampleTypedSingleConsumer], classOf[SampleTypedSingleConsumerImpl], Props()) assert(latch.await(5000, TimeUnit.MILLISECONDS)) - val event = (publisher ? GetRetainedMessage).as[ConsumerMethodRegistered].get + val event = Await.result((publisher ? GetRetainedMessage).mapTo[ConsumerMethodRegistered], 3 seconds) assert(event.endpointUri === "direct:foo") assert(event.typedActor === obj) assert(event.methodName === "foo") @@ -50,21 +51,21 @@ class TypedConsumerPublishRequestorTest extends JUnitSuite { @Test def shouldReceiveOneConsumerMethodUnregisteredEvent = { - val latch = (publisher ? SetExpectedTestMessageCount(1)).as[CountDownLatch].get + val latch = Await.result((publisher ? SetExpectedTestMessageCount(1)).mapTo[CountDownLatch], 3 seconds) Actor.registry.addListener(requestor) val obj = TypedActor.typedActorOf(classOf[SampleTypedSingleConsumer], classOf[SampleTypedSingleConsumerImpl], Props()) assert(latch.await(5000, TimeUnit.MILLISECONDS)) - val ignorableEvent = (publisher ? GetRetainedMessage).as[ConsumerMethodRegistered].get + val ignorableEvent = Await.result((publisher ? GetRetainedMessage).mapTo[ConsumerMethodRegistered], 3 seconds) - val latch2 = (publisher ? SetExpectedTestMessageCount(1)).as[CountDownLatch].get + val latch2 = Await.result((publisher ? 
SetExpectedTestMessageCount(1)).mapTo[CountDownLatch], 3 seconds) TypedActor.stop(obj) assert(latch2.await(5000, TimeUnit.MILLISECONDS)) - val event = (publisher ? GetRetainedMessage).as[ConsumerMethodUnregistered].get + val event = Await.result((publisher ? GetRetainedMessage).mapTo[ConsumerMethodUnregistered], 3 seconds) assert(event.endpointUri === "direct:foo") assert(event.typedActor === obj) @@ -74,23 +75,23 @@ class TypedConsumerPublishRequestorTest extends JUnitSuite { @Test def shouldReceiveThreeConsumerMethodRegisteredEvents = { Actor.registry.addListener(requestor) - val latch = (publisher ? SetExpectedTestMessageCount(3)).as[CountDownLatch].get + val latch = Await.result((publisher ? SetExpectedTestMessageCount(3)).mapTo[CountDownLatch], 3 seconds) val obj = TypedActor.typedActorOf(classOf[SampleTypedConsumer], classOf[SampleTypedConsumerImpl], Props()) assert(latch.await(5000, TimeUnit.MILLISECONDS)) val request = GetRetainedMessages(_.isInstanceOf[ConsumerMethodRegistered]) - val events = (publisher ? request).as[List[ConsumerMethodRegistered]].get + val events = Await.result((publisher ? request).mapTo[List[ConsumerMethodRegistered]], 3 seconds) assert(events.map(_.method.getName).sortWith(_ < _) === List("m2", "m3", "m4")) } @Test def shouldReceiveThreeConsumerMethodUnregisteredEvents = { val obj = TypedActor.typedActorOf(classOf[SampleTypedConsumer], classOf[SampleTypedConsumerImpl], Props()) - val latch = (publisher ? SetExpectedTestMessageCount(3)).as[CountDownLatch].get + val latch = Await.result((publisher ? SetExpectedTestMessageCount(3)).mapTo[CountDownLatch], 3 seconds) Actor.registry.addListener(requestor) TypedActor.stop(obj) assert(latch.await(5000, TimeUnit.MILLISECONDS)) val request = GetRetainedMessages(_.isInstanceOf[ConsumerMethodUnregistered]) - val events = (publisher ? request).as[List[ConsumerMethodUnregistered]].get + val events = Await.result((publisher ? 
request).mapTo[List[ConsumerMethodUnregistered]], 3 seconds) assert(events.map(_.method.getName).sortWith(_ < _) === List("m2", "m3", "m4")) } } diff --git a/akka-camel/src/main/scala/akka/camel/CamelService.scala b/akka-camel/src/main/scala/akka/camel/CamelService.scala index eb3c8e4ae1..0b8a2aece0 100644 --- a/akka-camel/src/main/scala/akka/camel/CamelService.scala +++ b/akka-camel/src/main/scala/akka/camel/CamelService.scala @@ -14,6 +14,7 @@ import akka.japi.{ SideEffect, Option ⇒ JOption } import akka.util.Bootable import TypedCamelAccess._ +import akka.dispatch.Await /** * Publishes consumer actors at their Camel endpoints. Consumer actors are published asynchronously when @@ -164,7 +165,7 @@ trait CamelService extends Bootable { * activations that occurred in the past are not considered. */ private def expectEndpointActivationCount(count: Int): CountDownLatch = - (activationTracker ? SetExpectedActivationCount(count)).as[CountDownLatch].get + Await.result((activationTracker ? SetExpectedActivationCount(count)).mapTo[CountDownLatch], 3 seconds) /** * Sets an expectation on the number of upcoming endpoint de-activations and returns @@ -172,7 +173,7 @@ trait CamelService extends Bootable { * de-activations that occurred in the past are not considered. */ private def expectEndpointDeactivationCount(count: Int): CountDownLatch = - (activationTracker ? SetExpectedDeactivationCount(count)).as[CountDownLatch].get + Await.result((activationTracker ? 
SetExpectedDeactivationCount(count)).mapTo[CountDownLatch], 3 seconds) private[camel] def registerPublishRequestor: Unit = Actor.registry.addListener(publishRequestor) diff --git a/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala b/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala index c4ec7dcf31..c0d0281ab3 100644 --- a/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala +++ b/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala @@ -172,7 +172,7 @@ class ActorProducer(val ep: ActorEndpoint) extends DefaultProducer(ep) with Asyn private def sendSync(exchange: Exchange) = { val actor = target(exchange) - val result: Any = try { (actor ? requestFor(exchange)).as[Any] } catch { case e ⇒ Some(Failure(e)) } + val result: Any = try { Some(Await.result(actor ? requestFor(exchange), 5 seconds)) } catch { case e ⇒ Some(Failure(e)) } result match { case Some(Ack) ⇒ { /* no response message to set */ } @@ -294,7 +294,7 @@ private[akka] class AsyncCallbackAdapter(exchange: Exchange, callback: AsyncCall } def ?(message: Any)(implicit timeout: Timeout): Future[Any] = - new KeptPromise[Any](Left(new UnsupportedOperationException("Ask/? is not supported for %s".format(getClass.getName)))) + Promise.failed(new UnsupportedOperationException("Ask/?
is not supported for %s".format(getClass.getName))) def restart(reason: Throwable): Unit = unsupported private def unsupported = throw new UnsupportedOperationException("Not supported for %s" format classOf[AsyncCallbackAdapter].getName) diff --git a/akka-camel/src/test/scala/akka/camel/ConsumerPublishRequestorTest.scala b/akka-camel/src/test/scala/akka/camel/ConsumerPublishRequestorTest.scala index f77cec4c0b..1c1fb273fb 100644 --- a/akka-camel/src/test/scala/akka/camel/ConsumerPublishRequestorTest.scala +++ b/akka-camel/src/test/scala/akka/camel/ConsumerPublishRequestorTest.scala @@ -8,6 +8,7 @@ import org.scalatest.junit.JUnitSuite import akka.actor._ import akka.actor.Actor._ import akka.camel.CamelTestSupport.{ SetExpectedMessageCount ⇒ SetExpectedTestMessageCount, _ } +import akka.dispatch.Await class ConsumerPublishRequestorTest extends JUnitSuite { import ConsumerPublishRequestorTest._ @@ -35,19 +36,19 @@ class ConsumerPublishRequestorTest extends JUnitSuite { @Test def shouldReceiveOneConsumerRegisteredEvent = { - val latch = (publisher ? SetExpectedTestMessageCount(1)).as[CountDownLatch].get + val latch = Await.result((publisher ? SetExpectedTestMessageCount(1)).mapTo[CountDownLatch], 5 seconds) requestor ! ActorRegistered(consumer.address, consumer) assert(latch.await(5000, TimeUnit.MILLISECONDS)) - assert((publisher ? GetRetainedMessage).get === + assert(Await.result(publisher ? GetRetainedMessage, 5 seconds) === ConsumerActorRegistered(consumer, consumer.underlyingActorInstance.asInstanceOf[Consumer])) } @Test def shouldReceiveOneConsumerUnregisteredEvent = { - val latch = (publisher ? SetExpectedTestMessageCount(1)).as[CountDownLatch].get + val latch = Await.result((publisher ? SetExpectedTestMessageCount(1)).mapTo[CountDownLatch], 5 seconds) requestor ! ActorUnregistered(consumer.address, consumer) assert(latch.await(5000, TimeUnit.MILLISECONDS)) - assert((publisher ? GetRetainedMessage).get === + assert(Await.result(publisher ? 
GetRetainedMessage, 5 seconds) === ConsumerActorUnregistered(consumer, consumer.underlyingActorInstance.asInstanceOf[Consumer])) } } diff --git a/akka-cluster/src/main/scala/akka/cluster/Cluster.scala b/akka-cluster/src/main/scala/akka/cluster/Cluster.scala index 5a3f115ef8..505474213f 100644 --- a/akka-cluster/src/main/scala/akka/cluster/Cluster.scala +++ b/akka-cluster/src/main/scala/akka/cluster/Cluster.scala @@ -33,7 +33,6 @@ import Status._ import DeploymentConfig._ import akka.event.EventHandler -import akka.dispatch.{ Dispatchers, Future, PinnedDispatcher } import akka.config.Config import akka.config.Config._ @@ -52,6 +51,7 @@ import RemoteSystemDaemonMessageType._ import com.eaio.uuid.UUID import com.google.protobuf.ByteString +import akka.dispatch.{Await, Dispatchers, Future, PinnedDispatcher} // FIXME add watch for each node that when the entry for the node is removed then the node shuts itself down @@ -1150,22 +1150,17 @@ class DefaultClusterNode private[akka] ( connection ! command } else { try { - (connection ? (command, remoteDaemonAckTimeout)).as[Status] match { - - case Some(Success(status)) ⇒ + Await.result(connection ? 
(command, remoteDaemonAckTimeout), 10 seconds).asInstanceOf[Status] match { + case Success(status) ⇒ EventHandler.debug(this, "Remote command sent to [%s] successfully received".format(status)) - - case Some(Failure(cause)) ⇒ + case Failure(cause) ⇒ EventHandler.error(cause, this, cause.toString) throw cause - - case None ⇒ - val error = new ClusterException( - "Remote command to [%s] timed out".format(connection.address)) - EventHandler.error(error, this, error.toString) - throw error } } catch { + case e: TimeoutException => + EventHandler.error(e, this, "Remote command to [%s] timed out".format(connection.address)) + throw e case e: Exception ⇒ EventHandler.error(e, this, "Could not send remote command to [%s] due to: %s".format(connection.address, e.toString)) throw e diff --git a/akka-cluster/src/main/scala/akka/cluster/TransactionLog.scala b/akka-cluster/src/main/scala/akka/cluster/TransactionLog.scala index 7d593437ae..8aa1727ca9 100644 --- a/akka-cluster/src/main/scala/akka/cluster/TransactionLog.scala +++ b/akka-cluster/src/main/scala/akka/cluster/TransactionLog.scala @@ -195,7 +195,7 @@ class TransactionLog private ( EventHandler.debug(this, "Reading entries [%s -> %s] for log [%s]".format(from, to, logId)) if (isAsync) { - val future = new DefaultPromise[Vector[Array[Byte]]](timeout) + val future = Promise[Vector[Array[Byte]]]() ledger.asyncReadEntries( from, to, new AsyncCallback.ReadCallback { @@ -203,8 +203,8 @@ class TransactionLog private ( val future = ctx.asInstanceOf[Promise[Vector[Array[Byte]]]] val entries = toByteArrays(enumeration) - if (returnCode == BKException.Code.OK) future.completeWithResult(entries) - else future.completeWithException(BKException.create(returnCode)) + if (returnCode == BKException.Code.OK) future.success(entries) + else future.failure(BKException.create(returnCode)) } }, future) @@ -457,7 +457,7 @@ object TransactionLog { } } - val future = new DefaultPromise[LedgerHandle](timeout) + val future = 
Promise[LedgerHandle]() if (isAsync) { bookieClient.asyncCreateLedger( ensembleSize, quorumSize, digestType, password, @@ -467,8 +467,8 @@ object TransactionLog { ledgerHandle: LedgerHandle, ctx: AnyRef) { val future = ctx.asInstanceOf[Promise[LedgerHandle]] - if (returnCode == BKException.Code.OK) future.completeWithResult(ledgerHandle) - else future.completeWithException(BKException.create(returnCode)) + if (returnCode == BKException.Code.OK) future.success(ledgerHandle) + else future.failure(BKException.create(returnCode)) } }, future) @@ -519,14 +519,14 @@ object TransactionLog { val ledger = try { if (isAsync) { - val future = new DefaultPromise[LedgerHandle](timeout) + val future = Promise[LedgerHandle]() bookieClient.asyncOpenLedger( logId, digestType, password, new AsyncCallback.OpenCallback { def openComplete(returnCode: Int, ledgerHandle: LedgerHandle, ctx: AnyRef) { val future = ctx.asInstanceOf[Promise[LedgerHandle]] - if (returnCode == BKException.Code.OK) future.completeWithResult(ledgerHandle) - else future.completeWithException(BKException.create(returnCode)) + if (returnCode == BKException.Code.OK) future.success(ledgerHandle) + else future.failure(BKException.create(returnCode)) } }, future) @@ -542,10 +542,10 @@ object TransactionLog { } private[akka] def await[T](future: Promise[T]): T = { - future.await - if (future.result.isDefined) future.result.get - else if (future.exception.isDefined) handleError(future.exception.get) - else handleError(new ReplicationException("No result from async read of entries for transaction log")) + future.await.value.get match { + case Right(result) => result + case Left(throwable) => handleError(throwable) + } } private[akka] def handleError(e: Throwable): Nothing = { diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/metrics/local/LocalMetricsMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/metrics/local/LocalMetricsMultiJvmSpec.scala index 456fd4f65a..50b7741758 100644 --- 
a/akka-cluster/src/multi-jvm/scala/akka/cluster/metrics/local/LocalMetricsMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/metrics/local/LocalMetricsMultiJvmSpec.scala @@ -73,7 +73,7 @@ class LocalMetricsMultiJvmNode1 extends MasterClusterTestNode { } "allow to track JVM state and bind handles through MetricsAlterationMonitors" in { - val monitorReponse = new DefaultPromise[String] + val monitorReponse = Promise[String]() node.metricsManager.addMonitor(new LocalMetricsAlterationMonitor { @@ -81,11 +81,11 @@ class LocalMetricsMultiJvmNode1 extends MasterClusterTestNode { def reactsOn(metrics: NodeMetrics) = metrics.usedHeapMemory > 1 - def react(metrics: NodeMetrics) = monitorReponse.completeWithResult("Too much memory is used!") + def react(metrics: NodeMetrics) = monitorReponse.success("Too much memory is used!") }) - monitorReponse.get must be("Too much memory is used!") + Await.result(monitorReponse, 5 seconds) must be("Too much memory is used!") } diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/failover/DirectRoutingFailoverMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/failover/DirectRoutingFailoverMultiJvmSpec.scala index 260a365019..6bc1653836 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/failover/DirectRoutingFailoverMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/failover/DirectRoutingFailoverMultiJvmSpec.scala @@ -11,6 +11,7 @@ import akka.testkit.{ EventFilter, TestEvent } import java.net.ConnectException import java.nio.channels.NotYetConnectedException import akka.cluster.LocalCluster +import akka.dispatch.Await object DirectRoutingFailoverMultiJvmSpec { @@ -48,7 +49,7 @@ class DirectRoutingFailoverMultiJvmNode1 extends MasterClusterTestNode { } LocalCluster.barrier("verify-actor", NrOfNodes) { - (actor ? "identify").get must equal("node2") + Await.result(actor ? 
"identify", timeout.duration) must equal("node2") } val timer = Timer(30.seconds, true) diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/failover/RandomFailoverMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/failover/RandomFailoverMultiJvmSpec.scala index 2842c55a97..cbdc42dbe9 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/failover/RandomFailoverMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/failover/RandomFailoverMultiJvmSpec.scala @@ -11,6 +11,7 @@ import java.util.{ Collections, Set ⇒ JSet } import java.net.ConnectException import java.nio.channels.NotYetConnectedException import akka.cluster.LocalCluster._ +import akka.dispatch.Await object RandomFailoverMultiJvmSpec { @@ -91,7 +92,7 @@ class RandomFailoverMultiJvmNode1 extends MasterClusterTestNode { def identifyConnections(actor: ActorRef): JSet[String] = { val set = new java.util.HashSet[String] for (i ← 0 until 100) { // we should get hits from both nodes in 100 attempts, if not then not very random - val value = (actor ? "identify").get.asInstanceOf[String] + val value = Await.result(actor ? 
"identify", timeout.duration).asInstanceOf[String] set.add(value) } set diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/replicationfactor_3/Random3ReplicasMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/replicationfactor_3/Random3ReplicasMultiJvmSpec.scala index edb000b566..cfe98d5680 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/replicationfactor_3/Random3ReplicasMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/replicationfactor_3/Random3ReplicasMultiJvmSpec.scala @@ -9,6 +9,7 @@ import akka.actor._ import akka.config.Config import Cluster._ import akka.cluster.LocalCluster._ +import akka.dispatch.Await /** * When a MultiJvmNode is started, will it automatically be part of the cluster (so will it automatically be eligible @@ -78,7 +79,7 @@ class Random3ReplicasMultiJvmNode2 extends ClusterTestNode { } for (i ← 0 until 1000) { - count((hello ? "Hello").as[String].getOrElse(fail("Should have recieved reply from a node"))) + count(Await.result((hello ? 
"Hello").mapTo[String], 10 seconds)) } val repliesNode1 = replies("World from node [node1]") diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/failover/RoundRobinFailoverMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/failover/RoundRobinFailoverMultiJvmSpec.scala index 63cd6c6313..1b97ef1075 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/failover/RoundRobinFailoverMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/failover/RoundRobinFailoverMultiJvmSpec.scala @@ -12,6 +12,7 @@ import java.net.ConnectException import java.nio.channels.NotYetConnectedException import java.lang.Thread import akka.cluster.LocalCluster._ +import akka.dispatch.Await object RoundRobinFailoverMultiJvmSpec { @@ -94,7 +95,7 @@ class RoundRobinFailoverMultiJvmNode1 extends MasterClusterTestNode { def identifyConnections(actor: ActorRef): JSet[String] = { val set = new java.util.HashSet[String] for (i ← 0 until 100) { - val value = (actor ? "identify").get.asInstanceOf[String] + val value = Await.result(actor ? 
"identify", timeout.duration).asInstanceOf[String] set.add(value) } set diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/replicationfactor_2/RoundRobin2ReplicasMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/replicationfactor_2/RoundRobin2ReplicasMultiJvmSpec.scala index a99dbbbae9..1803ec2c83 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/replicationfactor_2/RoundRobin2ReplicasMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/replicationfactor_2/RoundRobin2ReplicasMultiJvmSpec.scala @@ -20,6 +20,7 @@ import akka.cluster.LocalCluster._ import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.ConcurrentHashMap +import akka.dispatch.Await /** * When a MultiJvmNode is started, will it automatically be part of the cluster (so will it automatically be eligible @@ -107,14 +108,8 @@ class RoundRobin2ReplicasMultiJvmNode2 extends ClusterTestNode { implicit val timeout = Timeout(Duration(20, "seconds")) - count((hello ? "Hello").as[String].getOrElse(fail("Should have recieved reply from node1"))) - count((hello ? "Hello").as[String].getOrElse(fail("Should have recieved reply from node2"))) - count((hello ? "Hello").as[String].getOrElse(fail("Should have recieved reply from node1"))) - count((hello ? "Hello").as[String].getOrElse(fail("Should have recieved reply from node2"))) - count((hello ? "Hello").as[String].getOrElse(fail("Should have recieved reply from node1"))) - count((hello ? "Hello").as[String].getOrElse(fail("Should have recieved reply from node2"))) - count((hello ? "Hello").as[String].getOrElse(fail("Should have recieved reply from node1"))) - count((hello ? "Hello").as[String].getOrElse(fail("Should have recieved reply from node2"))) + for(i <- 1 to 8) + count(Await.result((hello ? 
"Hello").mapTo[String], timeout.duration)) replies.get("World from node [node1]").get must equal(4) replies.get("World from node [node2]").get must equal(4) diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/scattergather/failover/ScatterGatherFailoverMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/scattergather/failover/ScatterGatherFailoverMultiJvmSpec.scala index 90f9e0aa56..e8cc4f7d68 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/scattergather/failover/ScatterGatherFailoverMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/scattergather/failover/ScatterGatherFailoverMultiJvmSpec.scala @@ -11,6 +11,7 @@ import java.nio.channels.NotYetConnectedException import java.lang.Thread import akka.routing.Routing.Broadcast import akka.cluster.LocalCluster._ +import akka.dispatch.Await object ScatterGatherFailoverMultiJvmSpec { @@ -84,7 +85,7 @@ class ScatterGatherFailoverMultiJvmNode1 extends MasterClusterTestNode { def identifyConnections(actor: ActorRef): JSet[String] = { val set = new java.util.HashSet[String] for (i ← 0 until NrOfNodes * 2) { - val value = (actor ? "foo").get.asInstanceOf[String] + val value = Await.result(actor ? 
"foo", timeout.duration).asInstanceOf[String] set.add(value) } set diff --git a/akka-cluster/src/test/scala/akka/cluster/sample/ComputeGridSample.scala b/akka-cluster/src/test/scala/akka/cluster/sample/ComputeGridSample.scala index 7a3a9ca606..cb57df1718 100644 --- a/akka-cluster/src/test/scala/akka/cluster/sample/ComputeGridSample.scala +++ b/akka-cluster/src/test/scala/akka/cluster/sample/ComputeGridSample.scala @@ -46,7 +46,7 @@ object ComputeGridSample { val fun = () ⇒ "AKKA ROCKS" val futures = local send (fun, 2) // send and invoke function on to two cluster nodes and get result - val result = Futures.fold("")(futures)(_ + " - " + _).await.resultOrException + val result = Await.sync(Futures.fold("")(futures)(_ + " - " + _), timeout) println("===================>>> Cluster says [" + result + "]") local.stop @@ -80,8 +80,8 @@ object ComputeGridSample { val future2 = local send (fun, 2, 1) head // send and invoke function on one cluster node and get result // grab the result from the first one that returns - val result = Futures.firstCompletedOf(List(future1, future2)).await.resultOrException - println("===================>>> Cluster says [" + result.get + "]") + val result = Await.sync(Futures.firstCompletedOf(List(future1, future2)), timeout) + println("===================>>> Cluster says [" + result + "]") local.stop remote1.stop diff --git a/akka-docs/.history b/akka-docs/.history new file mode 100644 index 0000000000..a3abe50906 --- /dev/null +++ b/akka-docs/.history @@ -0,0 +1 @@ +exit diff --git a/akka-docs/common/index.rst b/akka-docs/common/index.rst index f3ed26aa73..4e19d1a1aa 100644 --- a/akka-docs/common/index.rst +++ b/akka-docs/common/index.rst @@ -4,5 +4,4 @@ Common utilities .. 
toctree:: :maxdepth: 2 - scheduler duration diff --git a/akka-docs/general/code/akka/docs/config/ConfigDocSpec.scala b/akka-docs/general/code/akka/docs/config/ConfigDocSpec.scala index 557c33ff53..b520a3b45d 100644 --- a/akka-docs/general/code/akka/docs/config/ConfigDocSpec.scala +++ b/akka-docs/general/code/akka/docs/config/ConfigDocSpec.scala @@ -26,7 +26,7 @@ class ConfigDocSpec extends WordSpec with MustMatchers { val system = ActorSystem("MySystem", ConfigFactory.load(customConf)) //#custom-config - system.stop() + system.shutdown() } diff --git a/akka-docs/general/configuration.rst b/akka-docs/general/configuration.rst index 6f00cae81f..5bbb012a1d 100644 --- a/akka-docs/general/configuration.rst +++ b/akka-docs/general/configuration.rst @@ -11,26 +11,26 @@ Configuration Specifying the configuration file --------------------------------- -If you don't specify a configuration file then Akka uses default values, corresponding to the reference -configuration files that you see below. You can specify your own configuration file to override any -property in the reference config. You only have to define the properties that differ from the default +If you don't specify a configuration file then Akka uses default values, corresponding to the reference +configuration files that you see below. You can specify your own configuration file to override any +property in the reference config. You only have to define the properties that differ from the default configuration. -By default the ``ConfigFactory.load`` method is used, which will load all ``application.conf`` (and +By default the ``ConfigFactory.load`` method is used, which will load all ``application.conf`` (and ``application.json`` and ``application.properties``) from the root of the classpath, if they exists. -It uses ``ConfigFactory.defaultOverrides``, i.e. system properties, before falling back to +It uses ``ConfigFactory.defaultOverrides``, i.e. 
system properties, before falling back to application and reference configuration. Note that *all* ``application.{conf,json,properties}`` classpath resources, from all directories and -jar files, are loaded and merged. Therefore it is a good practice to define separate sub-trees in the +jar files, are loaded and merged. Therefore it is a good practice to define separate sub-trees in the configuration for each actor system, and grab the specific configuration when instantiating the ActorSystem. :: - - myapp1 { + + myapp1 { akka.loglevel = WARNING } - myapp2 { + myapp2 { akka.loglevel = ERROR } @@ -44,7 +44,7 @@ classpath resource, file, or URL specified in those properties will be used rath ``application.{conf,json,properties}`` classpath resources. Note that classpath resource names start with ``/``. ``-Dconfig.resource=/dev.conf`` will load the ``dev.conf`` from the root of the classpath. -You may also specify and parse the configuration programmatically in other ways when instantiating +You may also specify and parse the configuration programmatically in other ways when instantiating the ``ActorSystem``. .. includecode:: code/akka/docs/config/ConfigDocSpec.scala @@ -66,7 +66,7 @@ Each Akka module has a reference configuration file with the default values. .. literalinclude:: ../../akka-remote/src/main/resources/reference.conf :language: none - + *akka-testkit:* .. literalinclude:: ../../akka-testkit/src/main/resources/reference.conf @@ -103,30 +103,30 @@ A custom ``application.conf`` might look like this:: # Copy in parts of the reference files and modify as you please. 
akka { + + # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT) event-handlers = ["akka.event.slf4j.Slf4jEventHandler"] - loglevel = DEBUG # Options: ERROR, WARNING, INFO, DEBUG - # this level is used by the configured loggers (see "event-handlers") as soon - # as they have been started; before that, see "stdout-loglevel" - stdout-loglevel = DEBUG # Loglevel for the very basic logger activated during AkkaApplication startup - # Comma separated list of the enabled modules. - enabled-modules = ["camel", "remote"] + # Log level used by the configured loggers (see "event-handlers") as soon + # as they have been started; before that, see "stdout-loglevel" + # Options: ERROR, WARNING, INFO, DEBUG + loglevel = DEBUG - # These boot classes are loaded (and created) automatically when the Akka Microkernel boots up - # Can be used to bootstrap your application(s) - # Should be the FQN (Fully Qualified Name) of the boot class which needs to have a default constructor - boot = ["sample.camel.Boot", - "sample.myservice.Boot"] + # Log level for the very basic logger activated during AkkaApplication startup + # Options: ERROR, WARNING, INFO, DEBUG + stdout-loglevel = DEBUG actor { default-dispatcher { - throughput = 10 # Throughput for default Dispatcher, set to 1 for as fair as possible + # Throughput for default Dispatcher, set to 1 for as fair as possible + throughput = 10 } } remote { server { - port = 2562 # The port clients should connect to. Default is 2552 (AKKA) + # The port clients should connect to. Default is 2552 (AKKA) + port = 2562 } } } @@ -136,7 +136,7 @@ Config file format ------------------ The configuration file syntax is described in the `HOCON `_ -specification. Note that it supports three formats; conf, json, and properties. +specification. Note that it supports three formats; conf, json, and properties. 
Including files @@ -145,7 +145,7 @@ Including files Sometimes it can be useful to include another configuration file, for example if you have one ``application.conf`` with all environment independent settings and then override some settings for specific environments. -Specifying system property with ``-Dconfig.resource=/dev.conf`` will load the ``dev.conf`` file, which includes the ``application.conf`` +Specifying system property with ``-Dconfig.resource=/dev.conf`` will load the ``dev.conf`` file, which includes the ``application.conf`` dev.conf: @@ -166,6 +166,6 @@ specification. Logging of Configuration ------------------------ -If the system or config property ``akka.logConfigOnStart`` is set to ``on``, then the -complete configuration at INFO level when the actor system is started. This is useful +If the system or config property ``akka.logConfigOnStart`` is set to ``on``, then the +complete configuration at INFO level when the actor system is started. This is useful when you are uncertain of what configuration is used. diff --git a/akka-docs/general/jmm.rst b/akka-docs/general/jmm.rst index ecb6dad6f1..23871449ef 100644 --- a/akka-docs/general/jmm.rst +++ b/akka-docs/general/jmm.rst @@ -81,7 +81,7 @@ Since Akka runs on the JVM there are still some rules to be followed. // Very bad, shared mutable state, // will break your application in weird ways Future { state = NewState } - anotherActor ? message onResult { r => state = r } + anotherActor ? 
message onSuccess { r => state = r } // Very bad, "sender" changes for every message, // shared mutable state bug diff --git a/akka-docs/general/supervision.rst b/akka-docs/general/supervision.rst index 0867d931f8..74c95abfc5 100644 --- a/akka-docs/general/supervision.rst +++ b/akka-docs/general/supervision.rst @@ -25,7 +25,9 @@ which explains the existence of the fourth choice (as a supervisor also is subordinate to another supervisor higher up) and has implications on the first three: resuming an actor resumes all its subordinates, restarting an actor entails restarting all its subordinates, similarly stopping an actor will also -stop all its subordinates. +stop all its subordinates. It should be noted that the default behavior of an +actor is to stop all its children before restarting, but this can be overridden +using the :meth:`preRestart` hook. Each supervisor is configured with a function translating all possible failure causes (i.e. exceptions) into one of the four choices given above; notably, @@ -69,14 +71,12 @@ that the restart is not visible outside of the actor itself with the notable exception that the message during which the failure occurred is not re-processed. -Restarting an actor in this way recursively restarts all its children in the -same fashion, whereby all parent–child relationships are kept intact. If this -is not the right approach for certain sub-trees of the supervision hierarchy, -you should choose to stop the failed actor instead, which will terminate all -its children recursively, after which that part of the system may be recreated -from scratch. The second part of this action may be implemented using the -lifecycle monitoring described next or using lifecycle callbacks as described -in :class:`Actor`. +Restarting an actor in this way recursively terminates all its children. 
If +this is not the right approach for certain sub-trees of the supervision +hierarchy, you may choose to retain the children, in which case they will be +recursively restarted in the same fashion as the failed parent (with the same +default to terminate children, which must be overridden on a per-actor basis, +see :class:`Actor` for details). What Lifecycle Monitoring Means ------------------------------- diff --git a/akka-docs/java/code/akka/docs/actor/UntypedActorTest.scala b/akka-docs/java/code/akka/docs/actor/SchedulerDocTest.scala similarity index 51% rename from akka-docs/java/code/akka/docs/actor/UntypedActorTest.scala rename to akka-docs/java/code/akka/docs/actor/SchedulerDocTest.scala index 1747f30f92..b522a142d8 100644 --- a/akka-docs/java/code/akka/docs/actor/UntypedActorTest.scala +++ b/akka-docs/java/code/akka/docs/actor/SchedulerDocTest.scala @@ -2,4 +2,4 @@ package akka.docs.actor import org.scalatest.junit.JUnitSuite -class UntypedActorTest extends UntypedActorTestBase with JUnitSuite +class SchedulerDocTest extends SchedulerDocTestBase with JUnitSuite diff --git a/akka-docs/java/code/akka/docs/actor/SchedulerDocTestBase.java b/akka-docs/java/code/akka/docs/actor/SchedulerDocTestBase.java new file mode 100644 index 0000000000..bbcec2f4e5 --- /dev/null +++ b/akka-docs/java/code/akka/docs/actor/SchedulerDocTestBase.java @@ -0,0 +1,85 @@ +package akka.docs.actor; + +//#imports1 +import akka.actor.Props; +import akka.util.Duration; +import java.util.concurrent.TimeUnit; + +//#imports1 + +//#imports2 +import akka.actor.UntypedActor; +import akka.actor.UntypedActorFactory; +import akka.actor.Cancellable; + +//#imports2 + +import akka.actor.ActorRef; +import akka.actor.ActorSystem; +import akka.testkit.AkkaSpec; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import static org.junit.Assert.*; + +public class SchedulerDocTestBase { + + ActorSystem system; + ActorRef testActor; + + @Before + public void setUp() { + system = 
ActorSystem.create("MySystem", AkkaSpec.testConf()); + testActor = system.actorOf(new Props().withCreator(MyUntypedActor.class)); + } + + @After + public void tearDown() { + system.shutdown(); + } + + @Test + public void scheduleOneOffTask() { + //#schedule-one-off-message + //Schedules to send the "foo"-message to the testActor after 50ms + system.scheduler().scheduleOnce(Duration.create(50, TimeUnit.MILLISECONDS), testActor, "foo"); + //#schedule-one-off-message + + //#schedule-one-off-thunk + //Schedules a Runnable to be executed (send the current time) to the testActor after 50ms + system.scheduler().scheduleOnce(Duration.create(50, TimeUnit.MILLISECONDS), new Runnable() { + @Override + public void run() { + testActor.tell(System.currentTimeMillis()); + } + }); + //#schedule-one-off-thunk + } + + @Test + public void scheduleRecurringTask() { + //#schedule-recurring + ActorRef tickActor = system.actorOf(new Props().withCreator(new UntypedActorFactory() { + public UntypedActor create() { + return new UntypedActor() { + public void onReceive(Object message) { + if (message.equals("Tick")) { + // Do someting + } + } + }; + } + })); + + //This will schedule to send the Tick-message + //to the tickActor after 0ms repeating every 50ms + Cancellable cancellable = system.scheduler().schedule(Duration.Zero(), Duration.create(50, TimeUnit.MILLISECONDS), + tickActor, "Tick"); + + //This cancels further Ticks to be sent + cancellable.cancel(); + //#schedule-recurring + system.stop(tickActor); + } +} diff --git a/akka-docs/java/code/akka/docs/actor/UntypedActorDocTest.scala b/akka-docs/java/code/akka/docs/actor/UntypedActorDocTest.scala new file mode 100644 index 0000000000..76b3b990fa --- /dev/null +++ b/akka-docs/java/code/akka/docs/actor/UntypedActorDocTest.scala @@ -0,0 +1,5 @@ +package akka.docs.actor + +import org.scalatest.junit.JUnitSuite + +class UntypedActorDocTest extends UntypedActorDocTestBase with JUnitSuite diff --git 
a/akka-docs/java/code/akka/docs/actor/UntypedActorTestBase.java b/akka-docs/java/code/akka/docs/actor/UntypedActorDocTestBase.java similarity index 87% rename from akka-docs/java/code/akka/docs/actor/UntypedActorTestBase.java rename to akka-docs/java/code/akka/docs/actor/UntypedActorDocTestBase.java index c2a877d962..1093f58caf 100644 --- a/akka-docs/java/code/akka/docs/actor/UntypedActorTestBase.java +++ b/akka-docs/java/code/akka/docs/actor/UntypedActorDocTestBase.java @@ -9,6 +9,8 @@ import akka.actor.Props; //#import-future import akka.dispatch.Future; +import akka.dispatch.Await; +import akka.util.Duration; //#import-future @@ -28,12 +30,13 @@ import akka.actor.UntypedActorFactory; import akka.dispatch.MessageDispatcher; import org.junit.Test; - import scala.Option; +import java.lang.Object; +import java.util.concurrent.TimeUnit; import static org.junit.Assert.*; -public class UntypedActorTestBase { +public class UntypedActorDocTestBase { @Test public void systemActorOf() { @@ -42,7 +45,7 @@ public class UntypedActorTestBase { ActorRef myActor = system.actorOf(new Props(MyUntypedActor.class)); //#system-actorOf myActor.tell("test"); - system.stop(); + system.shutdown(); } @Test @@ -52,7 +55,7 @@ public class UntypedActorTestBase { ActorRef myActor = system.actorOf(new Props(MyUntypedActor.class)); //#context-actorOf myActor.tell("test"); - system.stop(); + system.shutdown(); } @Test @@ -67,7 +70,7 @@ public class UntypedActorTestBase { })); //#creating-constructor myActor.tell("test"); - system.stop(); + system.shutdown(); } @Test @@ -75,12 +78,11 @@ public class UntypedActorTestBase { ActorSystem system = ActorSystem.create("MySystem"); //#creating-props MessageDispatcher dispatcher = system.dispatcherFactory().lookup("my-dispatcher"); - ActorRef myActor = system.actorOf( - new Props().withCreator(MyUntypedActor.class).withDispatcher(dispatcher), + ActorRef myActor = system.actorOf(new Props().withCreator(MyUntypedActor.class).withDispatcher(dispatcher), 
"myactor"); //#creating-props myActor.tell("test"); - system.stop(); + system.shutdown(); } @Test @@ -93,19 +95,10 @@ public class UntypedActorTestBase { })); //#using-ask - Future future = myActor.ask("Hello", 1000); - future.await(); - if (future.isCompleted()) { - Option resultOption = future.result(); - if (resultOption.isDefined()) { - Object result = resultOption.get(); - // ... - } else { - //... whatever - } - } + Future future = myActor.ask("Hello", 1000); + Object result = Await.result(future, Duration.create(1, TimeUnit.SECONDS)); //#using-ask - system.stop(); + system.shutdown(); } @Test @@ -113,7 +106,7 @@ public class UntypedActorTestBase { ActorSystem system = ActorSystem.create("MySystem"); ActorRef myActor = system.actorOf(new Props(MyReceivedTimeoutUntypedActor.class)); myActor.tell("Hello"); - system.stop(); + system.shutdown(); } @Test @@ -123,7 +116,7 @@ public class UntypedActorTestBase { //#poison-pill myActor.tell(poisonPill()); //#poison-pill - system.stop(); + system.shutdown(); } @Test @@ -133,7 +126,7 @@ public class UntypedActorTestBase { //#kill victim.tell(kill()); //#kill - system.stop(); + system.shutdown(); } @Test @@ -147,7 +140,7 @@ public class UntypedActorTestBase { myActor.tell("foo"); myActor.tell("bar"); myActor.tell("bar"); - system.stop(); + system.shutdown(); } public static class MyActor extends UntypedActor { @@ -172,6 +165,8 @@ public class UntypedActorTestBase { } public void preRestart(Throwable reason, Option message) { + for (ActorRef each : getContext().getChildren()) + getContext().stop(each); postStop(); } diff --git a/akka-docs/java/code/akka/docs/dispatcher/DispatcherDocTest.scala b/akka-docs/java/code/akka/docs/dispatcher/DispatcherDocTest.scala new file mode 100644 index 0000000000..c83eed0df4 --- /dev/null +++ b/akka-docs/java/code/akka/docs/dispatcher/DispatcherDocTest.scala @@ -0,0 +1,5 @@ +package akka.docs.dispatcher + +import org.scalatest.junit.JUnitSuite + +class DispatcherDocTest extends 
DispatcherDocTestBase with JUnitSuite diff --git a/akka-docs/java/code/akka/docs/dispatcher/DispatcherDocTestBase.java b/akka-docs/java/code/akka/docs/dispatcher/DispatcherDocTestBase.java new file mode 100644 index 0000000000..28c0ad4477 --- /dev/null +++ b/akka-docs/java/code/akka/docs/dispatcher/DispatcherDocTestBase.java @@ -0,0 +1,131 @@ +package akka.docs.dispatcher; + +//#imports +import akka.actor.ActorRef; +import akka.actor.Props; +import akka.dispatch.MessageDispatcher; + +//#imports + +//#imports-prio +import akka.actor.UntypedActor; +import akka.actor.UntypedActorFactory; +import akka.actor.Actors; +import akka.dispatch.PriorityGenerator; +import akka.dispatch.UnboundedPriorityMailbox; +import akka.event.Logging; +import akka.event.LoggingAdapter; + +//#imports-prio + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import scala.Option; +import static org.junit.Assert.*; + +import com.typesafe.config.ConfigFactory; + +import akka.actor.ActorSystem; +import akka.docs.actor.MyUntypedActor; +import akka.docs.actor.UntypedActorDocTestBase.MyActor; +import akka.testkit.AkkaSpec; + +public class DispatcherDocTestBase { + + ActorSystem system; + + @Before + public void setUp() { + system = ActorSystem.create("MySystem", + ConfigFactory.parseString(DispatcherDocSpec.config()).withFallback(AkkaSpec.testConf())); + } + + @After + public void tearDown() { + system.shutdown(); + } + + @Test + public void defineDispatcher() { + //#defining-dispatcher + MessageDispatcher dispatcher = system.dispatcherFactory().lookup("my-dispatcher"); + ActorRef myActor1 = system.actorOf(new Props().withCreator(MyUntypedActor.class).withDispatcher(dispatcher), + "myactor1"); + ActorRef myActor2 = system.actorOf(new Props().withCreator(MyUntypedActor.class).withDispatcher(dispatcher), + "myactor2"); + //#defining-dispatcher + } + + @Test + public void definePinnedDispatcher() { + //#defining-pinned-dispatcher + String name = "myactor"; + MessageDispatcher 
dispatcher = system.dispatcherFactory().newPinnedDispatcher(name); + ActorRef myActor = system.actorOf(new Props().withCreator(MyUntypedActor.class).withDispatcher(dispatcher), name); + //#defining-pinned-dispatcher + } + + @Test + public void priorityDispatcher() throws Exception { + //#prio-dispatcher + PriorityGenerator generator = new PriorityGenerator() { // Create a new PriorityGenerator, lower prio means more important + @Override + public int gen(Object message) { + if (message.equals("highpriority")) + return 0; // 'highpriority messages should be treated first if possible + else if (message.equals("lowpriority")) + return 100; // 'lowpriority messages should be treated last if possible + else if (message.equals(Actors.poisonPill())) + return 1000; // PoisonPill when no other left + else + return 50; // We default to 50 + } + }; + + // We create a new Priority dispatcher and seed it with the priority generator + MessageDispatcher dispatcher = system.dispatcherFactory() + .newDispatcher("foo", 5, new UnboundedPriorityMailbox(generator)).build(); + + ActorRef myActor = system.actorOf( // We create a new Actor that just prints out what it processes + new Props().withCreator(new UntypedActorFactory() { + public UntypedActor create() { + return new UntypedActor() { + LoggingAdapter log = Logging.getLogger(getContext().system(), this); + { + getSelf().tell("lowpriority"); + getSelf().tell("lowpriority"); + getSelf().tell("highpriority"); + getSelf().tell("pigdog"); + getSelf().tell("pigdog2"); + getSelf().tell("pigdog3"); + getSelf().tell("highpriority"); + getSelf().tell(Actors.poisonPill()); + } + + public void onReceive(Object message) { + log.info(message.toString()); + } + }; + } + }).withDispatcher(dispatcher)); + + /* + Logs: + 'highpriority + 'highpriority + 'pigdog + 'pigdog2 + 'pigdog3 + 'lowpriority + 'lowpriority + */ + //#prio-dispatcher + + for (int i = 0; i < 10; i++) { + if (myActor.isTerminated()) + break; + Thread.sleep(100); + } + } +} diff 
--git a/akka-docs/java/code/akka/docs/event/LoggingDocTestBase.java b/akka-docs/java/code/akka/docs/event/LoggingDocTestBase.java index 3241623e95..ba689e2fa1 100644 --- a/akka-docs/java/code/akka/docs/event/LoggingDocTestBase.java +++ b/akka-docs/java/code/akka/docs/event/LoggingDocTestBase.java @@ -37,7 +37,7 @@ public class LoggingDocTestBase { } })); myActor.tell("test"); - system.stop(); + system.shutdown(); } //#my-actor diff --git a/akka-docs/java/dispatchers.rst b/akka-docs/java/dispatchers.rst index 2b5b311fef..d053501d78 100644 --- a/akka-docs/java/dispatchers.rst +++ b/akka-docs/java/dispatchers.rst @@ -1,7 +1,7 @@ .. _dispatchers-java: Dispatchers (Java) -================== +=================== .. sidebar:: Contents @@ -9,204 +9,132 @@ Dispatchers (Java) The Dispatcher is an important piece that allows you to configure the right semantics and parameters for optimal performance, throughput and scalability. Different Actors have different needs. -Akka supports dispatchers for both event-driven lightweight threads, allowing creation of millions threads on a single workstation, and thread-based Actors, where each dispatcher is bound to a dedicated OS thread. +Akka supports dispatchers for both event-driven lightweight threads, allowing creation of millions of threads on a single workstation, and thread-based Actors, where each dispatcher is bound to a dedicated OS thread. The event-based Actors currently consume ~600 bytes per Actor which means that you can create more than 6.5 million Actors on 4 GB RAM. Default dispatcher ------------------ -For most scenarios the default settings are the best. Here we have one single event-based dispatcher for all Actors created. The default dispatcher used is "GlobalDispatcher" which also is retrievable in ``akka.dispatch.Dispatchers.globalDispatcher``. -The Dispatcher specified in the :ref:`configuration` as "default-dispatcher" is as ``Dispatchers.defaultGlobalDispatcher``. 
+For most scenarios the default settings are the best. Here we have one single event-based dispatcher for all Actors created. +The default dispatcher is available from the ``ActorSystem.dispatcher`` and can be configured in the ``akka.actor.default-dispatcher`` +section of the :ref:`configuration`. -The "GlobalDispatcher" is not configurable but will use default parameters given by Akka itself. - -But if you feel that you are starting to contend on the single dispatcher (the 'Executor' and its queue) or want to group a specific set of Actors for a dedicated dispatcher for better flexibility and configurability then you can override the defaults and define your own dispatcher. See below for details on which ones are available and how they can be configured. +If you are starting to get contention on the single dispatcher (the ``Executor`` and its queue) or want to group a specific set of Actors +for a dedicated dispatcher for better flexibility and configurability then you can override the defaults and define your own dispatcher. +See below for details on which ones are available and how they can be configured. Setting the dispatcher ---------------------- -Normally you set the dispatcher from within the Actor itself. The dispatcher is defined by the 'dispatcher: MessageDispatcher' member field in 'ActorRef'. +You specify the dispatcher to use when creating an actor. -.. code-block:: java - - class MyActor extends UntypedActor { - public MyActor() { - getContext().setDispatcher(..); // set the dispatcher - } - ... - } - -You can also set the dispatcher for an Actor **before** it has been started: - -.. code-block:: java - - actorRef.setDispatcher(dispatcher); +.. 
includecode:: code/akka/docs/dispatcher/DispatcherDocTestBase.java + :include: imports,defining-dispatcher Types of dispatchers -------------------- -There are six different types of message dispatchers: +There are 4 different types of message dispatchers: -* Thread-based +* Thread-based (Pinned) * Event-based * Priority event-based -* Work-stealing event-based +* Work-sharing (Balancing) -Factory methods for all of these, including global versions of some of them, are in the 'akka.dispatch.Dispatchers' object. +It is recommended to define the dispatcher in :ref:`configuration` to allow for tuning for different environments. + +Example of a custom event-based dispatcher, which can be fetched with ``system.dispatcherFactory().lookup("my-dispatcher")`` +as in the example above: + +.. includecode:: ../scala/code/akka/docs/dispatcher/DispatcherDocSpec.scala#my-dispatcher-config + +Default values are taken from ``default-dispatcher``, i.e. all options doesn't need to be defined. + +.. warning:: + + Factory methods for creating dispatchers programmatically are available in ``akka.dispatch.Dispatchers``, i.e. + ``dispatcherFactory`` of the ``ActorSystem``. These methods will probably be changed or removed before + 2.0 final release, because dispatchers need to be defined by configuration to work in a clustered setup. Let's now walk through the different dispatchers in more detail. Thread-based ^^^^^^^^^^^^ -The 'PinnedDispatcher' binds a dedicated OS thread to each specific Actor. The messages are posted to a 'LinkedBlockingQueue' which feeds the messages to the dispatcher one by one. A 'PinnedDispatcher' cannot be shared between actors. This dispatcher has worse performance and scalability than the event-based dispatcher but works great for creating "daemon" Actors that consumes a low frequency of messages and are allowed to go off and do their own thing for a longer period of time. 
Another advantage with this dispatcher is that Actors do not block threads for each other. +The ``PinnedDispatcher`` binds a dedicated OS thread to each specific Actor. The messages are posted to a +`LinkedBlockingQueue `_ +which feeds the messages to the dispatcher one by one. A ``PinnedDispatcher`` cannot be shared between actors. This dispatcher +has worse performance and scalability than the event-based dispatcher but works great for creating "daemon" Actors that consumes +a low frequency of messages and are allowed to go off and do their own thing for a longer period of time. Another advantage with +this dispatcher is that Actors do not block threads for each other. -.. code-block:: java +The ``PinnedDispatcher`` can't be configured, but is created and associated with an actor like this: - Dispatcher dispatcher = Dispatchers.newPinnedDispatcher(actorRef); - -It would normally by used from within the actor like this: - -.. code-block:: java - - class MyActor extends UntypedActor { - public MyActor() { - getContext().setDispatcher(Dispatchers.newPinnedDispatcher(getContext())); - } - ... - } +.. includecode:: code/akka/docs/dispatcher/DispatcherDocTestBase.java#defining-pinned-dispatcher Event-based ^^^^^^^^^^^ -The 'Dispatcher' binds a set of Actors to a thread pool backed up by a 'BlockingQueue'. This dispatcher is highly configurable and supports a fluent configuration API to configure the 'BlockingQueue' (type of queue, max items etc.) as well as the thread pool. +The event-based ``Dispatcher`` binds a set of Actors to a thread pool backed up by a +`BlockingQueue `_. This dispatcher is highly configurable +and supports a fluent configuration API to configure the ``BlockingQueue`` (type of queue, max items etc.) as well as the thread pool. -The event-driven dispatchers **must be shared** between multiple Typed Actors and/or Actors. One best practice is to let each top-level Actor, e.g. 
the Actors you define in the declarative supervisor config, to get their own dispatcher but reuse the dispatcher for each new Actor that the top-level Actor creates. But you can also share dispatcher between multiple top-level Actors. This is very use-case specific and needs to be tried out on a case by case basis. The important thing is that Akka tries to provide you with the freedom you need to design and implement your system in the most efficient way in regards to performance, throughput and latency. +The event-driven dispatchers **must be shared** between multiple Actors. One best practice is to let each top-level Actor, e.g. +the Actors you create from ``system.actorOf`` to get their own dispatcher but reuse the dispatcher for each new Actor +that the top-level Actor creates. But you can also share dispatcher between multiple top-level Actors. This is very use-case specific +and needs to be tried out on a case by case basis. The important thing is that Akka tries to provide you with the freedom you need to +design and implement your system in the most efficient way in regards to performance, throughput and latency. It comes with many different predefined BlockingQueue configurations: -* Bounded LinkedBlockingQueue -* Unbounded LinkedBlockingQueue -* Bounded ArrayBlockingQueue -* Unbounded ArrayBlockingQueue -* SynchronousQueue +* Bounded `LinkedBlockingQueue `_ +* Unbounded `LinkedBlockingQueue `_ +* Bounded `ArrayBlockingQueue `_ +* Unbounded `ArrayBlockingQueue `_ +* `SynchronousQueue `_ -You can also set the rejection policy that should be used, e.g. what should be done if the dispatcher (e.g. the Actor) can't keep up and the mailbox is growing up to the limit defined. You can choose between four different rejection policies: +When using a bounded queue and it has grown up to limit defined the message processing will run in the caller's +thread as a way to slow him down and balance producer/consumer. 
-* java.util.concurrent.ThreadPoolExecutor.CallerRuns - will run the message processing in the caller's thread as a way to slow him down and balance producer/consumer -* java.util.concurrent.ThreadPoolExecutor.AbortPolicy - rejected messages by throwing a 'RejectedExecutionException' -* java.util.concurrent.ThreadPoolExecutor.DiscardPolicy - discards the message (throws it away) -* java.util.concurrent.ThreadPoolExecutor.DiscardOldestPolicy - discards the oldest message in the mailbox (throws it away) +Here is an example of a bounded mailbox: -You cane read more about these policies `here `_. - -Here is an example: - -.. code-block:: java - - import akka.actor.Actor; - import akka.dispatch.Dispatchers; - import java.util.concurrent.ThreadPoolExecutor.CallerRunsPolicy; - - class MyActor extends UntypedActor { - public MyActor() { - getContext().setDispatcher(Dispatchers.newDispatcher(name) - .withNewThreadPoolWithLinkedBlockingQueueWithCapacity(100) - .setCorePoolSize(16) - .setMaxPoolSize(128) - .setKeepAliveTimeInMillis(60000) - .setRejectionPolicy(new CallerRunsPolicy()) - .build()); - } - ... - } +.. includecode:: ../scala/code/akka/docs/dispatcher/DispatcherDocSpec.scala#my-bounded-config The standard :class:`Dispatcher` allows you to define the ``throughput`` it should have, as shown above. This defines the number of messages for a specific Actor the dispatcher should process in one single sweep; in other words, the -dispatcher will bunch up to ``throughput`` message invocations together when +dispatcher will batch process up to ``throughput`` messages together when having elected an actor to run. Setting this to a higher number will increase throughput but lower fairness, and vice versa. If you don't specify it explicitly then it uses the value (5) defined for ``default-dispatcher`` in the :ref:`configuration`. -Browse the :ref:`scaladoc` or look at the code for all the options available. 
+Browse the `ScalaDoc `_ or look at the code for all the options available. Priority event-based ^^^^^^^^^^^^^^^^^^^^ -Sometimes it's useful to be able to specify priority order of messages, that is done by using Dispatcher and supply either -an UnboundedPriorityMailbox or BoundedPriorityMailbox with a java.util.Comparator[MessageInvocation] or use a akka.dispatch.PriorityGenerator (recommended): +Sometimes it's useful to be able to specify priority order of messages, that is done by using Dispatcher and supply +an UnboundedPriorityMailbox or BoundedPriorityMailbox with a ``java.util.Comparator[Envelope]`` or use a +``akka.dispatch.PriorityGenerator`` (recommended). -Creating a Dispatcher with a priority mailbox using PriorityGenerator: +Creating a Dispatcher using PriorityGenerator: -.. code-block:: java +.. includecode:: code/akka/docs/dispatcher/DispatcherDocTestBase.java + :include: imports-prio,prio-dispatcher - package some.pkg; - import akka.actor.*; - import akka.dispatch.*; - - public class Main { - // A simple Actor that just prints the messages it processes - public static class MyActor extends UntypedActor { - public MyActor() { - self.tell("lowpriority"); - getSelf().tell("lowpriority"); - getSelf().tell("highpriority"); - getSelf().tell("pigdog"); - getSelf().tell("pigdog2"); - getSelf().tell("pigdog3"); - getSelf().tell("highpriority"); - } - public void onReceive(Object message) throws Exception { - System.out.println(message); - } - } - - public static void main(String[] args) { - // Create a new PriorityGenerator, lower prio means more important - PriorityGenerator gen = new PriorityGenerator() { - public int gen(Object message) { - if (message.equals("highpriority")) return 0; // "highpriority" messages should be treated first if possible - else if (message.equals("lowpriority")) return 100; // "lowpriority" messages should be treated last if possible - else return 50; // We default to 50 - } - }; - // We create an instance of the actor that will 
print out the messages it processes - // We create a new Priority dispatcher and seed it with the priority generator - ActorRef ref = Actors.actorOf(new Props(MyActor.class).withDispatcher(new Dispatcher("foo", 5, new UnboundedPriorityMailbox(gen)))); - - } - } - -Prints: - -highpriority -highpriority -pigdog -pigdog2 -pigdog3 -lowpriority -lowpriority - -Work-stealing event-based +Work-sharing event-based ^^^^^^^^^^^^^^^^^^^^^^^^^ -The 'BalancingDispatcher' is a variation of the 'Dispatcher' in which Actors of the same type can be set up to share this dispatcher and during execution time the different actors will steal messages from other actors if they have less messages to process. This can be a great way to improve throughput at the cost of a little higher latency. +The ``BalancingDispatcher`` is a variation of the ``Dispatcher`` in which Actors of the same type can be set up to +share this dispatcher and during execution time the different actors will steal messages from other actors if they +have less messages to process. +Although the technique used in this implementation is commonly known as "work stealing", the actual implementation is probably +best described as "work donating" because the actor of which work is being stolen takes the initiative. +This can be a great way to improve throughput at the cost of a little higher latency. -Normally the way you use it is to define a static field to hold the dispatcher and then set in in the Actor explicitly. - -.. code-block:: java - - class MyActor extends UntypedActor { - public static MessageDispatcher dispatcher = Dispatchers.newBalancingDispatcher(name).build(); - - public MyActor() { - getContext().setDispatcher(dispatcher); - } - ... - } +.. 
includecode:: ../scala/code/akka/docs/dispatcher/DispatcherDocSpec.scala#my-balancing-config Here is an article with some more information: `Load Balancing Actors with Work Stealing Techniques `_ Here is another article discussing this particular dispatcher: `Flexible load balancing with Akka in Scala `_ @@ -217,14 +145,19 @@ Making the Actor mailbox bounded Global configuration ^^^^^^^^^^^^^^^^^^^^ -You can make the Actor mailbox bounded by a capacity in two ways. Either you define it in the configuration file under 'default-dispatcher'. This will set it globally. +You can make the Actor mailbox bounded by a capacity in two ways. Either you define it in the :ref:`configuration` file under +``default-dispatcher``. This will set it globally as default for the DefaultDispatcher and for other configured dispatchers, +if not specified otherwise. .. code-block:: ruby - actor { - default-dispatcher { - mailbox-capacity = -1 # If negative (or zero) then an unbounded mailbox is used (default) - # If positive then a bounded mailbox is used and the capacity is set to the number specified + akka { + actor { + default-dispatcher { + # If negative (or zero) then an unbounded mailbox is used (default) + # If positive then a bounded mailbox is used and the capacity is set to the number specified + task-queue-size = 1000 + } } } @@ -233,33 +166,11 @@ Per-instance based configuration You can also do it on a specific dispatcher instance. -For the 'Dispatcher' and the 'ExecutorBasedWorkStealingDispatcher' you can do it through their constructor +.. includecode:: ../scala/code/akka/docs/dispatcher/DispatcherDocSpec.scala#my-bounded-config -.. 
code-block:: java - class MyActor extends UntypedActor { - public MyActor() { - int capacity = 100; - Duration pushTimeout = new FiniteDuration(10, TimeUnit.SECONDS); - MailboxType mailboxCapacity = new BoundedMailbox(false, capacity, pushTimeout); - MessageDispatcher dispatcher = - Dispatchers.newDispatcher(name, throughput, mailboxCapacity).build(); - getContext().setDispatcher(dispatcher); - } - ... - } - -For the 'PinnedDispatcher', it is non-shareable between actors, and associates a dedicated Thread with the actor. -Making it bounded (by specifying a capacity) is optional, but if you do, you need to provide a pushTimeout (default is 10 seconds). When trying to send a message to the Actor it will throw a MessageQueueAppendFailedException("BlockingMessageTransferQueue transfer timed out") if the message cannot be added to the mailbox within the time specified by the pushTimeout. - -.. code-block:: java - - class MyActor extends UntypedActor { - public MyActor() { - int mailboxCapacity = 100; - Duration pushTimeout = new FiniteDuration(10, TimeUnit.SECONDS); - getContext().setDispatcher(Dispatchers.newPinnedDispatcher(getContext(), mailboxCapacity, pushTimeout)); - } - ... - } +For the ``PinnedDispatcher``, it is non-shareable between actors, and associates a dedicated Thread with the actor. +Making it bounded (by specifying a capacity) is optional, but if you do, you need to provide a pushTimeout (default is 10 seconds). +When trying to send a message to the Actor it will throw a MessageQueueAppendFailedException("BlockingMessageTransferQueue transfer timed out") +if the message cannot be added to the mailbox within the time specified by the pushTimeout. 
diff --git a/akka-docs/java/index.rst b/akka-docs/java/index.rst index b7db493c09..c04e5bc259 100644 --- a/akka-docs/java/index.rst +++ b/akka-docs/java/index.rst @@ -9,9 +9,9 @@ Java API untyped-actors typed-actors logging + scheduler futures dataflow - stm transactors fault-tolerance dispatchers diff --git a/akka-docs/java/logging.rst b/akka-docs/java/logging.rst index c9ad9256fc..20920d940b 100644 --- a/akka-docs/java/logging.rst +++ b/akka-docs/java/logging.rst @@ -25,14 +25,14 @@ The source object is translated to a String according to the following rules: * in case of a class an approximation of its simpleName * and in all other cases the simpleName of its class -The log message may contain argument placeholders ``{}``, which will be substituted if the log level +The log message may contain argument placeholders ``{}``, which will be substituted if the log level is enabled. Event Handler ============= -Logging is performed asynchronously through an event bus. You can configure which event handlers that should -subscribe to the logging events. That is done using the 'event-handlers' element in the :ref:`configuration`. +Logging is performed asynchronously through an event bus. You can configure which event handlers that should +subscribe to the logging events. That is done using the 'event-handlers' element in the :ref:`configuration`. Here you can also define the log level. .. code-block:: ruby @@ -40,16 +40,17 @@ Here you can also define the log level. akka { # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT) event-handlers = ["akka.event.Logging$DefaultLogger"] - loglevel = "DEBUG" # Options: ERROR, WARNING, INFO, DEBUG + # Options: ERROR, WARNING, INFO, DEBUG + loglevel = "DEBUG" } -The default one logs to STDOUT and is registered by default. It is not intended to be used for production. There is also an :ref:`slf4j-java` +The default one logs to STDOUT and is registered by default. 
It is not intended to be used for production. There is also an :ref:`slf4j-java` event handler available in the 'akka-slf4j' module. Example of creating a listener: .. includecode:: code/akka/docs/event/LoggingDocTestBase.java - :include: imports,imports-listener,my-event-listener + :include: imports,imports-listener,my-event-listener .. _slf4j-java: @@ -57,7 +58,7 @@ Example of creating a listener: SLF4J ===== -Akka provides an event handler for `SL4FJ `_. This module is available in the 'akka-slf4j.jar'. +Akka provides an event handler for `SL4FJ `_. This module is available in the 'akka-slf4j.jar'. It has one single dependency; the slf4j-api jar. In runtime you also need a SLF4J backend, we recommend `Logback `_: .. code-block:: xml @@ -69,10 +70,10 @@ It has one single dependency; the slf4j-api jar. In runtime you also need a SLF4 runtime -You need to enable the Slf4jEventHandler in the 'event-handlers' element in -the :ref:`configuration`. Here you can also define the log level of the event bus. +You need to enable the Slf4jEventHandler in the 'event-handlers' element in +the :ref:`configuration`. Here you can also define the log level of the event bus. More fine grained log levels can be defined in the configuration of the SLF4J backend -(e.g. logback.xml). The String representation of the source object that is used when +(e.g. logback.xml). The String representation of the source object that is used when creating the ``LoggingAdapter`` correspond to the name of the SL4FJ logger. .. code-block:: ruby @@ -89,9 +90,9 @@ Since the logging is done asynchronously the thread in which the logging was per Mapped Diagnostic Context (MDC) with attribute name ``sourceThread``. 
With Logback the thread name is available with ``%X{sourceThread}`` specifier within the pattern layout configuration:: - - - %date{ISO8601} %-5level %logger{36} %X{sourceThread} - %msg%n - - + + + %date{ISO8601} %-5level %logger{36} %X{sourceThread} - %msg%n + + diff --git a/akka-docs/java/scheduler.rst b/akka-docs/java/scheduler.rst new file mode 100644 index 0000000000..3dde1345a6 --- /dev/null +++ b/akka-docs/java/scheduler.rst @@ -0,0 +1,53 @@ + +.. _scheduler-java: + +################## + Scheduler (Java) +################## + +Sometimes the need for making things happen in the future arises, and where do you go look then? +Look no further than ``ActorSystem``! There you find the :meth:`scheduler` method that returns an instance +of akka.actor.Scheduler, this instance is unique per ActorSystem and is used internally for scheduling things +to happen at specific points in time. Please note that the scheduled tasks are executed by the default +``MessageDispatcher`` of the ``ActorSystem``. + +You can schedule sending of messages to actors and execution of tasks (functions or Runnable). +You will get a ``Cancellable`` back that you can call :meth:`cancel` on to cancel the execution of the +scheduled operation. + +Some examples +------------- + +.. includecode:: code/akka/docs/actor/SchedulerDocTestBase.java + :include: imports1,schedule-one-off-message + +.. includecode:: code/akka/docs/actor/SchedulerDocTestBase.java + :include: schedule-one-off-thunk + +.. includecode:: code/akka/docs/actor/SchedulerDocTestBase.java + :include: imports1,imports2,schedule-recurring + +From ``akka.actor.ActorSystem`` +------------------------------- + +.. includecode:: ../../akka-actor/src/main/scala/akka/actor/ActorSystem.scala + :include: scheduler + + +The Scheduler interface +----------------------- + +.. 
includecode:: ../../akka-actor/src/main/scala/akka/actor/Scheduler.scala + :include: scheduler + +The Cancellable interface +------------------------- + +This allows you to ``cancel`` something that has been scheduled for execution. + +.. warning:: + This does not abort the execution of the task, if it had already been started. + +.. includecode:: ../../akka-actor/src/main/scala/akka/actor/Scheduler.scala + :include: cancellable + diff --git a/akka-docs/java/transactors.rst b/akka-docs/java/transactors.rst new file mode 100644 index 0000000000..994ad00cb5 --- /dev/null +++ b/akka-docs/java/transactors.rst @@ -0,0 +1,6 @@ +.. _transactors-java: + +Transactors (Java) +================== + +The Akka Transactors module has not been migrated to Akka 2.0-SNAPSHOT yet. \ No newline at end of file diff --git a/akka-docs/java/untyped-actors.rst b/akka-docs/java/untyped-actors.rst index 4324aadf19..8ad7a7f7b2 100644 --- a/akka-docs/java/untyped-actors.rst +++ b/akka-docs/java/untyped-actors.rst @@ -42,7 +42,7 @@ Here is an example: Creating Actors with default constructor ---------------------------------------- -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java :include: imports,system-actorOf The call to :meth:`actorOf` returns an instance of ``ActorRef``. This is a handle to @@ -62,7 +62,7 @@ a top level actor, that is supervised by the system (internal guardian actor). .. includecode:: code/akka/docs/actor/FirstUntypedActor.java#context-actorOf Actors are automatically started asynchronously when created. -When you create the ``UntypedActor`` then it will automatically call the ``preStart`` +When you create the ``UntypedActor`` then it will automatically call the ``preStart`` callback method on the ``UntypedActor`` class. This is an excellent place to add initialization code for the actor. @@ -76,26 +76,26 @@ add initialization code for the actor. 
Creating Actors with non-default constructor -------------------------------------------- -If your UntypedActor has a constructor that takes parameters then you can't create it using 'actorOf(clazz)'. -Instead you can use a variant of ``actorOf`` that takes an instance of an 'UntypedActorFactory' -in which you can create the Actor in any way you like. If you use this method then you to make sure that -no one can get a reference to the actor instance. If they can get a reference it then they can -touch state directly in bypass the whole actor dispatching mechanism and create race conditions +If your UntypedActor has a constructor that takes parameters then you can't create it using 'actorOf(clazz)'. +Instead you can use a variant of ``actorOf`` that takes an instance of an 'UntypedActorFactory' +in which you can create the Actor in any way you like. If you use this method then you to make sure that +no one can get a reference to the actor instance. If they can get a reference it then they can +touch state directly in bypass the whole actor dispatching mechanism and create race conditions which can lead to corrupt data. Here is an example: -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java#creating-constructor +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#creating-constructor This way of creating the Actor is also great for integrating with Dependency Injection (DI) frameworks like Guice or Spring. Creating Actors with Props -------------------------- -``Props`` is a configuration object to specify additional things for the actor to +``Props`` is a configuration object to specify additional things for the actor to be created, such as the ``MessageDispatcher``. -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java#creating-props +.. 
includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#creating-props UntypedActor API @@ -119,7 +119,7 @@ In addition, it offers: The remaining visible methods are user-overridable life-cycle hooks which are described in the following: -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java#lifecycle-callbacks +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#lifecycle-callbacks The implementations shown above are the defaults provided by the :class:`UntypedActor` class. @@ -152,7 +152,7 @@ processing a message. This restart involves the hooks mentioned above: message, e.g. when a supervisor does not trap the exception and is restarted in turn by its supervisor. This method is the best place for cleaning up, preparing hand-over to the fresh actor instance, etc. - By default it calls :meth:`postStop`. + By default it stops all children and calls :meth:`postStop`. 2. The initial factory from the ``actorOf`` call is used to produce the fresh instance. 3. The new actor’s :meth:`postRestart` method is invoked with the exception @@ -162,7 +162,7 @@ processing a message. This restart involves the hooks mentioned above: An actor restart replaces only the actual actor object; the contents of the mailbox and the hotswap stack are unaffected by the restart, so processing of -messages will resume after the :meth:`postRestart` hook returns. The message +messages will resume after the :meth:`postRestart` hook returns. The message that triggered the exception will not be received again. Any message sent to an actor while it is being restarted will be queued to its mailbox as usual. @@ -172,9 +172,9 @@ Stop Hook After stopping an actor, its :meth:`postStop` hook is called, which may be used e.g. for deregistering this actor from other services. This hook is guaranteed -to run after message queuing has been disabled for this actor, i.e. messages -sent to a stopped actor will be redirected to the :obj:`deadLetters` of the -:obj:`ActorSystem`. 
+to run after message queuing has been disabled for this actor, i.e. messages +sent to a stopped actor will be redirected to the :obj:`deadLetters` of the +:obj:`ActorSystem`. Identifying Actors @@ -188,7 +188,7 @@ Messages and immutability **IMPORTANT**: Messages can be any kind of object but have to be immutable. Akka can’t enforce immutability (yet) so this has to be by -convention. +convention. Here is an example of an immutable message: @@ -207,8 +207,8 @@ Messages are sent to an Actor through one of the following methods. Message ordering is guaranteed on a per-sender basis. -In all these methods you have the option of passing along your own ``ActorRef``. -Make it a practice of doing so because it will allow the receiver actors to be able to respond +In all these methods you have the option of passing along your own ``ActorRef``. +Make it a practice of doing so because it will allow the receiver actors to be able to respond to your message, since the sender reference is sent along with the message. Tell: Fire-forget @@ -229,7 +229,7 @@ to reply to the original sender, by using ``getSender().tell(replyMsg)``. actor.tell("Hello", getSelf()); -If invoked without the sender parameter the sender will be +If invoked without the sender parameter the sender will be :obj:`deadLetters` actor reference in the target actor. Ask: Send-And-Receive-Future @@ -244,13 +244,13 @@ will immediately return a :class:`Future`: Future future = actorRef.ask("Hello", timeoutMillis); The receiving actor should reply to this message, which will complete the -future with the reply message as value; ``getSender.tell(result)``. +future with the reply message as value; ``getSender.tell(result)``. -To complete the future with an exception you need send a Failure message to the sender. -This is not done automatically when an actor throws an exception while processing a -message. +To complete the future with an exception you need send a Failure message to the sender. 
+This is not done automatically when an actor throws an exception while processing a +message. -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java#reply-exception +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java#reply-exception If the actor does not complete the future, it will expire after the timeout period, specified as parameter to the ``ask`` method. @@ -258,16 +258,16 @@ specified as parameter to the ``ask`` method. See :ref:`futures-java` for more information on how to await or query a future. -The ``onComplete``, ``onResult``, or ``onTimeout`` methods of the ``Future`` can be -used to register a callback to get a notification when the Future completes. +The ``onComplete``, ``onResult``, or ``onTimeout`` methods of the ``Future`` can be +used to register a callback to get a notification when the Future completes. Gives you a way to avoid blocking. .. warning:: When using future callbacks, inside actors you need to carefully avoid closing over - the containing actor’s reference, i.e. do not call methods or access mutable state - on the enclosing actor from within the callback. This would break the actor - encapsulation and may introduce synchronization bugs and race conditions because + the containing actor’s reference, i.e. do not call methods or access mutable state + on the enclosing actor from within the callback. This would break the actor + encapsulation and may introduce synchronization bugs and race conditions because the callback will be scheduled concurrently to the enclosing actor. Unfortunately there is not yet a way to detect these illegal accesses at compile time. See also: :ref:`jmm-shared-state` @@ -278,7 +278,7 @@ even if that entails waiting for it (but keep in mind that waiting inside an actor is prone to dead-locks, e.g. if obtaining the result depends on processing another message on this actor). -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java +.. 
includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java :include: import-future,using-ask Forward message @@ -297,7 +297,7 @@ You need to pass along your context variable as well. Receive messages ================ -When an actor receives a message it is passed into the ``onReceive`` method, this is +When an actor receives a message it is passed into the ``onReceive`` method, this is an abstract method on the ``UntypedActor`` base class that needs to be defined. Here is an example: @@ -340,17 +340,15 @@ message. Stopping actors =============== -Actors are stopped by invoking the ``stop`` method of the ``ActorRef``. -The actual termination of the actor is performed asynchronously, i.e. -``stop`` may return before the actor is stopped. +Actors are stopped by invoking the :meth:`stop` method of a ``ActorRefFactory``, +i.e. ``ActorContext`` or ``ActorSystem``. Typically the context is used for stopping +child actors and the system for stopping top level actors. The actual termination of +the actor is performed asynchronously, i.e. :meth:`stop` may return before the actor is +stopped. -.. code-block:: java - - actor.stop(); - -Processing of the current message, if any, will continue before the actor is stopped, +Processing of the current message, if any, will continue before the actor is stopped, but additional messages in the mailbox will not be processed. By default these -messages are sent to the :obj:`deadLetters` of the :obj:`ActorSystem`, but that +messages are sent to the :obj:`deadLetters` of the :obj:`ActorSystem`, but that depends on the mailbox implementation. When stop is called then a call to the ``def postStop`` callback method will @@ -365,7 +363,7 @@ take place. The ``Actor`` can use this callback to implement shutdown behavior. All Actors are stopped when the ``ActorSystem`` is stopped. Supervised actors are stopped when the supervisor is stopped, i.e. children are stopped -when parent is stopped. +when parent is stopped. 
PoisonPill @@ -381,7 +379,7 @@ If the ``PoisonPill`` was sent with ``ask``, the ``Future`` will be completed wi Use it like this: -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java :include: import-actors,poison-pill .. _UntypedActor.HotSwap: @@ -402,10 +400,10 @@ The hotswapped code is kept in a Stack which can be pushed and popped. To hotswap the Actor using ``getContext().become``: -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java :include: import-procedure,hot-swap-actor -The ``become`` method is useful for many different things, such as to implement +The ``become`` method is useful for many different things, such as to implement a Finite State Machine (FSM). Here is another little cute example of ``become`` and ``unbecome`` in action: @@ -432,7 +430,7 @@ through regular supervisor semantics. Use it like this: -.. includecode:: code/akka/docs/actor/UntypedActorTestBase.java +.. includecode:: code/akka/docs/actor/UntypedActorDocTestBase.java :include: import-actors,kill Actors and exceptions @@ -462,9 +460,9 @@ messages on that mailbox, will be there as well. What happens to the actor ------------------------- -If an exception is thrown, the actor instance is discarded and a new instance is +If an exception is thrown, the actor instance is discarded and a new instance is created. This new instance will now be used in the actor references to this actor -(so this is done invisible to the developer). Note that this means that current -state of the failing actor instance is lost if you don't store and restore it in -``preRestart`` and ``postRestart`` callbacks. +(so this is done invisible to the developer). Note that this means that current +state of the failing actor instance is lost if you don't store and restore it in +``preRestart`` and ``postRestart`` callbacks. 
diff --git a/akka-docs/modules/camel.rst b/akka-docs/modules/camel.rst index 4aa988d609..96d1ffdae7 100644 --- a/akka-docs/modules/camel.rst +++ b/akka-docs/modules/camel.rst @@ -5,2899 +5,4 @@ Camel ####### -For an introduction to akka-camel, see also the `Appendix E - Akka and Camel`_ -(pdf) of the book `Camel in Action`_. - -.. _Appendix E - Akka and Camel: http://www.manning.com/ibsen/appEsample.pdf -.. _Camel in Action: http://www.manning.com/ibsen/ - -Contents: - -.. contents:: :local: - -Other, more advanced external articles are: - -* `Akka Consumer Actors: New Features and Best Practices `_ -* `Akka Producer Actors: New Features and Best Practices `_ - - -Introduction -============ - -The akka-camel module allows actors, untyped actors, and typed actors to receive -and send messages over a great variety of protocols and APIs. This section gives -a brief overview of the general ideas behind the akka-camel module, the -remaining sections go into the details. In addition to the native Scala and Java -actor API, actors can now exchange messages with other systems over large number -of protocols and APIs such as HTTP, SOAP, TCP, FTP, SMTP or JMS, to mention a -few. At the moment, approximately 80 protocols and APIs are supported. - -The akka-camel module is based on `Apache Camel`_, a powerful and leight-weight -integration framework for the JVM. For an introduction to Apache Camel you may -want to read this `Apache Camel article`_. Camel comes with a -large number of `components`_ that provide bindings to different protocols and -APIs. The `camel-extra`_ project provides further components. - -.. _Apache Camel: http://camel.apache.org/ -.. _Apache Camel article: http://architects.dzone.com/articles/apache-camel-integration -.. _components: http://camel.apache.org/components.html -.. _camel-extra: http://code.google.com/p/camel-extra/ - -Usage of Camel's integration components in Akka is essentially a -one-liner. Here's an example. - -.. 
code-block:: scala - - import akka.actor.Actor - import akka.actor.Actor._ - import akka.camel.{Message, Consumer} - - class MyActor extends Actor with Consumer { - def endpointUri = "mina:tcp://localhost:6200?textline=true" - - def receive = { - case msg: Message => { /* ... */} - case _ => { /* ... */} - } - } - - // start and expose actor via tcp - val myActor = actorOf(Props[MyActor]) - -The above example exposes an actor over a tcp endpoint on port 6200 via Apache -Camel's `Mina component`_. The actor implements the endpointUri method to define -an endpoint from which it can receive messages. After starting the actor, tcp -clients can immediately send messages to and receive responses from that -actor. If the message exchange should go over HTTP (via Camel's `Jetty -component`_), only the actor's endpointUri method must be changed. - -.. _Mina component: http://camel.apache.org/mina.html -.. _Jetty component: http://camel.apache.org/jetty.html - -.. code-block:: scala - - class MyActor extends Actor with Consumer { - def endpointUri = "jetty:http://localhost:8877/example" - - def receive = { - case msg: Message => { /* ... */} - case _ => { /* ... */} - } - } - -Actors can also trigger message exchanges with external systems i.e. produce to -Camel endpoints. - -.. code-block:: scala - - import akka.actor.Actor - import akka.camel.{Producer, Oneway} - - class MyActor extends Actor with Producer with Oneway { - def endpointUri = "jms:queue:example" - } - -In the above example, any message sent to this actor will be added (produced) to -the example JMS queue. Producer actors may choose from the same set of Camel -components as Consumer actors do. - -The number of Camel components is constantly increasing. The akka-camel module -can support these in a plug-and-play manner. Just add them to your application's -classpath, define a component-specific endpoint URI and use it to exchange -messages over the component-specific protocols or APIs. 
This is possible because -Camel components bind protocol-specific message formats to a Camel-specific -`normalized message format`__. The normalized message format hides -protocol-specific details from Akka and makes it therefore very easy to support -a large number of protocols through a uniform Camel component interface. The -akka-camel module further converts mutable Camel messages into `immutable -representations`__ which are used by Consumer and Producer actors for pattern -matching, transformation, serialization or storage, for example. - -__ https://svn.apache.org/repos/asf/camel/trunk/camel-core/src/main/java/org/apache/camel/Message.java -__ http://github.com/jboner/akka/blob/v0.8/akka-camel/src/main/scala/akka/Message.scala#L17 - - -Dependencies -============ - -Akka's Camel Integration consists of two modules - -* akka-camel - this module depends on akka-actor and camel-core (+ transitive - dependencies) and implements the Camel integration for (untyped) actors - -* akka-camel-typed - this module depends on akka-typed-actor and akka-camel (+ - transitive dependencies) and implements the Camel integration for typed actors - -The akka-camel-typed module is optional. To have both untyped and typed actors -working with Camel, add the following dependencies to your SBT project -definition. - -.. code-block:: scala - - import sbt._ - - class Project(info: ProjectInfo) extends DefaultProject(info) with AkkaProject { - // ... - val akkaCamel = akkaModule("camel") - val akkaCamelTyped = akkaModule("camel-typed") // optional typed actor support - // ... - } - - -.. _camel-consume-messages: - -Consume messages -================ - -Actors (untyped) ----------------- - -For actors (Scala) to receive messages, they must mixin the `Consumer`_ -trait. For example, the following actor class (Consumer1) implements the -endpointUri method, which is declared in the Consumer trait, in order to receive -messages from the ``file:data/input/actor`` Camel endpoint. 
Untyped actors -(Java) need to extend the abstract UntypedConsumerActor class and implement the -getEndpointUri() and onReceive(Object) methods. - -.. _Consumer: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/Consumer.scala - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor - import akka.camel.{Message, Consumer} - - class Consumer1 extends Actor with Consumer { - def endpointUri = "file:data/input/actor" - - def receive = { - case msg: Message => println("received %s" format msg.bodyAs[String]) - } - } - -**Java** - -.. code-block:: java - - import akka.camel.Message; - import akka.camel.UntypedConsumerActor; - - public class Consumer1 extends UntypedConsumerActor { - public String getEndpointUri() { - return "file:data/input/actor"; - } - - public void onReceive(Object message) { - Message msg = (Message)message; - String body = msg.getBodyAs(String.class); - System.out.println(String.format("received %s", body)) - } - } - -Whenever a file is put into the data/input/actor directory, its content is -picked up by the Camel `file component`_ and sent as message to the -actor. Messages consumed by actors from Camel endpoints are of type -`Message`_. These are immutable representations of Camel messages. - -.. _file component: http://camel.apache.org/file2.html -.. _Message: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/Message.scala - -For Message usage examples refer to the unit tests: - -* Message unit tests - `Scala API `_ -* Message unit tests - `Java API `_ - -Here's another example that sets the endpointUri to -``jetty:http://localhost:8877/camel/default``. It causes Camel's `Jetty -component`_ to start an embedded `Jetty`_ server, accepting HTTP connections -from localhost on port 8877. - -.. _Jetty component: http://camel.apache.org/jetty.html -.. _Jetty: http://www.eclipse.org/jetty/ - -**Scala** - -.. 
code-block:: scala - - import akka.actor.Actor - import akka.camel.{Message, Consumer} - - class Consumer2 extends Actor with Consumer { - def endpointUri = "jetty:http://localhost:8877/camel/default" - - def receive = { - case msg: Message => self.reply("Hello %s" format msg.bodyAs[String]) - } - } - -**Java** - -.. code-block:: java - - import akka.camel.Message; - import akka.camel.UntypedConsumerActor; - - public class Consumer2 extends UntypedConsumerActor { - public String getEndpointUri() { - return "jetty:http://localhost:8877/camel/default"; - } - - public void onReceive(Object message) { - Message msg = (Message)message; - String body = msg.getBodyAs(String.class); - getContext().tryReply(String.format("Hello %s", body)); - } - } - -After starting the actor, clients can send messages to that actor by POSTing to -``http://localhost:8877/camel/default``. The actor sends a response by using the -self.reply method (Scala). For returning a message body and headers to the HTTP -client the response type should be `Message`_. For any other response type, a -new Message object is created by akka-camel with the actor response as message -body. - -.. _Message: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/Message.scala - - -Typed actors ------------- - -Typed actors can also receive messages from Camel endpoints. In contrast to -(untyped) actors, which only implement a single receive or onReceive method, a -typed actor may define several (message processing) methods, each of which can -receive messages from a different Camel endpoint. For a typed actor method to be -exposed as Camel endpoint it must be annotated with the `@consume -annotation`_. For example, the following typed consumer actor defines two -methods, foo and bar. - -.. _@consume annotation: http://github.com/jboner/akka/blob/master/akka-camel/src/main/java/akka/camel/consume.java - -**Scala** - -.. 
code-block:: scala - - import org.apache.camel.{Body, Header} - import akka.actor.TypedActor - import akka.camel.consume - - trait TypedConsumer1 { - @consume("file:data/input/foo") - def foo(body: String): Unit - - @consume("jetty:http://localhost:8877/camel/bar") - def bar(@Body body: String, @Header("X-Whatever") header: String): String - } - - class TypedConsumer1Impl extends TypedActor with TypedConsumer1 { - def foo(body: String) = println("Received message: %s" format body) - def bar(body: String, header: String) = "body=%s header=%s" format (body, header) - } - -**Java** - -.. code-block:: java - - import org.apache.camel.Body; - import org.apache.camel.Header; - import akka.actor.TypedActor; - import akka.camel.consume; - - public interface TypedConsumer1 { - @consume("file:data/input/foo") - public void foo(String body); - - @consume("jetty:http://localhost:8877/camel/bar") - public String bar(@Body String body, @Header("X-Whatever") String header); - } - - public class TypedConsumer1Impl extends TypedActor implements TypedConsumer1 { - public void foo(String body) { - System.out.println(String.format("Received message: ", body)); - } - - public String bar(String body, String header) { - return String.format("body=%s header=%s", body, header); - } - } - -The foo method can be invoked by placing a file in the data/input/foo -directory. Camel picks up the file from this directory and akka-camel invokes -foo with the file content as argument (converted to a String). Camel -automatically tries to convert messages to appropriate types as defined by the -method parameter(s). The conversion rules are described in detail on the -following pages: - -* `Bean integration `_ -* `Bean binding `_ -* `Parameter binding `_ - -The bar method can be invoked by POSTing a message to -http://localhost:8877/camel/bar. Here, parameter binding annotations are used to -tell Camel how to extract data from the HTTP message. 
The @Body annotation binds -the HTTP request body to the first parameter, the @Header annotation binds the -X-Whatever header to the second parameter. The return value is sent as HTTP -response message body to the client. - -Parameter binding annotations must be placed on the interface, the @consume -annotation can also be placed on the methods in the implementation class. - - -.. _camel-publishing: - -Consumer publishing -------------------- - -Actors (untyped) -^^^^^^^^^^^^^^^^ - -Publishing a consumer actor at its Camel endpoint occurs when the actor is -started. Publication is done asynchronously; setting up an endpoint (more -precisely, the route from that endpoint to the actor) may still be in progress -after the ActorRef method returned. - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor._ - - val actor = actorOf(Props[Consumer1]) // create Consumer actor and activate endpoint in background - -**Java** - -.. code-block:: java - - import static akka.actor.Actors.*; - import akka.actor.ActorRef; - - ActorRef actor = actorOf(new Props(Consumer1.class)); // create Consumer actor and activate endpoint in background - - -Typed actors -^^^^^^^^^^^^ - -Publishing of typed actor methods is done when the typed actor is created with -one of the TypedActor.newInstance(..) methods. Publication is done in the -background here as well i.e. it may still be in progress when -TypedActor.newInstance(..) returns. - -**Scala** - -.. code-block:: scala - - import akka.actor.TypedActor - - // create TypedConsumer1 object and activate endpoint(s) in background - val consumer = TypedActor.newInstance(classOf[TypedConsumer1], classOf[TypedConumer1Impl]) - -**Java** - -.. code-block:: java - - import akka.actor.TypedActor; - - // create TypedConsumer1 object and activate endpoint(s) in background - TypedConsumer1 consumer = TypedActor.newInstance(TypedConsumer1.class, TypedConumer1Impl.class); - - -.. 
_camel-consumers-and-camel-service: - -Consumers and the CamelService ------------------------------- - -Publishing of consumer actors or typed actor methods requires a running -CamelService. The Akka :ref:`microkernel` can start a CamelService automatically -(see :ref:`camel-configuration`). When using Akka in other environments, a -CamelService must be started manually. Applications can do that by calling the -CamelServiceManager.startCamelService method. - -**Scala** - -.. code-block:: scala - - import akka.camel.CamelServiceManager._ - - startCamelService - -**Java** - -.. code-block:: java - - import static akka.camel.CamelServiceManager.*; - - startCamelService(); - -If applications need to wait for a certain number of consumer actors or typed -actor methods to be published they can do so with the -``CamelServiceManager.mandatoryService.awaitEndpointActivation`` method, where -``CamelServiceManager.mandatoryService`` is the current CamelService instance -(or throws an IllegalStateException there's no current CamelService). - -**Scala** - -.. code-block:: scala - - import akka.camel.CamelServiceManager._ - - startCamelService - - // Wait for three conumer endpoints to be activated - mandatoryService.awaitEndpointActivation(3) { - // Start three consumer actors (for example) - // ... - } - - // Communicate with consumer actors via their activated endpoints - // ... - -**Java** - -.. code-block:: java - - import akka.japi.SideEffect; - import static akka.camel.CamelServiceManager.*; - - startCamelService(); - - // Wait for three conumer endpoints to be activated - getMandatoryService().awaitEndpointActivation(3, new SideEffect() { - public void apply() { - // Start three consumer actors (for example) - // ... - } - }); - - // Communicate with consumer actors via their activated endpoints - // ... - -Alternatively, one can also use ``Option[CamelService]`` returned by -``CamelServiceManager.service``. - -**Scala** - -.. 
code-block:: scala - - import akka.camel.CamelServiceManager._ - - startCamelService - - for(s <- service) s.awaitEndpointActivation(3) { - // ... - } - -**Java** - -.. code-block:: java - - import java.util.concurrent.CountDownLatch; - - import akka.camel.CamelService; - import static akka.camel.CamelServiceManager.*; - - startCamelService(); - - for (CamelService s : getService()) s.awaitEndpointActivation(3, new SideEffect() { - public void apply() { - // ... - } - }); - -:ref:`camel-configuration` additionally describes how a CamelContext, that is -managed by a CamelService, can be cutomized before starting the service. When -the CamelService is no longer needed, it should be stopped. - -**Scala** - -.. code-block:: scala - - import akka.camel.CamelServiceManager._ - - stopCamelService - -**Java** - -.. code-block:: java - - import static akka.camel.CamelServiceManager.*; - - stopCamelService(); - - -.. _camel-unpublishing: - -Consumer un-publishing ----------------------- - -Actors (untyped) -^^^^^^^^^^^^^^^^ - -When an actor is stopped, the route from the endpoint to that actor is stopped -as well. For example, stopping an actor that has been previously published at -``http://localhost:8877/camel/test`` will cause a connection failure when trying -to access that endpoint. Stopping the route is done asynchronously; it may be -still in progress after the ``ActorRef.stop`` method returned. - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor._ - - val actor = actorOf(Props[Consumer1]) // create Consumer actor - actor // activate endpoint in background - // ... - actor.stop // deactivate endpoint in background - -**Java** - -.. code-block:: java - - import static akka.actor.Actors.*; - import akka.actor.ActorRef; - - ActorRef actor = actorOf(new Props(Consumer1.class)); // create Consumer actor and activate endpoint in background - // ... 
- actor.stop(); // deactivate endpoint in background - - -Typed actors -^^^^^^^^^^^^ - -When a typed actor is stopped, routes to @consume annotated methods of this -typed actors are stopped as well. Stopping the routes is done asynchronously; it -may be still in progress after the TypedActor.stop method returned. - -**Scala** - -.. code-block:: scala - - import akka.actor.TypedActor - - // create TypedConsumer1 object and activate endpoint(s) in background - val consumer = TypedActor.newInstance(classOf[TypedConsumer1], classOf[TypedConumer1Impl]) - - // deactivate endpoints in background - TypedActor.stop(consumer) - -**Java** - -.. code-block:: java - - import akka.actor.TypedActor; - - // Create typed consumer actor and activate endpoints in background - TypedConsumer1 consumer = TypedActor.newInstance(TypedConsumer1.class, TypedConumer1Impl.class); - - // Deactivate endpoints in background - TypedActor.stop(consumer); - - -.. _camel-acknowledgements: - -Acknowledgements ----------------- - -Actors (untyped) -^^^^^^^^^^^^^^^^ - -With in-out message exchanges, clients usually know that a message exchange is -done when they receive a reply from a consumer actor. The reply message can be a -Message (or any object which is then internally converted to a Message) on -success, and a Failure message on failure. - -With in-only message exchanges, by default, an exchange is done when a message -is added to the consumer actor's mailbox. Any failure or exception that occurs -during processing of that message by the consumer actor cannot be reported back -to the endpoint in this case. To allow consumer actors to positively or -negatively acknowledge the receipt of a message from an in-only message -exchange, they need to override the ``autoack`` (Scala) or ``isAutoack`` (Java) -method to return false. In this case, consumer actors must reply either with a -special Ack message (positive acknowledgement) or a Failure (negative -acknowledgement). - -**Scala** - -.. 
code-block:: scala - - import akka.camel.{Ack, Failure} - // ... other imports omitted - - class Consumer3 extends Actor with Consumer { - override def autoack = false - - def endpointUri = "jms:queue:test" - - def receive = { - // ... - self.reply(Ack) // on success - // ... - self.reply(Failure(...)) // on failure - } - } - -**Java** - -.. code-block:: java - - import akka.camel.Failure - import static akka.camel.Ack.ack; - // ... other imports omitted - - public class Consumer3 extends UntypedConsumerActor { - - public String getEndpointUri() { - return "jms:queue:test"; - } - - public boolean isAutoack() { - return false; - } - - public void onReceive(Object message) { - // ... - getContext().reply(ack()) // on success - // ... - val e: Exception = ... - getContext().reply(new Failure(e)) // on failure - } - } - - -.. _camel-blocking-exchanges: - -Blocking exchanges ------------------- - -By default, message exchanges between a Camel endpoint and a consumer actor are -non-blocking because, internally, the ! (bang) operator is used to commicate -with the actor. The route to the actor does not block waiting for a reply. The -reply is sent asynchronously (see also :ref:`camel-asynchronous-routing`). -Consumer actors however can be configured to make this interaction blocking. - -**Scala** - -.. code-block:: scala - - class ExampleConsumer extends Actor with Consumer { - override def blocking = true - - def endpointUri = ... - def receive = { - // ... - } - } - -**Java** - -.. code-block:: java - - public class ExampleConsumer extends UntypedConsumerActor { - - public boolean isBlocking() { - return true; - } - - public String getEndpointUri() { - // ... - } - - public void onReceive(Object message) { - // ... - } - } - -In this case, the ``!!`` (bangbang) operator is used internally to communicate -with the actor which blocks a thread until the consumer sends a response or -throws an exception within receive. 
Although it may decrease scalability, this -setting can simplify error handling (see `this article`_) or allows timeout -configurations on actor-level (see :ref:`camel-timeout`). - -.. _this article: http://krasserm.blogspot.com/2011/02/akka-consumer-actors-new-features-and.html - - -.. _camel-timeout: - -Consumer timeout ----------------- - -Endpoints that support two-way communications need to wait for a response from -an (untyped) actor or typed actor before returning it to the initiating client. -For some endpoint types, timeout values can be defined in an endpoint-specific -way which is described in the documentation of the individual `Camel -components`_. Another option is to configure timeouts on the level of consumer -actors and typed consumer actors. - -.. _Camel components: http://camel.apache.org/components.html - - -Typed actors -^^^^^^^^^^^^ - -For typed actors, timeout values for method calls that return a result can be -set when the typed actor is created. In the following example, the timeout is -set to 20 seconds (default is 5 seconds). - -**Scala** - -.. code-block:: scala - - import akka.actor.TypedActor - - val consumer = TypedActor.newInstance(classOf[TypedConsumer1], classOf[TypedConumer1Impl], 20000 /* 20 seconds */) - -**Java** - -.. code-block:: java - - import akka.actor.TypedActor; - - TypedConsumer1 consumer = TypedActor.newInstance(TypedConsumer1.class, TypedConumer1Impl.class, 20000 /* 20 seconds */); - - -Actors (untyped) -^^^^^^^^^^^^^^^^ - -Two-way communications between a Camel endpoint and an (untyped) actor are -initiated by sending the request message to the actor with the ``!`` (bang) -operator and the actor replies to the endpoint when the response is ready. In -order to support timeouts on actor-level, endpoints need to send the request -message with the ``!!`` (bangbang) operator for which a timeout value is -applicable. This can be achieved by overriding the Consumer.blocking method to -return true. - -**Scala** - -.. 
code-block:: scala - - class Consumer2 extends Actor with Consumer { - self.timeout = 20000 // timeout set to 20 seconds - - override def blocking = true - - def endpointUri = "direct:example" - - def receive = { - // ... - } - } - -**Java** - -.. code-block:: java - - public class Consumer2 extends UntypedConsumerActor { - - public Consumer2() { - getContext().setTimeout(20000); // timeout set to 20 seconds - } - - public String getEndpointUri() { - return "direct:example"; - } - - public boolean isBlocking() { - return true; - } - - public void onReceive(Object message) { - // ... - } - } - -This is a valid approach for all endpoint types that do not "natively" support -asynchronous two-way message exchanges. For all other endpoint types (like -`Jetty`_ endpoints) is it not recommended to switch to blocking mode but rather -to configure timeouts in an endpoint-specific way (see -also :ref:`camel-asynchronous-routing`). - - -Remote consumers ----------------- - -Actors (untyped) -^^^^^^^^^^^^^^^^ - -Publishing of remote consumer actors is always done on the server side, local -proxies are never published. Hence the CamelService must be started on the -remote node. For example, to publish an (untyped) actor on a remote node at -endpoint URI ``jetty:http://localhost:6644/remote-actor-1``, define the -following consumer actor class. - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor - import akka.annotation.consume - import akka.camel.Consumer - - class RemoteActor1 extends Actor with Consumer { - def endpointUri = "jetty:http://localhost:6644/remote-actor-1" - - protected def receive = { - case msg => self.reply("response from remote actor 1") - } - } - -**Java** - -.. 
code-block:: java - - import akka.camel.UntypedConsumerActor; - - public class RemoteActor1 extends UntypedConsumerActor { - public String getEndpointUri() { - return "jetty:http://localhost:6644/remote-actor-1"; - } - - public void onReceive(Object message) { - getContext().tryReply("response from remote actor 1"); - } - } - -On the remote node, start a `CamelService`_, start a remote server, create the -actor and register it at the remote server. - -.. _CamelService: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/CamelService.scala - -**Scala** - -.. code-block:: scala - - import akka.camel.CamelServiceManager._ - import akka.actor.Actor._ - import akka.actor.ActorRef - - // ... - startCamelService - - val consumer = val consumer = actorOf(Props[RemoteActor1]) - - remote.start("localhost", 7777) - remote.register(consumer) // register and start remote consumer - // ... - -**Java** - -.. code-block:: java - - import akka.camel.CamelServiceManager; - import static akka.actor.Actors.*; - - // ... - CamelServiceManager.startCamelService(); - - ActorRef actor = actorOf(new Props(RemoteActor1.class)); - - remote().start("localhost", 7777); - remote().register(actor); // register and start remote consumer - // ... - -Explicitly starting a CamelService can be omitted when Akka is running in Kernel -mode, for example (see also :ref:`camel-configuration`). - - -Typed actors -^^^^^^^^^^^^ - -Remote typed consumer actors can be registered with one of the -``registerTyped*`` methods on the remote server. The following example registers -the actor with the custom id "123". - -**Scala** - -.. code-block:: scala - - import akka.actor.TypedActor - - // ... - val obj = TypedActor.newRemoteInstance( - classOf[SampleRemoteTypedConsumer], - classOf[SampleRemoteTypedConsumerImpl]) - - remote.registerTypedActor("123", obj) - // ... - -**Java** - -.. 
code-block:: java - - import akka.actor.TypedActor; - - SampleRemoteTypedConsumer obj = (SampleRemoteTypedConsumer)TypedActor.newInstance( - SampleRemoteTypedConsumer.class, - SampleRemoteTypedConsumerImpl.class); - - remote.registerTypedActor("123", obj) - // ... - - -Produce messages -================ - -A minimum pre-requisite for producing messages to Camel endpoints with producer -actors (see below) is an initialized and started CamelContextManager. - -**Scala** - -.. code-block:: scala - - import akka.camel.CamelContextManager - - CamelContextManager.init // optionally takes a CamelContext as argument - CamelContextManager.start // starts the managed CamelContext - -**Java** - -.. code-block:: java - - import akka.camel.CamelContextManager; - - CamelContextManager.init(); // optionally takes a CamelContext as argument - CamelContextManager; // starts the managed CamelContext - -For using producer actors, application may also start a CamelService. This will -not only setup a CamelContextManager behind the scenes but also register -listeners at the actor registry (needed to publish consumer actors). If your -application uses producer actors only and you don't want to have the (very -small) overhead generated by the registry listeners then setting up a -CamelContextManager without starting CamelService is recommended. Otherwise, -just start a CamelService as described for consumer -actors: :ref:`camel-consumers-and-camel-service`. - - -Producer trait --------------- - -Actors (untyped) -^^^^^^^^^^^^^^^^ - -For sending messages to Camel endpoints, actors - -* written in Scala need to mixin the `Producer`_ trait and implement the - endpointUri method. - -* written in Java need to extend the abstract UntypedProducerActor class and - implement the getEndpointUri() method. By extending the UntypedProducerActor - class, untyped actors (Java) inherit the behaviour of the Producer trait. - -.. 
_Producer: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/Producer.scala - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor - import akka.camel.Producer - - class Producer1 extends Actor with Producer { - def endpointUri = "http://localhost:8080/news" - } - -**Java** - -.. code-block:: java - - import akka.camel.UntypedProducerActor; - - public class Producer1 extends UntypedProducerActor { - public String getEndpointUri() { - return "http://localhost:8080/news"; - } - } - -Producer1 inherits a default implementation of the receive method from the -Producer trait. To customize a producer actor's default behavior it is -recommended to override the Producer.receiveBeforeProduce and -Producer.receiveAfterProduce methods. This is explained later in more detail. -Actors should not override the default Producer.receive method. - -Any message sent to a Producer actor (or UntypedProducerActor) will be sent to -the associated Camel endpoint, in the above example to -``http://localhost:8080/news``. Response messages (if supported by the -configured endpoint) will, by default, be returned to the original sender. The -following example uses the ``?`` operator (Scala) to send a message to a -Producer actor and waits for a response. In Java, the sendRequestReply method is -used. - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor._ - import akka.actor.ActorRef - - val producer = actorOf(Props[Producer1]) - val response = (producer ? "akka rocks").get - val body = response.bodyAs[String] - -**Java** - -.. code-block:: java - - import akka.actor.ActorRef; - import static akka.actor.Actors.*; - import akka.camel.Message; - - ActorRef producer = actorOf(new Props(Producer1.class)); - Message response = (Message)producer.sendRequestReply("akka rocks"); - String body = response.getBodyAs(String.class) - -If the message is sent using the ! 
operator (or the tell method in Java) -then the response message is sent back asynchronously to the original sender. In -the following example, a Sender actor sends a message (a String) to a producer -actor using the ! operator and asynchronously receives a response (of type -Message). - -**Scala** - -.. code-block:: scala - - import akka.actor.{Actor, ActorRef} - import akka.camel.Message - - class Sender(producer: ActorRef) extends Actor { - def receive = { - case request: String => producer ! request - case response: Message => { - /* process response ... */ - } - // ... - } - } - -**Java** - -.. code-block:: java - - // TODO - - -.. _camel-custom-processing: - -Custom Processing -^^^^^^^^^^^^^^^^^ - -Instead of replying to the initial sender, producer actors can implement custom -reponse processing by overriding the receiveAfterProduce method (Scala) or -onReceiveAfterProduce method (Java). In the following example, the reponse -message is forwarded to a target actor instead of being replied to the original -sender. - -**Scala** - -.. code-block:: scala - - import akka.actor.{Actor, ActorRef} - import akka.camel.Producer - - class Producer1(target: ActorRef) extends Actor with Producer { - def endpointUri = "http://localhost:8080/news" - - override protected def receiveAfterProduce = { - // do not reply but forward result to target - case msg => target forward msg - } - } - -**Java** - -.. 
code-block:: java - - import akka.actor.ActorRef; - import akka.camel.UntypedProducerActor; - - public class Producer1 extends UntypedProducerActor { - private ActorRef target; - - public Producer1(ActorRef target) { - this.target = target; - } - - public String getEndpointUri() { - return "http://localhost:8080/news"; - } - - @Override - public void onReceiveAfterProduce(Object message) { - target.forward((Message)message, getContext()); - } - } - -To create an untyped actor instance with a constructor argument, a factory is -needed (this should be doable without a factory in upcoming Akka versions). - -.. code-block:: java - - import akka.actor.ActorRef; - import akka.actor.UntypedActorFactory; - import akka.actor.UntypedActor; - - public class Producer1Factory implements UntypedActorFactory { - - private ActorRef target; - - public Producer1Factory(ActorRef target) { - this.target = target; - } - - public UntypedActor create() { - return new Producer1(target); - } - } - -The instanitation is done with the Actors.actorOf method and the factory as -argument. - -.. code-block:: java - - import static akka.actor.Actors.*; - import akka.actor.ActorRef; - - ActorRef target = ... - ActorRef producer = actorOf(Props(new Producer1Factory(target))); - producer; - -Before producing messages to endpoints, producer actors can pre-process them by -overriding the receiveBeforeProduce method (Scala) or onReceiveBeforeProduce -method (Java). - -**Scala** - -.. code-block:: scala - - import akka.actor.{Actor, ActorRef} - import akka.camel.{Message, Producer} - - class Producer1(target: ActorRef) extends Actor with Producer { - def endpointUri = "http://localhost:8080/news" - - override protected def receiveBeforeProduce = { - case msg: Message => { - // do some pre-processing (e.g. add endpoint-specific message headers) - // ... - - // and return the modified message - msg - } - } - } - -**Java** - -.. 
code-block:: java - - import akka.actor.ActorRef; - import akka.camel.Message - import akka.camel.UntypedProducerActor; - - public class Producer1 extends UntypedProducerActor { - private ActorRef target; - - public Producer1(ActorRef target) { - this.target = target; - } - - public String getEndpointUri() { - return "http://localhost:8080/news"; - } - - @Override - public Object onReceiveBeforeProduce(Object message) { - Message msg = (Message)message; - // do some pre-processing (e.g. add endpoint-specific message headers) - // ... - - // and return the modified message - return msg - } - } - - -Producer configuration options -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The interaction of producer actors with Camel endpoints can be configured to be -one-way or two-way (by initiating in-only or in-out message exchanges, -respectively). By default, the producer initiates an in-out message exchange -with the endpoint. For initiating an in-only exchange, producer actors - -* written in Scala either have to override the oneway method to return true -* written in Java have to override the isOneway method to return true. - -**Scala** - -.. code-block:: scala - - import akka.camel.Producer - - class Producer2 extends Actor with Producer { - def endpointUri = "jms:queue:test" - override def oneway = true - } - -**Java** - -.. code-block:: java - - import akka.camel.UntypedProducerActor; - - public class SampleUntypedReplyingProducer extends UntypedProducerActor { - public String getEndpointUri() { - return "jms:queue:test"; - } - - @Override - public boolean isOneway() { - return true; - } - } - -Message correlation -^^^^^^^^^^^^^^^^^^^ - -To correlate request with response messages, applications can set the -Message.MessageExchangeId message header. - -**Scala** - -.. code-block:: scala - - import akka.camel.Message - - producer ! Message("bar", Map(Message.MessageExchangeId -> "123")) - -**Java** - -.. 
code-block:: java - - // TODO - -Responses of type Message or Failure will contain that header as well. When -receiving messages from Camel endpoints this message header is already set (see -:ref:`camel-consume-messages`). - - -Matching responses -^^^^^^^^^^^^^^^^^^ - -The following code snippet shows how to best match responses when sending -messages with the ``?`` operator (Scala) or with the ``ask`` method -(Java). - -**Scala** - -.. code-block:: scala - - val response = (producer ? message).get - - response match { - case Some(Message(body, headers)) => ... - case Some(Failure(exception, headers)) => ... - case _ => ... - } - -**Java** - -.. code-block:: java - - // TODO - - -ProducerTemplate ----------------- - -The `Producer`_ trait (and the abstract UntypedProducerActor class) is a very -convenient way for actors to produce messages to Camel endpoints. (Untyped) -actors and typed actors may also use a Camel `ProducerTemplate`_ for producing -messages to endpoints. For typed actors it's the only way to produce messages to -Camel endpoints. - -At the moment, only the Producer trait fully supports asynchronous in-out -message exchanges with Camel endpoints without allocating a thread for the full -duration of the exchange. For example, when using endpoints that support -asynchronous message exchanges (such as Jetty endpoints that internally use -`Jetty's asynchronous HTTP client`_) then usage of the Producer trait is highly -recommended (see also :ref:`camel-asynchronous-routing`). - -.. _Producer: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/Producer.scala -.. _ProducerTemplate: http://camel.apache.org/maven/camel-2.2.0/camel-core/apidocs/index.html -.. _Jetty's asynchronous HTTP client: http://wiki.eclipse.org/Jetty/Tutorial/HttpClient - - -Actors (untyped) -^^^^^^^^^^^^^^^^ - -A managed ProducerTemplate instance can be obtained via -CamelContextManager.mandatoryTemplate. 
In the following example, an actor uses a -ProducerTemplate to send a one-way message to a ``direct:news`` endpoint. - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor - import akka.camel.CamelContextManager - - class ProducerActor extends Actor { - protected def receive = { - // one-way message exchange with direct:news endpoint - case msg => CamelContextManager.mandatoryTemplate.sendBody("direct:news", msg) - } - } - -**Java** - -.. code-block:: java - - import akka.actor.UntypedActor; - import akka.camel.CamelContextManager; - - public class SampleUntypedActor extends UntypedActor { - public void onReceive(Object msg) { - CamelContextManager.getMandatoryTemplate().sendBody("direct:news", msg); - } - } - -Alternatively, one can also use ``Option[ProducerTemplate]`` returned by -``CamelContextManager.template``. - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor - import akka.camel.CamelContextManager - - class ProducerActor extends Actor { - protected def receive = { - // one-way message exchange with direct:news endpoint - case msg => for(t <- CamelContextManager.template) t.sendBody("direct:news", msg) - } - } - -**Java** - -.. code-block:: java - - import org.apache.camel.ProducerTemplate - - import akka.actor.UntypedActor; - import akka.camel.CamelContextManager; - - public class SampleUntypedActor extends UntypedActor { - public void onReceive(Object msg) { - for (ProducerTemplate t : CamelContextManager.getTemplate()) { - t.sendBody("direct:news", msg); - } - } - } - -For initiating a a two-way message exchange, one of the -``ProducerTemplate.request*`` methods must be used. - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor - import akka.camel.CamelContextManager - - class ProducerActor extends Actor { - protected def receive = { - // two-way message exchange with direct:news endpoint - case msg => self.reply(CamelContextManager.mandatoryTemplate.requestBody("direct:news", msg)) - } - } - -**Java** - -.. 
code-block:: java - - import akka.actor.UntypedActor; - import akka.camel.CamelContextManager; - - public class SampleUntypedActor extends UntypedActor { - public void onReceive(Object msg) { - getContext().tryReply(CamelContextManager.getMandatoryTemplate().requestBody("direct:news", msg)); - } - } - - -Typed actors -^^^^^^^^^^^^ - -Typed Actors get access to a managed ProducerTemplate in the same way, as shown -in the next example. - -**Scala** - -.. code-block:: scala - - // TODO - -**Java** - -.. code-block:: java - - import akka.actor.TypedActor; - import akka.camel.CamelContextManager; - - public class SampleProducerImpl extends TypedActor implements SampleProducer { - public void foo(String msg) { - ProducerTemplate template = CamelContextManager.getMandatoryTemplate(); - template.sendBody("direct:news", msg); - } - } - - -.. _camel-asynchronous-routing: - -Asynchronous routing -==================== - -Since Akka 0.10, in-out message exchanges between endpoints and actors are -designed to be asynchronous. This is the case for both, consumer and producer -actors. - -* A consumer endpoint sends request messages to its consumer actor using the ``!`` - (bang) operator and the actor returns responses with self.reply once they are - ready. The sender reference used for reply is an adapter to Camel's asynchronous - routing engine that implements the ActorRef trait. - -* A producer actor sends request messages to its endpoint using Camel's - asynchronous routing engine. Asynchronous responses are wrapped and added to the - producer actor's mailbox for later processing. By default, response messages are - returned to the initial sender but this can be overridden by Producer - implementations (see also description of the ``receiveAfterProcessing`` method - in :ref:`camel-custom-processing`). 
- -However, asynchronous two-way message exchanges, without allocating a thread for -the full duration of exchange, cannot be generically supported by Camel's -asynchronous routing engine alone. This must be supported by the individual -`Camel components`_ (from which endpoints are created) as well. They must be -able to suspend any work started for request processing (thereby freeing threads -to do other work) and resume processing when the response is ready. This is -currently the case for a `subset of components`_ such as the `Jetty component`_. -All other Camel components can still be used, of course, but they will cause -allocation of a thread for the duration of an in-out message exchange. There's -also a :ref:`camel-async-example` that implements both, an asynchronous -consumer and an asynchronous producer, with the jetty component. - -.. _Camel components: http://camel.apache.org/components.html -.. _subset of components: http://camel.apache.org/asynchronous-routing-engine.html -.. _Jetty component: http://camel.apache.org/jetty.html - - -Fault tolerance -=============== - -Consumer actors and typed actors can be also managed by supervisors. If a -consumer is configured to be restarted upon failure the associated Camel -endpoint is not restarted. It's behaviour during restart is as follows. - -* A one-way (in-only) message exchange will be queued by the consumer and - processed once restart completes. - -* A two-way (in-out) message exchange will wait and either succeed after restart - completes or time-out when the restart duration exceeds - the :ref:`camel-timeout`. - -If a consumer is configured to be shut down upon failure, the associated -endpoint is shut down as well. For details refer to :ref:`camel-unpublishing`. - -For examples, tips and trick how to implement fault-tolerant consumer and -producer actors, take a look at these two articles. 
- -* `Akka Consumer Actors: New Features and Best Practices `_ -* `Akka Producer Actors: New Features and Best Practices `_ - - -.. _camel-configuration: - -CamelService configuration -========================== - -For publishing consumer actors and typed actor methods -(:ref:`camel-publishing`), applications must start a CamelService. When starting -Akka in :ref:`microkernel` mode then a CamelService can be started automatically -when camel is added to the enabled-modules list in :ref:`configuration`, for example: - -.. code-block:: none - - akka { - ... - enabled-modules = ["camel"] # Options: ["remote", "camel", "http"] - ... - } - -Applications that do not use the Akka Kernel, such as standalone applications -for example, need to start a CamelService manually, as explained in the -following subsections.When starting a CamelService manually, settings in -:ref:`configuration` are ignored. - - -Standalone applications ------------------------ - -Standalone application should create and start a CamelService in the following way. - -**Scala** - -.. code-block:: scala - - import akka.camel.CamelServiceManager._ - - startCamelService - -**Java** - -.. code-block:: java - - import static akka.camel.CamelServiceManager.*; - - startCamelService(); - -Internally, a CamelService uses the CamelContextManager singleton to manage a -CamelContext. A CamelContext manages the routes from endpoints to consumer -actors and typed actors. These routes are added and removed at runtime (when -(untyped) consumer actors and typed consumer actors are started and stopped). -Applications may additionally want to add their own custom routes or modify the -CamelContext in some other way. This can be done by initializing the -CamelContextManager manually and making modifications to CamelContext **before** -the CamelService is started. - -**Scala** - -.. 
code-block:: scala - - import org.apache.camel.builder.RouteBuilder - - import akka.camel.CamelContextManager - import akka.camel.CamelServiceManager._ - - CamelContextManager.init - - // add a custom route to the managed CamelContext - CamelContextManager.mandatoryContext.addRoutes(new CustomRouteBuilder) - - startCamelService - - // an application-specific route builder - class CustomRouteBuilder extends RouteBuilder { - def configure { - // ... - } - } - -**Java** - -.. code-block:: java - - import org.apache.camel.builder.RouteBuilder; - - import akka.camel.CamelContextManager; - import static akka.camel.CamelServiceManager.*; - - CamelContextManager.init(); - - // add a custom route to the managed CamelContext - CamelContextManager.getMandatoryContext().addRoutes(new CustomRouteBuilder()); - - startCamelService(); - - // an application-specific route builder - private static class CustomRouteBuilder extends RouteBuilder { - public void configure() { - // ... - } - } - - -Applications may even provide their own CamelContext instance as argument to the -init method call as shown in the following snippet. Here, a DefaultCamelContext -is created using a Spring application context as `registry`_. - -.. _registry: http://camel.apache.org/registry.html - - -**Scala** - -.. code-block:: scala - - import org.apache.camel.impl.DefaultCamelContext - import org.apache.camel.spring.spi.ApplicationContextRegistry - import org.springframework.context.support.ClassPathXmlApplicationContext - - import akka.camel.CamelContextManager - import akka.camel.CamelServiceManager._ - - // create a custom Camel registry backed up by a Spring application context - val context = new ClassPathXmlApplicationContext("/context.xml") - val registry = new ApplicationContextRegistry(context) - - // initialize CamelContextManager with a DefaultCamelContext using the custom registry - CamelContextManager.init(new DefaultCamelContext(registry)) - - // ... - - startCamelService - -**Java** - -.. 
code-block:: java - - import org.apache.camel.impl.DefaultCamelContext - import org.apache.camel.spi.Registry; - import org.apache.camel.spring.spi.ApplicationContextRegistry; - - import org.springframework.context.ApplicationContext; - import org.springframework.context.support.ClassPathXmlApplicationContext; - - import akka.camel.CamelContextManager; - import static akka.camel.CamelServiceManager.*; - - // create a custom Camel registry backed up by a Spring application context - ApplicationContext context = new ClassPathXmlApplicationContext("/context.xml"); - Registry registry = new ApplicationContextRegistry(context); - - // initialize CamelContextManager with a DefaultCamelContext using the custom registry - CamelContextManager.init(new DefaultCamelContext(registry)); - - // ... - - startCamelService(); - - -.. _camel-spring-applications: - -Standalone Spring applications ------------------------------- - -A better approach to configure a Spring application context as registry for the -CamelContext is to use `Camel's Spring support`_. Furthermore, -the :ref:`spring-module` module additionally supports a element -for creating and starting a CamelService. An optional reference to a custom -CamelContext can be defined for as well. Here's an example. - -.. _Camel's Spring support: http://camel.apache.org/spring.html - -.. code-block:: xml - - - - - - - - - - - - - - - - - -Creating a CamelContext this way automatically adds the defining Spring -application context as registry to that CamelContext. The CamelService is -started when the application context is started and stopped when the application -context is closed. A simple usage example is shown in the following snippet. - -**Scala** - -.. 
code-block:: scala - - import org.springframework.context.support.ClassPathXmlApplicationContext - import akka.camel.CamelContextManager - - // Create and start application context (start CamelService) - val appctx = new ClassPathXmlApplicationContext("/context.xml") - - // Access to CamelContext (SpringCamelContext) - val ctx = CamelContextManager.mandatoryContext - // Access to ProducerTemplate of that CamelContext - val tpl = CamelContextManager.mandatoryTemplate - - // use ctx and tpl ... - - // Close application context (stop CamelService) - appctx.close - -**Java** - -.. code-block:: java - - // TODO - - -If the CamelService doesn't reference a custom CamelContext then a -DefaultCamelContext is created (and accessible via the CamelContextManager). - -.. code-block:: xml - - - - - - - - - -Kernel mode ------------ - -For classes that are loaded by the Kernel or the Initializer, starting the -CamelService can be omitted, as discussed in the previous section. Since these -classes are loaded and instantiated before the CamelService is started (by -Akka), applications can make modifications to a CamelContext here as well (and -even provide their own CamelContext). Assuming there's a boot class -sample.camel.Boot configured in :ref:`configuration`. - -.. code-block:: none - - akka { - ... - boot = ["sample.camel.Boot"] - ... - } - -Modifications to the CamelContext can be done like in the following snippet. - -**Scala** - -.. code-block:: scala - - package sample.camel - - import org.apache.camel.builder.RouteBuilder - - import akka.camel.CamelContextManager - - class Boot { - CamelContextManager.init - - // Customize CamelContext with application-specific routes - CamelContextManager.mandatoryContext.addRoutes(new CustomRouteBuilder) - - // No need to start CamelService here. It will be started - // when this classes has been loaded and instantiated. - } - - class CustomRouteBuilder extends RouteBuilder { - def configure { - // ... - } - } - -**Java** - -.. 
code-block:: java - - // TODO - - -Custom Camel routes -=================== - -In all the examples so far, routes to consumer actors have been automatically -constructed by akka-camel, when the actor was started. Although the default -route construction templates, used by akka-camel internally, are sufficient for -most use cases, some applications may require more specialized routes to actors. -The akka-camel module provides two mechanisms for customizing routes to actors, -which will be explained in this section. These are - -* Usage of :ref:`camel-components` to access (untyped) actor and actors. - Any Camel route can use these components to access Akka actors. - -* :ref:`camel-intercepting-route-construction` to (untyped) actor and actors. - Default routes to consumer actors are extended using predefined extension - points. - - -.. _camel-components: - -Akka Camel components ---------------------- - -Akka actors can be access from Camel routes using the `actor`_ and -`typed-actor`_ Camel components, respectively. These components can be used to -access any Akka actor (not only consumer actors) from Camel routes, as described -in the following sections. - -.. _actor: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala -.. _typed-actor: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/component/TypedActorComponent.scala - - -Access to actors ----------------- - -To access (untyped) actors from custom Camel routes, the `actor`_ Camel -component should be used. It fully supports Camel's `asynchronous routing -engine`_. - -.. _actor: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala -.. 
_asynchronous routing engine: http://camel.apache.org/asynchronous-routing-engine.html - -This component accepts the following enpoint URI formats: - -* ``actor:[?]`` -* ``actor:id:[][?]`` -* ``actor:uuid:[][?]`` - -where ```` and ```` refer to ``actorRef.id`` and the -String-representation of ``actorRef.uuid``, respectively. The ```` are -name-value pairs separated by ``&`` (i.e. ``name1=value1&name2=value2&...``). - - -URI options -^^^^^^^^^^^ - -The following URI options are supported: - -+----------+---------+---------+-------------------------------------------+ -| Name | Type | Default | Description | -+==========+=========+=========+===========================================+ -| blocking | Boolean | false | If set to true, in-out message exchanges | -| | | | with the target actor will be made with | -| | | | the ``!!`` operator, otherwise with the | -| | | | ``!`` operator. | -| | | | | -| | | | See also :ref:`camel-timeout`. | -+----------+---------+---------+-------------------------------------------+ -| autoack | Boolean | true | If set to true, in-only message exchanges | -| | | | are auto-acknowledged when the message is | -| | | | added to the actor's mailbox. If set to | -| | | | false, actors must acknowledge the | -| | | | receipt of the message. | -| | | | | -| | | | See also :ref:`camel-acknowledgements`. | -+----------+---------+---------+-------------------------------------------+ - -Here's an actor endpoint URI example containing an actor uuid:: - - actor:uuid:12345678?blocking=true - -In actor endpoint URIs that contain id: or uuid:, an actor identifier (id or -uuid) is optional. In this case, the in-message of an exchange produced to an -actor endpoint must contain a message header with name CamelActorIdentifier -(which is defined by the ActorComponent.ActorIdentifier field) and a value that -is the target actor's identifier. 
On the other hand, if the URI contains an -actor identifier, it can be seen as a default actor identifier that can be -overridden by messages containing a CamelActorIdentifier header. - - -Message headers -^^^^^^^^^^^^^^^ - -+----------------------+--------+-------------------------------------------+ -| Name | Type | Description | -+======================+========+===========================================+ -| CamelActorIdentifier | String | Contains the identifier (id or uuid) of | -| | | the actor to route the message to. The | -| | | identifier is interpreted as actor id if | -| | | the URI contains id:, the identifier is | -| | | interpreted as uuid id the URI contains | -| | | uuid:. A uuid value may also be of type | -| | | Uuid (not only String). The header name | -| | | is defined by the | -| | | ActorComponent.ActorIdentifier field. | -+----------------------+--------+-------------------------------------------+ - -Here's another actor endpoint URI example that doesn't define an actor uuid. In -this case the target actor uuid must be defined by the CamelActorIdentifier -message header:: - - actor:uuid: - -In the following example, a custom route to an actor is created, using the -actor's uuid (i.e. actorRef.uuid). The route starts from a `Jetty`_ endpoint and -ends at the target actor. - - -**Scala** - -.. 
code-block:: scala - - import org.apache.camel.builder.RouteBuilder - - import akka.actor._ - import akka.actor.Actor - import akka.actor.Actor._ - import akka.camel.{Message, CamelContextManager, CamelServiceManager} - - object CustomRouteExample extends Application { - val target = actorOf(Props[CustomRouteTarget]) - - CamelServiceManager.startCamelService - CamelContextManager.mandatoryContext.addRoutes(new CustomRouteBuilder(target.uuid)) - } - - class CustomRouteTarget extends Actor { - def receive = { - case msg: Message => self.reply("Hello %s" format msg.bodyAs[String]) - } - } - - class CustomRouteBuilder(uuid: Uuid) extends RouteBuilder { - def configure { - val actorUri = "actor:uuid:%s" format uuid - from("jetty:http://localhost:8877/camel/custom").to(actorUri) - } - } - - -**Java** - -.. code-block:: java - - import com.eaio.uuid.UUID; - - import org.apache.camel.builder.RouteBuilder; - import static akka.actor.Actors.*; - import akka.actor.ActorRef; - import akka.actor.UntypedActor; - import akka.camel.CamelServiceManager; - import akka.camel.CamelContextManager; - import akka.camel.Message; - - public class CustomRouteExample { - public static void main(String... 
args) throws Exception { - ActorRef target = actorOf(new Props(CustomRouteTarget.class)); - CamelServiceManager.startCamelService(); - CamelContextManager.getMandatoryContext().addRoutes(new CustomRouteBuilder(target.getUuid())); - } - } - - public class CustomRouteTarget extends UntypedActor { - public void onReceive(Object message) { - Message msg = (Message) message; - String body = msg.getBodyAs(String.class); - getContext().tryReply(String.format("Hello %s", body)); - } - } - - public class CustomRouteBuilder extends RouteBuilder { - private UUID uuid; - - public CustomRouteBuilder(UUID uuid) { - this.uuid = uuid; - } - - public void configure() { - String actorUri = String.format("actor:uuid:%s", uuid); - from("jetty:http://localhost:8877/camel/custom").to(actorUri); - } - } - -When the example is started, messages POSTed to -``http://localhost:8877/camel/custom`` are routed to the target actor. - - -Access to typed actors ----------------------- - -To access typed actor methods from custom Camel routes, the `typed-actor`_ Camel -component should be used. It is a specialization of the Camel `bean`_ component. -Applications should use the interface (endpoint URI syntax and options) as -described in the bean component documentation but with the typed-actor schema. -Typed Actors must be added to a `Camel registry`_ for being accessible by the -typed-actor component. - -.. _typed-actor: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/component/TypedActorComponent.scala -.. _bean: http://camel.apache.org/bean.html -.. _Camel registry: http://camel.apache.org/registry.html - - -.. _camel-typed-actors-using-spring: - -Using Spring -^^^^^^^^^^^^ - -The following example shows how to access typed actors in a Spring application -context. For adding typed actors to the application context and for starting -:ref:`camel-spring-applications` the :ref:`spring-module` module is used in the -following example. 
It offers a ```` element to define typed actor -factory beans and a ```` element to create and start a -CamelService. - -.. code-block:: xml - - - - - - - - - - - - - - - - - -SampleTypedActor is the typed actor interface and SampleTypedActorImpl in the -typed actor implementation class. - -**Scala** - -.. code-block:: scala - - package sample - - import akka.actor.TypedActor - - trait SampleTypedActor { - def foo(s: String): String - } - - class SampleTypedActorImpl extends TypedActor with SampleTypedActor { - def foo(s: String) = "hello %s" format s - } - -**Java** - -.. code-block:: java - - package sample; - - import akka.actor.TypedActor; - - public interface SampleTypedActor { - public String foo(String s); - } - - public class SampleTypedActorImpl extends TypedActor implements SampleTypedActor { - - public String foo(String s) { - return "hello " + s; - } - } - -The SampleRouteBuilder defines a custom route from the direct:test endpoint to -the sample typed actor using a typed-actor endpoint URI. - -**Scala** - -.. code-block:: scala - - package sample - - import org.apache.camel.builder.RouteBuilder - - class SampleRouteBuilder extends RouteBuilder { - def configure = { - // route to typed actor - from("direct:test").to("typed-actor:sample?method=foo") - } - } - -**Java** - -.. code-block:: java - - package sample; - - import org.apache.camel.builder.RouteBuilder; - - public class SampleRouteBuilder extends RouteBuilder { - public void configure() { - // route to typed actor - from("direct:test").to("typed-actor:sample?method=foo"); - } - } - -The typed-actor endpoint URI syntax is::: - - typed-actor:?method= - -where ```` is the id of the bean in the Spring application context and -```` is the name of the typed actor method to invoke. - -Usage of the custom route for sending a message to the typed actor is shown in -the following snippet. - -**Scala** - -.. 
code-block:: scala - - package sample - - import org.springframework.context.support.ClassPathXmlApplicationContext - import akka.camel.CamelContextManager - - // load Spring application context (starts CamelService) - val appctx = new ClassPathXmlApplicationContext("/context-standalone.xml") - - // access 'sample' typed actor via custom route - assert("hello akka" == CamelContextManager.mandatoryTemplate.requestBody("direct:test", "akka")) - - // close Spring application context (stops CamelService) - appctx.close - -**Java** - -.. code-block:: java - - package sample; - - import org.springframework.context.support.ClassPathXmlApplicationContext; - import akka.camel.CamelContextManager; - - // load Spring application context - ClassPathXmlApplicationContext appctx = new ClassPathXmlApplicationContext("/context-standalone.xml"); - - // access 'externally' registered typed actors with typed-actor component - assert("hello akka" == CamelContextManager.getMandatoryTemplate().requestBody("direct:test", "akka")); - - // close Spring application context (stops CamelService) - appctx.close(); - -The application uses a Camel `producer template`_ to access the typed actor via -the ``direct:test`` endpoint. - -.. _producer template: http://camel.apache.org/producertemplate.html - - -Without Spring -^^^^^^^^^^^^^^ - -Usage of :ref:`spring-module` for adding typed actors to the Camel registry and -starting a CamelService is optional. Setting up a Spring-less application for -accessing typed actors is shown in the next example. - -**Scala** - -.. 
code-block:: scala - - package sample - - import org.apache.camel.impl.{DefaultCamelContext, SimpleRegistry} - import akka.actor.TypedActor - import akka.camel.CamelContextManager - import akka.camel.CamelServiceManager._ - - // register typed actor - val registry = new SimpleRegistry - registry.put("sample", TypedActor.newInstance(classOf[SampleTypedActor], classOf[SampleTypedActorImpl])) - - // customize CamelContext - CamelContextManager.init(new DefaultCamelContext(registry)) - CamelContextManager.mandatoryContext.addRoutes(new SampleRouteBuilder) - - startCamelService - - // access 'sample' typed actor via custom route - assert("hello akka" == CamelContextManager.mandatoryTemplate.requestBody("direct:test", "akka")) - - stopCamelService - -**Java** - -.. code-block:: java - - package sample; - - // register typed actor - SimpleRegistry registry = new SimpleRegistry(); - registry.put("sample", TypedActor.newInstance(SampleTypedActor.class, SampleTypedActorImpl.class)); - - // customize CamelContext - CamelContextManager.init(new DefaultCamelContext(registry)); - CamelContextManager.getMandatoryContext().addRoutes(new SampleRouteBuilder()); - - startCamelService(); - - // access 'sample' typed actor via custom route - assert("hello akka" == CamelContextManager.getMandatoryTemplate().requestBody("direct:test", "akka")); - - stopCamelService(); - -Here, `SimpleRegistry`_, a java.util.Map based registry, is used to register -typed actors. The CamelService is started and stopped programmatically. - -.. _SimpleRegistry: https://svn.apache.org/repos/asf/camel/trunk/camel-core/src/main/java/org/apache/camel/impl/SimpleRegistry.java - - -.. _camel-intercepting-route-construction: - -Intercepting route construction -------------------------------- - -The previous section, :ref:`camel-components`, explained how to setup a route to -an (untyped) actor or typed actor manually. 
It was the application's -responsibility to define the route and add it to the current CamelContext. This -section explains a more conventient way to define custom routes: akka-camel is -still setting up the routes to consumer actors (and adds these routes to the -current CamelContext) but applications can define extensions to these routes. -Extensions can be defined with Camel's `Java DSL`_ or `Scala DSL`_. For example, -an extension could be a custom error handler that redelivers messages from an -endpoint to an actor's bounded mailbox when the mailbox was full. - -.. _Java DSL: http://camel.apache.org/dsl.html -.. _Scala DSL: http://camel.apache.org/scala-dsl.html - -The following examples demonstrate how to extend a route to a consumer actor for -handling exceptions thrown by that actor. To simplify the example, we configure -:ref:`camel-blocking-exchanges` which reports any exception, that is thrown by -receive, directly back to the Camel route. One could also report exceptions -asynchronously using a Failure reply (see also `this article`__) but we'll do it -differently here. - -__ http://krasserm.blogspot.com/2011/02/akka-consumer-actors-new-features-and.html - - -Actors (untyped) -^^^^^^^^^^^^^^^^ - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor - import akka.camel.Consumer - - import org.apache.camel.builder.Builder - import org.apache.camel.model.RouteDefinition - - class ErrorHandlingConsumer extends Actor with Consumer { - def endpointUri = "direct:error-handler-test" - - // Needed to propagate exception back to caller - override def blocking = true - - onRouteDefinition {rd: RouteDefinition => - // Catch any exception and handle it by returning the exception message as response - rd.onException(classOf[Exception]).handled(true).transform(Builder.exceptionMessage).end - } - - protected def receive = { - case msg: Message => throw new Exception("error: %s" format msg.body) - } - } - -**Java** - -.. 
code-block:: java - - import akka.camel.UntypedConsumerActor; - - import org.apache.camel.builder.Builder; - import org.apache.camel.model.ProcessorDefinition; - import org.apache.camel.model.RouteDefinition; - - public class SampleErrorHandlingConsumer extends UntypedConsumerActor { - - public String getEndpointUri() { - return "direct:error-handler-test"; - } - - // Needed to propagate exception back to caller - public boolean isBlocking() { - return true; - } - - public void preStart() { - onRouteDefinition(new RouteDefinitionHandler() { - public ProcessorDefinition onRouteDefinition(RouteDefinition rd) { - // Catch any exception and handle it by returning the exception message as response - return rd.onException(Exception.class).handled(true).transform(Builder.exceptionMessage()).end(); - } - }); - } - - public void onReceive(Object message) throws Exception { - Message msg = (Message)message; - String body = msg.getBodyAs(String.class); - throw new Exception(String.format("error: %s", body)); - } - - } - - - -For (untyped) actors, consumer route extensions are defined by calling the -onRouteDefinition method with a route definition handler. In Scala, this is a -function of type ``RouteDefinition => ProcessorDefinition[_]``, in Java it is an -instance of ``RouteDefinitionHandler`` which is defined as follows. - -.. code-block:: scala - - package akka.camel - - import org.apache.camel.model.RouteDefinition - import org.apache.camel.model.ProcessorDefinition - - trait RouteDefinitionHandler { - def onRouteDefinition(rd: RouteDefinition): ProcessorDefinition[_] - } - -The akka-camel module creates a RouteDefinition instance by calling -from(endpointUri) on a Camel RouteBuilder (where endpointUri is the endpoint URI -of the consumer actor) and passes that instance as argument to the route -definition handler \*). 
The route definition handler then extends the route and -returns a ProcessorDefinition (in the above example, the ProcessorDefinition -returned by the end method. See the `org.apache.camel.model`__ package for -details). After executing the route definition handler, akka-camel finally calls -a to(actor:uuid:actorUuid) on the returned ProcessorDefinition to complete the -route to the comsumer actor (where actorUuid is the uuid of the consumer actor). - -\*) Before passing the RouteDefinition instance to the route definition handler, -akka-camel may make some further modifications to it. - -__ https://svn.apache.org/repos/asf/camel/trunk/camel-core/src/main/java/org/apache/camel/model/ - - -Typed actors -^^^^^^^^^^^^ - -For typed consumer actors to define a route definition handler, they must -provide a RouteDefinitionHandler implementation class with the @consume -annotation. The implementation class must have a no-arg constructor. Here's an -example (in Java). - -.. code-block:: java - - import org.apache.camel.builder.Builder; - import org.apache.camel.model.ProcessorDefinition; - import org.apache.camel.model.RouteDefinition; - - public class SampleRouteDefinitionHandler implements RouteDefinitionHandler { - public ProcessorDefinition onRouteDefinition(RouteDefinition rd) { - return rd.onException(Exception.class).handled(true).transform(Builder.exceptionMessage()).end(); - } - } - -It can be used as follows. - -**Scala** - -.. code-block:: scala - - trait TestTypedConsumer { - @consume(value="direct:error-handler-test", routeDefinitionHandler=classOf[SampleRouteDefinitionHandler]) - def foo(s: String): String - } - - // implementation class omitted - -**Java** - -.. code-block:: java - - public interface SampleErrorHandlingTypedConsumer { - - @consume(value="direct:error-handler-test", routeDefinitionHandler=SampleRouteDefinitionHandler.class) - String foo(String s); - - } - - // implementation class omitted - - -.. 
_camel-examples: - -Examples -======== - -For all features described so far, there's running sample code in -`akka-sample-camel`_. The examples in `sample.camel.Boot`_ are started during -Kernel startup because this class has been added to the boot :ref:`configuration`. - -.. _akka-sample-camel: http://github.com/jboner/akka/tree/master/akka-samples/akka-sample-camel/ -.. _sample.camel.Boot: http://github.com/jboner/akka/blob/master/akka-samples/akka-sample-camel/src/main/scala/sample/camel/Boot.scala - -.. code-block:: none - - akka { - ... - boot = ["sample.camel.Boot", ...] - ... - } - -If you don't want to have these examples started during Kernel startup, delete -it from the :ref:`configuration`. Other examples are standalone applications (i.e. classes with a -main method) that can be started from `sbt`_. - -.. _sbt: http://code.google.com/p/simple-build-tool/ - -.. code-block:: none - - $ sbt - [info] Building project akka 2.0-SNAPSHOT against Scala 2.9.0 - [info] using AkkaModulesParentProject with sbt 0.7.7 and Scala 2.7.7 - > project akka-sample-camel - Set current project to akka-sample-camel 2.0-SNAPSHOT - > run - ... - Multiple main classes detected, select one to run: - - [1] sample.camel.ClientApplication - [2] sample.camel.ServerApplication - [3] sample.camel.StandaloneSpringApplication - [4] sample.camel.StandaloneApplication - [5] sample.camel.StandaloneFileApplication - [6] sample.camel.StandaloneJmsApplication - - -Some of the examples in `akka-sample-camel`_ are described in more detail in the -following subsections. - - -.. _camel-async-example: - -Asynchronous routing and transformation example ------------------------------------------------ - -This example demonstrates how to implement consumer and producer actors that -support :ref:`camel-asynchronous-routing` with their Camel endpoints. The sample -application transforms the content of the Akka homepage, http://akka.io, by -replacing every occurrence of *Akka* with *AKKA*. 
After starting -the :ref:`microkernel`, direct the browser to http://localhost:8875 and the -transformed Akka homepage should be displayed. Please note that this example -will probably not work if you're behind an HTTP proxy. - -The following figure gives an overview how the example actors interact with -external systems and with each other. A browser sends a GET request to -http://localhost:8875 which is the published endpoint of the ``HttpConsumer`` -actor. The ``HttpConsumer`` actor forwards the requests to the ``HttpProducer`` -actor which retrieves the Akka homepage from http://akka.io. The retrieved HTML -is then forwarded to the ``HttpTransformer`` actor which replaces all occurences -of *Akka* with *AKKA*. The transformation result is sent back the HttpConsumer -which finally returns it to the browser. - -.. image:: camel-async-interact.png - -Implementing the example actor classes and wiring them together is rather easy -as shown in the following snippet (see also `sample.camel.Boot`_). - -.. 
code-block:: scala - - import org.apache.camel.Exchange - import akka.actor.Actor._ - import akka.actor.{Actor, ActorRef} - import akka.camel.{Producer, Message, Consumer} - - class HttpConsumer(producer: ActorRef) extends Actor with Consumer { - def endpointUri = "jetty:http://0.0.0.0:8875/" - - protected def receive = { - case msg => producer forward msg - } - } - - class HttpProducer(transformer: ActorRef) extends Actor with Producer { - def endpointUri = "jetty://http://akka.io/?bridgeEndpoint=true" - - override protected def receiveBeforeProduce = { - // only keep Exchange.HTTP_PATH message header (which needed by bridge endpoint) - case msg: Message => msg.setHeaders(msg.headers(Set(Exchange.HTTP_PATH))) - } - - override protected def receiveAfterProduce = { - // do not reply but forward result to transformer - case msg => transformer forward msg - } - } - - class HttpTransformer extends Actor { - protected def receive = { - case msg: Message => self.reply(msg.transformBody {body: String => body replaceAll ("Akka ", "AKKA ")}) - case msg: Failure => self.reply(msg) - } - } - - // Wire and start the example actors - val httpTransformer = actorOf(Props(new HttpTransformer)) - val httpProducer = actorOf(Props(new HttpProducer(httpTransformer))) - val httpConsumer = actorOf(Props(new HttpConsumer(httpProducer))) - -The `jetty endpoints`_ of HttpConsumer and HttpProducer support asynchronous -in-out message exchanges and do not allocate threads for the full duration of -the exchange. This is achieved by using `Jetty continuations`_ on the -consumer-side and by using `Jetty's asynchronous HTTP client`_ on the producer -side. The following high-level sequence diagram illustrates that. - -.. _jetty endpoints: http://camel.apache.org/jetty.html -.. _Jetty continuations: http://wiki.eclipse.org/Jetty/Feature/Continuations -.. _Jetty's asynchronous HTTP client: http://wiki.eclipse.org/Jetty/Tutorial/HttpClient - -.. 
image:: camel-async-sequence.png - - -Custom Camel route example --------------------------- - -This section also demonstrates the combined usage of a ``Producer`` and a -``Consumer`` actor as well as the inclusion of a custom Camel route. The -following figure gives an overview. - -.. image:: camel-custom-route.png - -* A consumer actor receives a message from an HTTP client - -* It forwards the message to another actor that transforms the message (encloses - the original message into hyphens) - -* The transformer actor forwards the transformed message to a producer actor - -* The producer actor sends the message to a custom Camel route beginning at the - ``direct:welcome`` endpoint - -* A processor (transformer) in the custom Camel route prepends "Welcome" to the - original message and creates a result message - -* The producer actor sends the result back to the consumer actor which returns - it to the HTTP client - - -The example is part of `sample.camel.Boot`_. The consumer, transformer and -producer actor implementations are as follows. - -.. 
code-block:: scala - - package sample.camel - - import akka.actor.{Actor, ActorRef} - import akka.camel.{Message, Consumer} - - class Consumer3(transformer: ActorRef) extends Actor with Consumer { - def endpointUri = "jetty:http://0.0.0.0:8877/camel/welcome" - - def receive = { - // Forward a string representation of the message body to transformer - case msg: Message => transformer.forward(msg.setBodyAs[String]) - } - } - - class Transformer(producer: ActorRef) extends Actor { - protected def receive = { - // example: transform message body "foo" to "- foo -" and forward result to producer - case msg: Message => producer.forward(msg.transformBody((body: String) => "- %s -" format body)) - } - } - - class Producer1 extends Actor with Producer { - def endpointUri = "direct:welcome" - } - -The producer actor knows where to reply the message to because the consumer and -transformer actors have forwarded the original sender reference as well. The -application configuration and the route starting from direct:welcome are as -follows. - -.. code-block:: scala - - package sample.camel - - import org.apache.camel.builder.RouteBuilder - import org.apache.camel.{Exchange, Processor} - - import akka.actor.Actor._ - import akka.camel.CamelContextManager - - class Boot { - CamelContextManager.init() - CamelContextManager.mandatoryContext.addRoutes(new CustomRouteBuilder) - - val producer = actorOf(Props[Producer1]) - val mediator = actorOf(Props(new Transformer(producer))) - val consumer = actorOf(Props(new Consumer3(mediator))) - } - - class CustomRouteBuilder extends RouteBuilder { - def configure { - from("direct:welcome").process(new Processor() { - def process(exchange: Exchange) { - // Create a 'welcome' message from the input message - exchange.getOut.setBody("Welcome %s" format exchange.getIn.getBody) - } - }) - } - } - -To run the example, start the :ref:`microkernel` and POST a message to -``http://localhost:8877/camel/welcome``. - -.. 
code-block:: none - - curl -H "Content-Type: text/plain" -d "Anke" http://localhost:8877/camel/welcome - -The response should be: - -.. code-block:: none - - Welcome - Anke - - - -Publish-subcribe example ------------------------- - -JMS -^^^ - -This section demonstrates how akka-camel can be used to implement -publish/subscribe for actors. The following figure sketches an example for -JMS-based publish/subscribe. - -.. image:: camel-pubsub.png - -A consumer actor receives a message from an HTTP client. It sends the message to -a JMS producer actor (publisher). The JMS producer actor publishes the message -to a JMS topic. Two other actors that subscribed to that topic both receive the -message. The actor classes used in this example are shown in the following -snippet. - -.. code-block:: scala - - package sample.camel - - import akka.actor.{Actor, ActorRef} - import akka.camel.{Producer, Message, Consumer} - - class Subscriber(name:String, uri: String) extends Actor with Consumer { - def endpointUri = uri - - protected def receive = { - case msg: Message => println("%s received: %s" format (name, msg.body)) - } - } - - class Publisher(name: String, uri: String) extends Actor with Producer { - self.id = name - - def endpointUri = uri - - // one-way communication with JMS - override def oneway = true - } - - class PublisherBridge(uri: String, publisher: ActorRef) extends Actor with Consumer { - def endpointUri = uri - - protected def receive = { - case msg: Message => { - publisher ! msg.bodyAs[String] - self.reply("message published") - } - } - } - -Wiring these actors to implement the above example is as simple as - -.. 
code-block:: scala - - package sample.camel - - import org.apache.camel.impl.DefaultCamelContext - import org.apache.camel.spring.spi.ApplicationContextRegistry - import org.springframework.context.support.ClassPathXmlApplicationContext - - import akka.actor.Actor._ - import akka.camel.CamelContextManager - - class Boot { - // Create CamelContext with Spring-based registry and custom route builder - val context = new ClassPathXmlApplicationContext("/context-jms.xml", getClass) - val registry = new ApplicationContextRegistry(context) - CamelContextManager.init(new DefaultCamelContext(registry)) - - // Setup publish/subscribe example - val jmsUri = "jms:topic:test" - val jmsSubscriber1 = actorOf(Props(new Subscriber("jms-subscriber-1", jmsUri))) - val jmsSubscriber2 = actorOf(Props(new Subscriber("jms-subscriber-2", jmsUri))) - val jmsPublisher = actorOf(Props(new Publisher("jms-publisher", jmsUri))) - - val jmsPublisherBridge = actorOf(Props(new PublisherBridge("jetty:http://0.0.0.0:8877/camel/pub/jms", jmsPublisher))) - } - -To publish messages to subscribers one could of course also use the JMS API -directly; there's no need to do that over a JMS producer actor as in this -example. For the example to work, Camel's `jms`_ component needs to be -configured with a JMS connection factory which is done in a Spring application -context XML file (context-jms.xml). - -.. _jms: http://camel.apache.org/jms.html - -.. code-block:: xml - - - - - - - - - - - - - - - - - - - - - - - - - -To run the example, start the :ref:`microkernel` and POST a -message to ``http://localhost:8877/camel/pub/jms``. - -.. code-block:: none - - curl -H "Content-Type: text/plain" -d "Happy hAkking" http://localhost:8877/camel/pub/jms - -The HTTP response body should be - -.. code-block:: none - - message published - -On the console, where you started the Akka Kernel, you should see something like - -.. code-block:: none - - ... 
- INF [20100622-11:49:57.688] camel: jms-subscriber-2 received: Happy hAkking - INF [20100622-11:49:57.688] camel: jms-subscriber-1 received: Happy hAkking - - -Cometd -^^^^^^ - -Publish/subscribe with `CometD`_ is equally easy using `Camel's cometd -component`_. - -.. _CometD: http://cometd.org/ -.. _Camel's cometd component: http://camel.apache.org/cometd.html - -.. image:: camel-pubsub2.png - -All actor classes from the JMS example can re-used, only the endpoint URIs need -to be changed. - -.. code-block:: scala - - package sample.camel - - import org.apache.camel.impl.DefaultCamelContext - import org.apache.camel.spring.spi.ApplicationContextRegistry - import org.springframework.context.support.ClassPathXmlApplicationContext - - import akka.actor.Actor._ - import akka.camel.CamelContextManager - - class Boot { - // ... - - // Setup publish/subscribe example - val cometdUri = "cometd://localhost:8111/test/abc?resourceBase=target" - val cometdSubscriber = actorOf(Props(new Subscriber("cometd-subscriber", cometdUri))) - val cometdPublisher = actorOf(Props(new Publisher("cometd-publisher", cometdUri))) - - val cometdPublisherBridge = actorOf(Props(new PublisherBridge("jetty:http://0.0.0.0:8877/camel/pub/cometd", cometdPublisher))) - } - - -Quartz Scheduler Example ------------------------- - -Here is an example showing how simple is to implement a cron-style scheduler by -using the Camel Quartz component in Akka. - -The following example creates a "timer" actor which fires a message every 2 -seconds: - -.. code-block:: scala - - package com.dimingo.akka - - import akka.actor.Actor - import akka.actor.Actor.actorOf - - import akka.camel.{Consumer, Message} - import akka.camel.CamelServiceManager._ - - class MyQuartzActor extends Actor with Consumer { - - def endpointUri = "quartz://example?cron=0/2+*+*+*+*+?" 
- - def receive = { - - case msg => println("==============> received %s " format msg) - - } // end receive - - } // end MyQuartzActor - - object MyQuartzActor { - - def main(str: Array[String]) { - - // start the Camel service - startCamelService - - // create and start a quartz actor - val myActor = actorOf(Props[MyQuartzActor]) - - } // end main - - } // end MyQuartzActor - -The full working example is available for download here: -http://www.dimingo.com/akka/examples/example-akka-quartz.tar.gz - -You can launch it using the maven command: - -.. code-block:: none - - $ mvn scala:run -DmainClass=com.dimingo.akka.MyQuartzActor - -For more information about the Camel Quartz component, see here: -http://camel.apache.org/quartz.html +The Akka Camel module has not been migrated to Akka 2.0-SNAPSHOT yet. diff --git a/akka-docs/modules/code/akka/docs/actor/mailbox/DurableMailboxDocTestBase.java b/akka-docs/modules/code/akka/docs/actor/mailbox/DurableMailboxDocTestBase.java index 4ac3204d0b..cc9cb31bce 100644 --- a/akka-docs/modules/code/akka/docs/actor/mailbox/DurableMailboxDocTestBase.java +++ b/akka-docs/modules/code/akka/docs/actor/mailbox/DurableMailboxDocTestBase.java @@ -31,7 +31,7 @@ public class DurableMailboxDocTestBase { })); //#define-dispatcher myActor.tell("test"); - system.stop(); + system.shutdown(); } public static class MyUntypedActor extends UntypedActor { diff --git a/akka-docs/modules/microkernel.rst b/akka-docs/modules/microkernel.rst index 266d888b6c..7686dcb2e5 100644 --- a/akka-docs/modules/microkernel.rst +++ b/akka-docs/modules/microkernel.rst @@ -5,4 +5,32 @@ Microkernel ############# -The Akka Spring module has not been migrated to Akka 2.0-SNAPSHOT yet. +The Akka Microkernel is included in the Akka download found at `downloads`_. + +.. _downloads: http://akka.io/downloads + +To run an application with the microkernel you need to create a Bootable class +that handles the startup and shutdown the application. An example is included below. 
+ +Put your application jar in the ``deploy`` directory to have it automatically +loaded. + +To start the kernel use the scripts in the ``bin`` directory, passing the boot +classes for your application. + +There is a simple example of an application setup for running with the +microkernel included in the akka download. This can be run with the following +command (on a unix-based system): + +.. code-block:: none + + bin/akka sample.kernel.hello.HelloKernel + +Use Ctrl-C to interrupt and exit the microkernel. + +On a Windows machine you can also use the bin/akka.bat script. + +The code for the Hello Kernel example (see the HelloKernel class for an example +of creating a Bootable): + +.. includecode:: ../../akka-samples/akka-sample-hello-kernel/src/main/scala/sample/kernel/hello/HelloKernel.scala diff --git a/akka-docs/project/migration-guide-0.10.x-1.0.x.rst b/akka-docs/project/migration-guide-0.10.x-1.0.x.rst deleted file mode 100644 index 6352e63061..0000000000 --- a/akka-docs/project/migration-guide-0.10.x-1.0.x.rst +++ /dev/null @@ -1,447 +0,0 @@ -Migration Guide 0.10.x to 1.0.x -==================================== - -Akka & Akka Modules separated into two different repositories and distributions -------------------------------------------------------------------------------- - -Akka is split up into two different parts: -* Akka - Reflects all the sections under 'Scala API' and 'Java API' in the navigation bar. -* Akka Modules - Reflects all the sections under 'Add-on modules' in the navigation bar. - -Download the release you need (Akka core or Akka Modules) from ``_ and unzip it. - ----- - -Changed Akka URI ----------------- - -http://akkasource.org changed to http://akka.io - -Reflects XSDs, Maven repositories, ScalaDoc etc. 
- ----- - -Removed 'se.scalablesolutions' prefix -------------------------------------- - -We have removed some boilerplate by shortening the Akka package from -**se.scalablesolutions.akka** to just **akka** so just do a search-replace in your project, -we apologize for the inconvenience, but we did it for our users. - ----- - -Akka-core is no more --------------------- - -Akka-core has been split into akka-actor, akka-stm, akka-typed-actor & akka-remote this means that you need to update any deps you have on akka-core. - ----- - -Config ------- - -Turning on/off modules -^^^^^^^^^^^^^^^^^^^^^^ - -All the 'service = on' elements for turning modules on and off have been replaced by a top-level list of the enabled services. - -Services available for turning on/off are: -* "remote" -* "http" -* "camel" - -**All** services are **OFF** by default. Enable the ones you are using. - -.. code-block:: ruby - - akka { - enabled-modules = [] # Comma separated list of the enabled modules. Options: ["remote", "camel", "http"] - } - -Renames -^^^^^^^ - -* 'rest' section - has been renamed to 'http' to align with the module name 'akka-http'. -* 'storage' section - has been renamed to 'persistence' to align with the module name 'akka-persistence'. - -.. code-block:: ruby - - akka { - http { - .. - } - - persistence { - .. - } - } - ----- - -Important changes from RC2-RC3 ------------------------------- - -**akka.config.SupervisionSupervise** - -**Scala** - -.. code-block:: scala - - def apply(actorRef: ActorRef, lifeCycle: LifeCycle, registerAsRemoteService: Boolean = false) - -- boolean instead of remoteAddress, registers that actor with it's id as service name on the local server - -**akka.actor.Actors now is the API for Java to interact with Actors, Remoting and ActorRegistry:** - -**Java** - -.. 
code-block:: java - - import static akka.actor.Actors.*; // <-- The important part - - actorOf(); - remote().actorOf(); - registry().actorsFor("foo"); - -***akka.actor.Actor now is the API for Scala to interact with Actors, Remoting and ActorRegistry:*** - -**Scala** - -.. code-block:: scala - - import akka.actor.Actor._ // <-- The important part - - actorOf().method - remote.actorOf() - registry.actorsFor("foo") - -**object UntypedActor has been deleted and replaced with akka.actor.Actors/akka.actor.Actor (Java/Scala)** - -- UntypedActor.actorOf -> Actors.actorOf (Java) or Actor.actorOf (Scala) - -**object ActorRegistry has been deleted and replaced with akka.actor.Actors.registry()/akka.actor.Actor.registry (Java/Scala)** - -- ActorRegistry. -> Actors.registry(). (Java) or Actor.registry. (Scala) - -**object RemoteClient has been deleted and replaced with akka.actor.Actors.remote()/akka.actor.Actor.remote (Java/Scala)** - -- RemoteClient -> Actors.remote() (Java) or Actor.remote (Scala) - -**object RemoteServer has been deleted and replaced with akka.actor.Actors.remote()/akka.actor.Actor.remote (Java/Scala)** - -- RemoteServer - deleted -> Actors.remote() (Java) or Actor.remote (Scala) - -**classes RemoteActor, RemoteUntypedActor and RemoteUntypedConsumerActors has been deleted and replaced with akka.actor.Actors.remote().actorOf(x, host port)/akka.actor.Actor.remote.actorOf(x, host, port)** - -- RemoteActor, RemoteUntypedActor - deleted, use: remote().actorOf(YourActor.class, host, port) (Java) or remote.actorOf(Props[YourActor](host, port) - -**Remoted spring-actors now default to spring id as service-name, use "service-name" attribute on "remote"-tag to override** - -**Listeners for RemoteServer and RemoteClient** are now registered on Actors.remote().addListener (Java) or Actor.remote.addListener (Scala), this means that all listeners get all remote events, both remote server evens and remote client events, **so adjust your code accordingly.** - 
-**ActorRef.startLinkRemote has been removed since one specified on creation wether the actor is client-managed or not.** - -Important change from RC3 to RC4 --------------------------------- - -The Akka-Spring namespace has changed from akkasource.org and scalablesolutions.se to http://akka.io/schema and http://akka.io/akka-.xsd - -Module akka-actor ------------------ - -The Actor.init callback has been renamed to "preStart" to align with the general callback naming and is more clear about when it's called. - -The Actor.shutdown callback has been renamed to "postStop" to align with the general callback naming and is more clear about when it's called. - -The Actor.initTransactionalState callback has been removed, logic should be moved to preStart and be wrapped in an atomic block - -**se.scalablesolutions.akka.config.ScalaConfig** and **se.scalablesolutions.akka.config.JavaConfig** have been merged into **akka.config.Supervision** - -**RemoteAddress** has moved from **se.scalablesolutions.akka.config.ScalaConfig** to **akka.config** - -The ActorRef.lifeCycle has changed signature from Option[LifeCycle] to LifeCycle, this means you need to change code that looks like this: -**self.lifeCycle = Some(LifeCycle(Permanent))** to **self.lifeCycle = Permanent** - -The equivalent to **self.lifeCycle = None** is **self.lifeCycle = UndefinedLifeCycle** -**LifeCycle(Permanent)** becomes **Permanent** -**new LifeCycle(permanent())** becomes **permanent()** (need to do: import static se.scalablesolutions.akka.config.Supervision.*; first) - -**JavaConfig.Component** and **ScalaConfig.Component** have been consolidated and renamed as **Supervision.SuperviseTypedActor** - -**self.trapExit** has been moved into the FaultHandlingStrategy, and **ActorRef.faultHandler** has switched type from Option[FaultHandlingStrategy] -to FaultHandlingStrategy: - -**Scala** - -.. 
code-block:: scala - - import akka.config.Supervision._ - - self.faultHandler = OneForOneStrategy(List(classOf[Exception]), 3, 5000) - -**Java** - -.. code-block:: java - - import static akka.Supervision.*; - - getContext().setFaultHandler(new OneForOneStrategy(new Class[] { Exception.class },50,1000)) - -**RestartStrategy, AllForOne, OneForOne** have been replaced with **AllForOneStrategy** and **OneForOneStrategy** in **se.scalablesolutions.akka.config.Supervision** - -**Scala** - -.. code-block:: scala - - import akka.config.Supervision._ - SupervisorConfig( - OneForOneStrategy(List(classOf[Exception]), 3, 5000), - Supervise(pingpong1,Permanent) :: Nil - ) - -**Java** - -.. code-block:: java - - import static akka.Supervision.*; - - new SupervisorConfig( - new OneForOneStrategy(new Class[] { Exception.class },50,1000), - new Server[] { new Supervise(pingpong1, permanent()) } - ) - -***We have removed the following factory methods:*** - -**Actor.actor { case foo => bar }** -**Actor.transactor { case foo => bar }** -**Actor.temporaryActor { case foo => bar }** -**Actor.init {} receive { case foo => bar }** - -They started the actor and no config was possible, it was inconsistent and irreparable. - -replace with your own factories, or: - -**Scala** - -.. code-block:: scala - - actorOf( new Actor { def receive = { case foo => bar } } ).start - actorOf( new Actor { self.lifeCycle = Temporary; def receive = { case foo => bar } } ).start - -ReceiveTimeout is now rescheduled after every message, before there was only an initial timeout. -To stop rescheduling of ReceiveTimeout, set **receiveTimeout = None** - -HotSwap -------- - -HotSwap does no longer use behavior stacking by default, but that is an option to both "become" and HotSwap. - -HotSwap now takes for Scala a Function from ActorRef to a Receive, the ActorRef passed in is the reference to self, so you can do self.reply() etc. - ----- - -Module akka-stm ---------------- - -The STM stuff is now in its own module. 
This means that there is no support for transactions or transactors in akka-actor. - -Local and global -^^^^^^^^^^^^^^^^ - -The **local/global** distinction has been dropped. This means that if the following general import was being used: - -**Scala** - -.. code-block:: scala - - import akka.stm.local._ - -this is now just: - -**Scala** - -.. code-block:: scala - - import akka.stm._ - -Coordinated is the new global -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -There is a new explicit mechanism for coordinated transactions. See the `Scala Transactors `_ and `Java Transactors `_ documentation for more information. Coordinated transactions and transactors are found in the ``akka.transactor`` package now. The usage of transactors has changed. - -Agents -^^^^^^ - -Agent is now in the akka-stm module and has moved to the ``akka.agent`` package. The implementation has been reworked and is now closer to Clojure agents. There is not much difference in general usage, the main changes involve interaction with the STM. - -While updates to Agents are asynchronous, the state of an Agent is always immediately available for reading by any thread. Agents are integrated with the STM - any dispatches made in a transaction are held until that transaction commits, and are discarded if it is retried or aborted. There is a new ``sendOff`` method for long-running or blocking update functions. - ----- - -Module akka-camel ------------------ - -Access to the CamelService managed by CamelServiceManager has changed: - -* Method service renamed to mandatoryService (Scala) -* Method service now returns Option[CamelService] (Scala) -* Introduced method getMandatoryService() (Java) -* Introduced method getService() (Java) - -**Scala** - -.. code-block:: scala - - import se.scalablesolutions.akka.camel.CamelServiceManager._ - import se.scalablesolutions.akka.camel.CamelService - - val o: Option[CamelService] = service - val s: CamelService = mandatoryService - -**Java** - -.. 
code-block:: java - - import se.scalablesolutions.akka.camel.CamelService; - import se.scalablesolutions.akka.japi.Option; - import static se.scalablesolutions.akka.camel.CamelServiceManager.*; - - Option o = getService(); - CamelService s = getMandatoryService(); - -Access to the CamelContext and ProducerTemplate managed by CamelContextManager has changed: - -* Method context renamed to mandatoryContext (Scala) -* Method template renamed to mandatoryTemplate (Scala) -* Method service now returns Option[CamelContext] (Scala) -* Method template now returns Option[ProducerTemplate] (Scala) -* Introduced method getMandatoryContext() (Java) -* Introduced method getContext() (Java) -* Introduced method getMandatoryTemplate() (Java) -* Introduced method getTemplate() (Java) - -**Scala** - -.. code-block:: scala - - import org.apache.camel.CamelContext - import org.apache.camel.ProducerTemplate - - import se.scalablesolutions.akka.camel.CamelContextManager._ - - val co: Option[CamelContext] = context - val to: Option[ProducerTemplate] = template - - val c: CamelContext = mandatoryContext - val t: ProducerTemplate = mandatoryTemplate - -**Java** - -.. 
code-block:: java - - import org.apache.camel.CamelContext; - import org.apache.camel.ProducerTemplate; - - import se.scalablesolutions.akka.japi.Option; - import static se.scalablesolutions.akka.camel.CamelContextManager.*; - - Option co = getContext(); - Option to = getTemplate(); - - CamelContext c = getMandatoryContext(); - ProducerTemplate t = getMandatoryTemplate(); - -The following methods have been renamed on class se.scalablesolutions.akka.camel.Message: - -* bodyAs(Class) has been renamed to getBodyAs(Class) -* headerAs(String, Class) has been renamed to getHeaderAs(String, Class) - -The API for waiting for consumer endpoint activation and de-activation has been changed - -* CamelService.expectEndpointActivationCount has been removed and replaced by CamelService.awaitEndpointActivation -* CamelService.expectEndpointDeactivationCount has been removed and replaced by CamelService.awaitEndpointDeactivation - -**Scala** - -.. code-block:: scala - - import se.scalablesolutions.akka.actor.Actor - import se.scalablesolutions.akka.camel.CamelServiceManager._ - - val s = startCamelService - val actor = Actor.actorOf(Props[SampleConsumer] - - // wait for 1 consumer being activated - s.awaitEndpointActivation(1) { - actor.start - } - - // wait for 1 consumer being de-activated - s.awaitEndpointDeactivation(1) { - actor.stop - } - - s.stop - -**Java** - -.. 
code-block:: java - - import java.util.concurrent.TimeUnit; - import se.scalablesolutions.akka.actor.ActorRef; - import se.scalablesolutions.akka.actor.Actors; - import se.scalablesolutions.akka.camel.CamelService; - import se.scalablesolutions.akka.japi.SideEffect; - import static se.scalablesolutions.akka.camel.CamelServiceManager.*; - - CamelService s = startCamelService(); - final ActorRef actor = Actors.actorOf(SampleUntypedConsumer.class); - - // wait for 1 consumer being activated - s.awaitEndpointActivation(1, new SideEffect() { - public void apply() { - actor.start(); - } - }); - - // wait for 1 consumer being de-activated - s.awaitEndpointDeactivation(1, new SideEffect() { - public void apply() { - actor.stop(); - } - }); - - s.stop(); - -Module Akka-Http ----------------- - -Atmosphere support has been removed. If you were using akka.comet.AkkaServlet for Jersey support only, -you can switch that to: akka.http.AkkaRestServlet and it should work just like before. - -Atmosphere has been removed because we have a new async http support in the form of Akka Mist, a very thin bridge -between Servlet3.0/JettyContinuations and Actors, enabling Http-as-messages, read more about it here: -http://doc.akka.io/http#Mist%20-%20Lightweight%20Asynchronous%20HTTP - -If you really need Atmosphere support, you can add it yourself by following the steps listed at the start of: -http://doc.akka.io/comet - -Module akka-spring ------------------- - -The Akka XML schema URI has changed to http://akka.io/schema/akka - -.. 
code-block:: xml - - - - - - diff --git a/akka-docs/project/migration-guide-0.8.x-0.9.x.rst b/akka-docs/project/migration-guide-0.8.x-0.9.x.rst deleted file mode 100644 index 868879a5b0..0000000000 --- a/akka-docs/project/migration-guide-0.8.x-0.9.x.rst +++ /dev/null @@ -1,172 +0,0 @@ -Migration Guide 0.8.x to 0.9.x -============================== - -**This document describes between the 0.8.x and the 0.9 release.** - -Background for the new ActorRef -------------------------------- - -In the work towards 0.9 release we have now done a major change to how Actors are created. In short we have separated identity and value, created an 'ActorRef' that holds the actual Actor instance. This allows us to do many great things such as for example: - -* Create serializable, immutable, network-aware Actor references that can be freely shared across the network. They "remember" their origin and will always work as expected. -* Not only kill and restart the same supervised Actor instance when it has crashed (as we do now), but dereference it, throw it away and make it eligible for garbage collection. -* etc. much more - -These work very much like the 'PID' (process id) in Erlang. - -These changes means that there is no difference in defining Actors. You still use the old Actor trait, all methods are there etc. But you can't just new this Actor up and send messages to it since all its public API methods are gone. They now reside in a new class; 'ActorRef' and use need to use instances of this class to interact with the Actor (sending messages etc.). - -Here is a short migration guide with the things that you have to change. It is a big conceptual change but in practice you don't have to change much. - - - -Creating Actors with default constructor ----------------------------------------- - -From: - -.. code-block:: scala - - val a = new MyActor - a ! msg - -To: - -.. code-block:: scala - - import Actor._ - val a = actorOf(Props[MyActor] - a ! 
msg - -You can also start it in the same statement: - -.. code-block:: scala - - val a = actorOf(Props[MyActor] - -Creating Actors with non-default constructor --------------------------------------------- - -From: - -.. code-block:: scala - - val a = new MyActor(..) - a ! msg - -To: - -.. code-block:: scala - - import Actor._ - val a = actorOf(Props(new MyActor(..)) - a ! msg - -Use of 'self' ActorRef API --------------------------- - -Where you have used 'this' to refer to the Actor from within itself now use 'self': - -.. code-block:: scala - - self ! MessageToMe - -Now the Actor trait only has the callbacks you can implement: -* receive -* postRestart/preRestart -* init/shutdown - -It has no state at all. - -All API has been moved to ActorRef. The Actor is given its ActorRef through the 'self' member variable. -Here you find functions like: -* !, !!, !!! and forward -* link, unlink, startLink, spawnLink etc -* makeTransactional, makeRemote etc. -* start, stop -* etc. - -Here you also find fields like -* dispatcher = ... -* id = ... -* lifeCycle = ... -* faultHandler = ... -* trapExit = ... -* etc. - -This means that to use them you have to prefix them with 'self', like this: - -.. code-block:: scala - - self ! Message - -However, for convenience you can import these functions and fields like below, which will allow you do drop the 'self' prefix: - -.. code-block:: scala - - class MyActor extends Actor { - import self._ - id = ... - dispatcher = ... - spawnLink[OtherActor] - ... - } - -Serialization -------------- - -If you want to serialize it yourself, here is how to do it: - -.. code-block:: scala - - val actorRef1 = actorOf(Props[MyActor] - - val bytes = actorRef1.toBinary - - val actorRef2 = ActorRef.fromBinary(bytes) - -If you are also using Protobuf then you can use the methods that work with Protobuf's Messages directly. - -.. 
code-block:: scala - - val actorRef1 = actorOf(Props[MyActor] - - val protobufMessage = actorRef1.toProtocol - - val actorRef2 = ActorRef.fromProtocol(protobufMessage) - -Camel ------ - -Some methods of the se.scalablesolutions.akka.camel.Message class have been deprecated in 0.9. These are - -.. code-block:: scala - - package se.scalablesolutions.akka.camel - - case class Message(...) { - // ... - @deprecated def bodyAs[T](clazz: Class[T]): T - @deprecated def setBodyAs[T](clazz: Class[T]): Message - // ... - } - -They will be removed in 1.0. Instead use - -.. code-block:: scala - - package se.scalablesolutions.akka.camel - - case class Message(...) { - // ... - def bodyAs[T](implicit m: Manifest[T]): T = - def setBodyAs[T](implicit m: Manifest[T]): Message - // ... - } - -Usage example: -.. code-block:: scala - - val m = Message(1.4) - val b = m.bodyAs[String] - diff --git a/akka-docs/scala/actors.rst b/akka-docs/scala/actors.rst index 618d2618c9..dcb7ed2795 100644 --- a/akka-docs/scala/actors.rst +++ b/akka-docs/scala/actors.rst @@ -40,7 +40,7 @@ along with the implementation of how the messages should be processed. Here is an example: -.. includecode:: code/ActorDocSpec.scala +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala :include: imports1,my-actor Please note that the Akka Actor ``receive`` message loop is exhaustive, which is @@ -53,8 +53,8 @@ thrown and the actor is restarted when an unknown message is received. Creating Actors with default constructor ---------------------------------------- -.. includecode:: code/ActorDocSpec.scala -:include: imports2,system-actorOf +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala + :include: imports2,system-actorOf The call to :meth:`actorOf` returns an instance of ``ActorRef``. This is a handle to the ``Actor`` instance which you can use to interact with the ``Actor``. The @@ -70,7 +70,7 @@ how the supervisor hierarchy is arranged. 
When using the context the current act will be supervisor of the created child actor. When using the system it will be a top level actor, that is supervised by the system (internal guardian actor). -.. includecode:: code/ActorDocSpec.scala#context-actorOf +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#context-actorOf Actors are automatically started asynchronously when created. When you create the ``Actor`` then it will automatically call the ``preStart`` @@ -92,7 +92,7 @@ a call-by-name block in which you can create the Actor in any way you like. Here is an example: -.. includecode:: code/ActorDocSpec.scala#creating-constructor +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#creating-constructor Creating Actors with Props @@ -101,7 +101,7 @@ Creating Actors with Props ``Props`` is a configuration object to specify additional things for the actor to be created, such as the ``MessageDispatcher``. -.. includecode:: code/ActorDocSpec.scala#creating-props +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#creating-props Creating Actors using anonymous classes @@ -109,7 +109,7 @@ Creating Actors using anonymous classes When spawning actors for specific sub-tasks from within an actor, it may be convenient to include the code to be executed directly in place, using an anonymous class. -.. includecode:: code/ActorDocSpec.scala#anonymous-actor +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#anonymous-actor .. warning:: @@ -145,13 +145,16 @@ In addition, it offers: You can import the members in the :obj:`context` to avoid prefixing access with ``context.`` -.. includecode:: code/ActorDocSpec.scala#import-context +.. 
includecode:: code/akka/docs/actor/ActorDocSpec.scala#import-context The remaining visible methods are user-overridable life-cycle hooks which are described in the following:: def preStart() {} - def preRestart(reason: Throwable, message: Option[Any]) { postStop() } + def preRestart(reason: Throwable, message: Option[Any]) { + context.children foreach (context.stop(_)) + postStop() + } def postRestart(reason: Throwable) { preStart() } def postStop() {} @@ -185,7 +188,7 @@ processing a message. This restart involves the hooks mentioned above: message, e.g. when a supervisor does not trap the exception and is restarted in turn by its supervisor. This method is the best place for cleaning up, preparing hand-over to the fresh actor instance, etc. - By default it calls :meth:`postStop`. + By default it stops all children and calls :meth:`postStop`. 2. The initial factory from the ``actorOf`` call is used to produce the fresh instance. 3. The new actor’s :meth:`postRestart` method is invoked with the exception @@ -287,7 +290,7 @@ To complete the future with an exception you need send a Failure message to the This is not done automatically when an actor throws an exception while processing a message. -.. includecode:: code/ActorDocSpec.scala#reply-exception +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#reply-exception If the actor does not complete the future, it will expire after the timeout period, which is taken from one of the following locations in order of precedence: @@ -312,7 +315,8 @@ Gives you a way to avoid blocking. .. warning:: - When using future callbacks, inside actors you need to carefully avoid closing over + When using future callbacks, such as ``onComplete``, ``onSuccess``, and ``onFailure``, + inside actors you need to carefully avoid closing over the containing actor’s reference, i.e. do not call methods or access mutable state on the enclosing actor from within the callback. 
This would break the actor encapsulation and may introduce synchronization bugs and race conditions because @@ -335,7 +339,7 @@ type, it will throw the exception or a :class:`ClassCastException` (if you want to get :obj:`None` in the latter case, use :meth:`Future.asSilently[T]`). In case of a timeout, :obj:`None` is returned. -.. includecode:: code/ActorDocSpec.scala#using-ask +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#using-ask Forward message --------------- @@ -367,7 +371,7 @@ This method should return a ``PartialFunction``, e.g. a ‘match/case’ clause which the message can be matched against the different case clauses using Scala pattern matching. Here is an example: -.. includecode:: code/ActorDocSpec.scala +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala :include: imports1,my-actor @@ -397,19 +401,17 @@ received within a certain time. To receive this timeout you have to set the ``receiveTimeout`` property and declare a case handling the ReceiveTimeout object. -.. includecode:: code/ActorDocSpec.scala#receive-timeout +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#receive-timeout Stopping actors =============== -Actors are stopped by invoking the ``stop`` method of the ``ActorRef``. -The actual termination of the actor is performed asynchronously, i.e. -``stop`` may return before the actor is stopped. - -.. code-block:: scala - - actor.stop() +Actors are stopped by invoking the :meth:`stop` method of an ``ActorRefFactory``, +i.e. ``ActorContext`` or ``ActorSystem``. Typically the context is used for stopping +child actors and the system for stopping top level actors. The actual termination of +the actor is performed asynchronously, i.e. :meth:`stop` may return before the actor is +stopped. Processing of the current message, if any, will continue before the actor is stopped, but additional messages in the mailbox will not be processed. By default these
To hotswap the Actor behavior using ``become``: -.. includecode:: code/ActorDocSpec.scala#hot-swap-actor +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#hot-swap-actor The ``become`` method is useful for many different things, but a particular nice example of it is in example where it is used to implement a Finite State Machine @@ -473,12 +475,12 @@ example of it is in example where it is used to implement a Finite State Machine Here is another little cute example of ``become`` and ``unbecome`` in action: -.. includecode:: code/ActorDocSpec.scala#swapper +.. includecode:: code/akka/docs/actor/ActorDocSpec.scala#swapper Encoding Scala Actors nested receives without accidentally leaking memory ------------------------------------------------------------------------- -See this `Unnested receive example `_. +See this `Unnested receive example `_. Downgrade @@ -554,4 +556,4 @@ A bit advanced but very useful way of defining a base message handler and then extend that, either through inheritance or delegation, is to use ``PartialFunction.orElse`` chaining. -.. includecode:: code/ActorDocSpec.scala#receive-orElse +.. 
includecode:: code/akka/docs/actor/ActorDocSpec.scala#receive-orElse diff --git a/akka-docs/scala/code/StmDocSpec.scala b/akka-docs/scala/code/StmDocSpec.scala deleted file mode 100644 index 99c2e051ae..0000000000 --- a/akka-docs/scala/code/StmDocSpec.scala +++ /dev/null @@ -1,27 +0,0 @@ -package akka.docs.stm - -import org.scalatest.WordSpec -import org.scalatest.matchers.MustMatchers - -class StmDocSpec extends WordSpec with MustMatchers { - - "simple counter example" in { - //#simple - import akka.stm._ - - val ref = Ref(0) - - def counter = atomic { - ref alter (_ + 1) - } - - counter - // -> 1 - - counter - // -> 2 - //#simple - - ref.get must be === 2 - } -} diff --git a/akka-docs/scala/code/ActorDocSpec.scala b/akka-docs/scala/code/akka/docs/actor/ActorDocSpec.scala similarity index 94% rename from akka-docs/scala/code/ActorDocSpec.scala rename to akka-docs/scala/code/akka/docs/actor/ActorDocSpec.scala index 5592572443..68733d2009 100644 --- a/akka-docs/scala/code/ActorDocSpec.scala +++ b/akka-docs/scala/code/akka/docs/actor/ActorDocSpec.scala @@ -4,6 +4,8 @@ package akka.docs.actor import akka.actor.Actor import akka.actor.Props import akka.event.Logging +import akka.dispatch.Future + //#imports1 //#imports2 @@ -40,7 +42,7 @@ class FirstActor extends Actor { case DoIt(msg) ⇒ val replyMsg = doSomeDangerousWork(msg) sender ! replyMsg - self.stop() + context.stop(self) } def doSomeDangerousWork(msg: ImmutableMessage): String = { "done" } })) ! 
m @@ -143,7 +145,7 @@ class ActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { //#import-context val first = system.actorOf(Props(new FirstActor)) - first.stop() + system.stop(first) } @@ -169,7 +171,7 @@ class ActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { system.eventStream.unsubscribe(testActor) system.eventStream.publish(TestEvent.UnMute(filter)) - myActor.stop() + system.stop(myActor) } "creating actor with constructor" in { @@ -182,7 +184,7 @@ class ActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { val myActor = system.actorOf(Props(new MyActor("..."))) //#creating-constructor - myActor.stop() + system.stop(myActor) } "creating actor with Props" in { @@ -192,7 +194,7 @@ class ActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { val myActor = system.actorOf(Props[MyActor].withDispatcher(dispatcher), name = "myactor") //#creating-props - myActor.stop() + system.stop(myActor) } "using ask" in { @@ -207,14 +209,12 @@ class ActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { val myActor = system.actorOf(Props(new MyActor)) implicit val timeout = system.settings.ActorTimeout val future = myActor ? "hello" - future.as[String] match { - case Some(answer) ⇒ //... - case None ⇒ //... - } - val result: Option[Int] = for (x ← (myActor ? 3).as[Int]) yield { 2 * x } + for (x ← future) println(x) //Prints "hello" + + val result: Future[Int] = for (x ← (myActor ? 
3).mapTo[Int]) yield { 2 * x } //#using-ask - myActor.stop() + system.stop(myActor) } "using receiveTimeout" in { diff --git a/akka-docs/common/code/SchedulerDocSpec.scala b/akka-docs/scala/code/akka/docs/actor/SchedulerDocSpec.scala similarity index 71% rename from akka-docs/common/code/SchedulerDocSpec.scala rename to akka-docs/scala/code/akka/docs/actor/SchedulerDocSpec.scala index ac101e396d..3192c67e06 100644 --- a/akka-docs/common/code/SchedulerDocSpec.scala +++ b/akka-docs/scala/code/akka/docs/actor/SchedulerDocSpec.scala @@ -1,4 +1,4 @@ -package akka.scheduler.actor +package akka.docs.actor //#imports1 import akka.actor.Actor @@ -10,7 +10,6 @@ import akka.util.duration._ import org.scalatest.{ BeforeAndAfterAll, WordSpec } import org.scalatest.matchers.MustMatchers import akka.testkit._ -import akka.util.duration._ class SchedulerDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { "schedule a one-off task" in { @@ -22,25 +21,12 @@ class SchedulerDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { expectMsg(1 second, "foo") //#schedule-one-off-thunk - //Schedules to send the "foo"-message to the testActor after 50ms + //Schedules a function to be executed (send the current time) to the testActor after 50ms system.scheduler.scheduleOnce(50 milliseconds) { - testActor ! "foo" + testActor ! System.currentTimeMillis } //#schedule-one-off-thunk - expectMsg(1 second, "foo") - - //#schedule-one-off-runnable - //Schedules to send the "foo"-message to the testActor after 50ms - system.scheduler.scheduleOnce( - 50 milliseconds, - new Runnable { - def run = testActor ! 
"foo" - }) - - //#schedule-one-off-runnable - - expectMsg(1 second, "foo") } "schedule a recurring task" in { @@ -62,6 +48,6 @@ class SchedulerDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { //This cancels further Ticks to be sent cancellable.cancel() //#schedule-recurring - tickActor.stop() + system.stop(tickActor) } } diff --git a/akka-docs/scala/code/UnnestedReceives.scala b/akka-docs/scala/code/akka/docs/actor/UnnestedReceives.scala similarity index 100% rename from akka-docs/scala/code/UnnestedReceives.scala rename to akka-docs/scala/code/akka/docs/actor/UnnestedReceives.scala diff --git a/akka-docs/scala/code/DispatcherDocSpec.scala b/akka-docs/scala/code/akka/docs/dispatcher/DispatcherDocSpec.scala similarity index 72% rename from akka-docs/scala/code/DispatcherDocSpec.scala rename to akka-docs/scala/code/akka/docs/dispatcher/DispatcherDocSpec.scala index 057ce05602..ffe0e4ed4b 100644 --- a/akka-docs/scala/code/DispatcherDocSpec.scala +++ b/akka-docs/scala/code/akka/docs/dispatcher/DispatcherDocSpec.scala @@ -16,35 +16,35 @@ object DispatcherDocSpec { val config = """ //#my-dispatcher-config my-dispatcher { - type = Dispatcher # Dispatcher is the name of the event-based dispatcher - daemonic = off # Toggles whether the threads created by this dispatcher should be daemons or not - core-pool-size-min = 2 # minimum number of threads to cap factor-based core number to - core-pool-size-factor = 2.0 # No of core threads ... ceil(available processors * factor) - core-pool-size-max = 10 # maximum number of threads to cap factor-based number to - throughput = 100 # Throughput defines the number of messages that are processed in a batch before the - # thread is returned to the pool. Set to 1 for as fair as possible. 
+ # Dispatcher is the name of the event-based dispatcher + type = Dispatcher + # Toggles whether the threads created by this dispatcher should be daemons or not + daemonic = off + # minimum number of threads to cap factor-based core number to + core-pool-size-min = 2 + # No of core threads ... ceil(available processors * factor) + core-pool-size-factor = 2.0 + # maximum number of threads to cap factor-based number to + core-pool-size-max = 10 + # Throughput defines the number of messages that are processed in a batch before the + # thread is returned to the pool. Set to 1 for as fair as possible. + throughput = 100 } //#my-dispatcher-config - - //#my-pinned-config - my-pinned-dispatcher { - type = Dispatcher - core-pool-size-min = 1 - core-pool-size-max = 1 - } - //#my-pinned-config - + //#my-bounded-config my-dispatcher-bounded-queue { type = Dispatcher core-pool-size-factor = 8.0 max-pool-size-factor = 16.0 - task-queue-size = 100 # Specifies the bounded capacity of the task queue - task-queue-type = "array" # Specifies which type of task queue will be used, can be "array" or "linked" (default) + # Specifies the bounded capacity of the task queue + task-queue-size = 100 + # Specifies which type of task queue will be used, can be "array" or "linked" (default) + task-queue-type = "array" throughput = 3 } //#my-bounded-config - + //#my-balancing-config my-balancing-dispatcher { type = BalancingDispatcher @@ -76,6 +76,14 @@ class DispatcherDocSpec extends AkkaSpec(DispatcherDocSpec.config) { val dispatcher = system.dispatcherFactory.lookup("my-dispatcher-bounded-queue") } + "defining pinned dispatcher" in { + //#defining-pinned-dispatcher + val name = "myactor" + val dispatcher = system.dispatcherFactory.newPinnedDispatcher(name) + val myActor = system.actorOf(Props[MyActor].withDispatcher(dispatcher), name) + //#defining-pinned-dispatcher + } + "defining priority dispatcher" in { //#prio-dispatcher val gen = PriorityGenerator { // Create a new PriorityGenerator, 
lower prio means more important diff --git a/akka-docs/scala/dispatchers.rst b/akka-docs/scala/dispatchers.rst index 1683aa05c8..a5a4453e89 100644 --- a/akka-docs/scala/dispatchers.rst +++ b/akka-docs/scala/dispatchers.rst @@ -29,7 +29,7 @@ Setting the dispatcher You specify the dispatcher to use when creating an actor. -.. includecode:: code/DispatcherDocSpec.scala +.. includecode:: code/akka/docs/dispatcher/DispatcherDocSpec.scala :include: imports,defining-dispatcher Types of dispatchers @@ -40,14 +40,14 @@ There are 4 different types of message dispatchers: * Thread-based (Pinned) * Event-based * Priority event-based -* Work-stealing (Balancing) +* Work-sharing (Balancing) It is recommended to define the dispatcher in :ref:`configuration` to allow for tuning for different environments. Example of a custom event-based dispatcher, which can be fetched with ``system.dispatcherFactory.lookup("my-dispatcher")`` as in the example above: -.. includecode:: code/DispatcherDocSpec.scala#my-dispatcher-config +.. includecode:: code/akka/docs/dispatcher/DispatcherDocSpec.scala#my-dispatcher-config Default values are taken from ``default-dispatcher``, i.e. all options doesn't need to be defined. @@ -69,11 +69,9 @@ has worse performance and scalability than the event-based dispatcher but works a low frequency of messages and are allowed to go off and do their own thing for a longer period of time. Another advantage with this dispatcher is that Actors do not block threads for each other. -FIXME PN: Is this the way to configure a PinnedDispatcher, and then why "A ``PinnedDispatcher`` cannot be shared between actors." +The ``PinnedDispatcher`` can't be configured, but is created and associated with an actor like this: -The ``PinnedDispatcher`` is configured as a event-based dispatcher with with core pool size of 1. - -.. includecode:: code/DispatcherDocSpec.scala#my-pinned-config +.. 
includecode:: code/akka/docs/dispatcher/DispatcherDocSpec.scala#defining-pinned-dispatcher Event-based ^^^^^^^^^^^ @@ -101,7 +99,7 @@ thread as a way to slow him down and balance producer/consumer. Here is an example of a bounded mailbox: -.. includecode:: code/DispatcherDocSpec.scala#my-bounded-config +.. includecode:: code/akka/docs/dispatcher/DispatcherDocSpec.scala#my-bounded-config The standard :class:`Dispatcher` allows you to define the ``throughput`` it should have, as shown above. This defines the number of messages for a specific @@ -118,20 +116,23 @@ Priority event-based Sometimes it's useful to be able to specify priority order of messages, that is done by using Dispatcher and supply an UnboundedPriorityMailbox or BoundedPriorityMailbox with a ``java.util.Comparator[Envelope]`` or use a -``akka.dispatch.PriorityGenerator`` (recommended): +``akka.dispatch.PriorityGenerator`` (recommended). Creating a Dispatcher using PriorityGenerator: -.. includecode:: code/DispatcherDocSpec.scala#prio-dispatcher +.. includecode:: code/akka/docs/dispatcher/DispatcherDocSpec.scala#prio-dispatcher -Work-stealing event-based +Work-sharing event-based ^^^^^^^^^^^^^^^^^^^^^^^^^ The ``BalancingDispatcher`` is a variation of the ``Dispatcher`` in which Actors of the same type can be set up to share this dispatcher and during execution time the different actors will steal messages from other actors if they -have less messages to process. This can be a great way to improve throughput at the cost of a little higher latency. +have less messages to process. +Although the technique used in this implementation is commonly known as "work stealing", the actual implementation is probably +best described as "work donating" because the actor from which work is being stolen takes the initiative. +This can be a great way to improve throughput at the cost of a little higher latency. -.. includecode:: code/DispatcherDocSpec.scala#my-balancing-config +.. 
includecode:: code/akka/docs/dispatcher/DispatcherDocSpec.scala#my-balancing-config Here is an article with some more information: `Load Balancing Actors with Work Stealing Techniques `_ Here is another article discussing this particular dispatcher: `Flexible load balancing with Akka in Scala `_ @@ -151,8 +152,9 @@ if not specified otherwise. akka { actor { default-dispatcher { - task-queue-size = 1000 # If negative (or zero) then an unbounded mailbox is used (default) - # If positive then a bounded mailbox is used and the capacity is set to the number specified + # If negative (or zero) then an unbounded mailbox is used (default) + # If positive then a bounded mailbox is used and the capacity is set to the number specified + task-queue-size = 1000 } } } @@ -162,7 +164,7 @@ Per-instance based configuration You can also do it on a specific dispatcher instance. -.. includecode:: code/DispatcherDocSpec.scala#my-bounded-config +.. includecode:: code/akka/docs/dispatcher/DispatcherDocSpec.scala#my-bounded-config For the ``PinnedDispatcher``, it is non-shareable between actors, and associates a dedicated Thread with the actor. diff --git a/akka-docs/scala/index.rst b/akka-docs/scala/index.rst index 7e24497f5f..a36fe9513f 100644 --- a/akka-docs/scala/index.rst +++ b/akka-docs/scala/index.rst @@ -9,10 +9,10 @@ Scala API actors typed-actors logging + scheduler futures dataflow agents - stm transactors fault-tolerance dispatchers diff --git a/akka-docs/scala/logging.rst b/akka-docs/scala/logging.rst index e5cc7597a9..35f4e838ff 100644 --- a/akka-docs/scala/logging.rst +++ b/akka-docs/scala/logging.rst @@ -21,7 +21,7 @@ For convenience you can mixin the ``log`` member into actors, instead of definin .. code-block:: scala - class MyActor extends Actor with akka.actor.ActorLogging { + class MyActor extends Actor with akka.actor.ActorLogging { The second parameter to the ``Logging`` is the source of this logging channel. 
The source object is translated to a String according to the following rules: @@ -31,14 +31,14 @@ The source object is translated to a String according to the following rules: * in case of a class an approximation of its simpleName * and in all other cases the simpleName of its class -The log message may contain argument placeholders ``{}``, which will be substituted if the log level +The log message may contain argument placeholders ``{}``, which will be substituted if the log level is enabled. Event Handler ============= -Logging is performed asynchronously through an event bus. You can configure which event handlers that should -subscribe to the logging events. That is done using the 'event-handlers' element in the :ref:`configuration`. +Logging is performed asynchronously through an event bus. You can configure which event handlers that should +subscribe to the logging events. That is done using the 'event-handlers' element in the :ref:`configuration`. Here you can also define the log level. .. code-block:: ruby @@ -46,10 +46,11 @@ Here you can also define the log level. akka { # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT) event-handlers = ["akka.event.Logging$DefaultLogger"] - loglevel = "DEBUG" # Options: ERROR, WARNING, INFO, DEBUG + # Options: ERROR, WARNING, INFO, DEBUG + loglevel = "DEBUG" } -The default one logs to STDOUT and is registered by default. It is not intended to be used for production. There is also an :ref:`slf4j-scala` +The default one logs to STDOUT and is registered by default. It is not intended to be used for production. There is also an :ref:`slf4j-scala` event handler available in the 'akka-slf4j' module. Example of creating a listener: @@ -63,7 +64,7 @@ Example of creating a listener: SLF4J ===== -Akka provides an event handler for `SL4FJ `_. This module is available in the 'akka-slf4j.jar'. +Akka provides an event handler for `SLF4J `_. This module is available in the 'akka-slf4j.jar'. 
It has one single dependency; the slf4j-api jar. In runtime you also need a SLF4J backend, we recommend `Logback `_: .. code-block:: scala @@ -71,10 +72,10 @@ It has one single dependency; the slf4j-api jar. In runtime you also need a SLF4 lazy val logback = "ch.qos.logback" % "logback-classic" % "1.0.0" % "runtime" -You need to enable the Slf4jEventHandler in the 'event-handlers' element in -the :ref:`configuration`. Here you can also define the log level of the event bus. +You need to enable the Slf4jEventHandler in the 'event-handlers' element in +the :ref:`configuration`. Here you can also define the log level of the event bus. More fine grained log levels can be defined in the configuration of the SLF4J backend -(e.g. logback.xml). The String representation of the source object that is used when +(e.g. logback.xml). The String representation of the source object that is used when creating the ``LoggingAdapter`` correspond to the name of the SL4FJ logger. .. code-block:: ruby @@ -91,9 +92,9 @@ Since the logging is done asynchronously the thread in which the logging was per Mapped Diagnostic Context (MDC) with attribute name ``sourceThread``. With Logback the thread name is available with ``%X{sourceThread}`` specifier within the pattern layout configuration:: - - - %date{ISO8601} %-5level %logger{36} %X{sourceThread} - %msg%n - - + + + %date{ISO8601} %-5level %logger{36} %X{sourceThread} - %msg%n + + diff --git a/akka-docs/common/scheduler.rst b/akka-docs/scala/scheduler.rst similarity index 72% rename from akka-docs/common/scheduler.rst rename to akka-docs/scala/scheduler.rst index d05cea60aa..6089630625 100644 --- a/akka-docs/common/scheduler.rst +++ b/akka-docs/scala/scheduler.rst @@ -1,30 +1,31 @@ -Scheduler -========= + +.. _scheduler-scala: + +################### + Scheduler (Scala) +################### Sometimes the need for making things happen in the future arises, and where do you go look then? -Look no further than ``ActorSystem``! 
There you find the :meth:``scheduler`` method that returns an instance +Look no further than ``ActorSystem``! There you find the :meth:`scheduler` method that returns an instance of akka.actor.Scheduler, this instance is unique per ActorSystem and is used internally for scheduling things to happen at specific points in time. Please note that the scheduled tasks are executed by the default ``MessageDispatcher`` of the ``ActorSystem``. You can schedule sending of messages to actors and execution of tasks (functions or Runnable). -You will get a ``Cancellable`` back that you can call :meth:``cancel`` on to cancel the execution of the +You will get a ``Cancellable`` back that you can call :meth:`cancel` on to cancel the execution of the scheduled operation. Some examples ------------- -.. includecode:: code/SchedulerDocSpec.scala +.. includecode:: code/akka/docs/actor/SchedulerDocSpec.scala :include: imports1,schedule-one-off-message -.. includecode:: code/SchedulerDocSpec.scala - :include: imports1,schedule-one-off-thunk +.. includecode:: code/akka/docs/actor/SchedulerDocSpec.scala + :include: schedule-one-off-thunk -.. includecode:: code/SchedulerDocSpec.scala - :include: imports1,schedule-one-off-runnable - -.. includecode:: code/SchedulerDocSpec.scala - :include: imports1,schedule-recurring +.. includecode:: code/akka/docs/actor/SchedulerDocSpec.scala + :include: schedule-recurring From ``akka.actor.ActorSystem`` ------------------------------- diff --git a/akka-docs/scala/transactors.rst b/akka-docs/scala/transactors.rst new file mode 100644 index 0000000000..cdd284ae43 --- /dev/null +++ b/akka-docs/scala/transactors.rst @@ -0,0 +1,6 @@ +.. _transactors-scala: + +Transactors (Scala) +=================== + +The Akka Transactors module has not been migrated to Akka 2.0-SNAPSHOT yet. 
\ No newline at end of file diff --git a/akka-docs/scala/typed-actors.rst b/akka-docs/scala/typed-actors.rst index 0e70acd282..5f6920138a 100644 --- a/akka-docs/scala/typed-actors.rst +++ b/akka-docs/scala/typed-actors.rst @@ -4,7 +4,7 @@ Typed Actors (Scala) .. sidebar:: Contents .. contents:: :local: - + The Typed Actors are implemented through `Typed Actors `_. It uses AOP through `AspectWerkz `_ to turn regular POJOs into asynchronous non-blocking Actors with semantics of the Actor Model. Each method dispatch is turned into a message that is put on a queue to be processed by the Typed Actor sequentially one by one. If you are using the `Spring Framework `_ then take a look at Akka's `Spring integration `_. @@ -182,7 +182,8 @@ Akka can help you in this regard. It allows you to turn on an option for seriali akka { actor { - serialize-messages = on # does a deep clone of messages to ensure immutability + # does a deep clone of messages to ensure immutability + serialize-messages = on } } diff --git a/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/resources/reference.conf b/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/resources/reference.conf index 3e6b914bf7..82beeeddd8 100644 --- a/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/resources/reference.conf +++ b/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/resources/reference.conf @@ -3,7 +3,7 @@ ################################################## # This the reference config file has all the default settings. -# Make your edits/overrides in your akka.conf. +# Make your edits/overrides in your application.conf. 
akka { actor { diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/resources/reference.conf b/akka-durable-mailboxes/akka-file-mailbox/src/main/resources/reference.conf index f81f8995f9..93ee52fcc7 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/resources/reference.conf +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/resources/reference.conf @@ -3,7 +3,7 @@ ############################################# # This the reference config file has all the default settings. -# Make your edits/overrides in your akka.conf. +# Make your edits/overrides in your application.conf. akka { actor { diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/tools/QDumper.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/tools/QDumper.scala index 54c5ba36b6..06f151d84a 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/tools/QDumper.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/tools/QDumper.scala @@ -149,6 +149,6 @@ object QDumper { new QueueDumper(filename, system.log)() } - system.stop() + system.shutdown() } } diff --git a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/resources/reference.conf b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/resources/reference.conf index 09a0c316ec..991f638053 100644 --- a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/resources/reference.conf +++ b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/resources/reference.conf @@ -3,19 +3,23 @@ ################################################ # This the reference config file has all the default settings. -# Make your edits/overrides in your akka.conf. +# Make your edits/overrides in your application.conf. 
akka { actor { mailbox { mongodb { + # Any specified collection name will be used as a prefix for collections that use durable mongo mailboxes - uri = "mongodb://localhost/akka.mailbox" # Follow Mongo URI Spec - http://www.mongodb.org/display/DOCS/Connections + # Follow Mongo URI Spec - http://www.mongodb.org/display/DOCS/Connections + uri = "mongodb://localhost/akka.mailbox" # Configurable timeouts for certain ops timeout { - read = 3000ms # time to wait for a read to succeed before timing out the future - write = 3000ms # time to wait for a write to succeed before timing out the future + # time to wait for a read to succeed before timing out the future + read = 3000ms + # time to wait for a write to succeed before timing out the future + write = 3000ms } } } diff --git a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailbox.scala b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailbox.scala index d010a1ef6a..6e1c28219d 100644 --- a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailbox.scala +++ b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailbox.scala @@ -8,10 +8,10 @@ import com.mongodb.async._ import com.mongodb.async.futures.RequestFutures import org.bson.collection._ import akka.actor.ActorCell -import akka.dispatch.Envelope import akka.event.Logging -import akka.dispatch.DefaultPromise import akka.actor.ActorRef +import akka.dispatch.{ Await, Promise, Envelope, DefaultPromise } +import java.util.concurrent.TimeoutException class MongoBasedMailboxException(message: String) extends AkkaException(message) @@ -43,15 +43,14 @@ class MongoBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) { /* TODO - Test if a BSON serializer is registered for the message and only if not, use toByteString? 
*/ val durableMessage = MongoDurableMessage(ownerPathString, envelope.message, envelope.sender) // todo - do we need to filter the actor name at all for safe collection naming? - val result = new DefaultPromise[Boolean](settings.WriteTimeout)(dispatcher) + val result = Promise[Boolean]()(dispatcher) mongo.insert(durableMessage, false)(RequestFutures.write { wr: Either[Throwable, (Option[AnyRef], WriteResult)] ⇒ wr match { - case Right((oid, wr)) ⇒ result.completeWithResult(true) - case Left(t) ⇒ result.completeWithException(t) + case Right((oid, wr)) ⇒ result.success(true) + case Left(t) ⇒ result.failure(t) } }) - - result.as[Boolean].orNull + Await.ready(result, settings.WriteTimeout) } def dequeue(): Envelope = withErrorHandling { @@ -62,29 +61,27 @@ class MongoBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) { * TODO - Should we have a specific query in place? Which way do we sort? * TODO - Error handling version! */ - val envelopePromise = new DefaultPromise[Envelope](settings.ReadTimeout)(dispatcher) + val envelopePromise = Promise[Envelope]()(dispatcher) mongo.findAndRemove(Document.empty) { doc: Option[MongoDurableMessage] ⇒ doc match { case Some(msg) ⇒ { log.debug("DEQUEUING message in mongo-based mailbox [{}]", msg) - envelopePromise.completeWithResult(msg.envelope()) + envelopePromise.success(msg.envelope()) log.debug("DEQUEUING messageInvocation in mongo-based mailbox [{}]", envelopePromise) } case None ⇒ - { - log.info("No matching document found. Not an error, just an empty queue.") - envelopePromise.completeWithResult(null) - } + log.info("No matching document found. 
Not an error, just an empty queue.") + envelopePromise.success(null) () } } - envelopePromise.as[Envelope].orNull + try { Await.result(envelopePromise, settings.ReadTimeout) } catch { case _: TimeoutException ⇒ null } } def numberOfMessages: Int = { - val count = new DefaultPromise[Int](settings.ReadTimeout)(dispatcher) - mongo.count()(count.completeWithResult) - count.as[Int].getOrElse(-1) + val count = Promise[Int]()(dispatcher) + mongo.count()(count.success) + try { Await.result(count, settings.ReadTimeout).asInstanceOf[Int] } catch { case _: Exception ⇒ -1 } } //TODO review find other solution, this will be very expensive diff --git a/akka-durable-mailboxes/akka-redis-mailbox/src/main/resources/reference.conf b/akka-durable-mailboxes/akka-redis-mailbox/src/main/resources/reference.conf index 20f1d03abd..7b12dc24b2 100644 --- a/akka-durable-mailboxes/akka-redis-mailbox/src/main/resources/reference.conf +++ b/akka-durable-mailboxes/akka-redis-mailbox/src/main/resources/reference.conf @@ -3,7 +3,7 @@ ############################################## # This the reference config file has all the default settings. -# Make your edits/overrides in your akka.conf. +# Make your edits/overrides in your application.conf. akka { actor { diff --git a/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/resources/reference.conf b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/resources/reference.conf index b31de45f76..3dfea7a944 100644 --- a/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/resources/reference.conf +++ b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/resources/reference.conf @@ -3,7 +3,7 @@ ################################################## # This the reference config file has all the default settings. -# Make your edits/overrides in your akka.conf. +# Make your edits/overrides in your application.conf. 
akka { actor { diff --git a/akka-kernel/src/main/scala/akka/kernel/DefaultAkkaLoader.scala b/akka-kernel/src/main/scala/akka/kernel/DefaultAkkaLoader.scala deleted file mode 100644 index 88645ceff8..0000000000 --- a/akka-kernel/src/main/scala/akka/kernel/DefaultAkkaLoader.scala +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - */ - -package akka.http - -import akka.config.Config -import akka.util.{ Bootable, AkkaLoader } -import akka.cluster.BootableRemoteActorService -import akka.actor.BootableActorLoaderService - -class DefaultAkkaLoader extends AkkaLoader { - def boot(): Unit = boot(true, new EmbeddedAppServer with BootableActorLoaderService with BootableRemoteActorService) -} - -/** - * Can be used to boot Akka - * - * java -cp ... akka.http.Main - */ -object Main extends DefaultAkkaLoader { - def main(args: Array[String]) = boot -} diff --git a/akka-kernel/src/main/scala/akka/kernel/EmbeddedAppServer.scala b/akka-kernel/src/main/scala/akka/kernel/EmbeddedAppServer.scala deleted file mode 100644 index 84d0006ea6..0000000000 --- a/akka-kernel/src/main/scala/akka/kernel/EmbeddedAppServer.scala +++ /dev/null @@ -1,73 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. 
- */ - -package akka.http - -import javax.ws.rs.core.UriBuilder -import javax.servlet.ServletConfig -import java.io.File - -import akka.actor.BootableActorLoaderService -import akka.util.Bootable - -import org.eclipse.jetty.xml.XmlConfiguration -import org.eclipse.jetty.server.{ Handler, Server } -import org.eclipse.jetty.server.handler.{ HandlerList, HandlerCollection, ContextHandler } -import java.net.URL -import akka.AkkaException - -/** - * Handles the Akka Comet Support (load/unload) - */ -trait EmbeddedAppServer extends Bootable { - self: BootableActorLoaderService ⇒ - - import akka.config.Config._ - - val REST_HOSTNAME = config.getString("akka.http.hostname", "localhost") - val REST_PORT = config.getInt("akka.http.port", 9998) - - val isRestEnabled = config.getList("akka.enabled-modules").exists(_ == "http") - - protected var server: Option[Server] = None - - protected def findJettyConfigXML: Option[URL] = - Option(applicationLoader.getOrElse(this.getClass.getClassLoader).getResource("microkernel-server.xml")) orElse - HOME.map(home ⇒ new File(home + "/config/microkernel-server.xml").toURI.toURL) - - abstract override def onLoad = { - super.onLoad - if (isRestEnabled) { - - val configuration = new XmlConfiguration(findJettyConfigXML.getOrElse(sys.error("microkernel-server.xml not found!"))) - - System.setProperty("jetty.port", REST_PORT.toString) - System.setProperty("jetty.host", REST_HOSTNAME) - - HOME.foreach(home ⇒ System.setProperty("jetty.home", home + "/deploy/root")) - - server = Option(configuration.configure.asInstanceOf[Server]) map { s ⇒ //Set the correct classloader to our contexts - applicationLoader foreach { loader ⇒ - //We need to provide the correct classloader to the servlets - def setClassLoader(handlers: Seq[Handler]) { - handlers foreach { - case c: ContextHandler ⇒ c.setClassLoader(loader) - case c: HandlerCollection ⇒ setClassLoader(c.getHandlers) - case _ ⇒ - } - } - setClassLoader(s.getHandlers) - } - //Start the server - s.start() 
- s - } - } - } - - abstract override def onUnload = { - super.onUnload - server foreach { _.stop() } - } -} diff --git a/akka-kernel/src/main/scala/akka/kernel/Kernel.scala b/akka-kernel/src/main/scala/akka/kernel/Kernel.scala deleted file mode 100644 index 74c90b47c7..0000000000 --- a/akka-kernel/src/main/scala/akka/kernel/Kernel.scala +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Copyright (C) 2009-2010 Typesafe Inc. - */ - -package akka.kernel - -import akka.http.EmbeddedAppServer -import akka.util.AkkaLoader -import akka.cluster.BootableRemoteActorService -import akka.actor.BootableActorLoaderService -import akka.camel.CamelService - -import java.util.concurrent.CountDownLatch - -object Main { - val keepAlive = new CountDownLatch(2) - - def main(args: Array[String]) = { - Kernel.boot - keepAlive.await - } -} - -/** - * The Akka Kernel, is used to start And postStop Akka in standalone/kernel mode. - */ -object Kernel extends AkkaLoader { - - def boot(): Unit = boot(true, new EmbeddedAppServer with BootableActorLoaderService with BootableRemoteActorService with CamelService) - - // For testing purposes only - def startRemoteService(): Unit = bundles.foreach(_ match { - case x: BootableRemoteActorService ⇒ x.startRemoteService() - case _ ⇒ - }) -} diff --git a/akka-kernel/src/main/scala/akka/kernel/Main.scala b/akka-kernel/src/main/scala/akka/kernel/Main.scala new file mode 100644 index 0000000000..3b89cf4ec0 --- /dev/null +++ b/akka-kernel/src/main/scala/akka/kernel/Main.scala @@ -0,0 +1,185 @@ +/** + * Copyright (C) 2009-2010 Typesafe Inc. + */ + +package akka.kernel + +import akka.actor.ActorSystem +import java.io.File +import java.lang.Boolean.getBoolean +import java.net.{ URL, URLClassLoader } +import java.util.jar.JarFile +import scala.collection.JavaConverters._ + +/** + * To use the microkernel at least one 'boot class' needs to be specified. + * A boot class implements this interface ([[akka.kernel.Bootable]]) and + * must have an empty default constructor. 
+ * + * ActorSystems can be created within the boot class. + * + * An example of a simple boot class: + * {{{ + * class BootApp extends Bootable { + * val system = ActorSystem("app") + * + * def startup = { + * system.actorOf(Props[FirstActor]) ! FirstMessage + * } + * + * def shutdown = { + * system.shutdown() + * } + * } + * }}} + * + * Boot classes are specified as main arguments to the microkernel. + * + * For example, using the akka script an application can be started with + * the following at the command line: + * {{{ + * bin/akka org.app.BootApp + * }}} + */ +trait Bootable { + /** + * Callback run on microkernel startup. + * Create initial actors and messages here. + */ + def startup(): Unit + + /** + * Callback run on microkernel shutdown. + * Shutdown actor systems here. + */ + def shutdown(): Unit +} + +/** + * Main class for running the microkernel. + */ +object Main { + val quiet = getBoolean("akka.kernel.quiet") + + def log(s: String) = if (!quiet) println(s) + + def main(args: Array[String]) = { + if (args.isEmpty) { + log("[error] No boot classes specified") + System.exit(1) + } + + log(banner) + log("Starting Akka...") + log("Running Akka " + ActorSystem.Version) + + val classLoader = createClassLoader() + + Thread.currentThread.setContextClassLoader(classLoader) + + val bootClasses: Seq[String] = args.toSeq + val bootables: Seq[Bootable] = bootClasses map { c ⇒ classLoader.loadClass(c).newInstance.asInstanceOf[Bootable] } + + for (bootable ← bootables) { + log("Starting up " + bootable.getClass.getName) + bootable.startup() + } + + addShutdownHook(bootables) + + log("Successfully started Akka") + } + + def createClassLoader(): ClassLoader = { + if (ActorSystem.GlobalHome.isDefined) { + val home = ActorSystem.GlobalHome.get + val deploy = new File(home, "deploy") + if (deploy.exists) { + loadDeployJars(deploy) + } else { + log("[warning] No deploy dir found at " + deploy) + Thread.currentThread.getContextClassLoader + } + } else { + log("[warning] 
Akka home is not defined") + Thread.currentThread.getContextClassLoader + } + } + + def loadDeployJars(deploy: File): ClassLoader = { + val jars = deploy.listFiles.filter(_.getName.endsWith(".jar")) + + val nestedJars = jars flatMap { jar ⇒ + val jarFile = new JarFile(jar) + val jarEntries = jarFile.entries.asScala.toArray.filter(_.getName.endsWith(".jar")) + jarEntries map { entry ⇒ new File("jar:file:%s!/%s" format (jarFile.getName, entry.getName)) } + } + + val urls = (jars ++ nestedJars) map { _.toURI.toURL } + + urls foreach { url ⇒ log("Deploying " + url) } + + new URLClassLoader(urls, Thread.currentThread.getContextClassLoader) + } + + def addShutdownHook(bootables: Seq[Bootable]): Unit = { + Runtime.getRuntime.addShutdownHook(new Thread(new Runnable { + def run = { + log("") + log("Shutting down Akka...") + + for (bootable ← bootables) { + log("Shutting down " + bootable.getClass.getName) + bootable.shutdown() + } + + log("Successfully shut down Akka") + } + })) + } + + def banner = """ +============================================================================== + + ZZ: + ZZZZ + ZZZZZZ + ZZZ' ZZZ + ~7 7ZZ' ZZZ + :ZZZ: IZZ' ZZZ + ,OZZZZ.~ZZ? ZZZ + ZZZZ' 'ZZZ$ ZZZ + . $ZZZ ~ZZ$ ZZZ + .=Z?. .ZZZO ~ZZ7 OZZ + .ZZZZ7..:ZZZ~ 7ZZZ ZZZ~ + .$ZZZ$Z+.ZZZZ ZZZ: ZZZ$ + .,ZZZZ?' =ZZO= .OZZ 'ZZZ + .$ZZZZ+ .ZZZZ IZZZ ZZZ$ + .ZZZZZ' .ZZZZ' .ZZZ$ ?ZZZ + .ZZZZZZ' .OZZZ? ?ZZZ 'ZZZ$ + .?ZZZZZZ' .ZZZZ? .ZZZ? 'ZZZO + .+ZZZZZZ?' .7ZZZZ' .ZZZZ :ZZZZ + .ZZZZZZ$' .?ZZZZZ' .~ZZZZ 'ZZZZ. 
+ + + NNNNN $NNNN+ + NNNNN $NNNN+ + NNNNN $NNNN+ + NNNNN $NNNN+ + NNNNN $NNNN+ + =NNNNNNNNND$ NNNNN DDDDDD: $NNNN+ DDDDDN NDDNNNNNNNN, + NNNNNNNNNNNNND NNNNN DNNNNN $NNNN+ 8NNNNN= :NNNNNNNNNNNNNN + NNNNN$ DNNNNN NNNNN $NNNNN~ $NNNN+ NNNNNN NNNNN, :NNNNN+ + ?DN~ NNNNN NNNNN MNNNNN $NNNN+:NNNNN7 $ND =NNNNN + DNNNNN NNNNNDNNNN$ $NNNNDNNNNN :DNNNNN + ZNDNNNNNNNNND NNNNNNNNNND, $NNNNNNNNNNN DNDNNNNNNNNNN + NNNNNNNDDINNNNN NNNNNNNNNNND $NNNNNNNNNNND ONNNNNNND8+NNNNN + :NNNND NNNNN NNNNNN DNNNN, $NNNNNO 7NNNND NNNNNO :NNNNN + DNNNN NNNNN NNNNN DNNNN $NNNN+ 8NNNNN NNNNN $NNNNN + DNNNNO NNNNNN NNNNN NNNNN $NNNN+ NNNNN$ NNNND, ,NNNNND + NNNNNNDDNNNNNNNN NNNNN =NNNNN $NNNN+ DNNNN? DNNNNNNDNNNNNNNND + NNNNNNNNN NNNN$ NNNNN 8NNNND $NNNN+ NNNNN= ,DNNNNNNND NNNNN$ + +============================================================================== +""" +} diff --git a/akka-kernel/src/main/scala/akka/servlet/Initializer.scala b/akka-kernel/src/main/scala/akka/servlet/Initializer.scala deleted file mode 100644 index b91e5ae439..0000000000 --- a/akka-kernel/src/main/scala/akka/servlet/Initializer.scala +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - */ - -package akka.servlet - -import akka.cluster.BootableRemoteActorService -import akka.actor.BootableActorLoaderService -import akka.config.Config -import akka.util.{ Bootable, AkkaLoader } - -import javax.servlet.{ ServletContextListener, ServletContextEvent } - -/** - * This class can be added to web.xml mappings as a listener to start and postStop Akka. - * - * - * ... - * - * akka.servlet.Initializer - * - * ... 
- * - */ -class Initializer extends ServletContextListener { - lazy val loader = new AkkaLoader - - def contextDestroyed(e: ServletContextEvent): Unit = - loader.shutdown - - def contextInitialized(e: ServletContextEvent): Unit = - loader.boot(true, new BootableActorLoaderService with BootableRemoteActorService) -} diff --git a/akka-kernel/src/main/scripts/akka b/akka-kernel/src/main/scripts/akka new file mode 100755 index 0000000000..595bc6e34c --- /dev/null +++ b/akka-kernel/src/main/scripts/akka @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +declare quiet="false" + +while true; do + case "$1" in + -q | --quiet ) quiet="true"; shift ;; + * ) break ;; + esac +done + +[[ "$@" ]] || { + echo "No boot classes specified" + echo "Usage: bin/akka org.somewhere.BootClass" + exit 1 +} + +declare AKKA_HOME="$(cd "$(cd "$(dirname "$0")"; pwd -P)"/..; pwd)" + +[ -n "$JAVA_OPTS" ] || JAVA_OPTS="-Xmx1024M -Xms1024M -Xss1M -XX:MaxPermSize=256M -XX:+UseParallelGC -XX:OnOutOfMemoryError=\"kill -9 %p\"" + +[ -n "$AKKA_CLASSPATH" ] || AKKA_CLASSPATH="$AKKA_HOME/lib/scala-library.jar:$AKKA_HOME/lib/akka/*:$AKKA_HOME/config" + +java "$JAVA_OPTS" -cp "$AKKA_CLASSPATH" -Dakka.home="$AKKA_HOME" -Dakka.kernel.quiet=$quiet akka.kernel.Main "$@" diff --git a/scripts/microkernel/akka.bat b/akka-kernel/src/main/scripts/akka.bat similarity index 70% rename from scripts/microkernel/akka.bat rename to akka-kernel/src/main/scripts/akka.bat index 59d1a91a48..b6c2f8628a 100644 --- a/scripts/microkernel/akka.bat +++ b/akka-kernel/src/main/scripts/akka.bat @@ -1,6 +1,7 @@ @echo off + set AKKA_HOME=%~dp0.. 
-set JAVA_OPTS=-Xms1024M -Xmx1024M -Xss1M -XX:MaxPermSize=256M -XX:+UseParallelGC +set JAVA_OPTS=-Xmx1024M -Xms1024M -Xss1M -XX:MaxPermSize=256M -XX:+UseParallelGC set AKKA_CLASSPATH=%AKKA_HOME%\lib\scala-library.jar;%AKKA_HOME%\config;%AKKA_HOME%\lib\akka\* -java %JAVA_OPTS% -cp "%AKKA_CLASSPATH%" -Dakka.home="%AKKA_HOME%" akka.kernel.Main +java %JAVA_OPTS% -cp "%AKKA_CLASSPATH%" -Dakka.home="%AKKA_HOME%" akka.kernel.Main %* diff --git a/akka-remote/src/main/resources/reference.conf b/akka-remote/src/main/resources/reference.conf index 4083a64ea2..0ffd461a25 100644 --- a/akka-remote/src/main/resources/reference.conf +++ b/akka-remote/src/main/resources/reference.conf @@ -3,7 +3,7 @@ ##################################### # This the reference config file has all the default settings. -# Make your edits/overrides in your akka.conf. +# Make your edits/overrides in your application.conf. akka { @@ -13,18 +13,22 @@ akka { default { - remote = "" # if this is set to a valid remote address, the named actor will be deployed at that node - # e.g. "akka://sys@host:port" + # if this is set to a valid remote address, the named actor will be deployed at that node + # e.g. "akka://sys@host:port" + remote = "" target { - nodes = [] # A list of hostnames and ports for instantiating the children of a non-direct router - # The format should be on "akka://sys@host:port", where: - # - sys is the remote actor system name - # - hostname can be either hostname or IP address the remote actor should connect to - # - port should be the port for the remote server on the other node - # The number of actor instances to be spawned is still taken from the nr-of-instances - # setting as for local routers; the instances will be distributed round-robin among the - # given nodes. 
+ + # A list of hostnames and ports for instantiating the children of a non-direct router + # The format should be on "akka://sys@host:port", where: + # - sys is the remote actor system name + # - hostname can be either hostname or IP address the remote actor should connect to + # - port should be the port for the remote server on the other node + # The number of actor instances to be spawned is still taken from the nr-of-instances + # setting as for local routers; the instances will be distributed round-robin among the + # given nodes. + nodes = [] + } } } @@ -35,50 +39,76 @@ akka { use-compression = off - secure-cookie = "" # Generate your own with '$AKKA_HOME/scripts/generate_config_with_secure_cookie.sh' - # or using 'akka.util.Crypt.generateSecureCookie' + # Generate your own with '$AKKA_HOME/scripts/generate_config_with_secure_cookie.sh' + # or using 'akka.util.Crypt.generateSecureCookie' + secure-cookie = "" - remote-daemon-ack-timeout = 30s # Timeout for ACK of cluster operations, lik checking actor out etc. + # Timeout for ACK of cluster operations, lik checking actor out etc. + remote-daemon-ack-timeout = 30s - use-passive-connections = on # Reuse inbound connections for outbound messages + # Reuse inbound connections for outbound messages + use-passive-connections = on + + # accrual failure detection config + failure-detector { + + # defines the failure detector threshold + # A low threshold is prone to generate many wrong suspicions but ensures a + # quick detection in the event of a real crash. Conversely, a high threshold + # generates fewer mistakes but needs more time to detect actual crashes + threshold = 8 - failure-detector { # accrual failure detection config - threshold = 8 # defines the failure detector threshold - # A low threshold is prone to generate many wrong suspicions but ensures a - # quick detection in the event of a real crash. 
Conversely, a high threshold - # generates fewer mistakes but needs more time to detect actual crashes max-sample-size = 1000 } - + gossip { initialDelay = 5s frequency = 1s } - - compute-grid-dispatcher { # The dispatcher used for remote system messages - name = ComputeGridDispatcher # defaults to same settings as default-dispatcher + + # The dispatcher used for remote system messages + compute-grid-dispatcher { + # defaults to same settings as default-dispatcher + name = ComputeGridDispatcher } server { - hostname = "" # The hostname or ip to bind the remoting to, InetAddress.getLocalHost.getHostAddress is used if empty - port = 2552 # The default remote server port clients should connect to. Default is 2552 (AKKA) - message-frame-size = 1 MiB # Increase this if you want to be able to send messages with large payloads - connection-timeout = 120s # Timeout duration - require-cookie = off # Should the remote server require that it peers share the same secure-cookie (defined in the 'remote' section)? - untrusted-mode = off # Enable untrusted mode for full security of server managed actors, allows untrusted clients to connect. - backlog = 4096 # Sets the size of the connection backlog + # The hostname or ip to bind the remoting to, InetAddress.getLocalHost.getHostAddress is used if empty + hostname = "" + + # The default remote server port clients should connect to. Default is 2552 (AKKA) + port = 2552 + + # Increase this if you want to be able to send messages with large payloads + message-frame-size = 1 MiB + + # Timeout duration + connection-timeout = 120s + + # Should the remote server require that it peers share the same secure-cookie (defined in the 'remote' section)? + require-cookie = off + + # Enable untrusted mode for full security of server managed actors, allows untrusted clients to connect. 
+ untrusted-mode = off + + # Sets the size of the connection backlog + backlog = 4096 } client { buffering { - retry-message-send-on-failure = off # Should message buffering on remote client error be used (buffer flushed on successful reconnect) - capacity = -1 # If negative (or zero) then an unbounded mailbox is used (default) - # If positive then a bounded mailbox is used and the capacity is set using the property + # Should message buffering on remote client error be used (buffer flushed on successful reconnect) + retry-message-send-on-failure = off + + # If negative (or zero) then an unbounded mailbox is used (default) + # If positive then a bounded mailbox is used and the capacity is set using the property + capacity = -1 } reconnect-delay = 5s read-timeout = 3600s message-frame-size = 1 MiB - reconnection-time-window = 600s # Maximum time window that a client should try to reconnect for + # Maximum time window that a client should try to reconnect for + reconnection-time-window = 600s } } diff --git a/akka-remote/src/main/scala/akka/remote/BootableRemoteActorService.scala b/akka-remote/src/main/scala/akka/remote/BootableRemoteActorService.scala deleted file mode 100644 index e3bd903c07..0000000000 --- a/akka-remote/src/main/scala/akka/remote/BootableRemoteActorService.scala +++ /dev/null @@ -1,47 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - */ - -package akka.remote - -import akka.actor.{ Actor, BootableActorLoaderService } -import akka.util.{ ReflectiveAccess, Bootable } - -// TODO: remove me - remoting is enabled through the RemoteActorRefProvider - -/** - * This bundle/service is responsible for booting up and shutting down the remote actors facility. - *

- * It is used in Kernel. - */ -/* -trait BootableRemoteActorService extends Bootable { - self: BootableActorLoaderService ⇒ - - def settings: RemoteServerSettings - - protected lazy val remoteServerThread = new Thread(new Runnable() { - def run = system.remote.start(self.applicationLoader.getOrElse(null)) //Use config host/port - }, "Akka RemoteModule Service") - - def startRemoteService() { remoteServerThread.start() } - - abstract override def onLoad() { - if (system.reflective.ClusterModule.isEnabled && settings.isRemotingEnabled) { - system.eventHandler.info(this, "Initializing Remote Actors Service...") - startRemoteService() - system.eventHandler.info(this, "Remote Actors Service initialized") - } - super.onLoad() - } - - abstract override def onUnload() { - system.eventHandler.info(this, "Shutting down Remote Actors Service") - - system.remote.shutdown() - if (remoteServerThread.isAlive) remoteServerThread.join(1000) - system.eventHandler.info(this, "Remote Actors Service has been shut down") - super.onUnload() - } -} -*/ diff --git a/akka-remote/src/main/scala/akka/remote/Gossiper.scala b/akka-remote/src/main/scala/akka/remote/Gossiper.scala index 0b8044f9c4..121ead58bc 100644 --- a/akka-remote/src/main/scala/akka/remote/Gossiper.scala +++ b/akka-remote/src/main/scala/akka/remote/Gossiper.scala @@ -22,6 +22,8 @@ import scala.collection.immutable.Map import scala.annotation.tailrec import com.google.protobuf.ByteString +import java.util.concurrent.TimeoutException +import akka.dispatch.Await /** * Interface for node membership change listener. @@ -250,18 +252,13 @@ class Gossiper(remote: Remote, system: ActorSystemImpl) { throw new IllegalStateException("Connection for [" + peer + "] is not set up")) try { - (connection ? 
(toRemoteMessage(newGossip), remoteSettings.RemoteSystemDaemonAckTimeout)).as[Status] match { - case Some(Success(receiver)) ⇒ - log.debug("Gossip sent to [{}] was successfully received", receiver) - - case Some(Failure(cause)) ⇒ - log.error(cause, cause.toString) - - case None ⇒ - val error = new RemoteException("Gossip to [%s] timed out".format(connection.path)) - log.error(error, error.toString) + val t = remoteSettings.RemoteSystemDaemonAckTimeout + Await.result(connection ? (toRemoteMessage(newGossip), t), t) match { + case Success(receiver) ⇒ log.debug("Gossip sent to [{}] was successfully received", receiver) + case Failure(cause) ⇒ log.error(cause, cause.toString) } } catch { + case e: TimeoutException ⇒ log.error(e, "Gossip to [%s] timed out".format(connection.path)) case e: Exception ⇒ log.error(e, "Could not gossip to [{}] due to: {}", connection.path, e.toString) } diff --git a/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala b/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala index 92582c4168..5a8afd5a5b 100644 --- a/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala @@ -12,9 +12,9 @@ import akka.remote.RemoteProtocol._ import akka.remote.RemoteProtocol.RemoteSystemDaemonMessageType._ import com.google.protobuf.ByteString import akka.event.EventStream -import akka.serialization.SerializationExtension -import akka.serialization.Serialization +import akka.dispatch.Promise import akka.config.ConfigurationException +import java.util.concurrent.{ TimeoutException } /** * Remote ActorRefProvider. Starts up actor on remote node and creates a RemoteActorRef representing it. 
@@ -199,7 +199,7 @@ private[akka] class RemoteActorRef private[akka] ( a.result case None ⇒ this.!(message)(null) - new DefaultPromise[Any](0)(provider.dispatcher) + Promise[Any]()(provider.dispatcher) } } diff --git a/akka-remote/src/main/scala/akka/remote/RemoteConnectionManager.scala b/akka-remote/src/main/scala/akka/remote/RemoteConnectionManager.scala index 9f623ff853..03aa5ddc62 100644 --- a/akka-remote/src/main/scala/akka/remote/RemoteConnectionManager.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteConnectionManager.scala @@ -57,7 +57,7 @@ class RemoteConnectionManager( def isEmpty: Boolean = connections.connections.isEmpty def shutdown() { - state.get.iterable foreach (_.stop()) // shut down all remote connections + state.get.iterable foreach (system.stop(_)) // shut down all remote connections } @tailrec @@ -136,7 +136,7 @@ class RemoteConnectionManager( //if we are not able to update the state, we just try again. if (!state.compareAndSet(oldState, newState)) { // we failed, need compensating action - newConnection.stop() // stop the new connection actor and try again + system.stop(newConnection) // stop the new connection actor and try again putIfAbsent(address, newConnectionFactory) // recur } else { // we succeeded diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmSpec.scala index 0a6098f27d..d789069522 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmSpec.scala +++ b/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmSpec.scala @@ -4,6 +4,7 @@ import akka.remote._ import akka.routing._ import akka.actor.{ Actor, Props } import akka.testkit._ +import akka.dispatch.Await object DirectRoutedRemoteActorMultiJvmSpec { val NrOfNodes = 2 @@ -42,8 +43,7 @@ class DirectRoutedRemoteActorMultiJvmNode2 extends 
AkkaRemoteSpec with DefaultTi val actor = system.actorOf(Props[SomeActor], "service-hello") actor.isInstanceOf[RemoteActorRef] must be(true) - val result = (actor ? "identify").get - result must equal("node1") + Await.result(actor ? "identify", timeout.duration) must equal("node1") barrier("done") } diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmSpec.scala index 1e8c45112e..ae34fbd440 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmSpec.scala +++ b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmSpec.scala @@ -3,6 +3,7 @@ package akka.remote.new_remote_actor import akka.actor.{ Actor, Props } import akka.remote._ import akka.testkit.DefaultTimeout +import akka.dispatch.Await object NewRemoteActorMultiJvmSpec { val NrOfNodes = 2 @@ -40,8 +41,7 @@ class NewRemoteActorMultiJvmNode2 extends AkkaRemoteSpec with DefaultTimeout { barrier("start") val actor = system.actorOf(Props[SomeActor], "service-hello") - val result = (actor ? "identify").get - result must equal("node1") + Await.result(actor ? 
"identify", timeout.duration) must equal("node1") barrier("done") } diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmSpec.scala index 3efc3c5ce5..9d20907089 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmSpec.scala +++ b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmSpec.scala @@ -4,13 +4,14 @@ import akka.actor.{ Actor, Props } import akka.remote._ import akka.routing._ import akka.testkit.DefaultTimeout +import akka.dispatch.Await object RandomRoutedRemoteActorMultiJvmSpec { val NrOfNodes = 4 class SomeActor extends Actor with Serializable { def receive = { case "hit" ⇒ sender ! context.system.nodename - case "end" ⇒ self.stop() + case "end" ⇒ context.stop(self) } } } @@ -74,7 +75,7 @@ class RandomRoutedRemoteActorMultiJvmNode4 extends AkkaRemoteSpec with DefaultTi for (i ← 0 until iterationCount) { for (k ← 0 until connectionCount) { - val nodeName = (actor ? "hit").as[String].getOrElse(fail("No id returned by actor")) + val nodeName = Await.result(actor ? 
"hit", timeout.duration).toString replies = replies + (nodeName -> (replies(nodeName) + 1)) } } diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmSpec.scala index 786f278a7e..0caf841953 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmSpec.scala +++ b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmSpec.scala @@ -4,13 +4,14 @@ import akka.actor.{ Actor, Props } import akka.remote._ import akka.routing._ import akka.testkit.DefaultTimeout +import akka.dispatch.Await object RoundRobinRoutedRemoteActorMultiJvmSpec { val NrOfNodes = 4 class SomeActor extends Actor with Serializable { def receive = { case "hit" ⇒ sender ! context.system.nodename - case "end" ⇒ self.stop() + case "end" ⇒ context.stop(self) } } } @@ -74,7 +75,7 @@ class RoundRobinRoutedRemoteActorMultiJvmNode4 extends AkkaRemoteSpec with Defau for (i ← 0 until iterationCount) { for (k ← 0 until connectionCount) { - val nodeName = (actor ? "hit").as[String].getOrElse(fail("No id returned by actor")) + val nodeName = Await.result(actor ? 
"hit", timeout.duration).toString replies = replies + (nodeName -> (replies(nodeName) + 1)) } } diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmSpec.scala index 10d6e22f58..c985bf2152 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmSpec.scala +++ b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmSpec.scala @@ -11,7 +11,7 @@ object ScatterGatherRoutedRemoteActorMultiJvmSpec { class SomeActor extends Actor with Serializable { def receive = { case "hit" ⇒ sender ! context.system.nodename - case "end" ⇒ self.stop() + case "end" ⇒ context.stop(self) } } } diff --git a/akka-remote/src/test/scala/akka/remote/RemoteCommunicationSpec.scala b/akka-remote/src/test/scala/akka/remote/RemoteCommunicationSpec.scala index 62de045fb5..dd62ae48e2 100644 --- a/akka-remote/src/test/scala/akka/remote/RemoteCommunicationSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/RemoteCommunicationSpec.scala @@ -6,6 +6,7 @@ package akka.remote import akka.testkit._ import akka.actor._ import com.typesafe.config._ +import akka.dispatch.Await object RemoteCommunicationSpec { class Echo extends Actor { @@ -61,7 +62,7 @@ akka { implicit val timeout = system.settings.ActorTimeout override def atTermination() { - other.stop() + other.shutdown() } "Remoting" must { @@ -80,7 +81,7 @@ akka { } "support ask" in { - (here ? "ping").get match { + Await.result(here ? "ping", timeout.duration) match { case ("pong", s: AskActorRef) ⇒ // good case m ⇒ fail(m + " was not (pong, AskActorRef)") } @@ -103,7 +104,7 @@ akka { expectMsg("preRestart") r ! 42 expectMsg(42) - r.stop() + system.stop(r) expectMsg("postStop") } @@ -124,10 +125,10 @@ akka { myref ! 
43 expectMsg(43) lastSender must be theSameInstanceAs remref - (l ? "child/..").as[ActorRef].get must be theSameInstanceAs l - (system.actorFor(system / "looker" / "child") ? "..").as[ActorRef].get must be theSameInstanceAs l + Await.result(l ? "child/..", timeout.duration).asInstanceOf[AnyRef] must be theSameInstanceAs l + Await.result(system.actorFor(system / "looker" / "child") ? "..", timeout.duration).asInstanceOf[AnyRef] must be theSameInstanceAs l } } -} \ No newline at end of file +} diff --git a/akka-remote/src/test/scala/akka/remote/RemoteRouterSpec.scala b/akka-remote/src/test/scala/akka/remote/RemoteRouterSpec.scala index 60209d087b..f183a940a7 100644 --- a/akka-remote/src/test/scala/akka/remote/RemoteRouterSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/RemoteRouterSpec.scala @@ -40,7 +40,7 @@ akka { val other = ActorSystem("remote_sys", conf) override def atTermination() { - other.stop() + other.shutdown() } "A Remote Router" must { @@ -55,4 +55,4 @@ akka { } -} \ No newline at end of file +} diff --git a/akka-samples/akka-sample-hello-kernel/src/main/scala/sample/kernel/hello/HelloKernel.scala b/akka-samples/akka-sample-hello-kernel/src/main/scala/sample/kernel/hello/HelloKernel.scala new file mode 100644 index 0000000000..abf526484c --- /dev/null +++ b/akka-samples/akka-sample-hello-kernel/src/main/scala/sample/kernel/hello/HelloKernel.scala @@ -0,0 +1,37 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. + */ +package sample.kernel.hello + +import akka.actor.{ Actor, ActorSystem, Props } +import akka.kernel.Bootable + +case object Start + +class HelloActor extends Actor { + val worldActor = context.actorOf(Props[WorldActor]) + + def receive = { + case Start ⇒ worldActor ! "Hello" + case message: String ⇒ + println("Received message '%s'" format message) + } +} + +class WorldActor extends Actor { + def receive = { + case message: String ⇒ sender ! 
(message.toUpperCase + " world!") + } +} + +class HelloKernel extends Bootable { + val system = ActorSystem("hellokernel") + + def startup = { + system.actorOf(Props[HelloActor]) ! Start + } + + def shutdown = { + system.shutdown() + } +} diff --git a/akka-samples/akka-sample-hello/src/main/scala/sample/hello/Main.scala b/akka-samples/akka-sample-hello/src/main/scala/sample/hello/Main.scala index 710a099312..2921c2d27c 100644 --- a/akka-samples/akka-sample-hello/src/main/scala/sample/hello/Main.scala +++ b/akka-samples/akka-sample-hello/src/main/scala/sample/hello/Main.scala @@ -20,7 +20,7 @@ class HelloActor extends Actor { case Start ⇒ worldActor ! "Hello" case s: String ⇒ println("Received message: %s".format(s)) - context.system.stop() + context.system.shutdown() } } diff --git a/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala b/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala index f2cc5a288d..1fa4874408 100644 --- a/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala +++ b/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala @@ -4,7 +4,6 @@ package akka.spring import foo.{ PingActor, IMyPojo, MyPojo } -import akka.dispatch.FutureTimeoutException import org.scalatest.matchers.ShouldMatchers import org.scalatest.junit.JUnitRunner import org.junit.runner.RunWith @@ -14,10 +13,10 @@ import org.springframework.context.ApplicationContext import org.springframework.context.support.ClassPathXmlApplicationContext import org.springframework.core.io.{ ClassPathResource, Resource } import org.scalatest.{ BeforeAndAfterAll, FeatureSpec } -import java.util.concurrent.CountDownLatch import akka.remote.netty.NettyRemoteSupport import akka.actor._ import akka.actor.Actor._ +import java.util.concurrent.{TimeoutException, CountDownLatch} object RemoteTypedActorLog { import java.util.concurrent.{ LinkedBlockingQueue, TimeUnit, BlockingQueue } @@ -89,9 +88,9 @@ class TypedActorSpringFeatureTest extends FeatureSpec with ShouldMatchers with 
B assert(MyPojo.lastOneWayMessage === "hello 1") } - scenario("FutureTimeoutException when timed out") { + scenario("TimeoutException when timed out") { val myPojo = getTypedActorFromContext("/typed-actor-config.xml", "simple-typed-actor") - evaluating { myPojo.longRunning() } should produce[FutureTimeoutException] + evaluating { myPojo.longRunning() } should produce[TimeoutException] } scenario("typed-actor with timeout") { diff --git a/akka-stm/src/main/resources/reference.conf b/akka-stm/src/main/resources/reference.conf index 98a3e70d5d..05aa9b433c 100644 --- a/akka-stm/src/main/resources/reference.conf +++ b/akka-stm/src/main/resources/reference.conf @@ -3,19 +3,21 @@ ################################## # This the reference config file has all the default settings. -# Make your edits/overrides in your akka.conf. +# Make your edits/overrides in your application.conf. akka { stm { - fair = on # Should global transactions be fair or non-fair (non fair yield better performance) + # Should global transactions be fair or non-fair (non fair yield better performance) + fair = on max-retries = 1000 - timeout = 5s # Default timeout for blocking transactions and transaction set - write-skew = on - blocking-allowed = off - interruptible = off - speculative = on - quick-release = on + # Default timeout for blocking transactions and transaction set + timeout = 5s + write-skew = on + blocking-allowed = off + interruptible = off + speculative = on + quick-release = on propagation = "requires" trace-level = "none" } diff --git a/akka-stm/src/main/scala/akka/agent/Agent.scala b/akka-stm/src/main/scala/akka/agent/Agent.scala index cfe618ce47..25849c31f3 100644 --- a/akka-stm/src/main/scala/akka/agent/Agent.scala +++ b/akka-stm/src/main/scala/akka/agent/Agent.scala @@ -8,7 +8,7 @@ import akka.actor.ActorSystem import akka.actor._ import akka.stm._ import akka.japi.{ Function ⇒ JFunc, Procedure ⇒ JProc } -import akka.dispatch.{ PinnedDispatcher, UnboundedMailbox, DefaultPromise, 
Dispatchers, Future } +import akka.dispatch._ /** * Used internally to send functions. @@ -123,7 +123,7 @@ class Agent[T](initialValue: T, system: ActorSystem) { def alter(f: T ⇒ T)(timeout: Timeout): Future[T] = { def dispatch = updater.?(Update(f), timeout).asInstanceOf[Future[T]] if (Stm.activeTransaction) { - val result = new DefaultPromise[T](timeout)(system.dispatcher) + val result = Promise[T]()(system.dispatcher) get //Join xa deferred { result completeWith dispatch } //Attach deferred-block to current transaction result @@ -134,7 +134,7 @@ class Agent[T](initialValue: T, system: ActorSystem) { * Dispatch a new value for the internal state. Behaves the same * as sending a function (x => newValue). */ - def send(newValue: T): Unit = send(x ⇒ newValue) + def send(newValue: T): Unit = send(_ ⇒ newValue) /** * Dispatch a new value for the internal state. Behaves the same @@ -166,7 +166,7 @@ class Agent[T](initialValue: T, system: ActorSystem) { * still be executed in order. */ def alterOff(f: T ⇒ T)(timeout: Timeout): Future[T] = { - val result = new DefaultPromise[T](timeout)(system.dispatcher) + val result = Promise[T]()(system.dispatcher) send((value: T) ⇒ { suspend() val pinnedDispatcher = new PinnedDispatcher(system.dispatcherFactory.prerequisites, null, "agent-alter-off", UnboundedMailbox(), system.settings.ActorTimeout.duration) @@ -186,7 +186,7 @@ class Agent[T](initialValue: T, system: ActorSystem) { /** * Gets this agent's value after all currently queued updates have completed. */ - def await(implicit timeout: Timeout): T = future.await.result.get + def await(implicit timeout: Timeout): T = Await.result(future, timeout.duration) /** * Map this agent to a new agent, applying the function to the internal state. 
@@ -302,8 +302,8 @@ class ThreadBasedAgentUpdater[T](agent: Agent[T]) extends Actor { sender.tell(atomic(txFactory) { agent.ref alter update.function.asInstanceOf[T ⇒ T] }) } finally { agent.resume() - self.stop() + context.stop(self) } - case _ ⇒ self.stop() + case _ ⇒ context.stop(self) } } diff --git a/akka-stm/src/test/java/akka/stm/example/EitherOrElseExample.java b/akka-stm/src/test/java/akka/stm/example/EitherOrElseExample.java index a8f3fd475c..61d172e82f 100644 --- a/akka-stm/src/test/java/akka/stm/example/EitherOrElseExample.java +++ b/akka-stm/src/test/java/akka/stm/example/EitherOrElseExample.java @@ -24,6 +24,6 @@ public class EitherOrElseExample { } }.execute(); - brancher.stop(); + application.stop(brancher); } } diff --git a/akka-stm/src/test/java/akka/stm/example/RetryExample.java b/akka-stm/src/test/java/akka/stm/example/RetryExample.java index f15850d232..590e05d94e 100644 --- a/akka-stm/src/test/java/akka/stm/example/RetryExample.java +++ b/akka-stm/src/test/java/akka/stm/example/RetryExample.java @@ -46,8 +46,8 @@ public class RetryExample { System.out.println("Account 2: " + acc2); // Account 2: 600.0 - transferer.stop(); + application.stop(transferer); - application.stop(); + application.shutdown(); } } diff --git a/akka-stm/src/test/java/akka/transactor/example/UntypedCoordinatedExample.java b/akka-stm/src/test/java/akka/transactor/example/UntypedCoordinatedExample.java index 9baf0f1485..d5b236694f 100644 --- a/akka-stm/src/test/java/akka/transactor/example/UntypedCoordinatedExample.java +++ b/akka-stm/src/test/java/akka/transactor/example/UntypedCoordinatedExample.java @@ -3,46 +3,37 @@ package akka.transactor.example; import akka.actor.ActorSystem; import akka.actor.ActorRef; import akka.actor.Props; +import akka.dispatch.Await; import akka.dispatch.Future; import akka.testkit.AkkaSpec; import akka.transactor.Coordinated; +import akka.util.Duration; +import java.util.concurrent.TimeUnit; + public class UntypedCoordinatedExample { public 
static void main(String[] args) throws InterruptedException { - ActorSystem application = ActorSystem.create("UntypedCoordinatedExample", AkkaSpec.testConf()); + ActorSystem app = ActorSystem.create("UntypedCoordinatedExample", AkkaSpec.testConf()); - ActorRef counter1 = application.actorOf(new Props().withCreator(UntypedCoordinatedCounter.class)); - ActorRef counter2 = application.actorOf(new Props().withCreator(UntypedCoordinatedCounter.class)); + ActorRef counter1 = app.actorOf(new Props().withCreator(UntypedCoordinatedCounter.class)); + ActorRef counter2 = app.actorOf(new Props().withCreator(UntypedCoordinatedCounter.class)); counter1.tell(new Coordinated(new Increment(counter2))); Thread.sleep(3000); long timeout = 5000; + Duration d = Duration.create(timeout, TimeUnit.MILLISECONDS); - Future future1 = counter1.ask("GetCount", timeout); - Future future2 = counter2.ask("GetCount", timeout); + Future future1 = counter1.ask("GetCount", timeout); + Future future2 = counter2.ask("GetCount", timeout); - future1.await(); - if (future1.isCompleted()) { - if (future1.result().isDefined()) { - int result = (Integer) future1.result().get(); - System.out.println("counter 1: " + result); - } - } + int count1 = (Integer) Await.result(future1, d); + System.out.println("counter 1: " + count1); + int count2 = (Integer) Await.result(future2, d); + System.out.println("counter 1: " + count2); - future2.await(); - if (future2.isCompleted()) { - if (future2.result().isDefined()) { - int result = (Integer) future2.result().get(); - System.out.println("counter 2: " + result); - } - } - - counter1.stop(); - counter2.stop(); - - application.stop(); + app.shutdown(); } } diff --git a/akka-stm/src/test/java/akka/transactor/example/UntypedTransactorExample.java b/akka-stm/src/test/java/akka/transactor/example/UntypedTransactorExample.java index 55e28f872f..8a63415c27 100644 --- a/akka-stm/src/test/java/akka/transactor/example/UntypedTransactorExample.java +++ 
b/akka-stm/src/test/java/akka/transactor/example/UntypedTransactorExample.java @@ -3,45 +3,36 @@ package akka.transactor.example; import akka.actor.ActorSystem; import akka.actor.ActorRef; import akka.actor.Props; +import akka.dispatch.Await; import akka.dispatch.Future; import akka.testkit.AkkaSpec; +import akka.util.Duration; + +import java.util.concurrent.TimeUnit; public class UntypedTransactorExample { public static void main(String[] args) throws InterruptedException { - ActorSystem application = ActorSystem.create("UntypedTransactorExample", AkkaSpec.testConf()); + ActorSystem app = ActorSystem.create("UntypedTransactorExample", AkkaSpec.testConf()); - ActorRef counter1 = application.actorOf(new Props().withCreator(UntypedCounter.class)); - ActorRef counter2 = application.actorOf(new Props().withCreator(UntypedCounter.class)); + ActorRef counter1 = app.actorOf(new Props().withCreator(UntypedCounter.class)); + ActorRef counter2 = app.actorOf(new Props().withCreator(UntypedCounter.class)); counter1.tell(new Increment(counter2)); Thread.sleep(3000); long timeout = 5000; + Duration d = Duration.create(timeout, TimeUnit.MILLISECONDS); - Future future1 = counter1.ask("GetCount", timeout); - Future future2 = counter2.ask("GetCount", timeout); + Future future1 = counter1.ask("GetCount", timeout); + Future future2 = counter2.ask("GetCount", timeout); - future1.await(); - if (future1.isCompleted()) { - if (future1.result().isDefined()) { - int result = (Integer) future1.result().get(); - System.out.println("counter 1: " + result); - } - } + int count1 = (Integer) Await.result(future1, d); + System.out.println("counter 1: " + count1); + int count2 = (Integer) Await.result(future2, d); + System.out.println("counter 1: " + count2); - future2.await(); - if (future2.isCompleted()) { - if (future2.result().isDefined()) { - int result = (Integer) future2.result().get(); - System.out.println("counter 2: " + result); - } - } - - counter1.stop(); - counter2.stop(); - - 
application.stop(); + app.shutdown(); } } diff --git a/akka-stm/src/test/java/akka/transactor/test/UntypedCoordinatedIncrementTest.java b/akka-stm/src/test/java/akka/transactor/test/UntypedCoordinatedIncrementTest.java index a90e0a1952..e09d15b74d 100644 --- a/akka-stm/src/test/java/akka/transactor/test/UntypedCoordinatedIncrementTest.java +++ b/akka-stm/src/test/java/akka/transactor/test/UntypedCoordinatedIncrementTest.java @@ -2,6 +2,8 @@ package akka.transactor.test; import static org.junit.Assert.*; +import akka.dispatch.Await; +import akka.util.Duration; import org.junit.After; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -10,7 +12,6 @@ import org.junit.Before; import akka.actor.ActorSystem; import akka.transactor.Coordinated; -import akka.actor.Actors; import akka.actor.ActorRef; import akka.actor.Props; import akka.actor.UntypedActor; @@ -28,7 +29,6 @@ import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import scala.Option; import scala.collection.JavaConverters; import scala.collection.Seq; @@ -44,7 +44,7 @@ public class UntypedCoordinatedIncrementTest { @AfterClass public static void afterAll() { - system.stop(); + system.shutdown(); system = null; } @@ -81,8 +81,8 @@ public class UntypedCoordinatedIncrementTest { } catch (InterruptedException exception) { } for (ActorRef counter : counters) { - Future future = counter.ask("GetCount", askTimeout); - assertEquals(1, ((Integer) future.get()).intValue()); + Future future = counter.ask("GetCount", askTimeout); + assertEquals(1, ((Integer) Await.result(future, Duration.create(timeout, TimeUnit.SECONDS))).intValue()); } } @@ -102,8 +102,8 @@ public class UntypedCoordinatedIncrementTest { } catch (InterruptedException exception) { } for (ActorRef counter : counters) { - Future future = counter.ask("GetCount", askTimeout); - assertEquals(0, ((Integer) future.get()).intValue()); + Futurefuture = counter.ask("GetCount", askTimeout); + 
assertEquals(0,((Integer) Await.result(future, Duration.create(timeout, TimeUnit.SECONDS))).intValue()); } } @@ -113,6 +113,6 @@ public class UntypedCoordinatedIncrementTest { @After public void stop() { - application.stop(); + application.shutdown(); } } diff --git a/akka-stm/src/test/java/akka/transactor/test/UntypedTransactorTest.java b/akka-stm/src/test/java/akka/transactor/test/UntypedTransactorTest.java index 528a2a14f8..db5528f10c 100644 --- a/akka-stm/src/test/java/akka/transactor/test/UntypedTransactorTest.java +++ b/akka-stm/src/test/java/akka/transactor/test/UntypedTransactorTest.java @@ -2,6 +2,8 @@ package akka.transactor.test; import static org.junit.Assert.*; +import akka.dispatch.Await; +import akka.util.Duration; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -9,7 +11,6 @@ import org.junit.Before; import akka.actor.ActorSystem; import akka.actor.ActorRef; -import akka.actor.Actors; import akka.actor.Props; import akka.actor.UntypedActor; import akka.actor.UntypedActorFactory; @@ -25,7 +26,6 @@ import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import scala.Option; import scala.collection.JavaConverters; import scala.collection.Seq; import akka.testkit.AkkaSpec; @@ -41,7 +41,7 @@ public class UntypedTransactorTest { @AfterClass public static void afterAll() { - system.stop(); + system.shutdown(); system = null; } @@ -77,16 +77,9 @@ public class UntypedTransactorTest { } catch (InterruptedException exception) { } for (ActorRef counter : counters) { - Future future = counter.ask("GetCount", askTimeout); - future.await(); - if (future.isCompleted()) { - Option resultOption = future.result(); - if (resultOption.isDefined()) { - Object result = resultOption.get(); - int count = (Integer) result; - assertEquals(1, count); - } - } + Future future = counter.ask("GetCount", askTimeout); + int count = (Integer) Await.result(future, Duration.create(askTimeout, 
TimeUnit.MILLISECONDS)); + assertEquals(1, count); } } @@ -106,16 +99,9 @@ public class UntypedTransactorTest { } catch (InterruptedException exception) { } for (ActorRef counter : counters) { - Future future = counter.ask("GetCount", askTimeout); - future.await(); - if (future.isCompleted()) { - Option resultOption = future.result(); - if (resultOption.isDefined()) { - Object result = resultOption.get(); - int count = (Integer) result; - assertEquals(0, count); - } - } + Future future = counter.ask("GetCount", askTimeout); + int count = (Integer) Await.result(future, Duration.create(askTimeout, TimeUnit.MILLISECONDS)); + assertEquals(0, count); } } diff --git a/akka-stm/src/test/scala/akka/agent/test/AgentSpec.scala b/akka-stm/src/test/scala/akka/agent/test/AgentSpec.scala index 9ef95594be..901e45cd8a 100644 --- a/akka-stm/src/test/scala/akka/agent/test/AgentSpec.scala +++ b/akka-stm/src/test/scala/akka/agent/test/AgentSpec.scala @@ -11,6 +11,7 @@ import akka.util.duration._ import java.util.concurrent.CountDownLatch import akka.testkit.AkkaSpec import akka.testkit._ +import akka.dispatch.Await class CountDownFunction[A](num: Int = 1) extends Function1[A, A] { val latch = new CountDownLatch(num) @@ -35,7 +36,7 @@ class AgentSpec extends AkkaSpec { countDown.await(5 seconds) agent() must be("abcd") - agent.close + agent.close() } "maintain order between send and sendOff" in { @@ -51,7 +52,7 @@ class AgentSpec extends AkkaSpec { countDown.await(5 seconds) agent() must be("abcd") - agent.close + agent.close() } "maintain order between alter and alterOff" in { @@ -62,13 +63,13 @@ class AgentSpec extends AkkaSpec { val r2 = agent.alterOff((s: String) ⇒ { Thread.sleep(2000); s + "c" })(5000) val r3 = agent.alter(_ + "d")(5000) - r1.await.resultOrException.get must be === "ab" - r2.await.resultOrException.get must be === "abc" - r3.await.resultOrException.get must be === "abcd" + Await.result(r1, 5 seconds) must be === "ab" + Await.result(r2, 5 seconds) must be === "abc" 
+ Await.result(r3, 5 seconds) must be === "abcd" agent() must be("abcd") - agent.close + agent.close() } "be immediately readable" in { @@ -90,14 +91,14 @@ class AgentSpec extends AkkaSpec { read must be(5) agent() must be(10) - agent.close + agent.close() } "be readable within a transaction" in { val agent = Agent(5) val value = atomic { agent() } value must be(5) - agent.close + agent.close() } "dispatch sends in successful transactions" in { @@ -112,7 +113,7 @@ class AgentSpec extends AkkaSpec { countDown.await(5 seconds) agent() must be(10) - agent.close + agent.close() } "not dispatch sends in aborted transactions" in { @@ -132,7 +133,7 @@ class AgentSpec extends AkkaSpec { countDown.await(5 seconds) agent() must be(5) - agent.close + agent.close() } "be able to return a 'queued' future" in { @@ -140,11 +141,9 @@ class AgentSpec extends AkkaSpec { agent send (_ + "b") agent send (_ + "c") - val future = agent.future + Await.result(agent.future, timeout.duration) must be("abc") - future.await.result.get must be("abc") - - agent.close + agent.close() } "be able to await the value after updates have completed" in { @@ -154,7 +153,7 @@ class AgentSpec extends AkkaSpec { agent.await must be("abc") - agent.close + agent.close() } "be able to be mapped" in { @@ -164,8 +163,8 @@ class AgentSpec extends AkkaSpec { agent1() must be(5) agent2() must be(10) - agent1.close - agent2.close + agent1.close() + agent2.close() } "be able to be used in a 'foreach' for comprehension" in { @@ -178,7 +177,7 @@ class AgentSpec extends AkkaSpec { result must be(3) - agent.close + agent.close() } "be able to be used in a 'map' for comprehension" in { @@ -188,8 +187,8 @@ class AgentSpec extends AkkaSpec { agent1() must be(5) agent2() must be(10) - agent1.close - agent2.close + agent1.close() + agent2.close() } "be able to be used in a 'flatMap' for comprehension" in { @@ -205,9 +204,9 @@ class AgentSpec extends AkkaSpec { agent2() must be(2) agent3() must be(3) - agent1.close - 
agent2.close - agent3.close + agent1.close() + agent2.close() + agent3.close() } } } diff --git a/akka-stm/src/test/scala/akka/transactor/test/CoordinatedIncrementSpec.scala b/akka-stm/src/test/scala/akka/transactor/test/CoordinatedIncrementSpec.scala index eda336b78e..d3a23a8191 100644 --- a/akka-stm/src/test/scala/akka/transactor/test/CoordinatedIncrementSpec.scala +++ b/akka-stm/src/test/scala/akka/transactor/test/CoordinatedIncrementSpec.scala @@ -7,6 +7,7 @@ import akka.actor._ import akka.stm.{ Ref, TransactionFactory } import akka.util.duration._ import akka.testkit._ +import akka.dispatch.Await object CoordinatedIncrement { case class Increment(friends: Seq[ActorRef]) @@ -72,10 +73,10 @@ class CoordinatedIncrementSpec extends AkkaSpec with BeforeAndAfterAll { counters(0) ! coordinated(Increment(counters.tail)) coordinated.await for (counter ← counters) { - (counter ? GetCount).as[Int].get must be === 1 + Await.result((counter ? GetCount).mapTo[Int], timeout.duration) must be === 1 } - counters foreach (_.stop()) - failer.stop() + counters foreach (system.stop(_)) + system.stop(failer) } "increment no counters with a failing transaction" in { @@ -89,10 +90,10 @@ class CoordinatedIncrementSpec extends AkkaSpec with BeforeAndAfterAll { counters(0) ! Coordinated(Increment(counters.tail :+ failer)) coordinated.await for (counter ← counters) { - (counter ? GetCount).as[Int].get must be === 0 + Await.result(counter ? 
GetCount, timeout.duration) must be === 0 } - counters foreach (_.stop()) - failer.stop() + counters foreach (system.stop(_)) + system.stop(failer) } } } diff --git a/akka-stm/src/test/scala/akka/transactor/test/FickleFriendsSpec.scala b/akka-stm/src/test/scala/akka/transactor/test/FickleFriendsSpec.scala index a74490b410..7fdc55f91d 100644 --- a/akka-stm/src/test/scala/akka/transactor/test/FickleFriendsSpec.scala +++ b/akka-stm/src/test/scala/akka/transactor/test/FickleFriendsSpec.scala @@ -11,6 +11,7 @@ import akka.testkit._ import scala.util.Random.{ nextInt ⇒ random } import java.util.concurrent.CountDownLatch import akka.testkit.TestEvent.Mute +import akka.dispatch.Await object FickleFriends { case class FriendlyIncrement(friends: Seq[ActorRef], latch: CountDownLatch) @@ -119,12 +120,12 @@ class FickleFriendsSpec extends AkkaSpec with BeforeAndAfterAll { val latch = new CountDownLatch(1) coordinator ! FriendlyIncrement(counters, latch) latch.await // this could take a while - (coordinator ? GetCount).as[Int].get must be === 1 + Await.result(coordinator ? GetCount, timeout.duration) must be === 1 for (counter ← counters) { - (counter ? GetCount).as[Int].get must be === 1 + Await.result(counter ? 
GetCount, timeout.duration) must be === 1 } - counters foreach (_.stop()) - coordinator.stop() + counters foreach (system.stop(_)) + system.stop(coordinator) } } } diff --git a/akka-stm/src/test/scala/akka/transactor/test/TransactorSpec.scala b/akka-stm/src/test/scala/akka/transactor/test/TransactorSpec.scala index 43ee399196..f82f224870 100644 --- a/akka-stm/src/test/scala/akka/transactor/test/TransactorSpec.scala +++ b/akka-stm/src/test/scala/akka/transactor/test/TransactorSpec.scala @@ -8,6 +8,7 @@ import akka.actor._ import akka.stm._ import akka.util.duration._ import akka.testkit._ +import akka.dispatch.Await object TransactorIncrement { case class Increment(friends: Seq[ActorRef], latch: TestLatch) @@ -95,10 +96,10 @@ class TransactorSpec extends AkkaSpec { counters(0) ! Increment(counters.tail, incrementLatch) incrementLatch.await for (counter ← counters) { - (counter ? GetCount).as[Int].get must be === 1 + Await.result(counter ? GetCount, timeout.duration) must be === 1 } - counters foreach (_.stop()) - failer.stop() + counters foreach (system.stop(_)) + system.stop(failer) } "increment no counters with a failing transaction" in { @@ -112,10 +113,10 @@ class TransactorSpec extends AkkaSpec { counters(0) ! Increment(counters.tail :+ failer, failLatch) failLatch.await for (counter ← counters) { - (counter ? GetCount).as[Int].get must be === 0 + Await.result(counter ? 
GetCount, timeout.duration) must be === 0 } - counters foreach (_.stop()) - failer.stop() + counters foreach (system.stop(_)) + system.stop(failer) } } } @@ -129,7 +130,7 @@ class TransactorSpec extends AkkaSpec { latch.await val value = atomic { ref.get } value must be === 5 - transactor.stop() + system.stop(transactor) } } } diff --git a/akka-testkit/src/main/resources/reference.conf b/akka-testkit/src/main/resources/reference.conf index 0aa150e4b5..d2a4859c30 100644 --- a/akka-testkit/src/main/resources/reference.conf +++ b/akka-testkit/src/main/resources/reference.conf @@ -3,12 +3,15 @@ ###################################### # This the reference config file has all the default settings. -# Make your edits/overrides in your akka.conf. +# Make your edits/overrides in your application.conf. akka { test { - timefactor = 1.0 # factor by which to scale timeouts during tests, e.g. to account for shared build system load - filter-leeway = 3s # duration of EventFilter.intercept waits after the block is finished until all required messages are received - single-expect-default = 3s # duration to wait in expectMsg and friends outside of within() block by default + # factor by which to scale timeouts during tests, e.g. 
to account for shared build system load + timefactor = 1.0 + # duration of EventFilter.intercept waits after the block is finished until all required messages are received + filter-leeway = 3s + # duration to wait in expectMsg and friends outside of within() block by default + single-expect-default = 3s } } diff --git a/akka-testkit/src/main/scala/akka/testkit/TestActorRef.scala b/akka-testkit/src/main/scala/akka/testkit/TestActorRef.scala index 71a5e9fc22..aca5524674 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestActorRef.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestActorRef.scala @@ -11,8 +11,8 @@ import akka.actor.Props._ import akka.actor.ActorSystem import java.util.concurrent.atomic.AtomicLong import akka.event.EventStream -import akka.dispatch.{ DefaultDispatcherPrerequisites, DispatcherPrerequisites, Mailbox, Envelope } import scala.collection.immutable.Stack +import akka.dispatch._ /** * This special ActorRef is exclusively for use during unit testing in a single-threaded environment. 
Therefore, it @@ -69,8 +69,10 @@ class TestActorRef[T <: Actor]( // volatile mailbox read to bring in actor field if (isTerminated) throw new IllegalActorStateException("underlying actor is terminated") underlying.actor.asInstanceOf[T] match { - case null ⇒ ?(InternalGetActor)(underlying.system.settings.ActorTimeout).get.asInstanceOf[T] - case ref ⇒ ref + case null ⇒ + val t = underlying.system.settings.ActorTimeout + Await.result(?(InternalGetActor)(t), t.duration).asInstanceOf[T] + case ref ⇒ ref } } diff --git a/akka-testkit/src/main/scala/akka/testkit/TestKit.scala b/akka-testkit/src/main/scala/akka/testkit/TestKit.scala index d87a313589..96b0cb151e 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestKit.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestKit.scala @@ -165,7 +165,7 @@ class TestKit(_system: ActorSystem) { def msgAvailable = !queue.isEmpty /** - * Block until the given condition evaluates to `true` or the timeout + * Await until the given condition evaluates to `true` or the timeout * expires, whichever comes first. * * If no timeout is given, take it from the innermost enclosing `within` @@ -560,7 +560,7 @@ object TestKit { private[testkit] val testActorId = new AtomicInteger(0) /** - * Block until the given condition evaluates to `true` or the timeout + * Await until the given condition evaluates to `true` or the timeout * expires, whichever comes first. 
* * If no timeout is given, take it from the innermost enclosing `within` diff --git a/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala b/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala index 1af4785525..13bb9d84ab 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala @@ -29,7 +29,9 @@ class TestLatch(count: Int = 1)(implicit system: ActorSystem) { def countDown() = latch.countDown() - def open() = countDown() + def isOpen: Boolean = latch.getCount == 0 + + def open() = while (!isOpen) countDown() def await(): Boolean = await(TestLatch.DefaultTimeout) diff --git a/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala b/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala index 1fdbaee7d7..baab14b9cb 100644 --- a/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala +++ b/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala @@ -7,16 +7,15 @@ import org.scalatest.{ WordSpec, BeforeAndAfterAll, Tag } import org.scalatest.matchers.MustMatchers import akka.actor.{ ActorSystem, ActorSystemImpl } import akka.actor.{ Actor, ActorRef, Props } -import akka.dispatch.MessageDispatcher import akka.event.{ Logging, LoggingAdapter } import akka.util.duration._ -import akka.dispatch.FutureTimeoutException import com.typesafe.config.Config import com.typesafe.config.ConfigFactory import akka.actor.PoisonPill -import java.util.concurrent.LinkedBlockingQueue import akka.actor.CreateChild import akka.actor.DeadLetter +import java.util.concurrent.TimeoutException +import akka.dispatch.{ Await, MessageDispatcher } object TimingTest extends Tag("timing") @@ -64,9 +63,9 @@ abstract class AkkaSpec(_system: ActorSystem) } final override def afterAll { - system.stop() - try system.asInstanceOf[ActorSystemImpl].terminationFuture.await(5 seconds) catch { - case _: FutureTimeoutException ⇒ system.log.warning("Failed to stop [{}] within 5 seconds", system.name) + system.shutdown() + try 
Await.ready(system.asInstanceOf[ActorSystemImpl].terminationFuture, 5 seconds) catch { + case _: TimeoutException ⇒ system.log.warning("Failed to stop [{}] within 5 seconds", system.name) } atTermination() } @@ -76,7 +75,7 @@ abstract class AkkaSpec(_system: ActorSystem) protected def atTermination() {} def spawn(body: ⇒ Unit)(implicit dispatcher: MessageDispatcher) { - system.actorOf(Props(ctx ⇒ { case "go" ⇒ try body finally ctx.self.stop() }).withDispatcher(dispatcher)) ! "go" + system.actorOf(Props(ctx ⇒ { case "go" ⇒ try body finally ctx.stop(ctx.self) }).withDispatcher(dispatcher)) ! "go" } } @@ -96,7 +95,7 @@ class AkkaSpecSpec extends WordSpec with MustMatchers { val ref = Seq(testActor, system.actorOf(Props.empty, "name")) } spec.ref foreach (_.isTerminated must not be true) - system.stop() + system.shutdown() spec.awaitCond(spec.ref forall (_.isTerminated), 2 seconds) } @@ -120,7 +119,7 @@ class AkkaSpecSpec extends WordSpec with MustMatchers { implicit val davyJones = otherSystem.actorOf(Props(new Actor { def receive = { case m: DeadLetter ⇒ locker :+= m - case "Die!" ⇒ sender ! "finally gone"; self.stop() + case "Die!" ⇒ sender ! "finally gone"; context.stop(self) } }), "davyJones") @@ -139,15 +138,15 @@ class AkkaSpecSpec extends WordSpec with MustMatchers { val latch = new TestLatch(1)(system) system.registerOnTermination(latch.countDown()) - system.stop() + system.shutdown() latch.await(2 seconds) - (davyJones ? "Die!").get must be === "finally gone" + Await.result(davyJones ? 
"Die!", timeout.duration) must be === "finally gone" // this will typically also contain log messages which were sent after the logger shutdown locker must contain(DeadLetter(42, davyJones, probe.ref)) } finally { - system.stop() - otherSystem.stop() + system.shutdown() + otherSystem.shutdown() } } diff --git a/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala b/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala index 462ee6ffc6..6fbe81a0cc 100644 --- a/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala +++ b/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala @@ -7,7 +7,7 @@ import org.scalatest.matchers.MustMatchers import org.scalatest.{ BeforeAndAfterEach, WordSpec } import akka.actor._ import akka.event.Logging.Warning -import akka.dispatch.{ Future, Promise } +import akka.dispatch.{ Future, Promise, Await } import akka.util.duration._ import akka.actor.ActorSystem @@ -56,8 +56,8 @@ object TestActorRefSpec { class WorkerActor() extends TActor { def receiveT = { - case "work" ⇒ sender ! "workDone"; self.stop() - case replyTo: Promise[Any] ⇒ replyTo.completeWithResult("complexReply") + case "work" ⇒ sender ! "workDone"; context.stop(self) + case replyTo: Promise[Any] ⇒ replyTo.success("complexReply") case replyTo: ActorRef ⇒ replyTo ! "complexReply" } } @@ -110,7 +110,7 @@ class TestActorRefSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTime def receive = { case _ ⇒ sender ! nested } })) a must not be (null) - val nested = (a ? "any").as[ActorRef].get + val nested = Await.result((a ? "any").mapTo[ActorRef], timeout.duration) nested must not be (null) a must not be theSameInstanceAs(nested) } @@ -121,7 +121,7 @@ class TestActorRefSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTime def receive = { case _ ⇒ sender ! nested } })) a must not be (null) - val nested = (a ? "any").as[ActorRef].get + val nested = Await.result((a ? 
"any").mapTo[ActorRef], timeout.duration) nested must not be (null) a must not be theSameInstanceAs(nested) } @@ -195,7 +195,7 @@ class TestActorRefSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTime val f = a ? "work" // CallingThreadDispatcher means that there is no delay f must be('completed) - f.as[String] must equal(Some("workDone")) + Await.result(f, timeout.duration) must equal("workDone") } } diff --git a/akka-testkit/src/test/scala/akka/testkit/TestProbeSpec.scala b/akka-testkit/src/test/scala/akka/testkit/TestProbeSpec.scala index 5e2d775195..4723070299 100644 --- a/akka-testkit/src/test/scala/akka/testkit/TestProbeSpec.scala +++ b/akka-testkit/src/test/scala/akka/testkit/TestProbeSpec.scala @@ -4,8 +4,8 @@ import org.scalatest.WordSpec import org.scalatest.matchers.MustMatchers import org.scalatest.{ BeforeAndAfterEach, WordSpec } import akka.actor._ -import akka.dispatch.Future import akka.util.duration._ +import akka.dispatch.{ Await, Future } @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class TestProbeSpec extends AkkaSpec with DefaultTimeout { @@ -18,7 +18,7 @@ class TestProbeSpec extends AkkaSpec with DefaultTimeout { tk.expectMsg(0 millis, "hello") // TestActor runs on CallingThreadDispatcher tk.lastMessage.sender ! 
"world" future must be('completed) - future.get must equal("world") + Await.result(future, timeout.duration) must equal("world") } "reply to messages" in { diff --git a/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java b/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java index 7c90b2f310..72703f768b 100644 --- a/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java +++ b/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java @@ -116,7 +116,7 @@ public class Pi { Result result = (Result) message; pi += result.getValue(); nrOfResults += 1; - if (nrOfResults == nrOfMessages) getSelf().stop(); + if (nrOfResults == nrOfMessages) getContext().stop(getSelf()); } else throw new IllegalArgumentException("Unknown message [" + message + "]"); //#handle-messages } @@ -162,7 +162,7 @@ public class Pi { latch.await(); // Shut down the system - system.stop(); + system.shutdown(); } } -//#app \ No newline at end of file +//#app diff --git a/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala b/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala index 10395405f7..0dddc16654 100644 --- a/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala +++ b/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala @@ -63,7 +63,7 @@ object Pi extends App { pi += value nrOfResults += 1 // Stops this actor and all its supervised children - if (nrOfResults == nrOfMessages) self.stop() + if (nrOfResults == nrOfMessages) context.stop(self) //#handle-messages } //#master-receive @@ -101,7 +101,7 @@ object Pi extends App { latch.await() // Shut down the system - system.stop() + system.shutdown() } } //#app diff --git a/akka-tutorials/akka-tutorial-first/src/test/scala/WorkerSpec.scala b/akka-tutorials/akka-tutorial-first/src/test/scala/WorkerSpec.scala index a752b3c783..a9d2a202fd 100644 --- a/akka-tutorials/akka-tutorial-first/src/test/scala/WorkerSpec.scala +++ 
b/akka-tutorials/akka-tutorial-first/src/test/scala/WorkerSpec.scala @@ -17,7 +17,7 @@ class WorkerSpec extends WordSpec with MustMatchers with BeforeAndAfterAll { implicit val system = ActorSystem() override def afterAll { - system.stop() + system.shutdown() } "Worker" must { diff --git a/config/akka.conf b/config/akka.conf deleted file mode 100644 index 64883cf7c1..0000000000 --- a/config/akka.conf +++ /dev/null @@ -1,2 +0,0 @@ -# In this file you can override any option defined in the 'akka-reference.conf' file. -# Copy in all or parts of the 'akka-reference.conf' file and modify as you please. diff --git a/config/application.conf b/config/application.conf new file mode 100644 index 0000000000..2f7ad95abd --- /dev/null +++ b/config/application.conf @@ -0,0 +1,2 @@ +# In this file you can override any option defined in the 'reference.conf' files. +# Copy in all or parts of the 'reference.conf' files and modify as you please. diff --git a/config/microkernel-server.xml b/config/microkernel-server.xml deleted file mode 100644 index 07fda30fcf..0000000000 --- a/config/microkernel-server.xml +++ /dev/null @@ -1,106 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 300000 - 2 - false - 8443 - 20000 - 5000 - - - - - - - - - - - - - - - - - / - - akka.http.AkkaMistServlet - /* - - - - - - - - - - - - - - - true - true - true - 1000 - - diff --git a/project/AkkaBuild.scala b/project/AkkaBuild.scala index cbad5fda90..4a4685113b 100644 --- a/project/AkkaBuild.scala +++ b/project/AkkaBuild.scala @@ -30,7 +30,7 @@ object AkkaBuild extends Build { Unidoc.unidocExclude := Seq(samples.id, tutorials.id), Dist.distExclude := Seq(actorTests.id, akkaSbtPlugin.id, docs.id) ), - aggregate = Seq(actor, testkit, actorTests, stm, remote, slf4j, amqp, mailboxes, akkaSbtPlugin, samples, tutorials, docs) + aggregate = Seq(actor, testkit, actorTests, remote, slf4j, amqp, mailboxes, kernel, akkaSbtPlugin, samples, tutorials, docs) ) lazy val actor = Project( @@ 
-66,19 +66,10 @@ object AkkaBuild extends Build { ) ) - lazy val stm = Project( - id = "akka-stm", - base = file("akka-stm"), - dependencies = Seq(actor, testkit % "test->test"), - settings = defaultSettings ++ Seq( - libraryDependencies ++= Dependencies.stm - ) - ) - lazy val remote = Project( id = "akka-remote", base = file("akka-remote"), - dependencies = Seq(stm, actorTests % "test->test", testkit % "test->test"), + dependencies = Seq(actor, actorTests % "test->test", testkit % "test->test"), settings = defaultSettings ++ multiJvmSettings ++ Seq( libraryDependencies ++= Dependencies.cluster, extraOptions in MultiJvm <<= (sourceDirectory in MultiJvm) { src => @@ -192,14 +183,14 @@ object AkkaBuild extends Build { // ) // ) - // lazy val kernel = Project( - // id = "akka-kernel", - // base = file("akka-kernel"), - // dependencies = Seq(cluster, slf4j, spring), - // settings = defaultSettings ++ Seq( - // libraryDependencies ++= Dependencies.kernel - // ) - // ) + lazy val kernel = Project( + id = "akka-kernel", + base = file("akka-kernel"), + dependencies = Seq(actor, testkit % "test->test"), + settings = defaultSettings ++ Seq( + libraryDependencies ++= Dependencies.kernel + ) + ) lazy val akkaSbtPlugin = Project( id = "akka-sbt-plugin", @@ -213,7 +204,7 @@ object AkkaBuild extends Build { id = "akka-samples", base = file("akka-samples"), settings = parentSettings, - aggregate = Seq(fsmSample, helloSample) + aggregate = Seq(fsmSample, helloSample, helloKernelSample) ) lazy val fsmSample = Project( @@ -230,6 +221,13 @@ object AkkaBuild extends Build { settings = defaultSettings ) + lazy val helloKernelSample = Project( + id = "akka-sample-hello-kernel", + base = file("akka-samples/akka-sample-hello-kernel"), + dependencies = Seq(kernel), + settings = defaultSettings + ) + lazy val tutorials = Project( id = "akka-tutorials", base = file("akka-tutorials"), @@ -256,7 +254,7 @@ object AkkaBuild extends Build { lazy val docs = Project( id = "akka-docs", base = 
file("akka-docs"), - dependencies = Seq(actor, testkit % "test->test", stm, remote, slf4j, fileMailbox, mongoMailbox, redisMailbox, beanstalkMailbox, zookeeperMailbox), + dependencies = Seq(actor, testkit % "test->test", remote, slf4j, fileMailbox, mongoMailbox, redisMailbox, beanstalkMailbox, zookeeperMailbox), settings = defaultSettings ++ Seq( unmanagedSourceDirectories in Test <<= baseDirectory { _ ** "code" get }, libraryDependencies ++= Dependencies.docs, @@ -359,12 +357,10 @@ object Dependencies { val testkit = Seq(Test.scalatest, Test.junit) val actorTests = Seq( - Test.junit, Test.scalatest, Test.multiverse, Test.commonsMath, Test.mockito, + Test.junit, Test.scalatest, Test.commonsMath, Test.mockito, Test.scalacheck, protobuf, jacksonMapper, sjson ) - val stm = Seq(multiverse, Test.junit, Test.scalatest) - val cluster = Seq( bookkeeper, commonsCodec, commonsIo, guice, h2Lzf, jacksonCore, jacksonMapper, log4j, netty, protobuf, sjson, zkClient, zookeeper, zookeeperLock, Test.junit, Test.scalatest @@ -388,9 +384,7 @@ object Dependencies { val spring = Seq(springBeans, springContext, Test.junit, Test.scalatest) - val kernel = Seq( - jettyUtil, jettyXml, jettyServlet, jacksonCore, staxApi - ) + val kernel = Seq(Test.scalatest, Test.junit) // TODO: resolve Jetty version conflict // val sampleCamel = Seq(camelCore, camelSpring, commonsCodec, Runtime.camelJms, Runtime.activemq, Runtime.springJms, @@ -412,7 +406,6 @@ object Dependency { val Jersey = "1.3" val Jetty = "7.4.0.v20110414" val Logback = "0.9.28" - val Multiverse = "0.6.2" val Netty = "3.2.5.Final" val Protobuf = "2.4.1" val Scalatest = "1.6.1" @@ -439,7 +432,6 @@ object Dependency { val jettyServlet = "org.eclipse.jetty" % "jetty-servlet" % V.Jetty // Eclipse license val log4j = "log4j" % "log4j" % "1.2.15" // ApacheV2 val mongoAsync = "com.mongodb.async" % "mongo-driver_2.9.0-1" % "0.2.9-1" // ApacheV2 - val multiverse = "org.multiverse" % "multiverse-alpha" % V.Multiverse // ApacheV2 val netty = 
"org.jboss.netty" % "netty" % V.Netty // ApacheV2 val osgi = "org.osgi" % "org.osgi.core" % "4.2.0" // ApacheV2 val protobuf = "com.google.protobuf" % "protobuf-java" % V.Protobuf // New BSD @@ -482,7 +474,6 @@ object Dependency { val junit = "junit" % "junit" % "4.5" % "test" // Common Public License 1.0 val logback = "ch.qos.logback" % "logback-classic" % V.Logback % "test" // EPL 1.0 / LGPL 2.1 val mockito = "org.mockito" % "mockito-all" % "1.8.1" % "test" // MIT - val multiverse = "org.multiverse" % "multiverse-alpha" % V.Multiverse % "test" // ApacheV2 val scalatest = "org.scalatest" %% "scalatest" % V.Scalatest % "test" // ApacheV2 val scalacheck = "org.scala-tools.testing" %% "scalacheck" % "1.9" % "test" // New BSD } diff --git a/project/Dist.scala b/project/Dist.scala index 3f3af68098..002ae63fc1 100644 --- a/project/Dist.scala +++ b/project/Dist.scala @@ -52,10 +52,12 @@ object Dist { (baseDirectory, distSources, distUnzipped, version, distFile, streams) map { (projectBase, allSources, unzipped, version, zipFile, s) => { val base = unzipped / ("akka-" + version) - val scripts = (projectBase / "scripts" / "microkernel" * "*").get + val scripts = (projectBase / "akka-kernel" / "src" / "main" / "scripts" * "*").get val bin = base / "bin" val configSources = projectBase / "config" val config = base / "config" + val deploy = base / "deploy" + val deployReadme = deploy / "readme" val doc = base / "doc" / "akka" val api = doc / "api" val docs = doc / "docs" @@ -66,9 +68,10 @@ object Dist { val libAkka = lib / "akka" val src = base / "src" / "akka" IO.delete(unzipped) - // TODO: re-enable bin and config dirs, and add deploy dir, when akka-kernel is enabled - //copyFilesTo(scripts, bin, setExecutable = true) - //IO.copyDirectory(configSources, config) + copyFilesTo(scripts, bin, setExecutable = true) + IO.copyDirectory(configSources, config) + IO.createDirectory(deploy) + IO.write(deployReadme, "Place application jars in this directory") 
IO.copyDirectory(allSources.api, api) IO.copyDirectory(allSources.docs, docs) copyFilesTo(allSources.docJars, docJars) diff --git a/scripts/microkernel/akka b/scripts/microkernel/akka deleted file mode 100755 index 4241d2693d..0000000000 --- a/scripts/microkernel/akka +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -AKKA_HOME="$(cd "$(cd "$(dirname "$0")"; pwd -P)"/..; pwd)" - -[ -n "$JAVA_OPTS" ] || JAVA_OPTS="-Xms1536M -Xmx1536M -Xss1M -XX:MaxPermSize=256M -XX:+UseParallelGC" - -[ -n "$AKKA_CLASSPATH" ] || AKKA_CLASSPATH="$AKKA_HOME/lib/scala-library.jar:$AKKA_HOME/lib/akka/*:$AKKA_HOME/config" - -java $JAVA_OPTS -cp "$AKKA_CLASSPATH" -Dakka.home="$AKKA_HOME" akka.kernel.Main