diff --git a/akka-actor-tests/src/test/java/akka/actor/JavaAPI.java b/akka-actor-tests/src/test/java/akka/actor/JavaAPI.java index cfe2a7c63c..d0ef5104cd 100644 --- a/akka-actor-tests/src/test/java/akka/actor/JavaAPI.java +++ b/akka-actor-tests/src/test/java/akka/actor/JavaAPI.java @@ -3,30 +3,31 @@ package akka.actor; import akka.actor.ActorSystem; import akka.japi.Creator; import org.junit.Test; -import akka.actor.Actors; -import akka.remote.RemoteSupport; import static org.junit.Assert.*; public class JavaAPI { - private ActorSystem app = new ActorSystem(); + private ActorSystem system = ActorSystem.create(); - @Test void mustBeAbleToCreateActorRefFromClass() { - ActorRef ref = app.actorOf(JavaAPITestActor.class); - assertNotNull(ref); + @Test + void mustBeAbleToCreateActorRefFromClass() { + ActorRef ref = system.actorOf(JavaAPITestActor.class); + assertNotNull(ref); } - @Test void mustBeAbleToCreateActorRefFromFactory() { - ActorRef ref = app.actorOf(new Props().withCreator(new Creator() { - public Actor create() { - return new JavaAPITestActor(); - } - })); - assertNotNull(ref); + @Test + void mustBeAbleToCreateActorRefFromFactory() { + ActorRef ref = system.actorOf(new Props().withCreator(new Creator() { + public Actor create() { + return new JavaAPITestActor(); + } + })); + assertNotNull(ref); } - @Test void mustAcceptSingleArgTell() { - ActorRef ref = app.actorOf(JavaAPITestActor.class); + @Test + void mustAcceptSingleArgTell() { + ActorRef ref = system.actorOf(JavaAPITestActor.class); ref.tell("hallo"); ref.tell("hallo", ref); } diff --git a/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java b/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java new file mode 100644 index 0000000000..533fba8164 --- /dev/null +++ b/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java @@ -0,0 +1,45 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. 
+ */ +package akka.actor; + +import org.junit.Test; + +import com.typesafe.config.ConfigFactory; +import com.typesafe.config.Config; +import com.typesafe.config.ConfigParseOptions; + +import static org.junit.Assert.*; + +public class JavaExtension { + + static class TestExtension implements Extension { + private ActorSystemImpl system; + public static ExtensionKey key = new ExtensionKey() { + }; + + public ExtensionKey key() { + return key; + } + + public void init(ActorSystemImpl system) { + this.system = system; + } + + public ActorSystemImpl getSystem() { + return system; + } + } + + private Config c = ConfigFactory.parseString("akka.extensions = [ \"akka.actor.JavaExtension$TestExtension\" ]", + ConfigParseOptions.defaults()); + + private ActorSystem system = ActorSystem.create("JavaExtension", c); + + @Test + public void mustBeAccessible() { + final ActorSystemImpl s = system.extension(TestExtension.key).getSystem(); + assertSame(s, system); + } + +} diff --git a/akka-actor-tests/src/test/java/akka/dispatch/JavaFutureTests.java b/akka-actor-tests/src/test/java/akka/dispatch/JavaFutureTests.java index 7c7bc52876..12dbe736d6 100644 --- a/akka-actor-tests/src/test/java/akka/dispatch/JavaFutureTests.java +++ b/akka-actor-tests/src/test/java/akka/dispatch/JavaFutureTests.java @@ -14,248 +14,267 @@ import akka.japi.Function; import akka.japi.Function2; import akka.japi.Procedure; import akka.japi.Option; -import scala.Some; -import scala.Right; public class JavaFutureTests { - private final ActorSystem app = new ActorSystem(); - private final Timeout t = app.AkkaConfig().ActorTimeout(); - private final FutureFactory ff = new FutureFactory(app.dispatcher(), t); + private final ActorSystem system = ActorSystem.create(); + private final Timeout t = system.settings().ActorTimeout(); + private final FutureFactory ff = new FutureFactory(system.dispatcher(), t); - @Test public void mustBeAbleToMapAFuture() { - Future f1 = ff.future(new Callable() { - public String call() { 
- return "Hello"; - } - }); + @Test + public void mustBeAbleToMapAFuture() { + Future f1 = ff.future(new Callable() { + public String call() { + return "Hello"; + } + }); - Future f2 = f1.map(new Function() { - public String apply(String s) { - return s + " World"; - } - }, t); + Future f2 = f1.map(new Function() { + public String apply(String s) { + return s + " World"; + } + }, t); - assertEquals("Hello World", f2.get()); - } + assertEquals("Hello World", f2.get()); + } - @Test public void mustBeAbleToExecuteAnOnResultCallback() throws Throwable { - final CountDownLatch latch = new CountDownLatch(1); - Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, app.dispatcherFactory().defaultGlobalDispatcher()); - Future f = cf; - f.onResult(new Procedure() { - public void apply(String result) { - if(result.equals("foo")) - latch.countDown(); - } - }); + @Test + public void mustBeAbleToExecuteAnOnResultCallback() throws Throwable { + final CountDownLatch latch = new CountDownLatch(1); + Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, system + .dispatcherFactory().defaultGlobalDispatcher()); + Future f = cf; + f.onResult(new Procedure() { + public void apply(String result) { + if (result.equals("foo")) + latch.countDown(); + } + }); - cf.completeWithResult("foo"); - assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); - assertEquals(f.get(), "foo"); - } + cf.completeWithResult("foo"); + assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); + assertEquals(f.get(), "foo"); + } - @Test public void mustBeAbleToExecuteAnOnExceptionCallback() throws Throwable { - final CountDownLatch latch = new CountDownLatch(1); - Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, app.dispatcherFactory().defaultGlobalDispatcher()); - Future f = cf; - f.onException(new Procedure() { - public void apply(Throwable t) { - if(t instanceof NullPointerException) - latch.countDown(); - } - }); + @Test + public void 
mustBeAbleToExecuteAnOnExceptionCallback() throws Throwable { + final CountDownLatch latch = new CountDownLatch(1); + Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, system + .dispatcherFactory().defaultGlobalDispatcher()); + Future f = cf; + f.onException(new Procedure() { + public void apply(Throwable t) { + if (t instanceof NullPointerException) + latch.countDown(); + } + }); - Throwable exception = new NullPointerException(); - cf.completeWithException(exception); - assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); - assertEquals(f.exception().get(), exception); - } + Throwable exception = new NullPointerException(); + cf.completeWithException(exception); + assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); + assertEquals(f.exception().get(), exception); + } - @Test public void mustBeAbleToExecuteAnOnTimeoutCallback() throws Throwable { - final CountDownLatch latch = new CountDownLatch(1); - Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, app.dispatcherFactory().defaultGlobalDispatcher()); - Future f = cf; - f.onTimeout(new Procedure>() { - public void apply(Future future) { - latch.countDown(); - } - }); + @Test + public void mustBeAbleToExecuteAnOnTimeoutCallback() throws Throwable { + final CountDownLatch latch = new CountDownLatch(1); + Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, system + .dispatcherFactory().defaultGlobalDispatcher()); + Future f = cf; + f.onTimeout(new Procedure>() { + public void apply(Future future) { + latch.countDown(); + } + }); - assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); - assertTrue(f.value().isEmpty()); - } + assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); + assertTrue(f.value().isEmpty()); + } - @Test public void mustBeAbleToExecuteAnOnCompleteCallback() throws Throwable { - final CountDownLatch latch = new CountDownLatch(1); - Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, 
app.dispatcherFactory().defaultGlobalDispatcher()); - Future f = cf; - f.onComplete(new Procedure>() { - public void apply(akka.dispatch.Future future) { - latch.countDown(); - } - }); + @Test + public void mustBeAbleToExecuteAnOnCompleteCallback() throws Throwable { + final CountDownLatch latch = new CountDownLatch(1); + Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, system + .dispatcherFactory().defaultGlobalDispatcher()); + Future f = cf; + f.onComplete(new Procedure>() { + public void apply(akka.dispatch.Future future) { + latch.countDown(); + } + }); - cf.completeWithResult("foo"); - assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); - assertEquals(f.get(), "foo"); - } + cf.completeWithResult("foo"); + assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); + assertEquals(f.get(), "foo"); + } - @Test public void mustBeAbleToForeachAFuture() throws Throwable { - final CountDownLatch latch = new CountDownLatch(1); - Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, app.dispatcherFactory().defaultGlobalDispatcher()); - Future f = cf; - f.foreach(new Procedure() { - public void apply(String future) { - latch.countDown(); - } - }); + @Test + public void mustBeAbleToForeachAFuture() throws Throwable { + final CountDownLatch latch = new CountDownLatch(1); + Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, system + .dispatcherFactory().defaultGlobalDispatcher()); + Future f = cf; + f.foreach(new Procedure() { + public void apply(String future) { + latch.countDown(); + } + }); - cf.completeWithResult("foo"); - assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); - assertEquals(f.get(), "foo"); - } + cf.completeWithResult("foo"); + assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); + assertEquals(f.get(), "foo"); + } - @Test public void mustBeAbleToFlatMapAFuture() throws Throwable { - final CountDownLatch latch = new CountDownLatch(1); - Promise cf = new 
akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, app.dispatcherFactory().defaultGlobalDispatcher()); - cf.completeWithResult("1000"); - Future f = cf; - Future r = f.flatMap(new Function>() { - public Future apply(String r) { - latch.countDown(); - Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, app.dispatcherFactory().defaultGlobalDispatcher()); - cf.completeWithResult(Integer.parseInt(r)); - return cf; - } - }, t); + @Test + public void mustBeAbleToFlatMapAFuture() throws Throwable { + final CountDownLatch latch = new CountDownLatch(1); + Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, system + .dispatcherFactory().defaultGlobalDispatcher()); + cf.completeWithResult("1000"); + Future f = cf; + Future r = f.flatMap(new Function>() { + public Future apply(String r) { + latch.countDown(); + Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, system + .dispatcherFactory().defaultGlobalDispatcher()); + cf.completeWithResult(Integer.parseInt(r)); + return cf; + } + }, t); - assertEquals(f.get(), "1000"); - assertEquals(r.get().intValue(), 1000); - assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); - } + assertEquals(f.get(), "1000"); + assertEquals(r.get().intValue(), 1000); + assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); + } - @Test public void mustBeAbleToFilterAFuture() throws Throwable { - final CountDownLatch latch = new CountDownLatch(1); - Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, app.dispatcherFactory().defaultGlobalDispatcher()); - Future f = cf; - Future r = f.filter(new Function() { - public Boolean apply(String r) { - latch.countDown(); - return r.equals("foo"); - } - }, t); + @Test + public void mustBeAbleToFilterAFuture() throws Throwable { + final CountDownLatch latch = new CountDownLatch(1); + Promise cf = new akka.dispatch.DefaultPromise(1000, TimeUnit.MILLISECONDS, system + .dispatcherFactory().defaultGlobalDispatcher()); + 
Future f = cf; + Future r = f.filter(new Function() { + public Boolean apply(String r) { + latch.countDown(); + return r.equals("foo"); + } + }, t); - cf.completeWithResult("foo"); - assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); - assertEquals(f.get(), "foo"); - assertEquals(r.get(), "foo"); - } + cf.completeWithResult("foo"); + assertTrue(latch.await(5000, TimeUnit.MILLISECONDS)); + assertEquals(f.get(), "foo"); + assertEquals(r.get(), "foo"); + } - // TODO: Improve this test, perhaps with an Actor - @Test public void mustSequenceAFutureList() { - LinkedList> listFutures = new LinkedList>(); - LinkedList listExpected = new LinkedList(); + // TODO: Improve this test, perhaps with an Actor + @Test + public void mustSequenceAFutureList() { + LinkedList> listFutures = new LinkedList>(); + LinkedList listExpected = new LinkedList(); - for (int i = 0; i < 10; i++) { - listExpected.add("test"); - listFutures.add(ff.future(new Callable() { - public String call() { - return "test"; - } - })); + for (int i = 0; i < 10; i++) { + listExpected.add("test"); + listFutures.add(ff.future(new Callable() { + public String call() { + return "test"; } - - Future> futureList = ff.sequence(listFutures, t); - - assertEquals(futureList.get(), listExpected); + })); } - // TODO: Improve this test, perhaps with an Actor - @Test public void foldForJavaApiMustWork() { - LinkedList> listFutures = new LinkedList>(); - StringBuilder expected = new StringBuilder(); + Future> futureList = ff.sequence(listFutures, t); - for (int i = 0; i < 10; i++) { - expected.append("test"); - listFutures.add(ff.future(new Callable() { - public String call() { - return "test"; - } - })); + assertEquals(futureList.get(), listExpected); + } + + // TODO: Improve this test, perhaps with an Actor + @Test + public void foldForJavaApiMustWork() { + LinkedList> listFutures = new LinkedList>(); + StringBuilder expected = new StringBuilder(); + + for (int i = 0; i < 10; i++) { + expected.append("test"); + 
listFutures.add(ff.future(new Callable() { + public String call() { + return "test"; } + })); + } - Future result = ff.fold("", 15000,listFutures, new Function2() { - public String apply(String r, String t) { - return r + t; + Future result = ff.fold("", 15000, listFutures, new Function2() { + public String apply(String r, String t) { + return r + t; + } + }); + + assertEquals(result.get(), expected.toString()); + } + + @Test + public void reduceForJavaApiMustWork() { + LinkedList> listFutures = new LinkedList>(); + StringBuilder expected = new StringBuilder(); + + for (int i = 0; i < 10; i++) { + expected.append("test"); + listFutures.add(ff.future(new Callable() { + public String call() { + return "test"; + } + })); + } + + Future result = ff.reduce(listFutures, 15000, new Function2() { + public String apply(String r, String t) { + return r + t; + } + }); + + assertEquals(result.get(), expected.toString()); + } + + @Test + public void traverseForJavaApiMustWork() { + LinkedList listStrings = new LinkedList(); + LinkedList expectedStrings = new LinkedList(); + + for (int i = 0; i < 10; i++) { + expectedStrings.add("TEST"); + listStrings.add("test"); + } + + Future> result = ff.traverse(listStrings, t, new Function>() { + public Future apply(final String r) { + return ff.future(new Callable() { + public String call() { + return r.toUpperCase(); } }); + } + }); - assertEquals(result.get(), expected.toString()); - } + assertEquals(result.get(), expectedStrings); + } - @Test public void reduceForJavaApiMustWork() { - LinkedList> listFutures = new LinkedList>(); - StringBuilder expected = new StringBuilder(); - - for (int i = 0; i < 10; i++) { - expected.append("test"); - listFutures.add(ff.future(new Callable() { - public String call() { - return "test"; - } - })); + @Test + public void findForJavaApiMustWork() { + LinkedList> listFutures = new LinkedList>(); + for (int i = 0; i < 10; i++) { + final Integer fi = i; + listFutures.add(ff.future(new Callable() { + public 
Integer call() { + return fi; } - - Future result = ff.reduce(listFutures, 15000, new Function2() { - public String apply(String r, String t) { - return r + t; - } - }); - - assertEquals(result.get(), expected.toString()); + })); } + final Integer expect = 5; + Future> f = ff.find(listFutures, new Function() { + public Boolean apply(Integer i) { + return i == 5; + } + }, t); - @Test public void traverseForJavaApiMustWork() { - LinkedList listStrings = new LinkedList(); - LinkedList expectedStrings = new LinkedList(); - - for (int i = 0; i < 10; i++) { - expectedStrings.add("TEST"); - listStrings.add("test"); - } - - Future> result = ff.traverse(listStrings, t, new Function>() { - public Future apply(final String r) { - return ff.future(new Callable() { - public String call() { - return r.toUpperCase(); - } - }); - } - }); - - assertEquals(result.get(), expectedStrings); - } - - @Test public void findForJavaApiMustWork() { - LinkedList> listFutures = new LinkedList>(); - for (int i = 0; i < 10; i++) { - final Integer fi = i; - listFutures.add(ff.future(new Callable() { - public Integer call() { - return fi; - } - })); - } - final Integer expect = 5; - Future> f = ff.find(listFutures, new Function() { - public Boolean apply(Integer i) { - return i == 5; - } - }, t); - - final Integer got = f.get().get(); - assertEquals(expect, got); - } + final Integer got = f.get().get(); + assertEquals(expect, got); + } } diff --git a/akka-actor-tests/src/test/java/akka/japi/JavaAPITestBase.java b/akka-actor-tests/src/test/java/akka/japi/JavaAPITestBase.java index e3a160f776..c0361530da 100644 --- a/akka-actor-tests/src/test/java/akka/japi/JavaAPITestBase.java +++ b/akka-actor-tests/src/test/java/akka/japi/JavaAPITestBase.java @@ -6,37 +6,44 @@ import static org.junit.Assert.*; public class JavaAPITestBase { - @Test public void shouldCreateSomeString() { - Option o = Option.some("abc"); - assertFalse(o.isEmpty()); - assertTrue(o.isDefined()); - assertEquals("abc", o.get()); - } + 
@Test + public void shouldCreateSomeString() { + Option o = Option.some("abc"); + assertFalse(o.isEmpty()); + assertTrue(o.isDefined()); + assertEquals("abc", o.get()); + } - @Test public void shouldCreateNone() { - Option o1 = Option.none(); - assertTrue(o1.isEmpty()); - assertFalse(o1.isDefined()); + @Test + public void shouldCreateNone() { + Option o1 = Option.none(); + assertTrue(o1.isEmpty()); + assertFalse(o1.isDefined()); - Option o2 = Option.none(); - assertTrue(o2.isEmpty()); - assertFalse(o2.isDefined()); - } + Option o2 = Option.none(); + assertTrue(o2.isEmpty()); + assertFalse(o2.isDefined()); + } - @Test public void shouldEnterForLoop() { - for(String s : Option.some("abc")) { - return; - } - fail("for-loop not entered"); + @Test + public void shouldEnterForLoop() { + for (@SuppressWarnings("unused") + String s : Option.some("abc")) { + return; } + fail("for-loop not entered"); + } - @Test public void shouldNotEnterForLoop() { - for(Object o : Option.none()) { - fail("for-loop entered"); - } + @Test + public void shouldNotEnterForLoop() { + for (@SuppressWarnings("unused") + Object o : Option.none()) { + fail("for-loop entered"); } + } - @Test public void shouldBeSingleton() { - assertSame(Option.none(), Option.none()); - } + @Test + public void shouldBeSingleton() { + assertSame(Option.none(), Option.none()); + } } diff --git a/akka-actor-tests/src/test/scala/ConfigDocSpec.scala b/akka-actor-tests/src/test/scala/ConfigDocSpec.scala new file mode 100644 index 0000000000..5f65cce91e --- /dev/null +++ b/akka-actor-tests/src/test/scala/ConfigDocSpec.scala @@ -0,0 +1,30 @@ +package akka.docs.config + +import org.scalatest.WordSpec +import org.scalatest.matchers.MustMatchers +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigParseOptions +import akka.actor.ActorSystem + +//#imports + +class ConfigDocSpec extends WordSpec { + + "programmatically configure ActorSystem" in { + //#custom-config + val customConf = 
ConfigFactory.parseString(""" + akka.actor.deployment { + /app/my-service { + router = round-robin + nr-of-instances = 3 + } + } + """, ConfigParseOptions.defaults) + val system = ActorSystem("MySystem", ConfigFactory.systemProperties.withFallback(customConf)) + //#custom-config + + system.stop() + + } + +} diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorFireForgetRequestReplySpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorFireForgetRequestReplySpec.scala index d54411a847..7daf812631 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorFireForgetRequestReplySpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorFireForgetRequestReplySpec.scala @@ -80,11 +80,11 @@ class ActorFireForgetRequestReplySpec extends AkkaSpec with BeforeAndAfterEach { filterEvents(EventFilter[Exception]("Expected exception")) { val supervisor = actorOf(Props[Supervisor].withFaultHandler(OneForOneStrategy(List(classOf[Exception]), Some(0)))) val actor = (supervisor ? Props[CrashingActor]).as[ActorRef].get - actor.isShutdown must be(false) + actor.isTerminated must be(false) actor ! 
"Die" state.finished.await 1.second.dilated.sleep() - actor.isShutdown must be(true) + actor.isTerminated must be(true) supervisor.stop() } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorLifeCycleSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorLifeCycleSpec.scala index 23373a8af6..5d3358dc6f 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorLifeCycleSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorLifeCycleSpec.scala @@ -14,13 +14,7 @@ import java.util.concurrent.atomic._ object ActorLifeCycleSpec { -} - -@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) -class ActorLifeCycleSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitSender { - import ActorLifeCycleSpec._ - - class LifeCycleTestActor(id: String, generationProvider: AtomicInteger) extends Actor { + class LifeCycleTestActor(testActor: ActorRef, id: String, generationProvider: AtomicInteger) extends Actor { def report(msg: Any) = testActor ! message(msg) def message(msg: Any): Tuple3[Any, String, Int] = (msg, id, currentGen) val currentGen = generationProvider.getAndIncrement() @@ -29,6 +23,12 @@ class ActorLifeCycleSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitS def receive = { case "status" ⇒ sender ! 
message("OK") } } +} + +@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) +class ActorLifeCycleSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitSender { + import ActorLifeCycleSpec._ + "An Actor" must { "invoke preRestart, preStart, postRestart when using OneForOneStrategy" in { @@ -36,7 +36,7 @@ class ActorLifeCycleSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitS val id = newUuid().toString val supervisor = actorOf(Props[Supervisor].withFaultHandler(OneForOneStrategy(List(classOf[Exception]), Some(3)))) val gen = new AtomicInteger(0) - val restarterProps = Props(new LifeCycleTestActor(id, gen) { + val restarterProps = Props(new LifeCycleTestActor(testActor, id, gen) { override def preRestart(reason: Throwable, message: Option[Any]) { report("preRestart") } override def postRestart(reason: Throwable) { report("postRestart") } }) @@ -70,7 +70,7 @@ class ActorLifeCycleSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitS val id = newUuid().toString val supervisor = actorOf(Props[Supervisor].withFaultHandler(OneForOneStrategy(List(classOf[Exception]), Some(3)))) val gen = new AtomicInteger(0) - val restarterProps = Props(new LifeCycleTestActor(id, gen)) + val restarterProps = Props(new LifeCycleTestActor(testActor, id, gen)) val restarter = (supervisor ? restarterProps).as[ActorRef].get expectMsg(("preStart", id, 0)) @@ -100,7 +100,7 @@ class ActorLifeCycleSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitS val id = newUuid().toString val supervisor = actorOf(Props[Supervisor].withFaultHandler(OneForOneStrategy(List(classOf[Exception]), Some(3)))) val gen = new AtomicInteger(0) - val props = Props(new LifeCycleTestActor(id, gen)) + val props = Props(new LifeCycleTestActor(testActor, id, gen)) val a = (supervisor ? props).as[ActorRef].get expectMsg(("preStart", id, 0)) a ! 
"status" diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala index 4865654da0..1358e61c82 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala @@ -247,7 +247,7 @@ class ActorRefSpec extends AkkaSpec { out.flush out.close - Serialization.app.withValue(app) { + Serialization.system.withValue(system.asInstanceOf[ActorSystemImpl]) { val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray)) val readA = in.readObject @@ -257,7 +257,7 @@ class ActorRefSpec extends AkkaSpec { } } - "throw an exception on deserialize if no app in scope" in { + "throw an exception on deserialize if no system in scope" in { val a = actorOf[InnerActor] import java.io._ @@ -275,7 +275,7 @@ class ActorRefSpec extends AkkaSpec { (intercept[java.lang.IllegalStateException] { in.readObject }).getMessage must be === "Trying to deserialize a serialized ActorRef without an ActorSystem in scope." + - " Use akka.serialization.Serialization.app.withValue(akkaApplication) { ... }" + " Use akka.serialization.Serialization.system.withValue(system) { ... 
}" } "must throw exception on deserialize if not present in actor hierarchy (and remoting is not enabled)" in { @@ -284,14 +284,15 @@ class ActorRefSpec extends AkkaSpec { val baos = new ByteArrayOutputStream(8192 * 32) val out = new ObjectOutputStream(baos) - val serialized = SerializedActorRef(app.address.hostname, app.address.port, "/this/path/does/not/exist") + val addr = system.asInstanceOf[ActorSystemImpl].provider.rootPath.remoteAddress + val serialized = SerializedActorRef(addr.hostname, addr.port, "/this/path/does/not/exist") out.writeObject(serialized) out.flush out.close - Serialization.app.withValue(app) { + Serialization.system.withValue(system.asInstanceOf[ActorSystemImpl]) { val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray)) (intercept[java.lang.IllegalStateException] { in.readObject @@ -363,7 +364,7 @@ class ActorRefSpec extends AkkaSpec { ffive.get must be("five") fnull.get must be("null") - awaitCond(ref.isShutdown, 2000 millis) + awaitCond(ref.isTerminated, 2000 millis) } "restart when Kill:ed" in { diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala new file mode 100644 index 0000000000..d472387f13 --- /dev/null +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala @@ -0,0 +1,39 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. 
+ */ +package akka.actor + +import akka.testkit._ +import org.scalatest.junit.JUnitSuite +import com.typesafe.config.ConfigFactory + +class JavaExtensionSpec extends JavaExtension with JUnitSuite + +object ActorSystemSpec { + + class TestExtension extends Extension[TestExtension] { + var system: ActorSystemImpl = _ + + def key = TestExtension + + def init(system: ActorSystemImpl) { + this.system = system + } + } + + object TestExtension extends ExtensionKey[TestExtension] + +} + +class ActorSystemSpec extends AkkaSpec("""akka.extensions = ["akka.actor.ActorSystemSpec$TestExtension"]""") { + import ActorSystemSpec._ + + "An ActorSystem" must { + + "support extensions" in { + system.extension(TestExtension).system must be === system + } + + } + +} \ No newline at end of file diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorTimeoutSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorTimeoutSpec.scala index 46a345a7c2..0057cdda60 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorTimeoutSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorTimeoutSpec.scala @@ -17,8 +17,8 @@ class ActorTimeoutSpec extends AkkaSpec with BeforeAndAfterAll { } }, timeout = t)) - val defaultTimeout = app.AkkaConfig.ActorTimeout.duration - val testTimeout = if (app.AkkaConfig.ActorTimeout.duration < 400.millis) 500 millis else 100 millis + val defaultTimeout = system.settings.ActorTimeout.duration + val testTimeout = if (system.settings.ActorTimeout.duration < 400.millis) 500 millis else 100 millis "An Actor-based Future" must { diff --git a/akka-actor-tests/src/test/scala/akka/actor/ClusterSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ClusterSpec.scala deleted file mode 100644 index 2d2bed7342..0000000000 --- a/akka-actor-tests/src/test/scala/akka/actor/ClusterSpec.scala +++ /dev/null @@ -1,54 +0,0 @@ -package akka.actor - -import akka.testkit.AkkaSpec - -@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) -class 
ClusterSpec extends AkkaSpec { - - "ClusterSpec: A Deployer" must { - "be able to parse 'akka.actor.cluster._' config elements" in { - - // TODO: make it use its own special config? - val config = app.config - import config._ - - //akka.cluster - getString("akka.cluster.name") must equal(Some("test-cluster")) - getString("akka.cluster.zookeeper-server-addresses") must equal(Some("localhost:2181")) - getInt("akka.remote.server.port") must equal(Some(2552)) - getInt("akka.cluster.max-time-to-wait-until-connected") must equal(Some(30)) - getInt("akka.cluster.session-timeout") must equal(Some(60)) - getInt("akka.cluster.connection-timeout") must equal(Some(60)) - getInt("akka.remote.remote-daemon-ack-timeout") must equal(Some(30)) - getBool("akka.cluster.include-ref-node-in-replica-set") must equal(Some(true)) - getString("akka.remote.layer") must equal(Some("akka.cluster.netty.NettyRemoteSupport")) - getString("akka.remote.secure-cookie") must equal(Some("")) - getBool("akka.remote.use-passive-connections") must equal(Some(true)) - getString("akka.cluster.log-directory") must equal(Some("_akka_cluster")) - - //akka.cluster.replication - getString("akka.cluster.replication.digest-type") must equal(Some("MAC")) - getString("akka.cluster.replication.password") must equal(Some("secret")) - getInt("akka.cluster.replication.ensemble-size") must equal(Some(3)) - getInt("akka.cluster.replication.quorum-size") must equal(Some(2)) - getInt("akka.cluster.replication.snapshot-frequency") must equal(Some(1000)) - getInt("akka.cluster.replication.timeout") must equal(Some(30)) - - //akka.remote.server - getInt("akka.remote.server.port") must equal(Some(2552)) - getInt("akka.remote.server.message-frame-size") must equal(Some(1048576)) - getInt("akka.remote.server.connection-timeout") must equal(Some(120)) - getBool("akka.remote.server.require-cookie") must equal(Some(false)) - getBool("akka.remote.server.untrusted-mode") must equal(Some(false)) - getInt("akka.remote.server.backlog") 
must equal(Some(4096)) - - //akka.remote.client - getBool("akka.remote.client.buffering.retry-message-send-on-failure") must equal(Some(false)) - getInt("akka.remote.client.buffering.capacity") must equal(Some(-1)) - getInt("akka.remote.client.reconnect-delay") must equal(Some(5)) - getInt("akka.remote.client.read-timeout") must equal(Some(3600)) - getInt("akka.remote.client.reap-futures-delay") must equal(Some(5)) - getInt("akka.remote.client.reconnection-time-window") must equal(Some(600)) - } - } -} diff --git a/akka-actor-tests/src/test/scala/akka/actor/DeathWatchSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/DeathWatchSpec.scala index d18b1f97bb..90e398e4cb 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/DeathWatchSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/DeathWatchSpec.scala @@ -96,7 +96,7 @@ class DeathWatchSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitSende terminal ! Kill expectTerminationOf(terminal) - terminal.isShutdown must be === true + terminal.isTerminated must be === true supervisor.stop() } @@ -107,7 +107,7 @@ class DeathWatchSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitSende case class FF(fail: Failed) val supervisor = actorOf(Props[Supervisor] .withFaultHandler(new OneForOneStrategy(FaultHandlingStrategy.makeDecider(List(classOf[Exception])), Some(0)) { - override def handleFailure(child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[(ActorRef, ChildRestartStats)]) = { + override def handleFailure(child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[ChildRestartStats]) = { testActor.tell(FF(Failed(cause)), child) super.handleFailure(child, cause, stats, children) } @@ -123,7 +123,7 @@ class DeathWatchSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitSende case FF(Failed(DeathPactException(`failed`))) if lastSender eq brother ⇒ 2 case Terminated(`brother`) ⇒ 3 } - testActor must not be 'shutdown + testActor.isTerminated 
must not be true result must be(Seq(1, 2, 3)) } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala index 0604f9e01f..68a81d9797 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/DeployerSpec.scala @@ -8,29 +8,240 @@ import akka.testkit.AkkaSpec import akka.util.duration._ import DeploymentConfig._ import akka.remote.RemoteAddress +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigParseOptions + +object DeployerSpec { + val deployerConf = ConfigFactory.parseString(""" + akka.actor.deployment { + /app/service1 { + } + /app/service2 { + router = round-robin + nr-of-instances = 3 + remote { + nodes = ["wallace:2552", "gromit:2552"] + } + } + /app/service3 { + create-as { + class = "akka.actor.DeployerSpec$RecipeActor" + } + } + /app/service-auto { + router = round-robin + nr-of-instances = auto + } + /app/service-direct { + router = direct + } + /app/service-direct2 { + router = direct + # nr-of-instances ignored when router = direct + nr-of-instances = 2 + } + /app/service-round-robin { + router = round-robin + } + /app/service-random { + router = random + } + /app/service-scatter-gather { + router = scatter-gather + } + /app/service-least-cpu { + router = least-cpu + } + /app/service-least-ram { + router = least-ram + } + /app/service-least-messages { + router = least-messages + } + /app/service-custom { + router = org.my.Custom + } + /app/service-cluster1 { + cluster { + preferred-nodes = ["node:wallace", "node:gromit"] + } + } + /app/service-cluster2 { + cluster { + preferred-nodes = ["node:wallace", "node:gromit"] + replication { + strategy = write-behind + } + } + } + } + """, ConfigParseOptions.defaults) + + class RecipeActor extends Actor { + def receive = { case _ ⇒ } + } + +} @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) -class DeployerSpec extends 
AkkaSpec { +class DeployerSpec extends AkkaSpec(DeployerSpec.deployerConf) { "A Deployer" must { - "be able to parse 'akka.actor.deployment._' config elements" in { - val deployment = app.provider.deployer.lookupInConfig("/app/service-ping") + + "be able to parse 'akka.actor.deployment._' with all default values" in { + val service = "/app/service1" + val deployment = system.asInstanceOf[ActorSystemImpl].provider.deployer.lookupDeployment(service) deployment must be('defined) - deployment must equal(Some( + deployment must be(Some( Deploy( - "/app/service-ping", + service, + None, + Direct, + NrOfInstances(1), + LocalScope))) + } + + "use None deployment for undefined service" in { + val service = "/app/undefined" + val deployment = system.asInstanceOf[ActorSystemImpl].provider.deployer.lookupDeployment(service) + deployment must be(None) + } + + "be able to parse 'akka.actor.deployment._' with specified remote nodes" in { + val service = "/app/service2" + val deployment = system.asInstanceOf[ActorSystemImpl].provider.deployer.lookupDeployment(service) + deployment must be('defined) + + deployment must be(Some( + Deploy( + service, None, RoundRobin, NrOfInstances(3), - RemoteScope(List( + RemoteScope(Seq( RemoteAddress("wallace", 2552), RemoteAddress("gromit", 2552)))))) - // ClusterScope( - // List(Node("node1")), - // new NrOfInstances(3), - // Replication( - // TransactionLog, - // WriteThrough))))) } + + "be able to parse 'akka.actor.deployment._' with recipe" in { + val service = "/app/service3" + val deployment = system.asInstanceOf[ActorSystemImpl].provider.deployer.lookupDeployment(service) + deployment must be('defined) + + deployment must be(Some( + Deploy( + service, + Some(ActorRecipe(classOf[DeployerSpec.RecipeActor])), + Direct, + NrOfInstances(1), + LocalScope))) + } + + "be able to parse 'akka.actor.deployment._' with number-of-instances=auto" in { + val service = "/app/service-auto" + val deployment = 
system.asInstanceOf[ActorSystemImpl].provider.deployer.lookupDeployment(service) + deployment must be('defined) + + deployment must be(Some( + Deploy( + service, + None, + RoundRobin, + AutoNrOfInstances, + LocalScope))) + } + + "detect invalid number-of-instances" in { + intercept[akka.config.ConfigurationException] { + val invalidDeployerConf = ConfigFactory.parseString(""" + akka.actor.deployment { + /app/service-invalid-number-of-instances { + router = round-robin + nr-of-instances = boom + } + } + """, ConfigParseOptions.defaults) + + ActorSystem("invalid", invalidDeployerConf) + } + } + + "be able to parse 'akka.actor.deployment._' with direct router" in { + assertRouting(Direct, "/app/service-direct") + } + + "ignore nr-of-instances with direct router" in { + assertRouting(Direct, "/app/service-direct2") + } + + "be able to parse 'akka.actor.deployment._' with round-robin router" in { + assertRouting(RoundRobin, "/app/service-round-robin") + } + + "be able to parse 'akka.actor.deployment._' with random router" in { + assertRouting(Random, "/app/service-random") + } + + "be able to parse 'akka.actor.deployment._' with scatter-gather router" in { + assertRouting(ScatterGather, "/app/service-scatter-gather") + } + + "be able to parse 'akka.actor.deployment._' with least-cpu router" in { + assertRouting(LeastCPU, "/app/service-least-cpu") + } + + "be able to parse 'akka.actor.deployment._' with least-ram router" in { + assertRouting(LeastRAM, "/app/service-least-ram") + } + + "be able to parse 'akka.actor.deployment._' with least-messages router" in { + assertRouting(LeastMessages, "/app/service-least-messages") + } + "be able to parse 'akka.actor.deployment._' with custom router" in { + assertRouting(CustomRouter("org.my.Custom"), "/app/service-custom") + } + + def assertRouting(expected: Routing, service: String) { + val deployment = system.asInstanceOf[ActorSystemImpl].provider.deployer.lookupDeployment(service) + deployment must be('defined) + + deployment 
must be(Some( + Deploy( + service, + None, + expected, + NrOfInstances(1), + LocalScope))) + + } + + "be able to parse 'akka.actor.deployment._' with specified cluster nodes" in { + val service = "/app/service-cluster1" + val deploymentConfig = system.asInstanceOf[ActorSystemImpl].provider.deployer.deploymentConfig + val deployment = system.asInstanceOf[ActorSystemImpl].provider.deployer.lookupDeployment(service) + deployment must be('defined) + + deployment.get.scope match { + case deploymentConfig.ClusterScope(remoteNodes, replication) ⇒ + remoteNodes must be(Seq(Node("wallace"), Node("gromit"))) + replication must be(Transient) + case other ⇒ fail("Unexpected: " + other) + } + } + + "be able to parse 'akka.actor.deployment._' with specified cluster replication" in { + val service = "/app/service-cluster2" + val deploymentConfig = system.asInstanceOf[ActorSystemImpl].provider.deployer.deploymentConfig + val deployment = system.asInstanceOf[ActorSystemImpl].provider.deployer.lookupDeployment(service) + deployment must be('defined) + + deployment.get.scope match { + case deploymentConfig.ClusterScope(remoteNodes, Replication(storage, strategy)) ⇒ + storage must be(TransactionLog) + strategy must be(WriteBehind) + case other ⇒ fail("Unexpected: " + other) + } + } + } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala index 2ba83a9971..7d829ec622 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala @@ -5,19 +5,17 @@ package akka.actor import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach } - import akka.testkit._ import TestEvent.Mute import FSM._ import akka.util.Duration import akka.util.duration._ import akka.event._ -import akka.actor.ActorSystem.defaultConfig -import akka.config.Configuration +import com.typesafe.config.ConfigFactory object FSMActorSpec { - class 
Latches(implicit app: ActorSystem) { + class Latches(implicit system: ActorSystem) { val unlockedLatch = TestLatch() val lockedLatch = TestLatch() val unhandledLatch = TestLatch() @@ -103,7 +101,7 @@ object FSMActorSpec { } @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) -class FSMActorSpec extends AkkaSpec(Configuration("akka.actor.debug.fsm" -> true)) with ImplicitSender { +class FSMActorSpec extends AkkaSpec(Map("akka.actor.debug.fsm" -> true)) with ImplicitSender { import FSMActorSpec._ "An FSM Actor" must { @@ -166,13 +164,14 @@ class FSMActorSpec extends AkkaSpec(Configuration("akka.actor.debug.fsm" -> true case Ev("go") ⇒ goto(2) } }) + val name = fsm.toString filterException[Logging.EventHandlerException] { - app.eventStream.subscribe(testActor, classOf[Logging.Error]) + system.eventStream.subscribe(testActor, classOf[Logging.Error]) fsm ! "go" expectMsgPF(1 second, hint = "Next state 2 does not exist") { - case Logging.Error(_, `fsm`, "Next state 2 does not exist") ⇒ true + case Logging.Error(_, `name`, "Next state 2 does not exist") ⇒ true } - app.eventStream.unsubscribe(testActor) + system.eventStream.unsubscribe(testActor) } } @@ -193,9 +192,10 @@ class FSMActorSpec extends AkkaSpec(Configuration("akka.actor.debug.fsm" -> true } "log events and transitions if asked to do so" in { - new TestKit(ActorSystem("fsm event", ActorSystem.defaultConfig ++ - Configuration("akka.loglevel" -> "DEBUG", - "akka.actor.debug.fsm" -> true))) { + import scala.collection.JavaConverters._ + val config = ConfigFactory.parseMap(Map("akka.loglevel" -> "DEBUG", + "akka.actor.debug.fsm" -> true).asJava) + new TestKit(ActorSystem("fsm event", config)) { EventFilter.debug() intercept { val fsm = TestActorRef(new Actor with LoggingFSM[Int, Null] { startWith(1, null) @@ -213,20 +213,21 @@ class FSMActorSpec extends AkkaSpec(Configuration("akka.actor.debug.fsm" -> true case StopEvent(r, _, _) ⇒ testActor ! 
r } }) - app.eventStream.subscribe(testActor, classOf[Logging.Debug]) + val name = fsm.toString + system.eventStream.subscribe(testActor, classOf[Logging.Debug]) fsm ! "go" expectMsgPF(1 second, hint = "processing Event(go,null)") { - case Logging.Debug(`fsm`, s: String) if s.startsWith("processing Event(go,null) from Actor[" + app.address + "/sys/testActor") ⇒ true + case Logging.Debug(`name`, s: String) if s.startsWith("processing Event(go,null) from Actor[") ⇒ true } - expectMsg(1 second, Logging.Debug(fsm, "setting timer 't'/1500 milliseconds: Shutdown")) - expectMsg(1 second, Logging.Debug(fsm, "transition 1 -> 2")) + expectMsg(1 second, Logging.Debug(name, "setting timer 't'/1500 milliseconds: Shutdown")) + expectMsg(1 second, Logging.Debug(name, "transition 1 -> 2")) fsm ! "stop" expectMsgPF(1 second, hint = "processing Event(stop,null)") { - case Logging.Debug(`fsm`, s: String) if s.startsWith("processing Event(stop,null) from Actor[" + app.address + "/sys/testActor") ⇒ true + case Logging.Debug(`name`, s: String) if s.startsWith("processing Event(stop,null) from Actor[") ⇒ true } - expectMsgAllOf(1 second, Logging.Debug(fsm, "canceling timer 't'"), Normal) + expectMsgAllOf(1 second, Logging.Debug(name, "canceling timer 't'"), Normal) expectNoMsg(1 second) - app.eventStream.unsubscribe(testActor) + system.eventStream.unsubscribe(testActor) } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/FSMTimingSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/FSMTimingSpec.scala index cb30653d8f..6a0cc7d3ec 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/FSMTimingSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/FSMTimingSpec.scala @@ -24,7 +24,7 @@ class FSMTimingSpec extends AkkaSpec with ImplicitSender { "A Finite State Machine" must { - "receive StateTimeout" in { + "receive StateTimeout" taggedAs TimingTest in { within(1 second) { within(500 millis, 1 second) { fsm ! 
TestStateTimeout @@ -34,7 +34,7 @@ class FSMTimingSpec extends AkkaSpec with ImplicitSender { } } - "cancel a StateTimeout" in { + "cancel a StateTimeout" taggedAs TimingTest in { within(1 second) { fsm ! TestStateTimeout fsm ! Cancel @@ -44,7 +44,7 @@ class FSMTimingSpec extends AkkaSpec with ImplicitSender { } } - "allow StateTimeout override" in { + "allow StateTimeout override" taggedAs TimingTest in { within(500 millis) { fsm ! TestStateTimeoutOverride expectNoMsg @@ -56,8 +56,8 @@ class FSMTimingSpec extends AkkaSpec with ImplicitSender { } } - "receive single-shot timer" in { - within(1.5 seconds) { + "receive single-shot timer" taggedAs TimingTest in { + within(2 seconds) { within(500 millis, 1 second) { fsm ! TestSingleTimer expectMsg(Tick) @@ -67,7 +67,7 @@ class FSMTimingSpec extends AkkaSpec with ImplicitSender { } } - "correctly cancel a named timer" in { + "correctly cancel a named timer" taggedAs TimingTest in { fsm ! TestCancelTimer within(500 millis) { fsm ! Tick @@ -80,7 +80,7 @@ class FSMTimingSpec extends AkkaSpec with ImplicitSender { expectMsg(1 second, Transition(fsm, TestCancelTimer, Initial)) } - "not get confused between named and state timers" in { + "not get confused between named and state timers" taggedAs TimingTest in { fsm ! TestCancelStateTimerInNamedTimerMessage fsm ! Tick expectMsg(500 millis, Tick) @@ -94,7 +94,7 @@ class FSMTimingSpec extends AkkaSpec with ImplicitSender { } } - "receive and cancel a repeated timer" in { + "receive and cancel a repeated timer" taggedAs TimingTest in { fsm ! 
TestRepeatedTimer val seq = receiveWhile(2 seconds) { case Tick ⇒ Tick @@ -105,9 +105,9 @@ class FSMTimingSpec extends AkkaSpec with ImplicitSender { } } - "notify unhandled messages" in { - filterEvents(EventFilter.warning("unhandled event Tick in state TestUnhandled", source = fsm, occurrences = 1), - EventFilter.warning("unhandled event Unhandled(test) in state TestUnhandled", source = fsm, occurrences = 1)) { + "notify unhandled messages" taggedAs TimingTest in { + filterEvents(EventFilter.warning("unhandled event Tick in state TestUnhandled", source = fsm.toString, occurrences = 1), + EventFilter.warning("unhandled event Unhandled(test) in state TestUnhandled", source = fsm.toString, occurrences = 1)) { fsm ! TestUnhandled within(1 second) { fsm ! Tick @@ -182,7 +182,7 @@ object FSMTimingSpec { when(TestCancelTimer) { case Ev(Tick) ⇒ setTimer("hallo", Tock, 1 milli, false) - TestKit.awaitCond(context.hasMessages, 1 second) + TestKit.awaitCond(!context.dispatcher.mailboxIsEmpty(context.asInstanceOf[ActorCell]), 1 second) cancelTimer("hallo") sender ! 
Tick setTimer("hallo", Tock, 500 millis, false) @@ -209,7 +209,7 @@ object FSMTimingSpec { case Ev(Tick) ⇒ suspend(self) setTimer("named", Tock, 1 millis, false) - TestKit.awaitCond(context.hasMessages, 1 second) + TestKit.awaitCond(!context.dispatcher.mailboxIsEmpty(context.asInstanceOf[ActorCell]), 1 second) stay forMax (1 millis) replying Tick case Ev(Tock) ⇒ goto(TestCancelStateTimerInNamedTimerMessage2) diff --git a/akka-actor-tests/src/test/scala/akka/actor/ForwardActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ForwardActorSpec.scala index 9b6a7d1d86..2951e957dc 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ForwardActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ForwardActorSpec.scala @@ -12,12 +12,12 @@ import akka.util.Duration object ForwardActorSpec { val ExpectedMessage = "FOO" - def createForwardingChain(app: ActorSystem): ActorRef = { - val replier = app.actorOf(new Actor { + def createForwardingChain(system: ActorSystem): ActorRef = { + val replier = system.actorOf(new Actor { def receive = { case x ⇒ sender ! 
x } }) - def mkforwarder(forwardTo: ActorRef) = app.actorOf( + def mkforwarder(forwardTo: ActorRef) = system.actorOf( new Actor { def receive = { case x ⇒ forwardTo forward x } }) @@ -37,14 +37,14 @@ class ForwardActorSpec extends AkkaSpec { val replyTo = actorOf(new Actor { def receive = { case ExpectedMessage ⇒ latch.countDown() } }) - val chain = createForwardingChain(app) + val chain = createForwardingChain(system) chain.tell(ExpectedMessage, replyTo) latch.await(Duration(5, "s")) must be === true } "forward actor reference when invoking forward on bang bang" in { - val chain = createForwardingChain(app) + val chain = createForwardingChain(system) chain.ask(ExpectedMessage, 5000).get must be === ExpectedMessage } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/IOActor.scala b/akka-actor-tests/src/test/scala/akka/actor/IOActor.scala index d4f08e40c2..3765ad5b6c 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/IOActor.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/IOActor.scala @@ -42,7 +42,7 @@ object IOActorSpec { class SimpleEchoClient(host: String, port: Int, ioManager: ActorRef) extends Actor with IO { - lazy val socket: SocketHandle = connect(ioManager, host, port, reader) + lazy val socket: SocketHandle = connect(ioManager, host, port)(reader) lazy val reader: ActorRef = context.actorOf { new Actor with IO { def receiveIO = { diff --git a/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala index 4b93d37d2c..707c425295 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala @@ -13,14 +13,15 @@ class LocalActorRefProviderSpec extends AkkaSpec { "An LocalActorRefProvider" must { "only create one instance of an actor with a specific address in a concurrent environment" in { - val provider = app.provider + val impl = 
system.asInstanceOf[ActorSystemImpl] + val provider = impl.provider provider.isInstanceOf[LocalActorRefProvider] must be(true) (0 until 100) foreach { i ⇒ // 100 concurrent runs val address = "new-actor" + i implicit val timeout = Timeout(5 seconds) - ((1 to 4) map { _ ⇒ Future { provider.actorOf(Props(c ⇒ { case _ ⇒ }), app.guardian, address) } }).map(_.get).distinct.size must be(1) + ((1 to 4) map { _ ⇒ Future { provider.actorOf(impl, Props(c ⇒ { case _ ⇒ }), impl.guardian, address) } }).map(_.get).distinct.size must be(1) } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/LoggingReceiveSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/LoggingReceiveSpec.scala index 1ecde61cf5..0e5602a899 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/LoggingReceiveSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/LoggingReceiveSpec.scala @@ -7,10 +7,12 @@ import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach } import akka.util.duration._ import akka.testkit._ import org.scalatest.WordSpec -import akka.actor.ActorSystem.defaultConfig -import akka.config.Configuration import akka.event.Logging import akka.util.Duration +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigParseOptions +import scala.collection.JavaConverters._ +import java.util.Properties object LoggingReceiveSpec { class TestLogActor extends Actor { @@ -22,11 +24,10 @@ object LoggingReceiveSpec { class LoggingReceiveSpec extends WordSpec with BeforeAndAfterEach with BeforeAndAfterAll { import LoggingReceiveSpec._ - - val config = defaultConfig ++ Configuration("akka.event-handler-level" -> "DEBUG") - val appLogging = ActorSystem("logging", config ++ Configuration("akka.actor.debug.receive" -> true)) - val appAuto = ActorSystem("autoreceive", config ++ Configuration("akka.actor.debug.autoreceive" -> true)) - val appLifecycle = ActorSystem("lifecycle", config ++ Configuration("akka.actor.debug.lifecycle" -> true)) + val config = 
ConfigFactory.parseMap(Map("akka.logLevel" -> "DEBUG").asJava) + val appLogging = ActorSystem("logging", ConfigFactory.parseMap(Map("akka.actor.debug.receive" -> true).asJava).withFallback(config)) + val appAuto = ActorSystem("autoreceive", ConfigFactory.parseMap(Map("akka.actor.debug.autoreceive" -> true).asJava).withFallback(config)) + val appLifecycle = ActorSystem("lifecycle", ConfigFactory.parseMap(Map("akka.actor.debug.lifecycle" -> true).asJava).withFallback(config)) val filter = TestEvent.Mute(EventFilter.custom { case _: Logging.Debug ⇒ true @@ -53,29 +54,31 @@ class LoggingReceiveSpec extends WordSpec with BeforeAndAfterEach with BeforeAnd "decorate a Receive" in { new TestKit(appLogging) { - app.eventStream.subscribe(testActor, classOf[Logging.Debug]) + system.eventStream.subscribe(testActor, classOf[Logging.Debug]) val r: Actor.Receive = { case null ⇒ } - val log = Actor.LoggingReceive(this, r) + val log = Actor.LoggingReceive("funky", r) log.isDefinedAt("hallo") - expectMsg(1 second, Logging.Debug(this, "received unhandled message hallo")) + expectMsg(1 second, Logging.Debug("funky", "received unhandled message hallo")) } } "be added on Actor if requested" in { new TestKit(appLogging) with ImplicitSender { ignoreMute(this) - app.eventStream.subscribe(testActor, classOf[Logging.Debug]) - app.eventStream.subscribe(testActor, classOf[Logging.Error]) + system.eventStream.subscribe(testActor, classOf[Logging.Debug]) + system.eventStream.subscribe(testActor, classOf[Logging.Error]) val actor = TestActorRef(new Actor { def receive = loggable(this) { - case _ ⇒ sender ! "x" + case x ⇒ + sender ! "x" } }) + val name = actor.toString actor ! 
"buh" within(1 second) { - expectMsg(Logging.Debug(actor.underlyingActor, "received handled message buh")) + expectMsg(Logging.Debug(name, "received handled message buh")) expectMsg("x") } val r: Actor.Receive = { @@ -86,7 +89,7 @@ class LoggingReceiveSpec extends WordSpec with BeforeAndAfterEach with BeforeAnd within(500 millis) { actor ! "bah" expectMsgPF() { - case Logging.Error(_: UnhandledMessageException, `actor`, _) ⇒ true + case Logging.Error(_: UnhandledMessageException, `name`, _) ⇒ true } } } @@ -95,7 +98,7 @@ class LoggingReceiveSpec extends WordSpec with BeforeAndAfterEach with BeforeAnd "not duplicate logging" in { new TestKit(appLogging) with ImplicitSender { - app.eventStream.subscribe(testActor, classOf[Logging.Debug]) + system.eventStream.subscribe(testActor, classOf[Logging.Debug]) val actor = TestActorRef(new Actor { def receive = loggable(this)(loggable(this) { case _ ⇒ sender ! "x" @@ -103,7 +106,7 @@ class LoggingReceiveSpec extends WordSpec with BeforeAndAfterEach with BeforeAnd }) actor ! "buh" within(1 second) { - expectMsg(Logging.Debug(actor.underlyingActor, "received handled message buh")) + expectMsg(Logging.Debug(actor.toString, "received handled message buh")) expectMsg("x") } } @@ -115,17 +118,18 @@ class LoggingReceiveSpec extends WordSpec with BeforeAndAfterEach with BeforeAnd "log AutoReceiveMessages if requested" in { new TestKit(appAuto) { - app.eventStream.subscribe(testActor, classOf[Logging.Debug]) + system.eventStream.subscribe(testActor, classOf[Logging.Debug]) val actor = TestActorRef(new Actor { def receive = { case _ ⇒ } }) + val name = actor.toString actor ! 
PoisonPill expectMsgPF() { - case Logging.Debug(`actor`, msg: String) if msg startsWith "received AutoReceiveMessage Envelope(PoisonPill" ⇒ true + case Logging.Debug(`name`, msg: String) if msg startsWith "received AutoReceiveMessage Envelope(PoisonPill" ⇒ true } - awaitCond(actor.isShutdown, 100 millis) + awaitCond(actor.isTerminated, 100 millis) } } @@ -137,24 +141,27 @@ class LoggingReceiveSpec extends WordSpec with BeforeAndAfterEach with BeforeAnd val s = ref.toString s.contains("MainBusReaper") || s.contains("Supervisor") } - app.eventStream.subscribe(testActor, classOf[Logging.Debug]) - app.eventStream.subscribe(testActor, classOf[Logging.Error]) + system.eventStream.subscribe(testActor, classOf[Logging.Debug]) + system.eventStream.subscribe(testActor, classOf[Logging.Error]) within(3 seconds) { - val lifecycleGuardian = appLifecycle.guardian + val lifecycleGuardian = appLifecycle.asInstanceOf[ActorSystemImpl].guardian + val lname = lifecycleGuardian.toString val supervisor = TestActorRef[TestLogActor](Props[TestLogActor].withFaultHandler(OneForOneStrategy(List(classOf[Throwable]), 5, 5000))) + val sname = supervisor.toString val supervisorSet = receiveWhile(messages = 2) { - case Logging.Debug(`lifecycleGuardian`, msg: String) if msg startsWith "now supervising" ⇒ 1 - case Logging.Debug(`supervisor`, msg: String) if msg startsWith "started" ⇒ 2 + case Logging.Debug(`lname`, msg: String) if msg startsWith "now supervising" ⇒ 1 + case Logging.Debug(`sname`, msg: String) if msg startsWith "started" ⇒ 2 }.toSet expectNoMsg(Duration.Zero) assert(supervisorSet == Set(1, 2), supervisorSet + " was not Set(1, 2)") - val actor = new TestActorRef[TestLogActor](app, Props[TestLogActor], supervisor, "none") + val actor = TestActorRef[TestLogActor](Props[TestLogActor], supervisor, "none") + val aname = actor.toString val set = receiveWhile(messages = 2) { - case Logging.Debug(`supervisor`, msg: String) if msg startsWith "now supervising" ⇒ 1 - case Logging.Debug(`actor`, 
msg: String) if msg startsWith "started" ⇒ 2 + case Logging.Debug(`sname`, msg: String) if msg startsWith "now supervising" ⇒ 1 + case Logging.Debug(`aname`, msg: String) if msg startsWith "started" ⇒ 2 }.toSet expectNoMsg(Duration.Zero) assert(set == Set(1, 2), set + " was not Set(1, 2)") @@ -174,18 +181,18 @@ class LoggingReceiveSpec extends WordSpec with BeforeAndAfterEach with BeforeAnd filterException[ActorKilledException] { actor ! Kill val set = receiveWhile(messages = 3) { - case Logging.Error(_: ActorKilledException, `actor`, "Kill") ⇒ 1 - case Logging.Debug(`actor`, "restarting") ⇒ 2 - case Logging.Debug(`actor`, "restarted") ⇒ 3 + case Logging.Error(_: ActorKilledException, `aname`, "Kill") ⇒ 1 + case Logging.Debug(`aname`, "restarting") ⇒ 2 + case Logging.Debug(`aname`, "restarted") ⇒ 3 }.toSet expectNoMsg(Duration.Zero) assert(set == Set(1, 2, 3), set + " was not Set(1, 2, 3)") } supervisor.stop() - expectMsg(Logging.Debug(supervisor, "stopping")) - expectMsg(Logging.Debug(actor, "stopped")) - expectMsg(Logging.Debug(supervisor, "stopped")) + expectMsg(Logging.Debug(sname, "stopping")) + expectMsg(Logging.Debug(aname, "stopped")) + expectMsg(Logging.Debug(sname, "stopped")) } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/RestartStrategySpec.scala b/akka-actor-tests/src/test/scala/akka/actor/RestartStrategySpec.scala index f9ef1d8096..f7ad0d34cb 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/RestartStrategySpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/RestartStrategySpec.scala @@ -16,7 +16,7 @@ import akka.testkit.AkkaSpec class RestartStrategySpec extends AkkaSpec { override def atStartup { - app.eventStream.publish(Mute(EventFilter[Exception]("Crashing..."))) + system.eventStream.publish(Mute(EventFilter[Exception]("Crashing..."))) } object Ping @@ -89,7 +89,7 @@ class RestartStrategySpec extends AkkaSpec { (1 to 100) foreach { _ ⇒ slave ! 
Crash } assert(countDownLatch.await(120, TimeUnit.SECONDS)) - assert(!slave.isShutdown) + assert(!slave.isTerminated) } "ensure that slave restarts after number of crashes not within time range" in { @@ -146,7 +146,7 @@ class RestartStrategySpec extends AkkaSpec { assert(thirdRestartLatch.tryAwait(1, TimeUnit.SECONDS)) - assert(!slave.isShutdown) + assert(!slave.isTerminated) } "ensure that slave is not restarted after max retries" in { @@ -183,7 +183,7 @@ class RestartStrategySpec extends AkkaSpec { // test restart and post restart ping assert(restartLatch.tryAwait(10, TimeUnit.SECONDS)) - assert(!slave.isShutdown) + assert(!slave.isTerminated) // now crash again... should not restart slave ! Crash @@ -197,7 +197,7 @@ class RestartStrategySpec extends AkkaSpec { slave ! Crash assert(stopLatch.tryAwait(10, TimeUnit.SECONDS)) sleep(500L) - assert(slave.isShutdown) + assert(slave.isTerminated) } "ensure that slave is not restarted within time range" in { @@ -237,7 +237,7 @@ class RestartStrategySpec extends AkkaSpec { // test restart and post restart ping assert(restartLatch.tryAwait(10, TimeUnit.SECONDS)) - assert(!slave.isShutdown) + assert(!slave.isTerminated) // now crash again... should not restart slave ! 
Crash @@ -253,7 +253,7 @@ class RestartStrategySpec extends AkkaSpec { assert(maxNoOfRestartsLatch.tryAwait(10, TimeUnit.SECONDS)) sleep(500L) - assert(slave.isShutdown) + assert(slave.isTerminated) } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala index ca7863f00e..6293f9c876 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala @@ -2,9 +2,10 @@ package akka.actor import org.scalatest.BeforeAndAfterEach import org.multiverse.api.latches.StandardLatch -import java.util.concurrent.{ ConcurrentLinkedQueue, CountDownLatch, TimeUnit } import akka.testkit.AkkaSpec import akka.testkit.EventFilter +import akka.util.duration._ +import java.util.concurrent.{ CountDownLatch, ConcurrentLinkedQueue, TimeUnit } @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach { @@ -28,14 +29,14 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach { def receive = { case Tick ⇒ countDownLatch.countDown() } }) // run every 50 millisec - collectCancellable(app.scheduler.schedule(tickActor, Tick, 0, 50, TimeUnit.MILLISECONDS)) + collectCancellable(system.scheduler.schedule(tickActor, Tick, 0 milliseconds, 50 milliseconds)) // after max 1 second it should be executed at least the 3 times already assert(countDownLatch.await(1, TimeUnit.SECONDS)) val countDownLatch2 = new CountDownLatch(3) - collectCancellable(app.scheduler.schedule(() ⇒ countDownLatch2.countDown(), 0, 50, TimeUnit.MILLISECONDS)) + collectCancellable(system.scheduler.schedule(() ⇒ countDownLatch2.countDown(), 0 milliseconds, 50 milliseconds)) // after max 1 second it should be executed at least the 3 times already assert(countDownLatch2.await(2, TimeUnit.SECONDS)) @@ -49,8 +50,8 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach { }) // run every 50 
millisec - collectCancellable(app.scheduler.scheduleOnce(tickActor, Tick, 50, TimeUnit.MILLISECONDS)) - collectCancellable(app.scheduler.scheduleOnce(() ⇒ countDownLatch.countDown(), 50, TimeUnit.MILLISECONDS)) + collectCancellable(system.scheduler.scheduleOnce(tickActor, Tick, 50 milliseconds)) + collectCancellable(system.scheduler.scheduleOnce(() ⇒ countDownLatch.countDown(), 50 milliseconds)) // after 1 second the wait should fail assert(countDownLatch.await(2, TimeUnit.SECONDS) == false) @@ -58,22 +59,6 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach { assert(countDownLatch.getCount == 1) } - /** - * ticket #372 - * FIXME rewrite the test so that registry is not used - */ - // "not create actors" in { - // object Ping - // val ticks = new CountDownLatch(1000) - // val actor = actorOf(new Actor { - // def receive = { case Ping ⇒ ticks.countDown } - // }) - // val numActors = app.registry.local.actors.length - // (1 to 1000).foreach(_ ⇒ collectFuture(Scheduler.scheduleOnce(actor, Ping, 1, TimeUnit.MILLISECONDS))) - // assert(ticks.await(10, TimeUnit.SECONDS)) - // assert(app.registry.local.actors.length === numActors) - // } - /** * ticket #372 */ @@ -86,7 +71,7 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach { }) (1 to 10).foreach { i ⇒ - val timeout = collectCancellable(app.scheduler.scheduleOnce(actor, Ping, 1, TimeUnit.SECONDS)) + val timeout = collectCancellable(system.scheduler.scheduleOnce(actor, Ping, 1 second)) timeout.cancel() } @@ -114,15 +99,59 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach { }) val actor = (supervisor ? 
props).as[ActorRef].get - collectCancellable(app.scheduler.schedule(actor, Ping, 500, 500, TimeUnit.MILLISECONDS)) + collectCancellable(system.scheduler.schedule(actor, Ping, 500 milliseconds, 500 milliseconds)) // appx 2 pings before crash EventFilter[Exception]("CRASH", occurrences = 1) intercept { - collectCancellable(app.scheduler.scheduleOnce(actor, Crash, 1000, TimeUnit.MILLISECONDS)) + collectCancellable(system.scheduler.scheduleOnce(actor, Crash, 1000 milliseconds)) } assert(restartLatch.tryAwait(2, TimeUnit.SECONDS)) // should be enough time for the ping countdown to recover and reach 6 pings assert(pingLatch.await(4, TimeUnit.SECONDS)) } + + "never fire prematurely" in { + val ticks = new CountDownLatch(300) + + case class Msg(ts: Long) + + val actor = actorOf(new Actor { + def receive = { + case Msg(ts) ⇒ + val now = System.nanoTime + // Make sure that no message has been dispatched before the scheduled time (10ms = 10000000ns) has occurred + if (now - ts < 10000000) throw new RuntimeException("Interval is too small: " + (now - ts)) + ticks.countDown() + } + }) + + (1 to 300).foreach { i ⇒ + collectCancellable(system.scheduler.scheduleOnce(actor, Msg(System.nanoTime), 10 milliseconds)) + Thread.sleep(5) + } + + assert(ticks.await(3, TimeUnit.SECONDS) == true) + } + + "schedule with different initial delay and frequency" in { + val ticks = new CountDownLatch(3) + + case object Msg + + val actor = actorOf(new Actor { + def receive = { + case Msg ⇒ ticks.countDown() + } + }) + + val startTime = System.nanoTime() + val cancellable = system.scheduler.schedule(actor, Msg, 1 second, 100 milliseconds) + ticks.await(3, TimeUnit.SECONDS) + val elapsedTimeMs = (System.nanoTime() - startTime) / 1000000 + + assert(elapsedTimeMs > 1200) + assert(elapsedTimeMs < 1500) // the precision is not ms exact + cancellable.cancel() + } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/SupervisorMiscSpec.scala 
b/akka-actor-tests/src/test/scala/akka/actor/SupervisorMiscSpec.scala index 99068ed76e..d8ae9d7444 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/SupervisorMiscSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/SupervisorMiscSpec.scala @@ -27,13 +27,13 @@ class SupervisorMiscSpec extends AkkaSpec { } }) - val actor1 = (supervisor ? workerProps.withDispatcher(app.dispatcherFactory.newPinnedDispatcher("pinned"))).as[ActorRef].get + val actor1 = (supervisor ? workerProps.withDispatcher(system.dispatcherFactory.newPinnedDispatcher("pinned"))).as[ActorRef].get - val actor2 = (supervisor ? workerProps.withDispatcher(app.dispatcherFactory.newPinnedDispatcher("pinned"))).as[ActorRef].get + val actor2 = (supervisor ? workerProps.withDispatcher(system.dispatcherFactory.newPinnedDispatcher("pinned"))).as[ActorRef].get - val actor3 = (supervisor ? workerProps.withDispatcher(app.dispatcherFactory.newDispatcher("test").build)).as[ActorRef].get + val actor3 = (supervisor ? workerProps.withDispatcher(system.dispatcherFactory.newDispatcher("test").build)).as[ActorRef].get - val actor4 = (supervisor ? workerProps.withDispatcher(app.dispatcherFactory.newPinnedDispatcher("pinned"))).as[ActorRef].get + val actor4 = (supervisor ? workerProps.withDispatcher(system.dispatcherFactory.newPinnedDispatcher("pinned"))).as[ActorRef].get actor1 ! Kill actor2 ! 
Kill diff --git a/akka-actor-tests/src/test/scala/akka/actor/SupervisorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/SupervisorSpec.scala index adc6e25574..65faedeb51 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/SupervisorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/SupervisorSpec.scala @@ -9,7 +9,7 @@ import akka.util.duration._ import akka.{ Die, Ping } import akka.actor.Actor._ import akka.testkit.TestEvent._ -import akka.testkit.{ EventFilter, ImplicitSender, AkkaSpec, filterEvents } +import akka.testkit._ import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.LinkedBlockingQueue @@ -121,7 +121,7 @@ class SupervisorSpec extends AkkaSpec with BeforeAndAfterEach with ImplicitSende } override def atStartup() { - app.eventStream.publish(Mute(EventFilter[RuntimeException](ExceptionMessage))) + system.eventStream.publish(Mute(EventFilter[RuntimeException](ExceptionMessage))) } override def beforeEach() = { diff --git a/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala index 59ce3d4952..d29627e443 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala @@ -13,6 +13,7 @@ import akka.serialization.Serialization import java.util.concurrent.atomic.AtomicReference import annotation.tailrec import akka.testkit.{ EventFilter, filterEvents, AkkaSpec } +import akka.serialization.SerializationExtension object TypedActorSpec { @@ -147,18 +148,18 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte newFooBar(Props().withTimeout(Timeout(d))) def newFooBar(props: Props): Foo = - app.typedActorOf(classOf[Foo], classOf[Bar], props) + system.typedActorOf(classOf[Foo], classOf[Bar], props) def newStacked(props: Props = Props().withTimeout(Timeout(2000))): Stacked = - app.typedActorOf(classOf[Stacked], classOf[StackedImpl], props) 
+ system.typedActorOf(classOf[Stacked], classOf[StackedImpl], props) - def mustStop(typedActor: AnyRef) = app.typedActor.stop(typedActor) must be(true) + def mustStop(typedActor: AnyRef) = system.typedActor.stop(typedActor) must be(true) "TypedActors" must { "be able to instantiate" in { val t = newFooBar - app.typedActor.isTypedActor(t) must be(true) + system.typedActor.isTypedActor(t) must be(true) mustStop(t) } @@ -168,7 +169,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte } "not stop non-started ones" in { - app.typedActor.stop(null) must be(false) + system.typedActor.stop(null) must be(false) } "throw an IllegalStateExcpetion when TypedActor.self is called in the wrong scope" in { @@ -187,7 +188,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte "be able to call toString" in { val t = newFooBar - t.toString must be(app.typedActor.getActorRefFor(t).toString) + t.toString must be(system.typedActor.getActorRefFor(t).toString) mustStop(t) } @@ -200,7 +201,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte "be able to call hashCode" in { val t = newFooBar - t.hashCode must be(app.typedActor.getActorRefFor(t).hashCode) + t.hashCode must be(system.typedActor.getActorRefFor(t).hashCode) mustStop(t) } @@ -295,7 +296,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte } "be able to support implementation only typed actors" in { - val t = app.typedActorOf[Foo, Bar](Props()) + val t = system.typedActorOf[Foo, Bar](Props()) val f = t.futurePigdog(200) val f2 = t.futurePigdog(0) f2.isCompleted must be(false) @@ -305,7 +306,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte } "be able to support implementation only typed actors with complex interfaces" in { - val t = app.typedActorOf[Stackable1 with Stackable2, StackedImpl]() + val t = system.typedActorOf[Stackable1 with Stackable2, StackedImpl]() 
t.stackable1 must be("foo") t.stackable2 must be("bar") mustStop(t) @@ -314,7 +315,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte "be able to use work-stealing dispatcher" in { val props = Props( timeout = Timeout(6600), - dispatcher = app.dispatcherFactory.newBalancingDispatcher("pooled-dispatcher") + dispatcher = system.dispatcherFactory.newBalancingDispatcher("pooled-dispatcher") .withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity .setCorePoolSize(60) .setMaxPoolSize(60) @@ -332,7 +333,8 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte "be able to serialize and deserialize invocations" in { import java.io._ - val m = TypedActor.MethodCall(app, classOf[Foo].getDeclaredMethod("pigdog"), Array[AnyRef]()) + val serialization = SerializationExtension(system).serialization + val m = TypedActor.MethodCall(serialization, classOf[Foo].getDeclaredMethod("pigdog"), Array[AnyRef]()) val baos = new ByteArrayOutputStream(8192 * 4) val out = new ObjectOutputStream(baos) @@ -341,7 +343,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray)) - Serialization.app.withValue(app) { + Serialization.system.withValue(system.asInstanceOf[ActorSystemImpl]) { val mNew = in.readObject().asInstanceOf[TypedActor.MethodCall] mNew.method must be(m.method) @@ -351,7 +353,8 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte "be able to serialize and deserialize invocations' parameters" in { import java.io._ val someFoo: Foo = new Bar - val m = TypedActor.MethodCall(app, classOf[Foo].getDeclaredMethod("testMethodCallSerialization", Array[Class[_]](classOf[Foo], classOf[String], classOf[Int]): _*), Array[AnyRef](someFoo, null, 1.asInstanceOf[AnyRef])) + val serialization = SerializationExtension(system).serialization + val m = TypedActor.MethodCall(serialization, 
classOf[Foo].getDeclaredMethod("testMethodCallSerialization", Array[Class[_]](classOf[Foo], classOf[String], classOf[Int]): _*), Array[AnyRef](someFoo, null, 1.asInstanceOf[AnyRef])) val baos = new ByteArrayOutputStream(8192 * 4) val out = new ObjectOutputStream(baos) @@ -360,7 +363,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray)) - Serialization.app.withValue(app) { + Serialization.system.withValue(system.asInstanceOf[ActorSystemImpl]) { val mNew = in.readObject().asInstanceOf[TypedActor.MethodCall] mNew.method must be(m.method) diff --git a/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala index c31f054b4a..e21f965c51 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/dispatch/ActorModelSpec.scala @@ -4,7 +4,7 @@ package akka.actor.dispatch import org.scalatest.Assertions._ -import akka.testkit.{ filterEvents, EventFilter, AkkaSpec } +import akka.testkit._ import akka.dispatch._ import java.util.concurrent.atomic.AtomicLong import java.util.concurrent.{ ConcurrentHashMap, CountDownLatch, TimeUnit } @@ -141,13 +141,13 @@ object ActorModelSpec { } def assertDispatcher(dispatcher: MessageDispatcherInterceptor)( - stops: Long = dispatcher.stops.get())(implicit app: ActorSystem) { - val deadline = System.currentTimeMillis + dispatcher.timeoutMs * 5 + stops: Long = dispatcher.stops.get())(implicit system: ActorSystem) { + val deadline = System.currentTimeMillis + dispatcher.shutdownTimeout.toMillis * 5 try { await(deadline)(stops == dispatcher.stops.get) } catch { case e ⇒ - app.eventStream.publish(Error(e, dispatcher, "actual: stops=" + dispatcher.stops.get + + system.eventStream.publish(Error(e, dispatcher.toString, "actual: stops=" + dispatcher.stops.get + " required: stops=" + 
stops)) throw e } @@ -173,7 +173,7 @@ object ActorModelSpec { unregisters: Long = 0, msgsReceived: Long = 0, msgsProcessed: Long = 0, - restarts: Long = 0)(implicit app: ActorSystem) { + restarts: Long = 0)(implicit system: ActorSystem) { assertRef(actorRef, dispatcher)( suspensions, resumes, @@ -191,7 +191,7 @@ object ActorModelSpec { unregisters: Long = statsFor(actorRef).unregisters.get(), msgsReceived: Long = statsFor(actorRef).msgsReceived.get(), msgsProcessed: Long = statsFor(actorRef).msgsProcessed.get(), - restarts: Long = statsFor(actorRef).restarts.get())(implicit app: ActorSystem) { + restarts: Long = statsFor(actorRef).restarts.get())(implicit system: ActorSystem) { val stats = statsFor(actorRef, Option(dispatcher).getOrElse(actorRef.asInstanceOf[LocalActorRef].underlying.dispatcher)) val deadline = System.currentTimeMillis + 1000 try { @@ -204,7 +204,7 @@ object ActorModelSpec { await(deadline)(stats.restarts.get() == restarts) } catch { case e ⇒ - app.eventStream.publish(Error(e, dispatcher, "actual: " + stats + ", required: InterceptorStats(susp=" + suspensions + + system.eventStream.publish(Error(e, dispatcher.toString, "actual: " + stats + ", required: InterceptorStats(susp=" + suspensions + ",res=" + resumes + ",reg=" + registers + ",unreg=" + unregisters + ",recv=" + msgsReceived + ",proc=" + msgsProcessed + ",restart=" + restarts)) throw e @@ -227,14 +227,11 @@ abstract class ActorModelSpec extends AkkaSpec { import ActorModelSpec._ - def newTestActor(dispatcher: MessageDispatcher) = app.actorOf(Props[DispatcherActor].withDispatcher(dispatcher)) + def newTestActor(dispatcher: MessageDispatcher) = system.actorOf(Props[DispatcherActor].withDispatcher(dispatcher)) protected def newInterceptedDispatcher: MessageDispatcherInterceptor protected def dispatcherType: String - // BalancingDispatcher of course does not work when another actor is in the pool, so overridden below - protected def wavesSupervisorDispatcher(dispatcher: MessageDispatcher) = 
dispatcher - "A " + dispatcherType must { "must dynamically handle its own life cycle" in { @@ -310,7 +307,7 @@ abstract class ActorModelSpec extends AkkaSpec { try { f } catch { - case e ⇒ app.eventStream.publish(Error(e, this, "error in spawned thread")) + case e ⇒ system.eventStream.publish(Error(e, "spawn", "error in spawned thread")) } } } @@ -347,9 +344,29 @@ abstract class ActorModelSpec extends AkkaSpec { val boss = actorOf(Props(context ⇒ { case "run" ⇒ for (_ ← 1 to num) (context.self startsWatching context.actorOf(props)) ! cachedMessage case Terminated(child) ⇒ stopLatch.countDown() - }).withDispatcher(wavesSupervisorDispatcher(dispatcher))) + }).withDispatcher(system.dispatcherFactory.newPinnedDispatcher("boss"))) boss ! "run" - assertCountDown(cachedMessage.latch, waitTime, "Counting down from " + num) + try { + assertCountDown(cachedMessage.latch, waitTime, "Counting down from " + num) + } catch { + case e ⇒ + dispatcher match { + case dispatcher: BalancingDispatcher ⇒ + val buddies = dispatcher.buddies + val mq = dispatcher.messageQueue + + System.err.println("Buddies left: ") + buddies.toArray foreach { + case cell: ActorCell ⇒ + System.err.println(" - " + cell.self.path + " " + cell.isTerminated + " " + cell.mailbox.status + " " + cell.mailbox.numberOfMessages + " " + SystemMessage.size(cell.mailbox.systemDrain())) + } + + System.err.println("Mailbox: " + mq.numberOfMessages + " " + mq.hasMessages + " ") + case _ ⇒ + } + + throw e + } assertCountDown(stopLatch, waitTime, "Expected all children to stop") boss.stop() } @@ -407,10 +424,10 @@ class DispatcherModelSpec extends ActorModelSpec { import ActorModelSpec._ def newInterceptedDispatcher = ThreadPoolConfigDispatcherBuilder(config ⇒ - new Dispatcher(app, "foo", app.AkkaConfig.DispatcherThroughput, - app.dispatcherFactory.ThroughputDeadlineTimeMillis, app.dispatcherFactory.MailboxType, - config, app.dispatcherFactory.DispatcherShutdownMillis) with MessageDispatcherInterceptor, - 
ThreadPoolConfig(app)).build.asInstanceOf[MessageDispatcherInterceptor] + new Dispatcher(system.dispatcherFactory.prerequisites, "foo", system.settings.DispatcherThroughput, + system.settings.DispatcherThroughputDeadlineTime, system.dispatcherFactory.MailboxType, + config, system.settings.DispatcherDefaultShutdown) with MessageDispatcherInterceptor, + ThreadPoolConfig()).build.asInstanceOf[MessageDispatcherInterceptor] def dispatcherType = "Dispatcher" @@ -431,7 +448,7 @@ class DispatcherModelSpec extends ActorModelSpec { a.stop b.stop - while (!a.isShutdown && !b.isShutdown) {} //Busy wait for termination + while (!a.isTerminated && !b.isTerminated) {} //Busy wait for termination assertRefDefaultZero(a)(registers = 1, unregisters = 1, msgsReceived = 1, msgsProcessed = 1) assertRefDefaultZero(b)(registers = 1, unregisters = 1, msgsReceived = 1, msgsProcessed = 1) @@ -444,15 +461,13 @@ class BalancingDispatcherModelSpec extends ActorModelSpec { import ActorModelSpec._ def newInterceptedDispatcher = ThreadPoolConfigDispatcherBuilder(config ⇒ - new BalancingDispatcher(app, "foo", 1, // TODO check why 1 here? (came from old test) - app.dispatcherFactory.ThroughputDeadlineTimeMillis, app.dispatcherFactory.MailboxType, - config, app.dispatcherFactory.DispatcherShutdownMillis) with MessageDispatcherInterceptor, - ThreadPoolConfig(app)).build.asInstanceOf[MessageDispatcherInterceptor] + new BalancingDispatcher(system.dispatcherFactory.prerequisites, "foo", 1, // TODO check why 1 here? 
(came from old test) + system.settings.DispatcherThroughputDeadlineTime, system.dispatcherFactory.MailboxType, + config, system.settings.DispatcherDefaultShutdown) with MessageDispatcherInterceptor, + ThreadPoolConfig()).build.asInstanceOf[MessageDispatcherInterceptor] def dispatcherType = "Balancing Dispatcher" - override def wavesSupervisorDispatcher(dispatcher: MessageDispatcher) = app.dispatcher - "A " + dispatcherType must { "process messages in parallel" in { implicit val dispatcher = newInterceptedDispatcher @@ -470,7 +485,7 @@ class BalancingDispatcherModelSpec extends ActorModelSpec { a.stop b.stop - while (!a.isShutdown && !b.isShutdown) {} //Busy wait for termination + while (!a.isTerminated && !b.isTerminated) {} //Busy wait for termination assertRefDefaultZero(a)(registers = 1, unregisters = 1, msgsReceived = 1, msgsProcessed = 1) assertRefDefaultZero(b)(registers = 1, unregisters = 1, msgsReceived = 1, msgsProcessed = 1) diff --git a/akka-actor-tests/src/test/scala/akka/actor/dispatch/BalancingDispatcherSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/dispatch/BalancingDispatcherSpec.scala index c30db1d5bc..b6ef8468f7 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/dispatch/BalancingDispatcherSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/dispatch/BalancingDispatcherSpec.scala @@ -8,7 +8,7 @@ import akka.testkit.AkkaSpec @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class BalancingDispatcherSpec extends AkkaSpec { - def newWorkStealer() = app.dispatcherFactory.newBalancingDispatcher("pooled-dispatcher", 1).build + def newWorkStealer() = system.dispatcherFactory.newBalancingDispatcher("pooled-dispatcher", 1).build val delayableActorDispatcher, sharedActorDispatcher, parentActorDispatcher = newWorkStealer() diff --git a/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatcherActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatcherActorSpec.scala index 2ce2171438..0084cc0ae5 
100644 --- a/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatcherActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatcherActorSpec.scala @@ -5,6 +5,8 @@ import java.util.concurrent.atomic.{ AtomicBoolean, AtomicInteger } import akka.testkit.{ filterEvents, EventFilter, AkkaSpec } import akka.dispatch.{ PinnedDispatcher, Dispatchers, Dispatcher } import akka.actor.{ Props, Actor } +import akka.util.Duration +import akka.util.duration._ object DispatcherActorSpec { class TestActor extends Actor { @@ -33,22 +35,22 @@ class DispatcherActorSpec extends AkkaSpec { "A Dispatcher and an Actor" must { "support tell" in { - val actor = actorOf(Props[OneWayTestActor].withDispatcher(app.dispatcherFactory.newDispatcher("test").build)) + val actor = actorOf(Props[OneWayTestActor].withDispatcher(system.dispatcherFactory.newDispatcher("test").build)) val result = actor ! "OneWay" assert(OneWayTestActor.oneWay.await(1, TimeUnit.SECONDS)) actor.stop() } "support ask/reply" in { - val actor = actorOf(Props[TestActor].withDispatcher(app.dispatcherFactory.newDispatcher("test").build)) + val actor = actorOf(Props[TestActor].withDispatcher(system.dispatcherFactory.newDispatcher("test").build)) val result = (actor ? "Hello").as[String] assert("World" === result.get) actor.stop() } "respect the throughput setting" in { - val throughputDispatcher = app.dispatcherFactory. - newDispatcher("THROUGHPUT", 101, 0, app.dispatcherFactory.MailboxType). + val throughputDispatcher = system.dispatcherFactory. + newDispatcher("THROUGHPUT", 101, Duration.Zero, system.dispatcherFactory.MailboxType). setCorePoolSize(1). build @@ -75,9 +77,9 @@ class DispatcherActorSpec extends AkkaSpec { } "respect throughput deadline" in { - val deadlineMs = 100 - val throughputDispatcher = app.dispatcherFactory. - newDispatcher("THROUGHPUT", 2, deadlineMs, app.dispatcherFactory.MailboxType). + val deadline = 100 millis + val throughputDispatcher = system.dispatcherFactory. 
+ newDispatcher("THROUGHPUT", 2, deadline, system.dispatcherFactory.MailboxType). setCorePoolSize(1). build val works = new AtomicBoolean(true) @@ -100,7 +102,7 @@ class DispatcherActorSpec extends AkkaSpec { slowOne ! "ping" fastOne ! "ping" assert(ready.await(2, TimeUnit.SECONDS) === true) - Thread.sleep(deadlineMs + 10) // wait just a bit more than the deadline + Thread.sleep(deadline.toMillis + 10) // wait just a bit more than the deadline start.countDown() assert(latch.await(2, TimeUnit.SECONDS) === true) } diff --git a/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatchersSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatchersSpec.scala index 795c0b9335..53fa4ea5bf 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatchersSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatchersSpec.scala @@ -7,12 +7,15 @@ import java.util.concurrent.{ CountDownLatch, TimeUnit } import scala.reflect.{ Manifest } import akka.dispatch._ import akka.testkit.AkkaSpec -import akka.config.Configuration +import scala.collection.JavaConverters._ +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigParseOptions @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class DispatchersSpec extends AkkaSpec { - import app.dispatcherFactory._ + val df = system.dispatcherFactory + import df._ val tipe = "type" val keepalivems = "keep-alive-time" @@ -30,19 +33,45 @@ class DispatchersSpec extends AkkaSpec { def validTypes = typesAndValidators.keys.toList + val defaultDispatcherConfig = settings.config.getConfig("akka.actor.default-dispatcher") + + val dispatcherConf = ConfigFactory.parseString(""" + myapp { + mydispatcher { + throughput = 17 + } + } + """, ConfigParseOptions.defaults) + lazy val allDispatchers: Map[String, Option[MessageDispatcher]] = { - validTypes.map(t ⇒ (t, from(Configuration.fromMap(Map(tipe -> t))))).toMap + validTypes.map(t ⇒ (t, 
from(ConfigFactory.parseMap(Map(tipe -> t).asJava).withFallback(defaultDispatcherConfig)))).toMap } "Dispatchers" must { - "yield None if type is missing" in { - assert(from(Configuration.fromMap(Map())) === None) + "use default dispatcher if type is missing" in { + val dispatcher = from(ConfigFactory.empty.withFallback(defaultDispatcherConfig)) + dispatcher.map(_.name) must be(Some("DefaultDispatcher")) + } + + "use defined properties" in { + val dispatcher = from(ConfigFactory.parseMap(Map("throughput" -> 17).asJava).withFallback(defaultDispatcherConfig)) + dispatcher.map(_.throughput) must be(Some(17)) + } + + "use defined properties when fromConfig" in { + val dispatcher = fromConfig("myapp.mydispatcher", cfg = dispatcherConf) + dispatcher.throughput must be(17) + } + + "use specific name when fromConfig" in { + val dispatcher = fromConfig("myapp.mydispatcher", cfg = dispatcherConf) + dispatcher.name must be("mydispatcher") } "throw IllegalArgumentException if type does not exist" in { intercept[IllegalArgumentException] { - from(Configuration.fromMap(Map(tipe -> "typedoesntexist"))) + from(ConfigFactory.parseMap(Map(tipe -> "typedoesntexist").asJava).withFallback(defaultDispatcherConfig)) } } @@ -53,9 +82,6 @@ class DispatchersSpec extends AkkaSpec { assert(typesAndValidators.forall(tuple ⇒ tuple._2(allDispatchers(tuple._1).get))) } - "default to default while loading the default" in { - assert(from(Configuration.fromMap(Map())).getOrElse(defaultGlobalDispatcher) == defaultGlobalDispatcher) - } } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/dispatch/PinnedActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/dispatch/PinnedActorSpec.scala index d2b410a7bf..d9feed3209 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/dispatch/PinnedActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/dispatch/PinnedActorSpec.scala @@ -27,14 +27,14 @@ class PinnedActorSpec extends AkkaSpec with BeforeAndAfterEach { "support tell" in { var 
oneWay = new CountDownLatch(1) - val actor = actorOf(Props(self ⇒ { case "OneWay" ⇒ oneWay.countDown() }).withDispatcher(app.dispatcherFactory.newPinnedDispatcher("test"))) + val actor = actorOf(Props(self ⇒ { case "OneWay" ⇒ oneWay.countDown() }).withDispatcher(system.dispatcherFactory.newPinnedDispatcher("test"))) val result = actor ! "OneWay" assert(oneWay.await(1, TimeUnit.SECONDS)) actor.stop() } "support ask/reply" in { - val actor = actorOf(Props[TestActor].withDispatcher(app.dispatcherFactory.newPinnedDispatcher("test"))) + val actor = actorOf(Props[TestActor].withDispatcher(system.dispatcherFactory.newPinnedDispatcher("test"))) val result = (actor ? "Hello").as[String] assert("World" === result.get) actor.stop() diff --git a/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala b/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala index 39e3374700..b594ea0f19 100644 --- a/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala @@ -5,40 +5,44 @@ package akka.config import akka.testkit.AkkaSpec -import akka.actor.ActorSystem +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigParseOptions +import scala.collection.JavaConverters._ +import akka.util.duration._ +import akka.util.Duration @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) -class ConfigSpec extends AkkaSpec(ActorSystem("ConfigSpec", Configuration.fromFile("config/akka-reference.conf"))) { +class ConfigSpec extends AkkaSpec(ConfigFactory.parseResource(classOf[ConfigSpec], "/akka-actor-reference.conf", ConfigParseOptions.defaults)) { - "The default configuration file (i.e. akka-reference.conf)" must { + "The default configuration file (i.e. 
akka-actor-reference.conf)" must { "contain all configuration properties for akka-actor that are used in code with their correct defaults" in { - val config = app.config + val settings = system.settings + val config = settings.config import config._ - getList("akka.boot") must equal(Nil) - getString("akka.time-unit") must equal(Some("seconds")) - getString("akka.version") must equal(Some("2.0-SNAPSHOT")) + getList("akka.boot").asScala.toSeq must equal(Nil) + getString("akka.version") must equal("2.0-SNAPSHOT") + settings.ConfigVersion must equal("2.0-SNAPSHOT") - getString("akka.actor.default-dispatcher.type") must equal(Some("Dispatcher")) - getInt("akka.actor.default-dispatcher.keep-alive-time") must equal(Some(60)) - getDouble("akka.actor.default-dispatcher.core-pool-size-factor") must equal(Some(8.0)) - getDouble("akka.actor.default-dispatcher.max-pool-size-factor") must equal(Some(8.0)) - getInt("akka.actor.default-dispatcher.task-queue-size") must equal(Some(-1)) - getString("akka.actor.default-dispatcher.task-queue-type") must equal(Some("linked")) - getBool("akka.actor.default-dispatcher.allow-core-timeout") must equal(Some(true)) - getInt("akka.actor.default-dispatcher.mailbox-capacity") must equal(Some(-1)) - getInt("akka.actor.default-dispatcher.mailbox-push-timeout-time") must equal(Some(10)) - getLong("akka.actor.dispatcher-shutdown-timeout") must equal(Some(1)) - getInt("akka.actor.default-dispatcher.throughput") must equal(Some(5)) - getInt("akka.actor.default-dispatcher.throughput-deadline-time") must equal(Some(-1)) - getBool("akka.actor.serialize-messages") must equal(Some(false)) - getInt("akka.actor.timeout") must equal(Some(5)) - getInt("akka.actor.throughput") must equal(Some(5)) - getInt("akka.actor.throughput-deadline-time") must equal(Some(-1)) + getString("akka.actor.default-dispatcher.type") must equal("Dispatcher") + getMilliseconds("akka.actor.default-dispatcher.keep-alive-time") must equal(60 * 1000) + 
getDouble("akka.actor.default-dispatcher.core-pool-size-factor") must equal(8.0) + getDouble("akka.actor.default-dispatcher.max-pool-size-factor") must equal(8.0) + getInt("akka.actor.default-dispatcher.task-queue-size") must equal(-1) + getString("akka.actor.default-dispatcher.task-queue-type") must equal("linked") + getBoolean("akka.actor.default-dispatcher.allow-core-timeout") must equal(true) + getInt("akka.actor.default-dispatcher.mailbox-capacity") must equal(-1) + getMilliseconds("akka.actor.default-dispatcher.mailbox-push-timeout-time") must equal(10 * 1000) + getMilliseconds("akka.actor.dispatcher-shutdown-timeout") must equal(1 * 1000) + settings.DispatcherDefaultShutdown must equal(1 second) + getInt("akka.actor.default-dispatcher.throughput") must equal(5) + settings.DispatcherThroughput must equal(5) + getMilliseconds("akka.actor.default-dispatcher.throughput-deadline-time") must equal(0) + settings.DispatcherThroughputDeadlineTime must equal(Duration.Zero) + getBoolean("akka.actor.serialize-messages") must equal(false) + settings.SerializeAllMessages must equal(false) - getString("akka.remote.layer") must equal(Some("akka.cluster.netty.NettyRemoteSupport")) - getInt("akka.remote.server.port") must equal(Some(2552)) } } } diff --git a/akka-actor-tests/src/test/scala/akka/dataflow/Future2Actor.scala b/akka-actor-tests/src/test/scala/akka/dataflow/Future2Actor.scala index 94e59ebbf1..35924a2b14 100644 --- a/akka-actor-tests/src/test/scala/akka/dataflow/Future2Actor.scala +++ b/akka-actor-tests/src/test/scala/akka/dataflow/Future2Actor.scala @@ -19,7 +19,7 @@ class Future2ActorSpec extends AkkaSpec { } "support reply via sender" in { - val actor = app.actorOf(Props(new Actor { + val actor = system.actorOf(Props(new Actor { def receive = { case "do" ⇒ Future(31) pipeTo context.sender case "ex" ⇒ Future(throw new AssertionError) pipeTo context.sender diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/MailboxConfigSpec.scala 
b/akka-actor-tests/src/test/scala/akka/dispatch/MailboxConfigSpec.scala index ea9cda5d66..7af8f057d8 100644 --- a/akka-actor-tests/src/test/scala/akka/dispatch/MailboxConfigSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/dispatch/MailboxConfigSpec.scala @@ -1,14 +1,8 @@ package akka.dispatch -import org.scalatest.WordSpec -import org.scalatest.matchers.MustMatchers import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach } -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith -import java.util.concurrent.{ TimeUnit, CountDownLatch, BlockingQueue } -import java.util.{ Queue } +import java.util.concurrent.{ TimeUnit, BlockingQueue } import akka.util._ -import akka.util.Duration._ -import akka.actor.{ LocalActorRef, Actor } +import akka.util.duration._ import akka.testkit.AkkaSpec @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) @@ -23,7 +17,7 @@ abstract class MailboxSpec extends AkkaSpec with BeforeAndAfterAll with BeforeAn val q = factory(config) ensureInitialMailboxState(config, q) - implicit val within = Duration(1, TimeUnit.SECONDS) + implicit val within = 1 second val f = spawn { q.dequeue @@ -33,7 +27,7 @@ abstract class MailboxSpec extends AkkaSpec with BeforeAndAfterAll with BeforeAn } "create a bounded mailbox with 10 capacity and with push timeout" in { - val config = BoundedMailbox(10, Duration(10, TimeUnit.MILLISECONDS)) + val config = BoundedMailbox(10, 10 milliseconds) val q = factory(config) ensureInitialMailboxState(config, q) @@ -58,11 +52,11 @@ abstract class MailboxSpec extends AkkaSpec with BeforeAndAfterAll with BeforeAn } "dequeue what was enqueued properly for bounded mailboxes" in { - testEnqueueDequeue(BoundedMailbox(10000, Duration(-1, TimeUnit.MILLISECONDS))) + testEnqueueDequeue(BoundedMailbox(10000, -1 millisecond)) } "dequeue what was enqueued properly for bounded mailboxes with pushTimeout" in { - testEnqueueDequeue(BoundedMailbox(10000, Duration(100, TimeUnit.MILLISECONDS))) + 
testEnqueueDequeue(BoundedMailbox(10000, 100 milliseconds)) } } @@ -80,7 +74,7 @@ abstract class MailboxSpec extends AkkaSpec with BeforeAndAfterAll with BeforeAn result } - def createMessageInvocation(msg: Any): Envelope = Envelope(msg, app.deadLetters) + def createMessageInvocation(msg: Any): Envelope = Envelope(msg, system.deadLetters) def ensureInitialMailboxState(config: MailboxType, q: Mailbox) { q must not be null @@ -97,7 +91,7 @@ abstract class MailboxSpec extends AkkaSpec with BeforeAndAfterAll with BeforeAn } def testEnqueueDequeue(config: MailboxType) { - implicit val within = Duration(10, TimeUnit.SECONDS) + implicit val within = 10 seconds val q = factory(config) ensureInitialMailboxState(config, q) diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/PriorityDispatcherSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/PriorityDispatcherSpec.scala index ebc42c92d9..282835e6fc 100644 --- a/akka-actor-tests/src/test/scala/akka/dispatch/PriorityDispatcherSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/dispatch/PriorityDispatcherSpec.scala @@ -2,6 +2,7 @@ package akka.dispatch import akka.actor.{ Props, LocalActorRef, Actor } import akka.testkit.AkkaSpec +import akka.util.Duration @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class PriorityDispatcherSpec extends AkkaSpec { @@ -18,12 +19,12 @@ class PriorityDispatcherSpec extends AkkaSpec { testOrdering(BoundedPriorityMailbox(PriorityGenerator({ case i: Int ⇒ i //Reverse order case 'Result ⇒ Int.MaxValue - }: Any ⇒ Int), 1000, app.AkkaConfig.MailboxPushTimeout)) + }: Any ⇒ Int), 1000, system.settings.MailboxPushTimeout)) } } def testOrdering(mboxType: MailboxType) { - val dispatcher = app.dispatcherFactory.newDispatcher("Test", 1, -1, mboxType).build + val dispatcher = system.dispatcherFactory.newDispatcher("Test", 1, Duration.Zero, mboxType).build val actor = actorOf(Props(new Actor { var acc: List[Int] = Nil diff --git 
a/akka-actor-tests/src/test/scala/akka/event/EventStreamSpec.scala b/akka-actor-tests/src/test/scala/akka/event/EventStreamSpec.scala index 22ecb6ac73..14158e7454 100644 --- a/akka-actor-tests/src/test/scala/akka/event/EventStreamSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/event/EventStreamSpec.scala @@ -4,17 +4,29 @@ package akka.event import akka.testkit.AkkaSpec -import akka.config.Configuration import akka.util.duration._ -import akka.actor.{ Actor, ActorRef } +import akka.actor.{ Actor, ActorRef, ActorSystemImpl } +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigParseOptions +import scala.collection.JavaConverters._ +import akka.actor.ActorSystem object EventStreamSpec { + + val config = ConfigFactory.parseString(""" + akka { + stdout-loglevel = WARNING + loglevel = INFO + event-handlers = ["akka.event.EventStreamSpec$MyLog", "%s"] + } + """.format(Logging.StandardOutLoggerName), ConfigParseOptions.defaults) + case class M(i: Int) case class SetTarget(ref: ActorRef) class MyLog extends Actor { - var dst: ActorRef = app.deadLetters + var dst: ActorRef = system.deadLetters def receive = { case Logging.InitializeLogger(bus) ⇒ bus.subscribe(context.self, classOf[SetTarget]); sender ! Logging.LoggerInitialized case SetTarget(ref) ⇒ dst = ref; dst ! 
"OK" @@ -29,18 +41,18 @@ object EventStreamSpec { class C extends B1 } -class EventStreamSpec extends AkkaSpec(Configuration( - "akka.stdout-loglevel" -> "WARNING", - "akka.loglevel" -> "INFO", - "akka.event-handlers" -> Seq("akka.event.EventStreamSpec$MyLog", Logging.StandardOutLoggerName))) { +@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) +class EventStreamSpec extends AkkaSpec(EventStreamSpec.config) { import EventStreamSpec._ + val impl = system.asInstanceOf[ActorSystemImpl] + "An EventStream" must { "manage subscriptions" in { val bus = new EventStream(true) - bus.start(app) + bus.start(impl) bus.subscribe(testActor, classOf[M]) bus.publish(M(42)) within(1 second) { @@ -53,8 +65,8 @@ class EventStreamSpec extends AkkaSpec(Configuration( "manage log levels" in { val bus = new EventStream(false) - bus.start(app) - bus.startDefaultLoggers(app, app.AkkaConfig) + bus.start(impl) + bus.startDefaultLoggers(impl) bus.publish(SetTarget(testActor)) expectMsg("OK") within(2 seconds) { @@ -75,7 +87,7 @@ class EventStreamSpec extends AkkaSpec(Configuration( val b2 = new B2 val c = new C val bus = new EventStream(false) - bus.start(app) + bus.start(impl) within(2 seconds) { bus.subscribe(testActor, classOf[B2]) === true bus.publish(c) @@ -100,7 +112,7 @@ class EventStreamSpec extends AkkaSpec(Configuration( private def verifyLevel(bus: LoggingBus, level: Logging.LogLevel) { import Logging._ - val allmsg = Seq(Debug(this, "debug"), Info(this, "info"), Warning(this, "warning"), Error(this, "error")) + val allmsg = Seq(Debug("", "debug"), Info("", "info"), Warning("", "warning"), Error("", "error")) val msg = allmsg filter (_.level <= level) allmsg foreach bus.publish msg foreach (x ⇒ expectMsg(x)) diff --git a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellLatencyPerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellLatencyPerformanceSpec.scala index 14acef4373..7eec58b70c 100644 --- 
a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellLatencyPerformanceSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellLatencyPerformanceSpec.scala @@ -17,7 +17,7 @@ import org.apache.commons.math.stat.descriptive.SynchronizedDescriptiveStatistic class TellLatencyPerformanceSpec extends PerformanceSpec { import TellLatencyPerformanceSpec._ - val clientDispatcher = app.dispatcherFactory.newDispatcher("client-dispatcher") + val clientDispatcher = system.dispatcherFactory.newDispatcher("client-dispatcher") .withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity .setCorePoolSize(8) .build @@ -62,13 +62,13 @@ class TellLatencyPerformanceSpec extends PerformanceSpec { val latch = new CountDownLatch(numberOfClients) val repeatsPerClient = repeat / numberOfClients val clients = (for (i ← 0 until numberOfClients) yield { - val destination = app.actorOf[Destination] - val w4 = app.actorOf(new Waypoint(destination)) - val w3 = app.actorOf(new Waypoint(w4)) - val w2 = app.actorOf(new Waypoint(w3)) - val w1 = app.actorOf(new Waypoint(w2)) + val destination = system.actorOf[Destination] + val w4 = system.actorOf(new Waypoint(destination)) + val w3 = system.actorOf(new Waypoint(w4)) + val w2 = system.actorOf(new Waypoint(w3)) + val w1 = system.actorOf(new Waypoint(w2)) Props(new Client(w1, latch, repeatsPerClient, clientDelayMicros, stat)).withDispatcher(clientDispatcher) - }).toList.map(app.actorOf(_)) + }).toList.map(system.actorOf(_)) val start = System.nanoTime clients.foreach(_ ! 
Run) diff --git a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputPerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputPerformanceSpec.scala index 2825684bad..a49e837ac4 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputPerformanceSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputPerformanceSpec.scala @@ -1,19 +1,20 @@ package akka.performance.microbench import akka.performance.workbench.PerformanceSpec -import org.apache.commons.math.stat.descriptive.DescriptiveStatistics import akka.actor._ import java.util.concurrent.{ ThreadPoolExecutor, CountDownLatch, TimeUnit } import akka.dispatch._ -import java.util.concurrent.ThreadPoolExecutor.AbortPolicy +import akka.util.Duration +import akka.util.duration._ // -server -Xms512M -Xmx1024M -XX:+UseParallelGC -Dbenchmark=true -Dbenchmark.repeatFactor=500 @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class TellThroughputPerformanceSpec extends PerformanceSpec { import TellThroughputPerformanceSpec._ - def createDispatcher(name: String) = ThreadPoolConfigDispatcherBuilder(config ⇒ new Dispatcher(app, name, 5, - 0, UnboundedMailbox(), config, 60000), ThreadPoolConfig(app)) + def createDispatcher(name: String) = ThreadPoolConfigDispatcherBuilder(config ⇒ + new Dispatcher(system.dispatcherFactory.prerequisites, name, 5, + Duration.Zero, UnboundedMailbox(), config, 60 seconds), ThreadPoolConfig()) .withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity .setCorePoolSize(maxClients) .build @@ -71,9 +72,9 @@ class TellThroughputPerformanceSpec extends PerformanceSpec { val latch = new CountDownLatch(numberOfClients) val repeatsPerClient = repeat / numberOfClients val destinations = for (i ← 0 until numberOfClients) - yield app.actorOf(Props(new Destination).withDispatcher(destinationDispatcher)) + yield system.actorOf(Props(new 
Destination).withDispatcher(destinationDispatcher)) val clients = for (dest ← destinations) - yield app.actorOf(Props(new Client(dest, latch, repeatsPerClient)).withDispatcher(clientDispatcher)) + yield system.actorOf(Props(new Client(dest, latch, repeatsPerClient)).withDispatcher(clientDispatcher)) val start = System.nanoTime clients.foreach(_ ! Run) @@ -150,4 +151,4 @@ object TellThroughputPerformanceSpec { } -} \ No newline at end of file +} diff --git a/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingLatencyPerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingLatencyPerformanceSpec.scala index 95249cfe29..06a4bd9fa7 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingLatencyPerformanceSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingLatencyPerformanceSpec.scala @@ -21,7 +21,7 @@ import akka.performance.trading.domain.Orderbook @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class TradingLatencyPerformanceSpec extends PerformanceSpec { - val clientDispatcher = app.dispatcherFactory.newDispatcher("client-dispatcher") + val clientDispatcher = system.dispatcherFactory.newDispatcher("client-dispatcher") .withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity .setCorePoolSize(maxClients) .build @@ -38,7 +38,7 @@ class TradingLatencyPerformanceSpec extends PerformanceSpec { override def beforeEach() { super.beforeEach() stat = new SynchronizedDescriptiveStatistics - tradingSystem = new AkkaTradingSystem(app) + tradingSystem = new AkkaTradingSystem(system) tradingSystem.start() TotalTradeCounter.reset() stat = new SynchronizedDescriptiveStatistics @@ -99,7 +99,7 @@ class TradingLatencyPerformanceSpec extends PerformanceSpec { val clients = (for (i ← 0 until numberOfClients) yield { val receiver = receivers(i % receivers.size) val props = Props(new Client(receiver, orders, latch, ordersPerClient, 
clientDelayMicros)).withDispatcher(clientDispatcher) - app.actorOf(props) + system.actorOf(props) }) clients.foreach(_ ! "run") diff --git a/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingSystem.scala b/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingSystem.scala index 88afa18d3d..21096b3c07 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingSystem.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingSystem.scala @@ -34,7 +34,7 @@ trait TradingSystem { case class MatchingEngineInfo(primary: ME, standby: Option[ME], orderbooks: List[Orderbook]) } -class AkkaTradingSystem(val app: ActorSystem) extends TradingSystem { +class AkkaTradingSystem(val system: ActorSystem) extends TradingSystem { type ME = ActorRef type OR = ActorRef @@ -70,8 +70,8 @@ class AkkaTradingSystem(val app: ActorSystem) extends TradingSystem { def createMatchingEngine(meId: String, orderbooks: List[Orderbook]) = meDispatcher match { - case Some(d) ⇒ app.actorOf(Props(new AkkaMatchingEngine(meId, orderbooks)).withDispatcher(d)) - case _ ⇒ app.actorOf(Props(new AkkaMatchingEngine(meId, orderbooks))) + case Some(d) ⇒ system.actorOf(Props(new AkkaMatchingEngine(meId, orderbooks)).withDispatcher(d)) + case _ ⇒ system.actorOf(Props(new AkkaMatchingEngine(meId, orderbooks))) } override def createOrderReceivers: List[ActorRef] = { @@ -91,8 +91,8 @@ class AkkaTradingSystem(val app: ActorSystem) extends TradingSystem { } def createOrderReceiver() = orDispatcher match { - case Some(d) ⇒ app.actorOf(Props(new AkkaOrderReceiver()).withDispatcher(d)) - case _ ⇒ app.actorOf(Props(new AkkaOrderReceiver())) + case Some(d) ⇒ system.actorOf(Props(new AkkaOrderReceiver()).withDispatcher(d)) + case _ ⇒ system.actorOf(Props(new AkkaOrderReceiver())) } override def start() { diff --git a/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingThroughputPerformanceSpec.scala 
b/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingThroughputPerformanceSpec.scala index 4e5e108e0a..7ec84137a7 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingThroughputPerformanceSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/trading/system/TradingThroughputPerformanceSpec.scala @@ -21,7 +21,7 @@ import akka.performance.trading.domain.Orderbook @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class TradingThroughputPerformanceSpec extends PerformanceSpec { - val clientDispatcher = app.dispatcherFactory.newDispatcher("client-dispatcher") + val clientDispatcher = system.dispatcherFactory.newDispatcher("client-dispatcher") .withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity .setCorePoolSize(maxClients) .build @@ -30,7 +30,7 @@ class TradingThroughputPerformanceSpec extends PerformanceSpec { override def beforeEach() { super.beforeEach() - tradingSystem = new AkkaTradingSystem(app) + tradingSystem = new AkkaTradingSystem(system) tradingSystem.start() TotalTradeCounter.reset() } @@ -92,7 +92,7 @@ class TradingThroughputPerformanceSpec extends PerformanceSpec { val clients = (for (i ← 0 until numberOfClients) yield { val receiver = receivers(i % receivers.size) val props = Props(new Client(receiver, orders, latch, ordersPerClient)).withDispatcher(clientDispatcher) - app.actorOf(props) + system.actorOf(props) }) clients.foreach(_ ! 
"run") diff --git a/akka-actor-tests/src/test/scala/akka/performance/workbench/PerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/workbench/PerformanceSpec.scala index 93d7282b14..b17bb5913c 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/workbench/PerformanceSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/workbench/PerformanceSpec.scala @@ -11,8 +11,6 @@ import akka.actor.ActorSystem trait PerformanceSpec extends AkkaSpec with BeforeAndAfterEach { - def app: ActorSystem - def isBenchmark() = System.getProperty("benchmark") == "true" def minClients() = System.getProperty("benchmark.minClients", "1").toInt; @@ -29,7 +27,7 @@ trait PerformanceSpec extends AkkaSpec with BeforeAndAfterEach { } val resultRepository = BenchResultRepository() - lazy val report = new Report(app, resultRepository, compareResultWith) + lazy val report = new Report(system, resultRepository, compareResultWith) /** * To compare two tests with each other you can override this method, in diff --git a/akka-actor-tests/src/test/scala/akka/performance/workbench/Report.scala b/akka-actor-tests/src/test/scala/akka/performance/workbench/Report.scala index b42e1b87c6..764afefe3c 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/workbench/Report.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/workbench/Report.scala @@ -3,19 +3,17 @@ package akka.performance.workbench import java.lang.management.ManagementFactory import java.text.SimpleDateFormat import java.util.Date -import scala.collection.JavaConversions.asScalaBuffer -import scala.collection.JavaConversions.enumerationAsScalaIterator import akka.actor.ActorSystem import akka.event.Logging import scala.collection.immutable.TreeMap class Report( - app: ActorSystem, + system: ActorSystem, resultRepository: BenchResultRepository, compareResultWith: Option[String] = None) { private def doLog = System.getProperty("benchmark.logResult", "true").toBoolean - val log = Logging(app, 
this) + val log = Logging(system, "Report") val dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm") val legendTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm") @@ -221,11 +219,11 @@ class Report( sb.append("Args:\n ").append(args) sb.append("\n") - sb.append("Akka version: ").append(app.AkkaConfig.ConfigVersion) + sb.append("Akka version: ").append(system.settings.ConfigVersion) sb.append("\n") sb.append("Akka config:") - for (key ← app.config.keys) { - sb.append("\n ").append(key).append("=").append(app.config(key)) + for ((key, value) ← system.settings.config.toObject) { + sb.append("\n ").append(key).append("=").append(value) } sb.toString diff --git a/akka-actor-tests/src/test/scala/akka/routing/ActorPoolSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/ActorPoolSpec.scala index 53bb402e02..e4ccf34768 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/ActorPoolSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/ActorPoolSpec.scala @@ -2,7 +2,7 @@ package akka.routing import akka.dispatch.{ KeptPromise, Future } import akka.actor._ -import akka.testkit.{ TestLatch, filterEvents, EventFilter, filterException } +import akka.testkit._ import akka.util.duration._ import java.util.concurrent.atomic.{ AtomicBoolean, AtomicInteger } import akka.testkit.AkkaSpec @@ -29,7 +29,7 @@ class TypedActorPoolSpec extends AkkaSpec { import ActorPoolSpec._ "Actor Pool (2)" must { "support typed actors" in { - val pool = app.createProxy[Foo](new Actor with DefaultActorPool with BoundedCapacityStrategy with MailboxPressureCapacitor with SmallestMailboxSelector with Filter with RunningMeanBackoff with BasicRampup { + val pool = system.createProxy[Foo](new Actor with DefaultActorPool with BoundedCapacityStrategy with MailboxPressureCapacitor with SmallestMailboxSelector with Filter with RunningMeanBackoff with BasicRampup { def lowerBound = 1 def upperBound = 5 def pressureThreshold = 1 @@ -38,7 +38,7 @@ class TypedActorPoolSpec extends AkkaSpec { 
def rampupRate = 0.1 def backoffRate = 0.50 def backoffThreshold = 0.50 - def instance(p: Props) = app.typedActor.getActorRefFor(context.typedActorOf[Foo, FooImpl](props = p.withTimeout(10 seconds))) + def instance(p: Props) = system.typedActor.getActorRefFor(context.typedActorOf[Foo, FooImpl](props = p.withTimeout(10 seconds))) def receive = _route }, Props().withTimeout(10 seconds).withFaultHandler(faultHandler)) @@ -47,7 +47,7 @@ class TypedActorPoolSpec extends AkkaSpec { for ((i, r) ← results) r.get must equal(i * i) - app.typedActor.stop(pool) + system.typedActor.stop(pool) } } } @@ -239,7 +239,7 @@ class ActorPoolSpec extends AkkaSpec { (pool ? ActorPool.Stat).as[ActorPool.Stats].get.size must be(2) - // send a bunch over the theshold and observe an increment + // send a bunch over the threshold and observe an increment loops = 15 loop(500) diff --git a/akka-actor-tests/src/test/scala/akka/routing/ConfiguredLocalRoutingSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/ConfiguredLocalRoutingSpec.scala index 424e897fa4..575fcfb7fe 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/ConfiguredLocalRoutingSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/ConfiguredLocalRoutingSpec.scala @@ -11,12 +11,14 @@ import akka.routing.Routing.Broadcast @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class ConfiguredLocalRoutingSpec extends AkkaSpec { + val deployer = system.asInstanceOf[ActorSystemImpl].provider.deployer + "round robin router" must { "be able to shut down its instance" in { - val path = app / "round-robin-0" + val path = system / "round-robin-0" - app.provider.deployer.deploy( + deployer.deploy( Deploy( path.toString, None, @@ -27,7 +29,7 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec { val helloLatch = new CountDownLatch(5) val stopLatch = new CountDownLatch(5) - val actor = app.actorOf(Props(new Actor { + val actor = system.actorOf(Props(new Actor { def receive = { case "hello" ⇒ 
helloLatch.countDown() } @@ -49,9 +51,9 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec { } "deliver messages in a round robin fashion" in { - val path = app / "round-robin-1" + val path = system / "round-robin-1" - app.provider.deployer.deploy( + deployer.deploy( Deploy( path.toString, None, @@ -69,7 +71,7 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec { replies = replies + (i -> 0) } - val actor = app.actorOf(Props(new Actor { + val actor = system.actorOf(Props(new Actor { lazy val id = counter.getAndIncrement() def receive = { case "hit" ⇒ sender ! id @@ -93,9 +95,9 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec { } "deliver a broadcast message using the !" in { - val path = app / "round-robin-2" + val path = system / "round-robin-2" - app.provider.deployer.deploy( + deployer.deploy( Deploy( path.toString, None, @@ -106,7 +108,7 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec { val helloLatch = new CountDownLatch(5) val stopLatch = new CountDownLatch(5) - val actor = app.actorOf(Props(new Actor { + val actor = system.actorOf(Props(new Actor { def receive = { case "hello" ⇒ helloLatch.countDown() } @@ -127,9 +129,9 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec { "random router" must { "be able to shut down its instance" in { - val path = app / "random-0" + val path = system / "random-0" - app.provider.deployer.deploy( + deployer.deploy( Deploy( path.toString, None, @@ -139,7 +141,7 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec { val stopLatch = new CountDownLatch(7) - val actor = app.actorOf(Props(new Actor { + val actor = system.actorOf(Props(new Actor { def receive = { case "hello" ⇒ {} } @@ -160,9 +162,9 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec { } "deliver messages in a random fashion" in { - val path = app / "random-1" + val path = system / "random-1" - app.provider.deployer.deploy( + deployer.deploy( Deploy( path.toString, None, @@ -180,7 +182,7 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec { replies = 
replies + (i -> 0) } - val actor = app.actorOf(Props(new Actor { + val actor = system.actorOf(Props(new Actor { lazy val id = counter.getAndIncrement() def receive = { case "hit" ⇒ sender ! id @@ -204,9 +206,9 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec { } "deliver a broadcast message using the !" in { - val path = app / "random-2" + val path = system / "random-2" - app.provider.deployer.deploy( + deployer.deploy( Deploy( path.toString, None, @@ -217,7 +219,7 @@ class ConfiguredLocalRoutingSpec extends AkkaSpec { val helloLatch = new CountDownLatch(6) val stopLatch = new CountDownLatch(6) - val actor = app.actorOf(Props(new Actor { + val actor = system.actorOf(Props(new Actor { def receive = { case "hello" ⇒ helloLatch.countDown() } diff --git a/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala index a744100a9b..6dd5f56577 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala @@ -22,6 +22,8 @@ object RoutingSpec { @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) class RoutingSpec extends AkkaSpec { + val impl = system.asInstanceOf[ActorSystemImpl] + import akka.routing.RoutingSpec._ "direct router" must { @@ -29,8 +31,8 @@ class RoutingSpec extends AkkaSpec { val actor1 = actorOf[TestActor] val props = RoutedProps(routerFactory = () ⇒ new DirectRouter, connectionManager = new LocalConnectionManager(List(actor1))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") - actor.isShutdown must be(false) + val actor = new RoutedActorRef(system, props, impl.guardian, "foo") + actor.isTerminated must be(false) } "send message to connection" in { @@ -45,7 +47,7 @@ class RoutingSpec extends AkkaSpec { }) val props = RoutedProps(routerFactory = () ⇒ new DirectRouter, connectionManager = new LocalConnectionManager(List(connection1))) - val routedActor = new 
RoutedActorRef(app, props, app.guardian, "foo") + val routedActor = new RoutedActorRef(system, props, impl.guardian, "foo") routedActor ! "hello" routedActor ! "end" @@ -66,7 +68,7 @@ class RoutingSpec extends AkkaSpec { }) val props = RoutedProps(routerFactory = () ⇒ new DirectRouter, connectionManager = new LocalConnectionManager(List(connection1))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") + val actor = new RoutedActorRef(system, props, impl.guardian, "foo") actor ! Broadcast(1) actor ! "end" @@ -83,8 +85,8 @@ class RoutingSpec extends AkkaSpec { val actor1 = actorOf[TestActor] val props = RoutedProps(routerFactory = () ⇒ new RoundRobinRouter, connectionManager = new LocalConnectionManager(List(actor1))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") - actor.isShutdown must be(false) + val actor = new RoutedActorRef(system, props, impl.guardian, "foo") + actor.isTerminated must be(false) } //In this test a bunch of actors are created and each actor has its own counter. @@ -113,7 +115,7 @@ class RoutingSpec extends AkkaSpec { //create the routed actor. val props = RoutedProps(routerFactory = () ⇒ new RoundRobinRouter, connectionManager = new LocalConnectionManager(connections)) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") + val actor = new RoutedActorRef(system, props, impl.guardian, "foo") //send messages to the actor. for (i ← 0 until iterationCount) { @@ -152,7 +154,7 @@ class RoutingSpec extends AkkaSpec { }) val props = RoutedProps(routerFactory = () ⇒ new RoundRobinRouter, connectionManager = new LocalConnectionManager(List(connection1, connection2))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") + val actor = new RoutedActorRef(system, props, impl.guardian, "foo") actor ! Broadcast(1) actor ! 
Broadcast("end") @@ -175,7 +177,7 @@ class RoutingSpec extends AkkaSpec { }) val props = RoutedProps(routerFactory = () ⇒ new RoundRobinRouter, connectionManager = new LocalConnectionManager(List(connection1))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") + val actor = new RoutedActorRef(system, props, impl.guardian, "foo") intercept[RoutingException] { actor ? Broadcast(1) } @@ -192,8 +194,8 @@ class RoutingSpec extends AkkaSpec { val actor1 = actorOf[TestActor] val props = RoutedProps(routerFactory = () ⇒ new RandomRouter, connectionManager = new LocalConnectionManager(List(actor1))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") - actor.isShutdown must be(false) + val actor = new RoutedActorRef(system, props, impl.guardian, "foo") + actor.isTerminated must be(false) } "deliver a broadcast message" in { @@ -216,7 +218,7 @@ class RoutingSpec extends AkkaSpec { }) val props = RoutedProps(routerFactory = () ⇒ new RandomRouter, connectionManager = new LocalConnectionManager(List(connection1, connection2))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") + val actor = new RoutedActorRef(system, props, impl.guardian, "foo") actor ! Broadcast(1) actor ! Broadcast("end") @@ -239,7 +241,7 @@ class RoutingSpec extends AkkaSpec { }) val props = RoutedProps(routerFactory = () ⇒ new RandomRouter, connectionManager = new LocalConnectionManager(List(connection1))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") + val actor = new RoutedActorRef(system, props, impl.guardian, "foo") try { actor ? 
Broadcast(1) @@ -262,7 +264,7 @@ class RoutingSpec extends AkkaSpec { val props = RoutedProps(routerFactory = () ⇒ new ScatterGatherFirstCompletedRouter, connectionManager = new LocalConnectionManager(List(newActor(0, Some(shutdownLatch)), newActor(1, Some(shutdownLatch))))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") + val actor = new RoutedActorRef(system, props, impl.guardian, "foo") actor ! Broadcast(Stop(Some(0))) @@ -277,7 +279,7 @@ class RoutingSpec extends AkkaSpec { val props = RoutedProps(routerFactory = () ⇒ new ScatterGatherFirstCompletedRouter, connectionManager = new LocalConnectionManager(List(newActor(0, Some(shutdownLatch)), newActor(1, Some(shutdownLatch))))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") + val actor = new RoutedActorRef(system, props, impl.guardian, "foo") actor ! Broadcast(Stop()) @@ -293,7 +295,7 @@ class RoutingSpec extends AkkaSpec { val props = RoutedProps(routerFactory = () ⇒ new ScatterGatherFirstCompletedRouter, connectionManager = new LocalConnectionManager(List(newActor(0), newActor(1)))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") + val actor = new RoutedActorRef(system, props, impl.guardian, "foo") (actor ? Broadcast("Hi!")).get.asInstanceOf[Int] must be(0) @@ -302,16 +304,16 @@ class RoutingSpec extends AkkaSpec { "return the first response from connections, when some of them failed to reply" in { val props = RoutedProps(routerFactory = () ⇒ new ScatterGatherFirstCompletedRouter, connectionManager = new LocalConnectionManager(List(newActor(0), newActor(1)))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") + val actor = new RoutedActorRef(system, props, impl.guardian, "foo") (actor ? 
Broadcast(0)).get.asInstanceOf[Int] must be(1) } "be started when constructed" in { val props = RoutedProps(routerFactory = () ⇒ new ScatterGatherFirstCompletedRouter, connectionManager = new LocalConnectionManager(List(newActor(0)))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") + val actor = new RoutedActorRef(system, props, impl.guardian, "foo") - actor.isShutdown must be(false) + actor.isTerminated must be(false) } "deliver one-way messages in a round robin fashion" in { @@ -324,7 +326,7 @@ class RoutingSpec extends AkkaSpec { for (i ← 0 until connectionCount) { counters = counters :+ new AtomicInteger() - val connection = app.actorOf(new Actor { + val connection = system.actorOf(new Actor { def receive = { case "end" ⇒ doneLatch.countDown() case msg: Int ⇒ counters.get(i).get.addAndGet(msg) @@ -335,7 +337,7 @@ class RoutingSpec extends AkkaSpec { val props = RoutedProps(routerFactory = () ⇒ new ScatterGatherFirstCompletedRouter, connectionManager = new LocalConnectionManager(connections)) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") + val actor = new RoutedActorRef(system, props, impl.guardian, "foo") for (i ← 0 until iterationCount) { for (k ← 0 until connectionCount) { @@ -357,7 +359,7 @@ class RoutingSpec extends AkkaSpec { val doneLatch = new TestLatch(2) val counter1 = new AtomicInteger - val connection1 = app.actorOf(new Actor { + val connection1 = system.actorOf(new Actor { def receive = { case "end" ⇒ doneLatch.countDown() case msg: Int ⇒ counter1.addAndGet(msg) @@ -365,7 +367,7 @@ class RoutingSpec extends AkkaSpec { }) val counter2 = new AtomicInteger - val connection2 = app.actorOf(new Actor { + val connection2 = system.actorOf(new Actor { def receive = { case "end" ⇒ doneLatch.countDown() case msg: Int ⇒ counter2.addAndGet(msg) @@ -374,7 +376,7 @@ class RoutingSpec extends AkkaSpec { val props = RoutedProps(routerFactory = () ⇒ new ScatterGatherFirstCompletedRouter, connectionManager = new 
LocalConnectionManager(List(connection1, connection2))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") + val actor = new RoutedActorRef(system, props, impl.guardian, "foo") actor ! Broadcast(1) actor ! Broadcast("end") @@ -387,7 +389,7 @@ class RoutingSpec extends AkkaSpec { case class Stop(id: Option[Int] = None) - def newActor(id: Int, shudownLatch: Option[TestLatch] = None) = app.actorOf(new Actor { + def newActor(id: Int, shudownLatch: Option[TestLatch] = None) = system.actorOf(new Actor { def receive = { case Stop(None) ⇒ self.stop() case Stop(Some(_id)) if (_id == id) ⇒ self.stop() @@ -407,8 +409,8 @@ class RoutingSpec extends AkkaSpec { val actor1 = actorOf[TestActor] val props = RoutedProps(routerFactory = () ⇒ new BroadcastRouter, connectionManager = new LocalConnectionManager(List(actor1))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") - actor.isShutdown must be(false) + val actor = new RoutedActorRef(system, props, system.asInstanceOf[ActorSystemImpl].guardian, "foo") + actor.isTerminated must be(false) } "broadcast message using !" in { @@ -431,7 +433,7 @@ class RoutingSpec extends AkkaSpec { }) val props = RoutedProps(routerFactory = () ⇒ new BroadcastRouter, connectionManager = new LocalConnectionManager(List(connection1, connection2))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") + val actor = new RoutedActorRef(system, props, system.asInstanceOf[ActorSystemImpl].guardian, "foo") actor ! 1 actor ! "end" @@ -464,7 +466,7 @@ class RoutingSpec extends AkkaSpec { }) val props = RoutedProps(routerFactory = () ⇒ new BroadcastRouter, connectionManager = new LocalConnectionManager(List(connection1, connection2))) - val actor = new RoutedActorRef(app, props, app.guardian, "foo") + val actor = new RoutedActorRef(system, props, system.asInstanceOf[ActorSystemImpl].guardian, "foo") actor ? 1 actor ! 
"end" diff --git a/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala b/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala index 36cc6c03ca..8022edcc62 100644 --- a/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala @@ -7,11 +7,33 @@ package akka.serialization import akka.serialization.Serialization._ import scala.reflect._ import akka.testkit.AkkaSpec -import akka.actor.ActorSystem +import akka.actor.{ ActorSystem, ActorSystemImpl } import java.io.{ ObjectInputStream, ByteArrayInputStream, ByteArrayOutputStream, ObjectOutputStream } import akka.actor.DeadLetterActorRef +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigParseOptions object SerializeSpec { + + val serializationConf = ConfigFactory.parseString(""" + akka { + actor { + serializers { + java = "akka.serialization.JavaSerializer" + proto = "akka.testing.ProtobufSerializer" + sjson = "akka.testing.SJSONSerializer" + default = "akka.serialization.JavaSerializer" + } + + serialization-bindings { + java = ["akka.serialization.SerializeSpec$Address", "akka.serialization.MyJavaSerializableActor", "akka.serialization.MyStatelessActorWithMessagesInMailbox", "akka.serialization.MyActorWithProtobufMessagesInMailbox"] + sjson = ["akka.serialization.SerializeSpec$Person"] + proto = ["com.google.protobuf.Message", "akka.actor.ProtobufProtocol$MyMessage"] + } + } + } + """, ConfigParseOptions.defaults) + @BeanInfo case class Address(no: String, street: String, city: String, zip: String) { def this() = this("", "", "", "") } @BeanInfo @@ -21,15 +43,23 @@ object SerializeSpec { } @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) -class SerializeSpec extends AkkaSpec { +class SerializeSpec extends AkkaSpec(SerializeSpec.serializationConf) { import SerializeSpec._ - import app.serialization._ + val ser = 
SerializationExtension(system).serialization + import ser._ + + val addr = Address("120", "Monroe Street", "Santa Clara", "95050") + val person = Person("debasish ghosh", 25, Address("120", "Monroe Street", "Santa Clara", "95050")) "Serialization" must { + "have correct bindings" in { + ser.bindings(addr.getClass.getName) must be("java") + ser.bindings(person.getClass.getName) must be("sjson") + } + "serialize Address" in { - val addr = Address("120", "Monroe Street", "Santa Clara", "95050") val b = serialize(addr) match { case Left(exception) ⇒ fail(exception) case Right(bytes) ⇒ bytes @@ -41,7 +71,7 @@ class SerializeSpec extends AkkaSpec { } "serialize Person" in { - val person = Person("debasish ghosh", 25, Address("120", "Monroe Street", "Santa Clara", "95050")) + val b = serialize(person) match { case Left(exception) ⇒ fail(exception) case Right(bytes) ⇒ bytes @@ -53,7 +83,7 @@ class SerializeSpec extends AkkaSpec { } "serialize record with default serializer" in { - val person = Person("debasish ghosh", 25, Address("120", "Monroe Street", "Santa Clara", "95050")) + val r = Record(100, person) val b = serialize(r) match { case Left(exception) ⇒ fail(exception) @@ -68,13 +98,13 @@ class SerializeSpec extends AkkaSpec { "serialize DeadLetterActorRef" in { val outbuf = new ByteArrayOutputStream() val out = new ObjectOutputStream(outbuf) - val a = new ActorSystem() + val a = ActorSystem() out.writeObject(a.deadLetters) out.flush() out.close() val in = new ObjectInputStream(new ByteArrayInputStream(outbuf.toByteArray)) - Serialization.app.withValue(a) { + Serialization.system.withValue(a.asInstanceOf[ActorSystemImpl]) { val deadLetters = in.readObject().asInstanceOf[DeadLetterActorRef] (deadLetters eq a.deadLetters) must be(true) } diff --git a/akka-actor-tests/src/test/scala/akka/serialization/akka-serializer.conf b/akka-actor-tests/src/test/scala/akka/serialization/akka-serializer.conf deleted file mode 100644 index d1e948840e..0000000000 --- 
a/akka-actor-tests/src/test/scala/akka/serialization/akka-serializer.conf +++ /dev/null @@ -1,16 +0,0 @@ -akka { - actor { - serializers { - java = "akka.serialization.JavaSerializer" - proto = "akka.testing.ProtobufSerializer" - sjson = "akka.testing.SJSONSerializer" - default = "akka.serialization.JavaSerializer" - } - - serialization-bindings { - java = ["akka.serialization.SerializeSpec$Address", "akka.serialization.MyJavaSerializableActor", "akka.serialization.MyStatelessActorWithMessagesInMailbox", "akka.serialization.MyActorWithProtobufMessagesInMailbox"] - sjson = ["akka.serialization.SerializeSpec$Person"] - proto = ["com.google.protobuf.Message", "akka.actor.ProtobufProtocol$MyMessage"] - } - } -} diff --git a/akka-actor-tests/src/test/scala/akka/testkit/CallingThreadDispatcherModelSpec.scala b/akka-actor-tests/src/test/scala/akka/testkit/CallingThreadDispatcherModelSpec.scala index bd3816417e..d771a5de93 100644 --- a/akka-actor-tests/src/test/scala/akka/testkit/CallingThreadDispatcherModelSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/testkit/CallingThreadDispatcherModelSpec.scala @@ -11,7 +11,7 @@ import org.junit.{ After, Test } class CallingThreadDispatcherModelSpec extends ActorModelSpec { import ActorModelSpec._ - def newInterceptedDispatcher = new CallingThreadDispatcher(app, "test") with MessageDispatcherInterceptor + def newInterceptedDispatcher = new CallingThreadDispatcher(system.dispatcherFactory.prerequisites, "test") with MessageDispatcherInterceptor def dispatcherType = "Calling Thread Dispatcher" } diff --git a/akka-actor/src/main/java/akka/actor/Actors.java b/akka-actor/src/main/java/akka/actor/Actors.java index d0cb8ccb21..dd8763aad0 100644 --- a/akka-actor/src/main/java/akka/actor/Actors.java +++ b/akka-actor/src/main/java/akka/actor/Actors.java @@ -4,51 +4,48 @@ package akka.actor; -import akka.japi.Creator; -import akka.remote.RemoteSupport; - -import com.eaio.uuid.UUID; - /** - * JAVA API for - * - creating actors, - * - 
creating remote actors, - * - locating actors + * JAVA API for - creating actors, - creating remote actors, - locating actors */ public class Actors { - /** - * The message that is sent when an Actor gets a receive timeout. - *
-     *  if( message == receiveTimeout() ) {
-     *    //Timed out
-     *  }
-     * 
- * @return the single instance of ReceiveTimeout - */ - public final static ReceiveTimeout$ receiveTimeout() { - return ReceiveTimeout$.MODULE$; - } + /** + * The message that is sent when an Actor gets a receive timeout. + * + *
+   * if (message == receiveTimeout()) {
+   *   // Timed out
+   * }
+   * 
+ * + * @return the single instance of ReceiveTimeout + */ + public final static ReceiveTimeout$ receiveTimeout() { + return ReceiveTimeout$.MODULE$; + } - /** - * The message that when sent to an Actor kills it by throwing an exception. - *
-     *  actor.tell(kill());
-     * 
- * @return the single instance of Kill - */ - public final static Kill$ kill() { - return Kill$.MODULE$; - } + /** + * The message that when sent to an Actor kills it by throwing an exception. + * + *
+   * actor.tell(kill());
+   * 
+ * + * @return the single instance of Kill + */ + public final static Kill$ kill() { + return Kill$.MODULE$; + } - - /** - * The message that when sent to an Actor shuts it down by calling 'stop'. - *
-     *  actor.tell(poisonPill());
-     * 
- * @return the single instance of PoisonPill - */ - public final static PoisonPill$ poisonPill() { - return PoisonPill$.MODULE$; - } + /** + * The message that when sent to an Actor shuts it down by calling 'stop'. + * + *
+   * actor.tell(poisonPill());
+   * 
+ * + * @return the single instance of PoisonPill + */ + public final static PoisonPill$ poisonPill() { + return PoisonPill$.MODULE$; + } } diff --git a/akka-actor/src/main/java/akka/dispatch/AbstractMailbox.java b/akka-actor/src/main/java/akka/dispatch/AbstractMailbox.java index 0f37dec003..80cc4c9675 100644 --- a/akka-actor/src/main/java/akka/dispatch/AbstractMailbox.java +++ b/akka-actor/src/main/java/akka/dispatch/AbstractMailbox.java @@ -4,15 +4,18 @@ package akka.dispatch; -import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; -import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; +import akka.util.Unsafe; -abstract class AbstractMailbox { - private volatile int _status; // not initialized because this is faster: 0 == Open - protected final static AtomicIntegerFieldUpdater updater = - AtomicIntegerFieldUpdater.newUpdater(AbstractMailbox.class, "_status"); +final class AbstractMailbox { + final static long mailboxStatusOffset; + final static long systemMessageOffset; - private volatile SystemMessage _systemQueue; // not initialized because this is faster - protected final static AtomicReferenceFieldUpdater systemQueueUpdater = - AtomicReferenceFieldUpdater.newUpdater(AbstractMailbox.class, SystemMessage.class, "_systemQueue"); + static { + try { + mailboxStatusOffset = Unsafe.instance.objectFieldOffset(Mailbox.class.getDeclaredField("_status")); + systemMessageOffset = Unsafe.instance.objectFieldOffset(Mailbox.class.getDeclaredField("_systemQueue")); + } catch(Throwable t){ + throw new ExceptionInInitializerError(t); + } + } } diff --git a/akka-actor/src/main/java/com/typesafe/config/Config.java b/akka-actor/src/main/java/com/typesafe/config/Config.java new file mode 100644 index 0000000000..3a820033a8 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/Config.java @@ -0,0 +1,265 @@ +/** + * Copyright (C) 2011 Typesafe Inc. 
+ */ +package com.typesafe.config; + +import java.util.List; + +/** + * This class represents an immutable map from config paths to config values. It + * also contains some static methods for creating configs. + * + * Throughout the API, there is a distinction between "keys" and "paths". A key + * is a key in a JSON object; it's just a string that's the key in a map. A + * "path" is a parseable expression with a syntax and it refers to a series of + * keys. Path expressions are described in the spec for "HOCON", which can be + * found at https://github.com/havocp/config/blob/master/HOCON.md; in brief, a + * path is period-separated so "a.b.c" looks for key c in object b in object a + * in the root object. Sometimes double quotes are needed around special + * characters in path expressions. + * + * The API for a Config is in terms of path expressions, while the API for a + * ConfigObject is in terms of keys. Conceptually, Config is a one-level map + * from paths to values, while a ConfigObject is a tree of maps from keys to + * values. + * + * Another difference between Config and ConfigObject is that conceptually, + * ConfigValue with valueType() of ConfigValueType.NULL exist in a ConfigObject, + * while a Config treats null values as if they were missing. + * + * Config is an immutable object and thus safe to use from multiple threads. + * + * The "getters" on a Config all work in the same way. They never return null, + * nor do they return a ConfigValue with valueType() of ConfigValueType.NULL. + * Instead, they throw ConfigException.Missing if the value is completely absent + * or set to null. If the value is set to null, a subtype of + * ConfigException.Missing called ConfigException.Null will be thrown. + * ConfigException.WrongType will be thrown anytime you ask for a type and the + * value has an incompatible type. Reasonable type conversions are performed for + * you though. 
+ * + * If you want to iterate over the contents of a Config, you have to get its + * ConfigObject with toObject, and then iterate over the ConfigObject. + * + */ +public interface Config extends ConfigMergeable { + /** + * Gets the config as a tree of ConfigObject. This is a constant-time + * operation (it is not proportional to the number of values in the Config). + * + * @return + */ + ConfigObject toObject(); + + ConfigOrigin origin(); + + @Override + Config withFallback(ConfigMergeable other); + + @Override + ConfigObject toValue(); + + /** + * Checks whether a value is present and non-null at the given path. This + * differs in two ways from ConfigObject.containsKey(): it looks for a path + * expression, not a key; and it returns false for null values, while + * containsKey() returns true indicating that the object contains a null + * value for the key. + * + * If a path exists according to hasPath(), then getValue() will never throw + * an exception. However, the typed getters, such as getInt(), will still + * throw if the value is not convertible to the requested type. 
+ * + * @param path + * the path expression + * @return true if a non-null value is present at the path + * @throws ConfigException.BadPath + * if the path expression is invalid + */ + boolean hasPath(String path); + + boolean isEmpty(); + + /** + * + * @param path + * @return + * @throws ConfigException.Missing + * if value is absent or null + * @throws ConfigException.WrongType + * if value is not convertible to boolean + */ + boolean getBoolean(String path); + + /** + * @param path + * @return + * @throws ConfigException.Missing + * if value is absent or null + * @throws ConfigException.WrongType + * if value is not convertible to a number + */ + Number getNumber(String path); + + /** + * @param path + * @return + * @throws ConfigException.Missing + * if value is absent or null + * @throws ConfigException.WrongType + * if value is not convertible to an int + */ + int getInt(String path); + + /** + * @param path + * @return + * @throws ConfigException.Missing + * if value is absent or null + * @throws ConfigException.WrongType + * if value is not convertible to a long + */ + long getLong(String path); + + /** + * @param path + * @return + * @throws ConfigException.Missing + * if value is absent or null + * @throws ConfigException.WrongType + * if value is not convertible to a double + */ + double getDouble(String path); + + /** + * @param path + * @return + * @throws ConfigException.Missing + * if value is absent or null + * @throws ConfigException.WrongType + * if value is not convertible to a string + */ + String getString(String path); + + /** + * @param path + * @return + * @throws ConfigException.Missing + * if value is absent or null + * @throws ConfigException.WrongType + * if value is not convertible to an object + */ + ConfigObject getObject(String path); + + /** + * @param path + * @return + * @throws ConfigException.Missing + * if value is absent or null + * @throws ConfigException.WrongType + * if value is not convertible to a Config + */ + Config 
getConfig(String path); + + /** + * Gets the value at the path as an unwrapped Java boxed value (Boolean, + * Integer, Long, etc.) + * + * @throws ConfigException.Missing + * if value is absent or null + */ + Object getAnyRef(String path); + + /** + * Gets the value at the given path, unless the value is a null value or + * missing, in which case it throws just like the other getters. Use get() + * from the Map interface if you want an unprocessed value. + * + * @param path + * @return + * @throws ConfigException.Missing + * if value is absent or null + */ + ConfigValue getValue(String path); + + /** + * Get value as a size in bytes (parses special strings like "128M"). The + * size units are interpreted as for memory, not as for disk space, so they + * are in powers of two. + * + * @throws ConfigException.Missing + * if value is absent or null + * @throws ConfigException.WrongType + * if value is not convertible to Long or String + * @throws ConfigException.BadValue + * if value cannot be parsed as a memory size + */ + Long getMemorySizeInBytes(String path); + + /** + * Get value as a duration in milliseconds. If the value is already a + * number, then it's left alone; if it's a string, it's parsed understanding + * units suffixes like "10m" or "5ns" + * + * @throws ConfigException.Missing + * if value is absent or null + * @throws ConfigException.WrongType + * if value is not convertible to Long or String + * @throws ConfigException.BadValue + * if value cannot be parsed as a number of milliseconds + */ + Long getMilliseconds(String path); + + /** + * Get value as a duration in nanoseconds. If the value is already a number + * it's taken as milliseconds and converted to nanoseconds. If it's a + * string, it's parsed understanding unit suffixes. 
+ * + * @throws ConfigException.Missing + * if value is absent or null + * @throws ConfigException.WrongType + * if value is not convertible to Long or String + * @throws ConfigException.BadValue + * if value cannot be parsed as a number of nanoseconds + */ + Long getNanoseconds(String path); + + /** + * Gets a list value (with any element type) as a ConfigList, which + * implements java.util.List. Throws if the path is unset or + * null. + * + * @param path + * the path to the list value. + * @return the ConfigList at the path + * @throws ConfigException.Missing + * if value is absent or null + * @throws ConfigException.WrongType + * if value is not convertible to a ConfigList + */ + ConfigList getList(String path); + + List getBooleanList(String path); + + List getNumberList(String path); + + List getIntList(String path); + + List getLongList(String path); + + List getDoubleList(String path); + + List getStringList(String path); + + List getObjectList(String path); + + List getConfigList(String path); + + List getAnyRefList(String path); + + List getMemorySizeInBytesList(String path); + + List getMillisecondsList(String path); + + List getNanosecondsList(String path); +} diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigException.java b/akka-actor/src/main/java/com/typesafe/config/ConfigException.java new file mode 100644 index 0000000000..7763231108 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigException.java @@ -0,0 +1,233 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config; + +/** + * All exceptions thrown by the library are subclasses of ConfigException. 
+ */ +public class ConfigException extends RuntimeException { + private static final long serialVersionUID = 1L; + + protected ConfigException(ConfigOrigin origin, String message, + Throwable cause) { + super(origin.description() + ": " + message, cause); + } + + protected ConfigException(ConfigOrigin origin, String message) { + this(origin.description() + ": " + message, null); + } + + protected ConfigException(String message, Throwable cause) { + super(message, cause); + } + + protected ConfigException(String message) { + this(message, null); + } + + /** + * Exception indicating that the type of a value does not match the type you + * requested. + * + */ + public static class WrongType extends ConfigException { + private static final long serialVersionUID = 1L; + + public WrongType(ConfigOrigin origin, String path, String expected, + String actual, + Throwable cause) { + super(origin, path + " has type " + actual + " rather than " + + expected, + cause); + } + + public WrongType(ConfigOrigin origin, String path, String expected, + String actual) { + this(origin, path, expected, actual, null); + } + + WrongType(ConfigOrigin origin, String message, Throwable cause) { + super(origin, message, cause); + } + + WrongType(ConfigOrigin origin, String message) { + this(origin, message, null); + } + } + + /** + * Exception indicates that the setting was never set to anything, not even + * null. 
+ */ + public static class Missing extends ConfigException { + private static final long serialVersionUID = 1L; + + public Missing(String path, Throwable cause) { + super("No configuration setting found for key '" + path + "'", + cause); + } + + public Missing(String path) { + this(path, null); + } + + protected Missing(ConfigOrigin origin, String message, Throwable cause) { + super(origin, message, cause); + } + + protected Missing(ConfigOrigin origin, String message) { + this(origin, message, null); + } + } + + /** + * Exception indicates that the setting was treated as missing because it + * was set to null. + */ + public static class Null extends Missing { + private static final long serialVersionUID = 1L; + + private static String makeMessage(String path, String expected) { + if (expected != null) { + return "Configuration key '" + path + + "' is set to null but expected " + expected; + } else { + return "Configuration key '" + path + "' is null"; + } + } + + public Null(ConfigOrigin origin, String path, String expected, + Throwable cause) { + super(origin, makeMessage(path, expected), cause); + } + + public Null(ConfigOrigin origin, String path, String expected) { + this(origin, path, expected, null); + } + } + + /** + * Exception indicating that a value was messed up, for example you may have + * asked for a duration and the value can't be sensibly parsed as a + * duration. 
+ * + */ + public static class BadValue extends ConfigException { + private static final long serialVersionUID = 1L; + + public BadValue(ConfigOrigin origin, String path, String message, + Throwable cause) { + super(origin, "Invalid value at '" + path + "': " + message, cause); + } + + public BadValue(ConfigOrigin origin, String path, String message) { + this(origin, path, message, null); + } + + public BadValue(String path, String message, Throwable cause) { + super("Invalid value at '" + path + "': " + message, cause); + } + + public BadValue(String path, String message) { + this(path, message, null); + } + } + + public static class BadPath extends ConfigException { + private static final long serialVersionUID = 1L; + + public BadPath(ConfigOrigin origin, String path, String message, + Throwable cause) { + super(origin, + path != null ? ("Invalid path '" + path + "': " + message) + : message, cause); + } + + public BadPath(ConfigOrigin origin, String path, String message) { + this(origin, path, message, null); + } + + public BadPath(String path, String message, Throwable cause) { + super(path != null ? ("Invalid path '" + path + "': " + message) + : message, cause); + } + + public BadPath(String path, String message) { + this(path, message, null); + } + + public BadPath(ConfigOrigin origin, String message) { + this(origin, null, message); + } + } + + /** + * Exception indicating that there's a bug in something or the runtime + * environment is broken. This exception should never be handled; instead, + * something should be fixed to keep the exception from occurring. + * + */ + public static class BugOrBroken extends ConfigException { + private static final long serialVersionUID = 1L; + + public BugOrBroken(String message, Throwable cause) { + super(message, cause); + } + + public BugOrBroken(String message) { + this(message, null); + } + } + + /** + * Exception indicating that there was an IO error. 
+ * + */ + public static class IO extends ConfigException { + private static final long serialVersionUID = 1L; + + public IO(ConfigOrigin origin, String message, Throwable cause) { + super(origin, message, cause); + } + + public IO(ConfigOrigin origin, String message) { + this(origin, message, null); + } + } + + /** + * Exception indicating that there was a parse error. + * + */ + public static class Parse extends ConfigException { + private static final long serialVersionUID = 1L; + + public Parse(ConfigOrigin origin, String message, Throwable cause) { + super(origin, message, cause); + } + + public Parse(ConfigOrigin origin, String message) { + this(origin, message, null); + } + } + + /** + * Exception indicating that you tried to use a function that requires + * substitutions to be resolved, but substitutions have not been resolved. + * This is always a bug in either application code or the library; it's + * wrong to write a handler for this exception because you should be able to + * fix the code to avoid it. + */ + public static class NotResolved extends BugOrBroken { + private static final long serialVersionUID = 1L; + + public NotResolved(String message, Throwable cause) { + super(message, cause); + } + + public NotResolved(String message) { + this(message, null); + } + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigFactory.java b/akka-actor/src/main/java/com/typesafe/config/ConfigFactory.java new file mode 100644 index 0000000000..b55c9abdf8 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigFactory.java @@ -0,0 +1,241 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config; + +import java.io.File; +import java.io.Reader; +import java.net.URL; +import java.util.Map; +import java.util.Properties; + +import com.typesafe.config.impl.ConfigImpl; +import com.typesafe.config.impl.Parseable; + +/** + * This class contains static methods for creating Config objects. 
+ * + * The static methods with "load" in the name do some sort of higher-level + * operation potentially parsing multiple resources and resolving substitutions, + * while the ones with "parse" in the name just create a ConfigValue from a + * resource and nothing else. + */ +public final class ConfigFactory { + /** + * Loads a configuration for the given root path in a "standard" way. + * Oversimplified, if your root path is foo.bar then this will load files + * from the classpath: foo-bar.conf, foo-bar.json, foo-bar.properties, + * foo-bar-reference.conf, foo-bar-reference.json, + * foo-bar-reference.properties. It will override all those files with any + * system properties that begin with "foo.bar.", as well. + * + * The root path should be a path expression, usually just a single short + * word, that scopes the package being configured; typically it's the + * package name or something similar. System properties overriding values in + * the configuration will have to be prefixed with the root path. The root + * path may have periods in it if you like but other punctuation or + * whitespace will probably cause you headaches. Example root paths: "akka", + * "sbt", "jsoup", "heroku", "mongo", etc. + * + * The loaded object will already be resolved (substitutions have already + * been processed). As a result, if you add more fallbacks then they won't + * be seen by substitutions. Substitutions are the "${foo.bar}" syntax. If + * you want to parse additional files or something then you need to use + * loadWithoutResolving(). 
+ * + * @param rootPath + * the configuration "domain" + * @return configuration object for the requested root path + */ + public static ConfigRoot load(String rootPath) { + return loadWithoutResolving(rootPath).resolve(); + } + + public static ConfigRoot load(String rootPath, + ConfigParseOptions parseOptions, ConfigResolveOptions resolveOptions) { + return loadWithoutResolving(rootPath, parseOptions).resolve( + resolveOptions); + } + + /** + * Like load() but does not resolve the object, so you can go ahead and add + * more fallbacks and stuff and have them seen by substitutions when you do + * call {@link ConfigRoot.resolve()}. + * + * @param rootPath + * @return + */ + public static ConfigRoot loadWithoutResolving(String rootPath) { + return loadWithoutResolving(rootPath, ConfigParseOptions.defaults()); + } + + public static ConfigRoot loadWithoutResolving(String rootPath, + ConfigParseOptions options) { + ConfigRoot system = systemPropertiesRoot(rootPath); + + Config mainFiles = parseResourcesForPath(rootPath, options); + Config referenceFiles = parseResourcesForPath(rootPath + ".reference", + options); + + return system.withFallback(mainFiles).withFallback(referenceFiles); + } + + public static ConfigRoot emptyRoot(String rootPath) { + return emptyRoot(rootPath, null); + } + + public static Config empty() { + return empty(null); + } + + public static ConfigRoot emptyRoot(String rootPath, String originDescription) { + return ConfigImpl.emptyRoot(rootPath, originDescription); + } + + public static Config empty(String originDescription) { + return ConfigImpl.emptyConfig(originDescription); + } + + public static ConfigRoot systemPropertiesRoot(String rootPath) { + return ConfigImpl.systemPropertiesRoot(rootPath); + } + + public static Config systemProperties() { + return ConfigImpl.systemPropertiesAsConfig(); + } + + public static Config systemEnvironment() { + return ConfigImpl.envVariablesAsConfig(); + } + + /** + * Converts a Java Properties object to a 
ConfigObject using the rules + * documented in https://github.com/havocp/config/blob/master/HOCON.md The + * keys in the Properties object are split on the period character '.' and + * treated as paths. The values will all end up as string values. If you + * have both "a=foo" and "a.b=bar" in your properties file, so "a" is both + * the object containing "b" and the string "foo", then the string value is + * dropped. + * + * If you want to get System.getProperties() as a ConfigObject, it's better + * to use the systemProperties() or systemPropertiesRoot() methods. Those + * methods are able to use a cached global singleton ConfigObject for the + * system properties. + * + * @param properties + * a Java Properties object + * @param options + * @return + */ + public static Config parseProperties(Properties properties, + ConfigParseOptions options) { + return Parseable.newProperties(properties, options).parse().toConfig(); + } + + public static Config parseReader(Reader reader, ConfigParseOptions options) { + return Parseable.newReader(reader, options).parse().toConfig(); + } + + public static Config parseURL(URL url, ConfigParseOptions options) { + return Parseable.newURL(url, options).parse().toConfig(); + } + + public static Config parseFile(File file, ConfigParseOptions options) { + return Parseable.newFile(file, options).parse().toConfig(); + } + + /** + * Parses a file. If the fileBasename already ends in a known extension, + * just parses it according to that extension. If the fileBasename does not + * end in an extension, then parse all known extensions and merge whatever + * is found. If options force a specific syntax, only parse files with an + * extension matching that syntax. If options.getAllowMissing() is true, + * then no files have to exist; if false, then at least one file has to + * exist. 
+ * + * @param fileBasename + * @param options + * @return + */ + public static Config parseFileAnySyntax(File fileBasename, + ConfigParseOptions options) { + return ConfigImpl.parseFileAnySyntax(fileBasename, options).toConfig(); + } + + public static Config parseResource(Class klass, String resource, + ConfigParseOptions options) { + return Parseable.newResource(klass, resource, options).parse() + .toConfig(); + } + + /** + * Same behavior as parseFileAnySyntax() but for classpath resources + * instead. + * + * @param klass + * @param resourceBasename + * @param options + * @return + */ + public static Config parseResourceAnySyntax(Class klass, String resourceBasename, + ConfigParseOptions options) { + return ConfigImpl.parseResourceAnySyntax(klass, resourceBasename, + options).toConfig(); + } + + public static Config parseString(String s, ConfigParseOptions options) { + return Parseable.newString(s, options).parse().toConfig(); + } + + /** + * Parses classpath resources corresponding to this path expression. + * Essentially if the path is "foo.bar" then the resources are + * "/foo-bar.conf", "/foo-bar.json", and "/foo-bar.properties". If more than + * one of those exists, they are merged. + * + * @param path + * @param options + * @return + */ + public static Config parseResourcesForPath(String rootPath, + ConfigParseOptions options) { + // null originDescription is allowed in parseResourcesForPath + return ConfigImpl.parseResourcesForPath(rootPath, options).toConfig(); + } + + /** + * Similar to ConfigValueFactory.fromMap(), but the keys in the map are path + * expressions, rather than keys; and correspondingly it returns a Config + * instead of a ConfigObject. This is more convenient if you are writing + * literal maps in code, and less convenient if you are getting your maps + * from some data source such as a parser. 
+ * + * An exception will be thrown (and it is a bug in the caller of the method) + * if a path is both an object and a value, for example if you had both + * "a=foo" and "a.b=bar", then "a" is both the string "foo" and the parent + * object of "b". The caller of this method should ensure that doesn't + * happen. + * + * @param values + * @param originDescription + * description of what this map represents, like a filename, or + * "default settings" (origin description is used in error + * messages) + * @return + */ + public static Config parseMap(Map values, + String originDescription) { + return ConfigImpl.fromPathMap(values, originDescription).toConfig(); + } + + /** + * See the other overload of parseMap() for details, this one just uses a + * default origin description. + * + * @param values + * @return + */ + public static Config parseMap(Map values) { + return parseMap(values, null); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigIncludeContext.java b/akka-actor/src/main/java/com/typesafe/config/ConfigIncludeContext.java new file mode 100644 index 0000000000..a770250171 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigIncludeContext.java @@ -0,0 +1,29 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config; + + +/** + * A ConfigIncludeContext is passed to a ConfigIncluder. This interface is not + * intended for apps to implement. + */ +public interface ConfigIncludeContext { + /** + * Tries to find a name relative to whatever is doing the including, for + * example in the same directory as the file doing the including. Returns + * null if it can't meaningfully create a relative name. The returned + * parseable may not exist; this function is not required to do any IO, just + * compute what the name would be. + * + * The passed-in filename has to be a complete name (with extension), not + * just a basename. (Include statements in config files are allowed to give + * just a basename.) 
+ * + * @param filename + * the name to make relative to the resource doing the including + * @return parseable item relative to the resource doing the including, or + * null + */ + ConfigParseable relativeTo(String filename); +} diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigIncluder.java b/akka-actor/src/main/java/com/typesafe/config/ConfigIncluder.java new file mode 100644 index 0000000000..364b8c5e30 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigIncluder.java @@ -0,0 +1,40 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config; + +/** + * Interface you have to implement to customize "include" statements in config + * files. + */ +public interface ConfigIncluder { + /** + * Returns a new includer that falls back to the given includer. This is how + * you can obtain the default includer; it will be provided as a fallback. + * It's up to your includer to chain to it if you want to. You might want to + * merge any files found by the fallback includer with any objects you load + * yourself. + * + * It's important to handle the case where you already have the fallback + * with a "return this", i.e. this method should not create a new object if + * the fallback is the same one you already have. The same fallback may be + * added repeatedly. + * + * @param fallback + * @return a new includer + */ + ConfigIncluder withFallback(ConfigIncluder fallback); + + /** + * Parses another item to be included. The returned object typically would + * not have substitutions resolved. You can throw a ConfigException here to + * abort parsing, or return an empty object, but may not return null. 
+ * + * @param context + * some info about the include context + * @param what + * the include statement's argument + * @return a non-null ConfigObject + */ + ConfigObject include(ConfigIncludeContext context, String what); +} diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigList.java b/akka-actor/src/main/java/com/typesafe/config/ConfigList.java new file mode 100644 index 0000000000..0688c29abe --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigList.java @@ -0,0 +1,23 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config; + +import java.util.List; + +/** + * A list (aka array) value corresponding to ConfigValueType.LIST or JSON's + * "[1,2,3]" value. Implements java.util.List so you can use it + * like a regular Java list. + * + */ +public interface ConfigList extends List, ConfigValue { + + /** + * Recursively unwraps the list, returning a list of plain Java values such + * as Integer or String or whatever is in the list. + */ + @Override + List unwrapped(); + +} diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigMergeable.java b/akka-actor/src/main/java/com/typesafe/config/ConfigMergeable.java new file mode 100644 index 0000000000..deec42bec1 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigMergeable.java @@ -0,0 +1,41 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config; + +/** + * This is a marker for types that can be merged as a fallback into a Config or + * a ConfigValue. Both Config and ConfigValue are mergeable. + */ +public interface ConfigMergeable { + /** + * Converts the mergeable to a ConfigValue to be merged. + * + * @return + */ + ConfigValue toValue(); + + /** + * Returns a new value computed by merging this value with another, with + * keys in this value "winning" over the other one. Only ConfigObject and + * Config instances do anything in this method (they need to merge the + * fallback keys into themselves). 
All other values just return the original + * value, since they automatically override any fallback. + * + * The semantics of merging are described in + * https://github.com/havocp/config/blob/master/HOCON.md + * + * Note that objects do not merge "across" non-objects; if you do + * object.withFallback(nonObject).withFallback(otherObject), + * then otherObject will simply be ignored. This is an + * intentional part of how merging works. Both non-objects, and any object + * which has fallen back to a non-object, block subsequent fallbacks. + * + * @param other + * an object whose keys should be used if the keys are not + * present in this one + * @return a new object (or the original one, if the fallback doesn't get + * used) + */ + ConfigMergeable withFallback(ConfigMergeable other); +} diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigObject.java b/akka-actor/src/main/java/com/typesafe/config/ConfigObject.java new file mode 100644 index 0000000000..c559f9e4ee --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigObject.java @@ -0,0 +1,72 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config; + +import java.util.Map; + +/** + * A ConfigObject is a read-only configuration object, which may have nested + * child objects. Implementations of ConfigObject should be immutable (at least + * from the perspective of anyone using this interface) and thus thread-safe. + * + * In most cases you want to use the Config interface rather than this one. Call + * toConfig() to convert a ConfigObject to a config. + * + * The API for a ConfigObject is in terms of keys, while the API for a Config is + * in terms of path expressions. Conceptually, ConfigObject is a tree of maps + * from keys to values, while a ConfigObject is a one-level map from paths to + * values. + * + * Throughout the API, there is a distinction between "keys" and "paths". A key + * is a key in a JSON object; it's just a string that's the key in a map. 
A + * "path" is a parseable expression with a syntax and it refers to a series of + * keys. A path is used to traverse nested ConfigObject by looking up each key + * in the path. Path expressions are described in the spec for "HOCON", which + * can be found at https://github.com/havocp/config/blob/master/HOCON.md; in + * brief, a path is period-separated so "a.b.c" looks for key c in object b in + * object a in the root object. Sometimes double quotes are needed around + * special characters in path expressions. + * + * ConfigObject implements java.util.Map and all methods + * work with keys, not path expressions. + * + * While ConfigObject implements the standard Java Map interface, the mutator + * methods all throw UnsupportedOperationException. This Map is immutable. + * + * The Map may contain null values, which will have ConfigValue.valueType() == + * ConfigValueType.NULL. If get() returns Java's null then the key was not + * present in the parsed file (or wherever this value tree came from). If get() + * returns a ConfigValue with type ConfigValueType.NULL then the key was set to + * null explicitly. + */ +public interface ConfigObject extends ConfigValue, Map { + + /** + * Converts this object to a Config instance, enabling you to use path + * expressions to find values in the object. This is a constant-time + * operation (it is not proportional to the size of the object). + * + * @return + */ + Config toConfig(); + + /** + * Recursively unwraps the object, returning a map from String to whatever + * plain Java values are unwrapped from the object's values. + */ + @Override + Map unwrapped(); + + @Override + ConfigObject withFallback(ConfigMergeable other); + + /** + * Gets a ConfigValue at the given key, or returns null if there is no + * value. The returned ConfigValue may have ConfigValueType.NULL or any + * other type, and the passed-in key must be a key in this object, rather + * than a path expression. 
+ */ + @Override + ConfigValue get(Object key); +} diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigOrigin.java b/akka-actor/src/main/java/com/typesafe/config/ConfigOrigin.java new file mode 100644 index 0000000000..73240736e5 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigOrigin.java @@ -0,0 +1,12 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config; + +/** + * ConfigOrigin is used to track the origin (such as filename and line number) + * of a ConfigValue or other object. The origin is used in error messages. + */ +public interface ConfigOrigin { + public String description(); +} diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigParseOptions.java b/akka-actor/src/main/java/com/typesafe/config/ConfigParseOptions.java new file mode 100644 index 0000000000..ac0c8f3974 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigParseOptions.java @@ -0,0 +1,132 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config; + + +public final class ConfigParseOptions { + final ConfigSyntax syntax; + final String originDescription; + final boolean allowMissing; + final ConfigIncluder includer; + + protected ConfigParseOptions(ConfigSyntax syntax, String originDescription, + boolean allowMissing, ConfigIncluder includer) { + this.syntax = syntax; + this.originDescription = originDescription; + this.allowMissing = allowMissing; + this.includer = includer; + } + + public static ConfigParseOptions defaults() { + return new ConfigParseOptions(null, null, true, null); + } + + /** + * Set the file format. If set to null, try to guess from any available + * filename extension; if guessing fails, assume ConfigSyntax.CONF. 
+ * + * @param syntax + * @return + */ + public ConfigParseOptions setSyntax(ConfigSyntax syntax) { + if (this.syntax == syntax) + return this; + else + return new ConfigParseOptions(syntax, this.originDescription, + this.allowMissing, this.includer); + } + + public ConfigSyntax getSyntax() { + return syntax; + } + + /** + * Set a description for the thing being parsed. In most cases this will be + * set up for you to something like the filename, but if you provide just an + * input stream you might want to improve on it. Set to null to allow the + * library to come up with something automatically. + * + * @param originDescription + * @return + */ + public ConfigParseOptions setOriginDescription(String originDescription) { + if (this.originDescription == originDescription) + return this; + else if (this.originDescription != null && originDescription != null + && this.originDescription.equals(originDescription)) + return this; + else + return new ConfigParseOptions(this.syntax, originDescription, + this.allowMissing, this.includer); + } + + public String getOriginDescription() { + return originDescription; + } + + /** this is package-private, not public API */ + ConfigParseOptions withFallbackOriginDescription(String originDescription) { + if (this.originDescription == null) + return setOriginDescription(originDescription); + else + return this; + } + + /** + * Set to false to throw an exception if the item being parsed (for example + * a file) is missing. Set to true to just return an empty document in that + * case. + * + * @param allowMissing + * @return + */ + public ConfigParseOptions setAllowMissing(boolean allowMissing) { + if (this.allowMissing == allowMissing) + return this; + else + return new ConfigParseOptions(this.syntax, this.originDescription, + allowMissing, this.includer); + } + + public boolean getAllowMissing() { + return allowMissing; + } + + /** + * Set a ConfigIncluder which customizes how includes are handled. 
+ * + * @param includer + * @return new version of the parse options with different includer + */ + public ConfigParseOptions setIncluder(ConfigIncluder includer) { + if (this.includer == includer) + return this; + else + return new ConfigParseOptions(this.syntax, this.originDescription, + this.allowMissing, includer); + } + + public ConfigParseOptions prependIncluder(ConfigIncluder includer) { + if (this.includer == includer) + return this; + else if (this.includer != null) + return setIncluder(includer.withFallback(this.includer)); + else + return setIncluder(includer); + } + + public ConfigParseOptions appendIncluder(ConfigIncluder includer) { + if (this.includer == includer) + return this; + else if (this.includer != null) + return setIncluder(this.includer.withFallback(includer)); + else + return setIncluder(includer); + } + + public ConfigIncluder getIncluder() { + return includer; + } + +} diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigParseable.java b/akka-actor/src/main/java/com/typesafe/config/ConfigParseable.java new file mode 100644 index 0000000000..4c19b5451a --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigParseable.java @@ -0,0 +1,23 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config; + +import java.net.URL; + +/** An opaque handle to something that can be parsed. */ +public interface ConfigParseable { + /** + * Parse whatever it is. + * + * @param options + * parse options, should be based on the ones from options() + */ + ConfigObject parse(ConfigParseOptions options); + + /** Possibly return a URL representing the resource; this may return null. */ + URL url(); + + /** Get the initial options, which can be modified then passed to parse(). 
*/ + ConfigParseOptions options(); +} diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigResolveOptions.java b/akka-actor/src/main/java/com/typesafe/config/ConfigResolveOptions.java new file mode 100644 index 0000000000..6f572a84e8 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigResolveOptions.java @@ -0,0 +1,39 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config; + +public final class ConfigResolveOptions { + private final boolean useSystemProperties; + private final boolean useSystemEnvironment; + + private ConfigResolveOptions(boolean useSystemProperties, + boolean useSystemEnvironment) { + this.useSystemProperties = useSystemProperties; + this.useSystemEnvironment = useSystemEnvironment; + } + + public static ConfigResolveOptions defaults() { + return new ConfigResolveOptions(true, true); + } + + public static ConfigResolveOptions noSystem() { + return new ConfigResolveOptions(false, false); + } + + public ConfigResolveOptions setUseSystemProperties(boolean value) { + return new ConfigResolveOptions(value, useSystemEnvironment); + } + + public ConfigResolveOptions setUseSystemEnvironment(boolean value) { + return new ConfigResolveOptions(useSystemProperties, value); + } + + public boolean getUseSystemProperties() { + return useSystemProperties; + } + + public boolean getUseSystemEnvironment() { + return useSystemEnvironment; + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigRoot.java b/akka-actor/src/main/java/com/typesafe/config/ConfigRoot.java new file mode 100644 index 0000000000..d8f25e89d6 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigRoot.java @@ -0,0 +1,33 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config; + +/** + * A root object. The only special thing about a root object is that you can + * resolve substitutions against it. 
So it can have a resolve() method that + * doesn't require you to pass in an object to resolve against. + */ +public interface ConfigRoot extends Config { + /** + * Returns a replacement root object with all substitutions (the + * "${foo.bar}" syntax) resolved. Substitutions are looked up in this root + * object. A configuration value tree must be resolved before you can use + * it. This method uses ConfigResolveOptions.defaults(). + * + * @return an immutable object with substitutions resolved + */ + ConfigRoot resolve(); + + ConfigRoot resolve(ConfigResolveOptions options); + + @Override + ConfigRoot withFallback(ConfigMergeable fallback); + + /** + * Gets the global app name that this root represents. + * + * @return the app's root config path + */ + String rootPath(); +} diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigSyntax.java b/akka-actor/src/main/java/com/typesafe/config/ConfigSyntax.java new file mode 100644 index 0000000000..4f43fe7365 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigSyntax.java @@ -0,0 +1,8 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config; + +public enum ConfigSyntax { + JSON, CONF, PROPERTIES; +} diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigValue.java b/akka-actor/src/main/java/com/typesafe/config/ConfigValue.java new file mode 100644 index 0000000000..8b50f7b205 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigValue.java @@ -0,0 +1,35 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config; + +/** + * Interface implemented by any configuration value. From the perspective of + * users of this interface, the object is immutable. It is therefore safe to use + * from multiple threads. + */ +public interface ConfigValue extends ConfigMergeable { + /** + * The origin of the value, for debugging and error messages. 
+ * + * @return where the value came from + */ + ConfigOrigin origin(); + + /** + * The type of the value; matches the JSON type schema. + * + * @return value's type + */ + ConfigValueType valueType(); + + /** + * Returns the config value as a plain Java boxed value, should be a String, + * Number, etc. matching the valueType() of the ConfigValue. If the value is + * a ConfigObject or ConfigList, it is recursively unwrapped. + */ + Object unwrapped(); + + @Override + ConfigValue withFallback(ConfigMergeable other); +} diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigValueFactory.java b/akka-actor/src/main/java/com/typesafe/config/ConfigValueFactory.java new file mode 100644 index 0000000000..fd63a6ffe5 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigValueFactory.java @@ -0,0 +1,129 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config; + +import java.util.Collection; +import java.util.Map; + +import com.typesafe.config.impl.ConfigImpl; + +/** + * This class holds some static factory methods for building ConfigValue. See + * also ConfigFactory which has methods for parsing files and certain in-memory + * data structures. + */ +public final class ConfigValueFactory { + /** + * Creates a ConfigValue from a plain Java boxed value, which may be a + * Boolean, Number, String, Map, Iterable, or null. A Map must be a Map from + * String to more values that can be supplied to fromAnyRef(). An Iterable + * must iterate over more values that can be supplied to fromAnyRef(). A Map + * will become a ConfigObject and an Iterable will become a ConfigList. If + * the Iterable is not an ordered collection, results could be strange, + * since ConfigList is ordered. + * + * In a Map passed to fromAnyRef(), the map's keys are plain keys, not path + * expressions. 
So if your Map has a key "foo.bar" then you will get one + * object with a key called "foo.bar", rather than an object with a key + * "foo" containing another object with a key "bar". + * + * The originDescription will be used to set the origin() field on the + * ConfigValue. It should normally be the name of the file the values came + * from, or something short describing the value such as "default settings". + * The originDescription is prefixed to error messages so users can tell + * where problematic values are coming from. + * + * Supplying the result of ConfigValue.unwrapped() to this function is + * guaranteed to work and should give you back a ConfigValue that matches + * the one you unwrapped. The re-wrapped ConfigValue will lose some + * information that was present in the original such as its origin, but it + * will have matching values. + * + * This function throws if you supply a value that cannot be converted to a + * ConfigValue, but supplying such a value is a bug in your program, so you + * should never handle the exception. Just fix your program (or report a bug + * against this library). + * + * @param object + * object to convert to ConfigValue + * @param originDescription + * name of origin file or brief description of what the value is + * @return a new value + */ + public static ConfigValue fromAnyRef(Object object, String originDescription) { + return ConfigImpl.fromAnyRef(object, originDescription); + } + + /** + * See the fromAnyRef() documentation for details. This is a typesafe + * wrapper that only works on Map and returns ConfigObject rather than + * ConfigValue. + * + * If your Map has a key "foo.bar" then you will get one object with a key + * called "foo.bar", rather than an object with a key "foo" containing + * another object with a key "bar". The keys in the map are keys; not path + * expressions. That is, the Map corresponds exactly to a single + * ConfigObject. 
The keys will not be parsed or modified, and the values are + * wrapped in ConfigValue. To get nested ConfigObject, some of the values in + * the map would have to be more maps. + * + * There is a separate fromPathMap() that interprets the keys in the map as + * path expressions. + * + * @param values + * @param originDescription + * @return + */ + public static ConfigObject fromMap(Map values, + String originDescription) { + return (ConfigObject) fromAnyRef(values, originDescription); + } + + /** + * See the fromAnyRef() documentation for details. This is a typesafe + * wrapper that only works on Iterable and returns ConfigList rather than + * ConfigValue. + * + * @param values + * @param originDescription + * @return + */ + public static ConfigList fromIterable(Iterable values, + String originDescription) { + return (ConfigList) fromAnyRef(values, originDescription); + } + + /** + * See the other overload of fromAnyRef() for details, this one just uses a + * default origin description. + * + * @param object + * @return + */ + public static ConfigValue fromAnyRef(Object object) { + return fromAnyRef(object, null); + } + + /** + * See the other overload of fromMap() for details, this one just uses a + * default origin description. + * + * @param values + * @return + */ + public static ConfigObject fromMap(Map values) { + return fromMap(values, null); + } + + /** + * See the other overload of fromIterable() for details, this one just uses + * a default origin description. + * + * @param values + * @return + */ + public static ConfigList fromIterable(Collection values) { + return fromIterable(values, null); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigValueType.java b/akka-actor/src/main/java/com/typesafe/config/ConfigValueType.java new file mode 100644 index 0000000000..0ead8e1965 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/ConfigValueType.java @@ -0,0 +1,11 @@ +/** + * Copyright (C) 2011 Typesafe Inc. 
+ */ +package com.typesafe.config; + +/** + * The type of a configuration value. Value types follow the JSON type schema. + */ +public enum ConfigValueType { + OBJECT, LIST, NUMBER, BOOLEAN, NULL, STRING +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigObject.java b/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigObject.java new file mode 100644 index 0000000000..d9d41e3f04 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigObject.java @@ -0,0 +1,369 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigMergeable; +import com.typesafe.config.ConfigObject; +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigResolveOptions; +import com.typesafe.config.ConfigValue; +import com.typesafe.config.ConfigValueType; + +abstract class AbstractConfigObject extends AbstractConfigValue implements + ConfigObject { + final private SimpleConfig config; + + protected AbstractConfigObject(ConfigOrigin origin) { + super(origin); + this.config = new SimpleConfig(this); + } + + @Override + public SimpleConfig toConfig() { + return config; + } + + /** + * This looks up the key with no transformation or type conversion of any + * kind, and returns null if the key is not present. 
+ * + * @param key + * @return the unmodified raw value or null + */ + protected abstract AbstractConfigValue peek(String key); + + protected AbstractConfigValue peek(String key, + SubstitutionResolver resolver, int depth, + ConfigResolveOptions options) { + AbstractConfigValue v = peek(key); + + if (v != null && resolver != null) { + v = resolver.resolve(v, depth, options); + } + + return v; + } + + /** + * Looks up the path with no transformation, type conversion, or exceptions + * (just returns null if path not found). Does however resolve the path, if + * resolver != null. + */ + protected ConfigValue peekPath(Path path, SubstitutionResolver resolver, + int depth, ConfigResolveOptions options) { + return peekPath(this, path, resolver, depth, options); + } + + private static ConfigValue peekPath(AbstractConfigObject self, Path path, + SubstitutionResolver resolver, int depth, + ConfigResolveOptions options) { + String key = path.first(); + Path next = path.remainder(); + + if (next == null) { + ConfigValue v = self.peek(key, resolver, depth, options); + return v; + } else { + // it's important to ONLY resolve substitutions here, not + // all values, because if you resolve arrays or objects + // it creates unnecessary cycles as a side effect (any sibling + // of the object we want to follow could cause a cycle, not just + // the object we want to follow). 
+ + ConfigValue v = self.peek(key); + + if (v instanceof ConfigSubstitution && resolver != null) { + v = resolver.resolve((AbstractConfigValue) v, depth, options); + } + + if (v instanceof AbstractConfigObject) { + return peekPath((AbstractConfigObject) v, next, resolver, + depth, options); + } else { + return null; + } + } + } + + @Override + public ConfigValueType valueType() { + return ConfigValueType.OBJECT; + } + + protected abstract AbstractConfigObject newCopy(ResolveStatus status, + boolean ignoresFallbacks); + + @Override + protected AbstractConfigObject newCopy(boolean ignoresFallbacks) { + return newCopy(resolveStatus(), ignoresFallbacks); + } + + @Override + protected final AbstractConfigObject mergedWithTheUnmergeable(Unmergeable fallback) { + if (ignoresFallbacks()) + throw new ConfigException.BugOrBroken("should not be reached"); + + List stack = new ArrayList(); + if (this instanceof Unmergeable) { + stack.addAll(((Unmergeable) this).unmergedValues()); + } else { + stack.add(this); + } + stack.addAll(fallback.unmergedValues()); + return new ConfigDelayedMergeObject(mergeOrigins(stack), stack, + ((AbstractConfigValue) fallback).ignoresFallbacks()); + } + + @Override + protected AbstractConfigObject mergedWithObject(AbstractConfigObject fallback) { + if (ignoresFallbacks()) + throw new ConfigException.BugOrBroken("should not be reached"); + + boolean allResolved = true; + Map merged = new HashMap(); + Set allKeys = new HashSet(); + allKeys.addAll(this.keySet()); + allKeys.addAll(fallback.keySet()); + for (String key : allKeys) { + AbstractConfigValue first = this.peek(key); + AbstractConfigValue second = fallback.peek(key); + AbstractConfigValue kept; + if (first == null) + kept = second; + else if (second == null) + kept = first; + else + kept = first.withFallback(second); + merged.put(key, kept); + if (kept.resolveStatus() == ResolveStatus.UNRESOLVED) + allResolved = false; + } + return new SimpleConfigObject(mergeOrigins(this, fallback), merged, + 
ResolveStatus.fromBoolean(allResolved), fallback.ignoresFallbacks()); + } + + @Override + public AbstractConfigObject withFallback(ConfigMergeable mergeable) { + return (AbstractConfigObject) super.withFallback(mergeable); + } + + static ConfigOrigin mergeOrigins( + Collection stack) { + if (stack.isEmpty()) + throw new ConfigException.BugOrBroken( + "can't merge origins on empty list"); + final String prefix = "merge of "; + StringBuilder sb = new StringBuilder(); + ConfigOrigin firstOrigin = null; + int numMerged = 0; + for (AbstractConfigValue v : stack) { + if (firstOrigin == null) + firstOrigin = v.origin(); + + String desc = v.origin().description(); + if (desc.startsWith(prefix)) + desc = desc.substring(prefix.length()); + + if (v instanceof AbstractConfigObject + && ((AbstractConfigObject) v).resolveStatus() == ResolveStatus.RESOLVED + && ((ConfigObject) v).isEmpty()) { + // don't include empty files or the .empty() + // config in the description, since they are + // likely to be "implementation details" + } else { + sb.append(desc); + sb.append(","); + numMerged += 1; + } + } + if (numMerged > 0) { + sb.setLength(sb.length() - 1); // chop comma + if (numMerged > 1) { + return new SimpleConfigOrigin(prefix + sb.toString()); + } else { + return new SimpleConfigOrigin(sb.toString()); + } + } else { + // the configs were all empty. + return firstOrigin; + } + } + + static ConfigOrigin mergeOrigins(AbstractConfigObject... 
stack) { + return mergeOrigins(Arrays.asList(stack)); + } + + private AbstractConfigObject modify(Modifier modifier, + ResolveStatus newResolveStatus) { + Map changes = null; + for (String k : keySet()) { + AbstractConfigValue v = peek(k); + AbstractConfigValue modified = modifier.modifyChild(v); + if (modified != v) { + if (changes == null) + changes = new HashMap(); + changes.put(k, modified); + } + } + if (changes == null) { + return newCopy(newResolveStatus, ignoresFallbacks()); + } else { + Map modified = new HashMap(); + for (String k : keySet()) { + if (changes.containsKey(k)) { + modified.put(k, changes.get(k)); + } else { + modified.put(k, peek(k)); + } + } + return new SimpleConfigObject(origin(), modified, newResolveStatus, + ignoresFallbacks()); + } + } + + @Override + AbstractConfigObject resolveSubstitutions(final SubstitutionResolver resolver, + final int depth, + final ConfigResolveOptions options) { + if (resolveStatus() == ResolveStatus.RESOLVED) + return this; + + return modify(new Modifier() { + + @Override + public AbstractConfigValue modifyChild(AbstractConfigValue v) { + return resolver.resolve(v, depth, options); + } + + }, ResolveStatus.RESOLVED); + } + + @Override + AbstractConfigObject relativized(final Path prefix) { + return modify(new Modifier() { + + @Override + public AbstractConfigValue modifyChild(AbstractConfigValue v) { + return v.relativized(prefix); + } + + }, resolveStatus()); + } + + @Override + public AbstractConfigValue get(Object key) { + if (key instanceof String) + return peek((String) key); + else + return null; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(valueType().name()); + sb.append("("); + for (String k : keySet()) { + sb.append(k); + sb.append("->"); + sb.append(peek(k).toString()); + sb.append(","); + } + if (!keySet().isEmpty()) + sb.setLength(sb.length() - 1); // chop comma + sb.append(")"); + return sb.toString(); + } + + private static boolean 
mapEquals(Map a, + Map b) { + Set aKeys = a.keySet(); + Set bKeys = b.keySet(); + + if (!aKeys.equals(bKeys)) + return false; + + for (String key : aKeys) { + if (!a.get(key).equals(b.get(key))) + return false; + } + return true; + } + + private static int mapHash(Map m) { + // the keys have to be sorted, otherwise we could be equal + // to another map but have a different hashcode. + List keys = new ArrayList(); + keys.addAll(m.keySet()); + Collections.sort(keys); + + int valuesHash = 0; + for (String k : keys) { + valuesHash += m.get(k).hashCode(); + } + return 41 * (41 + keys.hashCode()) + valuesHash; + } + + @Override + protected boolean canEqual(Object other) { + return other instanceof ConfigObject; + } + + @Override + public boolean equals(Object other) { + // note that "origin" is deliberately NOT part of equality. + // neither are other "extras" like ignoresFallbacks or resolve status. + if (other instanceof ConfigObject) { + // optimization to avoid unwrapped() for two ConfigObject, + // which is what AbstractConfigValue does. + return canEqual(other) && mapEquals(this, ((ConfigObject) other)); + } else { + return false; + } + } + + @Override + public int hashCode() { + // note that "origin" is deliberately NOT part of equality + // neither are other "extras" like ignoresFallbacks or resolve status. 
+ return mapHash(this); + } + + private static UnsupportedOperationException weAreImmutable(String method) { + return new UnsupportedOperationException( + "ConfigObject is immutable, you can't call Map.'" + method + + "'"); + } + + @Override + public void clear() { + throw weAreImmutable("clear"); + } + + @Override + public ConfigValue put(String arg0, ConfigValue arg1) { + throw weAreImmutable("put"); + } + + @Override + public void putAll(Map arg0) { + throw weAreImmutable("putAll"); + } + + @Override + public ConfigValue remove(Object arg0) { + throw weAreImmutable("remove"); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java b/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java new file mode 100644 index 0000000000..2125cf723b --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java @@ -0,0 +1,177 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigMergeable; +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigResolveOptions; +import com.typesafe.config.ConfigValue; + +/** + * + * Trying very hard to avoid a parent reference in config values; when you have + * a tree like this, the availability of parent() tends to result in a lot of + * improperly-factored and non-modular code. Please don't add parent(). + * + */ +abstract class AbstractConfigValue implements ConfigValue { + + final private ConfigOrigin origin; + + AbstractConfigValue(ConfigOrigin origin) { + this.origin = origin; + } + + @Override + public ConfigOrigin origin() { + return this.origin; + } + + /** + * Called only by SubstitutionResolver object. 
+ * + * @param resolver + * the resolver doing the resolving + * @param depth + * the number of substitutions followed in resolving the current + * one + * @param options + * whether to look at system props and env vars + * @return a new value if there were changes, or this if no changes + */ + AbstractConfigValue resolveSubstitutions(SubstitutionResolver resolver, + int depth, + ConfigResolveOptions options) { + return this; + } + + ResolveStatus resolveStatus() { + return ResolveStatus.RESOLVED; + } + + /** + * This is used when including one file in another; the included file is + * relativized to the path it's included into in the parent file. The point + * is that if you include a file at foo.bar in the parent, and the included + * file as a substitution ${a.b.c}, the included substitution now needs to + * be ${foo.bar.a.b.c} because we resolve substitutions globally only after + * parsing everything. + * + * @param prefix + * @return value relativized to the given path or the same value if nothing + * to do + */ + AbstractConfigValue relativized(Path prefix) { + return this; + } + + protected interface Modifier { + AbstractConfigValue modifyChild(AbstractConfigValue v); + } + + @Override + public AbstractConfigValue toValue() { + return this; + } + + protected AbstractConfigValue newCopy(boolean ignoresFallbacks) { + return this; + } + + // this is virtualized rather than a field because only some subclasses + // really need to store the boolean, and they may be able to pack it + // with another boolean to save space. 
+ protected boolean ignoresFallbacks() { + return true; + } + + private ConfigException badMergeException() { + if (ignoresFallbacks()) + throw new ConfigException.BugOrBroken( + "method should not have been called with ignoresFallbacks=true" + + getClass().getSimpleName()); + else + throw new ConfigException.BugOrBroken("should override this in " + + getClass().getSimpleName()); + } + + protected AbstractConfigValue mergedWithTheUnmergeable(Unmergeable fallback) { + throw badMergeException(); + } + + protected AbstractConfigValue mergedWithObject(AbstractConfigObject fallback) { + throw badMergeException(); + } + + @Override + public AbstractConfigValue withFallback(ConfigMergeable mergeable) { + if (ignoresFallbacks()) { + return this; + } else { + ConfigValue other = mergeable.toValue(); + + if (other instanceof Unmergeable) { + return mergedWithTheUnmergeable((Unmergeable) other); + } else if (other instanceof AbstractConfigObject) { + AbstractConfigObject fallback = (AbstractConfigObject) other; + if (fallback.resolveStatus() == ResolveStatus.RESOLVED && fallback.isEmpty()) { + if (fallback.ignoresFallbacks()) + return newCopy(true /* ignoresFallbacks */); + else + return this; + } else { + return mergedWithObject((AbstractConfigObject) other); + } + } else { + // falling back to a non-object doesn't merge anything, and also + // prohibits merging any objects that we fall back to later. + // so we have to switch to ignoresFallbacks mode. 
+ return newCopy(true /* ignoresFallbacks */); + } + } + } + + protected boolean canEqual(Object other) { + return other instanceof ConfigValue; + } + + @Override + public boolean equals(Object other) { + // note that "origin" is deliberately NOT part of equality + if (other instanceof ConfigValue) { + return canEqual(other) + && (this.valueType() == + ((ConfigValue) other).valueType()) + && ConfigUtil.equalsHandlingNull(this.unwrapped(), + ((ConfigValue) other).unwrapped()); + } else { + return false; + } + } + + @Override + public int hashCode() { + // note that "origin" is deliberately NOT part of equality + Object o = this.unwrapped(); + if (o == null) + return 0; + else + return o.hashCode(); + } + + @Override + public String toString() { + return valueType().name() + "(" + unwrapped() + ")"; + } + + // toString() is a debugging-oriented string but this is defined + // to create a string that would parse back to the value in JSON. + // It only works for primitive values (that would be a single token) + // which are auto-converted to strings when concatenating with + // other strings or by the DefaultTransformer. + String transformToString() { + return null; + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigBoolean.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigBoolean.java new file mode 100644 index 0000000000..d45dbd1326 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigBoolean.java @@ -0,0 +1,32 @@ +/** + * Copyright (C) 2011 Typesafe Inc. 
+ */ +package com.typesafe.config.impl; + +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigValueType; + +final class ConfigBoolean extends AbstractConfigValue { + + final private boolean value; + + ConfigBoolean(ConfigOrigin origin, boolean value) { + super(origin); + this.value = value; + } + + @Override + public ConfigValueType valueType() { + return ConfigValueType.BOOLEAN; + } + + @Override + public Boolean unwrapped() { + return value; + } + + @Override + String transformToString() { + return value ? "true" : "false"; + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMerge.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMerge.java new file mode 100644 index 0000000000..0c20aa5701 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMerge.java @@ -0,0 +1,175 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigResolveOptions; +import com.typesafe.config.ConfigValueType; + +/** + * The issue here is that we want to first merge our stack of config files, and + * then we want to evaluate substitutions. But if two substitutions both expand + * to an object, we might need to merge those two objects. Thus, we can't ever + * "override" a substitution when we do a merge; instead we have to save the + * stack of values that should be merged, and resolve the merge when we evaluate + * substitutions. 
+ */ +final class ConfigDelayedMerge extends AbstractConfigValue implements + Unmergeable { + + // earlier items in the stack win + final private List stack; + final private boolean ignoresFallbacks; + + ConfigDelayedMerge(ConfigOrigin origin, List stack, + boolean ignoresFallbacks) { + super(origin); + this.stack = stack; + this.ignoresFallbacks = ignoresFallbacks; + if (stack.isEmpty()) + throw new ConfigException.BugOrBroken( + "creating empty delayed merge value"); + + for (AbstractConfigValue v : stack) { + if (v instanceof ConfigDelayedMerge || v instanceof ConfigDelayedMergeObject) + throw new ConfigException.BugOrBroken( + "placed nested DelayedMerge in a ConfigDelayedMerge, should have consolidated stack"); + } + } + + ConfigDelayedMerge(ConfigOrigin origin, List stack) { + this(origin, stack, false /* ignoresFallbacks */); + } + + @Override + public ConfigValueType valueType() { + throw new ConfigException.NotResolved( + "called valueType() on value with unresolved substitutions, need to resolve first"); + } + + @Override + public Object unwrapped() { + throw new ConfigException.NotResolved( + "called unwrapped() on value with unresolved substitutions, need to resolve first"); + } + + @Override + AbstractConfigValue resolveSubstitutions(SubstitutionResolver resolver, + int depth, ConfigResolveOptions options) { + return resolveSubstitutions(stack, resolver, depth, options); + } + + // static method also used by ConfigDelayedMergeObject + static AbstractConfigValue resolveSubstitutions( + List stack, SubstitutionResolver resolver, + int depth, ConfigResolveOptions options) { + // to resolve substitutions, we need to recursively resolve + // the stack of stuff to merge, and merge the stack so + // we won't be a delayed merge anymore. 
+ + AbstractConfigValue merged = null; + for (AbstractConfigValue v : stack) { + AbstractConfigValue resolved = resolver.resolve(v, depth, options); + if (merged == null) + merged = resolved; + else + merged = merged.withFallback(resolved); + } + + return merged; + } + + @Override + ResolveStatus resolveStatus() { + return ResolveStatus.UNRESOLVED; + } + + @Override + ConfigDelayedMerge relativized(Path prefix) { + List newStack = new ArrayList(); + for (AbstractConfigValue o : stack) { + newStack.add(o.relativized(prefix)); + } + return new ConfigDelayedMerge(origin(), newStack, ignoresFallbacks); + } + + @Override + protected boolean ignoresFallbacks() { + return ignoresFallbacks; + } + + @Override + protected final ConfigDelayedMerge mergedWithTheUnmergeable(Unmergeable fallback) { + if (ignoresFallbacks) + throw new ConfigException.BugOrBroken("should not be reached"); + + // if we turn out to be an object, and the fallback also does, + // then a merge may be required; delay until we resolve. + List newStack = new ArrayList(); + newStack.addAll(stack); + newStack.addAll(fallback.unmergedValues()); + return new ConfigDelayedMerge(AbstractConfigObject.mergeOrigins(newStack), newStack, + ((AbstractConfigValue) fallback).ignoresFallbacks()); + } + + @Override + protected final ConfigDelayedMerge mergedWithObject(AbstractConfigObject fallback) { + if (ignoresFallbacks) + throw new ConfigException.BugOrBroken("should not be reached"); + + // if we turn out to be an object, and the fallback also does, + // then a merge may be required; delay until we resolve. 
+ List newStack = new ArrayList(); + newStack.addAll(stack); + newStack.add(fallback); + return new ConfigDelayedMerge(AbstractConfigObject.mergeOrigins(newStack), newStack, + fallback.ignoresFallbacks()); + } + + @Override + public Collection unmergedValues() { + return stack; + } + + @Override + protected boolean canEqual(Object other) { + return other instanceof ConfigDelayedMerge; + } + + @Override + public boolean equals(Object other) { + // note that "origin" is deliberately NOT part of equality + if (other instanceof ConfigDelayedMerge) { + return canEqual(other) + && this.stack.equals(((ConfigDelayedMerge) other).stack); + } else { + return false; + } + } + + @Override + public int hashCode() { + // note that "origin" is deliberately NOT part of equality + return stack.hashCode(); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("DELAYED_MERGE"); + sb.append("("); + for (Object s : stack) { + sb.append(s.toString()); + sb.append(","); + } + sb.setLength(sb.length() - 1); // chop comma + sb.append(")"); + return sb.toString(); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMergeObject.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMergeObject.java new file mode 100644 index 0000000000..6f381afc48 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMergeObject.java @@ -0,0 +1,204 @@ +/** + * Copyright (C) 2011 Typesafe Inc. 
+ */ +package com.typesafe.config.impl; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigMergeable; +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigResolveOptions; +import com.typesafe.config.ConfigValue; + +// This is just like ConfigDelayedMerge except we know statically +// that it will turn out to be an object. +class ConfigDelayedMergeObject extends AbstractConfigObject implements + Unmergeable { + + final private List stack; + final private boolean ignoresFallbacks; + + ConfigDelayedMergeObject(ConfigOrigin origin, + List stack) { + this(origin, stack, false /* ignoresFallbacks */); + } + + ConfigDelayedMergeObject(ConfigOrigin origin, List stack, + boolean ignoresFallbacks) { + super(origin); + this.stack = stack; + this.ignoresFallbacks = ignoresFallbacks; + + if (stack.isEmpty()) + throw new ConfigException.BugOrBroken( + "creating empty delayed merge object"); + if (!(stack.get(0) instanceof AbstractConfigObject)) + throw new ConfigException.BugOrBroken( + "created a delayed merge object not guaranteed to be an object"); + + for (AbstractConfigValue v : stack) { + if (v instanceof ConfigDelayedMerge || v instanceof ConfigDelayedMergeObject) + throw new ConfigException.BugOrBroken( + "placed nested DelayedMerge in a ConfigDelayedMergeObject, should have consolidated stack"); + } + } + + @Override + protected ConfigDelayedMergeObject newCopy(ResolveStatus status, + boolean ignoresFallbacks) { + if (status != resolveStatus()) + throw new ConfigException.BugOrBroken( + "attempt to create resolved ConfigDelayedMergeObject"); + return new ConfigDelayedMergeObject(origin(), stack, ignoresFallbacks); + } + + @Override + AbstractConfigObject resolveSubstitutions(SubstitutionResolver resolver, + int depth, ConfigResolveOptions options) { + AbstractConfigValue merged = 
ConfigDelayedMerge.resolveSubstitutions( + stack, resolver, depth, + options); + if (merged instanceof AbstractConfigObject) { + return (AbstractConfigObject) merged; + } else { + throw new ConfigException.BugOrBroken( + "somehow brokenly merged an object and didn't get an object"); + } + } + + @Override + ResolveStatus resolveStatus() { + return ResolveStatus.UNRESOLVED; + } + + @Override + ConfigDelayedMergeObject relativized(Path prefix) { + List newStack = new ArrayList(); + for (AbstractConfigValue o : stack) { + newStack.add(o.relativized(prefix)); + } + return new ConfigDelayedMergeObject(origin(), newStack, + ignoresFallbacks); + } + + @Override + protected boolean ignoresFallbacks() { + return ignoresFallbacks; + } + + @Override + protected ConfigDelayedMergeObject mergedWithObject(AbstractConfigObject fallback) { + if (ignoresFallbacks) + throw new ConfigException.BugOrBroken("should not be reached"); + + // since we are an object, and the fallback is, we'll need to + // merge the fallback once we resolve. 
+ List newStack = new ArrayList(); + newStack.addAll(stack); + newStack.add(fallback); + return new ConfigDelayedMergeObject(AbstractConfigObject.mergeOrigins(newStack), newStack, + fallback.ignoresFallbacks()); + } + + @Override + public ConfigDelayedMergeObject withFallback(ConfigMergeable mergeable) { + return (ConfigDelayedMergeObject) super.withFallback(mergeable); + } + + @Override + public Collection unmergedValues() { + return stack; + } + + @Override + protected boolean canEqual(Object other) { + return other instanceof ConfigDelayedMergeObject; + } + + @Override + public boolean equals(Object other) { + // note that "origin" is deliberately NOT part of equality + if (other instanceof ConfigDelayedMergeObject) { + return canEqual(other) + && this.stack + .equals(((ConfigDelayedMergeObject) other).stack); + } else { + return false; + } + } + + @Override + public int hashCode() { + // note that "origin" is deliberately NOT part of equality + return stack.hashCode(); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("DELAYED_MERGE_OBJECT"); + sb.append("("); + for (Object s : stack) { + sb.append(s.toString()); + sb.append(","); + } + sb.setLength(sb.length() - 1); // chop comma + sb.append(")"); + return sb.toString(); + } + + private static ConfigException notResolved() { + return new ConfigException.NotResolved( + "bug: this object has not had substitutions resolved, so can't be used"); + } + + @Override + public Map unwrapped() { + throw notResolved(); + } + + @Override + public boolean containsKey(Object key) { + throw notResolved(); + } + + @Override + public boolean containsValue(Object value) { + throw notResolved(); + } + + @Override + public Set> entrySet() { + throw notResolved(); + } + + @Override + public boolean isEmpty() { + throw notResolved(); + } + + @Override + public Set keySet() { + throw notResolved(); + } + + @Override + public int size() { + throw notResolved(); + } + + @Override + 
public Collection values() { + throw notResolved(); + } + + @Override + protected AbstractConfigValue peek(String key) { + throw notResolved(); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDouble.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDouble.java new file mode 100644 index 0000000000..3317974453 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDouble.java @@ -0,0 +1,46 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigValueType; + +final class ConfigDouble extends ConfigNumber { + + final private double value; + + ConfigDouble(ConfigOrigin origin, double value, String originalText) { + super(origin, originalText); + this.value = value; + } + + @Override + public ConfigValueType valueType() { + return ConfigValueType.NUMBER; + } + + @Override + public Double unwrapped() { + return value; + } + + @Override + String transformToString() { + String s = super.transformToString(); + if (s == null) + return Double.toString(value); + else + return s; + } + + @Override + protected long longValue() { + return (long) value; + } + + @Override + protected double doubleValue() { + return value; + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigImpl.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigImpl.java new file mode 100644 index 0000000000..8c2bd01ec1 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigImpl.java @@ -0,0 +1,409 @@ +/** + * Copyright (C) 2011 Typesafe Inc. 
+ */ +package com.typesafe.config.impl; + +import java.io.File; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import com.typesafe.config.Config; +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigIncludeContext; +import com.typesafe.config.ConfigIncluder; +import com.typesafe.config.ConfigObject; +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigParseOptions; +import com.typesafe.config.ConfigParseable; +import com.typesafe.config.ConfigRoot; +import com.typesafe.config.ConfigSyntax; +import com.typesafe.config.ConfigValue; + +/** This is public but is only supposed to be used by the "config" package */ +public class ConfigImpl { + + private interface NameSource { + ConfigParseable nameToParseable(String name); + } + + // this function is a little tricky because there are three places we're + // trying to use it; for 'include "basename"' in a .conf file, for + // loading app.{conf,json,properties} from classpath, and for + // loading app.{conf,json,properties} from the filesystem. 
+ private static ConfigObject fromBasename(NameSource source, String name, + ConfigParseOptions options) { + ConfigObject obj; + if (name.endsWith(".conf") || name.endsWith(".json") + || name.endsWith(".properties")) { + ConfigParseable p = source.nameToParseable(name); + + if (p != null) { + obj = p.parse(p.options().setAllowMissing( + options.getAllowMissing())); + } else { + obj = SimpleConfigObject.emptyMissing(new SimpleConfigOrigin( + name)); + } + } else { + ConfigParseable confHandle = source.nameToParseable(name + ".conf"); + ConfigParseable jsonHandle = source.nameToParseable(name + ".json"); + ConfigParseable propsHandle = source.nameToParseable(name + + ".properties"); + + if (!options.getAllowMissing() && confHandle == null + && jsonHandle == null && propsHandle == null) { + throw new ConfigException.IO(new SimpleConfigOrigin(name), + "No config files {.conf,.json,.properties} found"); + } + + ConfigSyntax syntax = options.getSyntax(); + + obj = SimpleConfigObject.empty(new SimpleConfigOrigin(name)); + if (confHandle != null + && (syntax == null || syntax == ConfigSyntax.CONF)) { + obj = confHandle.parse(confHandle.options() + .setAllowMissing(true).setSyntax(ConfigSyntax.CONF)); + } + + if (jsonHandle != null + && (syntax == null || syntax == ConfigSyntax.JSON)) { + ConfigObject parsed = jsonHandle.parse(jsonHandle + .options().setAllowMissing(true) + .setSyntax(ConfigSyntax.JSON)); + obj = obj.withFallback(parsed); + } + + if (propsHandle != null + && (syntax == null || syntax == ConfigSyntax.PROPERTIES)) { + ConfigObject parsed = propsHandle.parse(propsHandle.options() + .setAllowMissing(true) + .setSyntax(ConfigSyntax.PROPERTIES)); + obj = obj.withFallback(parsed); + } + } + + return obj; + } + + private static String makeResourceBasename(Path path) { + StringBuilder sb = new StringBuilder("/"); + String next = path.first(); + Path remaining = path.remainder(); + while (next != null) { + sb.append(next); + sb.append('-'); + + if (remaining == null) 
+ break; + + next = remaining.first(); + remaining = remaining.remainder(); + } + sb.setLength(sb.length() - 1); // chop extra hyphen + return sb.toString(); + } + + /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ + public static ConfigObject parseResourcesForPath(String expression, + final ConfigParseOptions baseOptions) { + Path path = Parser.parsePath(expression); + String basename = makeResourceBasename(path); + return parseResourceAnySyntax(ConfigImpl.class, basename, baseOptions); + } + + /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ + public static ConfigObject parseResourceAnySyntax(final Class klass, + String resourceBasename, final ConfigParseOptions baseOptions) { + NameSource source = new NameSource() { + @Override + public ConfigParseable nameToParseable(String name) { + return Parseable.newResource(klass, name, baseOptions); + } + }; + return fromBasename(source, resourceBasename, baseOptions); + } + + /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ + public static ConfigObject parseFileAnySyntax(final File basename, + final ConfigParseOptions baseOptions) { + NameSource source = new NameSource() { + @Override + public ConfigParseable nameToParseable(String name) { + return Parseable.newFile(new File(name), baseOptions); + } + }; + return fromBasename(source, basename.getPath(), baseOptions); + } + + /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ + public static ConfigRoot emptyRoot(String rootPath, String originDescription) { + String desc = originDescription != null ? originDescription : rootPath; + return emptyObject(desc).toConfig().asRoot( + Path.newPath(rootPath)); + } + + static AbstractConfigObject emptyObject(String originDescription) { + ConfigOrigin origin = originDescription != null ? 
new SimpleConfigOrigin( + originDescription) : null; + return emptyObject(origin); + } + + /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ + public static Config emptyConfig(String originDescription) { + return emptyObject(originDescription).toConfig(); + } + + static AbstractConfigObject empty(ConfigOrigin origin) { + return emptyObject(origin); + } + + // default origin for values created with fromAnyRef and no origin specified + final private static ConfigOrigin defaultValueOrigin = new SimpleConfigOrigin( + "hardcoded value"); + final private static ConfigBoolean defaultTrueValue = new ConfigBoolean( + defaultValueOrigin, true); + final private static ConfigBoolean defaultFalseValue = new ConfigBoolean( + defaultValueOrigin, false); + final private static ConfigNull defaultNullValue = new ConfigNull( + defaultValueOrigin); + final private static SimpleConfigList defaultEmptyList = new SimpleConfigList( + defaultValueOrigin, Collections. emptyList()); + final private static SimpleConfigObject defaultEmptyObject = SimpleConfigObject + .empty(defaultValueOrigin); + + private static SimpleConfigList emptyList(ConfigOrigin origin) { + if (origin == null || origin == defaultValueOrigin) + return defaultEmptyList; + else + return new SimpleConfigList(origin, + Collections. 
emptyList()); + } + + private static AbstractConfigObject emptyObject(ConfigOrigin origin) { + // we want null origin to go to SimpleConfigObject.empty() to get the + // origin "empty config" rather than "hardcoded value" + if (origin == defaultValueOrigin) + return defaultEmptyObject; + else + return SimpleConfigObject.empty(origin); + } + + private static ConfigOrigin valueOrigin(String originDescription) { + if (originDescription == null) + return defaultValueOrigin; + else + return new SimpleConfigOrigin(originDescription); + } + + /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ + public static ConfigValue fromAnyRef(Object object, String originDescription) { + ConfigOrigin origin = valueOrigin(originDescription); + return fromAnyRef(object, origin, FromMapMode.KEYS_ARE_KEYS); + } + + /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ + public static ConfigObject fromPathMap( + Map pathMap, String originDescription) { + ConfigOrigin origin = valueOrigin(originDescription); + return (ConfigObject) fromAnyRef(pathMap, origin, + FromMapMode.KEYS_ARE_PATHS); + } + + static AbstractConfigValue fromAnyRef(Object object, ConfigOrigin origin, + FromMapMode mapMode) { + if (origin == null) + throw new ConfigException.BugOrBroken( + "origin not supposed to be null"); + + if (object == null) { + if (origin != defaultValueOrigin) + return new ConfigNull(origin); + else + return defaultNullValue; + } else if (object instanceof Boolean) { + if (origin != defaultValueOrigin) { + return new ConfigBoolean(origin, (Boolean) object); + } else if ((Boolean) object) { + return defaultTrueValue; + } else { + return defaultFalseValue; + } + } else if (object instanceof String) { + return new ConfigString(origin, (String) object); + } else if (object instanceof Number) { + // here we always keep the same type that was passed to us, + // rather than figuring out if a Long would fit in an Int + // or a Double has no fractional part. i.e. 
deliberately + // not using ConfigNumber.newNumber() when we have a + // Double, Integer, or Long. + if (object instanceof Double) { + return new ConfigDouble(origin, (Double) object, null); + } else if (object instanceof Integer) { + return new ConfigInt(origin, (Integer) object, null); + } else if (object instanceof Long) { + return new ConfigLong(origin, (Long) object, null); + } else { + return ConfigNumber.newNumber(origin, + ((Number) object).doubleValue(), null); + } + } else if (object instanceof Map) { + if (((Map) object).isEmpty()) + return emptyObject(origin); + + if (mapMode == FromMapMode.KEYS_ARE_KEYS) { + Map values = new HashMap(); + for (Map.Entry entry : ((Map) object).entrySet()) { + Object key = entry.getKey(); + if (!(key instanceof String)) + throw new ConfigException.BugOrBroken( + "bug in method caller: not valid to create ConfigObject from map with non-String key: " + + key); + AbstractConfigValue value = fromAnyRef(entry.getValue(), + origin, mapMode); + values.put((String) key, value); + } + + return new SimpleConfigObject(origin, values); + } else { + return PropertiesParser.fromPathMap(origin, (Map) object); + } + } else if (object instanceof Iterable) { + Iterator i = ((Iterable) object).iterator(); + if (!i.hasNext()) + return emptyList(origin); + + List values = new ArrayList(); + while (i.hasNext()) { + AbstractConfigValue v = fromAnyRef(i.next(), origin, mapMode); + values.add(v); + } + + return new SimpleConfigList(origin, values); + } else { + throw new ConfigException.BugOrBroken( + "bug in method caller: not valid to create ConfigValue from: " + + object); + } + } + + /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ + public static ConfigRoot systemPropertiesRoot(String rootPath) { + Path path = Parser.parsePath(rootPath); + try { + return systemPropertiesAsConfigObject().toConfig().getConfig(rootPath) + .asRoot(path); + } catch (ConfigException.Missing e) { + return emptyObject("system 
properties").toConfig().asRoot(path); + } + } + + private static class SimpleIncluder implements ConfigIncluder { + + private ConfigIncluder fallback; + + SimpleIncluder(ConfigIncluder fallback) { + this.fallback = fallback; + } + + @Override + public ConfigObject include(final ConfigIncludeContext context, + String name) { + NameSource source = new NameSource() { + @Override + public ConfigParseable nameToParseable(String name) { + return context.relativeTo(name); + } + }; + + ConfigObject obj = fromBasename(source, name, ConfigParseOptions + .defaults().setAllowMissing(true)); + + // now use the fallback includer if any and merge + // its result. + if (fallback != null) { + return obj.withFallback(fallback.include(context, name)); + } else { + return obj; + } + } + + @Override + public ConfigIncluder withFallback(ConfigIncluder fallback) { + if (this == fallback) { + throw new ConfigException.BugOrBroken( + "trying to create includer cycle"); + } else if (this.fallback == fallback) { + return this; + } else if (this.fallback != null) { + return new SimpleIncluder(this.fallback.withFallback(fallback)); + } else { + return new SimpleIncluder(fallback); + } + } + } + + private static ConfigIncluder defaultIncluder = null; + + synchronized static ConfigIncluder defaultIncluder() { + if (defaultIncluder == null) { + defaultIncluder = new SimpleIncluder(null); + } + return defaultIncluder; + } + + private static AbstractConfigObject systemProperties = null; + + synchronized static AbstractConfigObject systemPropertiesAsConfigObject() { + if (systemProperties == null) { + systemProperties = loadSystemProperties(); + } + return systemProperties; + } + + private static AbstractConfigObject loadSystemProperties() { + return (AbstractConfigObject) Parseable.newProperties( + System.getProperties(), + ConfigParseOptions.defaults().setOriginDescription( + "system properties")).parse(); + } + + /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ + public 
static Config systemPropertiesAsConfig() { + return systemPropertiesAsConfigObject().toConfig(); + } + + // this is a hack to let us set system props in the test suite + synchronized static void dropSystemPropertiesConfig() { + systemProperties = null; + } + + private static AbstractConfigObject envVariables = null; + + synchronized static AbstractConfigObject envVariablesAsConfigObject() { + if (envVariables == null) { + envVariables = loadEnvVariables(); + } + return envVariables; + } + + private static AbstractConfigObject loadEnvVariables() { + Map env = System.getenv(); + Map m = new HashMap(); + for (Map.Entry entry : env.entrySet()) { + String key = entry.getKey(); + m.put(key, new ConfigString( + new SimpleConfigOrigin("env var " + key), entry.getValue())); + } + return new SimpleConfigObject(new SimpleConfigOrigin("env variables"), + m, ResolveStatus.RESOLVED, false /* ignoresFallbacks */); + } + + /** For use ONLY by library internals, DO NOT TOUCH not guaranteed ABI */ + public static Config envVariablesAsConfig() { + return envVariablesAsConfigObject().toConfig(); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigInt.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigInt.java new file mode 100644 index 0000000000..4ce4a58545 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigInt.java @@ -0,0 +1,46 @@ +/** + * Copyright (C) 2011 Typesafe Inc. 
+ */ +package com.typesafe.config.impl; + +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigValueType; + +final class ConfigInt extends ConfigNumber { + + final private int value; + + ConfigInt(ConfigOrigin origin, int value, String originalText) { + super(origin, originalText); + this.value = value; + } + + @Override + public ConfigValueType valueType() { + return ConfigValueType.NUMBER; + } + + @Override + public Integer unwrapped() { + return value; + } + + @Override + String transformToString() { + String s = super.transformToString(); + if (s == null) + return Integer.toString(value); + else + return s; + } + + @Override + protected long longValue() { + return value; + } + + @Override + protected double doubleValue() { + return value; + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigLong.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigLong.java new file mode 100644 index 0000000000..feb3897bb3 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigLong.java @@ -0,0 +1,46 @@ +/** + * Copyright (C) 2011 Typesafe Inc. 
+ */ +package com.typesafe.config.impl; + +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigValueType; + +final class ConfigLong extends ConfigNumber { + + final private long value; + + ConfigLong(ConfigOrigin origin, long value, String originalText) { + super(origin, originalText); + this.value = value; + } + + @Override + public ConfigValueType valueType() { + return ConfigValueType.NUMBER; + } + + @Override + public Long unwrapped() { + return value; + } + + @Override + String transformToString() { + String s = super.transformToString(); + if (s == null) + return Long.toString(value); + else + return s; + } + + @Override + protected long longValue() { + return value; + } + + @Override + protected double doubleValue() { + return value; + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNull.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNull.java new file mode 100644 index 0000000000..ea8976e340 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNull.java @@ -0,0 +1,37 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigValueType; + +/** + * This exists because sometimes null is not the same as missing. Specifically, + * if a value is set to null we can give a better error message (indicating + * where it was set to null) in case someone asks for the value. Also, null + * overrides values set "earlier" in the search path, while missing values do + * not. 
+ * + */ +final class ConfigNull extends AbstractConfigValue { + + ConfigNull(ConfigOrigin origin) { + super(origin); + } + + @Override + public ConfigValueType valueType() { + return ConfigValueType.NULL; + } + + @Override + public Object unwrapped() { + return null; + } + + @Override + String transformToString() { + return "null"; + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNumber.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNumber.java new file mode 100644 index 0000000000..3c01d9b950 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigNumber.java @@ -0,0 +1,99 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigOrigin; + +abstract class ConfigNumber extends AbstractConfigValue { + // This is so when we concatenate a number into a string (say it appears in + // a sentence) we always have it exactly as the person typed it into the + // config file. It's purely cosmetic; equals/hashCode don't consider this + // for example. 
+ final private String originalText; + + protected ConfigNumber(ConfigOrigin origin, String originalText) { + super(origin); + this.originalText = originalText; + } + + @Override + public abstract Number unwrapped(); + + @Override + String transformToString() { + return originalText; + } + + int intValueRangeChecked(String path) { + long l = longValue(); + if (l < Integer.MIN_VALUE || l > Integer.MAX_VALUE) { + throw new ConfigException.WrongType(origin(), path, "32-bit integer", + "out-of-range value " + l); + } + return (int) l; + } + + protected abstract long longValue(); + + protected abstract double doubleValue(); + + private boolean isWhole() { + long asLong = longValue(); + return asLong == doubleValue(); + } + + @Override + protected boolean canEqual(Object other) { + return other instanceof ConfigNumber; + } + + @Override + public boolean equals(Object other) { + // note that "origin" is deliberately NOT part of equality + if (canEqual(other)) { + ConfigNumber n = (ConfigNumber) other; + if (isWhole()) { + return n.isWhole() && this.longValue() == n.longValue(); + } else { + return (!n.isWhole()) && this.doubleValue() == n.doubleValue(); + } + } else { + return false; + } + } + + @Override + public int hashCode() { + // note that "origin" is deliberately NOT part of equality + + // this matches what standard Long.hashCode and Double.hashCode + // do, though I don't think it really matters. 
+ long asLong; + if (isWhole()) { + asLong = longValue(); + } else { + asLong = Double.doubleToLongBits(doubleValue()); + } + return (int) (asLong ^ (asLong >>> 32)); + } + + static ConfigNumber newNumber(ConfigOrigin origin, long number, + String originalText) { + if (number <= Integer.MAX_VALUE && number >= Integer.MIN_VALUE) + return new ConfigInt(origin, (int) number, originalText); + else + return new ConfigLong(origin, number, originalText); + } + + static ConfigNumber newNumber(ConfigOrigin origin, double number, + String originalText) { + long asLong = (long) number; + if (asLong == number) { + return newNumber(origin, asLong, originalText); + } else { + return new ConfigDouble(origin, number, originalText); + } + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigString.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigString.java new file mode 100644 index 0000000000..e054b6c91f --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigString.java @@ -0,0 +1,32 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigValueType; + +final class ConfigString extends AbstractConfigValue { + + final private String value; + + ConfigString(ConfigOrigin origin, String value) { + super(origin); + this.value = value; + } + + @Override + public ConfigValueType valueType() { + return ConfigValueType.STRING; + } + + @Override + public String unwrapped() { + return value; + } + + @Override + String transformToString() { + return value; + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigSubstitution.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigSubstitution.java new file mode 100644 index 0000000000..2c5531e74f --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigSubstitution.java @@ -0,0 +1,258 @@ +/** + * Copyright (C) 2011 Typesafe Inc. 
+ */ +package com.typesafe.config.impl; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigResolveOptions; +import com.typesafe.config.ConfigValue; +import com.typesafe.config.ConfigValueType; + +/** + * A ConfigSubstitution represents a value with one or more substitutions in it; + * it can resolve to a value of any type, though if the substitution has more + * than one piece it always resolves to a string via value concatenation. + */ +final class ConfigSubstitution extends AbstractConfigValue implements + Unmergeable { + + // this is a list of String and Path where the Path + // have to be resolved to values, then if there's more + // than one piece everything is stringified and concatenated + final private List pieces; + // the length of any prefixes added with relativized() + final private int prefixLength; + final private boolean ignoresFallbacks; + + ConfigSubstitution(ConfigOrigin origin, List pieces) { + this(origin, pieces, 0, false); + } + + private ConfigSubstitution(ConfigOrigin origin, List pieces, + int prefixLength, boolean ignoresFallbacks) { + super(origin); + this.pieces = pieces; + this.prefixLength = prefixLength; + this.ignoresFallbacks = ignoresFallbacks; + } + + @Override + public ConfigValueType valueType() { + throw new ConfigException.NotResolved( + "tried to get value type on an unresolved substitution: " + + this); + } + + @Override + public Object unwrapped() { + throw new ConfigException.NotResolved( + "tried to unwrap an unresolved substitution: " + this); + } + + @Override + protected ConfigSubstitution newCopy(boolean ignoresFallbacks) { + return new ConfigSubstitution(origin(), pieces, prefixLength, ignoresFallbacks); + } + + @Override + protected boolean ignoresFallbacks() { + return ignoresFallbacks; + } + + @Override + protected 
AbstractConfigValue mergedWithTheUnmergeable(Unmergeable fallback) { + if (ignoresFallbacks) + throw new ConfigException.BugOrBroken("should not be reached"); + + // if we turn out to be an object, and the fallback also does, + // then a merge may be required; delay until we resolve. + List newStack = new ArrayList(); + newStack.add(this); + newStack.addAll(fallback.unmergedValues()); + return new ConfigDelayedMerge(AbstractConfigObject.mergeOrigins(newStack), newStack, + ((AbstractConfigValue) fallback).ignoresFallbacks()); + } + + @Override + protected AbstractConfigValue mergedWithObject(AbstractConfigObject fallback) { + if (ignoresFallbacks) + throw new ConfigException.BugOrBroken("should not be reached"); + + // if we turn out to be an object, and the fallback also does, + // then a merge may be required; delay until we resolve. + List newStack = new ArrayList(); + newStack.add(this); + newStack.add(fallback); + return new ConfigDelayedMerge(AbstractConfigObject.mergeOrigins(newStack), newStack, + fallback.ignoresFallbacks()); + } + + @Override + public Collection unmergedValues() { + return Collections.singleton(this); + } + + List pieces() { + return pieces; + } + + // larger than anyone would ever want + private static final int MAX_DEPTH = 100; + + private ConfigValue findInObject(AbstractConfigObject root, + SubstitutionResolver resolver, /* null if we should not have refs */ + Path subst, int depth, ConfigResolveOptions options) { + if (depth > MAX_DEPTH) { + throw new ConfigException.BadValue(origin(), subst.render(), + "Substitution ${" + subst.render() + + "} is part of a cycle of substitutions"); + } + + ConfigValue result = root.peekPath(subst, resolver, depth, options); + + if (result instanceof ConfigSubstitution) { + throw new ConfigException.BugOrBroken( + "peek or peekPath returned an unresolved substitution"); + } + + return result; + } + + private ConfigValue resolve(SubstitutionResolver resolver, Path subst, + int depth, 
ConfigResolveOptions options) { + ConfigValue result = findInObject(resolver.root(), resolver, subst, + depth, options); + + // when looking up system props and env variables, + // we don't want the prefix that was added when + // we were included in another file. + Path unprefixed = subst.subPath(prefixLength); + + if (result == null && options.getUseSystemProperties()) { + result = findInObject(ConfigImpl.systemPropertiesAsConfigObject(), null, + unprefixed, depth, options); + } + + if (result == null && options.getUseSystemEnvironment()) { + result = findInObject(ConfigImpl.envVariablesAsConfigObject(), null, + unprefixed, depth, options); + } + + if (result == null) { + result = new ConfigNull(origin()); + } + + return result; + } + + private ConfigValue resolve(SubstitutionResolver resolver, int depth, + ConfigResolveOptions options) { + if (pieces.size() > 1) { + // need to concat everything into a string + StringBuilder sb = new StringBuilder(); + for (Object p : pieces) { + if (p instanceof String) { + sb.append((String) p); + } else { + ConfigValue v = resolve(resolver, (Path) p, depth, options); + switch (v.valueType()) { + case NULL: + // nothing; becomes empty string + break; + case LIST: + case OBJECT: + // cannot substitute lists and objects into strings + throw new ConfigException.WrongType(v.origin(), + ((Path) p).render(), + "not a list or object", v.valueType().name()); + default: + sb.append(((AbstractConfigValue) v).transformToString()); + } + } + } + return new ConfigString(origin(), sb.toString()); + } else { + if (!(pieces.get(0) instanceof Path)) + throw new ConfigException.BugOrBroken( + "ConfigSubstitution should never contain a single String piece"); + return resolve(resolver, (Path) pieces.get(0), depth, options); + } + } + + @Override + AbstractConfigValue resolveSubstitutions(SubstitutionResolver resolver, + int depth, + ConfigResolveOptions options) { + // only ConfigSubstitution adds to depth here, because the depth + // is the 
substitution depth not the recursion depth + AbstractConfigValue resolved = (AbstractConfigValue) resolve(resolver, + depth + 1, options); + return resolved; + } + + @Override + ResolveStatus resolveStatus() { + return ResolveStatus.UNRESOLVED; + } + + // when you graft a substitution into another object, + // you have to prefix it with the location in that object + // where you grafted it; but save prefixLength so + // system property and env variable lookups don't get + // broken. + @Override + ConfigSubstitution relativized(Path prefix) { + List newPieces = new ArrayList(); + for (Object p : pieces) { + if (p instanceof Path) { + newPieces.add(((Path) p).prepend(prefix)); + } else { + newPieces.add(p); + } + } + return new ConfigSubstitution(origin(), newPieces, prefixLength + + prefix.length(), ignoresFallbacks); + } + + @Override + protected boolean canEqual(Object other) { + return other instanceof ConfigSubstitution; + } + + @Override + public boolean equals(Object other) { + // note that "origin" is deliberately NOT part of equality + if (other instanceof ConfigSubstitution) { + return canEqual(other) + && this.pieces.equals(((ConfigSubstitution) other).pieces); + } else { + return false; + } + } + + @Override + public int hashCode() { + // note that "origin" is deliberately NOT part of equality + return pieces.hashCode(); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("SUBST"); + sb.append("("); + for (Object p : pieces) { + sb.append(p.toString()); + sb.append(","); + } + sb.setLength(sb.length() - 1); // chop comma + sb.append(")"); + return sb.toString(); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigUtil.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigUtil.java new file mode 100644 index 0000000000..bfd8f05521 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigUtil.java @@ -0,0 +1,121 @@ +/** + * Copyright (C) 2011 Typesafe Inc. 
+ */ +package com.typesafe.config.impl; + + +/** This is public just for the "config" package to use, don't touch it */ +final public class ConfigUtil { + static boolean equalsHandlingNull(Object a, Object b) { + if (a == null && b != null) + return false; + else if (a != null && b == null) + return false; + else if (a == b) // catches null == null plus optimizes identity case + return true; + else + return a.equals(b); + } + + static String renderJsonString(String s) { + StringBuilder sb = new StringBuilder(); + sb.append('"'); + for (int i = 0; i < s.length(); ++i) { + char c = s.charAt(i); + switch (c) { + case '"': + sb.append("\\\""); + break; + case '\\': + sb.append("\\\\"); + break; + case '\n': + sb.append("\\n"); + break; + case '\b': + sb.append("\\b"); + break; + case '\f': + sb.append("\\f"); + break; + case '\r': + sb.append("\\r"); + break; + case '\t': + sb.append("\\t"); + break; + default: + if (Character.isISOControl(c)) + sb.append(String.format("\\u%04x", (int) c)); + else + sb.append(c); + } + } + sb.append('"'); + return sb.toString(); + } + + static boolean isWhitespace(int codepoint) { + switch (codepoint) { + // try to hit the most common ASCII ones first, then the nonbreaking + // spaces that Java brokenly leaves out of isWhitespace. + case ' ': + case '\n': + case '\u00A0': + case '\u2007': + case '\u202F': + return true; + default: + return Character.isWhitespace(codepoint); + } + } + + /** This is public just for the "config" package to use, don't touch it! */ + public static String unicodeTrim(String s) { + // this is dumb because it looks like there aren't any whitespace + // characters that need surrogate encoding. But, points for + // pedantic correctness! It's future-proof or something. + // String.trim() actually is broken, since there are plenty of + // non-ASCII whitespace characters. 
+ final int length = s.length(); + if (length == 0) + return s; + + int start = 0; + while (start < length) { + char c = s.charAt(start); + if (c == ' ' || c == '\n') { + start += 1; + } else { + int cp = s.codePointAt(start); + if (isWhitespace(cp)) + start += Character.charCount(cp); + else + break; + } + } + + int end = length; + while (end > start) { + char c = s.charAt(end - 1); + if (c == ' ' || c == '\n') { + --end; + } else { + int cp; + int delta; + if (Character.isLowSurrogate(c)) { + cp = s.codePointAt(end - 2); + delta = 2; + } else { + cp = s.codePointAt(end - 1); + delta = 1; + } + if (isWhitespace(cp)) + end -= delta; + else + break; + } + } + return s.substring(start, end); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/DefaultTransformer.java b/akka-actor/src/main/java/com/typesafe/config/impl/DefaultTransformer.java new file mode 100644 index 0000000000..4391814acb --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/DefaultTransformer.java @@ -0,0 +1,81 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import com.typesafe.config.ConfigValueType; + +/** + * Default automatic type transformations. + */ +final class DefaultTransformer { + + static AbstractConfigValue transform(AbstractConfigValue value, + ConfigValueType requested) { + if (value.valueType() == ConfigValueType.STRING) { + String s = (String) value.unwrapped(); + switch (requested) { + case NUMBER: + try { + Long v = Long.parseLong(s); + return new ConfigLong(value.origin(), v, s); + } catch (NumberFormatException e) { + // try Double + } + try { + Double v = Double.parseDouble(s); + return new ConfigDouble(value.origin(), v, s); + } catch (NumberFormatException e) { + // oh well. 
+ } + break; + case NULL: + if (s.equals("null")) + return new ConfigNull(value.origin()); + break; + case BOOLEAN: + if (s.equals("true") || s.equals("yes") || s.equals("on")) { + return new ConfigBoolean(value.origin(), true); + } else if (s.equals("false") || s.equals("no") + || s.equals("off")) { + return new ConfigBoolean(value.origin(), false); + } + break; + case LIST: + // can't go STRING to LIST automatically + break; + case OBJECT: + // can't go STRING to OBJECT automatically + break; + case STRING: + // no-op STRING to STRING + break; + } + } else if (requested == ConfigValueType.STRING) { + // if we converted null to string here, then you wouldn't properly + // get a missing-value error if you tried to get a null value + // as a string. + switch (value.valueType()) { + case NUMBER: // FALL THROUGH + case BOOLEAN: + return new ConfigString(value.origin(), + value.transformToString()); + case NULL: + // want to be sure this throws instead of returning "null" as a + // string + break; + case OBJECT: + // no OBJECT to STRING automatically + break; + case LIST: + // no LIST to STRING automatically + break; + case STRING: + // no-op STRING to STRING + break; + } + } + + return value; + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/FromMapMode.java b/akka-actor/src/main/java/com/typesafe/config/impl/FromMapMode.java new file mode 100644 index 0000000000..ce6c3e3f0a --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/FromMapMode.java @@ -0,0 +1,8 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +enum FromMapMode { + KEYS_ARE_PATHS, KEYS_ARE_KEYS +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Parseable.java b/akka-actor/src/main/java/com/typesafe/config/impl/Parseable.java new file mode 100644 index 0000000000..eef9e75b2a --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/Parseable.java @@ -0,0 +1,528 @@ +/** + * Copyright (C) 2011 Typesafe Inc. 
+ */ +package com.typesafe.config.impl; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.FilterReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.io.StringReader; +import java.io.UnsupportedEncodingException; +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.util.Iterator; +import java.util.Properties; + +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigIncludeContext; +import com.typesafe.config.ConfigObject; +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigParseOptions; +import com.typesafe.config.ConfigParseable; +import com.typesafe.config.ConfigSyntax; +import com.typesafe.config.ConfigValue; + +/** + * This is public but it's only for use by the config package; DO NOT TOUCH. The + * point of this class is to avoid "propagating" each overload on + * "thing which can be parsed" through multiple interfaces. Most interfaces can + * have just one overload that takes a Parseable. Also it's used as an abstract + * "resource handle" in the ConfigIncluder interface. 
+ */ +public abstract class Parseable implements ConfigParseable { + private ConfigIncludeContext includeContext; + private ConfigParseOptions initialOptions; + + protected Parseable() { + + } + + private ConfigParseOptions fixupOptions(ConfigParseOptions baseOptions) { + ConfigSyntax syntax = baseOptions.getSyntax(); + if (syntax == null) { + syntax = guessSyntax(); + } + if (syntax == null) { + syntax = ConfigSyntax.CONF; + } + ConfigParseOptions modified = baseOptions.setSyntax(syntax); + + if (modified.getOriginDescription() == null) + modified = modified.setOriginDescription(originDescription()); + + modified = modified.appendIncluder(ConfigImpl.defaultIncluder()); + + return modified; + } + + protected void postConstruct(ConfigParseOptions baseOptions) { + this.initialOptions = fixupOptions(baseOptions); + + this.includeContext = new ConfigIncludeContext() { + @Override + public ConfigParseable relativeTo(String filename) { + return Parseable.this.relativeTo(filename); + } + }; + } + + // the general idea is that any work should be in here, not in the + // constructor, + // so that exceptions are thrown from the public parse() function and not + // from the creation of the Parseable. Essentially this is a lazy field. + // The parser should close the reader when it's done with it. + // ALSO, IMPORTANT: if the file or URL is not found, this must throw. + // to support the "allow missing" feature. 
+ protected abstract Reader reader() throws IOException; + + ConfigSyntax guessSyntax() { + return null; + } + + ConfigParseable relativeTo(String filename) { + return null; + } + + ConfigIncludeContext includeContext() { + return includeContext; + } + + static AbstractConfigObject forceParsedToObject(ConfigValue value) { + if (value instanceof AbstractConfigObject) { + return (AbstractConfigObject) value; + } else { + throw new ConfigException.WrongType(value.origin(), "", + "object at file root", value.valueType().name()); + } + } + + @Override + public ConfigObject parse(ConfigParseOptions baseOptions) { + return forceParsedToObject(parseValue(baseOptions)); + } + + AbstractConfigValue parseValue(ConfigParseOptions baseOptions) { + // note that we are NOT using our "options" and "origin" fields, + // but using the ones from the passed-in options. The idea is that + // callers can get our original options and then parse with different + // ones if they want. + ConfigParseOptions options = fixupOptions(baseOptions); + ConfigOrigin origin = new SimpleConfigOrigin( + options.getOriginDescription()); + return parseValue(origin, options); + } + + protected AbstractConfigValue parseValue(ConfigOrigin origin, + ConfigParseOptions finalOptions) { + try { + Reader reader = reader(); + try { + if (finalOptions.getSyntax() == ConfigSyntax.PROPERTIES) { + return PropertiesParser.parse(reader, origin); + } else { + Iterator tokens = Tokenizer.tokenize(origin, reader, + finalOptions.getSyntax()); + return Parser.parse(tokens, origin, finalOptions, + includeContext()); + } + } finally { + reader.close(); + } + } catch (IOException e) { + if (finalOptions.getAllowMissing()) { + return SimpleConfigObject.emptyMissing(origin); + } else { + throw new ConfigException.IO(origin, e.getMessage(), e); + } + } + } + + public ConfigObject parse() { + return forceParsedToObject(parseValue(options())); + } + + AbstractConfigValue parseValue() { + return parseValue(options()); + } + + 
abstract String originDescription(); + + @Override + public URL url() { + return null; + } + + @Override + public ConfigParseOptions options() { + return initialOptions; + } + + @Override + public String toString() { + return getClass().getSimpleName(); + } + + private static ConfigSyntax syntaxFromExtension(String name) { + if (name.endsWith(".json")) + return ConfigSyntax.JSON; + else if (name.endsWith(".conf")) + return ConfigSyntax.CONF; + else if (name.endsWith(".properties")) + return ConfigSyntax.PROPERTIES; + else + return null; + } + + private static Reader readerFromStream(InputStream input) { + try { + // well, this is messed up. If we aren't going to close + // the passed-in InputStream then we have no way to + // close these readers. So maybe we should not have an + // InputStream version, only a Reader version. + Reader reader = new InputStreamReader(input, "UTF-8"); + return new BufferedReader(reader); + } catch (UnsupportedEncodingException e) { + throw new ConfigException.BugOrBroken( + "Java runtime does not support UTF-8", e); + } + } + + private static Reader doNotClose(Reader input) { + return new FilterReader(input) { + @Override + public void close() { + // NOTHING. + } + }; + } + + static URL relativeTo(URL url, String filename) { + // I'm guessing this completely fails on Windows, help wanted + if (new File(filename).isAbsolute()) + return null; + + try { + URI siblingURI = url.toURI(); + URI relative = new URI(filename); + + // this seems wrong, but it's documented that the last + // element of the path in siblingURI gets stripped out, + // so to get something in the same directory as + // siblingURI we just call resolve(). 
+ URL resolved = siblingURI.resolve(relative).toURL(); + + return resolved; + } catch (MalformedURLException e) { + return null; + } catch (URISyntaxException e) { + return null; + } catch (IllegalArgumentException e) { + return null; + } + } + + private final static class ParseableInputStream extends Parseable { + final private InputStream input; + + ParseableInputStream(InputStream input, ConfigParseOptions options) { + this.input = input; + postConstruct(options); + } + + @Override + protected Reader reader() { + return doNotClose(readerFromStream(input)); + } + + @Override + String originDescription() { + return "InputStream"; + } + } + + /** + * note that we will never close this stream; you have to do it when parsing + * is complete. + */ + public static Parseable newInputStream(InputStream input, + ConfigParseOptions options) { + return new ParseableInputStream(input, options); + } + + private final static class ParseableReader extends Parseable { + final private Reader reader; + + ParseableReader(Reader reader, ConfigParseOptions options) { + this.reader = reader; + postConstruct(options); + } + + @Override + protected Reader reader() { + return reader; + } + + @Override + String originDescription() { + return "Reader"; + } + } + + /** + * note that we will never close this reader; you have to do it when parsing + * is complete. 
+ */ + public static Parseable newReader(Reader reader, ConfigParseOptions options) { + return new ParseableReader(doNotClose(reader), options); + } + + private final static class ParseableString extends Parseable { + final private String input; + + ParseableString(String input, ConfigParseOptions options) { + this.input = input; + postConstruct(options); + } + + @Override + protected Reader reader() { + return new StringReader(input); + } + + @Override + String originDescription() { + return "String"; + } + } + + public static Parseable newString(String input, ConfigParseOptions options) { + return new ParseableString(input, options); + } + + private final static class ParseableURL extends Parseable { + final private URL input; + + ParseableURL(URL input, ConfigParseOptions options) { + this.input = input; + postConstruct(options); + } + + @Override + protected Reader reader() throws IOException { + InputStream stream = input.openStream(); + return readerFromStream(stream); + } + + @Override + ConfigSyntax guessSyntax() { + return syntaxFromExtension(input.getPath()); + } + + @Override + ConfigParseable relativeTo(String filename) { + URL url = relativeTo(input, filename); + if (url == null) + return null; + return newURL(url, options() + .setOriginDescription(null)); + } + + @Override + String originDescription() { + return input.toExternalForm(); + } + + @Override + public URL url() { + return input; + } + + @Override + public String toString() { + return getClass().getSimpleName() + "(" + input.toExternalForm() + + ")"; + } + } + + public static Parseable newURL(URL input, ConfigParseOptions options) { + return new ParseableURL(input, options); + } + + private final static class ParseableFile extends Parseable { + final private File input; + + ParseableFile(File input, ConfigParseOptions options) { + this.input = input; + postConstruct(options); + } + + @Override + protected Reader reader() throws IOException { + InputStream stream = new FileInputStream(input); 
+ return readerFromStream(stream); + } + + @Override + ConfigSyntax guessSyntax() { + return syntaxFromExtension(input.getName()); + } + + @Override + ConfigParseable relativeTo(String filename) { + try { + URL url = relativeTo(input.toURI().toURL(), filename); + if (url == null) + return null; + return newURL(url, options().setOriginDescription(null)); + } catch (MalformedURLException e) { + return null; + } + } + + @Override + String originDescription() { + return input.getPath(); + } + + @Override + public URL url() { + try { + return input.toURI().toURL(); + } catch (MalformedURLException e) { + return null; + } + } + + @Override + public String toString() { + return getClass().getSimpleName() + "(" + input.getPath() + ")"; + } + } + + public static Parseable newFile(File input, ConfigParseOptions options) { + return new ParseableFile(input, options); + } + + private final static class ParseableResource extends Parseable { + final private Class klass; + final private String resource; + + ParseableResource(Class klass, String resource, + ConfigParseOptions options) { + this.klass = klass; + this.resource = resource; + postConstruct(options); + } + + @Override + protected Reader reader() throws IOException { + InputStream stream = klass.getResourceAsStream(resource); + if (stream == null) { + throw new IOException("resource not found on classpath: " + + resource); + } + return readerFromStream(stream); + } + + @Override + ConfigSyntax guessSyntax() { + return syntaxFromExtension(resource); + } + + @Override + ConfigParseable relativeTo(String filename) { + // not using File.isAbsolute because resource paths always use '/' + // (?) + if (filename.startsWith("/")) + return null; + + // here we want to build a new resource name and let + // the class loader have it, rather than getting the + // url with getResource() and relativizing to that url. + // This is needed in case the class loader is going to + // search a classpath. 
+ File parent = new File(resource).getParentFile(); + if (parent == null) + return newResource(klass, "/" + filename, options() + .setOriginDescription(null)); + else + return newResource(klass, new File(parent, filename).getPath(), + options().setOriginDescription(null)); + } + + @Override + String originDescription() { + return resource + " on classpath"; + } + + @Override + public URL url() { + return klass.getResource(resource); + } + + @Override + public String toString() { + return getClass().getSimpleName() + "(" + resource + "," + + klass.getName() + + ")"; + } + } + + public static Parseable newResource(Class klass, String resource, + ConfigParseOptions options) { + return new ParseableResource(klass, resource, options); + } + + private final static class ParseableProperties extends Parseable { + final private Properties props; + + ParseableProperties(Properties props, ConfigParseOptions options) { + this.props = props; + postConstruct(options); + } + + @Override + protected Reader reader() throws IOException { + throw new ConfigException.BugOrBroken( + "reader() should not be called on props"); + } + + @Override + protected AbstractConfigObject parseValue(ConfigOrigin origin, + ConfigParseOptions finalOptions) { + return PropertiesParser.fromProperties(origin, props); + } + + @Override + ConfigSyntax guessSyntax() { + return ConfigSyntax.PROPERTIES; + } + + @Override + String originDescription() { + return "properties"; + } + + @Override + public URL url() { + return null; + } + + @Override + public String toString() { + return getClass().getSimpleName() + "(" + props.size() + " props)"; + } + } + + public static Parseable newProperties(Properties properties, + ConfigParseOptions options) { + return new ParseableProperties(properties, options); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Parser.java b/akka-actor/src/main/java/com/typesafe/config/impl/Parser.java new file mode 100644 index 0000000000..d168e8d57a --- /dev/null +++ 
b/akka-actor/src/main/java/com/typesafe/config/impl/Parser.java @@ -0,0 +1,741 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import java.io.StringReader; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.ListIterator; +import java.util.Map; +import java.util.Stack; + +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigIncludeContext; +import com.typesafe.config.ConfigIncluder; +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigParseOptions; +import com.typesafe.config.ConfigSyntax; +import com.typesafe.config.ConfigValueType; + +final class Parser { + + static AbstractConfigValue parse(Iterator tokens, + ConfigOrigin origin, ConfigParseOptions options, + ConfigIncludeContext includeContext) { + ParseContext context = new ParseContext(options.getSyntax(), origin, + tokens, options.getIncluder(), includeContext); + return context.parse(); + } + + static private final class ParseContext { + private int lineNumber; + final private Stack buffer; + final private Iterator tokens; + final private ConfigIncluder includer; + final private ConfigIncludeContext includeContext; + final private ConfigSyntax flavor; + final private ConfigOrigin baseOrigin; + final private LinkedList pathStack; + + ParseContext(ConfigSyntax flavor, ConfigOrigin origin, + Iterator tokens, ConfigIncluder includer, + ConfigIncludeContext includeContext) { + lineNumber = 1; + buffer = new Stack(); + this.tokens = tokens; + this.flavor = flavor; + this.baseOrigin = origin; + this.includer = includer; + this.includeContext = includeContext; + this.pathStack = new LinkedList(); + } + + private Token nextToken() { + Token t = null; + if (buffer.isEmpty()) { + t = tokens.next(); + } else { + t = buffer.pop(); + } + + if (flavor == ConfigSyntax.JSON) { + if 
(Tokens.isUnquotedText(t)) { + throw parseError("Token not allowed in valid JSON: '" + + Tokens.getUnquotedText(t) + "'"); + } else if (Tokens.isSubstitution(t)) { + throw parseError("Substitutions (${} syntax) not allowed in JSON"); + } + } + + return t; + } + + private void putBack(Token token) { + buffer.push(token); + } + + private Token nextTokenIgnoringNewline() { + Token t = nextToken(); + while (Tokens.isNewline(t)) { + // line number tokens have the line that was _ended_ by the + // newline, so we have to add one. + lineNumber = Tokens.getLineNumber(t) + 1; + t = nextToken(); + } + return t; + } + + // In arrays and objects, comma can be omitted + // as long as there's at least one newline instead. + // this skips any newlines in front of a comma, + // skips the comma, and returns true if it found + // either a newline or a comma. The iterator + // is left just after the comma or the newline. + private boolean checkElementSeparator() { + if (flavor == ConfigSyntax.JSON) { + Token t = nextTokenIgnoringNewline(); + if (t == Tokens.COMMA) { + return true; + } else { + putBack(t); + return false; + } + } else { + boolean sawSeparatorOrNewline = false; + Token t = nextToken(); + while (true) { + if (Tokens.isNewline(t)) { + lineNumber = Tokens.getLineNumber(t); + sawSeparatorOrNewline = true; + // we want to continue to also eat + // a comma if there is one. + } else if (t == Tokens.COMMA) { + return true; + } else { + // non-newline-or-comma + putBack(t); + return sawSeparatorOrNewline; + } + t = nextToken(); + } + } + } + + // merge a bunch of adjacent values into one + // value; change unquoted text into a string + // value. 
+ private void consolidateValueTokens() { + // this trick is not done in JSON + if (flavor == ConfigSyntax.JSON) + return; + + List values = null; // create only if we have value tokens + Token t = nextTokenIgnoringNewline(); // ignore a newline up front + while (Tokens.isValue(t) || Tokens.isUnquotedText(t) + || Tokens.isSubstitution(t)) { + if (values == null) + values = new ArrayList(); + values.add(t); + t = nextToken(); // but don't consolidate across a newline + } + // the last one wasn't a value token + putBack(t); + + if (values == null) + return; + + if (values.size() == 1 && Tokens.isValue(values.get(0))) { + // a single value token requires no consolidation + putBack(values.get(0)); + return; + } + + // this will be a list of String and Path + List minimized = new ArrayList(); + + // we have multiple value tokens or one unquoted text token; + // collapse into a string token. + StringBuilder sb = new StringBuilder(); + ConfigOrigin firstOrigin = null; + for (Token valueToken : values) { + if (Tokens.isValue(valueToken)) { + AbstractConfigValue v = Tokens.getValue(valueToken); + sb.append(v.transformToString()); + if (firstOrigin == null) + firstOrigin = v.origin(); + } else if (Tokens.isUnquotedText(valueToken)) { + String text = Tokens.getUnquotedText(valueToken); + if (firstOrigin == null) + firstOrigin = Tokens.getUnquotedTextOrigin(valueToken); + sb.append(text); + } else if (Tokens.isSubstitution(valueToken)) { + if (firstOrigin == null) + firstOrigin = Tokens.getSubstitutionOrigin(valueToken); + + if (sb.length() > 0) { + // save string so far + minimized.add(sb.toString()); + sb.setLength(0); + } + // now save substitution + List expression = Tokens + .getSubstitutionPathExpression(valueToken); + Path path = parsePathExpression(expression.iterator(), + Tokens.getSubstitutionOrigin(valueToken)); + minimized.add(path); + } else { + throw new ConfigException.BugOrBroken( + "should not be trying to consolidate token: " + + valueToken); + } + } + + if 
(sb.length() > 0) { + // save string so far + minimized.add(sb.toString()); + } + + if (minimized.isEmpty()) + throw new ConfigException.BugOrBroken( + "trying to consolidate values to nothing"); + + Token consolidated = null; + + if (minimized.size() == 1 && minimized.get(0) instanceof String) { + consolidated = Tokens.newString(firstOrigin, + (String) minimized.get(0)); + } else { + // there's some substitution to do later (post-parse step) + consolidated = Tokens.newValue(new ConfigSubstitution( + firstOrigin, minimized)); + } + + putBack(consolidated); + } + + private ConfigOrigin lineOrigin() { + return new SimpleConfigOrigin(baseOrigin.description() + ": line " + + lineNumber); + } + + private ConfigException parseError(String message) { + return parseError(message, null); + } + + private ConfigException parseError(String message, Throwable cause) { + return new ConfigException.Parse(lineOrigin(), message, cause); + } + + private AbstractConfigValue parseValue(Token token) { + if (Tokens.isValue(token)) { + return Tokens.getValue(token); + } else if (token == Tokens.OPEN_CURLY) { + return parseObject(true); + } else if (token == Tokens.OPEN_SQUARE) { + return parseArray(); + } else { + throw parseError("Expecting a value but got wrong token: " + + token); + } + } + + private static AbstractConfigObject createValueUnderPath(Path path, + AbstractConfigValue value) { + // for path foo.bar, we are creating + // { "foo" : { "bar" : value } } + List keys = new ArrayList(); + + String key = path.first(); + Path remaining = path.remainder(); + while (key != null) { + keys.add(key); + if (remaining == null) { + break; + } else { + key = remaining.first(); + remaining = remaining.remainder(); + } + } + ListIterator i = keys.listIterator(keys.size()); + String deepest = i.previous(); + AbstractConfigObject o = new SimpleConfigObject(value.origin(), + Collections. singletonMap( + deepest, value)); + while (i.hasPrevious()) { + Map m = Collections. 
singletonMap( + i.previous(), o); + o = new SimpleConfigObject(value.origin(), m); + } + + return o; + } + + private Path parseKey(Token token) { + if (flavor == ConfigSyntax.JSON) { + if (Tokens.isValueWithType(token, ConfigValueType.STRING)) { + String key = (String) Tokens.getValue(token).unwrapped(); + return Path.newKey(key); + } else { + throw parseError("Expecting close brace } or a field name, got " + + token); + } + } else { + List expression = new ArrayList(); + Token t = token; + while (Tokens.isValue(t) || Tokens.isUnquotedText(t)) { + expression.add(t); + t = nextToken(); // note: don't cross a newline + } + putBack(t); // put back the token we ended with + return parsePathExpression(expression.iterator(), lineOrigin()); + } + } + + private static boolean isIncludeKeyword(Token t) { + return Tokens.isUnquotedText(t) + && Tokens.getUnquotedText(t).equals("include"); + } + + private static boolean isUnquotedWhitespace(Token t) { + if (!Tokens.isUnquotedText(t)) + return false; + + String s = Tokens.getUnquotedText(t); + + for (int i = 0; i < s.length(); ++i) { + char c = s.charAt(i); + if (!ConfigUtil.isWhitespace(c)) + return false; + } + return true; + } + + private void parseInclude(Map values) { + Token t = nextTokenIgnoringNewline(); + while (isUnquotedWhitespace(t)) { + t = nextTokenIgnoringNewline(); + } + + if (Tokens.isValueWithType(t, ConfigValueType.STRING)) { + String name = (String) Tokens.getValue(t).unwrapped(); + AbstractConfigObject obj = (AbstractConfigObject) includer + .include(includeContext, name); + + if (!pathStack.isEmpty()) { + Path prefix = new Path(pathStack); + obj = obj.relativized(prefix); + } + + for (String key : obj.keySet()) { + AbstractConfigValue v = obj.get(key); + AbstractConfigValue existing = values.get(key); + if (existing != null) { + values.put(key, v.withFallback(existing)); + } else { + values.put(key, v); + } + } + + } else { + throw parseError("include keyword is not followed by a quoted string, but by: " + 
+ t); + } + } + + private boolean isKeyValueSeparatorToken(Token t) { + if (flavor == ConfigSyntax.JSON) { + return t == Tokens.COLON; + } else { + return t == Tokens.COLON || t == Tokens.EQUALS; + } + } + + private AbstractConfigObject parseObject(boolean hadOpenCurly) { + // invoked just after the OPEN_CURLY (or START, if !hadOpenCurly) + Map values = new HashMap(); + ConfigOrigin objectOrigin = lineOrigin(); + boolean afterComma = false; + while (true) { + Token t = nextTokenIgnoringNewline(); + if (t == Tokens.CLOSE_CURLY) { + if (flavor == ConfigSyntax.JSON && afterComma) { + throw parseError("expecting a field name after comma, got a close brace }"); + } else if (!hadOpenCurly) { + throw parseError("unbalanced close brace '}' with no open brace"); + } + break; + } else if (t == Tokens.END && !hadOpenCurly) { + putBack(t); + break; + } else if (flavor != ConfigSyntax.JSON && isIncludeKeyword(t)) { + parseInclude(values); + + afterComma = false; + } else { + Path path = parseKey(t); + Token afterKey = nextTokenIgnoringNewline(); + + // path must be on-stack while we parse the value + pathStack.push(path); + + Token valueToken; + AbstractConfigValue newValue; + if (flavor == ConfigSyntax.CONF + && afterKey == Tokens.OPEN_CURLY) { + // can omit the ':' or '=' before an object value + valueToken = afterKey; + newValue = parseObject(true); + } else { + if (!isKeyValueSeparatorToken(afterKey)) { + throw parseError("Key may not be followed by token: " + + afterKey); + } + + consolidateValueTokens(); + valueToken = nextTokenIgnoringNewline(); + newValue = parseValue(valueToken); + } + + pathStack.pop(); + + String key = path.first(); + Path remaining = path.remainder(); + + if (remaining == null) { + AbstractConfigValue existing = values.get(key); + if (existing != null) { + // In strict JSON, dups should be an error; while in + // our custom config language, they should be merged + // if the value is an object (or substitution that + // could become an object). 
+ + if (flavor == ConfigSyntax.JSON) { + throw parseError("JSON does not allow duplicate fields: '" + + key + + "' was already seen at " + + existing.origin().description()); + } else { + newValue = newValue.withFallback(existing); + } + } + values.put(key, newValue); + } else { + if (flavor == ConfigSyntax.JSON) { + throw new ConfigException.BugOrBroken( + "somehow got multi-element path in JSON mode"); + } + + AbstractConfigObject obj = createValueUnderPath( + remaining, newValue); + AbstractConfigValue existing = values.get(key); + if (existing != null) { + obj = obj.withFallback(existing); + } + values.put(key, obj); + } + + afterComma = false; + } + + if (checkElementSeparator()) { + // continue looping + afterComma = true; + } else { + t = nextTokenIgnoringNewline(); + if (t == Tokens.CLOSE_CURLY) { + if (!hadOpenCurly) { + throw parseError("unbalanced close brace '}' with no open brace"); + } + break; + } else if (hadOpenCurly) { + throw parseError("Expecting close brace } or a comma, got " + + t); + } else { + if (t == Tokens.END) { + putBack(t); + break; + } else { + throw parseError("Expecting end of input or a comma, got " + + t); + } + } + } + } + return new SimpleConfigObject(objectOrigin, + values); + } + + private SimpleConfigList parseArray() { + // invoked just after the OPEN_SQUARE + ConfigOrigin arrayOrigin = lineOrigin(); + List values = new ArrayList(); + + consolidateValueTokens(); + + Token t = nextTokenIgnoringNewline(); + + // special-case the first element + if (t == Tokens.CLOSE_SQUARE) { + return new SimpleConfigList(arrayOrigin, + Collections. 
emptyList()); + } else if (Tokens.isValue(t)) { + values.add(parseValue(t)); + } else if (t == Tokens.OPEN_CURLY) { + values.add(parseObject(true)); + } else if (t == Tokens.OPEN_SQUARE) { + values.add(parseArray()); + } else { + throw parseError("List should have ] or a first element after the open [, instead had token: " + + t); + } + + // now remaining elements + while (true) { + // just after a value + if (checkElementSeparator()) { + // comma (or newline equivalent) consumed + } else { + t = nextTokenIgnoringNewline(); + if (t == Tokens.CLOSE_SQUARE) { + return new SimpleConfigList(arrayOrigin, values); + } else { + throw parseError("List should have ended with ] or had a comma, instead had token: " + + t); + } + } + + // now just after a comma + consolidateValueTokens(); + + t = nextTokenIgnoringNewline(); + if (Tokens.isValue(t)) { + values.add(parseValue(t)); + } else if (t == Tokens.OPEN_CURLY) { + values.add(parseObject(true)); + } else if (t == Tokens.OPEN_SQUARE) { + values.add(parseArray()); + } else if (flavor != ConfigSyntax.JSON + && t == Tokens.CLOSE_SQUARE) { + // we allow one trailing comma + putBack(t); + } else { + throw parseError("List should have had new element after a comma, instead had token: " + + t); + } + } + } + + AbstractConfigValue parse() { + Token t = nextTokenIgnoringNewline(); + if (t == Tokens.START) { + // OK + } else { + throw new ConfigException.BugOrBroken( + "token stream did not begin with START, had " + t); + } + + t = nextTokenIgnoringNewline(); + AbstractConfigValue result = null; + if (t == Tokens.OPEN_CURLY) { + result = parseObject(true); + } else if (t == Tokens.OPEN_SQUARE) { + result = parseArray(); + } else { + if (flavor == ConfigSyntax.JSON) { + if (t == Tokens.END) { + throw parseError("Empty document"); + } else { + throw parseError("Document must have an object or array at root, unexpected token: " + + t); + } + } else { + // the root object can omit the surrounding braces. 
+ // this token should be the first field's key, or part + // of it, so put it back. + putBack(t); + result = parseObject(false); + } + } + + t = nextTokenIgnoringNewline(); + if (t == Tokens.END) { + return result; + } else { + throw parseError("Document has trailing tokens after first object or array: " + + t); + } + } + } + + static class Element { + StringBuilder sb; + // an element can be empty if it has a quoted empty string "" in it + boolean canBeEmpty; + + Element(String initial, boolean canBeEmpty) { + this.canBeEmpty = canBeEmpty; + this.sb = new StringBuilder(initial); + } + + @Override + public String toString() { + return "Element(" + sb.toString() + "," + canBeEmpty + ")"; + } + } + + private static void addPathText(List buf, boolean wasQuoted, + String newText) { + int i = wasQuoted ? -1 : newText.indexOf('.'); + Element current = buf.get(buf.size() - 1); + if (i < 0) { + // add to current path element + current.sb.append(newText); + // any empty quoted string means this element can + // now be empty. + if (wasQuoted && current.sb.length() == 0) + current.canBeEmpty = true; + } else { + // "buf" plus up to the period is an element + current.sb.append(newText.substring(0, i)); + // then start a new element + buf.add(new Element("", false)); + // recurse to consume remainder of newText + addPathText(buf, false, newText.substring(i + 1)); + } + } + + private static Path parsePathExpression(Iterator expression, + ConfigOrigin origin) { + return parsePathExpression(expression, origin, null); + } + + // originalText may be null if not available + private static Path parsePathExpression(Iterator expression, + ConfigOrigin origin, String originalText) { + // each builder in "buf" is an element in the path. 
+ List buf = new ArrayList(); + buf.add(new Element("", false)); + + if (!expression.hasNext()) { + throw new ConfigException.BadPath(origin, originalText, + "Expecting a field name or path here, but got nothing"); + } + + while (expression.hasNext()) { + Token t = expression.next(); + if (Tokens.isValueWithType(t, ConfigValueType.STRING)) { + AbstractConfigValue v = Tokens.getValue(t); + // this is a quoted string; so any periods + // in here don't count as path separators + String s = v.transformToString(); + + addPathText(buf, true, s); + } else if (t == Tokens.END) { + // ignore this; when parsing a file, it should not happen + // since we're parsing a token list rather than the main + // token iterator, and when parsing a path expression from the + // API, it's expected to have an END. + } else { + // any periods outside of a quoted string count as + // separators + String text; + if (Tokens.isValue(t)) { + // appending a number here may add + // a period, but we _do_ count those as path + // separators, because we basically want + // "foo 3.0bar" to parse as a string even + // though there's a number in it. The fact that + // we tokenize non-string values is largely an + // implementation detail. + AbstractConfigValue v = Tokens.getValue(t); + text = v.transformToString(); + } else if (Tokens.isUnquotedText(t)) { + text = Tokens.getUnquotedText(t); + } else { + throw new ConfigException.BadPath(origin, originalText, + "Token not allowed in path expression: " + + t); + } + + addPathText(buf, false, text); + } + } + + PathBuilder pb = new PathBuilder(); + for (Element e : buf) { + if (e.sb.length() == 0 && !e.canBeEmpty) { + throw new ConfigException.BadPath( + origin, + originalText, + "path has a leading, trailing, or two adjacent period '.' 
(use quoted \"\" empty string if you want an empty element)"); + } else { + pb.appendKey(e.sb.toString()); + } + } + + return pb.result(); + } + + static ConfigOrigin apiOrigin = new SimpleConfigOrigin("path parameter"); + + static Path parsePath(String path) { + Path speculated = speculativeFastParsePath(path); + if (speculated != null) + return speculated; + + StringReader reader = new StringReader(path); + + try { + Iterator tokens = Tokenizer.tokenize(apiOrigin, reader, + ConfigSyntax.CONF); + tokens.next(); // drop START + return parsePathExpression(tokens, apiOrigin, path); + } finally { + reader.close(); + } + } + + // the idea is to see if the string has any chars that might require the + // full parser to deal with. + private static boolean hasUnsafeChars(String s) { + for (int i = 0; i < s.length(); ++i) { + char c = s.charAt(i); + if (Character.isLetter(c) || c == '.') + continue; + else + return true; + } + return false; + } + + private static void appendPathString(PathBuilder pb, String s) { + int splitAt = s.indexOf('.'); + if (splitAt < 0) { + pb.appendKey(s); + } else { + pb.appendKey(s.substring(0, splitAt)); + appendPathString(pb, s.substring(splitAt + 1)); + } + } + + // do something much faster than the full parser if + // we just have something like "foo" or "foo.bar" + private static Path speculativeFastParsePath(String path) { + String s = ConfigUtil.unicodeTrim(path); + if (s.isEmpty()) + return null; + if (hasUnsafeChars(s)) + return null; + if (s.startsWith(".") || s.endsWith(".") || s.contains("..")) + return null; // let the full parser throw the error + + PathBuilder pb = new PathBuilder(); + appendPathString(pb, s); + return pb.result(); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Path.java b/akka-actor/src/main/java/com/typesafe/config/impl/Path.java new file mode 100644 index 0000000000..b0434f0f14 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/Path.java @@ -0,0 +1,191 @@ +/** + * 
Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import java.util.Iterator; +import java.util.List; + +import com.typesafe.config.ConfigException; + +final class Path { + + final private String first; + final private Path remainder; + + Path(String first, Path remainder) { + this.first = first; + this.remainder = remainder; + } + + Path(String... elements) { + if (elements.length == 0) + throw new ConfigException.BugOrBroken("empty path"); + this.first = elements[0]; + if (elements.length > 1) { + PathBuilder pb = new PathBuilder(); + for (int i = 1; i < elements.length; ++i) { + pb.appendKey(elements[i]); + } + this.remainder = pb.result(); + } else { + this.remainder = null; + } + } + + // append all the paths in the list together into one path + Path(List pathsToConcat) { + if (pathsToConcat.isEmpty()) + throw new ConfigException.BugOrBroken("empty path"); + + Iterator i = pathsToConcat.iterator(); + Path firstPath = i.next(); + this.first = firstPath.first; + + PathBuilder pb = new PathBuilder(); + if (firstPath.remainder != null) { + pb.appendPath(firstPath.remainder); + } + while (i.hasNext()) { + pb.appendPath(i.next()); + } + this.remainder = pb.result(); + } + + String first() { + return first; + } + + /** + * + * @return path minus the first element or null if no more elements + */ + Path remainder() { + return remainder; + } + + /** + * + * @return path minus the last element or null if we have just one element + */ + Path parent() { + if (remainder == null) + return null; + + PathBuilder pb = new PathBuilder(); + Path p = this; + while (p.remainder != null) { + pb.appendKey(p.first); + p = p.remainder; + } + return pb.result(); + } + + /** + * + * @return last element in the path + */ + String last() { + Path p = this; + while (p.remainder != null) { + p = p.remainder; + } + return p.first; + } + + Path prepend(Path toPrepend) { + PathBuilder pb = new PathBuilder(); + pb.appendPath(toPrepend); + pb.appendPath(this); + return 
pb.result(); + } + + int length() { + int count = 1; + Path p = remainder; + while (p != null) { + count += 1; + p = p.remainder; + } + return count; + } + + Path subPath(int removeFromFront) { + int count = removeFromFront; + Path p = this; + while (p != null && count > 0) { + count -= 1; + p = p.remainder; + } + return p; + } + + @Override + public boolean equals(Object other) { + if (other instanceof Path) { + Path that = (Path) other; + return this.first.equals(that.first) + && ConfigUtil.equalsHandlingNull(this.remainder, + that.remainder); + } else { + return false; + } + } + + @Override + public int hashCode() { + return 41 * (41 + first.hashCode()) + + (remainder == null ? 0 : remainder.hashCode()); + } + + // this doesn't have a very precise meaning, just to reduce + // noise from quotes in the rendered path + static boolean hasFunkyChars(String s) { + for (int i = 0; i < s.length(); ++i) { + char c = s.charAt(i); + if (Character.isLetterOrDigit(c) || c == ' ') + continue; + else + return true; + } + return false; + } + + private void appendToStringBuilder(StringBuilder sb) { + if (hasFunkyChars(first)) + sb.append(ConfigUtil.renderJsonString(first)); + else + sb.append(first); + if (remainder != null) { + sb.append("."); + remainder.appendToStringBuilder(sb); + } + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append("Path("); + appendToStringBuilder(sb); + sb.append(")"); + return sb.toString(); + } + + /** + * toString() is a debugging-oriented version while this is an + * error-message-oriented human-readable one. 
+ */ + String render() { + StringBuilder sb = new StringBuilder(); + appendToStringBuilder(sb); + return sb.toString(); + } + + static Path newKey(String key) { + return new Path(key, null); + } + + static Path newPath(String path) { + return Parser.parsePath(path); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/PathBuilder.java b/akka-actor/src/main/java/com/typesafe/config/impl/PathBuilder.java new file mode 100644 index 0000000000..f46e78201e --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/PathBuilder.java @@ -0,0 +1,60 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import java.util.Stack; + +import com.typesafe.config.ConfigException; + +final class PathBuilder { + // the keys are kept "backward" (top of stack is end of path) + final private Stack keys; + private Path result; + + PathBuilder() { + keys = new Stack(); + } + + private void checkCanAppend() { + if (result != null) + throw new ConfigException.BugOrBroken( + "Adding to PathBuilder after getting result"); + } + + void appendKey(String key) { + checkCanAppend(); + + keys.push(key); + } + + void appendPath(Path path) { + checkCanAppend(); + + String first = path.first(); + Path remainder = path.remainder(); + while (true) { + keys.push(first); + if (remainder != null) { + first = remainder.first(); + remainder = remainder.remainder(); + } else { + break; + } + } + } + + Path result() { + // note: if keys is empty, we want to return null, which is a valid + // empty path + if (result == null) { + Path remainder = null; + while (!keys.isEmpty()) { + String key = keys.pop(); + remainder = new Path(key, remainder); + } + result = remainder; + } + return result; + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/PropertiesParser.java b/akka-actor/src/main/java/com/typesafe/config/impl/PropertiesParser.java new file mode 100644 index 0000000000..bd822e65e9 --- /dev/null +++ 
b/akka-actor/src/main/java/com/typesafe/config/impl/PropertiesParser.java @@ -0,0 +1,191 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import java.io.IOException; +import java.io.Reader; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Set; + +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigOrigin; + +final class PropertiesParser { + static AbstractConfigObject parse(Reader reader, + ConfigOrigin origin) throws IOException { + Properties props = new Properties(); + props.load(reader); + return fromProperties(origin, props); + } + + static String lastElement(String path) { + int i = path.lastIndexOf('.'); + if (i < 0) + return path; + else + return path.substring(i + 1); + } + + static String exceptLastElement(String path) { + int i = path.lastIndexOf('.'); + if (i < 0) + return null; + else + return path.substring(0, i); + } + + static Path pathFromPropertyKey(String key) { + String last = lastElement(key); + String exceptLast = exceptLastElement(key); + Path path = new Path(last, null); + while (exceptLast != null) { + last = lastElement(exceptLast); + exceptLast = exceptLastElement(exceptLast); + path = new Path(last, path); + } + return path; + } + + static AbstractConfigObject fromProperties(ConfigOrigin origin, + Properties props) { + Map pathMap = new HashMap(); + for (Map.Entry entry : props.entrySet()) { + Object key = entry.getKey(); + if (key instanceof String) { + Path path = pathFromPropertyKey((String) key); + pathMap.put(path, entry.getValue()); + } + } + return fromPathMap(origin, pathMap, true /* from properties */); + } + + static AbstractConfigObject fromPathMap(ConfigOrigin origin, + Map pathExpressionMap) { + Map pathMap = new HashMap(); + for (Map.Entry entry : 
pathExpressionMap.entrySet()) { + Object keyObj = entry.getKey(); + if (!(keyObj instanceof String)) { + throw new ConfigException.BugOrBroken( + "Map has a non-string as a key, expecting a path expression as a String"); + } + Path path = Path.newPath((String) keyObj); + pathMap.put(path, entry.getValue()); + } + return fromPathMap(origin, pathMap, false /* from properties */); + } + + private static AbstractConfigObject fromPathMap(ConfigOrigin origin, + Map pathMap, boolean convertedFromProperties) { + /* + * First, build a list of paths that will have values, either string or + * object values. + */ + Set scopePaths = new HashSet(); + Set valuePaths = new HashSet(); + for (Path path : pathMap.keySet()) { + // add value's path + valuePaths.add(path); + + // all parent paths are objects + Path next = path.parent(); + while (next != null) { + scopePaths.add(next); + next = next.parent(); + } + } + + if (convertedFromProperties) { + /* + * If any string values are also objects containing other values, + * drop those string values - objects "win". + */ + valuePaths.removeAll(scopePaths); + } else { + /* If we didn't start out as properties, then this is an error. */ + for (Path path : valuePaths) { + if (scopePaths.contains(path)) { + throw new ConfigException.BugOrBroken( + "In the map, path '" + + path.render() + + "' occurs as both the parent object of a value and as a value. " + + "Because Map has no defined ordering, this is a broken situation."); + } + } + } + + /* + * Create maps for the object-valued values. + */ + Map root = new HashMap(); + Map> scopes = new HashMap>(); + + for (Path path : scopePaths) { + Map scope = new HashMap(); + scopes.put(path, scope); + } + + /* Store string values in the associated scope maps */ + for (Path path : valuePaths) { + Path parentPath = path.parent(); + Map parent = parentPath != null ? 
scopes + .get(parentPath) : root; + + String last = path.last(); + Object rawValue = pathMap.get(path); + AbstractConfigValue value; + if (convertedFromProperties) { + value = new ConfigString(origin, (String) rawValue); + } else { + value = ConfigImpl.fromAnyRef(pathMap.get(path), origin, + FromMapMode.KEYS_ARE_PATHS); + } + parent.put(last, value); + } + + /* + * Make a list of scope paths from longest to shortest, so children go + * before parents. + */ + List sortedScopePaths = new ArrayList(); + sortedScopePaths.addAll(scopePaths); + // sort descending by length + Collections.sort(sortedScopePaths, new Comparator() { + @Override + public int compare(Path a, Path b) { + // Path.length() is O(n) so in theory this sucks + // but in practice we can make Path precompute length + // if it ever matters. + return b.length() - a.length(); + } + }); + + /* + * Create ConfigObject for each scope map, working from children to + * parents to avoid modifying any already-created ConfigObject. This is + * where we need the sorted list. + */ + for (Path scopePath : sortedScopePaths) { + Map scope = scopes.get(scopePath); + + Path parentPath = scopePath.parent(); + Map parent = parentPath != null ? scopes + .get(parentPath) : root; + + AbstractConfigObject o = new SimpleConfigObject(origin, scope, + ResolveStatus.RESOLVED, false /* ignoresFallbacks */); + parent.put(scopePath.last(), o); + } + + // return root config object + return new SimpleConfigObject(origin, root, ResolveStatus.RESOLVED, + false /* ignoresFallbacks */); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ResolveStatus.java b/akka-actor/src/main/java/com/typesafe/config/impl/ResolveStatus.java new file mode 100644 index 0000000000..3f73eb5221 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/ResolveStatus.java @@ -0,0 +1,26 @@ +/** + * Copyright (C) 2011 Typesafe Inc. 
+ */ +package com.typesafe.config.impl; + +import java.util.Collection; + +/** + * Status of substitution resolution. + */ +enum ResolveStatus { + UNRESOLVED, RESOLVED; + + final static ResolveStatus fromValues( + Collection values) { + for (AbstractConfigValue v : values) { + if (v.resolveStatus() == ResolveStatus.UNRESOLVED) + return ResolveStatus.UNRESOLVED; + } + return ResolveStatus.RESOLVED; + } + + final static ResolveStatus fromBoolean(boolean resolved) { + return resolved ? ResolveStatus.RESOLVED : ResolveStatus.UNRESOLVED; + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/RootConfig.java b/akka-actor/src/main/java/com/typesafe/config/impl/RootConfig.java new file mode 100644 index 0000000000..723b9fad89 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/RootConfig.java @@ -0,0 +1,61 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import com.typesafe.config.ConfigMergeable; +import com.typesafe.config.ConfigResolveOptions; +import com.typesafe.config.ConfigRoot; + +final class RootConfig extends SimpleConfig implements ConfigRoot { + + final private Path rootPath; + + RootConfig(AbstractConfigObject underlying, Path rootPath) { + super(underlying); + this.rootPath = rootPath; + } + + @Override + protected RootConfig asRoot(AbstractConfigObject underlying, + Path newRootPath) { + if (newRootPath.equals(this.rootPath)) + return this; + else + return new RootConfig(underlying, newRootPath); + } + + @Override + public RootConfig resolve() { + return resolve(ConfigResolveOptions.defaults()); + } + + @Override + public RootConfig resolve(ConfigResolveOptions options) { + // if the object is already resolved then we should end up returning + // "this" here, since asRoot() should return this if the path + // is unchanged. 
+ AbstractConfigObject resolved = resolvedObject(options); + return newRootIfObjectChanged(this, resolved); + } + + @Override + public RootConfig withFallback(ConfigMergeable value) { + // this can return "this" if the withFallback does nothing + return newRootIfObjectChanged(this, super.withFallback(value).toObject()); + } + + Path rootPathObject() { + return rootPath; + } + + @Override + public String rootPath() { + return rootPath.render(); + } + + @Override + public String toString() { + return "Root" + super.toString(); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfig.java b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfig.java new file mode 100644 index 0000000000..d64ce4625a --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfig.java @@ -0,0 +1,597 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import com.typesafe.config.Config; +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigList; +import com.typesafe.config.ConfigMergeable; +import com.typesafe.config.ConfigObject; +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigResolveOptions; +import com.typesafe.config.ConfigValue; +import com.typesafe.config.ConfigValueType; + +/** + * One thing to keep in mind in the future: if any Collection-like APIs are + * added here, including iterators or size() or anything, then we'd have to + * grapple with whether ConfigNull values are "in" the Config (probably not) and + * we'd probably want to make the collection look flat - not like a tree. So the + * key-value pairs would be all the tree's leaf values, in a big flat list with + * their full paths. 
+ */ +class SimpleConfig implements Config { + + AbstractConfigObject object; + + SimpleConfig(AbstractConfigObject object) { + this.object = object; + } + + @Override + public AbstractConfigObject toObject() { + return object; + } + + @Override + public ConfigOrigin origin() { + return object.origin(); + } + + /** + * Returns a version of this config that implements the ConfigRoot + * interface. + * + * @return a config root + */ + RootConfig asRoot(Path rootPath) { + return asRoot(object, rootPath); + } + + // RootConfig overrides this to avoid a new object on unchanged path. + protected RootConfig asRoot(AbstractConfigObject underlying, + Path newRootPath) { + return new RootConfig(underlying, newRootPath); + } + + static protected RootConfig newRootIfObjectChanged(RootConfig self, AbstractConfigObject underlying) { + if (underlying == self.object) + return self; + else + return new RootConfig(underlying, self.rootPathObject()); + } + + protected AbstractConfigObject resolvedObject(ConfigResolveOptions options) { + AbstractConfigValue resolved = SubstitutionResolver.resolve(object, + object, options); + return (AbstractConfigObject) resolved; + } + + @Override + public boolean hasPath(String pathExpression) { + Path path = Path.newPath(pathExpression); + ConfigValue peeked = object.peekPath(path, null, 0, null); + return peeked != null && peeked.valueType() != ConfigValueType.NULL; + } + + @Override + public boolean isEmpty() { + return object.isEmpty(); + } + + static private AbstractConfigValue find(AbstractConfigObject self, + String pathExpression, ConfigValueType expected, String originalPath) { + Path path = Path.newPath(pathExpression); + return find(self, path, expected, originalPath); + } + + static private AbstractConfigValue findKey(AbstractConfigObject self, + String key, ConfigValueType expected, String originalPath) { + AbstractConfigValue v = self.peek(key); + if (v == null) + throw new ConfigException.Missing(originalPath); + + if (expected != 
null) + v = DefaultTransformer.transform(v, expected); + + if (v.valueType() == ConfigValueType.NULL) + throw new ConfigException.Null(v.origin(), originalPath, + expected != null ? expected.name() : null); + else if (expected != null && v.valueType() != expected) + throw new ConfigException.WrongType(v.origin(), originalPath, + expected.name(), v.valueType().name()); + else + return v; + } + + static private AbstractConfigValue find(AbstractConfigObject self, + Path path, ConfigValueType expected, String originalPath) { + String key = path.first(); + Path next = path.remainder(); + if (next == null) { + return findKey(self, key, expected, originalPath); + } else { + AbstractConfigObject o = (AbstractConfigObject) findKey(self, key, + ConfigValueType.OBJECT, originalPath); + assert (o != null); // missing was supposed to throw + return find(o, next, expected, originalPath); + } + } + + AbstractConfigValue find(String pathExpression, ConfigValueType expected, + String originalPath) { + return find(object, pathExpression, expected, originalPath); + } + + @Override + public AbstractConfigValue getValue(String path) { + return find(path, null, path); + } + + @Override + public boolean getBoolean(String path) { + ConfigValue v = find(path, ConfigValueType.BOOLEAN, path); + return (Boolean) v.unwrapped(); + } + + private ConfigNumber getConfigNumber(String path) { + ConfigValue v = find(path, ConfigValueType.NUMBER, path); + return (ConfigNumber) v; + } + + @Override + public Number getNumber(String path) { + return getConfigNumber(path).unwrapped(); + } + + @Override + public int getInt(String path) { + ConfigNumber n = getConfigNumber(path); + return n.intValueRangeChecked(path); + } + + @Override + public long getLong(String path) { + return getNumber(path).longValue(); + } + + @Override + public double getDouble(String path) { + return getNumber(path).doubleValue(); + } + + @Override + public String getString(String path) { + ConfigValue v = find(path, 
ConfigValueType.STRING, path); + return (String) v.unwrapped(); + } + + @Override + public ConfigList getList(String path) { + AbstractConfigValue v = find(path, ConfigValueType.LIST, path); + return (ConfigList) v; + } + + @Override + public AbstractConfigObject getObject(String path) { + AbstractConfigObject obj = (AbstractConfigObject) find(path, + ConfigValueType.OBJECT, path); + return obj; + } + + @Override + public SimpleConfig getConfig(String path) { + return getObject(path).toConfig(); + } + + @Override + public Object getAnyRef(String path) { + ConfigValue v = find(path, null, path); + return v.unwrapped(); + } + + @Override + public Long getMemorySizeInBytes(String path) { + Long size = null; + try { + size = getLong(path); + } catch (ConfigException.WrongType e) { + ConfigValue v = find(path, ConfigValueType.STRING, path); + size = parseMemorySizeInBytes((String) v.unwrapped(), + v.origin(), path); + } + return size; + } + + @Override + public Long getMilliseconds(String path) { + long ns = getNanoseconds(path); + long ms = TimeUnit.NANOSECONDS.toMillis(ns); + return ms; + } + + @Override + public Long getNanoseconds(String path) { + Long ns = null; + try { + ns = TimeUnit.MILLISECONDS.toNanos(getLong(path)); + } catch (ConfigException.WrongType e) { + ConfigValue v = find(path, ConfigValueType.STRING, path); + ns = parseDuration((String) v.unwrapped(), v.origin(), path); + } + return ns; + } + + @SuppressWarnings("unchecked") + private List getHomogeneousUnwrappedList(String path, + ConfigValueType expected) { + List l = new ArrayList(); + List list = getList(path); + for (ConfigValue cv : list) { + // variance would be nice, but stupid cast will do + AbstractConfigValue v = (AbstractConfigValue) cv; + if (expected != null) { + v = DefaultTransformer.transform(v, expected); + } + if (v.valueType() != expected) + throw new ConfigException.WrongType(v.origin(), path, + "list of " + expected.name(), "list of " + + v.valueType().name()); + l.add((T) 
v.unwrapped()); + } + return l; + } + + @Override + public List getBooleanList(String path) { + return getHomogeneousUnwrappedList(path, ConfigValueType.BOOLEAN); + } + + @Override + public List getNumberList(String path) { + return getHomogeneousUnwrappedList(path, ConfigValueType.NUMBER); + } + + @Override + public List getIntList(String path) { + List l = new ArrayList(); + List numbers = getHomogeneousWrappedList(path, ConfigValueType.NUMBER); + for (AbstractConfigValue v : numbers) { + l.add(((ConfigNumber) v).intValueRangeChecked(path)); + } + return l; + } + + @Override + public List getLongList(String path) { + List l = new ArrayList(); + List numbers = getNumberList(path); + for (Number n : numbers) { + l.add(n.longValue()); + } + return l; + } + + @Override + public List getDoubleList(String path) { + List l = new ArrayList(); + List numbers = getNumberList(path); + for (Number n : numbers) { + l.add(n.doubleValue()); + } + return l; + } + + @Override + public List getStringList(String path) { + return getHomogeneousUnwrappedList(path, ConfigValueType.STRING); + } + + @SuppressWarnings("unchecked") + private List getHomogeneousWrappedList( + String path, ConfigValueType expected) { + List l = new ArrayList(); + List list = getList(path); + for (ConfigValue cv : list) { + // variance would be nice, but stupid cast will do + AbstractConfigValue v = (AbstractConfigValue) cv; + if (expected != null) { + v = DefaultTransformer.transform(v, expected); + } + if (v.valueType() != expected) + throw new ConfigException.WrongType(v.origin(), path, + "list of " + expected.name(), "list of " + + v.valueType().name()); + l.add((T) v); + } + return l; + } + + @Override + public List getObjectList(String path) { + return getHomogeneousWrappedList(path, ConfigValueType.OBJECT); + } + + @Override + public List getConfigList(String path) { + List objects = getObjectList(path); + List l = new ArrayList(); + for (ConfigObject o : objects) { + l.add(o.toConfig()); + } + return 
l; + } + + @Override + public List getAnyRefList(String path) { + List l = new ArrayList(); + List list = getList(path); + for (ConfigValue v : list) { + l.add(v.unwrapped()); + } + return l; + } + + @Override + public List getMemorySizeInBytesList(String path) { + List l = new ArrayList(); + List list = getList(path); + for (ConfigValue v : list) { + if (v.valueType() == ConfigValueType.NUMBER) { + l.add(((Number) v.unwrapped()).longValue()); + } else if (v.valueType() == ConfigValueType.STRING) { + String s = (String) v.unwrapped(); + Long n = parseMemorySizeInBytes(s, v.origin(), path); + l.add(n); + } else { + throw new ConfigException.WrongType(v.origin(), path, + "memory size string or number of bytes", v.valueType() + .name()); + } + } + return l; + } + + @Override + public List getMillisecondsList(String path) { + List nanos = getNanosecondsList(path); + List l = new ArrayList(); + for (Long n : nanos) { + l.add(TimeUnit.NANOSECONDS.toMillis(n)); + } + return l; + } + + @Override + public List getNanosecondsList(String path) { + List l = new ArrayList(); + List list = getList(path); + for (ConfigValue v : list) { + if (v.valueType() == ConfigValueType.NUMBER) { + l.add(TimeUnit.MILLISECONDS.toNanos(((Number) v.unwrapped()) + .longValue())); + } else if (v.valueType() == ConfigValueType.STRING) { + String s = (String) v.unwrapped(); + Long n = parseDuration(s, v.origin(), path); + l.add(n); + } else { + throw new ConfigException.WrongType(v.origin(), path, + "duration string or number of nanoseconds", v + .valueType().name()); + } + } + return l; + } + + @Override + public AbstractConfigObject toValue() { + return object; + } + + @Override + public SimpleConfig withFallback(ConfigMergeable other) { + // this can return "this" if the withFallback doesn't need a new + // ConfigObject + return object.withFallback(other).toConfig(); + } + + @Override + public final boolean equals(Object other) { + if (other instanceof SimpleConfig) { + return 
object.equals(((SimpleConfig) other).object); + } else { + return false; + } + } + + @Override + public final int hashCode() { + // we do the "41*" just so our hash code won't match that of the + // underlying object. there's no real reason it can't match, but + // making it not match might catch some kinds of bug. + return 41 * object.hashCode(); + } + + @Override + public String toString() { + return "Config(" + object.toString() + ")"; + } + + private static String getUnits(String s) { + int i = s.length() - 1; + while (i >= 0) { + char c = s.charAt(i); + if (!Character.isLetter(c)) + break; + i -= 1; + } + return s.substring(i + 1); + } + + /** + * Parses a duration string. If no units are specified in the string, it is + * assumed to be in milliseconds. The returned duration is in nanoseconds. + * The purpose of this function is to implement the duration-related methods + * in the ConfigObject interface. + * + * @param input + * the string to parse + * @param originForException + * origin of the value being parsed + * @param pathForException + * path to include in exceptions + * @return duration in nanoseconds + * @throws ConfigException + * if string is invalid + */ + public static long parseDuration(String input, + ConfigOrigin originForException, String pathForException) { + String s = ConfigUtil.unicodeTrim(input); + String originalUnitString = getUnits(s); + String unitString = originalUnitString; + String numberString = ConfigUtil.unicodeTrim(s.substring(0, s.length() + - unitString.length())); + TimeUnit units = null; + + // this would be caught later anyway, but the error message + // is more helpful if we check it here. 
+ if (numberString.length() == 0) + throw new ConfigException.BadValue(originForException, + pathForException, "No number in duration value '" + input + + "'"); + + if (unitString.length() > 2 && !unitString.endsWith("s")) + unitString = unitString + "s"; + + // note that this is deliberately case-sensitive + if (unitString.equals("") || unitString.equals("ms") + || unitString.equals("milliseconds")) { + units = TimeUnit.MILLISECONDS; + } else if (unitString.equals("us") || unitString.equals("microseconds")) { + units = TimeUnit.MICROSECONDS; + } else if (unitString.equals("ns") || unitString.equals("nanoseconds")) { + units = TimeUnit.NANOSECONDS; + } else if (unitString.equals("d") || unitString.equals("days")) { + units = TimeUnit.DAYS; + } else if (unitString.equals("h") || unitString.equals("hours")) { + units = TimeUnit.HOURS; + } else if (unitString.equals("s") || unitString.equals("seconds")) { + units = TimeUnit.SECONDS; + } else if (unitString.equals("m") || unitString.equals("minutes")) { + units = TimeUnit.MINUTES; + } else { + throw new ConfigException.BadValue(originForException, + pathForException, "Could not parse time unit '" + + originalUnitString + + "' (try ns, us, ms, s, m, d)"); + } + + try { + // if the string is purely digits, parse as an integer to avoid + // possible precision loss; + // otherwise as a double. 
+ if (numberString.matches("[0-9]+")) { + return units.toNanos(Long.parseLong(numberString)); + } else { + long nanosInUnit = units.toNanos(1); + return (long) (Double.parseDouble(numberString) * nanosInUnit); + } + } catch (NumberFormatException e) { + throw new ConfigException.BadValue(originForException, + pathForException, "Could not parse duration number '" + + numberString + "'"); + } + } + + private static enum MemoryUnit { + BYTES(1), KILOBYTES(1024), MEGABYTES(1024 * 1024), GIGABYTES( + 1024 * 1024 * 1024), TERABYTES(1024 * 1024 * 1024 * 1024); + + int bytes; + + MemoryUnit(int bytes) { + this.bytes = bytes; + } + } + + /** + * Parses a memory-size string. If no units are specified in the string, it + * is assumed to be in bytes. The returned value is in bytes. The purpose of + * this function is to implement the memory-size-related methods in the + * ConfigObject interface. The units parsed are interpreted as powers of + * two, that is, the convention for memory rather than the convention for + * disk space. + * + * @param input + * the string to parse + * @param originForException + * origin of the value being parsed + * @param pathForException + * path to include in exceptions + * @return size in bytes + * @throws ConfigException + * if string is invalid + */ + public static long parseMemorySizeInBytes(String input, + ConfigOrigin originForException, String pathForException) { + String s = ConfigUtil.unicodeTrim(input); + String unitStringMaybePlural = getUnits(s); + String unitString; + if (unitStringMaybePlural.endsWith("s")) + unitString = unitStringMaybePlural.substring(0, + unitStringMaybePlural.length() - 1); + else + unitString = unitStringMaybePlural; + String unitStringLower = unitString.toLowerCase(); + String numberString = ConfigUtil.unicodeTrim(s.substring(0, s.length() + - unitStringMaybePlural.length())); + + // this would be caught later anyway, but the error message + // is more helpful if we check it here. 
+ if (numberString.length() == 0) + throw new ConfigException.BadValue(originForException, + pathForException, "No number in size-in-bytes value '" + + input + "'"); + + MemoryUnit units = null; + + // the short abbreviations are case-insensitive but you can't write the + // long form words in all caps. + if (unitString.equals("") || unitStringLower.equals("b") + || unitString.equals("byte")) { + units = MemoryUnit.BYTES; + } else if (unitStringLower.equals("k") || unitString.equals("kilobyte")) { + units = MemoryUnit.KILOBYTES; + } else if (unitStringLower.equals("m") || unitString.equals("megabyte")) { + units = MemoryUnit.MEGABYTES; + } else if (unitStringLower.equals("g") || unitString.equals("gigabyte")) { + units = MemoryUnit.GIGABYTES; + } else if (unitStringLower.equals("t") || unitString.equals("terabyte")) { + units = MemoryUnit.TERABYTES; + } else { + throw new ConfigException.BadValue(originForException, + pathForException, "Could not parse size unit '" + + unitStringMaybePlural + "' (try b, k, m, g, t)"); + } + + try { + // if the string is purely digits, parse as an integer to avoid + // possible precision loss; + // otherwise as a double. + if (numberString.matches("[0-9]+")) { + return Long.parseLong(numberString) * units.bytes; + } else { + return (long) (Double.parseDouble(numberString) * units.bytes); + } + } catch (NumberFormatException e) { + throw new ConfigException.BadValue(originForException, + pathForException, "Could not parse memory size number '" + + numberString + "'"); + } + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigList.java b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigList.java new file mode 100644 index 0000000000..9e610858c9 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigList.java @@ -0,0 +1,343 @@ +/** + * Copyright (C) 2011 Typesafe Inc. 
+ */ +package com.typesafe.config.impl; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.ListIterator; + +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigList; +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigResolveOptions; +import com.typesafe.config.ConfigValue; +import com.typesafe.config.ConfigValueType; + +final class SimpleConfigList extends AbstractConfigValue implements ConfigList { + + final private List value; + final private boolean resolved; + + SimpleConfigList(ConfigOrigin origin, List value) { + this(origin, value, ResolveStatus.fromValues(value)); + } + + SimpleConfigList(ConfigOrigin origin, List value, + ResolveStatus status) { + super(origin); + this.value = value; + this.resolved = status == ResolveStatus.RESOLVED; + } + + @Override + public ConfigValueType valueType() { + return ConfigValueType.LIST; + } + + @Override + public List unwrapped() { + List list = new ArrayList(); + for (AbstractConfigValue v : value) { + list.add(v.unwrapped()); + } + return list; + } + + @Override + ResolveStatus resolveStatus() { + return ResolveStatus.fromBoolean(resolved); + } + + private SimpleConfigList modify(Modifier modifier, + ResolveStatus newResolveStatus) { + // lazy-create for optimization + List changed = null; + int i = 0; + for (AbstractConfigValue v : value) { + AbstractConfigValue modified = modifier.modifyChild(v); + + // lazy-create the new list if required + if (changed == null && modified != v) { + changed = new ArrayList(); + for (int j = 0; j < i; ++j) { + changed.add(value.get(j)); + } + } + + // once the new list is created, all elements + // have to go in it. 
+ if (changed != null) { + changed.add(modified); + } + + i += 1; + } + + if (changed != null) { + if (changed.size() != value.size()) + throw new ConfigException.BugOrBroken( + "substituted list's size doesn't match"); + return new SimpleConfigList(origin(), changed, newResolveStatus); + } else { + return this; + } + } + + @Override + SimpleConfigList resolveSubstitutions(final SubstitutionResolver resolver, + final int depth, final ConfigResolveOptions options) { + if (resolved) + return this; + + return modify(new Modifier() { + @Override + public AbstractConfigValue modifyChild(AbstractConfigValue v) { + return resolver.resolve(v, depth, options); + } + + }, ResolveStatus.RESOLVED); + } + + @Override + SimpleConfigList relativized(final Path prefix) { + return modify(new Modifier() { + @Override + public AbstractConfigValue modifyChild(AbstractConfigValue v) { + return v.relativized(prefix); + } + + }, resolveStatus()); + } + + @Override + protected boolean canEqual(Object other) { + return other instanceof SimpleConfigList; + } + + @Override + public boolean equals(Object other) { + // note that "origin" is deliberately NOT part of equality + if (other instanceof SimpleConfigList) { + // optimization to avoid unwrapped() for two ConfigList + return canEqual(other) && value.equals(((SimpleConfigList) other).value); + } else { + return false; + } + } + + @Override + public int hashCode() { + // note that "origin" is deliberately NOT part of equality + return value.hashCode(); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(valueType().name()); + sb.append("("); + for (ConfigValue e : value) { + sb.append(e.toString()); + sb.append(","); + } + if (!value.isEmpty()) + sb.setLength(sb.length() - 1); // chop comma + sb.append(")"); + return sb.toString(); + } + + @Override + public boolean contains(Object o) { + return value.contains(o); + } + + @Override + public boolean containsAll(Collection c) { + return 
value.containsAll(c); + } + + @Override + public ConfigValue get(int index) { + return value.get(index); + } + + @Override + public int indexOf(Object o) { + return value.indexOf(o); + } + + @Override + public boolean isEmpty() { + return value.isEmpty(); + } + + @Override + public Iterator iterator() { + final Iterator i = value.iterator(); + + return new Iterator() { + @Override + public boolean hasNext() { + return i.hasNext(); + } + + @Override + public ConfigValue next() { + return i.next(); + } + + @Override + public void remove() { + throw weAreImmutable("iterator().remove"); + } + }; + } + + @Override + public int lastIndexOf(Object o) { + return value.lastIndexOf(o); + } + + private static ListIterator wrapListIterator( + final ListIterator i) { + return new ListIterator() { + @Override + public boolean hasNext() { + return i.hasNext(); + } + + @Override + public ConfigValue next() { + return i.next(); + } + + @Override + public void remove() { + throw weAreImmutable("listIterator().remove"); + } + + @Override + public void add(ConfigValue arg0) { + throw weAreImmutable("listIterator().add"); + } + + @Override + public boolean hasPrevious() { + return i.hasPrevious(); + } + + @Override + public int nextIndex() { + return i.nextIndex(); + } + + @Override + public ConfigValue previous() { + return i.previous(); + } + + @Override + public int previousIndex() { + return i.previousIndex(); + } + + @Override + public void set(ConfigValue arg0) { + throw weAreImmutable("listIterator().set"); + } + }; + } + + @Override + public ListIterator listIterator() { + return wrapListIterator(value.listIterator()); + } + + @Override + public ListIterator listIterator(int index) { + return wrapListIterator(value.listIterator(index)); + } + + @Override + public int size() { + return value.size(); + } + + @Override + public List subList(int fromIndex, int toIndex) { + List list = new ArrayList(); + // yay bloat caused by lack of type variance + for (AbstractConfigValue v : 
value.subList(fromIndex, toIndex)) { + list.add(v); + } + return list; + } + + @Override + public Object[] toArray() { + return value.toArray(); + } + + @Override + public T[] toArray(T[] a) { + return value.toArray(a); + } + + private static UnsupportedOperationException weAreImmutable(String method) { + return new UnsupportedOperationException( + "ConfigList is immutable, you can't call List.'" + method + "'"); + } + + @Override + public boolean add(ConfigValue e) { + throw weAreImmutable("add"); + } + + @Override + public void add(int index, ConfigValue element) { + throw weAreImmutable("add"); + } + + @Override + public boolean addAll(Collection c) { + throw weAreImmutable("addAll"); + } + + @Override + public boolean addAll(int index, Collection c) { + throw weAreImmutable("addAll"); + } + + @Override + public void clear() { + throw weAreImmutable("clear"); + } + + @Override + public boolean remove(Object o) { + throw weAreImmutable("remove"); + } + + @Override + public ConfigValue remove(int index) { + throw weAreImmutable("remove"); + } + + @Override + public boolean removeAll(Collection c) { + throw weAreImmutable("removeAll"); + } + + @Override + public boolean retainAll(Collection c) { + throw weAreImmutable("retainAll"); + } + + @Override + public ConfigValue set(int index, ConfigValue element) { + throw weAreImmutable("set"); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigObject.java b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigObject.java new file mode 100644 index 0000000000..88b2b9090f --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigObject.java @@ -0,0 +1,135 @@ +/** + * Copyright (C) 2011 Typesafe Inc. 
+ */ +package com.typesafe.config.impl; + +import java.util.AbstractMap; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigValue; + +final class SimpleConfigObject extends AbstractConfigObject { + + // this map should never be modified - assume immutable + final private Map value; + final private boolean resolved; + final private boolean ignoresFallbacks; + + SimpleConfigObject(ConfigOrigin origin, + Map value, ResolveStatus status, + boolean ignoresFallbacks) { + super(origin); + if (value == null) + throw new ConfigException.BugOrBroken( + "creating config object with null map"); + this.value = value; + this.resolved = status == ResolveStatus.RESOLVED; + this.ignoresFallbacks = ignoresFallbacks; + } + + SimpleConfigObject(ConfigOrigin origin, + Map value) { + this(origin, value, ResolveStatus.fromValues(value.values()), false /* ignoresFallbacks */); + } + + @Override + protected AbstractConfigValue peek(String key) { + return value.get(key); + } + + @Override + protected SimpleConfigObject newCopy(ResolveStatus newStatus, boolean newIgnoresFallbacks) { + return new SimpleConfigObject(origin(), value, newStatus, newIgnoresFallbacks); + } + + @Override + ResolveStatus resolveStatus() { + return ResolveStatus.fromBoolean(resolved); + } + + @Override + protected boolean ignoresFallbacks() { + return ignoresFallbacks; + } + + @Override + public Map unwrapped() { + Map m = new HashMap(); + for (Map.Entry e : value.entrySet()) { + m.put(e.getKey(), e.getValue().unwrapped()); + } + return m; + } + + @Override + public boolean containsKey(Object key) { + return value.containsKey(key); + } + + @Override + public Set keySet() { + return value.keySet(); + } + + @Override + public boolean containsValue(Object v) { + return 
value.containsValue(v); + } + + @Override + public Set> entrySet() { + // total bloat just to work around lack of type variance + + HashSet> entries = new HashSet>(); + for (Map.Entry e : value.entrySet()) { + entries.add(new AbstractMap.SimpleImmutableEntry( + e.getKey(), e + .getValue())); + } + return entries; + } + + @Override + public boolean isEmpty() { + return value.isEmpty(); + } + + @Override + public int size() { + return value.size(); + } + + @Override + public Collection values() { + return new HashSet(value.values()); + } + + final private static String EMPTY_NAME = "empty config"; + final private static SimpleConfigObject emptyInstance = empty(new SimpleConfigOrigin( + EMPTY_NAME)); + + final static SimpleConfigObject empty() { + return emptyInstance; + } + + final static SimpleConfigObject empty(ConfigOrigin origin) { + if (origin == null) + return empty(); + else + return new SimpleConfigObject(origin, + Collections. emptyMap()); + } + + final static SimpleConfigObject emptyMissing(ConfigOrigin baseOrigin) { + return new SimpleConfigObject(new SimpleConfigOrigin( + baseOrigin.description() + " (not found)"), + Collections. emptyMap()); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigOrigin.java b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigOrigin.java new file mode 100644 index 0000000000..6e37756638 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigOrigin.java @@ -0,0 +1,40 @@ +/** + * Copyright (C) 2011 Typesafe Inc. 
+ */ +package com.typesafe.config.impl; + +import com.typesafe.config.ConfigOrigin; + +final class SimpleConfigOrigin implements ConfigOrigin { + + final private String description; + + SimpleConfigOrigin(String description) { + this.description = description; + } + + @Override + public String description() { + return description; + } + + @Override + public boolean equals(Object other) { + if (other instanceof SimpleConfigOrigin) { + return this.description + .equals(((SimpleConfigOrigin) other).description); + } else { + return false; + } + } + + @Override + public int hashCode() { + return description.hashCode(); + } + + @Override + public String toString() { + return "ConfigOrigin(" + description + ")"; + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/SubstitutionResolver.java b/akka-actor/src/main/java/com/typesafe/config/impl/SubstitutionResolver.java new file mode 100644 index 0000000000..7f77570d02 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/SubstitutionResolver.java @@ -0,0 +1,51 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import java.util.IdentityHashMap; +import java.util.Map; + +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigResolveOptions; + +/** + * This exists because we have to memoize resolved substitutions as we go + * through the config tree; otherwise we could end up creating multiple copies + * of values or whole trees of values as we follow chains of substitutions. 
+ */ +final class SubstitutionResolver { + final private AbstractConfigObject root; + final private Map memos; + + SubstitutionResolver(AbstractConfigObject root) { + this.root = root; + // note: the memoization is by object identity, not object value + this.memos = new IdentityHashMap(); + } + + AbstractConfigValue resolve(AbstractConfigValue original, int depth, + ConfigResolveOptions options) { + if (memos.containsKey(original)) { + return memos.get(original); + } else { + AbstractConfigValue resolved = original.resolveSubstitutions(this, + depth, options); + if (resolved.resolveStatus() != ResolveStatus.RESOLVED) + throw new ConfigException.BugOrBroken( + "resolveSubstitutions() did not give us a resolved object"); + memos.put(original, resolved); + return resolved; + } + } + + AbstractConfigObject root() { + return this.root; + } + + static AbstractConfigValue resolve(AbstractConfigValue value, + AbstractConfigObject root, ConfigResolveOptions options) { + SubstitutionResolver resolver = new SubstitutionResolver(root); + return resolver.resolve(value, 0, options); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Token.java b/akka-actor/src/main/java/com/typesafe/config/impl/Token.java new file mode 100644 index 0000000000..7c888c748e --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/Token.java @@ -0,0 +1,40 @@ +/** + * Copyright (C) 2011 Typesafe Inc. 
+ */ +package com.typesafe.config.impl; + +class Token { + final private TokenType tokenType; + + Token(TokenType tokenType) { + this.tokenType = tokenType; + } + + public TokenType tokenType() { + return tokenType; + } + + @Override + public String toString() { + return tokenType.name(); + } + + protected boolean canEqual(Object other) { + return other instanceof Token; + } + + @Override + public boolean equals(Object other) { + if (other instanceof Token) { + return canEqual(other) + && this.tokenType == ((Token) other).tokenType; + } else { + return false; + } + } + + @Override + public int hashCode() { + return tokenType.hashCode(); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/TokenType.java b/akka-actor/src/main/java/com/typesafe/config/impl/TokenType.java new file mode 100644 index 0000000000..19b6a106a9 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/TokenType.java @@ -0,0 +1,8 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +enum TokenType { + START, END, COMMA, EQUALS, COLON, OPEN_CURLY, CLOSE_CURLY, OPEN_SQUARE, CLOSE_SQUARE, VALUE, NEWLINE, UNQUOTED_TEXT, SUBSTITUTION; +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Tokenizer.java b/akka-actor/src/main/java/com/typesafe/config/impl/Tokenizer.java new file mode 100644 index 0000000000..147917bf40 --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/Tokenizer.java @@ -0,0 +1,535 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import java.io.IOException; +import java.io.Reader; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Queue; + +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigSyntax; + +final class Tokenizer { + /** + * Tokenizes a Reader. 
Does not close the reader; you have to arrange to do + * that after you're done with the returned iterator. + */ + static Iterator tokenize(ConfigOrigin origin, Reader input, ConfigSyntax flavor) { + return new TokenIterator(origin, input, flavor != ConfigSyntax.JSON); + } + + private static class TokenIterator implements Iterator { + + private static class WhitespaceSaver { + // has to be saved inside value concatenations + private StringBuilder whitespace; + // may need to value-concat with next value + private boolean lastTokenWasSimpleValue; + + WhitespaceSaver() { + whitespace = new StringBuilder(); + lastTokenWasSimpleValue = false; + } + + void add(int c) { + if (lastTokenWasSimpleValue) + whitespace.appendCodePoint(c); + } + + Token check(Token t, ConfigOrigin baseOrigin, int lineNumber) { + if (isSimpleValue(t)) { + return nextIsASimpleValue(baseOrigin, lineNumber); + } else { + nextIsNotASimpleValue(); + return null; + } + } + + // called if the next token is not a simple value; + // discards any whitespace we were saving between + // simple values. + private void nextIsNotASimpleValue() { + lastTokenWasSimpleValue = false; + whitespace.setLength(0); + } + + // called if the next token IS a simple value, + // so creates a whitespace token if the previous + // token also was. + private Token nextIsASimpleValue(ConfigOrigin baseOrigin, + int lineNumber) { + if (lastTokenWasSimpleValue) { + // need to save whitespace between the two so + // the parser has the option to concatenate it. 
+ if (whitespace.length() > 0) { + Token t = Tokens.newUnquotedText( + lineOrigin(baseOrigin, lineNumber), + whitespace.toString()); + whitespace.setLength(0); // reset + return t; + } else { + // lastTokenWasSimpleValue = true still + return null; + } + } else { + lastTokenWasSimpleValue = true; + whitespace.setLength(0); + return null; + } + } + } + + final private ConfigOrigin origin; + final private Reader input; + final private LinkedList buffer; + private int lineNumber; + final private Queue tokens; + final private WhitespaceSaver whitespaceSaver; + final private boolean allowComments; + + TokenIterator(ConfigOrigin origin, Reader input, boolean allowComments) { + this.origin = origin; + this.input = input; + this.allowComments = allowComments; + this.buffer = new LinkedList(); + lineNumber = 1; + tokens = new LinkedList(); + tokens.add(Tokens.START); + whitespaceSaver = new WhitespaceSaver(); + } + + + // this should ONLY be called from nextCharSkippingComments + // or when inside a quoted string, everything else should + // use nextCharSkippingComments(). 
+ private int nextCharRaw() { + if (buffer.isEmpty()) { + try { + return input.read(); + } catch (IOException e) { + throw new ConfigException.IO(origin, "read error: " + + e.getMessage(), e); + } + } else { + int c = buffer.pop(); + return c; + } + } + + private void putBack(int c) { + if (buffer.size() > 2) { + throw new ConfigException.BugOrBroken( + "bug: putBack() three times, undesirable look-ahead"); + } + buffer.push(c); + } + + static boolean isWhitespace(int c) { + return ConfigUtil.isWhitespace(c); + } + + static boolean isWhitespaceNotNewline(int c) { + return c != '\n' && ConfigUtil.isWhitespace(c); + } + + private int slurpComment() { + for (;;) { + int c = nextCharRaw(); + if (c == -1 || c == '\n') { + return c; + } + } + } + + // get next char, skipping comments + private int nextCharSkippingComments() { + for (;;) { + int c = nextCharRaw(); + + if (c == -1) { + return -1; + } else { + if (allowComments) { + if (c == '#') { + return slurpComment(); + } else if (c == '/') { + int maybeSecondSlash = nextCharRaw(); + if (maybeSecondSlash == '/') { + return slurpComment(); + } else { + putBack(maybeSecondSlash); + return c; + } + } else { + return c; + } + } else { + return c; + } + } + } + } + + // get next char, skipping non-newline whitespace + private int nextCharAfterWhitespace(WhitespaceSaver saver) { + for (;;) { + int c = nextCharSkippingComments(); + + if (c == -1) { + return -1; + } else { + if (isWhitespaceNotNewline(c)) { + saver.add(c); + continue; + } else { + return c; + } + } + } + } + + private ConfigException parseError(String message) { + return parseError(message, null); + } + + private ConfigException parseError(String message, Throwable cause) { + return parseError(lineOrigin(), message, cause); + } + + private static ConfigException parseError(ConfigOrigin origin, + String message, + Throwable cause) { + return new ConfigException.Parse(origin, message, cause); + } + + private static ConfigException parseError(ConfigOrigin origin, 
+ String message) { + return parseError(origin, message, null); + } + + private ConfigOrigin lineOrigin() { + return lineOrigin(origin, lineNumber); + } + + private static ConfigOrigin lineOrigin(ConfigOrigin baseOrigin, + int lineNumber) { + return new SimpleConfigOrigin(baseOrigin.description() + ": line " + + lineNumber); + } + + // chars JSON allows a number to start with + static final String firstNumberChars = "0123456789-"; + // chars JSON allows to be part of a number + static final String numberChars = "0123456789eE+-."; + // chars that stop an unquoted string + static final String notInUnquotedText = "$\"{}[]:=,\\+#"; + + // The rules here are intended to maximize convenience while + // avoiding confusion with real valid JSON. Basically anything + // that parses as JSON is treated the JSON way and otherwise + // we assume it's a string and let the parser sort it out. + private Token pullUnquotedText() { + ConfigOrigin origin = lineOrigin(); + StringBuilder sb = new StringBuilder(); + int c = nextCharSkippingComments(); + while (true) { + if (c == -1) { + break; + } else if (notInUnquotedText.indexOf(c) >= 0) { + break; + } else if (isWhitespace(c)) { + break; + } else { + sb.appendCodePoint(c); + } + + // we parse true/false/null tokens as such no matter + // what is after them, as long as they are at the + // start of the unquoted token. 
+ if (sb.length() == 4) { + String s = sb.toString(); + if (s.equals("true")) + return Tokens.newBoolean(origin, true); + else if (s.equals("null")) + return Tokens.newNull(origin); + } else if (sb.length() == 5) { + String s = sb.toString(); + if (s.equals("false")) + return Tokens.newBoolean(origin, false); + } + + c = nextCharSkippingComments(); + } + + // put back the char that ended the unquoted text + putBack(c); + + String s = sb.toString(); + return Tokens.newUnquotedText(origin, s); + } + + private Token pullNumber(int firstChar) { + StringBuilder sb = new StringBuilder(); + sb.appendCodePoint(firstChar); + boolean containedDecimalOrE = false; + int c = nextCharSkippingComments(); + while (c != -1 && numberChars.indexOf(c) >= 0) { + if (c == '.' || c == 'e' || c == 'E') + containedDecimalOrE = true; + sb.appendCodePoint(c); + c = nextCharSkippingComments(); + } + // the last character we looked at wasn't part of the number, put it + // back + putBack(c); + String s = sb.toString(); + try { + if (containedDecimalOrE) { + // force floating point representation + return Tokens.newDouble(lineOrigin(), + Double.parseDouble(s), s); + } else { + // this should throw if the integer is too large for Long + return Tokens.newLong(lineOrigin(), Long.parseLong(s), s); + } + } catch (NumberFormatException e) { + throw parseError("Invalid number: '" + s + + "' (if this is in a path, try quoting it with double quotes)", + e); + } + } + + private void pullEscapeSequence(StringBuilder sb) { + int escaped = nextCharRaw(); + if (escaped == -1) + throw parseError("End of input but backslash in string had nothing after it"); + + switch (escaped) { + case '"': + sb.append('"'); + break; + case '\\': + sb.append('\\'); + break; + case '/': + sb.append('/'); + break; + case 'b': + sb.append('\b'); + break; + case 'f': + sb.append('\f'); + break; + case 'n': + sb.append('\n'); + break; + case 'r': + sb.append('\r'); + break; + case 't': + sb.append('\t'); + break; + case 'u': { + 
// kind of absurdly slow, but screw it for now + char[] a = new char[4]; + for (int i = 0; i < 4; ++i) { + int c = nextCharSkippingComments(); + if (c == -1) + throw parseError("End of input but expecting 4 hex digits for \\uXXXX escape"); + a[i] = (char) c; + } + String digits = new String(a); + try { + sb.appendCodePoint(Integer.parseInt(digits, 16)); + } catch (NumberFormatException e) { + throw parseError( + String.format( + "Malformed hex digits after \\u escape in string: '%s'", + digits), e); + } + } + break; + default: + throw parseError(String + .format("backslash followed by '%c', this is not a valid escape sequence", + escaped)); + } + } + + private ConfigException controlCharacterError(int c) { + String asString; + if (c == '\n') + asString = "newline"; + else if (c == '\t') + asString = "tab"; + else + asString = String.format("control character 0x%x", c); + return parseError("JSON does not allow unescaped " + asString + + " in quoted strings, use a backslash escape"); + } + + private Token pullQuotedString() { + // the open quote has already been consumed + StringBuilder sb = new StringBuilder(); + int c = '\0'; // value doesn't get used + do { + c = nextCharRaw(); + if (c == -1) + throw parseError("End of input but string quote was still open"); + + if (c == '\\') { + pullEscapeSequence(sb); + } else if (c == '"') { + // end the loop, done! 
+ } else if (Character.isISOControl(c)) { + throw controlCharacterError(c); + } else { + sb.appendCodePoint(c); + } + } while (c != '"'); + return Tokens.newString(lineOrigin(), sb.toString()); + } + + private Token pullSubstitution() { + // the initial '$' has already been consumed + ConfigOrigin origin = lineOrigin(); + int c = nextCharSkippingComments(); + if (c != '{') { + throw parseError("'$' not followed by {"); + } + + WhitespaceSaver saver = new WhitespaceSaver(); + List expression = new ArrayList(); + + Token t; + do { + t = pullNextToken(saver); + + // note that we avoid validating the allowed tokens inside + // the substitution here; we even allow nested substitutions + // in the tokenizer. The parser sorts it out. + if (t == Tokens.CLOSE_CURLY) { + // end the loop, done! + break; + } else if (t == Tokens.END) { + throw parseError(origin, + "Substitution ${ was not closed with a }"); + } else { + Token whitespace = saver.check(t, origin, lineNumber); + if (whitespace != null) + expression.add(whitespace); + expression.add(t); + } + } while (true); + + return Tokens.newSubstitution(origin, expression); + } + + private Token pullNextToken(WhitespaceSaver saver) { + int c = nextCharAfterWhitespace(saver); + if (c == -1) { + return Tokens.END; + } else if (c == '\n') { + // newline tokens have the just-ended line number + lineNumber += 1; + return Tokens.newLine(lineNumber - 1); + } else { + Token t = null; + switch (c) { + case '"': + t = pullQuotedString(); + break; + case '$': + t = pullSubstitution(); + break; + case ':': + t = Tokens.COLON; + break; + case ',': + t = Tokens.COMMA; + break; + case '=': + t = Tokens.EQUALS; + break; + case '{': + t = Tokens.OPEN_CURLY; + break; + case '}': + t = Tokens.CLOSE_CURLY; + break; + case '[': + t = Tokens.OPEN_SQUARE; + break; + case ']': + t = Tokens.CLOSE_SQUARE; + break; + } + + if (t == null) { + if (firstNumberChars.indexOf(c) >= 0) { + t = pullNumber(c); + } else if (notInUnquotedText.indexOf(c) >= 0) { + 
throw parseError(String + .format("Character '%c' is not the start of any valid token", + c)); + } else { + putBack(c); + t = pullUnquotedText(); + } + } + + if (t == null) + throw new ConfigException.BugOrBroken( + "bug: failed to generate next token"); + + return t; + } + } + + private static boolean isSimpleValue(Token t) { + if (Tokens.isSubstitution(t) || Tokens.isUnquotedText(t) + || Tokens.isValue(t)) { + return true; + } else { + return false; + } + } + + private void queueNextToken() { + Token t = pullNextToken(whitespaceSaver); + Token whitespace = whitespaceSaver.check(t, origin, lineNumber); + if (whitespace != null) + tokens.add(whitespace); + tokens.add(t); + } + + @Override + public boolean hasNext() { + return !tokens.isEmpty(); + } + + @Override + public Token next() { + Token t = tokens.remove(); + if (tokens.isEmpty() && t != Tokens.END) { + queueNextToken(); + if (tokens.isEmpty()) + throw new ConfigException.BugOrBroken( + "bug: tokens queue should not be empty here"); + } + return t; + } + + @Override + public void remove() { + throw new UnsupportedOperationException( + "Does not make sense to remove items from token stream"); + } + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Tokens.java b/akka-actor/src/main/java/com/typesafe/config/impl/Tokens.java new file mode 100644 index 0000000000..9ec73a819c --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/Tokens.java @@ -0,0 +1,293 @@ +/** + * Copyright (C) 2011 Typesafe Inc. 
+ */ +package com.typesafe.config.impl; + +import java.util.List; + +import com.typesafe.config.ConfigException; +import com.typesafe.config.ConfigOrigin; +import com.typesafe.config.ConfigValueType; + +final class Tokens { + static private class Value extends Token { + + final private AbstractConfigValue value; + + Value(AbstractConfigValue value) { + super(TokenType.VALUE); + this.value = value; + } + + AbstractConfigValue value() { + return value; + } + + @Override + public String toString() { + String s = tokenType().name() + "(" + value.valueType().name() + + ")"; + + return s + "='" + value().unwrapped() + "'"; + } + + @Override + protected boolean canEqual(Object other) { + return other instanceof Value; + } + + @Override + public boolean equals(Object other) { + return super.equals(other) && ((Value) other).value.equals(value); + } + + @Override + public int hashCode() { + return 41 * (41 + super.hashCode()) + value.hashCode(); + } + } + + static private class Line extends Token { + final private int lineNumber; + + Line(int lineNumber) { + super(TokenType.NEWLINE); + this.lineNumber = lineNumber; + } + + int lineNumber() { + return lineNumber; + } + + @Override + public String toString() { + return "NEWLINE@" + lineNumber; + } + + @Override + protected boolean canEqual(Object other) { + return other instanceof Line; + } + + @Override + public boolean equals(Object other) { + return super.equals(other) + && ((Line) other).lineNumber == lineNumber; + } + + @Override + public int hashCode() { + return 41 * (41 + super.hashCode()) + lineNumber; + } + } + + // This is not a Value, because it requires special processing + static private class UnquotedText extends Token { + final private ConfigOrigin origin; + final private String value; + + UnquotedText(ConfigOrigin origin, String s) { + super(TokenType.UNQUOTED_TEXT); + this.origin = origin; + this.value = s; + } + + ConfigOrigin origin() { + return origin; + } + + String value() { + return value; + } + + 
@Override + public String toString() { + return tokenType().name() + "(" + value + ")"; + } + + @Override + protected boolean canEqual(Object other) { + return other instanceof UnquotedText; + } + + @Override + public boolean equals(Object other) { + return super.equals(other) + && ((UnquotedText) other).value.equals(value); + } + + @Override + public int hashCode() { + return 41 * (41 + super.hashCode()) + value.hashCode(); + } + } + + // This is not a Value, because it requires special processing + static private class Substitution extends Token { + final private ConfigOrigin origin; + final private List value; + + Substitution(ConfigOrigin origin, List expression) { + super(TokenType.SUBSTITUTION); + this.origin = origin; + this.value = expression; + } + + ConfigOrigin origin() { + return origin; + } + + List value() { + return value; + } + + @Override + public String toString() { + return tokenType().name() + "(" + value.toString() + ")"; + } + + @Override + protected boolean canEqual(Object other) { + return other instanceof Substitution; + } + + @Override + public boolean equals(Object other) { + return super.equals(other) + && ((Substitution) other).value.equals(value); + } + + @Override + public int hashCode() { + return 41 * (41 + super.hashCode()) + value.hashCode(); + } + } + + static boolean isValue(Token token) { + return token instanceof Value; + } + + static AbstractConfigValue getValue(Token token) { + if (token instanceof Value) { + return ((Value) token).value(); + } else { + throw new ConfigException.BugOrBroken( + "tried to get value of non-value token " + token); + } + } + + static boolean isValueWithType(Token t, ConfigValueType valueType) { + return isValue(t) && getValue(t).valueType() == valueType; + } + + static boolean isNewline(Token token) { + return token instanceof Line; + } + + static int getLineNumber(Token token) { + if (token instanceof Line) { + return ((Line) token).lineNumber(); + } else { + throw new 
ConfigException.BugOrBroken( + "tried to get line number from non-newline " + token); + } + } + + static boolean isUnquotedText(Token token) { + return token instanceof UnquotedText; + } + + static String getUnquotedText(Token token) { + if (token instanceof UnquotedText) { + return ((UnquotedText) token).value(); + } else { + throw new ConfigException.BugOrBroken( + "tried to get unquoted text from " + token); + } + } + + static ConfigOrigin getUnquotedTextOrigin(Token token) { + if (token instanceof UnquotedText) { + return ((UnquotedText) token).origin(); + } else { + throw new ConfigException.BugOrBroken( + "tried to get unquoted text from " + token); + } + } + + static boolean isSubstitution(Token token) { + return token instanceof Substitution; + } + + static List getSubstitutionPathExpression(Token token) { + if (token instanceof Substitution) { + return ((Substitution) token).value(); + } else { + throw new ConfigException.BugOrBroken( + "tried to get substitution from " + token); + } + } + + static ConfigOrigin getSubstitutionOrigin(Token token) { + if (token instanceof Substitution) { + return ((Substitution) token).origin(); + } else { + throw new ConfigException.BugOrBroken( + "tried to get substitution origin from " + token); + } + } + + final static Token START = new Token(TokenType.START); + final static Token END = new Token(TokenType.END); + final static Token COMMA = new Token(TokenType.COMMA); + final static Token EQUALS = new Token(TokenType.EQUALS); + final static Token COLON = new Token(TokenType.COLON); + final static Token OPEN_CURLY = new Token(TokenType.OPEN_CURLY); + final static Token CLOSE_CURLY = new Token(TokenType.CLOSE_CURLY); + final static Token OPEN_SQUARE = new Token(TokenType.OPEN_SQUARE); + final static Token CLOSE_SQUARE = new Token(TokenType.CLOSE_SQUARE); + + static Token newLine(int lineNumberJustEnded) { + return new Line(lineNumberJustEnded); + } + + static Token newUnquotedText(ConfigOrigin origin, String s) { + return 
new UnquotedText(origin, s); + } + + static Token newSubstitution(ConfigOrigin origin, List expression) { + return new Substitution(origin, expression); + } + + static Token newValue(AbstractConfigValue value) { + return new Value(value); + } + + static Token newString(ConfigOrigin origin, String value) { + return newValue(new ConfigString(origin, value)); + } + + static Token newInt(ConfigOrigin origin, int value, String originalText) { + return newValue(ConfigNumber.newNumber(origin, value, + originalText)); + } + + static Token newDouble(ConfigOrigin origin, double value, + String originalText) { + return newValue(ConfigNumber.newNumber(origin, value, + originalText)); + } + + static Token newLong(ConfigOrigin origin, long value, String originalText) { + return newValue(ConfigNumber.newNumber(origin, value, + originalText)); + } + + static Token newNull(ConfigOrigin origin) { + return newValue(new ConfigNull(origin)); + } + + static Token newBoolean(ConfigOrigin origin, boolean value) { + return newValue(new ConfigBoolean(origin, value)); + } +} diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Unmergeable.java b/akka-actor/src/main/java/com/typesafe/config/impl/Unmergeable.java new file mode 100644 index 0000000000..e0d114e78d --- /dev/null +++ b/akka-actor/src/main/java/com/typesafe/config/impl/Unmergeable.java @@ -0,0 +1,16 @@ +/** + * Copyright (C) 2011 Typesafe Inc. + */ +package com.typesafe.config.impl; + +import java.util.Collection; + +/** + * Interface that tags a ConfigValue that is not mergeable until after + * substitutions are resolved. Basically these are special ConfigValue that + * never appear in a resolved tree, like {@link ConfigSubstitution} and + * {@link ConfigDelayedMerge}. 
+ */ +interface Unmergeable { + Collection unmergedValues(); +} diff --git a/akka-actor/src/main/java/org/jboss/netty/akka/util/HashedWheelTimer.java b/akka-actor/src/main/java/org/jboss/netty/akka/util/HashedWheelTimer.java index 328d2dc39f..6d31b327c6 100644 --- a/akka-actor/src/main/java/org/jboss/netty/akka/util/HashedWheelTimer.java +++ b/akka-actor/src/main/java/org/jboss/netty/akka/util/HashedWheelTimer.java @@ -16,6 +16,7 @@ package org.jboss.netty.akka.util; import akka.event.LoggingAdapter; +import akka.util.Duration; import org.jboss.netty.akka.util.internal.ConcurrentIdentityHashMap; import org.jboss.netty.akka.util.internal.ReusableIterator; @@ -73,14 +74,13 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; * @author The Netty Project * @author Trustin Lee * @version $Rev: 2297 $, $Date: 2010-06-07 10:50:02 +0900 (Mon, 07 Jun 2010) $ + * + * The original implementation has been slightly altered to fit the specific requirements of Akka. */ public class HashedWheelTimer implements Timer { - private static final AtomicInteger id = new AtomicInteger(); - private final Worker worker = new Worker(); final Thread workerThread; final AtomicBoolean shutdown = new AtomicBoolean(); - private final long roundDuration; final long tickDuration; final Set[] wheel; @@ -96,28 +96,26 @@ public class HashedWheelTimer implements Timer { * @param threadFactory a {@link java.util.concurrent.ThreadFactory} that creates a * background {@link Thread} which is dedicated to * {@link TimerTask} execution. 
- * @param tickDuration the duration between tick - * @param unit the time unit of the {@code tickDuration} + * @param duration the duration between ticks * @param ticksPerWheel the size of the wheel */ public HashedWheelTimer( LoggingAdapter logger, ThreadFactory threadFactory, - long tickDuration, TimeUnit unit, int ticksPerWheel) { + Duration duration, + int ticksPerWheel) { if (threadFactory == null) { throw new NullPointerException("threadFactory"); } - if (unit == null) { - throw new NullPointerException("unit"); + if (duration == null) { + throw new NullPointerException("duration"); } - if (tickDuration <= 0) { - throw new IllegalArgumentException( - "tickDuration must be greater than 0: " + tickDuration); + if (duration.toNanos() <= 0) { + throw new IllegalArgumentException("duration must be greater than 0 ns: " + duration.toNanos()); } if (ticksPerWheel <= 0) { - throw new IllegalArgumentException( - "ticksPerWheel must be greater than 0: " + ticksPerWheel); + throw new IllegalArgumentException("ticksPerWheel must be greater than 0: " + ticksPerWheel); } this.logger = logger; @@ -127,15 +125,12 @@ public class HashedWheelTimer implements Timer { iterators = createIterators(wheel); mask = wheel.length - 1; - // Convert tickDuration to milliseconds. - this.tickDuration = tickDuration = unit.toMillis(tickDuration); + // Convert to standardized tickDuration + this.tickDuration = duration.toNanos(); // Prevent overflow. 
- if (tickDuration == Long.MAX_VALUE || - tickDuration >= Long.MAX_VALUE / wheel.length) { - throw new IllegalArgumentException( - "tickDuration is too long: " + - tickDuration + ' ' + unit); + if (tickDuration == Long.MAX_VALUE || tickDuration >= Long.MAX_VALUE / wheel.length) { + throw new IllegalArgumentException("tickDuration is too long: " + tickDuration + ' ' + duration.unit()); } roundDuration = tickDuration * wheel.length; @@ -156,8 +151,7 @@ public class HashedWheelTimer implements Timer { ticksPerWheel = normalizeTicksPerWheel(ticksPerWheel); Set[] wheel = new Set[ticksPerWheel]; for (int i = 0; i < wheel.length; i ++) { - wheel[i] = new MapBackedSet( - new ConcurrentIdentityHashMap(16, 0.95f, 4)); + wheel[i] = new MapBackedSet(new ConcurrentIdentityHashMap(16, 0.95f, 4)); } return wheel; } @@ -231,23 +225,22 @@ public class HashedWheelTimer implements Timer { return Collections.unmodifiableSet(unprocessedTimeouts); } - public Timeout newTimeout(TimerTask task, long delay, TimeUnit unit) { - final long currentTime = System.currentTimeMillis(); + public Timeout newTimeout(TimerTask task, Duration delay) { + final long currentTime = System.nanoTime(); if (task == null) { throw new NullPointerException("task"); } - if (unit == null) { - throw new NullPointerException("unit"); + if (delay == null) { + throw new NullPointerException("delay"); } if (!workerThread.isAlive()) { start(); } - delay = unit.toMillis(delay); - HashedWheelTimeout timeout = new HashedWheelTimeout(task, currentTime + delay); - scheduleTimeout(timeout, delay); + HashedWheelTimeout timeout = new HashedWheelTimeout(task, currentTime + delay.toNanos()); + scheduleTimeout(timeout, delay.toNanos()); return timeout; } @@ -261,11 +254,8 @@ public class HashedWheelTimer implements Timer { // Prepare the required parameters to schedule the timeout object. 
final long lastRoundDelay = delay % roundDuration; final long lastTickDelay = delay % tickDuration; - final long relativeIndex = - lastRoundDelay / tickDuration + (lastTickDelay != 0? 1 : 0); - - final long remainingRounds = - delay / roundDuration - (delay % roundDuration == 0? 1 : 0); + final long relativeIndex = lastRoundDelay / tickDuration + (lastTickDelay != 0? 1 : 0); + final long remainingRounds = delay / roundDuration - (delay % roundDuration == 0? 1 : 0); // Add the timeout to the wheel. lock.readLock().lock(); @@ -292,7 +282,7 @@ public class HashedWheelTimer implements Timer { List expiredTimeouts = new ArrayList(); - startTime = System.currentTimeMillis(); + startTime = System.nanoTime(); tick = 1; while (!shutdown.get()) { @@ -304,8 +294,7 @@ public class HashedWheelTimer implements Timer { } } - private void fetchExpiredTimeouts( - List expiredTimeouts, long deadline) { + private void fetchExpiredTimeouts(List expiredTimeouts, long deadline) { // Find the expired timeouts and decrease the round counter // if necessary. 
Note that we don't send the notification @@ -371,15 +360,17 @@ public class HashedWheelTimer implements Timer { long deadline = startTime + tickDuration * tick; for (;;) { - final long currentTime = System.currentTimeMillis(); - final long sleepTime = tickDuration * tick - (currentTime - startTime); + final long currentTime = System.nanoTime(); + final long sleepTime = (tickDuration * tick - (currentTime - startTime)); if (sleepTime <= 0) { break; } try { - Thread.sleep(sleepTime); + long milliSeconds = TimeUnit.NANOSECONDS.toMillis(sleepTime); + int nanoSeconds = (int) (sleepTime - (milliSeconds * 1000000)); + Thread.sleep(milliSeconds, nanoSeconds); } catch (InterruptedException e) { if (shutdown.get()) { return -1; @@ -451,7 +442,7 @@ public class HashedWheelTimer implements Timer { @Override public String toString() { - long currentTime = System.currentTimeMillis(); + long currentTime = System.nanoTime(); long remaining = deadline - currentTime; StringBuilder buf = new StringBuilder(192); @@ -477,3 +468,4 @@ public class HashedWheelTimer implements Timer { } } } + diff --git a/akka-actor/src/main/java/org/jboss/netty/akka/util/MapBackedSet.java b/akka-actor/src/main/java/org/jboss/netty/akka/util/MapBackedSet.java index a40e72cead..2bc1bc25e0 100644 --- a/akka-actor/src/main/java/org/jboss/netty/akka/util/MapBackedSet.java +++ b/akka-actor/src/main/java/org/jboss/netty/akka/util/MapBackedSet.java @@ -19,56 +19,55 @@ import java.io.Serializable; import java.util.AbstractSet; import java.util.Iterator; import java.util.Map; -import java.util.Set; /** * A {@link java.util.Map}-backed {@link java.util.Set}. 
- * + * * @author The Netty Project * @author Trustin Lee - * + * * @version $Rev: 2080 $, $Date: 2010-01-26 18:04:19 +0900 (Tue, 26 Jan 2010) $ */ final class MapBackedSet extends AbstractSet implements Serializable { - private static final long serialVersionUID = -6761513279741915432L; + private static final long serialVersionUID = -6761513279741915432L; - private final Map map; + private final Map map; - /** - * Creates a new instance which wraps the specified {@code map}. - */ - MapBackedSet(Map map) { - this.map = map; - } + /** + * Creates a new instance which wraps the specified {@code map}. + */ + MapBackedSet(Map map) { + this.map = map; + } - @Override - public int size() { - return map.size(); - } + @Override + public int size() { + return map.size(); + } - @Override - public boolean contains(Object o) { - return map.containsKey(o); - } + @Override + public boolean contains(Object o) { + return map.containsKey(o); + } - @Override - public boolean add(E o) { - return map.put(o, Boolean.TRUE) == null; - } + @Override + public boolean add(E o) { + return map.put(o, Boolean.TRUE) == null; + } - @Override - public boolean remove(Object o) { - return map.remove(o) != null; - } + @Override + public boolean remove(Object o) { + return map.remove(o) != null; + } - @Override - public void clear() { - map.clear(); - } + @Override + public void clear() { + map.clear(); + } - @Override - public Iterator iterator() { - return map.keySet().iterator(); - } + @Override + public Iterator iterator() { + return map.keySet().iterator(); + } } diff --git a/akka-actor/src/main/java/org/jboss/netty/akka/util/Timer.java b/akka-actor/src/main/java/org/jboss/netty/akka/util/Timer.java index 43ddec9604..b5bd8c6a7c 100644 --- a/akka-actor/src/main/java/org/jboss/netty/akka/util/Timer.java +++ b/akka-actor/src/main/java/org/jboss/netty/akka/util/Timer.java @@ -15,6 +15,7 @@ */ package org.jboss.netty.akka.util; +import akka.util.Duration; import java.util.Set; import 
java.util.concurrent.TimeUnit; @@ -41,7 +42,7 @@ public interface Timer { * @throws IllegalStateException if this timer has been * {@linkplain #stop() stopped} already */ - Timeout newTimeout(TimerTask task, long delay, TimeUnit unit); + Timeout newTimeout(TimerTask task, Duration delay); /** * Releases all resources acquired by this {@link org.jboss.netty.akka.util.Timer} and cancels all diff --git a/akka-actor/src/main/java/org/jboss/netty/akka/util/TimerTask.java b/akka-actor/src/main/java/org/jboss/netty/akka/util/TimerTask.java index 341f43ad68..3d0190d8f5 100644 --- a/akka-actor/src/main/java/org/jboss/netty/akka/util/TimerTask.java +++ b/akka-actor/src/main/java/org/jboss/netty/akka/util/TimerTask.java @@ -15,23 +15,24 @@ */ package org.jboss.netty.akka.util; -import java.util.concurrent.TimeUnit; - /** * A task which is executed after the delay specified with - * {@link Timer#newTimeout(org.jboss.netty.akka.util.TimerTask, long, java.util.concurrent.TimeUnit)}. - * + * {@link Timer#newTimeout(org.jboss.netty.akka.util.TimerTask, long, java.util.concurrent.TimeUnit)} + * . + * * @author The Netty Project * @author Trustin Lee * @version $Rev: 2080 $, $Date: 2010-01-26 18:04:19 +0900 (Tue, 26 Jan 2010) $ */ public interface TimerTask { - /** - * Executed after the delay specified with - * {@link Timer#newTimeout(org.jboss.netty.akka.util.TimerTask, long, java.util.concurrent.TimeUnit)}. - * - * @param timeout a handle which is associated with this task - */ - void run(Timeout timeout) throws Exception; + /** + * Executed after the delay specified with + * {@link Timer#newTimeout(org.jboss.netty.akka.util.TimerTask, long, java.util.concurrent.TimeUnit)} + * . 
+ * + * @param timeout + * a handle which is associated with this task + */ + void run(Timeout timeout) throws Exception; } diff --git a/akka-actor/src/main/resources/akka-actor-reference.conf b/akka-actor/src/main/resources/akka-actor-reference.conf new file mode 100644 index 0000000000..a4b74ce474 --- /dev/null +++ b/akka-actor/src/main/resources/akka-actor-reference.conf @@ -0,0 +1,142 @@ +############################## +# Akka Reference Config File # +############################## + +# This the reference config file has all the default settings. +# Make your edits/overrides in your akka.conf. + +akka { + version = "2.0-SNAPSHOT" # Akka version, checked against the runtime version of Akka. + + home = "" # Home directory of Akka, modules in the deploy directory will be loaded + + enabled-modules = [] # Comma separated list of the enabled modules. Options: ["cluster", "camel", "http"] + + event-handlers = ["akka.event.Logging$DefaultLogger"] # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT) + loglevel = "WARNING" # Options: ERROR, WARNING, INFO, DEBUG + # this level is used by the configured loggers (see "event-handlers") as soon + # as they have been started; before that, see "stdout-loglevel" + stdout-loglevel = "WARNING" # Loglevel for the very basic logger activated during AkkaApplication startup + + extensions = [] # list FQCN of extensions which shall be loaded at actor system startup + + # These boot classes are loaded (and created) automatically when the Akka Microkernel boots up + # Can be used to bootstrap your application(s) + # Should be the FQN (Fully Qualified Name) of the boot class which needs to have a default constructor + # boot = ["sample.camel.Boot", + # "sample.rest.java.Boot", + # "sample.rest.scala.Boot", + # "sample.security.Boot"] + boot = [] + + actor { + provider = "akka.actor.LocalActorRefProvider" + timeout = 5s # Default timeout for Future based invocations + # - Actor: ask && ? 
+ # - UntypedActor: ask + # - TypedActor: methods with non-void return type + serialize-messages = off # Does a deep clone of (non-primitive) messages to ensure immutability + dispatcher-shutdown-timeout = 1s # How long dispatchers by default will wait for new actors until they shut down + + deployment { + + default { # deployment id pattern, e.g. /app/service-ping + + router = "direct" # routing (load-balance) scheme to use + # available: "direct", "round-robin", "random", "scatter-gather" + # "least-cpu", "least-ram", "least-messages" + # or: fully qualified class name of the router class + # default is "direct"; + # if 'replication' is used then the only available router is "direct" + + nr-of-instances = 1 # number of actor instances in the cluster + # available: positive integer (1-N) or the string "auto" for auto-scaling + # default is '1' + # if the "direct" router is used then this element is ignored (always '1') + + + # optional + create-as { # FIXME document 'create-as' + class = "" # fully qualified class name of recipe implementation + } + + remote { + nodes = [] # A list of hostnames and ports for instantiating the remote actor instances + # The format should be on "hostname:port", where: + # - hostname can be either hostname or IP address the remote actor should connect to + # - port should be the port for the remote server on the other node + } + + cluster { # defines the actor as a clustered actor + # default (if omitted) is local non-clustered actor + + preferred-nodes = [] # a list of preferred nodes for instantiating the actor instances on + # on format "host:", "ip:" or "node:" + + + # optional + replication { # use replication or not? only makes sense for a stateful actor + # FIXME should we have this config option here? If so, implement it all through. + serialize-mailbox = off # should the actor mailbox be part of the serialized snapshot? 
+ # default is 'off' + + storage = "transaction-log" # storage model for replication + # available: "transaction-log" and "data-grid" + # default is "transaction-log" + + strategy = "write-through" # guarantees for replication + # available: "write-through" and "write-behind" + # default is "write-through" + + } + } + } + } + + default-dispatcher { + type = "Dispatcher" # Must be one of the following + # Dispatcher, (BalancingDispatcher, only valid when all actors using it are of the same type), + # A FQCN to a class inheriting MessageDispatcherConfigurator with a no-arg visible constructor + name = "DefaultDispatcher" # Optional, will be a generated UUID if omitted + keep-alive-time = 60s # Keep alive time for threads + core-pool-size-factor = 8.0 # No of core threads ... ceil(available processors * factor) + max-pool-size-factor = 8.0 # Max no of threads ... ceil(available processors * factor) + task-queue-size = -1 # Specifies the bounded capacity of the task queue (< 1 == unbounded) + task-queue-type = "linked" # Specifies which type of task queue will be used, can be "array" or "linked" (default) + allow-core-timeout = on # Allow core threads to time out + throughput = 5 # Throughput for Dispatcher, set to 1 for complete fairness + throughput-deadline-time = 0ms # Throughput deadline for Dispatcher, set to 0 or negative for no deadline + mailbox-capacity = -1 # If negative (or zero) then an unbounded mailbox is used (default) + # If positive then a bounded mailbox is used and the capacity is set using the property + # NOTE: setting a mailbox to 'blocking' can be a bit dangerous, could lead to deadlock, use with care + # The following are only used for Dispatcher and only if mailbox-capacity > 0 + mailbox-push-timeout-time = 10s # Specifies the timeout to add a new message to a mailbox that is full - negative number means infinite timeout + } + + debug { + receive = off # enable function of Actor.loggable(), which is to log any received message at DEBUG level + 
autoreceive = off # enable DEBUG logging of all AutoReceiveMessages (Kill, PoisonPill and the like) + lifecycle = off # enable DEBUG logging of actor lifecycle changes + fsm = off # enable DEBUG logging of all LoggingFSMs for events, transitions and timers + event-stream = off # enable DEBUG logging of subscription changes on the eventStream + } + + } + + # Used to set the behavior of the scheduler. + # Changing the default values may change the system behavior drastically so make sure you know what you're doing! + # + scheduler { + # The HashedWheelTimer (HWT) implementation from Netty is used as the default scheduler in the system. + # + # HWT does not execute the scheduled tasks on exact time. + # It will, on every tick, check if there are any tasks behind the schedule and execute them. + # You can increase or decrease the accuracy of the execution timing by specifying smaller or larger tick duration. + # If you are scheduling a lot of tasks you should consider increasing the ticks per wheel. + # For more information see: http://www.jboss.org/netty/ + tickDuration = 100ms + ticksPerWheel = 512 + } + + +} diff --git a/akka-actor/src/main/resources/akka-serialization-reference.conf b/akka-actor/src/main/resources/akka-serialization-reference.conf new file mode 100644 index 0000000000..fb6f134b93 --- /dev/null +++ b/akka-actor/src/main/resources/akka-serialization-reference.conf @@ -0,0 +1,32 @@ +############################################ +# Akka Serialization Reference Config File # +############################################ + +# This the reference config file has all the default settings. +# Make your edits/overrides in your akka.conf. + +akka { + actor { + + # Entries for pluggable serializers and their bindings. If a binding for a specific class is not found, + # then the default serializer (Java serialization) is used. 
+ # + serializers { + # java = "akka.serialization.JavaSerializer" + # proto = "akka.testing.ProtobufSerializer" + # sjson = "akka.testing.SJSONSerializer" + default = "akka.serialization.JavaSerializer" + } + + # serialization-bindings { + # java = ["akka.serialization.SerializeSpec$Address", + # "akka.serialization.MyJavaSerializableActor", + # "akka.serialization.MyStatelessActorWithMessagesInMailbox", + # "akka.serialization.MyActorWithProtobufMessagesInMailbox"] + # sjson = ["akka.serialization.SerializeSpec$Person"] + # proto = ["com.google.protobuf.Message", + # "akka.actor.ProtobufProtocol$MyMessage"] + # } + } + +} diff --git a/akka-actor/src/main/scala/akka/actor/Actor.scala b/akka-actor/src/main/scala/akka/actor/Actor.scala index 1175e0bb12..b8c0bbb327 100644 --- a/akka-actor/src/main/scala/akka/actor/Actor.scala +++ b/akka-actor/src/main/scala/akka/actor/Actor.scala @@ -6,14 +6,13 @@ package akka.actor import DeploymentConfig._ import akka.dispatch._ -import akka.config._ import akka.routing._ import akka.util.Duration import akka.remote.RemoteSupport -import akka.cluster.ClusterNode import akka.japi.{ Creator, Procedure } import akka.serialization.{ Serializer, Serialization } import akka.event.Logging.Debug +import akka.event.LogSource import akka.experimental import akka.AkkaException @@ -150,11 +149,11 @@ object Timeout { implicit def durationToTimeout(duration: Duration) = new Timeout(duration) implicit def intToTimeout(timeout: Int) = new Timeout(timeout) implicit def longToTimeout(timeout: Long) = new Timeout(timeout) - implicit def defaultTimeout(implicit app: ActorSystem) = app.AkkaConfig.ActorTimeout + implicit def defaultTimeout(implicit system: ActorSystem) = system.settings.ActorTimeout } trait ActorLogging { this: Actor ⇒ - val log = akka.event.Logging(app.eventStream, context.self) + val log = akka.event.Logging(system.eventStream, context.self) } object Actor { @@ -164,17 +163,17 @@ object Actor { /** * This decorator adds invocation 
logging to a Receive function. */ - class LoggingReceive(source: AnyRef, r: Receive)(implicit app: ActorSystem) extends Receive { + class LoggingReceive(source: AnyRef, r: Receive)(implicit system: ActorSystem) extends Receive { def isDefinedAt(o: Any) = { val handled = r.isDefinedAt(o) - app.eventStream.publish(Debug(source, "received " + (if (handled) "handled" else "unhandled") + " message " + o)) + system.eventStream.publish(Debug(LogSource.fromAnyRef(source), "received " + (if (handled) "handled" else "unhandled") + " message " + o)) handled } def apply(o: Any): Unit = r(o) } object LoggingReceive { - def apply(source: AnyRef, r: Receive)(implicit app: ActorSystem): Receive = r match { + def apply(source: AnyRef, r: Receive)(implicit system: ActorSystem): Receive = r match { case _: LoggingReceive ⇒ r case _ ⇒ new LoggingReceive(source, r) } @@ -229,12 +228,12 @@ trait Actor { c } - implicit def app = context.app + implicit def system = context.system /** * The default timeout, based on the config setting 'akka.actor.timeout' */ - implicit def defaultTimeout = app.AkkaConfig.ActorTimeout + implicit def defaultTimeout = system.settings.ActorTimeout /** * Wrap a Receive partial function in a logging enclosure, which sends a @@ -250,7 +249,7 @@ trait Actor { * This method does NOT modify the given Receive unless * akka.actor.debug.receive is set within akka.conf. */ - def loggable(self: AnyRef)(r: Receive): Receive = if (app.AkkaConfig.AddLoggingReceive) LoggingReceive(self, r) else r //TODO FIXME Shouldn't this be in a Loggable-trait? + def loggable(self: AnyRef)(r: Receive): Receive = if (system.settings.AddLoggingReceive) LoggingReceive(self, r) else r //TODO FIXME Shouldn't this be in a Loggable-trait? /** * The 'self' field holds the ActorRef for this actor. @@ -266,7 +265,6 @@ trait Actor { * The reference sender Actor of the last received message. * Is defined if the message was sent from another Actor, else None. 
*/ - @inline final def sender: ActorRef = context.sender /** diff --git a/akka-actor/src/main/scala/akka/actor/ActorCell.scala b/akka-actor/src/main/scala/akka/actor/ActorCell.scala index 2d95768223..330824290f 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorCell.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorCell.scala @@ -9,6 +9,7 @@ import scala.annotation.tailrec import scala.collection.immutable.{ Stack, TreeMap } import java.util.concurrent.TimeUnit import akka.event.Logging.{ Debug, Warning, Error } +import akka.util.{ Duration, Helpers } /** * The actor context - the view of the actor cell from the actor. @@ -17,9 +18,7 @@ import akka.event.Logging.{ Debug, Warning, Error } */ trait ActorContext extends ActorRefFactory with TypedActorFactory { - def self: ActorRef with ScalaActorRef - - def hasMessages: Boolean + def self: ActorRef def receiveTimeout: Option[Long] @@ -45,7 +44,7 @@ trait ActorContext extends ActorRefFactory with TypedActorFactory { def handleChildTerminated(child: ActorRef): Unit - def app: ActorSystem + def system: ActorSystem def parent: ActorRef } @@ -55,34 +54,48 @@ private[akka] object ActorCell { override def initialValue = Stack[ActorContext]() } - val emptyChildrenRefs = TreeMap[String, ActorRef]() + val emptyChildrenRefs = TreeMap[String, ChildRestartStats]() - val emptyChildrenStats = TreeMap[ActorRef, ChildRestartStats]() + final val emptyCancellable: Cancellable = new Cancellable { + def isCancelled = false + def cancel() {} + } + + final val emptyReceiveTimeoutData: (Long, Cancellable) = (-1, emptyCancellable) } +//ACTORCELL IS 64bytes and should stay that way unless very good reason not to (machine sympathy, cache line fit) //vars don't need volatile since it's protected with the mailbox status //Make sure that they are not read/written outside of a message processing (systemInvoke/invoke) private[akka] class ActorCell( - val app: ActorSystem, + val system: ActorSystemImpl, val self: ActorRef with ScalaActorRef, 
val props: Props, val parent: ActorRef, - var receiveTimeout: Option[Long], + /*no member*/ _receiveTimeout: Option[Long], var hotswap: Stack[PartialFunction[Any, Unit]]) extends ActorContext { import ActorCell._ + def systemImpl = system + protected final def guardian = self - protected def typedActor = app.typedActor + protected def typedActor = system.typedActor - final def provider = app.provider + final def provider = system.provider - var futureTimeout: Option[Cancellable] = None + override def receiveTimeout: Option[Long] = if (receiveTimeoutData._1 > 0) Some(receiveTimeoutData._1) else None - var childrenRefs = emptyChildrenRefs + override def receiveTimeout_=(timeout: Option[Long]): Unit = { + val timeoutMs = if (timeout.isDefined && timeout.get > 0) timeout.get else -1 + receiveTimeoutData = (timeoutMs, receiveTimeoutData._2) + } - var childrenStats = emptyChildrenStats + var receiveTimeoutData: (Long, Cancellable) = + if (_receiveTimeout.isDefined) (_receiveTimeout.get, emptyCancellable) else emptyReceiveTimeoutData + + var childrenRefs: TreeMap[String, ChildRestartStats] = emptyChildrenRefs var currentMessage: Envelope = null @@ -90,15 +103,22 @@ private[akka] class ActorCell( var stopping = false - @inline - final def dispatcher: MessageDispatcher = if (props.dispatcher == Props.defaultDispatcher) app.dispatcher else props.dispatcher - - final def isShutdown: Boolean = mailbox.isClosed - @volatile //This must be volatile since it isn't protected by the mailbox status var mailbox: Mailbox = _ - def hasMessages: Boolean = mailbox.hasMessages + var nextNameSequence: Long = 0 + + //Not thread safe, so should only be used inside the actor that inhabits this ActorCell + override protected def randomName(): String = { + val n = nextNameSequence + 1 + nextNameSequence = n + Helpers.base64(n) + } + + @inline + final def dispatcher: MessageDispatcher = if (props.dispatcher == Props.defaultDispatcher) system.dispatcher else props.dispatcher + + final def 
isTerminated: Boolean = mailbox.isClosed final def start(): Unit = { mailbox = dispatcher.createMailbox(this) @@ -130,21 +150,18 @@ private[akka] class ActorCell( subject } - final def children: Iterable[ActorRef] = childrenStats.keys + final def children: Iterable[ActorRef] = childrenRefs.values.view.map(_.child) - final def getChild(name: String): Option[ActorRef] = { - val isClosed = mailbox.isClosed // fence plus volatile read - if (isClosed) None - else childrenRefs.get(name) - } + final def getChild(name: String): Option[ActorRef] = + if (isTerminated) None else childrenRefs.get(name).map(_.child) final def tell(message: Any, sender: ActorRef): Unit = - dispatcher.dispatch(this, Envelope(message, if (sender eq null) app.deadLetters else sender)) + dispatcher.dispatch(this, Envelope(message, if (sender eq null) system.deadLetters else sender)) final def sender: ActorRef = currentMessage match { - case null ⇒ app.deadLetters + case null ⇒ system.deadLetters case msg if msg.sender ne null ⇒ msg.sender - case _ ⇒ app.deadLetters + case _ ⇒ system.deadLetters } //This method is in charge of setting up the contextStack and create a new instance of the Actor @@ -165,6 +182,7 @@ private[akka] class ActorCell( } } + //Memory consistency is handled by the Mailbox (reading mailbox status then processing messages, then writing mailbox status final def systemInvoke(message: SystemMessage) { def create(): Unit = try { @@ -172,11 +190,11 @@ private[akka] class ActorCell( actor = created created.preStart() checkReceiveTimeout - if (app.AkkaConfig.DebugLifecycle) app.eventStream.publish(Debug(self, "started (" + actor + ")")) + if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "started (" + actor + ")")) } catch { case e ⇒ try { - app.eventStream.publish(Error(e, self, "error while creating actor")) + system.eventStream.publish(Error(e, self.toString, "error while creating actor")) // prevent any further messages to be processed until the 
actor has been restarted dispatcher.suspend(this) } finally { @@ -186,7 +204,7 @@ private[akka] class ActorCell( def recreate(cause: Throwable): Unit = try { val failedActor = actor - if (app.AkkaConfig.DebugLifecycle) app.eventStream.publish(Debug(self, "restarting")) + if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "restarting")) val freshActor = newActor() if (failedActor ne null) { val c = currentMessage //One read only plz @@ -200,14 +218,14 @@ private[akka] class ActorCell( } actor = freshActor // assign it here so if preStart fails, we can null out the sef-refs next call freshActor.postRestart(cause) - if (app.AkkaConfig.DebugLifecycle) app.eventStream.publish(Debug(self, "restarted")) + if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "restarted")) dispatcher.resume(this) //FIXME should this be moved down? props.faultHandler.handleSupervisorRestarted(cause, self, children) } catch { case e ⇒ try { - app.eventStream.publish(Error(e, self, "error while creating actor")) + system.eventStream.publish(Error(e, self.toString, "error while creating actor")) // prevent any further messages to be processed until the actor has been restarted dispatcher.suspend(this) } finally { @@ -226,95 +244,90 @@ private[akka] class ActorCell( val c = children if (c.isEmpty) doTerminate() else { - if (app.AkkaConfig.DebugLifecycle) app.eventStream.publish(Debug(self, "stopping")) + if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "stopping")) for (child ← c) child.stop() stopping = true } } def supervise(child: ActorRef): Unit = { - val stats = childrenStats - if (!stats.contains(child)) { - childrenRefs = childrenRefs.updated(child.name, child) - childrenStats = childrenStats.updated(child, ChildRestartStats()) - if (app.AkkaConfig.DebugLifecycle) app.eventStream.publish(Debug(self, "now supervising " + child)) - } else app.eventStream.publish(Warning(self, "Already 
supervising " + child)) + val stat = childrenRefs.get(child.name) + if (stat.isDefined) { + if (stat.get.child == child) + system.eventStream.publish(Warning(self.toString, "Already supervising " + child)) + else + system.eventStream.publish(Warning(self.toString, "Already supervising other child with same name '" + child.name + "', old: " + stat.get + " new: " + child)) + } else { + childrenRefs = childrenRefs.updated(child.name, ChildRestartStats(child)) + if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "now supervising " + child)) + } } try { - val isClosed = mailbox.isClosed //Fence plus volatile read - if (!isClosed) { - if (stopping) message match { - case Terminate() ⇒ terminate() // to allow retry - case _ ⇒ - } - else message match { - case Create() ⇒ create() - case Recreate(cause) ⇒ recreate(cause) - case Link(subject) ⇒ - app.deathWatch.subscribe(self, subject) - if (app.AkkaConfig.DebugLifecycle) app.eventStream.publish(Debug(self, "now monitoring " + subject)) - case Unlink(subject) ⇒ - app.deathWatch.unsubscribe(self, subject) - if (app.AkkaConfig.DebugLifecycle) app.eventStream.publish(Debug(self, "stopped monitoring " + subject)) - case Suspend() ⇒ suspend() - case Resume() ⇒ resume() - case Terminate() ⇒ terminate() - case Supervise(child) ⇒ supervise(child) - } + if (stopping) message match { + case Terminate() ⇒ terminate() // to allow retry + case _ ⇒ + } + else message match { + case Create() ⇒ create() + case Recreate(cause) ⇒ recreate(cause) + case Link(subject) ⇒ + system.deathWatch.subscribe(self, subject) + if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "now monitoring " + subject)) + case Unlink(subject) ⇒ + system.deathWatch.unsubscribe(self, subject) + if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "stopped monitoring " + subject)) + case Suspend() ⇒ suspend() + case Resume() ⇒ resume() + case Terminate() ⇒ terminate() + case 
Supervise(child) ⇒ supervise(child) } } catch { case e ⇒ //Should we really catch everything here? - app.eventStream.publish(Error(e, self, "error while processing " + message)) + system.eventStream.publish(Error(e, self.toString, "error while processing " + message)) //TODO FIXME How should problems here be handled? throw e } } + //Memory consistency is handled by the Mailbox (reading mailbox status then processing messages, then writing mailbox status final def invoke(messageHandle: Envelope) { try { - val isClosed = mailbox.isClosed //Fence plus volatile read - if (!isClosed) { - currentMessage = messageHandle + currentMessage = messageHandle + try { try { - try { - cancelReceiveTimeout() // FIXME: leave this here? - messageHandle.message match { - case msg: AutoReceivedMessage ⇒ autoReceiveMessage(messageHandle) - case msg ⇒ - if (stopping) { - // receiving Terminated in response to stopping children is too common to generate noise - if (!msg.isInstanceOf[Terminated]) app.deadLetterMailbox.enqueue(self, messageHandle) - } else { - actor(msg) - } - } - currentMessage = null // reset current message after successful invocation - } catch { - case e ⇒ - app.eventStream.publish(Error(e, self, e.getMessage)) - - // prevent any further messages to be processed until the actor has been restarted - dispatcher.suspend(this) - - // make sure that InterruptedException does not leave this thread - if (e.isInstanceOf[InterruptedException]) { - val ex = ActorInterruptedException(e) - props.faultHandler.handleSupervisorFailing(self, children) - parent.tell(Failed(ex), self) - throw e //Re-throw InterruptedExceptions as expected - } else { - props.faultHandler.handleSupervisorFailing(self, children) - parent.tell(Failed(e), self) - } - } finally { - checkReceiveTimeout // Reschedule receive timeout + cancelReceiveTimeout() // FIXME: leave this here? 
+ messageHandle.message match { + case msg: AutoReceivedMessage ⇒ autoReceiveMessage(messageHandle) + case msg if stopping ⇒ // receiving Terminated in response to stopping children is too common to generate noise + if (!msg.isInstanceOf[Terminated]) system.deadLetterMailbox.enqueue(self, messageHandle) + case msg ⇒ actor(msg) } + currentMessage = null // reset current message after successful invocation } catch { case e ⇒ - app.eventStream.publish(Error(e, self, e.getMessage)) - throw e + system.eventStream.publish(Error(e, self.toString, e.getMessage)) + + // prevent any further messages to be processed until the actor has been restarted + dispatcher.suspend(this) + + // make sure that InterruptedException does not leave this thread + if (e.isInstanceOf[InterruptedException]) { + val ex = ActorInterruptedException(e) + props.faultHandler.handleSupervisorFailing(self, children) + parent.tell(Failed(ex), self) + throw e //Re-throw InterruptedExceptions as expected + } else { + props.faultHandler.handleSupervisorFailing(self, children) + parent.tell(Failed(e), self) + } + } finally { + checkReceiveTimeout // Reschedule receive timeout } + } catch { + case e ⇒ + system.eventStream.publish(Error(e, self.toString, e.getMessage)) + throw e } } } @@ -330,11 +343,11 @@ private[akka] class ActorCell( } def autoReceiveMessage(msg: Envelope) { - if (app.AkkaConfig.DebugAutoReceive) app.eventStream.publish(Debug(self, "received AutoReceiveMessage " + msg)) + if (system.settings.DebugAutoReceive) system.eventStream.publish(Debug(self.toString, "received AutoReceiveMessage " + msg)) if (stopping) msg.message match { case ChildTerminated ⇒ handleChildTerminated(sender) - case _ ⇒ app.deadLetterMailbox.enqueue(self, msg) + case _ ⇒ system.deadLetterMailbox.enqueue(self, msg) } else msg.message match { case HotSwap(code, discardOld) ⇒ become(code(self), discardOld) @@ -347,7 +360,9 @@ private[akka] class ActorCell( } private def doTerminate() { - 
app.provider.evict(self.path.toString) + if (!system.provider.evict(self.path.toString)) + system.eventStream.publish(Warning(self.toString, "evict of " + self.path.toString + " failed")) + dispatcher.detach(this) try { @@ -356,8 +371,8 @@ private[akka] class ActorCell( } finally { try { parent.tell(ChildTerminated, self) - app.deathWatch.publish(Terminated(self)) - if (app.AkkaConfig.DebugLifecycle) app.eventStream.publish(Debug(self, "stopped")) + system.deathWatch.publish(Terminated(self)) + if (system.settings.DebugLifecycle) system.eventStream.publish(Debug(self.toString, "stopped")) } finally { currentMessage = null clearActorFields() @@ -365,34 +380,36 @@ private[akka] class ActorCell( } } - final def handleFailure(child: ActorRef, cause: Throwable): Unit = childrenStats.get(child) match { - case Some(stats) ⇒ if (!props.faultHandler.handleFailure(child, cause, stats, childrenStats)) throw cause - case None ⇒ app.eventStream.publish(Warning(self, "dropping Failed(" + cause + ") from unknown child")) + final def handleFailure(child: ActorRef, cause: Throwable): Unit = childrenRefs.get(child.name) match { + case Some(stats) if stats.child == child ⇒ if (!props.faultHandler.handleFailure(child, cause, stats, childrenRefs.values)) throw cause + case Some(stats) ⇒ system.eventStream.publish(Warning(self.toString, "dropping Failed(" + cause + ") from unknown child " + child + " matching names but not the same, was: " + stats.child)) + case None ⇒ system.eventStream.publish(Warning(self.toString, "dropping Failed(" + cause + ") from unknown child " + child)) } final def handleChildTerminated(child: ActorRef): Unit = { childrenRefs -= child.name - childrenStats -= child props.faultHandler.handleChildTerminated(child, children) - if (stopping && childrenStats.isEmpty) doTerminate() + if (stopping && childrenRefs.isEmpty) doTerminate() } // ➡➡➡ NEVER SEND THE SAME SYSTEM MESSAGE OBJECT TO TWO ACTORS ⬅⬅⬅ final def restart(cause: Throwable): Unit = 
dispatcher.systemDispatch(this, Recreate(cause)) final def checkReceiveTimeout() { - cancelReceiveTimeout() - val recvtimeout = receiveTimeout - if (recvtimeout.isDefined && dispatcher.mailboxIsEmpty(this)) { + val recvtimeout = receiveTimeoutData + if (recvtimeout._1 > 0 && dispatcher.mailboxIsEmpty(this)) { + recvtimeout._2.cancel() //Cancel any ongoing future //Only reschedule if desired and there are currently no more messages to be processed - futureTimeout = Some(app.scheduler.scheduleOnce(self, ReceiveTimeout, recvtimeout.get, TimeUnit.MILLISECONDS)) - } + receiveTimeoutData = (recvtimeout._1, system.scheduler.scheduleOnce(self, ReceiveTimeout, Duration(recvtimeout._1, TimeUnit.MILLISECONDS))) + } else cancelReceiveTimeout() + } final def cancelReceiveTimeout() { - if (futureTimeout.isDefined) { - futureTimeout.get.cancel() - futureTimeout = None + //Only cancel if + if (receiveTimeoutData._2 ne emptyCancellable) { + receiveTimeoutData._2.cancel() + receiveTimeoutData = (receiveTimeoutData._1, emptyCancellable) } } diff --git a/akka-actor/src/main/scala/akka/actor/ActorPath.scala b/akka-actor/src/main/scala/akka/actor/ActorPath.scala index 1b98863b29..a741705e4f 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorPath.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorPath.scala @@ -4,6 +4,8 @@ package akka.actor +import akka.remote.RemoteAddress + object ActorPath { final val separator = "/" @@ -12,14 +14,14 @@ object ActorPath { /** * Create an actor path from a string. */ - def apply(app: ActorSystem, path: String): ActorPath = - apply(app, split(path)) + def apply(system: ActorSystem, path: String): ActorPath = + apply(system, split(path)) /** * Create an actor path from an iterable. 
*/ - def apply(app: ActorSystem, path: Iterable[String]): ActorPath = - path.foldLeft(app.root)(_ / _) + def apply(system: ActorSystem, path: Iterable[String]): ActorPath = + path.foldLeft(system.asInstanceOf[ActorSystemImpl].provider.rootPath)(_ / _) /** * Split a string path into an iterable. @@ -58,9 +60,9 @@ object ActorPath { */ trait ActorPath { /** - * The akka application for this path. + * The RemoteAddress for this path. */ - def app: ActorSystem + def remoteAddress: RemoteAddress /** * The name of the actor that this path refers to. @@ -78,9 +80,9 @@ trait ActorPath { def /(child: String): ActorPath /** - * Find the ActorRef for this path. + * Recursively create a descendant’s path by appending all child names. */ - def ref: Option[ActorRef] + def /(child: Iterable[String]): ActorPath = (this /: child)(_ / _) /** * String representation of this path. Different from toString for root path. @@ -98,15 +100,13 @@ trait ActorPath { def isRoot: Boolean } -class RootActorPath(val app: ActorSystem) extends ActorPath { +class RootActorPath(val remoteAddress: RemoteAddress) extends ActorPath { def name: String = "/" def parent: ActorPath = this - def /(child: String): ActorPath = new ChildActorPath(app, this, child) - - def ref: Option[ActorRef] = app.actorFor(path) + def /(child: String): ActorPath = new ChildActorPath(remoteAddress, this, child) def string: String = "" @@ -117,11 +117,9 @@ class RootActorPath(val app: ActorSystem) extends ActorPath { override def toString = ActorPath.separator } -class ChildActorPath(val app: ActorSystem, val parent: ActorPath, val name: String) extends ActorPath { +class ChildActorPath(val remoteAddress: RemoteAddress, val parent: ActorPath, val name: String) extends ActorPath { - def /(child: String): ActorPath = new ChildActorPath(app, this, child) - - def ref: Option[ActorRef] = app.actorFor(path) + def /(child: String): ActorPath = new ChildActorPath(remoteAddress, this, child) def string: String = parent.string + 
ActorPath.separator + name diff --git a/akka-actor/src/main/scala/akka/actor/ActorRef.scala b/akka-actor/src/main/scala/akka/actor/ActorRef.scala index 5227dc4d44..e3229fba30 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorRef.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorRef.scala @@ -12,6 +12,8 @@ import akka.serialization.Serialization import java.net.InetSocketAddress import akka.remote.RemoteAddress import java.util.concurrent.TimeUnit +import akka.event.EventStream +import akka.event.DeathWatch /** * ActorRef is an immutable and serializable handle to an Actor. @@ -124,7 +126,7 @@ abstract class ActorRef extends java.lang.Comparable[ActorRef] with Serializable /** * Is the actor shut down? */ - def isShutdown: Boolean + def isTerminated: Boolean /** * Registers this actor to be a death monitor of the provided ActorRef @@ -160,7 +162,7 @@ abstract class ActorRef extends java.lang.Comparable[ActorRef] with Serializable * @author Jonas Bonér */ class LocalActorRef private[akka] ( - app: ActorSystem, + system: ActorSystemImpl, _props: Props, _supervisor: ActorRef, val path: ActorPath, @@ -171,18 +173,26 @@ class LocalActorRef private[akka] ( def name = path.name - def address: String = app.address + path.toString + def address: String = path.toString + /* + * actorCell.start() publishes actorCell & this to the dispatcher, which + * means that messages may be processed theoretically before the constructor + * ends. The JMM guarantees visibility for final fields only after the end + * of the constructor, so publish the actorCell safely by making it a + * @volatile var which is NOT TO BE WRITTEN TO. The alternative would be to + * move start() outside of the constructor, which would basically require + * us to use purely factory methods for creating LocalActorRefs. 
+ */ @volatile - private var actorCell = new ActorCell(app, this, _props, _supervisor, _receiveTimeout, _hotswap) + private var actorCell = new ActorCell(system, this, _props, _supervisor, _receiveTimeout, _hotswap) actorCell.start() /** - * Is the actor shut down? + * Is the actor terminated? * If this method returns true, it will never return false again, but if it returns false, you cannot be sure if it's alive still (race condition) */ - //FIXME TODO RENAME TO isTerminated - def isShutdown: Boolean = actorCell.isShutdown + override def isTerminated: Boolean = actorCell.isTerminated /** * Suspends the actor so that it will not process messages until resumed. The @@ -231,7 +241,7 @@ class LocalActorRef private[akka] ( // @deprecated("This method does a spin-lock to block for the actor, which might never be there, do not use this", "2.0") protected[akka] def underlyingActorInstance: Actor = { var instance = actorCell.actor - while ((instance eq null) && !actorCell.isShutdown) { + while ((instance eq null) && !actorCell.isTerminated) { try { Thread.sleep(1) } catch { case i: InterruptedException ⇒ } instance = actorCell.actor } @@ -295,17 +305,17 @@ trait ScalaActorRef { ref: ActorRef ⇒ */ case class SerializedActorRef(hostname: String, port: Int, path: String) { - import akka.serialization.Serialization.app + import akka.serialization.Serialization.system def this(remoteAddress: RemoteAddress, path: String) = this(remoteAddress.hostname, remoteAddress.port, path) def this(remoteAddress: InetSocketAddress, path: String) = this(remoteAddress.getAddress.getHostAddress, remoteAddress.getPort, path) //TODO FIXME REMOVE @throws(classOf[java.io.ObjectStreamException]) def readResolve(): AnyRef = { - if (app.value eq null) throw new IllegalStateException( + if (system.value eq null) throw new IllegalStateException( "Trying to deserialize a serialized ActorRef without an ActorSystem in scope." + - " Use akka.serialization.Serialization.app.withValue(akkaApplication) { ... 
}") - app.value.provider.deserialize(this) match { + " Use akka.serialization.Serialization.system.withValue(system) { ... }") + system.value.provider.deserialize(this) match { case Some(actor) ⇒ actor case None ⇒ throw new IllegalStateException("Could not deserialize ActorRef") } @@ -328,7 +338,7 @@ trait MinimalActorRef extends ActorRef with ScalaActorRef { def stop(): Unit = () - def isShutdown = false + def isTerminated = false def !(message: Any)(implicit sender: ActorRef = null): Unit = () @@ -344,31 +354,42 @@ case class DeadLetter(message: Any, sender: ActorRef, recipient: ActorRef) object DeadLetterActorRef { class SerializedDeadLetterActorRef extends Serializable { //TODO implement as Protobuf for performance? @throws(classOf[java.io.ObjectStreamException]) - private def readResolve(): AnyRef = Serialization.app.value.deadLetters + private def readResolve(): AnyRef = Serialization.system.value.deadLetters } val serialized = new SerializedDeadLetterActorRef } -class DeadLetterActorRef(val app: ActorSystem) extends MinimalActorRef { - val brokenPromise = new KeptPromise[Any](Left(new ActorKilledException("In DeadLetterActorRef, promises are always broken.")))(app.dispatcher) +class DeadLetterActorRef(val eventStream: EventStream) extends MinimalActorRef { + @volatile + private var brokenPromise: Future[Any] = _ + @volatile + private var _path: ActorPath = _ + def path: ActorPath = { + assert(_path != null) + _path + } + + private[akka] def init(dispatcher: MessageDispatcher, rootPath: ActorPath) { + _path = rootPath / "nul" + brokenPromise = new KeptPromise[Any](Left(new ActorKilledException("In DeadLetterActorRef, promises are always broken.")))(dispatcher) + } override val name: String = "dead-letter" - // FIXME (actor path): put this under the sys guardian supervisor - val path: ActorPath = app.root / "sys" / name + def address: String = path.toString - def address: String = app.address + path.toString - - override def isShutdown(): Boolean = true + 
override def isTerminated(): Boolean = true override def !(message: Any)(implicit sender: ActorRef = null): Unit = message match { - case d: DeadLetter ⇒ app.eventStream.publish(d) - case _ ⇒ app.eventStream.publish(DeadLetter(message, sender, this)) + case d: DeadLetter ⇒ eventStream.publish(d) + case _ ⇒ eventStream.publish(DeadLetter(message, sender, this)) } override def ?(message: Any)(implicit timeout: Timeout): Future[Any] = { - app.eventStream.publish(DeadLetter(message, app.provider.dummyAskSender, this)) + eventStream.publish(DeadLetter(message, this, this)) + // leave this in: guard with good visibility against really stupid/weird errors + assert(brokenPromise != null) brokenPromise } @@ -376,16 +397,15 @@ class DeadLetterActorRef(val app: ActorSystem) extends MinimalActorRef { private def writeReplace(): AnyRef = DeadLetterActorRef.serialized } -abstract class AskActorRef(protected val app: ActorSystem)(timeout: Timeout = app.AkkaConfig.ActorTimeout, dispatcher: MessageDispatcher = app.dispatcher) extends MinimalActorRef { +abstract class AskActorRef(val path: ActorPath, provider: ActorRefProvider, deathWatch: DeathWatch, timeout: Timeout, val dispatcher: MessageDispatcher) extends MinimalActorRef { final val result = new DefaultPromise[Any](timeout)(dispatcher) - // FIXME (actor path): put this under the tmp guardian supervisor - val path: ActorPath = app.root / "tmp" / name + override def name = path.name - def address: String = app.address + path.toString + def address: String = path.toString { - val callback: Future[Any] ⇒ Unit = { _ ⇒ app.deathWatch.publish(Terminated(AskActorRef.this)); whenDone() } + val callback: Future[Any] ⇒ Unit = { _ ⇒ deathWatch.publish(Terminated(AskActorRef.this)); whenDone() } result onComplete callback result onTimeout callback } @@ -406,10 +426,10 @@ abstract class AskActorRef(protected val app: ActorSystem)(timeout: Timeout = ap override def ?(message: Any)(implicit timeout: Timeout): Future[Any] = new 
KeptPromise[Any](Left(new UnsupportedOperationException("Ask/? is not supported for %s".format(getClass.getName))))(dispatcher) - override def isShutdown = result.isCompleted || result.isExpired + override def isTerminated = result.isCompleted || result.isExpired - override def stop(): Unit = if (!isShutdown) result.completeWithException(new ActorKilledException("Stopped")) + override def stop(): Unit = if (!isTerminated) result.completeWithException(new ActorKilledException("Stopped")) @throws(classOf[java.io.ObjectStreamException]) - private def writeReplace(): AnyRef = app.provider.serialize(this) + private def writeReplace(): AnyRef = provider.serialize(this) } diff --git a/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala b/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala index 67ff38f847..f3f8ac3f48 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala @@ -5,41 +5,69 @@ package akka.actor import java.util.concurrent.atomic.AtomicLong -import java.util.concurrent.ConcurrentHashMap -import java.util.concurrent.{ TimeUnit, Executors } - +import java.util.concurrent.{ ConcurrentHashMap, TimeUnit } import scala.annotation.tailrec - import org.jboss.netty.akka.util.{ TimerTask, HashedWheelTimer } - import akka.actor.Timeout.intToTimeout import akka.config.ConfigurationException -import akka.dispatch.{ SystemMessage, Supervise, Promise, MessageDispatcher, Future, DefaultPromise } -import akka.event.{ Logging, DeathWatch, ActorClassification } +import akka.dispatch.{ SystemMessage, Supervise, Promise, MessageDispatcher, Future, DefaultPromise, Dispatcher, Mailbox, Envelope } +import akka.event.{ Logging, DeathWatch, ActorClassification, EventStream } import akka.routing.{ ScatterGatherFirstCompletedRouter, Routing, RouterType, Router, RoutedProps, RoutedActorRef, RoundRobinRouter, RandomRouter, LocalConnectionManager, DirectRouter, BroadcastRouter } -import 
akka.util.Helpers import akka.AkkaException +import com.eaio.uuid.UUID +import akka.util.{ Duration, Switch, Helpers } +import akka.remote.RemoteAddress +import akka.remote.LocalOnly /** * Interface for all ActorRef providers to implement. */ trait ActorRefProvider { - def actorOf(props: Props, supervisor: ActorRef, name: String): ActorRef = actorOf(props, supervisor, name, false) + def actorOf(system: ActorSystemImpl, props: Props, supervisor: ActorRef, name: String): ActorRef = actorOf(system, props, supervisor, name, false) def actorFor(path: Iterable[String]): Option[ActorRef] + def guardian: ActorRef + + def systemGuardian: ActorRef + + def deathWatch: DeathWatch + + // FIXME: remove/replace? + def nodename: String + // FIXME: remove/replace? + def clustername: String + /** - * What deployer will be used to resolve deployment configuration? + * The root path for all actors within this actor system, including remote + * address if enabled. */ + def rootPath: ActorPath + + def settings: ActorSystem.Settings + + def init(system: ActorSystemImpl) + private[akka] def deployer: Deployer private[akka] def scheduler: Scheduler - private[akka] def actorOf(props: Props, supervisor: ActorRef, name: String, systemService: Boolean): ActorRef + /** + * Create an Actor with the given name below the given supervisor. + */ + private[akka] def actorOf(system: ActorSystemImpl, props: Props, supervisor: ActorRef, name: String, systemService: Boolean): ActorRef - private[akka] def actorOf(props: Props, supervisor: ActorRef, path: ActorPath, systemService: Boolean): ActorRef + /** + * Create an Actor with the given full path below the given supervisor. + * + * FIXME: Remove! this is dangerous! + */ + private[akka] def actorOf(system: ActorSystemImpl, props: Props, supervisor: ActorRef, path: ActorPath, systemService: Boolean): ActorRef + /** + * Remove this path from the lookup map. 
+ */ private[akka] def evict(path: String): Boolean private[akka] def deserialize(actor: SerializedActorRef): Option[ActorRef] @@ -48,15 +76,16 @@ trait ActorRefProvider { private[akka] def createDeathWatch(): DeathWatch + /** + * Create AskActorRef to hook up message send to recipient with Future receiver. + */ private[akka] def ask(message: Any, recipient: ActorRef, within: Timeout): Future[Any] - private[akka] def theOneWhoWalksTheBubblesOfSpaceTime: ActorRef - - private[akka] def terminationFuture: Future[ActorSystem.ExitStatus] - - private[akka] def dummyAskSender: ActorRef - - private[akka] def tempPath: String + /** + * This Future is completed upon termination of this ActorRefProvider, which + * is usually initiated by stopping the guardian via ActorSystem.stop(). + */ + private[akka] def terminationFuture: Future[Unit] } /** @@ -64,23 +93,20 @@ trait ActorRefProvider { */ trait ActorRefFactory { - def provider: ActorRefProvider + protected def systemImpl: ActorSystemImpl - def dispatcher: MessageDispatcher + protected def provider: ActorRefProvider + + protected def dispatcher: MessageDispatcher /** * Father of all children created by this interface. 
*/ protected def guardian: ActorRef - private val number = new AtomicLong + protected def randomName(): String - private def randomName: String = { - val l = number.getAndIncrement() - Helpers.base64(l) - } - - def actorOf(props: Props): ActorRef = provider.actorOf(props, guardian, randomName, false) + def actorOf(props: Props): ActorRef = provider.actorOf(systemImpl, props, guardian, randomName(), false) /* * TODO this will have to go at some point, because creating two actors with @@ -90,7 +116,7 @@ trait ActorRefFactory { def actorOf(props: Props, name: String): ActorRef = { if (name == null || name == "" || name.startsWith("$")) throw new ActorInitializationException("actor name must not be null, empty or start with $") - provider.actorOf(props, guardian, name, false) + provider.actorOf(systemImpl, props, guardian, name, false) } def actorOf[T <: Actor](implicit m: Manifest[T]): ActorRef = actorOf(Props(m.erasure.asInstanceOf[Class[_ <: Actor]])) @@ -104,6 +130,8 @@ trait ActorRefFactory { def actorOf(creator: UntypedActorFactory): ActorRef = actorOf(Props(() ⇒ creator.create())) + def actorFor(path: ActorPath): Option[ActorRef] = actorFor(path.path) + def actorFor(path: String): Option[ActorRef] = actorFor(ActorPath.split(path)) def actorFor(path: Iterable[String]): Option[ActorRef] = provider.actorFor(path) @@ -114,48 +142,64 @@ class ActorRefProviderException(message: String) extends AkkaException(message) /** * Local ActorRef provider. 
*/ -class LocalActorRefProvider(val app: ActorSystem) extends ActorRefProvider { +class LocalActorRefProvider( + val settings: ActorSystem.Settings, + val eventStream: EventStream, + val scheduler: Scheduler, + val rootPath: ActorPath, + val nodename: String, + val clustername: String) extends ActorRefProvider { - val log = Logging(app.eventStream, this) - - private[akka] val deployer: Deployer = new Deployer(app) - - val terminationFuture = new DefaultPromise[ActorSystem.ExitStatus](Timeout.never)(app.dispatcher) - - private[akka] val scheduler: Scheduler = { //TODO FIXME Make this configurable - val s = new DefaultScheduler(new HashedWheelTimer(log, Executors.defaultThreadFactory, 100, TimeUnit.MILLISECONDS, 512)) - terminationFuture.onComplete(_ ⇒ s.stop()) - s + def this(settings: ActorSystem.Settings, eventStream: EventStream, scheduler: Scheduler) { + this(settings, eventStream, scheduler, new RootActorPath(LocalOnly), "local", "local") } + val log = Logging(eventStream, "LocalActorRefProvider") + + private[akka] val deployer: Deployer = new Deployer(settings, eventStream, nodename) + + /* + * generate name for temporary actor refs + */ + private val tempNumber = new AtomicLong + def tempName = "$_" + Helpers.base64(tempNumber.getAndIncrement()) + private val tempNode = rootPath / "tmp" + def tempPath = tempNode / tempName + + // FIXME (actor path): this could become a cache for the new tree traversal actorFor + // currently still used for tmp actors (e.g. ask actor refs) + private val actors = new ConcurrentHashMap[String, AnyRef] + /** * Top-level anchor for the supervision hierarchy of this actor system. Will * receive only Supervise/ChildTerminated system messages or Failure message. 
*/ private[akka] val theOneWhoWalksTheBubblesOfSpaceTime: ActorRef = new MinimalActorRef { - @volatile - var stopped = false + val stopped = new Switch(false) - override val name = app.name + "-bubble-walker" + @volatile + var causeOfTermination: Option[Throwable] = None + + override val name = "bubble-walker" // FIXME (actor path): move the root path to the new root guardian - val path = app.root + val path = rootPath / name - val address = app.address + path.toString + val address = path.toString override def toString = name - override def stop() = stopped = true + override def stop() = stopped switchOn { terminationFuture.complete(causeOfTermination.toLeft(())) } - override def isShutdown = stopped + override def isTerminated = stopped.isOn - override def !(message: Any)(implicit sender: ActorRef = null): Unit = message match { - case Failed(ex) ⇒ sender.stop() - case ChildTerminated ⇒ terminationFuture.completeWithResult(ActorSystem.Stopped) + override def !(message: Any)(implicit sender: ActorRef = null): Unit = stopped.ifOff(message match { + case Failed(ex) ⇒ causeOfTermination = Some(ex); sender.stop() + case ChildTerminated ⇒ stop() case _ ⇒ log.error(this + " received unexpected message " + message) - } + }) - protected[akka] override def sendSystemMessage(message: SystemMessage) { + protected[akka] override def sendSystemMessage(message: SystemMessage): Unit = stopped ifOff { message match { case Supervise(child) ⇒ // TODO register child in some map to keep track of it and enable shutdown after all dead case _ ⇒ log.error(this + " received unexpected system message " + message) @@ -163,12 +207,54 @@ class LocalActorRefProvider(val app: ActorSystem) extends ActorRefProvider { } } - // FIXME (actor path): this could become a cache for the new tree traversal actorFor - // currently still used for tmp actors (e.g. 
ask actor refs) - private val actors = new ConcurrentHashMap[String, AnyRef] + private class Guardian extends Actor { + def receive = { + case Terminated(_) ⇒ context.self.stop() + } + } + private class SystemGuardian extends Actor { + def receive = { + case Terminated(_) ⇒ + eventStream.stopDefaultLoggers() + context.self.stop() + } + } + private val guardianFaultHandlingStrategy = { + import akka.actor.FaultHandlingStrategy._ + OneForOneStrategy { + case _: ActorKilledException ⇒ Stop + case _: ActorInitializationException ⇒ Stop + case _: Exception ⇒ Restart + } + } + private val guardianProps = Props(new Guardian).withFaultHandler(guardianFaultHandlingStrategy) - // FIXME (actor path): should start at the new root guardian, and not use the tail (just to avoid the expected "app" name for now) - def actorFor(path: Iterable[String]): Option[ActorRef] = findInCache(ActorPath.join(path)) orElse findInTree(Some(app.guardian), path.tail) + /* + * The problem is that ActorRefs need a reference to the ActorSystem to + * provide their service. Hence they cannot be created while the + * constructors of ActorSystem and ActorRefProvider are still running. + * The solution is to split out that last part into an init() method, + * but it also requires these references to be @volatile and lazy. 
+ */ + @volatile + private var system: ActorSystemImpl = _ + def dispatcher: MessageDispatcher = system.dispatcher + lazy val terminationFuture: DefaultPromise[Unit] = new DefaultPromise[Unit](Timeout.never)(dispatcher) + lazy val rootGuardian: ActorRef = actorOf(system, guardianProps, theOneWhoWalksTheBubblesOfSpaceTime, rootPath, true) + lazy val guardian: ActorRef = actorOf(system, guardianProps, rootGuardian, "app", true) + lazy val systemGuardian: ActorRef = actorOf(system, guardianProps.withCreator(new SystemGuardian), rootGuardian, "sys", true) + + val deathWatch = createDeathWatch() + + def init(_system: ActorSystemImpl) { + system = _system + // chain death watchers so that killing guardian stops the application + deathWatch.subscribe(systemGuardian, guardian) + deathWatch.subscribe(rootGuardian, systemGuardian) + } + + // FIXME (actor path): should start at the new root guardian, and not use the tail (just to avoid the expected "system" name for now) + def actorFor(path: Iterable[String]): Option[ActorRef] = findInCache(ActorPath.join(path)) orElse findInTree(Some(guardian), path.tail) @tailrec private def findInTree(start: Option[ActorRef], path: Iterable[String]): Option[ActorRef] = { @@ -193,12 +279,12 @@ class LocalActorRefProvider(val app: ActorSystem) extends ActorRefProvider { */ private[akka] def evict(path: String): Boolean = actors.remove(path) ne null - private[akka] def actorOf(props: Props, supervisor: ActorRef, name: String, systemService: Boolean): ActorRef = - actorOf(props, supervisor, supervisor.path / name, systemService) + private[akka] def actorOf(system: ActorSystemImpl, props: Props, supervisor: ActorRef, name: String, systemService: Boolean): ActorRef = + actorOf(system, props, supervisor, supervisor.path / name, systemService) - private[akka] def actorOf(props: Props, supervisor: ActorRef, path: ActorPath, systemService: Boolean): ActorRef = { + private[akka] def actorOf(system: ActorSystemImpl, props: Props, supervisor: ActorRef, 
path: ActorPath, systemService: Boolean): ActorRef = { val name = path.name - val newFuture = Promise[ActorRef](5000)(app.dispatcher) // FIXME is this proper timeout? + val newFuture = Promise[ActorRef](5000)(dispatcher) // FIXME is this proper timeout? actors.putIfAbsent(path.toString, newFuture) match { case null ⇒ @@ -207,17 +293,17 @@ class LocalActorRefProvider(val app: ActorSystem) extends ActorRefProvider { // create a local actor case None | Some(DeploymentConfig.Deploy(_, _, DeploymentConfig.Direct, _, DeploymentConfig.LocalScope)) ⇒ - new LocalActorRef(app, props, supervisor, path, systemService) // create a local actor + new LocalActorRef(system, props, supervisor, path, systemService) // create a local actor // create a routed actor ref case deploy @ Some(DeploymentConfig.Deploy(_, _, routerType, nrOfInstances, DeploymentConfig.LocalScope)) ⇒ - implicit val dispatcher = if (props.dispatcher == Props.defaultDispatcher) app.dispatcher else props.dispatcher - implicit val timeout = app.AkkaConfig.ActorTimeout + implicit val dispatcher = if (props.dispatcher == Props.defaultDispatcher) system.dispatcher else props.dispatcher + implicit val timeout = system.settings.ActorTimeout val routerFactory: () ⇒ Router = DeploymentConfig.routerTypeFor(routerType) match { case RouterType.Direct ⇒ () ⇒ new DirectRouter case RouterType.Random ⇒ () ⇒ new RandomRouter - case RouterType.Broadcast ⇒ () ⇒ new BroadcastRouter case RouterType.RoundRobin ⇒ () ⇒ new RoundRobinRouter + case RouterType.Broadcast ⇒ () ⇒ new BroadcastRouter case RouterType.ScatterGather ⇒ () ⇒ new ScatterGatherFirstCompletedRouter case RouterType.LeastCPU ⇒ sys.error("Router LeastCPU not supported yet") case RouterType.LeastRAM ⇒ sys.error("Router LeastRAM not supported yet") @@ -227,10 +313,10 @@ class LocalActorRefProvider(val app: ActorSystem) extends ActorRefProvider { val connections: Iterable[ActorRef] = (1 to nrOfInstances.factor) map { i ⇒ val routedPath = path.parent / (path.name + ":" + i) 
- new LocalActorRef(app, props, supervisor, routedPath, systemService) + new LocalActorRef(system, props, supervisor, routedPath, systemService) } - actorOf(RoutedProps(routerFactory = routerFactory, connectionManager = new LocalConnectionManager(connections)), supervisor, path.toString) + actorOf(system, RoutedProps(routerFactory = routerFactory, connectionManager = new LocalConnectionManager(connections)), supervisor, path.toString) case unknown ⇒ throw new Exception("Don't know how to create this actor ref! Why? Got: " + unknown) } @@ -255,7 +341,7 @@ class LocalActorRefProvider(val app: ActorSystem) extends ActorRefProvider { /** * Creates (or fetches) a routed actor reference, configured by the 'props: RoutedProps' configuration. */ - def actorOf(props: RoutedProps, supervisor: ActorRef, name: String): ActorRef = { + def actorOf(system: ActorSystem, props: RoutedProps, supervisor: ActorRef, name: String): ActorRef = { // FIXME: this needs to take supervision into account! //FIXME clustering should be implemented by cluster actor ref provider @@ -268,34 +354,26 @@ class LocalActorRefProvider(val app: ActorSystem) extends ActorRefProvider { // val localOnly = props.localOnly // if (clusteringEnabled && !props.localOnly) ReflectiveAccess.ClusterModule.newClusteredActorRef(props) // else new RoutedActorRef(props, address) - new RoutedActorRef(app, props, supervisor, name) + new RoutedActorRef(system, props, supervisor, name) } private[akka] def deserialize(actor: SerializedActorRef): Option[ActorRef] = actorFor(ActorPath.split(actor.path)) - private[akka] def serialize(actor: ActorRef): SerializedActorRef = new SerializedActorRef(app.address, actor.path.toString) + private[akka] def serialize(actor: ActorRef): SerializedActorRef = new SerializedActorRef(rootPath.remoteAddress, actor.path.toString) private[akka] def createDeathWatch(): DeathWatch = new LocalDeathWatch private[akka] def ask(message: Any, recipient: ActorRef, within: Timeout): Future[Any] = { import 
akka.dispatch.DefaultPromise - (if (within == null) app.AkkaConfig.ActorTimeout else within) match { + (if (within == null) settings.ActorTimeout else within) match { case t if t.duration.length <= 0 ⇒ - new DefaultPromise[Any](0)(app.dispatcher) //Abort early if nonsensical timeout + new DefaultPromise[Any](0)(dispatcher) //Abort early if nonsensical timeout case t ⇒ - val a = new AskActorRef(app)(timeout = t) { def whenDone() = actors.remove(this.path.toString) } + val a = new AskActorRef(tempPath, this, deathWatch, t, dispatcher) { def whenDone() = actors.remove(this) } assert(actors.putIfAbsent(a.path.toString, a) eq null) //If this fails, we're in deep trouble recipient.tell(message, a) a.result } } - - private[akka] val dummyAskSender = new DeadLetterActorRef(app) - - private val tempNumber = new AtomicLong - def tempPath = { - val l = tempNumber.getAndIncrement() - "$_" + Helpers.base64(l) - } } class LocalDeathWatch extends DeathWatch with ActorClassification { @@ -317,20 +395,20 @@ class LocalDeathWatch extends DeathWatch with ActorClassification { class DefaultScheduler(hashedWheelTimer: HashedWheelTimer) extends Scheduler { - def schedule(receiver: ActorRef, message: Any, initialDelay: Long, delay: Long, timeUnit: TimeUnit): Cancellable = - new DefaultCancellable(hashedWheelTimer.newTimeout(createContinuousTask(receiver, message, delay, timeUnit), initialDelay, timeUnit)) + def schedule(receiver: ActorRef, message: Any, initialDelay: Duration, delay: Duration): Cancellable = + new DefaultCancellable(hashedWheelTimer.newTimeout(createContinuousTask(receiver, message, delay), initialDelay)) - def scheduleOnce(runnable: Runnable, delay: Long, timeUnit: TimeUnit): Cancellable = - new DefaultCancellable(hashedWheelTimer.newTimeout(createSingleTask(runnable), delay, timeUnit)) + def schedule(f: () ⇒ Unit, initialDelay: Duration, delay: Duration): Cancellable = + new DefaultCancellable(hashedWheelTimer.newTimeout(createContinuousTask(f, delay), initialDelay)) - 
def scheduleOnce(receiver: ActorRef, message: Any, delay: Long, timeUnit: TimeUnit): Cancellable = - new DefaultCancellable(hashedWheelTimer.newTimeout(createSingleTask(receiver, message), delay, timeUnit)) + def scheduleOnce(runnable: Runnable, delay: Duration): Cancellable = + new DefaultCancellable(hashedWheelTimer.newTimeout(createSingleTask(runnable), delay)) - def schedule(f: () ⇒ Unit, initialDelay: Long, delay: Long, timeUnit: TimeUnit): Cancellable = - new DefaultCancellable(hashedWheelTimer.newTimeout(createContinuousTask(f, delay, timeUnit), initialDelay, timeUnit)) + def scheduleOnce(receiver: ActorRef, message: Any, delay: Duration): Cancellable = + new DefaultCancellable(hashedWheelTimer.newTimeout(createSingleTask(receiver, message), delay)) - def scheduleOnce(f: () ⇒ Unit, delay: Long, timeUnit: TimeUnit): Cancellable = - new DefaultCancellable(hashedWheelTimer.newTimeout(createSingleTask(f), delay, timeUnit)) + def scheduleOnce(f: () ⇒ Unit, delay: Duration): Cancellable = + new DefaultCancellable(hashedWheelTimer.newTimeout(createSingleTask(f), delay)) private def createSingleTask(runnable: Runnable): TimerTask = new TimerTask() { def run(timeout: org.jboss.netty.akka.util.Timeout) { runnable.run() } } @@ -338,23 +416,23 @@ class DefaultScheduler(hashedWheelTimer: HashedWheelTimer) extends Scheduler { private def createSingleTask(receiver: ActorRef, message: Any): TimerTask = new TimerTask { def run(timeout: org.jboss.netty.akka.util.Timeout) { receiver ! message } } - private def createContinuousTask(receiver: ActorRef, message: Any, delay: Long, timeUnit: TimeUnit): TimerTask = { + private def createSingleTask(f: () ⇒ Unit): TimerTask = + new TimerTask { def run(timeout: org.jboss.netty.akka.util.Timeout) { f() } } + + private def createContinuousTask(receiver: ActorRef, message: Any, delay: Duration): TimerTask = { new TimerTask { def run(timeout: org.jboss.netty.akka.util.Timeout) { receiver ! 
message - timeout.getTimer.newTimeout(this, delay, timeUnit) + timeout.getTimer.newTimeout(this, delay) } } } - private def createSingleTask(f: () ⇒ Unit): TimerTask = - new TimerTask { def run(timeout: org.jboss.netty.akka.util.Timeout) { f() } } - - private def createContinuousTask(f: () ⇒ Unit, delay: Long, timeUnit: TimeUnit): TimerTask = { + private def createContinuousTask(f: () ⇒ Unit, delay: Duration): TimerTask = { new TimerTask { def run(timeout: org.jboss.netty.akka.util.Timeout) { f() - timeout.getTimer.newTimeout(this, delay, timeUnit) + timeout.getTimer.newTimeout(this, delay) } } } diff --git a/akka-actor/src/main/scala/akka/actor/ActorSystem.scala b/akka-actor/src/main/scala/akka/actor/ActorSystem.scala index cd993a040c..1bf1ea2bd1 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorSystem.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorSystem.scala @@ -3,22 +3,35 @@ */ package akka.actor -import akka.config._ +import akka.config.ConfigurationException import akka.actor._ import akka.event._ +import akka.dispatch._ import akka.util.duration._ import java.net.InetAddress import com.eaio.uuid.UUID -import akka.dispatch.{ Dispatchers, Future, Mailbox, Envelope, SystemMessage } -import akka.util.Duration -import akka.util.ReflectiveAccess import akka.serialization.Serialization import akka.remote.RemoteAddress +import org.jboss.netty.akka.util.HashedWheelTimer +import java.util.concurrent.TimeUnit.SECONDS +import java.util.concurrent.TimeUnit.MILLISECONDS +import java.util.concurrent.TimeUnit.NANOSECONDS +import java.io.File +import com.typesafe.config.Config +import com.typesafe.config.ConfigParseOptions +import com.typesafe.config.ConfigRoot +import com.typesafe.config.ConfigFactory +import java.lang.reflect.InvocationTargetException +import akka.util.{ Helpers, Duration, ReflectiveAccess } +import java.util.concurrent.atomic.AtomicLong +import java.util.concurrent.CountDownLatch +import java.util.concurrent.ConcurrentHashMap +import 
java.util.concurrent.Executors +import scala.annotation.tailrec +import akka.serialization.SerializationExtension object ActorSystem { - type AkkaConfig = a.AkkaConfig.type forSome { val a: ActorSystem } - val Version = "2.0-SNAPSHOT" val envHome = System.getenv("AKKA_HOME") match { @@ -33,183 +46,282 @@ object ActorSystem { val GlobalHome = systemHome orElse envHome - val envConf = System.getenv("AKKA_MODE") match { - case null | "" ⇒ None - case value ⇒ Some(value) + def create(name: String, config: Config): ActorSystem = apply(name, config) + def apply(name: String, config: Config): ActorSystem = new ActorSystemImpl(name, config).start() + + def create(name: String): ActorSystem = apply(name) + def apply(name: String): ActorSystem = apply(name, DefaultConfigurationLoader.defaultConfig) + + def create(): ActorSystem = apply() + def apply(): ActorSystem = apply("default") + + class Settings(cfg: Config) { + private def referenceConfig: Config = + ConfigFactory.parseResource(classOf[ActorSystem], "/akka-actor-reference.conf", + ConfigParseOptions.defaults.setAllowMissing(false)) + val config: ConfigRoot = ConfigFactory.emptyRoot("akka-actor").withFallback(cfg).withFallback(referenceConfig).resolve() + + import scala.collection.JavaConverters._ + import config._ + val ConfigVersion = getString("akka.version") + + val ProviderClass = getString("akka.actor.provider") + + val ActorTimeout = Timeout(Duration(getMilliseconds("akka.actor.timeout"), MILLISECONDS)) + // TODO This isn't used anywhere. Remove? 
+ val SerializeAllMessages = getBoolean("akka.actor.serialize-messages") + + val LogLevel = getString("akka.loglevel") + val StdoutLogLevel = getString("akka.stdout-loglevel") + val EventHandlers: Seq[String] = getStringList("akka.event-handlers").asScala + val AddLoggingReceive = getBoolean("akka.actor.debug.receive") + val DebugAutoReceive = getBoolean("akka.actor.debug.autoreceive") + val DebugLifecycle = getBoolean("akka.actor.debug.lifecycle") + val FsmDebugEvent = getBoolean("akka.actor.debug.fsm") + val DebugEventStream = getBoolean("akka.actor.debug.event-stream") + + val DispatcherThroughput = getInt("akka.actor.default-dispatcher.throughput") + val DispatcherDefaultShutdown = Duration(getMilliseconds("akka.actor.dispatcher-shutdown-timeout"), MILLISECONDS) + val MailboxCapacity = getInt("akka.actor.default-dispatcher.mailbox-capacity") + val MailboxPushTimeout = Duration(getNanoseconds("akka.actor.default-dispatcher.mailbox-push-timeout-time"), NANOSECONDS) + val DispatcherThroughputDeadlineTime = Duration(getNanoseconds("akka.actor.default-dispatcher.throughput-deadline-time"), NANOSECONDS) + + val Home = config.getString("akka.home") match { + case "" ⇒ None + case x ⇒ Some(x) + } + val BootClasses: Seq[String] = getStringList("akka.boot").asScala + + val EnabledModules: Seq[String] = getStringList("akka.enabled-modules").asScala + + val SchedulerTickDuration = Duration(getMilliseconds("akka.scheduler.tickDuration"), MILLISECONDS) + val SchedulerTicksPerWheel = getInt("akka.scheduler.ticksPerWheel") + + if (ConfigVersion != Version) + throw new ConfigurationException("Akka JAR version [" + Version + + "] does not match the provided config version [" + ConfigVersion + "]") + } - val systemConf = System.getProperty("akka.mode") match { - case null | "" ⇒ None - case value ⇒ Some(value) + object DefaultConfigurationLoader { + + val defaultConfig: Config = fromProperties orElse fromClasspath orElse fromHome getOrElse emptyConfig + + // file extensions 
(.conf, .json, .properties), are handled by parseFileAnySyntax + val defaultLocation: String = (systemMode orElse envMode).map("akka." + _).getOrElse("akka") + + private def envMode = System.getenv("AKKA_MODE") match { + case null | "" ⇒ None + case value ⇒ Some(value) + } + + private def systemMode = System.getProperty("akka.mode") match { + case null | "" ⇒ None + case value ⇒ Some(value) + } + + private def configParseOptions = ConfigParseOptions.defaults.setAllowMissing(false) + + private def fromProperties = try { + val property = Option(System.getProperty("akka.config")) + property.map(p ⇒ + ConfigFactory.systemProperties.withFallback( + ConfigFactory.parseFileAnySyntax(new File(p), configParseOptions))) + } catch { case _ ⇒ None } + + private def fromClasspath = try { + Option(ConfigFactory.systemProperties.withFallback( + ConfigFactory.parseResourceAnySyntax(ActorSystem.getClass, "/" + defaultLocation, configParseOptions))) + } catch { case _ ⇒ None } + + private def fromHome = try { + Option(ConfigFactory.systemProperties.withFallback( + ConfigFactory.parseFileAnySyntax(new File(GlobalHome.get + "/config/" + defaultLocation), configParseOptions))) + } catch { case _ ⇒ None } + + private def emptyConfig = ConfigFactory.systemProperties + } - val defaultLocation = (systemConf orElse envConf).map("akka." 
+ _ + ".conf").getOrElse("akka.conf") - - val fromProperties = try { - Some(Configuration.fromFile(System.getProperty("akka.config", ""))) - } catch { case _ ⇒ None } - - val fromClasspath = try { - Some(Configuration.fromResource(defaultLocation, getClass.getClassLoader)) - } catch { case _ ⇒ None } - - val fromHome = try { - Some(Configuration.fromFile(GlobalHome.get + "/config/" + defaultLocation)) - } catch { case _ ⇒ None } - - val emptyConfig = Configuration.fromString("akka { version = \"" + Version + "\" }") - - val defaultConfig = fromProperties orElse fromClasspath orElse fromHome getOrElse emptyConfig - - def apply(name: String, config: Configuration) = new ActorSystem(name, config) - - def apply(name: String): ActorSystem = new ActorSystem(name) - - def apply(): ActorSystem = new ActorSystem() - - sealed trait ExitStatus - case object Stopped extends ExitStatus - case class Failed(cause: Throwable) extends ExitStatus - } -class ActorSystem(val name: String, val config: Configuration) extends ActorRefFactory with TypedActorFactory { +/** + * An actor system is a hierarchical group of actors which share common + * configuration, e.g. dispatchers, deployments, remote capabilities and + * addresses. It is also the entry point for creating or looking up actors. + */ +abstract class ActorSystem extends ActorRefFactory with TypedActorFactory { + import ActorSystem._ - def this(name: String) = this(name, ActorSystem.defaultConfig) - def this() = this("default") + /** + * The name of this actor system, used to distinguish multiple ones within + * the same JVM & class loader. + */ + def name: String + + /** + * The core settings extracted from the supplied configuration. + */ + def settings: Settings + + /** + * The logical node name where this actor system resides. + */ + def nodename: String + + /** + * The logical name of the cluster this actor system belongs to. 
+ */ + def clustername: String + + /** + * Construct a path below the application guardian to be used with [[ActorSystem.actorFor]]. + */ + def /(name: String): ActorPath + + /** + * Start-up time in milliseconds since the epoch. + */ + val startTime = System.currentTimeMillis + + /** + * Up-time of this actor system in seconds. + */ + def uptime = (System.currentTimeMillis - startTime) / 1000 + + /** + * Main event bus of this actor system, used for example for logging. + */ + def eventStream: EventStream + + /** + * Convenient logging adapter for logging to the [[ActorSystem.eventStream]]. + */ + def log: LoggingAdapter + + /** + * Actor reference where messages are re-routed to which were addressed to + * stopped or non-existing actors. Delivery to this actor is done on a best + * effort basis and hence not strictly guaranteed. + */ + def deadLetters: ActorRef + // FIXME: do not publish this + def deadLetterMailbox: Mailbox + + // FIXME: TypedActor should be an extension + def typedActor: TypedActor + + /** + * Light-weight scheduler for running asynchronous tasks after some deadline + * in the future. Not terribly precise but cheap. + */ + def scheduler: Scheduler + + /** + * Helper object for creating new dispatchers and passing in all required + * information. + */ + def dispatcherFactory: Dispatchers + + /** + * Default dispatcher as configured. This dispatcher is used for all actors + * in the actor system which do not have a different dispatcher configured + * explicitly. + */ + def dispatcher: MessageDispatcher + + /** + * Register a block of code to run after all actors in this actor system have + * been stopped. + */ + def registerOnTermination(code: ⇒ Unit) + + /** + * Register a block of code to run after all actors in this actor system have + * been stopped (Java API). + */ + def registerOnTermination(code: Runnable) + + /** + * Stop this actor system. 
This will stop the guardian actor, which in turn + * will recursively stop all its child actors, then the system guardian + * (below which the logging actors reside) and the execute all registered + * termination handlers (see [[ActorSystem.registerOnTermination]]). + */ + def stop() + + /** + * Register an [[akka.actor.Extension]] within this actor system. The supplied + * object is interrogated for the extension’s key with which the extension is + * accessible from anywhere you have a reference to this actor system in + * scope, e.g. within actors (see [[ActorSystem.extension]]). + * + * Extensions can be registered automatically by adding their fully-qualified + * class name to the `akka.extensions` configuration key. + */ + def registerExtension[T <: AnyRef](ext: Extension[T]): Extension[T] + + /** + * Obtain a reference to a registered extension by passing in the key which + * the extension object returned from its init method (typically a static + * field or Scala `object`): + * + * {{{ + * class MyActor extends Actor { + * val ext: MyExtension = context.app.extension(MyExtension.key) + * } + * }}} + * + * Throws IllegalArgumentException if the extension key is not found. + */ + def extension[T <: AnyRef](key: ExtensionKey[T]): T + + /** + * Query presence of a specific extension. Beware that this key needs to be + * “the same” as the one used for registration (it is using a HashMap). 
+ */ + def hasExtension(key: ExtensionKey[_]): Boolean +} + +class ActorSystemImpl(val name: String, val applicationConfig: Config) extends ActorSystem { import ActorSystem._ - object AkkaConfig { - import config._ - val ConfigVersion = getString("akka.version", Version) + val settings = new Settings(applicationConfig) - val ProviderClass = getString("akka.actor.provider", "akka.actor.LocalActorRefProvider") + protected def systemImpl = this - val DefaultTimeUnit = Duration.timeUnit(getString("akka.time-unit", "seconds")) - val ActorTimeout = Timeout(Duration(getInt("akka.actor.timeout", 5), DefaultTimeUnit)) - val ActorTimeoutMillis = ActorTimeout.duration.toMillis - val SerializeAllMessages = getBool("akka.actor.serialize-messages", false) + private[akka] def systemActorOf(props: Props, address: String): ActorRef = provider.actorOf(this, props, systemGuardian, address, true) - val TestTimeFactor = - try java.lang.Double.parseDouble(System.getProperty("akka.test.timefactor")) catch { - case _: Exception ⇒ getDouble("akka.test.timefactor", 1.0) - } - val SingleExpectDefaultTimeout = Duration(getDouble("akka.test.single-expect-default", 1), DefaultTimeUnit) - val TestEventFilterLeeway = Duration(getDouble("akka.test.filter-leeway", 0.5), DefaultTimeUnit) - - val LogLevel = getString("akka.loglevel", "INFO") - val StdoutLogLevel = getString("akka.stdout-loglevel", LogLevel) - val EventHandlers = getList("akka.event-handlers") - val AddLoggingReceive = getBool("akka.actor.debug.receive", false) - val DebugAutoReceive = getBool("akka.actor.debug.autoreceive", false) - val DebugLifecycle = getBool("akka.actor.debug.lifecycle", false) - val FsmDebugEvent = getBool("akka.actor.debug.fsm", false) - val DebugEventStream = getBool("akka.actor.debug.event-stream", false) - - val DispatcherThroughput = getInt("akka.actor.throughput", 5) - val DispatcherDefaultShutdown = getLong("akka.actor.dispatcher-shutdown-timeout"). 
- map(time ⇒ Duration(time, DefaultTimeUnit)).getOrElse(1 second) - val MailboxCapacity = getInt("akka.actor.default-dispatcher.mailbox-capacity", -1) - val MailboxPushTimeout = Duration(getInt("akka.actor.default-dispatcher.mailbox-push-timeout-time", 10), DefaultTimeUnit) - val DispatcherThroughputDeadlineTime = Duration(getInt("akka.actor.throughput-deadline-time", -1), DefaultTimeUnit) - - val Home = getString("akka.home") - val BootClasses = getList("akka.boot") - - val EnabledModules = getList("akka.enabled-modules") - val ClusterEnabled = EnabledModules exists (_ == "cluster") - val ClusterName = getString("akka.cluster.name", "default") - - val RemoteTransport = getString("akka.remote.layer", "akka.remote.netty.NettyRemoteSupport") - val RemoteServerPort = getInt("akka.remote.server.port", 2552) - - val FailureDetectorThreshold: Int = getInt("akka.remote.failure-detector.threshold", 8) - val FailureDetectorMaxSampleSize: Int = getInt("akka.remote.failure-detector.max-sample-size", 1000) - } - - private[akka] def systemActorOf(props: Props, address: String): ActorRef = provider.actorOf(props, systemGuardian, address, true) - - import AkkaConfig._ - - if (ConfigVersion != Version) - throw new ConfigurationException("Akka JAR version [" + Version + - "] does not match the provided config version [" + ConfigVersion + "]") - - val startTime = System.currentTimeMillis - def uptime = (System.currentTimeMillis - startTime) / 1000 - - val nodename: String = System.getProperty("akka.cluster.nodename") match { - case null | "" ⇒ new UUID().toString - case value ⇒ value - } - - val address = RemoteAddress(System.getProperty("akka.remote.hostname") match { - case null | "" ⇒ InetAddress.getLocalHost.getHostAddress - case value ⇒ value - }, System.getProperty("akka.remote.port") match { - case null | "" ⇒ AkkaConfig.RemoteServerPort - case value ⇒ value.toInt - }) + import settings._ // this provides basic logging (to stdout) until .start() is called below val 
eventStream = new EventStream(DebugEventStream) - eventStream.startStdoutLogger(AkkaConfig) - val log = new BusLogging(eventStream, this) + eventStream.startStdoutLogger(settings) + val log = new BusLogging(eventStream, "ActorSystem") // “this” used only for .getClass in tagging messages - // TODO correctly pull its config from the config - val dispatcherFactory = new Dispatchers(this) + val scheduler = new DefaultScheduler(new HashedWheelTimer(log, Executors.defaultThreadFactory, settings.SchedulerTickDuration, settings.SchedulerTicksPerWheel)) - implicit val dispatcher = dispatcherFactory.defaultGlobalDispatcher + val provider: ActorRefProvider = { + val providerClass = ReflectiveAccess.getClassFor(ProviderClass) match { + case Left(e) ⇒ throw e + case Right(b) ⇒ b + } + val arguments = Seq( + classOf[Settings] -> settings, + classOf[EventStream] -> eventStream, + classOf[Scheduler] -> scheduler) + val types: Array[Class[_]] = arguments map (_._1) toArray + val values: Array[AnyRef] = arguments map (_._2) toArray - def scheduler = provider.scheduler - - // TODO think about memory consistency effects when doing funky stuff inside constructor - val reflective = new ReflectiveAccess(this) - - /** - * The root actor path for this application. 
- */ - val root: ActorPath = new RootActorPath(this) - - // TODO think about memory consistency effects when doing funky stuff inside constructor - val provider: ActorRefProvider = reflective.createProvider - - def terminationFuture: Future[ExitStatus] = provider.terminationFuture - - private class Guardian extends Actor { - def receive = { - case Terminated(_) ⇒ context.self.stop() + ReflectiveAccess.createInstance[ActorRefProvider](providerClass, types, values) match { + case Left(e: InvocationTargetException) ⇒ throw e.getTargetException + case Left(e) ⇒ throw e + case Right(p) ⇒ p } } - private class SystemGuardian extends Actor { - def receive = { - case Terminated(_) ⇒ - eventStream.stopDefaultLoggers() - context.self.stop() - } - } - private val guardianFaultHandlingStrategy = { - import akka.actor.FaultHandlingStrategy._ - OneForOneStrategy { - case _: ActorKilledException ⇒ Stop - case _: ActorInitializationException ⇒ Stop - case _: Exception ⇒ Restart - } - } - private val guardianProps = Props(new Guardian).withFaultHandler(guardianFaultHandlingStrategy) - private val rootGuardian: ActorRef = - provider.actorOf(guardianProps, provider.theOneWhoWalksTheBubblesOfSpaceTime, root, true) - - protected[akka] val guardian: ActorRef = - provider.actorOf(guardianProps, rootGuardian, "app", true) - - protected[akka] val systemGuardian: ActorRef = - provider.actorOf(guardianProps.withCreator(new SystemGuardian), rootGuardian, "sys", true) - - // TODO think about memory consistency effects when doing funky stuff inside constructor - val deadLetters = new DeadLetterActorRef(this) + val deadLetters = new DeadLetterActorRef(eventStream) val deadLetterMailbox = new Mailbox(null) { becomeClosed() override def enqueue(receiver: ActorRef, envelope: Envelope) { deadLetters ! 
DeadLetter(envelope.message, envelope.sender, receiver) } @@ -221,34 +333,109 @@ class ActorSystem(val name: String, val config: Configuration) extends ActorRefF override def numberOfMessages = 0 } - val deathWatch = provider.createDeathWatch() + val dispatcherFactory = new Dispatchers(settings, DefaultDispatcherPrerequisites(eventStream, deadLetterMailbox, scheduler)) + implicit val dispatcher = dispatcherFactory.defaultGlobalDispatcher - // chain death watchers so that killing guardian stops the application - deathWatch.subscribe(systemGuardian, guardian) - deathWatch.subscribe(rootGuardian, systemGuardian) + //FIXME Set this to a Failure when things bubble to the top + def terminationFuture: Future[Unit] = provider.terminationFuture + def guardian: ActorRef = provider.guardian + def systemGuardian: ActorRef = provider.systemGuardian + def deathWatch: DeathWatch = provider.deathWatch + def nodename: String = provider.nodename + def clustername: String = provider.clustername - // this starts the reaper actor and the user-configured logging subscribers, which are also actors - eventStream.start(this) - eventStream.startDefaultLoggers(this, AkkaConfig) + private final val nextName = new AtomicLong + override protected def randomName(): String = Helpers.base64(nextName.incrementAndGet()) - // TODO think about memory consistency effects when doing funky stuff inside an ActorRefProvider's constructor - val deployer = new Deployer(this) + @volatile + private var _typedActor: TypedActor = _ + def typedActor = _typedActor - // TODO think about memory consistency effects when doing funky stuff inside constructor - val typedActor = new TypedActor(this) - - // TODO think about memory consistency effects when doing funky stuff inside constructor - val serialization = new Serialization(this) - - /** - * Create an actor path under the application supervisor (/app). 
- */ def /(actorName: String): ActorPath = guardian.path / actorName + private lazy val _start: this.type = { + // TODO can we do something better than loading SerializationExtension from here? + _typedActor = new TypedActor(settings, SerializationExtension(this).serialization) + provider.init(this) + deadLetters.init(dispatcher, provider.rootPath) + // this starts the reaper actor and the user-configured logging subscribers, which are also actors + eventStream.start(this) + eventStream.startDefaultLoggers(this) + loadExtensions() + this + } + + def start() = _start + + def registerOnTermination(code: ⇒ Unit) { terminationFuture onComplete (_ ⇒ code) } + def registerOnTermination(code: Runnable) { terminationFuture onComplete (_ ⇒ code.run) } + // TODO shutdown all that other stuff, whatever that may be def stop() { guardian.stop() + terminationFuture onComplete (_ ⇒ scheduler.stop()) + terminationFuture onComplete (_ ⇒ dispatcher.shutdown()) } - terminationFuture.onComplete(_ ⇒ dispatcher.shutdown()) + private val extensions = new ConcurrentHashMap[ExtensionKey[_], AnyRef] + + /** + * Attempts to initialize and register this extension if the key associated with it isn't already registered. + * The extension will only be initialized if it isn't already registered. + * Rethrows anything thrown when initializing the extension (doesn't register in that case) + * Returns the registered extension, might be another already registered instance. + */ + @tailrec + final def registerExtension[T <: AnyRef](ext: Extension[T]): Extension[T] = { + /** + * Returns any extension registered to the specified key or returns null if not registered + */ + @tailrec + def findExtension[T <: AnyRef](key: ExtensionKey[T]): Option[T] = extensions.get(key) match { + case c: CountDownLatch ⇒ c.await(); findExtension(key) //Registration in process, await completion and retry + case e: Extension[_] ⇒ Some(e.asInstanceOf[T]) //Profit! 
+ case null ⇒ None //Doesn't exist + } + + findExtension(ext.key) match { + case Some(e: Extension[_]) ⇒ e.asInstanceOf[Extension[T]] //Profit! + case None ⇒ //Doesn't already exist, commence registration + val inProcessOfRegistration = new CountDownLatch(1) + extensions.putIfAbsent(ext.key, inProcessOfRegistration) match { // Signal that registration is in process + case null ⇒ try { // Signal was successfully sent + ext.init(this) //Initialize the new extension + extensions.replace(ext.key, inProcessOfRegistration, ext) //Replace our in process signal with the initialized extension + ext //Profit! + } catch { + case t ⇒ + extensions.remove(ext.key, inProcessOfRegistration) //In case shit hits the fan, remove the inProcess signal + throw t //Escalate to caller + } finally { + inProcessOfRegistration.countDown //Always notify listeners of the inProcess signal + } + case other ⇒ registerExtension(ext) //Someone else is in process of registering an extension for this key, retry + } + } + } + + def extension[T <: AnyRef](key: ExtensionKey[T]): T = extensions.get(key) match { + case x: Extension[_] ⇒ x.asInstanceOf[T] + case _ ⇒ throw new IllegalArgumentException("trying to get non-registered extension " + key) + } + + def hasExtension(key: ExtensionKey[_]): Boolean = extensions.get(key) match { + case x: Extension[_] ⇒ true + case _ ⇒ false + } + + private def loadExtensions() { + import scala.collection.JavaConversions._ + settings.config.getStringList("akka.extensions") foreach { fqcn ⇒ + import ReflectiveAccess._ + createInstance[Extension[_ <: AnyRef]](fqcn, noParams, noArgs) match { + case Left(ex) ⇒ log.error(ex, "Exception trying to load extension " + fqcn) + case Right(ext) ⇒ if (ext.isInstanceOf[Extension[_]]) registerExtension(ext) else log.error("Class {} is not an Extension", fqcn) + } + } + } } diff --git a/akka-actor/src/main/scala/akka/actor/BootableActorLoaderService.scala b/akka-actor/src/main/scala/akka/actor/BootableActorLoaderService.scala index 
0876671d6e..6e0f99b50d 100644 --- a/akka-actor/src/main/scala/akka/actor/BootableActorLoaderService.scala +++ b/akka-actor/src/main/scala/akka/actor/BootableActorLoaderService.scala @@ -14,14 +14,14 @@ import akka.util.Bootable */ trait BootableActorLoaderService extends Bootable { - def app: ActorSystem + def system: ActorSystem - val BOOT_CLASSES = app.AkkaConfig.BootClasses + val BOOT_CLASSES = system.settings.BootClasses lazy val applicationLoader = createApplicationClassLoader() protected def createApplicationClassLoader(): Option[ClassLoader] = Some({ - if (app.AkkaConfig.Home.isDefined) { - val DEPLOY = app.AkkaConfig.Home.get + "/deploy" + if (system.settings.Home.isDefined) { + val DEPLOY = system.settings.Home.get + "/deploy" val DEPLOY_DIR = new File(DEPLOY) if (!DEPLOY_DIR.exists) { System.exit(-1) @@ -59,11 +59,11 @@ trait BootableActorLoaderService extends Bootable { super.onUnload() // FIXME shutdown all actors - // app.registry.local.shutdownAll + // system.registry.local.shutdownAll } } /** * Java API for the default JAX-RS/Mist Initializer */ -class DefaultBootableActorLoaderService(val app: ActorSystem) extends BootableActorLoaderService +class DefaultBootableActorLoaderService(val system: ActorSystem) extends BootableActorLoaderService diff --git a/akka-actor/src/main/scala/akka/actor/Deployer.scala b/akka-actor/src/main/scala/akka/actor/Deployer.scala index 3dc309f207..ec1d8dfc4c 100644 --- a/akka-actor/src/main/scala/akka/actor/Deployer.scala +++ b/akka-actor/src/main/scala/akka/actor/Deployer.scala @@ -5,16 +5,16 @@ package akka.actor import collection.immutable.Seq - import java.util.concurrent.ConcurrentHashMap - import akka.event.Logging import akka.actor.DeploymentConfig._ import akka.AkkaException -import akka.config.{ Configuration, ConfigurationException } +import akka.config.ConfigurationException import akka.util.Duration import java.net.InetSocketAddress import akka.remote.RemoteAddress +import akka.event.EventStream +import 
com.typesafe.config.Config trait ActorDeployer { private[akka] def init(deployments: Seq[Deploy]): Unit @@ -34,10 +34,10 @@ trait ActorDeployer { * * @author Jonas Bonér */ -class Deployer(val app: ActorSystem) extends ActorDeployer { +class Deployer(val settings: ActorSystem.Settings, val eventStream: EventStream, val nodename: String) extends ActorDeployer { - val deploymentConfig = new DeploymentConfig(app) - val log = Logging(app.eventStream, this) + val deploymentConfig = new DeploymentConfig(nodename) + val log = Logging(eventStream, "Deployer") val instance: ActorDeployer = { val deployer = new LocalDeployer() @@ -78,188 +78,183 @@ class Deployer(val app: ActorSystem) extends ActorDeployer { instance.lookupDeploymentFor(path) private[akka] def deploymentsInConfig: List[Deploy] = { - for { - path ← pathsInConfig - deployment ← lookupInConfig(path) - } yield deployment + for (path ← pathsInConfig) yield lookupInConfig(path) } private[akka] def pathsInConfig: List[String] = { - val deploymentPath = "akka.actor.deployment" - app.config.getSection(deploymentPath) match { - case None ⇒ Nil - case Some(pathConfig) ⇒ - pathConfig.map.keySet - .map(path ⇒ path.substring(0, path.indexOf("."))) - .toSet.toList // toSet to force uniqueness + def pathSubstring(path: String) = { + val i = path.indexOf(".") + if (i == -1) path else path.substring(0, i) } + + import scala.collection.JavaConverters._ + settings.config.getConfig("akka.actor.deployment").toObject.keySet.asScala + .filterNot("default" ==) + .map(path ⇒ pathSubstring(path)) + .toSet.toList // toSet to force uniqueness } /** * Lookup deployment in 'akka.conf' configuration file. 
*/ - private[akka] def lookupInConfig(path: String, configuration: Configuration = app.config): Option[Deploy] = { - import akka.util.ReflectiveAccess.{ createInstance, emptyArguments, emptyParams, getClassFor } + private[akka] def lookupInConfig(path: String, configuration: Config = settings.config): Deploy = { + import scala.collection.JavaConverters._ + import akka.util.ReflectiveAccess.getClassFor + + val defaultDeploymentConfig = configuration.getConfig("akka.actor.deployment.default") // -------------------------------- // akka.actor.deployment. // -------------------------------- val deploymentKey = "akka.actor.deployment." + path - configuration.getSection(deploymentKey) match { - case None ⇒ None - case Some(pathConfig) ⇒ + val deployment = configuration.getConfig(deploymentKey) - // -------------------------------- - // akka.actor.deployment..router - // -------------------------------- - val router: Routing = pathConfig.getString("router", "direct") match { - case "direct" ⇒ Direct - case "round-robin" ⇒ RoundRobin - case "random" ⇒ Random - case "scatter-gather" ⇒ ScatterGather - case "least-cpu" ⇒ LeastCPU - case "least-ram" ⇒ LeastRAM - case "least-messages" ⇒ LeastMessages - case routerClassName ⇒ CustomRouter(routerClassName) - } - - // -------------------------------- - // akka.actor.deployment..nr-of-instances - // -------------------------------- - val nrOfInstances = { - if (router == Direct) OneNrOfInstances - else { - pathConfig.getAny("nr-of-instances", "1") match { - case "auto" ⇒ AutoNrOfInstances - case "1" ⇒ OneNrOfInstances - case "0" ⇒ ZeroNrOfInstances - case nrOfReplicas: String ⇒ - try { - new NrOfInstances(nrOfReplicas.toInt) - } catch { - case e: Exception ⇒ - throw new ConfigurationException( - "Config option [" + deploymentKey + - ".nr-of-instances] needs to be either [\"auto\"] or [1-N] - was [" + - nrOfReplicas + "]") - } - } - } - } - - // -------------------------------- - // akka.actor.deployment..create-as - // 
-------------------------------- - val recipe: Option[ActorRecipe] = pathConfig.getSection("create-as") map { section ⇒ - val implementationClass = section.getString("class") match { - case Some(impl) ⇒ - getClassFor[Actor](impl).fold(e ⇒ throw new ConfigurationException( - "Config option [" + deploymentKey + ".create-as.class] load failed", e), identity) - case None ⇒ - throw new ConfigurationException( - "Config option [" + deploymentKey + ".create-as.class] is missing, need the fully qualified name of the class") - } - ActorRecipe(implementationClass) - } - - // -------------------------------- - // akka.actor.deployment..remote - // -------------------------------- - pathConfig.getSection("remote") match { - case Some(remoteConfig) ⇒ // we have a 'remote' config section - - if (pathConfig.getSection("cluster").isDefined) throw new ConfigurationException( - "Configuration for deployment ID [" + path + "] can not have both 'remote' and 'cluster' sections.") - - // -------------------------------- - // akka.actor.deployment..remote.nodes - // -------------------------------- - val remoteAddresses = remoteConfig.getList("nodes") match { - case Nil ⇒ Nil - case nodes ⇒ - def raiseRemoteNodeParsingError() = throw new ConfigurationException( - "Config option [" + deploymentKey + - ".remote.nodes] needs to be a list with elements on format \":\", was [" + nodes.mkString(", ") + "]") - - nodes map { node ⇒ - val tokenizer = new java.util.StringTokenizer(node, ":") - val hostname = tokenizer.nextElement.toString - if ((hostname eq null) || (hostname == "")) raiseRemoteNodeParsingError() - val port = try tokenizer.nextElement.toString.toInt catch { - case e: Exception ⇒ raiseRemoteNodeParsingError() - } - if (port == 0) raiseRemoteNodeParsingError() - - RemoteAddress(new InetSocketAddress(hostname, port)) - } - } - - Some(Deploy(path, recipe, router, nrOfInstances, RemoteScope(remoteAddresses))) - - case None ⇒ // check for 'cluster' config section - - // 
-------------------------------- - // akka.actor.deployment..cluster - // -------------------------------- - pathConfig.getSection("cluster") match { - case None ⇒ None - case Some(clusterConfig) ⇒ - - // -------------------------------- - // akka.actor.deployment..cluster.preferred-nodes - // -------------------------------- - - val preferredNodes = clusterConfig.getList("preferred-nodes") match { - case Nil ⇒ Nil - case homes ⇒ - def raiseHomeConfigError() = throw new ConfigurationException( - "Config option [" + deploymentKey + - ".cluster.preferred-nodes] needs to be a list with elements on format\n'host:', 'ip:' or 'node:', was [" + - homes + "]") - - homes map { home ⇒ - if (!(home.startsWith("host:") || home.startsWith("node:") || home.startsWith("ip:"))) raiseHomeConfigError() - - val tokenizer = new java.util.StringTokenizer(home, ":") - val protocol = tokenizer.nextElement - val address = tokenizer.nextElement.asInstanceOf[String] - - protocol match { - case "node" ⇒ Node(address) - case _ ⇒ raiseHomeConfigError() - } - } - } - - // -------------------------------- - // akka.actor.deployment..cluster.replication - // -------------------------------- - clusterConfig.getSection("replication") match { - case None ⇒ - Some(Deploy(path, recipe, router, nrOfInstances, deploymentConfig.ClusterScope(preferredNodes, Transient))) - - case Some(replicationConfig) ⇒ - val storage = replicationConfig.getString("storage", "transaction-log") match { - case "transaction-log" ⇒ TransactionLog - case "data-grid" ⇒ DataGrid - case unknown ⇒ - throw new ConfigurationException("Config option [" + deploymentKey + - ".cluster.replication.storage] needs to be either [\"transaction-log\"] or [\"data-grid\"] - was [" + - unknown + "]") - } - val strategy = replicationConfig.getString("strategy", "write-through") match { - case "write-through" ⇒ WriteThrough - case "write-behind" ⇒ WriteBehind - case unknown ⇒ - throw new ConfigurationException("Config option [" + deploymentKey + - 
".cluster.replication.strategy] needs to be either [\"write-through\"] or [\"write-behind\"] - was [" + - unknown + "]") - } - Some(Deploy(path, recipe, router, nrOfInstances, deploymentConfig.ClusterScope(preferredNodes, Replication(storage, strategy)))) - } - } - } + val deploymentWithFallback = deployment.withFallback(defaultDeploymentConfig) + // -------------------------------- + // akka.actor.deployment..router + // -------------------------------- + val router: Routing = deploymentWithFallback.getString("router") match { + case "direct" ⇒ Direct + case "round-robin" ⇒ RoundRobin + case "random" ⇒ Random + case "scatter-gather" ⇒ ScatterGather + case "least-cpu" ⇒ LeastCPU + case "least-ram" ⇒ LeastRAM + case "least-messages" ⇒ LeastMessages + case routerClassName ⇒ CustomRouter(routerClassName) } + + // -------------------------------- + // akka.actor.deployment..nr-of-instances + // -------------------------------- + val nrOfInstances = { + if (router == Direct) OneNrOfInstances + else { + def invalidNrOfInstances(wasValue: Any) = new ConfigurationException( + "Config option [" + deploymentKey + + ".nr-of-instances] needs to be either [\"auto\"] or [1-N] - was [" + + wasValue + "]") + + deploymentWithFallback.getAnyRef("nr-of-instances").asInstanceOf[Any] match { + case "auto" ⇒ AutoNrOfInstances + case 1 ⇒ OneNrOfInstances + case 0 ⇒ ZeroNrOfInstances + case nrOfReplicas: Number ⇒ + try { + new NrOfInstances(nrOfReplicas.intValue) + } catch { + case e: Exception ⇒ throw invalidNrOfInstances(nrOfReplicas) + } + case unknown ⇒ throw invalidNrOfInstances(unknown) + } + } + } + + // -------------------------------- + // akka.actor.deployment..create-as + // -------------------------------- + val recipe: Option[ActorRecipe] = + deploymentWithFallback.getString("create-as.class") match { + case "" ⇒ None + case impl ⇒ + val implementationClass = getClassFor[Actor](impl).fold(e ⇒ throw new ConfigurationException( + "Config option [" + deploymentKey + 
".create-as.class] load failed", e), identity) + Some(ActorRecipe(implementationClass)) + } + + val remoteNodes = deploymentWithFallback.getStringList("remote.nodes").asScala.toSeq + val clusterPreferredNodes = deploymentWithFallback.getStringList("cluster.preferred-nodes").asScala.toSeq + + // -------------------------------- + // akka.actor.deployment..remote + // -------------------------------- + def parseRemote: Scope = { + def raiseRemoteNodeParsingError() = throw new ConfigurationException( + "Config option [" + deploymentKey + + ".remote.nodes] needs to be a list with elements on format \":\", was [" + remoteNodes.mkString(", ") + "]") + + val remoteAddresses = remoteNodes map { node ⇒ + val tokenizer = new java.util.StringTokenizer(node, ":") + val hostname = tokenizer.nextElement.toString + if ((hostname eq null) || (hostname == "")) raiseRemoteNodeParsingError() + val port = try tokenizer.nextElement.toString.toInt catch { + case e: Exception ⇒ raiseRemoteNodeParsingError() + } + if (port == 0) raiseRemoteNodeParsingError() + + RemoteAddress(new InetSocketAddress(hostname, port)) + } + + RemoteScope(remoteAddresses) + } + + // -------------------------------- + // akka.actor.deployment..cluster + // -------------------------------- + def parseCluster: Scope = { + def raiseHomeConfigError() = throw new ConfigurationException( + "Config option [" + deploymentKey + + ".cluster.preferred-nodes] needs to be a list with elements on format\n'host:', 'ip:' or 'node:', was [" + + clusterPreferredNodes + "]") + + val remoteNodes = clusterPreferredNodes map { home ⇒ + if (!(home.startsWith("host:") || home.startsWith("node:") || home.startsWith("ip:"))) raiseHomeConfigError() + + val tokenizer = new java.util.StringTokenizer(home, ":") + val protocol = tokenizer.nextElement + val address = tokenizer.nextElement.asInstanceOf[String] + + // TODO host and ip protocols? 
+ protocol match { + case "node" ⇒ Node(address) + case _ ⇒ raiseHomeConfigError() + } + } + deploymentConfig.ClusterScope(remoteNodes, parseClusterReplication) + } + + // -------------------------------- + // akka.actor.deployment..cluster.replication + // -------------------------------- + def parseClusterReplication: ReplicationScheme = { + deployment.hasPath("cluster.replication") match { + case false ⇒ Transient + case true ⇒ + val replicationConfigWithFallback = deploymentWithFallback.getConfig("cluster.replication") + val storage = replicationConfigWithFallback.getString("storage") match { + case "transaction-log" ⇒ TransactionLog + case "data-grid" ⇒ DataGrid + case unknown ⇒ + throw new ConfigurationException("Config option [" + deploymentKey + + ".cluster.replication.storage] needs to be either [\"transaction-log\"] or [\"data-grid\"] - was [" + + unknown + "]") + } + val strategy = replicationConfigWithFallback.getString("strategy") match { + case "write-through" ⇒ WriteThrough + case "write-behind" ⇒ WriteBehind + case unknown ⇒ + throw new ConfigurationException("Config option [" + deploymentKey + + ".cluster.replication.strategy] needs to be either [\"write-through\"] or [\"write-behind\"] - was [" + + unknown + "]") + } + Replication(storage, strategy) + } + } + + val scope = (remoteNodes, clusterPreferredNodes) match { + case (Nil, Nil) ⇒ + LocalScope + case (_, Nil) ⇒ + // we have a 'remote' config section + parseRemote + case (Nil, _) ⇒ + // we have a 'cluster' config section + parseCluster + case (_, _) ⇒ throw new ConfigurationException( + "Configuration for deployment ID [" + path + "] can not have both 'remote' and 'cluster' sections.") + } + + Deploy(path, recipe, router, nrOfInstances, scope) } private[akka] def throwDeploymentBoundException(deployment: Deploy): Nothing = { diff --git a/akka-actor/src/main/scala/akka/actor/DeploymentConfig.scala b/akka-actor/src/main/scala/akka/actor/DeploymentConfig.scala index 0184ad9fef..9a3d934f01 100644 
--- a/akka-actor/src/main/scala/akka/actor/DeploymentConfig.scala +++ b/akka-actor/src/main/scala/akka/actor/DeploymentConfig.scala @@ -75,10 +75,11 @@ object DeploymentConfig { // -------------------------------- class NrOfInstances(val factor: Int) extends Serializable { - if (factor < 0) throw new IllegalArgumentException("nr-of-instances can not be negative") + // note that -1 is used for AutoNrOfInstances + if (factor < -1) throw new IllegalArgumentException("nr-of-instances can not be negative") override def hashCode = 0 + factor.## override def equals(other: Any) = NrOfInstances.unapply(this) == NrOfInstances.unapply(other) - override def toString = "NrOfInstances(" + factor + ")" + override def toString = if (factor == -1) "NrOfInstances(auto)" else "NrOfInstances(" + factor + ")" } object NrOfInstances { @@ -97,7 +98,7 @@ object DeploymentConfig { // For Java API class AutoNrOfInstances extends NrOfInstances(-1) class ZeroNrOfInstances extends NrOfInstances(0) - class OneNrOfInstances extends NrOfInstances(0) + class OneNrOfInstances extends NrOfInstances(1) // For Scala API case object AutoNrOfInstances extends AutoNrOfInstances @@ -217,13 +218,13 @@ object DeploymentConfig { * * @author Jonas Bonér */ -class DeploymentConfig(val app: ActorSystem) { +class DeploymentConfig(val nodename: String) { import DeploymentConfig._ - case class ClusterScope(preferredNodes: Iterable[Home] = Vector(Node(app.nodename)), replication: ReplicationScheme = Transient) extends Scope + case class ClusterScope(preferredNodes: Iterable[Home] = Vector(Node(nodename)), replication: ReplicationScheme = Transient) extends Scope - def isHomeNode(homes: Iterable[Home]): Boolean = homes exists (home ⇒ nodeNameFor(home) == app.nodename) + def isHomeNode(homes: Iterable[Home]): Boolean = homes exists (home ⇒ nodeNameFor(home) == nodename) def replicationSchemeFor(deployment: Deploy): Option[ReplicationScheme] = deployment match { case Deploy(_, _, _, _, ClusterScope(_, 
replicationScheme)) ⇒ Some(replicationScheme) diff --git a/akka-actor/src/main/scala/akka/actor/Extension.scala b/akka-actor/src/main/scala/akka/actor/Extension.scala new file mode 100644 index 0000000000..7c582fa8c4 --- /dev/null +++ b/akka-actor/src/main/scala/akka/actor/Extension.scala @@ -0,0 +1,68 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. + */ +package akka.actor + +/** + * The basic ActorSystem covers all that is needed for locally running actors, + * using futures and so on. In addition, more features can hook into it and + * thus become visible to actors et al by registering themselves as extensions. + * This is accomplished by providing an extension—which is an object + * implementing this trait—to `ActorSystem.registerExtension(...)` or by + * specifying the corresponding option in the configuration passed to + * ActorSystem, which will then instantiate (without arguments) each FQCN and + * register the result. + * + * The extension itself can be created in any way desired and has full access + * to the ActorSystem implementation. + * + * Scala example: + * + * {{{ + * class MyExtension extends Extension[MyExtension] { + * def key = MyExtension + * def init(system: ActorSystemImpl) { + * ... // initialize here + * } + * } + * object MyExtension extends ExtensionKey[MyExtension] + * }}} + * + * Java example: + * + * {{{ + * static class MyExtension implements Extension { + * public static ExtensionKey key = new ExtensionKey() {}; + * + * public ExtensionKey key() { + * return key; + * } + * public void init(ActorSystemImpl system) { + * ... // initialize here + * } + * } + * }}} + */ +trait Extension[T <: AnyRef] { + + /** + * This method is called by the ActorSystem upon registering this extension. + * The key returned is used for looking up extensions, hence it must be a + * suitable hash key and available to all clients of the extension. This is + * best achieved by storing it in a static field (Java) or as/in an object + * (Scala). 
+ */ + def key: ExtensionKey[T] + + // FIXME ActorSystemImpl exposed to user API. We might well choose to introduce a new interface for this level of access, just so we can shuffle around the implementation + /** + * This method is called by the ActorSystem when the extension is registered + * to trigger initialization of the extension. + */ + def init(system: ActorSystemImpl): Unit +} + +/** + * Marker trait identifying a registered [[akka.actor.Extension]]. + */ +trait ExtensionKey[T <: AnyRef] diff --git a/akka-actor/src/main/scala/akka/actor/FSM.scala b/akka-actor/src/main/scala/akka/actor/FSM.scala index d4b65ba453..76495843fc 100644 --- a/akka-actor/src/main/scala/akka/actor/FSM.scala +++ b/akka-actor/src/main/scala/akka/actor/FSM.scala @@ -7,6 +7,7 @@ import akka.util._ import scala.collection.mutable import akka.event.Logging +import akka.util.Duration._ object FSM { @@ -28,14 +29,14 @@ object FSM { case object StateTimeout case class TimeoutMarker(generation: Long) - case class Timer(name: String, msg: Any, repeat: Boolean, generation: Int)(implicit app: ActorSystem) { + case class Timer(name: String, msg: Any, repeat: Boolean, generation: Int)(implicit system: ActorSystem) { private var ref: Option[Cancellable] = _ def schedule(actor: ActorRef, timeout: Duration) { if (repeat) { - ref = Some(app.scheduler.schedule(actor, this, timeout.length, timeout.length, timeout.unit)) + ref = Some(system.scheduler.schedule(actor, this, timeout, timeout)) } else { - ref = Some(app.scheduler.scheduleOnce(actor, this, timeout.length, timeout.unit)) + ref = Some(system.scheduler.scheduleOnce(actor, this, timeout)) } } @@ -188,7 +189,7 @@ trait FSM[S, D] extends ListenerManagement { type Timeout = Option[Duration] type TransitionHandler = PartialFunction[(S, S), Unit] - val log = Logging(app.eventStream, context.self) + val log = Logging(system, context.self) /** * **************************************** @@ -522,7 +523,7 @@ trait FSM[S, D] extends ListenerManagement { if 
(timeout.isDefined) { val t = timeout.get if (t.finite_? && t.length >= 0) { - timeoutFuture = Some(app.scheduler.scheduleOnce(self, TimeoutMarker(generation), t.length, t.unit)) + timeoutFuture = Some(system.scheduler.scheduleOnce(self, TimeoutMarker(generation), t)) } } } @@ -565,7 +566,7 @@ trait LoggingFSM[S, D] extends FSM[S, D] { this: Actor ⇒ def logDepth: Int = 0 - private val debugEvent = context.app.AkkaConfig.FsmDebugEvent + private val debugEvent = system.settings.FsmDebugEvent private val events = new Array[Event](logDepth) private val states = new Array[AnyRef](logDepth) diff --git a/akka-actor/src/main/scala/akka/actor/FaultHandling.scala b/akka-actor/src/main/scala/akka/actor/FaultHandling.scala index 90da6dd10a..4656f5a3e3 100644 --- a/akka-actor/src/main/scala/akka/actor/FaultHandling.scala +++ b/akka-actor/src/main/scala/akka/actor/FaultHandling.scala @@ -9,7 +9,7 @@ import scala.collection.mutable.ArrayBuffer import scala.collection.JavaConversions._ import java.lang.{ Iterable ⇒ JIterable } -case class ChildRestartStats(var maxNrOfRetriesCount: Int = 0, var restartTimeWindowStartNanos: Long = 0L) { +case class ChildRestartStats(val child: ActorRef, var maxNrOfRetriesCount: Int = 0, var restartTimeWindowStartNanos: Long = 0L) { def requestRestartPermission(retriesWindow: (Option[Int], Option[Int])): Boolean = retriesWindow match { @@ -118,7 +118,7 @@ abstract class FaultHandlingStrategy { /** * This method is called to act on the failure of a child: restart if the flag is true, stop otherwise. 
*/ - def processFailure(restart: Boolean, child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[(ActorRef, ChildRestartStats)]): Unit + def processFailure(restart: Boolean, child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[ChildRestartStats]): Unit def handleSupervisorFailing(supervisor: ActorRef, children: Iterable[ActorRef]): Unit = { if (children.nonEmpty) @@ -133,7 +133,7 @@ abstract class FaultHandlingStrategy { /** * Returns whether it processed the failure or not */ - def handleFailure(child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[(ActorRef, ChildRestartStats)]): Boolean = { + def handleFailure(child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[ChildRestartStats]): Boolean = { val action = if (decider.isDefinedAt(cause)) decider(cause) else Escalate action match { case Resume ⇒ child.resume(); true @@ -191,12 +191,12 @@ case class AllForOneStrategy(decider: FaultHandlingStrategy.Decider, //TODO optimization to drop all children here already? 
} - def processFailure(restart: Boolean, child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[(ActorRef, ChildRestartStats)]): Unit = { + def processFailure(restart: Boolean, child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[ChildRestartStats]): Unit = { if (children.nonEmpty) { - if (restart && children.forall(_._2.requestRestartPermission(retriesWindow))) - children.foreach(_._1.restart(cause)) + if (restart && children.forall(_.requestRestartPermission(retriesWindow))) + children.foreach(_.child.restart(cause)) else - children.foreach(_._1.stop()) + children.foreach(_.child.stop()) } } } @@ -245,7 +245,7 @@ case class OneForOneStrategy(decider: FaultHandlingStrategy.Decider, def handleChildTerminated(child: ActorRef, children: Iterable[ActorRef]): Unit = {} - def processFailure(restart: Boolean, child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[(ActorRef, ChildRestartStats)]): Unit = { + def processFailure(restart: Boolean, child: ActorRef, cause: Throwable, stats: ChildRestartStats, children: Iterable[ChildRestartStats]): Unit = { if (restart && stats.requestRestartPermission(retriesWindow)) child.restart(cause) else diff --git a/akka-actor/src/main/scala/akka/actor/IO.scala b/akka-actor/src/main/scala/akka/actor/IO.scala index 4f3219a4e2..aeb4e53573 100644 --- a/akka-actor/src/main/scala/akka/actor/IO.scala +++ b/akka-actor/src/main/scala/akka/actor/IO.scala @@ -71,13 +71,11 @@ object IO { case class ServerHandle(owner: ActorRef, ioManager: ActorRef, uuid: UUID = new UUID()) extends Handle { override def asServer = this - def accept(socketOwner: ActorRef): SocketHandle = { + def accept()(implicit socketOwner: ActorRef): SocketHandle = { val socket = SocketHandle(socketOwner, ioManager) ioManager ! 
Accept(socket, this) socket } - - def accept()(implicit socketOwner: ScalaActorRef): SocketHandle = accept(socketOwner) } sealed trait IOMessage @@ -91,35 +89,23 @@ object IO { case class Read(handle: ReadHandle, bytes: ByteString) extends IOMessage case class Write(handle: WriteHandle, bytes: ByteString) extends IOMessage - def listen(ioManager: ActorRef, address: InetSocketAddress, owner: ActorRef): ServerHandle = { + def listen(ioManager: ActorRef, address: InetSocketAddress)(implicit owner: ActorRef): ServerHandle = { val server = ServerHandle(owner, ioManager) ioManager ! Listen(server, address) server } - def listen(ioManager: ActorRef, address: InetSocketAddress)(implicit sender: ScalaActorRef): ServerHandle = - listen(ioManager, address, sender) + def listen(ioManager: ActorRef, host: String, port: Int)(implicit owner: ActorRef): ServerHandle = + listen(ioManager, new InetSocketAddress(host, port)) - def listen(ioManager: ActorRef, host: String, port: Int, owner: ActorRef): ServerHandle = - listen(ioManager, new InetSocketAddress(host, port), owner) - - def listen(ioManager: ActorRef, host: String, port: Int)(implicit sender: ScalaActorRef): ServerHandle = - listen(ioManager, new InetSocketAddress(host, port), sender) - - def connect(ioManager: ActorRef, address: InetSocketAddress, owner: ActorRef): SocketHandle = { + def connect(ioManager: ActorRef, address: InetSocketAddress)(implicit owner: ActorRef): SocketHandle = { val socket = SocketHandle(owner, ioManager) ioManager ! 
Connect(socket, address) socket } - def connect(ioManager: ActorRef, address: InetSocketAddress)(implicit sender: ScalaActorRef): SocketHandle = - connect(ioManager, address, sender) - - def connect(ioManager: ActorRef, host: String, port: Int, owner: ActorRef): SocketHandle = - connect(ioManager, new InetSocketAddress(host, port), owner) - - def connect(ioManager: ActorRef, host: String, port: Int)(implicit sender: ScalaActorRef): SocketHandle = - connect(ioManager, new InetSocketAddress(host, port), sender) + def connect(ioManager: ActorRef, host: String, port: Int)(implicit sender: ActorRef): SocketHandle = + connect(ioManager, new InetSocketAddress(host, port)) private class HandleState(var readBytes: ByteString, var connected: Boolean) { def this() = this(ByteString.empty, false) @@ -253,7 +239,7 @@ class IOManager(bufferSize: Int = 8192) extends Actor { var worker: IOWorker = _ override def preStart { - worker = new IOWorker(app, self, bufferSize) + worker = new IOWorker(system, self, bufferSize) worker.start() } @@ -290,7 +276,7 @@ private[akka] object IOWorker { case object Shutdown extends Request } -private[akka] class IOWorker(app: ActorSystem, ioManager: ActorRef, val bufferSize: Int) { +private[akka] class IOWorker(system: ActorSystem, ioManager: ActorRef, val bufferSize: Int) { import SelectionKey.{ OP_READ, OP_WRITE, OP_ACCEPT, OP_CONNECT } import IOWorker._ diff --git a/akka-actor/src/main/scala/akka/actor/Scheduler.scala b/akka-actor/src/main/scala/akka/actor/Scheduler.scala index d12dcb6329..52a63f1730 100644 --- a/akka-actor/src/main/scala/akka/actor/Scheduler.scala +++ b/akka-actor/src/main/scala/akka/actor/Scheduler.scala @@ -9,46 +9,50 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * - * Rework of David Pollak's ActorPing class in the Lift Project - * which is licensed under the Apache 2 License. 
*/ package akka.actor -import java.util.concurrent._ import akka.util.Duration -import akka.AkkaException -case class SchedulerException(msg: String, e: Throwable) extends AkkaException(msg, e) { - def this(msg: String) = this(msg, null) -} +trait Scheduler { + /** + * Schedules a message to be sent repeatedly with an initial delay and frequency. + * E.g. if you would like a message to be sent immediately and thereafter every 500ms you would set + * delay = Duration.Zero and frequency = Duration(500, TimeUnit.MILLISECONDS) + */ + def schedule(receiver: ActorRef, message: Any, initialDelay: Duration, frequency: Duration): Cancellable -trait JScheduler { - def schedule(receiver: ActorRef, message: Any, initialDelay: Long, delay: Long, timeUnit: TimeUnit): Cancellable - def scheduleOnce(runnable: Runnable, delay: Long, timeUnit: TimeUnit): Cancellable - def scheduleOnce(receiver: ActorRef, message: Any, delay: Long, timeUnit: TimeUnit): Cancellable -} + /** + * Schedules a function to be run repeatedly with an initial delay and a frequency. + * E.g. if you would like the function to be run after 2 seconds and thereafter every 100ms you would set + * delay = Duration(2, TimeUnit.SECONDS) and frequency = Duration(100, TimeUnit.MILLISECONDS) + */ + def schedule(f: () ⇒ Unit, initialDelay: Duration, frequency: Duration): Cancellable -abstract class Scheduler extends JScheduler { - def schedule(f: () ⇒ Unit, initialDelay: Long, delay: Long, timeUnit: TimeUnit): Cancellable + /** + * Schedules a Runnable to be run once with a delay, i.e. a time period that has to pass before the runnable is executed. + */ + def scheduleOnce(runnable: Runnable, delay: Duration): Cancellable - def scheduleOnce(f: () ⇒ Unit, delay: Long, timeUnit: TimeUnit): Cancellable + /** + * Schedules a message to be sent once with a delay, i.e. a time period that has to pass before the message is sent. 
+ */ + def scheduleOnce(receiver: ActorRef, message: Any, delay: Duration): Cancellable - def schedule(receiver: ActorRef, message: Any, initialDelay: Duration, delay: Duration): Cancellable = - schedule(receiver, message, initialDelay.toNanos, delay.toNanos, TimeUnit.NANOSECONDS) - - def schedule(f: () ⇒ Unit, initialDelay: Duration, delay: Duration): Cancellable = - schedule(f, initialDelay.toNanos, delay.toNanos, TimeUnit.NANOSECONDS) - - def scheduleOnce(receiver: ActorRef, message: Any, delay: Duration): Cancellable = - scheduleOnce(receiver, message, delay.length, delay.unit) - - def scheduleOnce(f: () ⇒ Unit, delay: Duration): Cancellable = - scheduleOnce(f, delay.length, delay.unit) + /** + * Schedules a function to be run once with a delay, i.e. a time period that has to pass before the function is run. + */ + def scheduleOnce(f: () ⇒ Unit, delay: Duration): Cancellable } trait Cancellable { + /** + * Cancels the underlying scheduled task. + */ def cancel(): Unit + /** + * Checks if the underlying scheduled task has been cancelled. 
+ */ def isCancelled: Boolean } \ No newline at end of file diff --git a/akka-actor/src/main/scala/akka/actor/TypedActor.scala b/akka-actor/src/main/scala/akka/actor/TypedActor.scala index abc3df50d2..3409aa05e4 100644 --- a/akka-actor/src/main/scala/akka/actor/TypedActor.scala +++ b/akka-actor/src/main/scala/akka/actor/TypedActor.scala @@ -10,13 +10,14 @@ import akka.util.{ Duration } import java.util.concurrent.atomic.{ AtomicReference ⇒ AtomVar } import akka.serialization.{ Serializer, Serialization } import akka.dispatch._ +import akka.serialization.SerializationExtension object TypedActor { /** * This class represents a Method call, and has a reference to the Method to be called and the parameters to supply * It's sent to the ActorRef backing the TypedActor and can be serialized and deserialized */ - case class MethodCall(app: ActorSystem, method: Method, parameters: Array[AnyRef]) { + case class MethodCall(ser: Serialization, method: Method, parameters: Array[AnyRef]) { def isOneWay = method.getReturnType == java.lang.Void.TYPE def returnsFuture_? 
= classOf[Future[_]].isAssignableFrom(method.getReturnType) @@ -40,7 +41,7 @@ object TypedActor { case null ⇒ SerializedMethodCall(method.getDeclaringClass, method.getName, method.getParameterTypes, null, null) case ps if ps.length == 0 ⇒ SerializedMethodCall(method.getDeclaringClass, method.getName, method.getParameterTypes, Array[Serializer.Identifier](), Array[Array[Byte]]()) case ps ⇒ - val serializers: Array[Serializer] = ps map app.serialization.findSerializerFor + val serializers: Array[Serializer] = ps map ser.findSerializerFor val serializedParameters: Array[Array[Byte]] = Array.ofDim[Array[Byte]](serializers.length) for (i ← 0 until serializers.length) serializedParameters(i) = serializers(i) toBinary parameters(i) //Mutable for the sake of sanity @@ -57,17 +58,18 @@ object TypedActor { //TODO implement writeObject and readObject to serialize //TODO Possible optimization is to special encode the parameter-types to conserve space private def readResolve(): AnyRef = { - val app = akka.serialization.Serialization.app.value - if (app eq null) throw new IllegalStateException( + val system = akka.serialization.Serialization.system.value + if (system eq null) throw new IllegalStateException( "Trying to deserialize a SerializedMethodCall without an ActorSystem in scope." + - " Use akka.serialization.Serialization.app.withValue(akkaApplication) { ... }") - MethodCall(app, ownerType.getDeclaredMethod(methodName, parameterTypes: _*), serializedParameters match { + " Use akka.serialization.Serialization.system.withValue(system) { ... 
}") + val serialization = SerializationExtension(system).serialization + MethodCall(serialization, ownerType.getDeclaredMethod(methodName, parameterTypes: _*), serializedParameters match { case null ⇒ null case a if a.length == 0 ⇒ Array[AnyRef]() case a ⇒ val deserializedParameters: Array[AnyRef] = Array.ofDim[AnyRef](a.length) //Mutable for the sake of sanity for (i ← 0 until a.length) { - deserializedParameters(i) = app.serialization.serializerByIdentity(serializerIdentifiers(i)).fromBinary(serializedParameters(i)) + deserializedParameters(i) = serialization.serializerByIdentity(serializerIdentifiers(i)).fromBinary(serializedParameters(i)) } deserializedParameters }) @@ -101,22 +103,22 @@ object TypedActor { } /** - * Returns the akka app (for a TypedActor) when inside a method call in a TypedActor. + * Returns the akka system (for a TypedActor) when inside a method call in a TypedActor. */ - def app = appReference.get match { - case null ⇒ throw new IllegalStateException("Calling TypedActor.app outside of a TypedActor implementation method!") + def system = appReference.get match { + case null ⇒ throw new IllegalStateException("Calling TypedActor.system outside of a TypedActor implementation method!") case some ⇒ some } /** * Returns the default dispatcher (for a TypedActor) when inside a method call in a TypedActor. */ - implicit def dispatcher = app.dispatcher + implicit def dispatcher = system.dispatcher /** * Returns the default timeout (for a TypedActor) when inside a method call in a TypedActor. 
*/ - implicit def timeout = app.AkkaConfig.ActorTimeout + implicit def timeout = system.settings.ActorTimeout } trait TypedActorFactory { this: ActorRefFactory ⇒ @@ -264,7 +266,7 @@ trait TypedActorFactory { this: ActorRefFactory ⇒ * * TypedActors needs, just like Actors, to be Stopped when they are no longer needed, use TypedActor.stop(proxy) */ -class TypedActor(val app: ActorSystem) { +class TypedActor(val settings: ActorSystem.Settings, var ser: Serialization) { import TypedActor.MethodCall /** @@ -313,7 +315,7 @@ class TypedActor(val app: ActorSystem) { //Warning, do not change order of the following statements, it's some elaborate chicken-n-egg handling val actorVar = new AtomVar[ActorRef](null) val timeout = props.timeout match { - case Props.`defaultTimeout` ⇒ app.AkkaConfig.ActorTimeout + case Props.`defaultTimeout` ⇒ settings.ActorTimeout case x ⇒ x } val proxy: T = Proxy.newProxyInstance(loader, interfaces, new TypedActorInvocationHandler(actorVar, timeout)).asInstanceOf[T] @@ -330,7 +332,7 @@ class TypedActor(val app: ActorSystem) { def receive = { case m: MethodCall ⇒ TypedActor.selfReference set proxyVar.get - TypedActor.appReference set app + TypedActor.appReference set system try { if (m.isOneWay) m(me) else { @@ -365,7 +367,7 @@ class TypedActor(val app: ActorSystem) { case "equals" ⇒ (args.length == 1 && (proxy eq args(0)) || actor == getActorRefFor(args(0))).asInstanceOf[AnyRef] //Force boxing of the boolean case "hashCode" ⇒ actor.hashCode.asInstanceOf[AnyRef] case _ ⇒ - MethodCall(app, method, args) match { + MethodCall(ser, method, args) match { case m if m.isOneWay ⇒ actor ! m; null //Null return value case m if m.returnsFuture_? ⇒ actor.?(m, timeout) case m if m.returnsJOption_? || m.returnsOption_? 
⇒ diff --git a/akka-actor/src/main/scala/akka/actor/package.scala b/akka-actor/src/main/scala/akka/actor/package.scala index a3ffb5ae97..569c66f03e 100644 --- a/akka-actor/src/main/scala/akka/actor/package.scala +++ b/akka-actor/src/main/scala/akka/actor/package.scala @@ -8,10 +8,6 @@ package object actor { implicit def actorRef2Scala(ref: ActorRef): ScalaActorRef = ref.asInstanceOf[ScalaActorRef] implicit def scala2ActorRef(ref: ScalaActorRef): ActorRef = ref.asInstanceOf[ActorRef] - // actor path can be used as an actor ref (note: does a lookup in the app using path.ref) - implicit def actorPath2Ref(path: ActorPath): ActorRef = path.ref.getOrElse(path.app.deadLetters) - implicit def actorPath2ScalaRef(path: ActorPath): ScalaActorRef = actorPath2Ref(path).asInstanceOf[ScalaActorRef] - type Uuid = com.eaio.uuid.UUID def newUuid(): Uuid = new Uuid() diff --git a/akka-actor/src/main/scala/akka/cluster/ClusterInterface.scala b/akka-actor/src/main/scala/akka/cluster/ClusterInterface.scala index 37c75716d5..98a9f9f188 100644 --- a/akka-actor/src/main/scala/akka/cluster/ClusterInterface.scala +++ b/akka-actor/src/main/scala/akka/cluster/ClusterInterface.scala @@ -103,7 +103,7 @@ class NodeAddress(val clusterName: String, val nodeName: String) { */ object NodeAddress { def apply(clusterName: String, nodeName: String): NodeAddress = new NodeAddress(clusterName, nodeName) - def apply(app: ActorSystem): NodeAddress = new NodeAddress(app.AkkaConfig.ClusterName, app.nodename) + def apply(system: ActorSystem): NodeAddress = new NodeAddress(system.clustername, system.nodename) def unapply(other: Any) = other match { case address: NodeAddress ⇒ Some((address.clusterName, address.nodeName)) diff --git a/akka-actor/src/main/scala/akka/config/ConfigParser.scala b/akka-actor/src/main/scala/akka/config/ConfigParser.scala deleted file mode 100644 index 4b3d4abdaa..0000000000 --- a/akka-actor/src/main/scala/akka/config/ConfigParser.scala +++ /dev/null @@ -1,74 +0,0 @@ -/** - * Copyright 
(C) 2009-2011 Typesafe Inc. - * - * Based on Configgy by Robey Pointer. - * Copyright 2009 Robey Pointer - * http://www.apache.org/licenses/LICENSE-2.0 - */ - -package akka.config - -import scala.collection.mutable -import scala.util.parsing.combinator._ - -class ConfigParser(var prefix: String = "", map: mutable.Map[String, Any] = mutable.Map.empty[String, Any], importer: Importer) extends RegexParsers { - val sections = mutable.Stack[String]() - - def createPrefix = { - prefix = if (sections.isEmpty) "" else sections.toList.reverse.mkString("", ".", ".") - } - - override val whiteSpace = """(\s+|#[^\n]*\n)+""".r - - // tokens - - val numberToken: Parser[String] = """-?\d+(\.\d+)?""".r - val stringToken: Parser[String] = ("\"" + """([^\\\"]|\\[^ux]|\\\n|\\u[0-9a-fA-F]{4}|\\x[0-9a-fA-F]{2})*""" + "\"").r - val booleanToken: Parser[String] = "(true|on|false|off)".r - val identToken: Parser[String] = """([\da-zA-Z_/][-\w]*)(\.[a-zA-Z_/][-/\w]*)*""".r - val assignToken: Parser[String] = "=".r - val sectionToken: Parser[String] = """[a-zA-Z_/][-/\w]*""".r - - // values - - def value: Parser[Any] = number | string | list | boolean - def number = numberToken - def string = stringToken ^^ { s ⇒ s.substring(1, s.length - 1) } - def list = "[" ~> repsep(string | numberToken, opt(",")) <~ (opt(",") ~ "]") - def boolean = booleanToken - - // parser - - def root = rep(includeFile | assignment | sectionOpen | sectionClose) - - def includeFile = "include" ~> string ^^ { - case filename: String ⇒ - new ConfigParser(prefix, map, importer) parse importer.importFile(filename) - } - - def assignment = identToken ~ assignToken ~ value ^^ { - case k ~ a ~ v ⇒ map(prefix + k) = v - } - - def sectionOpen = sectionToken <~ "{" ^^ { name ⇒ - sections push name - createPrefix - } - - def sectionClose = "}" ^^ { _ ⇒ - if (sections.isEmpty) { - failure("dangling close tag") - } else { - sections.pop - createPrefix - } - } - - def parse(in: String): Map[String, Any] = { - parseAll(root, in) 
match { - case Success(result, _) ⇒ map.toMap - case x @ Failure(msg, _) ⇒ throw new ConfigurationException(x.toString) - case x @ Error(msg, _) ⇒ throw new ConfigurationException(x.toString) - } - } -} diff --git a/akka-actor/src/main/scala/akka/config/Configuration.scala b/akka-actor/src/main/scala/akka/config/Configuration.scala deleted file mode 100644 index bbd7b9a6c4..0000000000 --- a/akka-actor/src/main/scala/akka/config/Configuration.scala +++ /dev/null @@ -1,194 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - * - * Based on Configgy by Robey Pointer. - * Copyright 2009 Robey Pointer - * http://www.apache.org/licenses/LICENSE-2.0 - */ - -package akka.config - -import java.io.File - -object Configuration { - val DefaultPath = new File(".").getCanonicalPath - val DefaultImporter = new FilesystemImporter(DefaultPath) - - val outputConfigSources = System.getProperty("akka.output.config.source") ne null - - def load(data: String, importer: Importer = DefaultImporter): Configuration = { - val parser = new ConfigParser(importer = importer) - new Configuration(parser parse data) - } - - def fromFile(filename: String, importer: Importer): Configuration = { - load(importer.importFile(filename), importer) - } - - def fromFile(path: String, filename: String): Configuration = { - val importer = new FilesystemImporter(path) - fromFile(filename, importer) - } - - def fromFile(filename: String): Configuration = { - val n = filename.lastIndexOf('/') - if (n < 0) { - fromFile(DefaultPath, filename) - } else { - fromFile(filename.substring(0, n), filename.substring(n + 1)) - } - } - - def fromResource(filename: String): Configuration = { - fromResource(filename, ClassLoader.getSystemClassLoader) - } - - def fromResource(filename: String, classLoader: ClassLoader): Configuration = { - val importer = new ResourceImporter(classLoader) - fromFile(filename, importer) - } - - def fromMap(map: Map[String, Any]) = { - new Configuration(map) - } - - def fromString(data: 
String): Configuration = { - load(data) - } - - def apply(pairs: (String, Any)*) = { - new Configuration(Map(pairs: _*)) - } -} - -class Configuration(val map: Map[String, Any]) { - private val trueValues = Set("true", "on") - private val falseValues = Set("false", "off") - - def ++(other: Configuration) = new Configuration(map ++ other.map) - - private def outputIfDesiredAndReturnInput[T](key: String, t: T): T = { - if (Configuration.outputConfigSources) - println("Akka config is using default value for: " + key) - t - } - - def contains(key: String): Boolean = map contains key - - def keys: Iterable[String] = map.keys - - def getAny(key: String): Option[Any] = { - try { - Some(map(key)) - } catch { - case _ ⇒ None - } - } - - def getAny(key: String, defaultValue: Any): Any = - getAny(key).getOrElse(outputIfDesiredAndReturnInput(key, defaultValue)) - - def getListAny(key: String): Seq[Any] = { - try { - map(key).asInstanceOf[Seq[Any]] - } catch { - case _ ⇒ Seq.empty[Any] - } - } - - def getString(key: String): Option[String] = map.get(key).map(_.toString) - - def getString(key: String, defaultValue: String): String = - getString(key).getOrElse(outputIfDesiredAndReturnInput(key, defaultValue)) - - def getList(key: String): Seq[String] = { - try { - map(key).asInstanceOf[Seq[String]] - } catch { - case _ ⇒ Seq.empty[String] - } - } - - def getInt(key: String): Option[Int] = { - try { - Some(map(key).toString.toInt) - } catch { - case _ ⇒ None - } - } - - def getInt(key: String, defaultValue: Int): Int = - getInt(key).getOrElse(outputIfDesiredAndReturnInput(key, defaultValue)) - - def getLong(key: String): Option[Long] = { - try { - Some(map(key).toString.toLong) - } catch { - case _ ⇒ None - } - } - - def getLong(key: String, defaultValue: Long): Long = - getLong(key).getOrElse(outputIfDesiredAndReturnInput(key, defaultValue)) - - def getFloat(key: String): Option[Float] = { - try { - Some(map(key).toString.toFloat) - } catch { - case _ ⇒ None - } - } - - def 
getFloat(key: String, defaultValue: Float): Float = - getFloat(key).getOrElse(outputIfDesiredAndReturnInput(key, defaultValue)) - - def getDouble(key: String): Option[Double] = { - try { - Some(map(key).toString.toDouble) - } catch { - case _ ⇒ None - } - } - - def getDouble(key: String, defaultValue: Double): Double = - getDouble(key).getOrElse(outputIfDesiredAndReturnInput(key, defaultValue)) - - def getBoolean(key: String): Option[Boolean] = { - getString(key) flatMap { s ⇒ - val isTrue = trueValues.contains(s) - if (!isTrue && !falseValues.contains(s)) None - else Some(isTrue) - } - } - - def getBoolean(key: String, defaultValue: Boolean): Boolean = - getBool(key).getOrElse(outputIfDesiredAndReturnInput(key, defaultValue)) - - def getBool(key: String): Option[Boolean] = getBoolean(key) - - def getBool(key: String, defaultValue: Boolean): Boolean = - getBoolean(key, defaultValue) - - def apply(key: String): String = getString(key) match { - case None ⇒ throw new ConfigurationException("undefined config: " + key) - case Some(v) ⇒ v - } - - def apply(key: String, defaultValue: String) = getString(key, defaultValue) - - def apply(key: String, defaultValue: Int) = getInt(key, defaultValue) - - def apply(key: String, defaultValue: Long) = getLong(key, defaultValue) - - def apply(key: String, defaultValue: Boolean) = getBool(key, defaultValue) - - def getSection(name: String): Option[Configuration] = { - val l = name.length + 1 - val pattern = name + "." 
- val m = map.collect { - case (k, v) if k.startsWith(pattern) ⇒ (k.substring(l), v) - } - if (m.isEmpty) None - else Some(new Configuration(m)) - } -} diff --git a/akka-actor/src/main/scala/akka/config/Config.scala b/akka-actor/src/main/scala/akka/config/ConfigurationException.scala similarity index 100% rename from akka-actor/src/main/scala/akka/config/Config.scala rename to akka-actor/src/main/scala/akka/config/ConfigurationException.scala diff --git a/akka-actor/src/main/scala/akka/config/Importer.scala b/akka-actor/src/main/scala/akka/config/Importer.scala deleted file mode 100644 index 6045662f35..0000000000 --- a/akka-actor/src/main/scala/akka/config/Importer.scala +++ /dev/null @@ -1,64 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - * - * Based on Configgy by Robey Pointer. - * Copyright 2009 Robey Pointer - * http://www.apache.org/licenses/LICENSE-2.0 - */ - -package akka.config - -import java.io.{ BufferedReader, File, FileInputStream, InputStream, InputStreamReader } - -/** - * An interface for finding config files and reading them into strings for - * parsing. This is used to handle `include` directives in config files. - */ -trait Importer { - - def importFile(filename: String): String - - private val BUFFER_SIZE = 8192 - - protected def streamToString(in: InputStream): String = { - try { - val reader = new BufferedReader(new InputStreamReader(in, "UTF-8")) - val buffer = new Array[Char](BUFFER_SIZE) - val sb = new StringBuilder - var n = 0 - while (n >= 0) { - n = reader.read(buffer, 0, buffer.length) - if (n >= 0) { - sb.appendAll(buffer, 0, n) - } - } - in.close() - sb.toString - } catch { - case x ⇒ throw new ConfigurationException(x.toString) - } - } -} - -/** - * An Importer that looks for imported config files in the filesystem. - * This is the default importer. 
- */ -class FilesystemImporter(val baseDir: String) extends Importer { - def importFile(filename: String): String = { - val f = new File(filename) - val file = if (f.isAbsolute) f else new File(baseDir, filename) - streamToString(new FileInputStream(file)) - } -} - -/** - * An Importer that looks for imported config files in the java resources - * of the system class loader (usually the jar used to launch this app). - */ -class ResourceImporter(classLoader: ClassLoader) extends Importer { - def importFile(filename: String): String = { - val stream = classLoader.getResourceAsStream(filename) - streamToString(stream) - } -} diff --git a/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala index 5a91d93d0d..208eae51ca 100644 --- a/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala +++ b/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala @@ -6,7 +6,6 @@ package akka.dispatch import java.util.concurrent._ import akka.event.Logging.Error -import akka.config.Configuration import akka.util.{ Duration, Switch, ReentrantGuard } import atomic.{ AtomicInteger, AtomicLong } import java.util.concurrent.ThreadPoolExecutor.{ AbortPolicy, CallerRunsPolicy, DiscardOldestPolicy, DiscardPolicy } @@ -14,6 +13,9 @@ import akka.actor._ import akka.actor.ActorSystem import locks.ReentrantLock import scala.annotation.tailrec +import akka.event.EventStream +import akka.actor.ActorSystem.Settings +import com.typesafe.config.Config /** * @author Jonas Bonér @@ -62,12 +64,12 @@ case class Supervise(child: ActorRef) extends SystemMessage // sent to superviso case class Link(subject: ActorRef) extends SystemMessage // sent to self from ActorCell.startsWatching case class Unlink(subject: ActorRef) extends SystemMessage // sent to self from ActorCell.stopsWatching -final case class TaskInvocation(app: ActorSystem, function: () ⇒ Unit, cleanup: () ⇒ Unit) extends Runnable { +final case class 
TaskInvocation(eventStream: EventStream, function: () ⇒ Unit, cleanup: () ⇒ Unit) extends Runnable { def run() { try { function() } catch { - case e ⇒ app.eventStream.publish(Error(e, this, e.getMessage)) + case e ⇒ eventStream.publish(Error(e, "TaskInvocation", e.getMessage)) } finally { cleanup() } @@ -79,26 +81,23 @@ object MessageDispatcher { val SCHEDULED = 1 val RESCHEDULED = 2 - implicit def defaultDispatcher(implicit app: ActorSystem) = app.dispatcher + implicit def defaultDispatcher(implicit system: ActorSystem) = system.dispatcher } /** * @author Jonas Bonér */ -abstract class MessageDispatcher(val app: ActorSystem) extends AbstractMessageDispatcher with Serializable { +abstract class MessageDispatcher(val prerequisites: DispatcherPrerequisites) extends AbstractMessageDispatcher with Serializable { + import MessageDispatcher._ import AbstractMessageDispatcher.{ inhabitantsUpdater, shutdownScheduleUpdater } + import prerequisites._ /** * Creates and returns a mailbox for the given actor. */ protected[akka] def createMailbox(actor: ActorCell): Mailbox - /** - * a blackhole mailbox for the purpose of replacing the real one upon actor termination - */ - import app.deadLetterMailbox - /** * Name of this dispatcher. 
*/ @@ -119,7 +118,7 @@ abstract class MessageDispatcher(val app: ActorSystem) extends AbstractMessageDi } protected[akka] final def dispatchTask(block: () ⇒ Unit) { - val invocation = TaskInvocation(app, block, taskCleanup) + val invocation = TaskInvocation(eventStream, block, taskCleanup) inhabitantsUpdater.incrementAndGet(this) try { executeTask(invocation) @@ -136,7 +135,7 @@ abstract class MessageDispatcher(val app: ActorSystem) extends AbstractMessageDi shutdownScheduleUpdater.get(this) match { case UNSCHEDULED ⇒ if (shutdownScheduleUpdater.compareAndSet(this, UNSCHEDULED, SCHEDULED)) { - app.scheduler.scheduleOnce(shutdownAction, timeoutMs, TimeUnit.MILLISECONDS) + scheduler.scheduleOnce(shutdownAction, Duration(shutdownTimeout.toMillis, TimeUnit.MILLISECONDS)) () } else ifSensibleToDoSoThenScheduleShutdown() case SCHEDULED ⇒ @@ -211,17 +210,18 @@ abstract class MessageDispatcher(val app: ActorSystem) extends AbstractMessageDi } case RESCHEDULED ⇒ if (shutdownScheduleUpdater.compareAndSet(MessageDispatcher.this, RESCHEDULED, SCHEDULED)) - app.scheduler.scheduleOnce(this, timeoutMs, TimeUnit.MILLISECONDS) + scheduler.scheduleOnce(this, Duration(shutdownTimeout.toMillis, TimeUnit.MILLISECONDS)) else run() } } } /** - * When the dispatcher no longer has any actors registered, how long will it wait until it shuts itself down, in Ms - * defaulting to your akka configs "akka.actor.dispatcher-shutdown-timeout" or otherwise, 1 Second + * When the dispatcher no longer has any actors registered, how long will it wait until it shuts itself down, + * defaulting to your akka configs "akka.actor.dispatcher-shutdown-timeout" or default specified in + * akka-actor-reference.conf */ - protected[akka] def timeoutMs: Long + protected[akka] def shutdownTimeout: Duration /** * After the call to this method, the dispatcher mustn't begin any new message processing for the specified reference @@ -237,10 +237,8 @@ abstract class MessageDispatcher(val app: ActorSystem) extends 
AbstractMessageDi */ def resume(actor: ActorCell): Unit = { val mbox = actor.mailbox - if (mbox.dispatcher eq this) { - mbox.becomeOpen() + if ((mbox.dispatcher eq this) && mbox.becomeOpen()) registerForExecution(mbox, false, false) - } } /** @@ -260,10 +258,10 @@ abstract class MessageDispatcher(val app: ActorSystem) extends AbstractMessageDi // TODO check whether this should not actually be a property of the mailbox protected[akka] def throughput: Int - protected[akka] def throughputDeadlineTime: Int + protected[akka] def throughputDeadlineTime: Duration @inline - protected[akka] final val isThroughputDeadlineTimeDefined = throughputDeadlineTime > 0 + protected[akka] final val isThroughputDeadlineTimeDefined = throughputDeadlineTime.toMillis > 0 @inline protected[akka] final val isThroughputDefined = throughput > 1 @@ -289,35 +287,36 @@ abstract class MessageDispatcher(val app: ActorSystem) extends AbstractMessageDi /** * Trait to be used for hooking in new dispatchers into Dispatchers.fromConfig */ -abstract class MessageDispatcherConfigurator(val app: ActorSystem) { +abstract class MessageDispatcherConfigurator() { /** * Returns an instance of MessageDispatcher given a Configuration */ - def configure(config: Configuration): MessageDispatcher + def configure(config: Config, settings: Settings, prerequisites: DispatcherPrerequisites): MessageDispatcher - def mailboxType(config: Configuration): MailboxType = { - val capacity = config.getInt("mailbox-capacity", app.AkkaConfig.MailboxCapacity) + def mailboxType(config: Config, settings: Settings): MailboxType = { + val capacity = config.getInt("mailbox-capacity") if (capacity < 1) UnboundedMailbox() else { - val duration = Duration( - config.getInt("mailbox-push-timeout-time", app.AkkaConfig.MailboxPushTimeout.toMillis.toInt), - app.AkkaConfig.DefaultTimeUnit) + val duration = Duration(config.getNanoseconds("mailbox-push-timeout-time"), TimeUnit.NANOSECONDS) BoundedMailbox(capacity, duration) } } - def 
configureThreadPool(config: Configuration, createDispatcher: ⇒ (ThreadPoolConfig) ⇒ MessageDispatcher): ThreadPoolConfigDispatcherBuilder = { + def configureThreadPool(config: Config, + settings: Settings, + createDispatcher: ⇒ (ThreadPoolConfig) ⇒ MessageDispatcher): ThreadPoolConfigDispatcherBuilder = { import ThreadPoolConfigDispatcherBuilder.conf_? //Apply the following options to the config if they are present in the config - ThreadPoolConfigDispatcherBuilder(createDispatcher, ThreadPoolConfig(app)).configure( - conf_?(config getInt "keep-alive-time")(time ⇒ _.setKeepAliveTime(Duration(time, app.AkkaConfig.DefaultTimeUnit))), - conf_?(config getDouble "core-pool-size-factor")(factor ⇒ _.setCorePoolSizeFromFactor(factor)), - conf_?(config getDouble "max-pool-size-factor")(factor ⇒ _.setMaxPoolSizeFromFactor(factor)), - conf_?(config getBool "allow-core-timeout")(allow ⇒ _.setAllowCoreThreadTimeout(allow)), - conf_?(config getInt "task-queue-size" flatMap { + + ThreadPoolConfigDispatcherBuilder(createDispatcher, ThreadPoolConfig()).configure( + conf_?(Some(config getMilliseconds "keep-alive-time"))(time ⇒ _.setKeepAliveTime(Duration(time, TimeUnit.MILLISECONDS))), + conf_?(Some(config getDouble "core-pool-size-factor"))(factor ⇒ _.setCorePoolSizeFromFactor(factor)), + conf_?(Some(config getDouble "max-pool-size-factor"))(factor ⇒ _.setMaxPoolSizeFromFactor(factor)), + conf_?(Some(config getBoolean "allow-core-timeout"))(allow ⇒ _.setAllowCoreThreadTimeout(allow)), + conf_?(Some(config getInt "task-queue-size") flatMap { case size if size > 0 ⇒ - config getString "task-queue-type" map { + Some(config getString "task-queue-type") map { case "array" ⇒ ThreadPoolConfig.arrayBlockingQueue(size, false) //TODO config fairness? case "" | "linked" ⇒ ThreadPoolConfig.linkedBlockingQueue(size) case x ⇒ throw new IllegalArgumentException("[%s] is not a valid task-queue-type [array|linked]!" 
format x) diff --git a/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala index 32a8268c29..6f45d8629c 100644 --- a/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala +++ b/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala @@ -10,6 +10,10 @@ import java.util.concurrent.{ LinkedBlockingQueue, ConcurrentLinkedQueue, Concur import java.util.{ Comparator, Queue } import annotation.tailrec import akka.actor.ActorSystem +import akka.event.EventStream +import akka.actor.Scheduler +import java.util.concurrent.atomic.AtomicBoolean +import akka.util.Duration /** * An executor based event driven dispatcher which will try to redistribute work from busy actors to idle actors. It is assumed @@ -28,27 +32,24 @@ import akka.actor.ActorSystem * @author Viktor Klang */ class BalancingDispatcher( - _app: ActorSystem, + _prerequisites: DispatcherPrerequisites, _name: String, throughput: Int, - throughputDeadlineTime: Int, + throughputDeadlineTime: Duration, mailboxType: MailboxType, config: ThreadPoolConfig, - _timeoutMs: Long) - extends Dispatcher(_app, _name, throughput, throughputDeadlineTime, mailboxType, config, _timeoutMs) { + _shutdownTimeout: Duration) + extends Dispatcher(_prerequisites, _name, throughput, throughputDeadlineTime, mailboxType, config, _shutdownTimeout) { - import app.deadLetterMailbox + val buddies = new ConcurrentSkipListSet[ActorCell](akka.util.Helpers.IdentityHashComparator) + val rebalance = new AtomicBoolean(false) - private val buddies = new ConcurrentSkipListSet[ActorCell](akka.util.Helpers.IdentityHashComparator) - - protected val messageQueue: MessageQueue = mailboxType match { + val messageQueue: MessageQueue = mailboxType match { case u: UnboundedMailbox ⇒ new QueueBasedMessageQueue with UnboundedMessageQueueSemantics { final val queue = new ConcurrentLinkedQueue[Envelope] - final val dispatcher = BalancingDispatcher.this } case 
BoundedMailbox(cap, timeout) ⇒ new QueueBasedMessageQueue with BoundedMessageQueueSemantics { final val queue = new LinkedBlockingQueue[Envelope](cap) - final val dispatcher = BalancingDispatcher.this final val pushTimeOut = timeout } case other ⇒ throw new IllegalArgumentException("Only handles BoundedMailbox and UnboundedMailbox, but you specified [" + other + "]") @@ -68,50 +69,49 @@ class BalancingDispatcher( protected[akka] override def register(actor: ActorCell) = { super.register(actor) - registerForExecution(actor.mailbox, false, false) //Allow newcomers to be productive from the first moment + buddies.add(actor) } protected[akka] override def unregister(actor: ActorCell) = { - super.unregister(actor) - intoTheFray(except = actor) buddies.remove(actor) + super.unregister(actor) + intoTheFray(except = actor) //When someone leaves, he tosses a friend into the fray } protected override def cleanUpMailboxFor(actor: ActorCell, mailBox: Mailbox) { if (mailBox.hasSystemMessages) { - var messages = mailBox.systemDrain() - while (messages ne null) { - deadLetterMailbox.systemEnqueue(actor.self, messages) //Send to dead letter queue - messages = messages.next - if (messages eq null) //Make sure that any system messages received after the current drain are also sent to the dead letter mbox - messages = mailBox.systemDrain() + var message = mailBox.systemDrain() + while (message ne null) { + // message must be “virgin” before being able to systemEnqueue again + val next = message.next + message.next = null + prerequisites.deadLetterMailbox.systemEnqueue(actor.self, message) + message = next } } } - protected[akka] override def registerForExecution(mbox: Mailbox, hasMessagesHint: Boolean, hasSystemMessagesHint: Boolean): Boolean = { - if (!super.registerForExecution(mbox, hasMessagesHint, hasSystemMessagesHint)) { - mbox match { - case share: SharingMailbox if !share.isClosed ⇒ buddies.add(share.actor); false - case _ ⇒ false - } - } else true - } + def 
intoTheFray(except: ActorCell): Unit = + if (rebalance.compareAndSet(false, true)) { + try { + val i = buddies.iterator() - def intoTheFray(except: ActorCell): Unit = { - var buddy = buddies.pollFirst() - while (buddy ne null) { - val mbox = buddy.mailbox - buddy = if ((buddy eq except) || (!registerForExecution(mbox, false, false) && mbox.isClosed)) buddies.pollFirst() else null + @tailrec + def throwIn(): Unit = { + val n = if (i.hasNext) i.next() else null + if (n eq null) () + else if ((n ne except) && registerForExecution(n.mailbox, false, false)) () + else throwIn() + } + throwIn() + } finally { + rebalance.set(false) + } } - } override protected[akka] def dispatch(receiver: ActorCell, invocation: Envelope) = { messageQueue.enqueue(receiver.self, invocation) - + registerForExecution(receiver.mailbox, false, false) intoTheFray(except = receiver) - - if (!registerForExecution(receiver.mailbox, false, false)) - intoTheFray(except = receiver) } } diff --git a/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala index fda365badb..ee28fd586e 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala @@ -9,6 +9,9 @@ import java.util.concurrent.atomic.AtomicReference import java.util.concurrent.{ TimeUnit, ExecutorService, RejectedExecutionException, ConcurrentLinkedQueue } import akka.actor.{ ActorCell, ActorKilledException } import akka.actor.ActorSystem +import akka.event.EventStream +import akka.actor.Scheduler +import akka.util.Duration /** * Default settings are: @@ -33,7 +36,7 @@ import akka.actor.ActorSystem * .withNewThreadPoolWithBoundedBlockingQueue(100) * .setCorePoolSize(16) * .setMaxPoolSize(128) - * .setKeepAliveTimeInMillis(60000) + * .setKeepAliveTime(60 seconds) * .buildThreadPool * *

@@ -47,7 +50,7 @@ import akka.actor.ActorSystem * .withNewThreadPoolWithBoundedBlockingQueue(100) * .setCorePoolSize(16) * .setMaxPoolSize(128) - * .setKeepAliveTimeInMillis(60000) + * .setKeepAliveTime(60 seconds) * .buildThreadPool(); * *

@@ -62,14 +65,14 @@ import akka.actor.ActorSystem * Larger values (or zero or negative) increase throughput, smaller values increase fairness */ class Dispatcher( - _app: ActorSystem, + _prerequisites: DispatcherPrerequisites, val name: String, val throughput: Int, - val throughputDeadlineTime: Int, + val throughputDeadlineTime: Duration, val mailboxType: MailboxType, executorServiceFactoryProvider: ExecutorServiceFactoryProvider, - val timeoutMs: Long) - extends MessageDispatcher(_app) { + val shutdownTimeout: Duration) + extends MessageDispatcher(_prerequisites) { protected[akka] val executorServiceFactory = executorServiceFactoryProvider.createExecutorServiceFactory(name) protected[akka] val executorService = new AtomicReference[ExecutorService](new ExecutorServiceDelegate { @@ -97,7 +100,7 @@ class Dispatcher( executorService.get() execute invocation } catch { case e2: RejectedExecutionException ⇒ - app.eventStream.publish(Warning(this, e2.toString)) + prerequisites.eventStream.publish(Warning("Dispatcher", e2.toString)) throw e2 } } @@ -105,20 +108,16 @@ class Dispatcher( protected[akka] def createMailbox(actor: ActorCell): Mailbox = mailboxType.create(actor) - protected[akka] def shutdown { - executorService.getAndSet(new ExecutorServiceDelegate { + protected[akka] def shutdown: Unit = + Option(executorService.getAndSet(new ExecutorServiceDelegate { lazy val executor = executorServiceFactory.createExecutorService - }) match { - case null ⇒ - case some ⇒ some.shutdown() - } - } + })) foreach { _.shutdown() } /** * Returns if it was registered */ protected[akka] override def registerForExecution(mbox: Mailbox, hasMessageHint: Boolean, hasSystemMessageHint: Boolean): Boolean = { - if (mbox.shouldBeScheduledForExecution(hasMessageHint, hasSystemMessageHint)) { //This needs to be here to ensure thread safety and no races + if (mbox.canBeScheduledForExecution(hasMessageHint, hasSystemMessageHint)) { //This needs to be here to ensure thread safety and no races if 
(mbox.setAsScheduled()) { try { executorService.get() execute mbox diff --git a/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala b/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala index a91af81cb1..42c96c8296 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala @@ -7,9 +7,24 @@ package akka.dispatch import akka.actor.LocalActorRef import akka.actor.newUuid import akka.util.{ Duration, ReflectiveAccess } -import akka.config.Configuration import java.util.concurrent.TimeUnit import akka.actor.ActorSystem +import akka.event.EventStream +import akka.actor.Scheduler +import akka.actor.ActorSystem.Settings +import com.typesafe.config.Config +import com.typesafe.config.ConfigFactory + +trait DispatcherPrerequisites { + def eventStream: EventStream + def deadLetterMailbox: Mailbox + def scheduler: Scheduler +} + +case class DefaultDispatcherPrerequisites( + val eventStream: EventStream, + val deadLetterMailbox: Mailbox, + val scheduler: Scheduler) extends DispatcherPrerequisites /** * Scala API. Dispatcher factory. @@ -21,7 +36,7 @@ import akka.actor.ActorSystem * .withNewThreadPoolWithLinkedBlockingQueueWithCapacity(100) * .setCorePoolSize(16) * .setMaxPoolSize(128) - * .setKeepAliveTimeInMillis(60000) + * .setKeepAliveTime(60 seconds) * .build * *

@@ -34,22 +49,24 @@ import akka.actor.ActorSystem * .withNewThreadPoolWithLinkedBlockingQueueWithCapacity(100) * .setCorePoolSize(16) * .setMaxPoolSize(128) - * .setKeepAliveTimeInMillis(60000) + * .setKeepAliveTime(60 seconds) * .build(); * *

* * @author Jonas Bonér */ -class Dispatchers(val app: ActorSystem) { - val ThroughputDeadlineTimeMillis = app.AkkaConfig.DispatcherThroughputDeadlineTime.toMillis.toInt - val MailboxType: MailboxType = - if (app.AkkaConfig.MailboxCapacity < 1) UnboundedMailbox() - else BoundedMailbox(app.AkkaConfig.MailboxCapacity, app.AkkaConfig.MailboxPushTimeout) - val DispatcherShutdownMillis = app.AkkaConfig.DispatcherDefaultShutdown.toMillis +class Dispatchers(val settings: ActorSystem.Settings, val prerequisites: DispatcherPrerequisites) { + val MailboxType: MailboxType = + if (settings.MailboxCapacity < 1) UnboundedMailbox() + else BoundedMailbox(settings.MailboxCapacity, settings.MailboxPushTimeout) + + val defaultDispatcherConfig = settings.config.getConfig("akka.actor.default-dispatcher") + + // TODO PN Shouldn't we fail hard if default-dispatcher is wrong? lazy val defaultGlobalDispatcher = - app.config.getSection("akka.actor.default-dispatcher").flatMap(from) getOrElse newDispatcher("AkkaDefaultGlobalDispatcher", 1, MailboxType).build + from(defaultDispatcherConfig) getOrElse newDispatcher("AkkaDefaultGlobalDispatcher", 1, MailboxType).build /** * Creates an thread based dispatcher serving a single actor through the same single thread. @@ -58,8 +75,8 @@ class Dispatchers(val app: ActorSystem) { * E.g. each actor consumes its own thread. */ def newPinnedDispatcher(actor: LocalActorRef) = actor match { - case null ⇒ new PinnedDispatcher(app, null, "anon", MailboxType, DispatcherShutdownMillis) - case some ⇒ new PinnedDispatcher(app, some.underlying, some.address, MailboxType, DispatcherShutdownMillis) + case null ⇒ new PinnedDispatcher(prerequisites, null, "anon", MailboxType, settings.DispatcherDefaultShutdown) + case some ⇒ new PinnedDispatcher(prerequisites, some.underlying, some.address, MailboxType, settings.DispatcherDefaultShutdown) } /** @@ -69,8 +86,8 @@ class Dispatchers(val app: ActorSystem) { * E.g. each actor consumes its own thread. 
*/ def newPinnedDispatcher(actor: LocalActorRef, mailboxType: MailboxType) = actor match { - case null ⇒ new PinnedDispatcher(app, null, "anon", mailboxType, DispatcherShutdownMillis) - case some ⇒ new PinnedDispatcher(app, some.underlying, some.address, mailboxType, DispatcherShutdownMillis) + case null ⇒ new PinnedDispatcher(prerequisites, null, "anon", mailboxType, settings.DispatcherDefaultShutdown) + case some ⇒ new PinnedDispatcher(prerequisites, some.underlying, some.address, mailboxType, settings.DispatcherDefaultShutdown) } /** @@ -79,7 +96,7 @@ class Dispatchers(val app: ActorSystem) { * E.g. each actor consumes its own thread. */ def newPinnedDispatcher(name: String, mailboxType: MailboxType) = - new PinnedDispatcher(app, null, name, mailboxType, DispatcherShutdownMillis) + new PinnedDispatcher(prerequisites, null, name, mailboxType, settings.DispatcherDefaultShutdown) /** * Creates an thread based dispatcher serving a single actor through the same single thread. @@ -87,7 +104,7 @@ class Dispatchers(val app: ActorSystem) { * E.g. each actor consumes its own thread. */ def newPinnedDispatcher(name: String) = - new PinnedDispatcher(app, null, name, MailboxType, DispatcherShutdownMillis) + new PinnedDispatcher(prerequisites, null, name, MailboxType, settings.DispatcherDefaultShutdown) /** * Creates a executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool. @@ -95,8 +112,8 @@ class Dispatchers(val app: ActorSystem) { * Has a fluent builder interface for configuring its semantics. 
*/ def newDispatcher(name: String) = - ThreadPoolConfigDispatcherBuilder(config ⇒ new Dispatcher(app, name, app.AkkaConfig.DispatcherThroughput, - ThroughputDeadlineTimeMillis, MailboxType, config, DispatcherShutdownMillis), ThreadPoolConfig(app)) + ThreadPoolConfigDispatcherBuilder(config ⇒ new Dispatcher(prerequisites, name, settings.DispatcherThroughput, + settings.DispatcherThroughputDeadlineTime, MailboxType, config, settings.DispatcherDefaultShutdown), ThreadPoolConfig()) /** * Creates a executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool. @@ -105,16 +122,17 @@ class Dispatchers(val app: ActorSystem) { */ def newDispatcher(name: String, throughput: Int, mailboxType: MailboxType) = ThreadPoolConfigDispatcherBuilder(config ⇒ - new Dispatcher(app, name, throughput, ThroughputDeadlineTimeMillis, mailboxType, config, DispatcherShutdownMillis), ThreadPoolConfig(app)) + new Dispatcher(prerequisites, name, throughput, settings.DispatcherThroughputDeadlineTime, mailboxType, + config, settings.DispatcherDefaultShutdown), ThreadPoolConfig()) /** * Creates a executor-based event-driven dispatcher serving multiple (millions) of actors through a thread pool. *

* Has a fluent builder interface for configuring its semantics. */ - def newDispatcher(name: String, throughput: Int, throughputDeadlineMs: Int, mailboxType: MailboxType) = + def newDispatcher(name: String, throughput: Int, throughputDeadline: Duration, mailboxType: MailboxType) = ThreadPoolConfigDispatcherBuilder(config ⇒ - new Dispatcher(app, name, throughput, throughputDeadlineMs, mailboxType, config, DispatcherShutdownMillis), ThreadPoolConfig(app)) + new Dispatcher(prerequisites, name, throughput, throughputDeadline, mailboxType, config, settings.DispatcherDefaultShutdown), ThreadPoolConfig()) /** * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool. @@ -122,8 +140,8 @@ class Dispatchers(val app: ActorSystem) { * Has a fluent builder interface for configuring its semantics. */ def newBalancingDispatcher(name: String) = - ThreadPoolConfigDispatcherBuilder(config ⇒ new BalancingDispatcher(app, name, app.AkkaConfig.DispatcherThroughput, - ThroughputDeadlineTimeMillis, MailboxType, config, DispatcherShutdownMillis), ThreadPoolConfig(app)) + ThreadPoolConfigDispatcherBuilder(config ⇒ new BalancingDispatcher(prerequisites, name, settings.DispatcherThroughput, + settings.DispatcherThroughputDeadlineTime, MailboxType, config, settings.DispatcherDefaultShutdown), ThreadPoolConfig()) /** * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool. 
@@ -132,7 +150,8 @@ class Dispatchers(val app: ActorSystem) { */ def newBalancingDispatcher(name: String, throughput: Int) = ThreadPoolConfigDispatcherBuilder(config ⇒ - new BalancingDispatcher(app, name, throughput, ThroughputDeadlineTimeMillis, MailboxType, config, DispatcherShutdownMillis), ThreadPoolConfig(app)) + new BalancingDispatcher(prerequisites, name, throughput, settings.DispatcherThroughputDeadlineTime, MailboxType, + config, settings.DispatcherDefaultShutdown), ThreadPoolConfig()) /** * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool. @@ -141,27 +160,41 @@ class Dispatchers(val app: ActorSystem) { */ def newBalancingDispatcher(name: String, throughput: Int, mailboxType: MailboxType) = ThreadPoolConfigDispatcherBuilder(config ⇒ - new BalancingDispatcher(app, name, throughput, ThroughputDeadlineTimeMillis, mailboxType, config, DispatcherShutdownMillis), ThreadPoolConfig(app)) + new BalancingDispatcher(prerequisites, name, throughput, settings.DispatcherThroughputDeadlineTime, mailboxType, + config, settings.DispatcherDefaultShutdown), ThreadPoolConfig()) /** * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool. *

* Has a fluent builder interface for configuring its semantics. */ - def newBalancingDispatcher(name: String, throughput: Int, throughputDeadlineMs: Int, mailboxType: MailboxType) = + def newBalancingDispatcher(name: String, throughput: Int, throughputDeadline: Duration, mailboxType: MailboxType) = ThreadPoolConfigDispatcherBuilder(config ⇒ - new BalancingDispatcher(app, name, throughput, throughputDeadlineMs, mailboxType, config, DispatcherShutdownMillis), ThreadPoolConfig(app)) + new BalancingDispatcher(prerequisites, name, throughput, throughputDeadline, mailboxType, + config, settings.DispatcherDefaultShutdown), ThreadPoolConfig()) /** * Utility function that tries to load the specified dispatcher config from the akka.conf - * or else use the supplied default dispatcher + * or if not defined it uses the supplied dispatcher. + * Uses default values from default-dispatcher, i.e. all options doesn't need to be defined + * in config. */ - def fromConfig(key: String, default: ⇒ MessageDispatcher = defaultGlobalDispatcher): MessageDispatcher = - app.config getSection key flatMap from getOrElse default + def fromConfig(key: String, default: ⇒ MessageDispatcher = defaultGlobalDispatcher, cfg: Config = settings.config): MessageDispatcher = { + import scala.collection.JavaConverters._ + def simpleName = key.substring(key.lastIndexOf('.') + 1) + cfg.hasPath(key) match { + case false ⇒ default + case true ⇒ + val conf = cfg.getConfig(key) + val confWithName = conf.withFallback(ConfigFactory.parseMap(Map("name" -> simpleName).asJava)) + from(confWithName).getOrElse(default) + } + } /* - * Creates of obtains a dispatcher from a ConfigMap according to the format below + * Creates of obtains a dispatcher from a ConfigMap according to the format below. + * Uses default values from default-dispatcher. 
* - * default-dispatcher { + * my-dispatcher { * type = "Dispatcher" # Must be one of the following * # Dispatcher, (BalancingDispatcher, only valid when all actors using it are of the same type), * # A FQCN to a class inheriting MessageDispatcherConfigurator with a no-arg visible constructor @@ -172,18 +205,18 @@ class Dispatchers(val app: ActorSystem) { * allow-core-timeout = on # Allow core threads to time out * throughput = 5 # Throughput for Dispatcher * } - * ex: from(config.getConfigMap(identifier).get) + * ex: from(config.getConfig(identifier).get) * * Gotcha: Only configures the dispatcher if possible - * Returns: None if "type" isn't specified in the config * Throws: IllegalArgumentException if the value of "type" is not valid * IllegalArgumentException if it cannot create the MessageDispatcherConfigurator */ - def from(cfg: Configuration): Option[MessageDispatcher] = { - cfg.getString("type") flatMap { - case "Dispatcher" ⇒ Some(new DispatcherConfigurator(app)) - case "BalancingDispatcher" ⇒ Some(new BalancingDispatcherConfigurator(app)) - case "GlobalDispatcher" ⇒ None //TODO FIXME remove this + def from(cfg: Config): Option[MessageDispatcher] = { + val cfgWithFallback = cfg.withFallback(defaultDispatcherConfig) + + val dispatcherConfigurator = cfgWithFallback.getString("type") match { + case "Dispatcher" ⇒ Some(new DispatcherConfigurator()) + case "BalancingDispatcher" ⇒ Some(new BalancingDispatcherConfigurator()) case fqn ⇒ ReflectiveAccess.getClassFor[MessageDispatcherConfigurator](fqn) match { case Right(clazz) ⇒ @@ -196,32 +229,36 @@ class Dispatchers(val app: ActorSystem) { case Left(exception) ⇒ throw new IllegalArgumentException("Unknown MessageDispatcherConfigurator type [%s]" format fqn, exception) } - } map { - _ configure cfg } + + dispatcherConfigurator map (_.configure(cfgWithFallback, settings, prerequisites)) } } -class DispatcherConfigurator(app: ActorSystem) extends MessageDispatcherConfigurator(app) { - def configure(config: 
Configuration): MessageDispatcher = { - configureThreadPool(config, threadPoolConfig ⇒ new Dispatcher(app, - config.getString("name", newUuid.toString), - config.getInt("throughput", app.AkkaConfig.DispatcherThroughput), - config.getInt("throughput-deadline-time", app.AkkaConfig.DispatcherThroughputDeadlineTime.toMillis.toInt), - mailboxType(config), - threadPoolConfig, - app.AkkaConfig.DispatcherDefaultShutdown.toMillis)).build +class DispatcherConfigurator() extends MessageDispatcherConfigurator() { + def configure(config: Config, settings: Settings, prerequisites: DispatcherPrerequisites): MessageDispatcher = { + configureThreadPool(config, + settings, + threadPoolConfig ⇒ new Dispatcher(prerequisites, + config.getString("name"), + config.getInt("throughput"), + Duration(config.getNanoseconds("throughput-deadline-time"), TimeUnit.NANOSECONDS), + mailboxType(config, settings), + threadPoolConfig, + settings.DispatcherDefaultShutdown)).build } } -class BalancingDispatcherConfigurator(app: ActorSystem) extends MessageDispatcherConfigurator(app) { - def configure(config: Configuration): MessageDispatcher = { - configureThreadPool(config, threadPoolConfig ⇒ new BalancingDispatcher(app, - config.getString("name", newUuid.toString), - config.getInt("throughput", app.AkkaConfig.DispatcherThroughput), - config.getInt("throughput-deadline-time", app.AkkaConfig.DispatcherThroughputDeadlineTime.toMillis.toInt), - mailboxType(config), - threadPoolConfig, - app.AkkaConfig.DispatcherDefaultShutdown.toMillis)).build +class BalancingDispatcherConfigurator() extends MessageDispatcherConfigurator() { + def configure(config: Config, settings: Settings, prerequisites: DispatcherPrerequisites): MessageDispatcher = { + configureThreadPool(config, + settings, + threadPoolConfig ⇒ new BalancingDispatcher(prerequisites, + config.getString("name"), + config.getInt("throughput"), + Duration(config.getNanoseconds("throughput-deadline-time"), TimeUnit.NANOSECONDS), + mailboxType(config, 
settings), + threadPoolConfig, + settings.DispatcherDefaultShutdown)).build } } diff --git a/akka-actor/src/main/scala/akka/dispatch/Future.scala b/akka-actor/src/main/scala/akka/dispatch/Future.scala index b418ca7994..e9a3035ea8 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Future.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Future.scala @@ -262,7 +262,7 @@ object Future { result completeWithResult currentValue } catch { case e: Exception ⇒ - dispatcher.app.eventStream.publish(Error(e, this, e.getMessage)) + dispatcher.prerequisites.eventStream.publish(Error(e, "Future.fold", e.getMessage)) result completeWithException e } finally { results.clear @@ -631,7 +631,7 @@ sealed trait Future[+T] extends japi.Future[T] { Right(f(res)) } catch { case e: Exception ⇒ - dispatcher.app.eventStream.publish(Error(e, this, e.getMessage)) + dispatcher.prerequisites.eventStream.publish(Error(e, "Future.map", e.getMessage)) Left(e) }) } @@ -683,7 +683,7 @@ sealed trait Future[+T] extends japi.Future[T] { future.completeWith(f(r)) } catch { case e: Exception ⇒ - dispatcher.app.eventStream.publish(Error(e, this, e.getMessage)) + dispatcher.prerequisites.eventStream.publish(Error(e, "Future.flatMap", e.getMessage)) future complete Left(e) } } @@ -716,7 +716,7 @@ sealed trait Future[+T] extends japi.Future[T] { if (p(res)) r else Left(new MatchError(res)) } catch { case e: Exception ⇒ - dispatcher.app.eventStream.publish(Error(e, this, e.getMessage)) + dispatcher.prerequisites.eventStream.publish(Error(e, "Future.filter", e.getMessage)) Left(e) }) } @@ -734,29 +734,6 @@ sealed trait Future[+T] extends japi.Future[T] { } } -package japi { - /* Java API */ - trait Future[+T] { self: akka.dispatch.Future[T] ⇒ - private[japi] final def onTimeout[A >: T](proc: Procedure[akka.dispatch.Future[A]]): this.type = self.onTimeout(proc(_)) - private[japi] final def onResult[A >: T](proc: Procedure[A]): this.type = self.onResult({ case r ⇒ proc(r.asInstanceOf[A]) }: PartialFunction[T, 
Unit]) - private[japi] final def onException(proc: Procedure[Throwable]): this.type = self.onException({ case t: Throwable ⇒ proc(t) }: PartialFunction[Throwable, Unit]) - private[japi] final def onComplete[A >: T](proc: Procedure[akka.dispatch.Future[A]]): this.type = self.onComplete(proc(_)) - private[japi] final def map[A >: T, B](f: JFunc[A, B], timeout: Timeout): akka.dispatch.Future[B] = { - implicit val t = timeout - self.map(f(_)) - } - private[japi] final def flatMap[A >: T, B](f: JFunc[A, akka.dispatch.Future[B]], timeout: Timeout): akka.dispatch.Future[B] = { - implicit val t = timeout - self.flatMap(f(_)) - } - private[japi] final def foreach[A >: T](proc: Procedure[A]): Unit = self.foreach(proc(_)) - private[japi] final def filter[A >: T](p: JFunc[A, java.lang.Boolean], timeout: Timeout): akka.dispatch.Future[A] = { - implicit val t = timeout - self.filter((a: Any) ⇒ p(a.asInstanceOf[A])).asInstanceOf[akka.dispatch.Future[A]] - } - } -} - object Promise { /** @@ -811,7 +788,7 @@ trait Promise[T] extends Future[T] { fr completeWith cont(f) } catch { case e: Exception ⇒ - dispatcher.app.eventStream.publish(Error(e, this, e.getMessage)) + dispatcher.prerequisites.eventStream.publish(Error(e, "Promise.completeWith", e.getMessage)) fr completeWithException e } } @@ -825,7 +802,7 @@ trait Promise[T] extends Future[T] { fr completeWith cont(f) } catch { case e: Exception ⇒ - dispatcher.app.eventStream.publish(Error(e, this, e.getMessage)) + dispatcher.prerequisites.eventStream.publish(Error(e, "Promise.completeWith", e.getMessage)) fr completeWithException e } } @@ -979,12 +956,12 @@ class DefaultPromise[T](val timeout: Timeout)(implicit val dispatcher: MessageDi val runnable = new Runnable { def run() { if (!isCompleted) { - if (!isExpired) dispatcher.app.scheduler.scheduleOnce(this, timeLeftNoinline(), NANOS) + if (!isExpired) dispatcher.prerequisites.scheduler.scheduleOnce(this, Duration(timeLeftNoinline(), TimeUnit.NANOSECONDS)) else 
func(DefaultPromise.this) } } } - val timeoutFuture = dispatcher.app.scheduler.scheduleOnce(runnable, timeLeft(), NANOS) + val timeoutFuture = dispatcher.prerequisites.scheduler.scheduleOnce(runnable, Duration(timeLeft(), TimeUnit.NANOSECONDS)) onComplete(_ ⇒ timeoutFuture.cancel()) false } else true @@ -1006,18 +983,18 @@ class DefaultPromise[T](val timeout: Timeout)(implicit val dispatcher: MessageDi val runnable = new Runnable { def run() { if (!isCompleted) { - if (!isExpired) dispatcher.app.scheduler.scheduleOnce(this, timeLeftNoinline(), NANOS) + if (!isExpired) dispatcher.prerequisites.scheduler.scheduleOnce(this, Duration(timeLeftNoinline(), TimeUnit.NANOSECONDS)) else promise complete (try { Right(fallback) } catch { case e ⇒ Left(e) }) } } } - dispatcher.app.scheduler.scheduleOnce(runnable, timeLeft(), NANOS) + dispatcher.prerequisites.scheduler.scheduleOnce(runnable, Duration(timeLeft(), TimeUnit.NANOSECONDS)) promise } } else this private def notifyCompleted(func: Future[T] ⇒ Unit) { - try { func(this) } catch { case e ⇒ dispatcher.app.eventStream.publish(Error(e, this, "Future onComplete-callback raised an exception")) } //TODO catch, everything? Really? + try { func(this) } catch { case e ⇒ dispatcher.prerequisites.eventStream.publish(Error(e, "Future", "Future onComplete-callback raised an exception")) } //TODO catch, everything? Really? 
} @inline diff --git a/akka-actor/src/main/scala/akka/dispatch/Mailbox.scala b/akka-actor/src/main/scala/akka/dispatch/Mailbox.scala index 6a49ab8d0b..7d3b3c3d8b 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Mailbox.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Mailbox.scala @@ -36,11 +36,17 @@ object Mailbox { /** * @author Jonas Bonér */ -abstract class Mailbox(val actor: ActorCell) extends AbstractMailbox with MessageQueue with SystemMessageQueue with Runnable { +abstract class Mailbox(val actor: ActorCell) extends MessageQueue with SystemMessageQueue with Runnable { import Mailbox._ + @volatile + protected var _status: Status = _ //0 by default + + @volatile + protected var _systemQueue: SystemMessage = _ //null by default + @inline - final def status: Mailbox.Status = AbstractMailbox.updater.get(this) + final def status: Mailbox.Status = _status @inline final def shouldProcessMessage: Boolean = (status & 3) == Open @@ -56,11 +62,10 @@ abstract class Mailbox(val actor: ActorCell) extends AbstractMailbox with Messag @inline protected final def updateStatus(oldStatus: Status, newStatus: Status): Boolean = - AbstractMailbox.updater.compareAndSet(this, oldStatus, newStatus) + Unsafe.instance.compareAndSwapInt(this, AbstractMailbox.mailboxStatusOffset, oldStatus, newStatus) @inline - protected final def setStatus(newStatus: Status): Unit = - AbstractMailbox.updater.set(this, newStatus) + protected final def setStatus(newStatus: Status): Unit = _status = newStatus /** * set new primary status Open. 
Caller does not need to worry about whether @@ -125,18 +130,24 @@ abstract class Mailbox(val actor: ActorCell) extends AbstractMailbox with Messag /* * AtomicReferenceFieldUpdater for system queue */ - protected final def systemQueueGet: SystemMessage = AbstractMailbox.systemQueueUpdater.get(this) - protected final def systemQueuePut(_old: SystemMessage, _new: SystemMessage): Boolean = AbstractMailbox.systemQueueUpdater.compareAndSet(this, _old, _new) + protected final def systemQueueGet: SystemMessage = _systemQueue + protected final def systemQueuePut(_old: SystemMessage, _new: SystemMessage): Boolean = + Unsafe.instance.compareAndSwapObject(this, AbstractMailbox.systemMessageOffset, _old, _new) - def shouldBeScheduledForExecution(hasMessageHint: Boolean, hasSystemMessageHint: Boolean): Boolean = status match { + final def canBeScheduledForExecution(hasMessageHint: Boolean, hasSystemMessageHint: Boolean): Boolean = status match { case Open | Scheduled ⇒ hasMessageHint || hasSystemMessageHint || hasSystemMessages || hasMessages case Closed ⇒ false case _ ⇒ hasSystemMessageHint || hasSystemMessages } final def run = { - try processMailbox() finally { - setAsIdle() + try { + if (!isClosed) { //Volatile read, needed here + processAllSystemMessages() //First, deal with any system messages + processMailbox() //Then deal with messages + } + } finally { + setAsIdle() //Volatile write, needed here dispatcher.registerForExecution(this, false, false) } } @@ -146,15 +157,13 @@ abstract class Mailbox(val actor: ActorCell) extends AbstractMailbox with Messag * * @return true if the processing finished before the mailbox was empty, due to the throughput constraint */ - final def processMailbox() { - processAllSystemMessages() //First, process all system messages - + private final def processMailbox() { if (shouldProcessMessage) { var nextMessage = dequeue() if (nextMessage ne null) { //If we have a message if (dispatcher.isThroughputDefined) { //If we're using throughput, we 
need to do some book-keeping var processedMessages = 0 - val deadlineNs = if (dispatcher.isThroughputDeadlineTimeDefined) System.nanoTime + TimeUnit.MILLISECONDS.toNanos(dispatcher.throughputDeadlineTime) else 0 + val deadlineNs = if (dispatcher.isThroughputDeadlineTimeDefined) System.nanoTime + dispatcher.throughputDeadlineTime.toNanos else 0 do { if (debug) println(actor.self + " processing message " + nextMessage) actor invoke nextMessage @@ -175,11 +184,11 @@ abstract class Mailbox(val actor: ActorCell) extends AbstractMailbox with Messag } } - def processAllSystemMessages() { + final def processAllSystemMessages() { var nextMessage = systemDrain() try { while (nextMessage ne null) { - if (debug) println(actor.self + " processing system message " + nextMessage + " with children " + actor.childrenRefs + "/" + actor.childrenStats) + if (debug) println(actor.self + " processing system message " + nextMessage + " with children " + actor.childrenRefs) actor systemInvoke nextMessage nextMessage = nextMessage.next // don’t ever execute normal message when system message present! 
@@ -187,7 +196,7 @@ abstract class Mailbox(val actor: ActorCell) extends AbstractMailbox with Messag } } catch { case e ⇒ - actor.app.eventStream.publish(Error(e, actor.self, "exception during processing system messages, dropping " + SystemMessage.size(nextMessage) + " messages!")) + actor.system.eventStream.publish(Error(e, actor.self.toString, "exception during processing system messages, dropping " + SystemMessage.size(nextMessage) + " messages!")) throw e } } diff --git a/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala index f40fe953d0..ed0b3cde99 100644 --- a/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala +++ b/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala @@ -7,14 +7,29 @@ package akka.dispatch import java.util.concurrent.atomic.AtomicReference import akka.actor.ActorCell import akka.actor.ActorSystem +import akka.event.EventStream +import akka.actor.Scheduler +import akka.util.Duration +import java.util.concurrent.TimeUnit /** * Dedicates a unique thread for each actor passed in as reference. Served through its messageQueue. 
* * @author Jonas Bonér */ -class PinnedDispatcher(_app: ActorSystem, _actor: ActorCell, _name: String, _mailboxType: MailboxType, _timeoutMs: Long) - extends Dispatcher(_app, _name, Int.MaxValue, -1, _mailboxType, PinnedDispatcher.oneThread(_app), _timeoutMs) { +class PinnedDispatcher( + _prerequisites: DispatcherPrerequisites, + _actor: ActorCell, + _name: String, + _mailboxType: MailboxType, + _shutdownTimeout: Duration) + extends Dispatcher(_prerequisites, + _name, + Int.MaxValue, + Duration.Zero, + _mailboxType, + ThreadPoolConfig(allowCorePoolTimeout = true, corePoolSize = 1, maxPoolSize = 1), + _shutdownTimeout) { @volatile protected[akka] var owner: ActorCell = _actor @@ -33,7 +48,3 @@ class PinnedDispatcher(_app: ActorSystem, _actor: ActorCell, _name: String, _mai } } -object PinnedDispatcher { - def oneThread(app: ActorSystem): ThreadPoolConfig = ThreadPoolConfig(app, allowCorePoolTimeout = true, corePoolSize = 1, maxPoolSize = 1) -} - diff --git a/akka-actor/src/main/scala/akka/dispatch/ThreadPoolBuilder.scala b/akka-actor/src/main/scala/akka/dispatch/ThreadPoolBuilder.scala index 11fc885446..f543e5c016 100644 --- a/akka-actor/src/main/scala/akka/dispatch/ThreadPoolBuilder.scala +++ b/akka-actor/src/main/scala/akka/dispatch/ThreadPoolBuilder.scala @@ -10,6 +10,7 @@ import akka.util.Duration import akka.event.Logging.{ Warning, Error } import akka.actor.ActorSystem import java.util.concurrent._ +import akka.event.EventStream object ThreadPoolConfig { type Bounds = Int @@ -63,8 +64,7 @@ trait ExecutorServiceFactoryProvider { /** * A small configuration DSL to create ThreadPoolExecutors that can be provided as an ExecutorServiceFactoryProvider to Dispatcher */ -case class ThreadPoolConfig(app: ActorSystem, - allowCorePoolTimeout: Boolean = ThreadPoolConfig.defaultAllowCoreThreadTimeout, +case class ThreadPoolConfig(allowCorePoolTimeout: Boolean = ThreadPoolConfig.defaultAllowCoreThreadTimeout, corePoolSize: Int = ThreadPoolConfig.defaultCorePoolSize, 
maxPoolSize: Int = ThreadPoolConfig.defaultMaxPoolSize, threadTimeout: Duration = ThreadPoolConfig.defaultTimeout, diff --git a/akka-actor/src/main/scala/akka/dispatch/japi/Future.scala b/akka-actor/src/main/scala/akka/dispatch/japi/Future.scala new file mode 100644 index 0000000000..e923dd6c18 --- /dev/null +++ b/akka-actor/src/main/scala/akka/dispatch/japi/Future.scala @@ -0,0 +1,29 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. + */ +package akka.dispatch.japi + +import akka.japi.{ Procedure, Function ⇒ JFunc, Option ⇒ JOption } +import akka.actor.Timeout + +/* Java API */ +trait Future[+T] { self: akka.dispatch.Future[T] ⇒ + private[japi] final def onTimeout[A >: T](proc: Procedure[akka.dispatch.Future[A]]): this.type = self.onTimeout(proc(_)) + private[japi] final def onResult[A >: T](proc: Procedure[A]): this.type = self.onResult({ case r ⇒ proc(r.asInstanceOf[A]) }: PartialFunction[T, Unit]) + private[japi] final def onException(proc: Procedure[Throwable]): this.type = self.onException({ case t: Throwable ⇒ proc(t) }: PartialFunction[Throwable, Unit]) + private[japi] final def onComplete[A >: T](proc: Procedure[akka.dispatch.Future[A]]): this.type = self.onComplete(proc(_)) + private[japi] final def map[A >: T, B](f: JFunc[A, B], timeout: Timeout): akka.dispatch.Future[B] = { + implicit val t = timeout + self.map(f(_)) + } + private[japi] final def flatMap[A >: T, B](f: JFunc[A, akka.dispatch.Future[B]], timeout: Timeout): akka.dispatch.Future[B] = { + implicit val t = timeout + self.flatMap(f(_)) + } + private[japi] final def foreach[A >: T](proc: Procedure[A]): Unit = self.foreach(proc(_)) + private[japi] final def filter[A >: T](p: JFunc[A, java.lang.Boolean], timeout: Timeout): akka.dispatch.Future[A] = { + implicit val t = timeout + self.filter((a: Any) ⇒ p(a.asInstanceOf[A])).asInstanceOf[akka.dispatch.Future[A]] + } +} + diff --git a/akka-actor/src/main/scala/akka/event/EventBus.scala b/akka-actor/src/main/scala/akka/event/EventBus.scala index 
7efa43f932..f2013b1253 100644 --- a/akka-actor/src/main/scala/akka/event/EventBus.scala +++ b/akka-actor/src/main/scala/akka/event/EventBus.scala @@ -246,19 +246,19 @@ trait ActorClassification { this: ActorEventBus with ActorClassifier ⇒ val current = mappings get monitored current match { case null ⇒ - if (monitored.isShutdown) false + if (monitored.isTerminated) false else { if (mappings.putIfAbsent(monitored, Vector(monitor)) ne null) associate(monitored, monitor) - else if (monitored.isShutdown) !dissociate(monitored, monitor) else true + else if (monitored.isTerminated) !dissociate(monitored, monitor) else true } case raw: Vector[_] ⇒ val v = raw.asInstanceOf[Vector[ActorRef]] - if (monitored.isShutdown) false + if (monitored.isTerminated) false if (v.contains(monitor)) true else { val added = v :+ monitor if (!mappings.replace(monitored, v, added)) associate(monitored, monitor) - else if (monitored.isShutdown) !dissociate(monitored, monitor) else true + else if (monitored.isTerminated) !dissociate(monitored, monitor) else true } } } diff --git a/akka-actor/src/main/scala/akka/event/EventStream.scala b/akka-actor/src/main/scala/akka/event/EventStream.scala index d64fc0a6ce..3906d2cb04 100644 --- a/akka-actor/src/main/scala/akka/event/EventStream.scala +++ b/akka-actor/src/main/scala/akka/event/EventStream.scala @@ -3,11 +3,16 @@ */ package akka.event -import akka.actor.{ ActorRef, Actor, Props } -import akka.actor.ActorSystem -import akka.actor.Terminated +import akka.actor.{ ActorRef, Actor, Props, ActorSystemImpl, Terminated, ActorSystem, simpleName } import akka.util.Subclassification +object EventStream { + implicit def fromActorSystem(system: ActorSystem) = system.eventStream +} + +class A(x: Int = 0) extends Exception("x=" + x) +class B extends A + class EventStream(debug: Boolean = false) extends LoggingBus with SubchannelClassification { type Event = AnyRef @@ -26,23 +31,23 @@ class EventStream(debug: Boolean = false) extends LoggingBus with 
SubchannelClas protected def publish(event: AnyRef, subscriber: ActorRef) = subscriber ! event override def subscribe(subscriber: ActorRef, channel: Class[_]): Boolean = { - if (debug) publish(Logging.Debug(this, "subscribing " + subscriber + " to channel " + channel)) + if (debug) publish(Logging.Debug(simpleName(this), "subscribing " + subscriber + " to channel " + channel)) if (reaper ne null) reaper ! subscriber super.subscribe(subscriber, channel) } override def unsubscribe(subscriber: ActorRef, channel: Class[_]): Boolean = { - if (debug) publish(Logging.Debug(this, "unsubscribing " + subscriber + " from channel " + channel)) + if (debug) publish(Logging.Debug(simpleName(this), "unsubscribing " + subscriber + " from channel " + channel)) super.unsubscribe(subscriber, channel) } override def unsubscribe(subscriber: ActorRef) { - if (debug) publish(Logging.Debug(this, "unsubscribing " + subscriber + " from all channels")) + if (debug) publish(Logging.Debug(simpleName(this), "unsubscribing " + subscriber + " from all channels")) super.unsubscribe(subscriber) } - def start(app: ActorSystem) { - reaper = app.systemActorOf(Props(new Actor { + def start(system: ActorSystemImpl) { + reaper = system.systemActorOf(Props(new Actor { def receive = { case ref: ActorRef ⇒ watch(ref) case Terminated(ref) ⇒ unsubscribe(ref) diff --git a/akka-actor/src/main/scala/akka/event/Logging.scala b/akka-actor/src/main/scala/akka/event/Logging.scala index 0f26437df3..5b3ae4b801 100644 --- a/akka-actor/src/main/scala/akka/event/Logging.scala +++ b/akka-actor/src/main/scala/akka/event/Logging.scala @@ -3,9 +3,9 @@ */ package akka.event -import akka.actor.{ Actor, ActorPath, ActorRef, MinimalActorRef, LocalActorRef, Props, ActorSystem, simpleName } +import akka.actor.{ Actor, ActorPath, ActorRef, MinimalActorRef, LocalActorRef, Props, ActorSystem, ActorSystemImpl, simpleName } import akka.AkkaException -import akka.actor.ActorSystem.AkkaConfig +import akka.actor.ActorSystem.Settings 
import akka.util.ReflectiveAccess import akka.config.ConfigurationException import akka.util.ReentrantGuard @@ -13,6 +13,11 @@ import akka.util.duration._ import akka.actor.Timeout import akka.dispatch.FutureTimeoutException import java.util.concurrent.atomic.AtomicInteger +import akka.actor.ActorRefProvider + +object LoggingBus { + implicit def fromActorSystem(system: ActorSystem): LoggingBus = system.eventStream +} /** * This trait brings log level handling to the EventStream: it reads the log @@ -65,9 +70,9 @@ trait LoggingBus extends ActorEventBus { _logLevel = level } - private[akka] def startStdoutLogger(config: AkkaConfig) { + private[akka] def startStdoutLogger(config: Settings) { val level = levelFor(config.StdoutLogLevel) getOrElse { - StandardOutLogger.print(Error(new EventHandlerException, this, "unknown akka.stdout-loglevel " + config.StdoutLogLevel)) + StandardOutLogger.print(Error(new EventHandlerException, simpleName(this), "unknown akka.stdout-loglevel " + config.StdoutLogLevel)) ErrorLevel } AllLogLevels filter (level >= _) foreach (l ⇒ subscribe(StandardOutLogger, classFor(l))) @@ -75,16 +80,16 @@ trait LoggingBus extends ActorEventBus { loggers = Seq(StandardOutLogger) _logLevel = level } - publish(Info(this, "StandardOutLogger started")) + publish(Info(simpleName(this), "StandardOutLogger started")) } - private[akka] def startDefaultLoggers(app: ActorSystem, config: AkkaConfig) { - val level = levelFor(config.LogLevel) getOrElse { - StandardOutLogger.print(Error(new EventHandlerException, this, "unknown akka.stdout-loglevel " + config.LogLevel)) + private[akka] def startDefaultLoggers(system: ActorSystemImpl) { + val level = levelFor(system.settings.LogLevel) getOrElse { + StandardOutLogger.print(Error(new EventHandlerException, simpleName(this), "unknown akka.stdout-loglevel " + system.settings.LogLevel)) ErrorLevel } try { - val defaultLoggers = config.EventHandlers match { + val defaultLoggers = system.settings.EventHandlers match { case Nil 
⇒ "akka.event.Logging$DefaultLogger" :: Nil case loggers ⇒ loggers } @@ -94,7 +99,7 @@ trait LoggingBus extends ActorEventBus { } yield { try { ReflectiveAccess.getClassFor[Actor](loggerName) match { - case Right(actorClass) ⇒ addLogger(app, actorClass, level) + case Right(actorClass) ⇒ addLogger(system, actorClass, level) case Left(exception) ⇒ throw exception } } catch { @@ -108,7 +113,7 @@ trait LoggingBus extends ActorEventBus { loggers = myloggers _logLevel = level } - publish(Info(this, "Default Loggers started")) + publish(Info(simpleName(this), "Default Loggers started")) if (!(defaultLoggers contains StandardOutLoggerName)) { unsubscribe(StandardOutLogger) } @@ -124,7 +129,7 @@ trait LoggingBus extends ActorEventBus { val level = _logLevel // volatile access before reading loggers if (!(loggers contains StandardOutLogger)) { AllLogLevels filter (level >= _) foreach (l ⇒ subscribe(StandardOutLogger, classFor(l))) - publish(Info(this, "shutting down: StandardOutLogger started")) + publish(Info(simpleName(this), "shutting down: StandardOutLogger started")) } for { logger ← loggers @@ -134,26 +139,61 @@ trait LoggingBus extends ActorEventBus { unsubscribe(logger) logger.stop() } - publish(Info(this, "all default loggers stopped")) + publish(Info(simpleName(this), "all default loggers stopped")) } - private def addLogger(app: ActorSystem, clazz: Class[_ <: Actor], level: LogLevel): ActorRef = { + private def addLogger(system: ActorSystemImpl, clazz: Class[_ <: Actor], level: LogLevel): ActorRef = { val name = "log" + loggerId.incrementAndGet + "-" + simpleName(clazz) - val actor = app.systemActorOf(Props(clazz), name) + val actor = system.systemActorOf(Props(clazz), name) implicit val timeout = Timeout(3 seconds) val response = try actor ? 
InitializeLogger(this) get catch { case _: FutureTimeoutException ⇒ - publish(Warning(this, "Logger " + name + " did not respond within " + timeout + " to InitializeLogger(bus)")) + publish(Warning(simpleName(this), "Logger " + name + " did not respond within " + timeout + " to InitializeLogger(bus)")) } if (response != LoggerInitialized) throw new LoggerInitializationException("Logger " + name + " did not respond with LoggerInitialized, sent instead " + response) AllLogLevels filter (level >= _) foreach (l ⇒ subscribe(actor, classFor(l))) - publish(Info(this, "logger " + name + " started")) + publish(Info(simpleName(this), "logger " + name + " started")) actor } } +trait LogSource[-T] { + def genString(t: T): String +} + +object LogSource { + implicit val fromString: LogSource[String] = new LogSource[String] { + def genString(s: String) = s + } + + implicit val fromActor: LogSource[Actor] = new LogSource[Actor] { + def genString(a: Actor) = a.self.toString + } + + implicit val fromActorRef: LogSource[ActorRef] = new LogSource[ActorRef] { + def genString(a: ActorRef) = a.toString + } + + // this one unfortunately does not work as implicit, because existential types have some weird behavior + val fromClass: LogSource[Class[_]] = new LogSource[Class[_]] { + def genString(c: Class[_]) = simpleName(c) + } + implicit def fromAnyClass[T]: LogSource[Class[T]] = fromClass.asInstanceOf[LogSource[Class[T]]] + + def apply[T: LogSource](o: T) = implicitly[LogSource[T]].genString(o) + + def fromAnyRef(o: AnyRef): String = + o match { + case c: Class[_] ⇒ fromClass.genString(c) + case a: Actor ⇒ fromActor.genString(a) + case a: ActorRef ⇒ fromActorRef.genString(a) + case s: String ⇒ s + case x ⇒ simpleName(x) + } +} + /** * Main entry point for Akka logging: log levels and message types (aka * channels) defined for the main transport medium, the main event bus. The @@ -234,24 +274,26 @@ object Logging { /** * Obtain LoggingAdapter for the given application and source object. 
The - * source object is used to identify the source of this logging channel. + * source is used to identify the source of this logging channel and must have + * a corresponding LogSource[T] instance in scope; by default these are + * provided for Class[_], Actor, ActorRef and String types. */ - def apply(app: ActorSystem, source: AnyRef): LoggingAdapter = new BusLogging(app.eventStream, source) + def apply[T: LogSource](eventStream: LoggingBus, logSource: T): LoggingAdapter = + new BusLogging(eventStream, implicitly[LogSource[T]].genString(logSource)) + /** * Java API: Obtain LoggingAdapter for the given application and source object. The - * source object is used to identify the source of this logging channel. + * source object is used to identify the source of this logging channel; if it is + * an Actor or ActorRef, its address is used, in case of a class an approximation of + * its simpleName and in all other cases the simpleName of its class. */ - def getLogger(app: ActorSystem, source: AnyRef): LoggingAdapter = apply(app, source) - /** - * Obtain LoggingAdapter for the given event bus and source object. The - * source object is used to identify the source of this logging channel. - */ - def apply(bus: LoggingBus, source: AnyRef): LoggingAdapter = new BusLogging(bus, source) + def getLogger(system: ActorSystem, logSource: AnyRef): LoggingAdapter = apply(system.eventStream, LogSource.fromAnyRef(logSource)) + /** * Java API: Obtain LoggingAdapter for the given event bus and source object. The * source object is used to identify the source of this logging channel. 
*/ - def getLogger(bus: LoggingBus, source: AnyRef): LoggingAdapter = apply(bus, source) + def getLogger(bus: LoggingBus, logSource: AnyRef): LoggingAdapter = apply(bus, LogSource.fromAnyRef(logSource)) /** * Artificial exception injected into Error events if no Throwable is @@ -265,22 +307,22 @@ object Logging { def level: LogLevel } - case class Error(cause: Throwable, instance: AnyRef, message: Any = "") extends LogEvent { + case class Error(cause: Throwable, logSource: String, message: Any = "") extends LogEvent { def level = ErrorLevel } object Error { - def apply(instance: AnyRef, message: Any) = new Error(new EventHandlerException, instance, message) + def apply(logSource: String, message: Any) = new Error(new EventHandlerException, logSource, message) } - case class Warning(instance: AnyRef, message: Any = "") extends LogEvent { + case class Warning(logSource: String, message: Any = "") extends LogEvent { def level = WarningLevel } - case class Info(instance: AnyRef, message: Any = "") extends LogEvent { + case class Info(logSource: String, message: Any = "") extends LogEvent { def level = InfoLevel } - case class Debug(instance: AnyRef, message: Any = "") extends LogEvent { + case class Debug(logSource: String, message: Any = "") extends LogEvent { def level = DebugLevel } @@ -317,7 +359,7 @@ object Logging { case e: Warning ⇒ warning(e) case e: Info ⇒ info(e) case e: Debug ⇒ debug(e) - case e ⇒ warning(Warning(this, "received unexpected event of class " + e.getClass + ": " + e)) + case e ⇒ warning(Warning(simpleName(this), "received unexpected event of class " + e.getClass + ": " + e)) } } @@ -325,7 +367,7 @@ object Logging { println(errorFormat.format( timestamp, event.thread.getName, - instanceName(event.instance), + event.logSource, event.message, stackTraceFor(event.cause))) @@ -333,21 +375,21 @@ object Logging { println(warningFormat.format( timestamp, event.thread.getName, - instanceName(event.instance), + event.logSource, event.message)) def 
info(event: Info) = println(infoFormat.format( timestamp, event.thread.getName, - instanceName(event.instance), + event.logSource, event.message)) def debug(event: Debug) = println(debugFormat.format( timestamp, event.thread.getName, - instanceName(event.instance), + event.logSource, event.message)) def instanceName(instance: AnyRef): String = instance match { @@ -490,7 +532,7 @@ trait LoggingAdapter { } } -class BusLogging(val bus: LoggingBus, val loggingInstance: AnyRef) extends LoggingAdapter { +class BusLogging(val bus: LoggingBus, val logSource: String) extends LoggingAdapter { import Logging._ @@ -499,14 +541,14 @@ class BusLogging(val bus: LoggingBus, val loggingInstance: AnyRef) extends Loggi def isInfoEnabled = bus.logLevel >= InfoLevel def isDebugEnabled = bus.logLevel >= DebugLevel - protected def notifyError(message: String) { bus.publish(Error(loggingInstance, message)) } + protected def notifyError(message: String) { bus.publish(Error(logSource, message)) } - protected def notifyError(cause: Throwable, message: String) { bus.publish(Error(cause, loggingInstance, message)) } + protected def notifyError(cause: Throwable, message: String) { bus.publish(Error(cause, logSource, message)) } - protected def notifyWarning(message: String) { bus.publish(Warning(loggingInstance, message)) } + protected def notifyWarning(message: String) { bus.publish(Warning(logSource, message)) } - protected def notifyInfo(message: String) { bus.publish(Info(loggingInstance, message)) } + protected def notifyInfo(message: String) { bus.publish(Info(logSource, message)) } - protected def notifyDebug(message: String) { bus.publish(Debug(loggingInstance, message)) } + protected def notifyDebug(message: String) { bus.publish(Debug(logSource, message)) } } diff --git a/akka-actor/src/main/scala/akka/event/EventBusJavaAPI.scala b/akka-actor/src/main/scala/akka/event/japi/EventBusJavaAPI.scala similarity index 100% rename from 
akka-actor/src/main/scala/akka/event/EventBusJavaAPI.scala rename to akka-actor/src/main/scala/akka/event/japi/EventBusJavaAPI.scala diff --git a/akka-actor/src/main/scala/akka/remote/RemoteInterface.scala b/akka-actor/src/main/scala/akka/remote/RemoteInterface.scala index d5837c95b1..7d33508f46 100644 --- a/akka-actor/src/main/scala/akka/remote/RemoteInterface.scala +++ b/akka-actor/src/main/scala/akka/remote/RemoteInterface.scala @@ -26,6 +26,8 @@ object RemoteAddress { } } +object LocalOnly extends RemoteAddress(0, "local") + case class RemoteAddress private[akka] (port: Int, hostname: String) { @transient override lazy val toString = "" + hostname + ":" + port @@ -128,7 +130,7 @@ case class CannotInstantiateRemoteExceptionDueToRemoteProtocolParsingErrorExcept override def printStackTrace(printWriter: PrintWriter) = cause.printStackTrace(printWriter) } -abstract class RemoteSupport(val app: ActorSystem) { +abstract class RemoteSupport(val system: ActorSystem) { /** * Shuts down the remoting */ @@ -162,7 +164,7 @@ abstract class RemoteSupport(val app: ActorSystem) { recipient: ActorRef, loader: Option[ClassLoader]): Unit - protected[akka] def notifyListeners(message: RemoteLifeCycleEvent): Unit = app.eventStream.publish(message) + protected[akka] def notifyListeners(message: RemoteLifeCycleEvent): Unit = system.eventStream.publish(message) override def toString = name } diff --git a/akka-actor/src/main/scala/akka/routing/Routing.scala b/akka-actor/src/main/scala/akka/routing/Routing.scala index 0c813ef92f..293c1abb4b 100644 --- a/akka-actor/src/main/scala/akka/routing/Routing.scala +++ b/akka-actor/src/main/scala/akka/routing/Routing.scala @@ -159,7 +159,7 @@ object Routing { /** * An Abstract convenience implementation for building an ActorReference that uses a Router. 
*/ -abstract private[akka] class AbstractRoutedActorRef(val app: ActorSystem, val props: RoutedProps) extends MinimalActorRef { +abstract private[akka] class AbstractRoutedActorRef(val system: ActorSystem, val props: RoutedProps) extends MinimalActorRef { val router = props.routerFactory() override def !(message: Any)(implicit sender: ActorRef = null): Unit = router.route(message)(sender) @@ -171,7 +171,7 @@ abstract private[akka] class AbstractRoutedActorRef(val app: ActorSystem, val pr * A RoutedActorRef is an ActorRef that has a set of connected ActorRef and it uses a Router to send a message to * on (or more) of these actors. */ -private[akka] class RoutedActorRef(app: ActorSystem, val routedProps: RoutedProps, val supervisor: ActorRef, override val name: String) extends AbstractRoutedActorRef(app, routedProps) { +private[akka] class RoutedActorRef(system: ActorSystem, val routedProps: RoutedProps, val supervisor: ActorRef, override val name: String) extends AbstractRoutedActorRef(system, routedProps) { val path = supervisor.path / name @@ -181,7 +181,7 @@ private[akka] class RoutedActorRef(app: ActorSystem, val routedProps: RoutedProp @volatile private var running: Boolean = true - override def isShutdown: Boolean = !running + override def isTerminated: Boolean = !running override def stop() { synchronized { @@ -444,7 +444,7 @@ trait ScatterGatherRouter extends BasicRouter with Serializable { private def scatterGather[S, G >: S](message: Any, timeout: Timeout): Future[G] = { val responses = connectionManager.connections.iterable.flatMap { actor ⇒ try { - if (actor.isShutdown) throw ActorInitializationException(actor, "For compatability - check death first", new Exception) // for stack trace + if (actor.isTerminated) throw ActorInitializationException(actor, "For compatability - check death first", new Exception) // for stack trace Some(actor.?(message, timeout).asInstanceOf[Future[S]]) } catch { case e: Exception ⇒ diff --git 
a/akka-actor/src/main/scala/akka/serialization/Serialization.scala b/akka-actor/src/main/scala/akka/serialization/Serialization.scala index f57cde814d..2e10bf22af 100644 --- a/akka-actor/src/main/scala/akka/serialization/Serialization.scala +++ b/akka-actor/src/main/scala/akka/serialization/Serialization.scala @@ -6,9 +6,8 @@ package akka.serialization import akka.AkkaException import akka.util.ReflectiveAccess -import akka.actor.ActorSystem +import akka.actor.{ ActorSystem, ActorSystemImpl } import scala.util.DynamicVariable -import akka.remote.RemoteSupport case class NoSerializerFoundException(m: String) extends AkkaException(m) @@ -16,7 +15,7 @@ case class NoSerializerFoundException(m: String) extends AkkaException(m) * Serialization module. Contains methods for serialization and deserialization as well as * locating a Serializer for a particular class as defined in the mapping in the 'akka.conf' file. */ -class Serialization(val app: ActorSystem) { +class Serialization(val system: ActorSystemImpl) { //TODO document me def serialize(o: AnyRef): Either[Exception, Array[Byte]] = @@ -28,7 +27,7 @@ class Serialization(val app: ActorSystem) { clazz: Class[_], classLoader: Option[ClassLoader]): Either[Exception, AnyRef] = try { - Serialization.app.withValue(app) { + Serialization.system.withValue(system) { Right(serializerFor(clazz).fromBinary(bytes, Some(clazz), classLoader)) } } catch { case e: Exception ⇒ Left(e) } @@ -46,7 +45,7 @@ class Serialization(val app: ActorSystem) { * Tries to load the specified Serializer by the FQN */ def serializerOf(serializerFQN: String): Either[Exception, Serializer] = - ReflectiveAccess.createInstance(serializerFQN, ReflectiveAccess.emptyParams, ReflectiveAccess.emptyArguments) + ReflectiveAccess.createInstance(serializerFQN, ReflectiveAccess.noParams, ReflectiveAccess.noArgs) private def serializerForBestMatchClass(cl: Class[_]): Either[Exception, Serializer] = { if (bindings.isEmpty) @@ -64,44 +63,48 @@ class Serialization(val 
app: ActorSystem) { } } + // serializers and bindings needs to be lazy because Serialization is initialized from SerializationExtension, which is needed here + /** * A Map of serializer from alias to implementation (class implementing akka.serialization.Serializer) * By default always contains the following mapping: "default" -> akka.serialization.JavaSerializer * But "default" can be overridden in config */ - val serializers: Map[String, Serializer] = - app.config.getSection("akka.actor.serializers") - .map(_.map) - .getOrElse(Map()) - .foldLeft(Map[String, Serializer]("default" -> akka.serialization.JavaSerializer)) { - case (result, (k: String, v: String)) ⇒ result + (k -> serializerOf(v).fold(throw _, identity)) - case (result, _) ⇒ result - } + lazy val serializers: Map[String, Serializer] = { + val serializersConf = SerializationExtension(system).settings.Serializers + for ((k: String, v: String) ← serializersConf) + yield k -> serializerOf(v).fold(throw _, identity) + } /** * bindings is a Map whose keys = FQN of class that is serializable and values = the alias of the serializer to be used */ - val bindings: Map[String, String] = app.config.getSection("akka.actor.serialization-bindings") map { - _.map.foldLeft(Map[String, String]()) { - case (result, (k: String, vs: List[_])) ⇒ result ++ (vs collect { case v: String ⇒ (v, k) }) //All keys which are lists, take the Strings from them and Map them - case (result, _) ⇒ result //For any other values, just skip them, TODO: print out warnings? 
+ lazy val bindings: Map[String, String] = { + val configBindings = SerializationExtension(system).settings.SerializationBindings + configBindings.foldLeft(Map[String, String]()) { + case (result, (k: String, vs: Seq[_])) ⇒ + //All keys which are lists, take the Strings from them and Map them + result ++ (vs collect { case v: String ⇒ (v, k) }) + case (result, x) ⇒ + //For any other values, just skip them + result } - } getOrElse Map() + } /** * serializerMap is a Map whose keys = FQN of class that is serializable and values = the FQN of the serializer to be used for that class */ - val serializerMap: Map[String, Serializer] = bindings mapValues serializers + lazy val serializerMap: Map[String, Serializer] = bindings mapValues serializers /** * Maps from a Serializer.Identifier (Byte) to a Serializer instance (optimization) */ - val serializerByIdentity: Map[Serializer.Identifier, Serializer] = + lazy val serializerByIdentity: Map[Serializer.Identifier, Serializer] = Map(NullSerializer.identifier -> NullSerializer) ++ serializers map { case (_, v) ⇒ (v.identifier, v) } } object Serialization { // TODO ensure that these are always set (i.e. withValue()) when doing deserialization - val app = new DynamicVariable[ActorSystem](null) + val system = new DynamicVariable[ActorSystemImpl](null) } diff --git a/akka-actor/src/main/scala/akka/serialization/SerializationExtension.scala b/akka-actor/src/main/scala/akka/serialization/SerializationExtension.scala new file mode 100644 index 0000000000..4fc0b1be3c --- /dev/null +++ b/akka-actor/src/main/scala/akka/serialization/SerializationExtension.scala @@ -0,0 +1,79 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. 
+ */ +package akka.serialization + +import akka.actor.ActorSystem +import akka.actor.ExtensionKey +import akka.actor.Extension +import akka.actor.ActorSystemImpl +import com.typesafe.config.Config +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigParseOptions +import com.typesafe.config.ConfigRoot +import akka.config.ConfigurationException + +object SerializationExtensionKey extends ExtensionKey[SerializationExtension] + +object SerializationExtension { + def apply(system: ActorSystem): SerializationExtension = { + if (!system.hasExtension(SerializationExtensionKey)) { + system.registerExtension(new SerializationExtension) + } + system.extension(SerializationExtensionKey) + } + + class Settings(cfg: Config) { + private def referenceConfig: Config = + ConfigFactory.parseResource(classOf[ActorSystem], "/akka-serialization-reference.conf", + ConfigParseOptions.defaults.setAllowMissing(false)) + val config: ConfigRoot = ConfigFactory.emptyRoot("akka-serialization").withFallback(cfg).withFallback(referenceConfig).resolve() + + import scala.collection.JavaConverters._ + import config._ + + val Serializers: Map[String, String] = { + toStringMap(getConfig("akka.actor.serializers")) + } + + val SerializationBindings: Map[String, Seq[String]] = { + val configPath = "akka.actor.serialization-bindings" + hasPath(configPath) match { + case false ⇒ Map() + case true ⇒ + val serializationBindings: Map[String, Seq[String]] = getConfig(configPath).toObject.unwrapped.asScala.toMap.map { + case (k: String, v: java.util.Collection[_]) ⇒ (k -> v.asScala.toSeq.asInstanceOf[Seq[String]]) + case invalid ⇒ throw new ConfigurationException("Invalid serialization-bindings [%s]".format(invalid)) + } + serializationBindings + + } + } + + private def toStringMap(mapConfig: Config): Map[String, String] = { + mapConfig.toObject.unwrapped.asScala.toMap.map { entry ⇒ + (entry._1 -> entry._2.toString) + } + } + + } +} + +class SerializationExtension extends 
Extension[SerializationExtension] { + import SerializationExtension._ + @volatile + private var _settings: Settings = _ + @volatile + private var _serialization: Serialization = _ + def serialization = _serialization + + def key = SerializationExtensionKey + + def init(system: ActorSystemImpl) { + _settings = new Settings(system.applicationConfig) + _serialization = new Serialization(system) + } + + def settings: Settings = _settings + +} \ No newline at end of file diff --git a/akka-actor/src/main/scala/akka/util/AkkaLoader.scala b/akka-actor/src/main/scala/akka/util/AkkaLoader.scala index 1d4f23e03f..f2bf63c137 100644 --- a/akka-actor/src/main/scala/akka/util/AkkaLoader.scala +++ b/akka-actor/src/main/scala/akka/util/AkkaLoader.scala @@ -8,7 +8,7 @@ import akka.actor.ActorSystem /* * This class is responsible for booting up a stack of bundles and then shutting them down */ -class AkkaLoader(app: ActorSystem) { +class AkkaLoader(system: ActorSystem) { private val hasBooted = new Switch(false) @volatile diff --git a/akka-actor/src/main/scala/akka/util/Duration.scala b/akka-actor/src/main/scala/akka/util/Duration.scala index fbb27526f1..eec371d724 100644 --- a/akka-actor/src/main/scala/akka/util/Duration.scala +++ b/akka-actor/src/main/scala/akka/util/Duration.scala @@ -278,7 +278,6 @@ abstract class Duration extends Serializable { def /(other: Duration): Double def unary_- : Duration def finite_? 
: Boolean - def dilated(implicit app: ActorSystem): Duration = this * app.AkkaConfig.TestTimeFactor def min(other: Duration): Duration = if (this < other) this else other def max(other: Duration): Duration = if (this > other) this else other def sleep(): Unit = Thread.sleep(toMillis) @@ -483,3 +482,4 @@ class DurationDouble(d: Double) { def days = Duration(d, DAYS) def day = Duration(d, DAYS) } + diff --git a/akka-actor/src/main/scala/akka/util/Helpers.scala b/akka-actor/src/main/scala/akka/util/Helpers.scala index 67a77aa150..2b9f59d757 100644 --- a/akka-actor/src/main/scala/akka/util/Helpers.scala +++ b/akka-actor/src/main/scala/akka/util/Helpers.scala @@ -12,8 +12,6 @@ import scala.annotation.tailrec */ object Helpers { - implicit def null2Option[T](t: T): Option[T] = Option(t) - def compareIdentityHash(a: AnyRef, b: AnyRef): Int = { /* * make sure that there is no overflow or underflow in comparisons, so @@ -28,19 +26,6 @@ object Helpers { def compare(a: AnyRef, b: AnyRef): Int = compareIdentityHash(a, b) } - def intToBytes(value: Int): Array[Byte] = { - val bytes = new Array[Byte](4) - bytes(0) = (value >>> 24).asInstanceOf[Byte] - bytes(1) = (value >>> 16).asInstanceOf[Byte] - bytes(2) = (value >>> 8).asInstanceOf[Byte] - bytes(3) = value.asInstanceOf[Byte] - bytes - } - - def bytesToInt(bytes: Array[Byte], offset: Int): Int = { - (0 until 4).foldLeft(0)((value, index) ⇒ value + ((bytes(index + offset) & 0x000000FF) << ((4 - 1 - index) * 8))) - } - final val base64chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789*?" @tailrec @@ -72,77 +57,4 @@ object Helpers { throw e } } - - /** - * Convenience helper to cast the given Option of Any to an Option of the given type. Will throw a ClassCastException - * if the actual type is not assignable from the given one. 
- */ - def narrow[T](o: Option[Any]): Option[T] = { - require((o ne null), "Option to be narrowed must not be null!") - o.asInstanceOf[Option[T]] - } - - /** - * Convenience helper to cast the given Option of Any to an Option of the given type. Will swallow a possible - * ClassCastException and return None in that case. - */ - def narrowSilently[T: Manifest](o: Option[Any]): Option[T] = - try { - narrow(o) - } catch { - case e: ClassCastException ⇒ - None - } - - /** - * Reference that can hold either a typed value or an exception. - * - * Usage: - *

-   * scala> ResultOrError(1)
-   * res0: ResultOrError[Int] = ResultOrError@a96606
-   *
-   * scala> res0()
-   * res1: Int = 1
-   *
-   * scala> res0() = 3
-   *
-   * scala> res0()
-   * res3: Int = 3
-   *
-   * scala> res0() = { println("Hello world"); 3}
-   * Hello world
-   *
-   * scala> res0()
-   * res5: Int = 3
-   *
-   * scala> res0() = error("Lets see what happens here...")
-   *
-   * scala> res0()
-   * java.lang.RuntimeException: Lets see what happens here...
-   *    at ResultOrError.apply(Helper.scala:11)
-   *    at .(:6)
-   *    at .()
-   *    at Re...
-   * 
- */ - class ResultOrError[R](result: R) { - private[this] var contents: Either[R, Throwable] = Left(result) - - def update(value: ⇒ R) { - contents = try { - Left(value) - } catch { - case (error: Throwable) ⇒ Right(error) - } - } - - def apply() = contents match { - case Left(result) ⇒ result - case Right(error) ⇒ throw error.fillInStackTrace - } - } - object ResultOrError { - def apply[R](result: R) = new ResultOrError(result) - } } diff --git a/akka-actor/src/main/scala/akka/util/JMX.scala b/akka-actor/src/main/scala/akka/util/JMX.scala index 1c7465882b..2c87524843 100644 --- a/akka-actor/src/main/scala/akka/util/JMX.scala +++ b/akka-actor/src/main/scala/akka/util/JMX.scala @@ -18,20 +18,20 @@ object JMX { def nameFor(hostname: String, service: String, bean: String): ObjectName = new ObjectName("akka.%s:type=%s,name=%s".format(hostname, service, bean.replace(":", "_"))) - def register(name: ObjectName, mbean: AnyRef)(implicit app: ActorSystem): Option[ObjectInstance] = try { + def register(name: ObjectName, mbean: AnyRef)(implicit system: ActorSystem): Option[ObjectInstance] = try { Some(mbeanServer.registerMBean(mbean, name)) } catch { case e: InstanceAlreadyExistsException ⇒ Some(mbeanServer.getObjectInstance(name)) case e: Exception ⇒ - app.eventStream.publish(Error(e, this, "Error when registering mbean [%s]".format(mbean))) + system.eventStream.publish(Error(e, "JMX", "Error when registering mbean [%s]".format(mbean))) None } - def unregister(mbean: ObjectName)(implicit app: ActorSystem) = try { + def unregister(mbean: ObjectName)(implicit system: ActorSystem) = try { mbeanServer.unregisterMBean(mbean) } catch { case e: InstanceNotFoundException ⇒ {} - case e: Exception ⇒ app.eventStream.publish(Error(e, this, "Error while unregistering mbean [%s]".format(mbean))) + case e: Exception ⇒ system.eventStream.publish(Error(e, "JMX", "Error while unregistering mbean [%s]".format(mbean))) } } diff --git 
a/akka-actor/src/main/scala/akka/util/ListenerManagement.scala b/akka-actor/src/main/scala/akka/util/ListenerManagement.scala index 775f5f674e..fad8f5b20a 100644 --- a/akka-actor/src/main/scala/akka/util/ListenerManagement.scala +++ b/akka-actor/src/main/scala/akka/util/ListenerManagement.scala @@ -44,7 +44,7 @@ trait ListenerManagement { def hasListeners: Boolean = !listeners.isEmpty /** - * Checks if a specific listener is registered. Pruned eventually when isShutdown==true in notify. + * Checks if a specific listener is registered. Pruned eventually when isTerminated==true in notify. */ def hasListener(listener: ActorRef): Boolean = listeners.contains(listener) @@ -54,7 +54,7 @@ trait ListenerManagement { val iterator = listeners.iterator while (iterator.hasNext) { val listener = iterator.next - if (listener.isShutdown) iterator.remove() + if (listener.isTerminated) iterator.remove() else listener ! msg } } @@ -67,7 +67,7 @@ trait ListenerManagement { val iterator = listeners.iterator while (iterator.hasNext) { val listener = iterator.next - if (listener.isShutdown) iterator.remove() + if (listener.isTerminated) iterator.remove() else f(listener) } } diff --git a/akka-actor/src/main/scala/akka/util/ReflectiveAccess.scala b/akka-actor/src/main/scala/akka/util/ReflectiveAccess.scala index 285cdaeeb9..065e11ba78 100644 --- a/akka-actor/src/main/scala/akka/util/ReflectiveAccess.scala +++ b/akka-actor/src/main/scala/akka/util/ReflectiveAccess.scala @@ -4,23 +4,13 @@ package akka.util -import akka.dispatch.Envelope import akka.actor._ -import DeploymentConfig.ReplicationScheme -import akka.config.ModuleNotAvailableException -import akka.event.Logging.Debug -import akka.cluster.ClusterNode -import akka.routing.{ RoutedProps, Router } -import akka.actor.ActorSystem object ReflectiveAccess { val loader = getClass.getClassLoader - val emptyParams: Array[Class[_]] = Array() - val emptyArguments: Array[AnyRef] = Array() - - val noParams = Array[Class[_]]() - val noArgs = 
Array[AnyRef]() + val noParams: Array[Class[_]] = Array() + val noArgs: Array[AnyRef] = Array() def createInstance[T](clazz: Class[_], params: Array[Class[_]], @@ -112,29 +102,3 @@ object ReflectiveAccess { } -/** - * Helper class for reflective access to different modules in order to allow optional loading of modules. - * - * @author Jonas Bonér - */ -class ReflectiveAccess(val app: ActorSystem) { - - import ReflectiveAccess._ - - def providerClass: Class[_] = { - getClassFor(app.AkkaConfig.ProviderClass) match { - case Left(e) ⇒ throw e - case Right(b) ⇒ b - } - } - - def createProvider: ActorRefProvider = { - val params: Array[Class[_]] = Array(classOf[ActorSystem]) - val args: Array[AnyRef] = Array(app) - - createInstance[ActorRefProvider](providerClass, params, args) match { - case Right(p) ⇒ p - case Left(e) ⇒ throw e - } - } -} diff --git a/akka-actor/src/main/scala/akka/util/Unsafe.java b/akka-actor/src/main/scala/akka/util/Unsafe.java new file mode 100644 index 0000000000..4449f045be --- /dev/null +++ b/akka-actor/src/main/scala/akka/util/Unsafe.java @@ -0,0 +1,21 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. + */ + + +package akka.util; + +import java.lang.reflect.Field; + +public final class Unsafe { + public final static sun.misc.Unsafe instance; + static { + try { + Field field = sun.misc.Unsafe.class.getDeclaredField("theUnsafe"); + field.setAccessible(true); + instance = (sun.misc.Unsafe) field.get(null); + } catch(Throwable t) { + throw new ExceptionInInitializerError(t); + } + } +} diff --git a/akka-camel/src/main/scala/akka/camel/CamelService.scala b/akka-camel/src/main/scala/akka/camel/CamelService.scala index 643d18fb18..eb3c8e4ae1 100644 --- a/akka-camel/src/main/scala/akka/camel/CamelService.scala +++ b/akka-camel/src/main/scala/akka/camel/CamelService.scala @@ -58,7 +58,7 @@ trait CamelService extends Bootable { * Starts this CamelService. 
*/ def start: CamelService = { - // Only init and start if not already done by app + // Only init and start if not already done by system if (!CamelContextManager.initialized) CamelContextManager.init if (!CamelContextManager.started) CamelContextManager.start diff --git a/akka-camel/src/main/scala/akka/camel/Consumer.scala b/akka-camel/src/main/scala/akka/camel/Consumer.scala index 0518a7c271..86f9d7f519 100644 --- a/akka-camel/src/main/scala/akka/camel/Consumer.scala +++ b/akka-camel/src/main/scala/akka/camel/Consumer.scala @@ -35,7 +35,7 @@ trait Consumer { this: Actor ⇒ /** * Determines whether one-way communications between an endpoint and this consumer actor - * should be auto-acknowledged or app-acknowledged. + * should be auto-acknowledged or system-acknowledged. */ def autoack = true @@ -79,7 +79,7 @@ abstract class UntypedConsumerActor extends UntypedActor with Consumer { /** * Determines whether one-way communications between an endpoint and this consumer actor - * should be auto-acknowledged or app-acknowledged. + * should be auto-acknowledged or system-acknowledged. */ def isAutoack() = super.autoack } diff --git a/akka-camel/src/main/scala/akka/camel/Message.scala b/akka-camel/src/main/scala/akka/camel/Message.scala index e3ed12fec0..e5a553f34e 100644 --- a/akka-camel/src/main/scala/akka/camel/Message.scala +++ b/akka-camel/src/main/scala/akka/camel/Message.scala @@ -208,7 +208,7 @@ object Message { } /** - * Positive acknowledgement message (used for app-acknowledged message receipts). + * Positive acknowledgement message (used for system-acknowledged message receipts). 
* * @author Martin Krasser */ diff --git a/akka-camel/src/main/scala/akka/camel/Producer.scala b/akka-camel/src/main/scala/akka/camel/Producer.scala index 8c65d71c66..4a9367005a 100644 --- a/akka-camel/src/main/scala/akka/camel/Producer.scala +++ b/akka-camel/src/main/scala/akka/camel/Producer.scala @@ -50,7 +50,7 @@ trait ProducerSupport { this: Actor ⇒ /** * Returns the names of message headers to copy from a request message to a response message. * By default only the Message.MessageExchangeId is copied. Applications may override this to - * define an app-specific set of message headers to copy. + * define an system-specific set of message headers to copy. */ def headersToCopy: Set[String] = headersToCopyDefault diff --git a/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala b/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala index d305e76033..795fbf5a54 100644 --- a/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala +++ b/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala @@ -268,7 +268,7 @@ private[akka] class AsyncCallbackAdapter(exchange: Exchange, callback: AsyncCall @volatile private var running: Boolean = true - def isShutdown: Boolean = !running + def isTerminated: Boolean = !running def suspend(): Unit = () diff --git a/akka-camel/src/test/scala/akka/camel/ConsumerScalaTest.scala b/akka-camel/src/test/scala/akka/camel/ConsumerScalaTest.scala index 97eb8b49a3..efe7d6aee1 100644 --- a/akka-camel/src/test/scala/akka/camel/ConsumerScalaTest.scala +++ b/akka-camel/src/test/scala/akka/camel/ConsumerScalaTest.scala @@ -138,15 +138,15 @@ class ConsumerScalaTest extends WordSpec with BeforeAndAfterAll with MustMatcher "An non auto-acknowledging consumer" when { "started" must { - "must support acknowledgements on app level" in { + "must support acknowledgements on system level" in { var consumer: ActorRef = null service.awaitEndpointActivation(1) { - consumer = actorOf(new 
TestAckConsumer("direct:app-ack-test")) + consumer = actorOf(new TestAckConsumer("direct:system-ack-test")) } must be(true) - val endpoint = mandatoryContext.getEndpoint("direct:app-ack-test", classOf[DirectEndpoint]) + val endpoint = mandatoryContext.getEndpoint("direct:system-ack-test", classOf[DirectEndpoint]) val producer = endpoint.createProducer.asInstanceOf[AsyncProcessor] val exchange = endpoint.createExchange diff --git a/akka-cluster/src/main/scala/akka/cluster/BookKeeperServer.scala b/akka-cluster/src/main/scala/akka/cluster/BookKeeperServer.scala index 4bafaab001..e546d2d9af 100644 --- a/akka-cluster/src/main/scala/akka/cluster/BookKeeperServer.scala +++ b/akka-cluster/src/main/scala/akka/cluster/BookKeeperServer.scala @@ -9,13 +9,13 @@ import java.io.File /* A simple use of BookKeeper is to implement a write-ahead transaction log. A server maintains an in-memory data structure -(with periodic snapshots for example) and logs changes to that structure before it applies the change. The app +(with periodic snapshots for example) and logs changes to that structure before it applies the change. The system server creates a ledger at startup and store the ledger id and password in a well known place (ZooKeeper maybe). When it needs to make a change, the server adds an entry with the change information to a ledger and apply the change when BookKeeper adds the entry successfully. The server can even use asyncAddEntry to queue up many changes for high change throughput. BooKeeper meticulously logs the changes in order and call the completion functions in order. -When the app server dies, a backup server will come online, get the last snapshot and then it will open the +When the system server dies, a backup server will come online, get the last snapshot and then it will open the ledger of the old server and read all the entries from the time the snapshot was taken. (Since it doesn't know the last entry number it will use MAX_INTEGER). 
Once all the entries have been processed, it will close the ledger and start a new one for its use. diff --git a/akka-docs/dev/building-akka.rst b/akka-docs/dev/building-akka.rst index 3e46a2698e..a9db8fe7d2 100644 --- a/akka-docs/dev/building-akka.rst +++ b/akka-docs/dev/building-akka.rst @@ -128,7 +128,7 @@ Dependencies You can look at the Ivy dependency resolution information that is created on ``sbt update`` and found in ``~/.ivy2/cache``. For example, the -``.ivy2/cache/se.scalablesolutions.akka-akka-cluster-compile.xml`` file contains +``~/.ivy2/cache/com.typesafe.akka-akka-remote-compile.xml`` file contains the resolution information for the akka-cluster module compile dependencies. If you open this file in a web browser you will get an easy to navigate view of dependencies. diff --git a/akka-docs/disabled/examples/Pi.scala b/akka-docs/disabled/examples/Pi.scala index d0869426fe..2b0fb45914 100644 --- a/akka-docs/disabled/examples/Pi.scala +++ b/akka-docs/disabled/examples/Pi.scala @@ -10,7 +10,7 @@ import System.{currentTimeMillis => now} import java.util.concurrent.CountDownLatch //#imports -//#app +//#system object Pi extends App { calculate(nrOfWorkers = 4, nrOfElements = 10000, nrOfMessages = 10000) @@ -127,5 +127,5 @@ object Pi extends App { latch.await() } } -//#app +//#system diff --git a/akka-docs/general/code/ConfigDocSpec.scala b/akka-docs/general/code/ConfigDocSpec.scala new file mode 100644 index 0000000000..b7b106b94f --- /dev/null +++ b/akka-docs/general/code/ConfigDocSpec.scala @@ -0,0 +1,32 @@ +package akka.docs.config + +import org.scalatest.WordSpec +import org.scalatest.matchers.MustMatchers + +//#imports +import akka.actor.ActorSystem +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigParseOptions + +//#imports + +class ConfigDocSpec extends WordSpec { + + "programmatically configure ActorSystem" in { + //#custom-config + val customConf = ConfigFactory.parseString(""" + akka.actor.deployment { + /app/my-service { 
+ router = round-robin + nr-of-instances = 3 + } + } + """, ConfigParseOptions.defaults) + val system = ActorSystem("MySystem", ConfigFactory.systemProperties.withFallback(customConf)) + //#custom-config + + system.stop() + + } + +} diff --git a/akka-docs/general/configuration.rst b/akka-docs/general/configuration.rst index 17c0e1070d..5e8c3e9344 100644 --- a/akka-docs/general/configuration.rst +++ b/akka-docs/general/configuration.rst @@ -11,9 +11,10 @@ Configuration Specifying the configuration file --------------------------------- -If you don't specify a configuration file then Akka uses default values, corresponding to the ``akka-reference.conf`` -that you see below. You can specify your own configuration file to override any property in the reference config. -You only have to define the properties that differ from the default configuration. +If you don't specify a configuration file then Akka uses default values, corresponding to the reference +configuration files that you see below. You can specify your own configuration file to override any +property in the reference config. You only have to define the properties that differ from the default +configuration. The location of the config file to use can be specified in various ways: @@ -29,22 +30,74 @@ The location of the config file to use can be specified in various ways: If several of these ways to specify the config file are used at the same time the precedence is the order as given above, i.e. you can always redefine the location with the ``-Dakka.config=...`` system property. +You may also specify the configuration programmatically when instantiating the ``ActorSystem``. + +.. includecode:: code/ConfigDocSpec.scala + :include: imports,custom-config + +The ``ConfigFactory`` provides several methods to parse the configuration from various sources. 
Defining the configuration file ------------------------------- -Here is the reference configuration file: +Each Akka module has a reference configuration file with the default values. -.. literalinclude:: ../../config/akka-reference.conf +*akka-actor:* + +.. literalinclude:: ../../akka-actor/src/main/resources/akka-actor-reference.conf + :language: none + +*akka-remote:* + +.. literalinclude:: ../../akka-remote/src/main/resources/akka-remote-reference.conf + :language: none + +*akka-serialization:* + +.. literalinclude:: ../../akka-actor/src/main/resources/akka-serialization-reference.conf + :language: none + +*akka-testkit:* + +.. literalinclude:: ../../akka-testkit/src/main/resources/akka-testkit-reference.conf + :language: none + +*akka-beanstalk-mailbox:* + +.. literalinclude:: ../../akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/resources/akka-beanstalk-mailbox-reference.conf + :language: none + +*akka-file-mailbox:* + +.. literalinclude:: ../../akka-durable-mailboxes/akka-file-mailbox/src/main/resources/akka-file-mailbox-reference.conf + :language: none + +*akka-mongo-mailbox:* + +.. literalinclude:: ../../akka-durable-mailboxes/akka-mongo-mailbox/src/main/resources/akka-mongo-mailbox-reference.conf + :language: none + +*akka-redis-mailbox:* + +.. literalinclude:: ../../akka-durable-mailboxes/akka-redis-mailbox/src/main/resources/akka-redis-mailbox-reference.conf + :language: none + +*akka-zookeeper-mailbox:* + +.. literalinclude:: ../../akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/resources/akka-zookeeper-mailbox-reference.conf :language: none A custom ``akka.conf`` might look like this:: - # In this file you can override any option defined in the 'akka-reference.conf' file. - # Copy in all or parts of the 'akka-reference.conf' file and modify as you please. + # In this file you can override any option defined in the reference files. + # Copy in parts of the reference files and modify as you please. 
akka { event-handlers = ["akka.event.slf4j.Slf4jEventHandler"] + loglevel = DEBUG # Options: ERROR, WARNING, INFO, DEBUG + # this level is used by the configured loggers (see "event-handlers") as soon + # as they have been started; before that, see "stdout-loglevel" + stdout-loglevel = DEBUG # Loglevel for the very basic logger activated during AkkaApplication startup # Comma separated list of the enabled modules. enabled-modules = ["camel", "remote"] @@ -56,7 +109,9 @@ A custom ``akka.conf`` might look like this:: "sample.myservice.Boot"] actor { - throughput = 10 # Throughput for Dispatcher, set to 1 for complete fairness + default-dispatcher { + throughput = 10 # Throughput for default Dispatcher, set to 1 for complete fairness + } } remote { @@ -68,6 +123,12 @@ A custom ``akka.conf`` might look like this:: .. _-Dakka.mode: +Config file format +------------------ + +The configuration file syntax is described in the `HOCON `_ +specification. Note that it supports three formats; conf, json, and properties. 
+ Specifying files for different modes ------------------------------------ diff --git a/akka-docs/intro/code/tutorials/first/Pi.scala b/akka-docs/intro/code/tutorials/first/Pi.scala index 6bbf05ee27..6be88d0f32 100644 --- a/akka-docs/intro/code/tutorials/first/Pi.scala +++ b/akka-docs/intro/code/tutorials/first/Pi.scala @@ -11,10 +11,10 @@ // import java.util.concurrent.CountDownLatch // //#imports -// //#app +// //#system // object Pi extends App { -// val app = ActorSystem() +// val system = ActorSystem() // calculate(nrOfWorkers = 4, nrOfElements = 10000, nrOfMessages = 10000) @@ -66,10 +66,10 @@ // //#create-workers // // create the workers -// val workers = Vector.fill(nrOfWorkers)(app.actorOf[Worker]) +// val workers = Vector.fill(nrOfWorkers)(system.actorOf[Worker]) // // wrap them with a load-balancing router -// val router = app.actorOf(RoutedProps().withRoundRobinRouter.withLocalConnections(workers), "pi") +// val router = system.actorOf(RoutedProps().withRoundRobinRouter.withLocalConnections(workers), "pi") // //#create-workers // //#master-receive @@ -119,7 +119,7 @@ // val latch = new CountDownLatch(1) // // create the master -// val master = app.actorOf(new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch)) +// val master = system.actorOf(new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch)) // // start the calculation // master ! 
Calculate @@ -128,5 +128,5 @@ // latch.await() // } // } -// //#app +// //#system diff --git a/akka-docs/intro/deployment-scenarios.rst b/akka-docs/intro/deployment-scenarios.rst index 94047f242c..829d93829e 100644 --- a/akka-docs/intro/deployment-scenarios.rst +++ b/akka-docs/intro/deployment-scenarios.rst @@ -54,7 +54,7 @@ To use the plugin, first add a plugin definition to your sbt project by creating resolvers += Classpaths.typesafeResolver - addSbtPlugin("se.scalablesolutions.akka" % "akka-sbt-plugin" % "2.0-SNAPSHOT") + addSbtPlugin("com.typesafe.akka" % "akka-sbt-plugin" % "2.0-SNAPSHOT") Then use the AkkaKernelPlugin settings. In a 'light' configuration (build.sbt):: @@ -75,7 +75,7 @@ Or in a 'full' configuration (Build.scala). For example:: version := "0.1", scalaVersion := "2.9.1" resolvers += "Typesafe Repo" at "http://repo.typesafe.com/typesafe/releases/", - libraryDependencies += "se.scalablesolutions.akka" % "akka-kernel" % "2.0-SNAPSHOT" + libraryDependencies += "com.typesafe.akka" % "akka-kernel" % "2.0-SNAPSHOT" ) ) } diff --git a/akka-docs/intro/getting-started-first-java.rst b/akka-docs/intro/getting-started-first-java.rst index be79581416..ee890d723d 100644 --- a/akka-docs/intro/getting-started-first-java.rst +++ b/akka-docs/intro/getting-started-first-java.rst @@ -180,7 +180,7 @@ It should now look something like this: - se.scalablesolutions.akka + com.typesafe.akka akka-actor 2.0-SNAPSHOT diff --git a/akka-docs/intro/getting-started-first-scala.rst b/akka-docs/intro/getting-started-first-scala.rst index 563ab68f83..91a730819f 100644 --- a/akka-docs/intro/getting-started-first-scala.rst +++ b/akka-docs/intro/getting-started-first-scala.rst @@ -192,7 +192,7 @@ in the directory you want to create your project in:: resolvers += "Typesafe Repository" at "http://repo.typesafe.com/typesafe/releases/" - libraryDependencies += "se.scalablesolutions.akka" % "akka-actor" % "2.0-SNAPSHOT" + libraryDependencies += "com.typesafe.akka" % "akka-actor" % 
"2.0-SNAPSHOT" Create a directory ``src/main/scala`` in which you will store the Scala source files. @@ -202,11 +202,11 @@ modules beyond ``akka-actor``, you can add these as ``libraryDependencies`` in ``build.sbt``. Note that there must be a blank line between each. Here is an example adding ``akka-remote`` and ``akka-stm``:: - libraryDependencies += "se.scalablesolutions.akka" % "akka-actor" % "2.0-SNAPSHOT" + libraryDependencies += "com.typesafe.akka" % "akka-actor" % "2.0-SNAPSHOT" - libraryDependencies += "se.scalablesolutions.akka" % "akka-remote" % "2.0-SNAPSHOT" + libraryDependencies += "com.typesafe.akka" % "akka-remote" % "2.0-SNAPSHOT" - libraryDependencies += "se.scalablesolutions.akka" % "akka-stm" % "2.0-SNAPSHOT" + libraryDependencies += "com.typesafe.akka" % "akka-stm" % "2.0-SNAPSHOT" So, now we are all set. diff --git a/akka-docs/intro/getting-started.rst b/akka-docs/intro/getting-started.rst index 85d6663933..31d579f3ed 100644 --- a/akka-docs/intro/getting-started.rst +++ b/akka-docs/intro/getting-started.rst @@ -117,7 +117,7 @@ Summary of the essential parts for using Akka with Maven: .. code-block:: xml - se.scalablesolutions.akka + com.typesafe.akka akka-actor 2.0-SNAPSHOT @@ -145,7 +145,7 @@ SBT installation instructions on `https://github.com/harrah/xsbt/wiki/Setup `_ repository as +timestamped snapshot versions. Pick a timestamp from +``_. +All Akka modules that belong to the same build have the same timestamp. 
+ +Make sure that you add the repository to the sbt resolvers or maven repositories:: + + resolvers += "Typesafe Timestamp Repo" at "http://repo.typesafe.com/typesafe/maven-timestamps/" + +Define the library dependencies with the timestamp as version:: + + libraryDependencies += "com.typesafe.akka" % "akka-actor" % "2.0-20111118-000627" + + libraryDependencies += "com.typesafe.akka" % "akka-remote" % "2.0-20111118-000627" + diff --git a/akka-docs/scala/code/ActorDocSpec.scala b/akka-docs/scala/code/ActorDocSpec.scala index fb2ccdde5b..b5665aede1 100644 --- a/akka-docs/scala/code/ActorDocSpec.scala +++ b/akka-docs/scala/code/ActorDocSpec.scala @@ -8,13 +8,12 @@ import akka.util.duration._ //#imports import akka.actor.Actor import akka.event.Logging -import akka.config.Configuration //#imports //#my-actor class MyActor extends Actor { - val log = Logging(app, this) + val log = Logging(system, this) def receive = { case "test" ⇒ log.info("received test") case _ ⇒ log.info("received unknown message") @@ -22,7 +21,7 @@ class MyActor extends Actor { } //#my-actor -class ActorDocSpec extends AkkaSpec(Configuration("akka.loglevel" -> "INFO")) { +class ActorDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) { "creating actor with AkkaSpec.actorOf" in { //#creating-actorOf @@ -31,13 +30,13 @@ class ActorDocSpec extends AkkaSpec(Configuration("akka.loglevel" -> "INFO")) { // testing the actor - // TODO: convert docs to AkkaSpec(Configuration(...)) + // TODO: convert docs to AkkaSpec(Map(...)) val filter = EventFilter.custom { case e: Logging.Info ⇒ true case _ ⇒ false } - app.eventStream.publish(TestEvent.Mute(filter)) - app.eventStream.subscribe(testActor, classOf[Logging.Info]) + system.eventStream.publish(TestEvent.Mute(filter)) + system.eventStream.subscribe(testActor, classOf[Logging.Info]) myActor ! 
"test" expectMsgPF(1 second) { case Logging.Info(_, "received test") ⇒ true } @@ -45,8 +44,8 @@ class ActorDocSpec extends AkkaSpec(Configuration("akka.loglevel" -> "INFO")) { myActor ! "unknown" expectMsgPF(1 second) { case Logging.Info(_, "received unknown message") ⇒ true } - app.eventStream.unsubscribe(testActor) - app.eventStream.publish(TestEvent.UnMute(filter)) + system.eventStream.unsubscribe(testActor) + system.eventStream.publish(TestEvent.UnMute(filter)) myActor.stop() } diff --git a/akka-docs/scala/http.rst b/akka-docs/scala/http.rst index 835408ae9b..31cdbd9430 100644 --- a/akka-docs/scala/http.rst +++ b/akka-docs/scala/http.rst @@ -99,7 +99,7 @@ If you want to use jetty-run in SBT you need to exclude the version of Jetty tha override def ivyXML = - + diff --git a/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/resources/akka-beanstalk-mailbox-reference.conf b/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/resources/akka-beanstalk-mailbox-reference.conf new file mode 100644 index 0000000000..3e6b914bf7 --- /dev/null +++ b/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/resources/akka-beanstalk-mailbox-reference.conf @@ -0,0 +1,22 @@ +################################################## +# Akka Beanstalk Mailboxes Reference Config File # +################################################## + +# This the reference config file has all the default settings. +# Make your edits/overrides in your akka.conf. 
+ +akka { + actor { + mailbox { + beanstalk { + hostname = "127.0.0.1" + port = 11300 + reconnect-window = 5s + message-submit-delay = 0s + message-submit-timeout = 5s + message-time-to-live = 120s + } + } + } + +} diff --git a/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailbox.scala b/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailbox.scala index c5565d1026..aa0ee6645d 100644 --- a/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailbox.scala +++ b/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailbox.scala @@ -5,6 +5,7 @@ package akka.actor.mailbox import com.surftools.BeanstalkClient._ import com.surftools.BeanstalkClientImpl._ +import java.util.concurrent.TimeUnit.MILLISECONDS import akka.actor.LocalActorRef import akka.util.Duration import akka.AkkaException @@ -20,15 +21,11 @@ class BeanstalkBasedMailboxException(message: String) extends AkkaException(mess */ class BeanstalkBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) with DurableMessageSerialization { - val hostname = app.config.getString("akka.actor.mailbox.beanstalk.hostname", "0.0.0.0") - val port = app.config.getInt("akka.actor.mailbox.beanstalk.port", 11300) - def defaultTimeUnit = app.AkkaConfig.DefaultTimeUnit - val reconnectWindow = Duration(app.config.getInt("akka.actor.mailbox.beanstalk.reconnect-window", 5), defaultTimeUnit).toSeconds.toInt - val messageSubmitDelay = Duration(app.config.getInt("akka.actor.mailbox.beanstalk.message-submit-delay", 0), defaultTimeUnit).toSeconds.toInt - val messageSubmitTimeout = Duration(app.config.getInt("akka.actor.mailbox.beanstalk.message-submit-timeout", 5), defaultTimeUnit).toSeconds.toInt - val messageTimeToLive = Duration(app.config.getInt("akka.actor.mailbox.beanstalk.message-time-to-live", 120), defaultTimeUnit).toSeconds.toInt + private val 
settings = BeanstalkBasedMailboxExtension(owner.system).settings + private val messageSubmitDelaySeconds = settings.MessageSubmitDelay.toSeconds.toInt + private val messageTimeToLiveSeconds = settings.MessageTimeToLive.toSeconds.toInt - val log = Logging(app, this) + val log = Logging(system, "BeanstalkBasedMailbox") private val queue = new ThreadLocal[Client] { override def initialValue = connect(name) } @@ -36,7 +33,7 @@ class BeanstalkBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) def enqueue(receiver: ActorRef, envelope: Envelope) { log.debug("ENQUEUING message in beanstalk-based mailbox [%s]".format(envelope)) - Some(queue.get.put(65536, messageSubmitDelay, messageTimeToLive, serialize(envelope)).toInt) + Some(queue.get.put(65536, messageSubmitDelaySeconds, messageTimeToLiveSeconds, serialize(envelope)).toInt) } def dequeue(): Envelope = try { @@ -87,15 +84,16 @@ class BeanstalkBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) while (!connected) { attempts += 1 try { - client = new ClientImpl(hostname, port) + client = new ClientImpl(settings.Hostname, settings.Port) client.useTube(name) client.watch(name) connected = true } catch { case e: Exception ⇒ - log.error(e, "Unable to connect to Beanstalk. Retrying in [%s] seconds: %s".format(reconnectWindow, e)) + log.error(e, "Unable to connect to Beanstalk. Retrying in [%s] seconds: %s". 
+ format(settings.ReconnectWindow.toSeconds, e)) try { - Thread.sleep(1000 * reconnectWindow) + Thread.sleep(settings.ReconnectWindow.toMillis) } catch { case e: InterruptedException ⇒ {} } diff --git a/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailboxExtension.scala b/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailboxExtension.scala new file mode 100644 index 0000000000..539b5b45e1 --- /dev/null +++ b/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailboxExtension.scala @@ -0,0 +1,58 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. + */ +package akka.actor.mailbox + +import akka.actor.ActorSystem +import akka.actor.ExtensionKey +import akka.actor.Extension +import akka.actor.ActorSystemImpl +import com.typesafe.config.Config +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigParseOptions +import com.typesafe.config.ConfigRoot +import akka.util.Duration +import java.util.concurrent.TimeUnit.MILLISECONDS + +object BeanstalkBasedMailboxExtensionKey extends ExtensionKey[BeanstalkBasedMailboxExtension] + +object BeanstalkBasedMailboxExtension { + def apply(system: ActorSystem): BeanstalkBasedMailboxExtension = { + if (!system.hasExtension(BeanstalkBasedMailboxExtensionKey)) { + system.registerExtension(new BeanstalkBasedMailboxExtension) + } + system.extension(BeanstalkBasedMailboxExtensionKey) + } + + class Settings(cfg: Config) { + private def referenceConfig: Config = + ConfigFactory.parseResource(classOf[ActorSystem], "/akka-beanstalk-mailbox-reference.conf", + ConfigParseOptions.defaults.setAllowMissing(false)) + val config: ConfigRoot = ConfigFactory.emptyRoot("akka-beanstalk-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve() + + import config._ + + val Hostname = getString("akka.actor.mailbox.beanstalk.hostname") + val Port = getInt("akka.actor.mailbox.beanstalk.port") + 
val ReconnectWindow = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.reconnect-window"), MILLISECONDS) + val MessageSubmitDelay = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.message-submit-delay"), MILLISECONDS) + val MessageSubmitTimeout = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.message-submit-timeout"), MILLISECONDS) + val MessageTimeToLive = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.message-time-to-live"), MILLISECONDS) + + } +} + +class BeanstalkBasedMailboxExtension extends Extension[BeanstalkBasedMailboxExtension] { + import BeanstalkBasedMailboxExtension._ + @volatile + private var _settings: Settings = _ + + def key = BeanstalkBasedMailboxExtensionKey + + def init(system: ActorSystemImpl) { + _settings = new Settings(system.applicationConfig) + } + + def settings: Settings = _settings + +} \ No newline at end of file diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/resources/akka-file-mailbox-reference.conf b/akka-durable-mailboxes/akka-file-mailbox/src/main/resources/akka-file-mailbox-reference.conf new file mode 100644 index 0000000000..313b8d85e9 --- /dev/null +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/resources/akka-file-mailbox-reference.conf @@ -0,0 +1,28 @@ +############################################# +# Akka File Mailboxes Reference Config File # +############################################# + +# This the reference config file has all the default settings. +# Make your edits/overrides in your akka.conf. 
+ +akka { + actor { + mailbox { + file-based { + directory-path = "./_mb" + max-items = 2147483647 + max-size = 2147483647 bytes + max-items = 2147483647 + max-item-size = 2147483647 bytes + max-age = 0s + max-journal-size = 16 megabytes + max-memory-size = 128 megabytes + max-journal-overflow = 10 + max-journal-size-absolute = 9223372036854775807 bytes + discard-old-when-full = on + keep-journal = on + sync-journal = off + } + } + } +} diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FileBasedMailboxExtension.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FileBasedMailboxExtension.scala new file mode 100644 index 0000000000..f6ca730a1c --- /dev/null +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FileBasedMailboxExtension.scala @@ -0,0 +1,65 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. + */ +package akka.actor.mailbox + +import akka.actor.ActorSystem +import akka.actor.ExtensionKey +import akka.actor.Extension +import akka.actor.ActorSystemImpl +import com.typesafe.config.Config +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigParseOptions +import com.typesafe.config.ConfigRoot +import akka.util.Duration +import java.util.concurrent.TimeUnit.MILLISECONDS + +object FileBasedMailboxExtensionKey extends ExtensionKey[FileBasedMailboxExtension] + +object FileBasedMailboxExtension { + def apply(system: ActorSystem): FileBasedMailboxExtension = { + if (!system.hasExtension(FileBasedMailboxExtensionKey)) { + system.registerExtension(new FileBasedMailboxExtension) + } + system.extension(FileBasedMailboxExtensionKey) + } + + class Settings(cfg: Config) { + private def referenceConfig: Config = + ConfigFactory.parseResource(classOf[ActorSystem], "/akka-file-mailbox-reference.conf", + ConfigParseOptions.defaults.setAllowMissing(false)) + val config: ConfigRoot = 
ConfigFactory.emptyRoot("akka-file-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve() + + import config._ + + val QueuePath = getString("akka.actor.mailbox.file-based.directory-path") + + val MaxItems = getInt("akka.actor.mailbox.file-based.max-items") + val MaxSize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-size") + val MaxItemSize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-item-size") + val MaxAge = Duration(getMilliseconds("akka.actor.mailbox.file-based.max-age"), MILLISECONDS) + val MaxJournalSize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-journal-size") + val MaxMemorySize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-memory-size") + val MaxJournalOverflow = getInt("akka.actor.mailbox.file-based.max-journal-overflow") + val MaxJournalSizeAbsolute = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-journal-size-absolute") + val DiscardOldWhenFull = getBoolean("akka.actor.mailbox.file-based.discard-old-when-full") + val KeepJournal = getBoolean("akka.actor.mailbox.file-based.keep-journal") + val SyncJournal = getBoolean("akka.actor.mailbox.file-based.sync-journal") + + } +} + +class FileBasedMailboxExtension extends Extension[FileBasedMailboxExtension] { + import FileBasedMailboxExtension._ + @volatile + private var _settings: Settings = _ + + def key = FileBasedMailboxExtensionKey + + def init(system: ActorSystemImpl) { + _settings = new Settings(system.applicationConfig) + } + + def settings: Settings = _settings + +} \ No newline at end of file diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FiledBasedMailbox.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FiledBasedMailbox.scala index addd493940..e167a88f27 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FiledBasedMailbox.scala +++ 
b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FiledBasedMailbox.scala @@ -6,26 +6,20 @@ package akka.actor.mailbox import org.apache.commons.io.FileUtils import akka.actor.ActorCell -import akka.config.Configuration import akka.dispatch.Envelope import akka.event.Logging import akka.actor.ActorRef -object FileBasedMailbox { - def queuePath(config: Configuration): String = { - config.getString("akka.actor.mailbox.file-based.directory-path", "./_mb") // /var/spool/akka - } -} - class FileBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) with DurableMessageSerialization { - val log = Logging(app, this) + val log = Logging(system, "FileBasedMailbox") - val queuePath = FileBasedMailbox.queuePath(owner.app.config) + private val settings = FileBasedMailboxExtension(owner.system).settings + val queuePath = settings.QueuePath private val queue = try { try { FileUtils.forceMkdir(new java.io.File(queuePath)) } catch { case e ⇒ {} } - val queue = new filequeue.PersistentQueue(queuePath, name, owner.app.config, log) + val queue = new filequeue.PersistentQueue(queuePath, name, settings, log) queue.setup // replays journal queue.discardExpired queue diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/PersistentQueue.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/PersistentQueue.scala index 63e52a939c..1ae3cd9e06 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/PersistentQueue.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/PersistentQueue.scala @@ -19,8 +19,10 @@ package akka.actor.mailbox.filequeue import java.io._ import scala.collection.mutable -import akka.config.Configuration import akka.event.LoggingAdapter +import akka.util.Duration +import java.util.concurrent.TimeUnit +import akka.actor.mailbox.FileBasedMailboxExtension // a config value 
that's backed by a global setting but may be locally overridden class OverlaySetting[T](base: ⇒ T) { @@ -32,7 +34,7 @@ class OverlaySetting[T](base: ⇒ T) { def apply() = local.getOrElse(base) } -class PersistentQueue(persistencePath: String, val name: String, val config: Configuration, log: LoggingAdapter) { +class PersistentQueue(persistencePath: String, val name: String, val settings: FileBasedMailboxExtension.Settings, log: LoggingAdapter) { private case object ItemArrived @@ -123,22 +125,20 @@ class PersistentQueue(persistencePath: String, val name: String, val config: Con def memoryBytes: Long = synchronized { _memoryBytes } def inReadBehind = synchronized { journal.inReadBehind } - //FIXME, segment commented out, might have damaged semantics, investigate. - //config.subscribe { c => configure(c.getOrElse(new Config)) } - configure(config) + configure(settings) - def configure(config: Configuration) = synchronized { - maxItems set config.getInt("akka.actor.mailbox.file-based.max-items") - maxSize set config.getLong("akka.actor.mailbox.file-based.max-size") - maxItemSize set config.getLong("akka.actor.mailbox.file-based.max-item-size") - maxAge set config.getInt("akka.actor.mailbox.file-based.max-age") - maxJournalSize set config.getLong("akka.actor.mailbox.file-based.max-journal-size") - maxMemorySize set config.getLong("akka.actor.mailbox.file-based.max-memory-size") - maxJournalOverflow set config.getInt("akka.actor.mailbox.file-based.max-journal-overflow") - maxJournalSizeAbsolute set config.getLong("akka.actor.mailbox.file-based.max-journal-size-absolute") - discardOldWhenFull set config.getBool("akka.actor.mailbox.file-based.discard-old-when-full") - keepJournal set config.getBool("akka.actor.mailbox.file-based.journal") - syncJournal set config.getBool("akka.actor.mailbox.file-based.sync-journal") + def configure(settings: FileBasedMailboxExtension.Settings) = synchronized { + maxItems set Some(settings.MaxItems) + maxSize set Some(settings.MaxSize) + 
maxItemSize set Some(settings.MaxItemSize) + maxAge set Some(settings.MaxAge.toSeconds.toInt) + maxJournalSize set Some(settings.MaxJournalSize) + maxMemorySize set Some(settings.MaxMemorySize) + maxJournalOverflow set Some(settings.MaxJournalOverflow) + maxJournalSizeAbsolute set Some(settings.MaxJournalSizeAbsolute) + discardOldWhenFull set Some(settings.DiscardOldWhenFull) + keepJournal set Some(settings.KeepJournal) + syncJournal set Some(settings.SyncJournal) log.info("Configuring queue %s: journal=%s, max-items=%s, max-size=%s, max-age=%s, max-journal-size=%s, max-memory-size=%s, max-journal-overflow=%s, max-journal-size-absolute=%s, discard-old-when-full=%s, sync-journal=%s" .format( name, keepJournal(), maxItems(), maxSize(), maxAge(), maxJournalSize(), maxMemorySize(), diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/QueueCollection.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/QueueCollection.scala index 69b3ba7605..ff5e12c86e 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/QueueCollection.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/QueueCollection.scala @@ -20,12 +20,12 @@ package akka.actor.mailbox.filequeue import java.io.File import java.util.concurrent.CountDownLatch import scala.collection.mutable -import akka.config.Configuration import akka.event.LoggingAdapter +import akka.actor.mailbox.FileBasedMailboxExtension class InaccessibleQueuePath extends Exception("Inaccessible queue path: Must be a directory and writable") -class QueueCollection(queueFolder: String, private var queueConfigs: Configuration, log: LoggingAdapter) { +class QueueCollection(queueFolder: String, settings: FileBasedMailboxExtension.Settings, log: LoggingAdapter) { private val path = new File(queueFolder) if (!path.isDirectory) { @@ -46,13 +46,6 @@ class QueueCollection(queueFolder: 
String, private var queueConfigs: Configurati val queueHits = new Counter() val queueMisses = new Counter() - /* FIXME, segment commented out, might have damaged semantics, investigate. - queueConfigs.subscribe { c => - synchronized { - queueConfigs = c.getOrElse(new Config) - } - }*/ - // preload any queues def loadQueues() { path.list() filter { name ⇒ !(name contains "~~") } map { queue(_) } @@ -79,9 +72,9 @@ class QueueCollection(queueFolder: String, private var queueConfigs: Configurati val master = name.split('+')(0) fanout_queues.getOrElseUpdate(master, new mutable.HashSet[String]) += name log.debug("Fanout queue {} added to {}", name, master) - new PersistentQueue(path.getPath, name, queueConfigs, log) + new PersistentQueue(path.getPath, name, settings, log) } else { - new PersistentQueue(path.getPath, name, queueConfigs, log) + new PersistentQueue(path.getPath, name, settings, log) } q.setup queues(name) = q diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/tools/QDumper.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/tools/QDumper.scala index 608f5bc447..eb8a7f9e0b 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/tools/QDumper.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/tools/QDumper.scala @@ -142,11 +142,11 @@ object QDumper { System.exit(0) } - val app = ActorSystem() + val system = ActorSystem() for (filename ← filenames) { println("Queue: " + filename) - new QueueDumper(filename, app.log)() + new QueueDumper(filename, system.log)() } } } diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/test/scala/akka/actor/mailbox/FileBasedMailboxSpec.scala b/akka-durable-mailboxes/akka-file-mailbox/src/test/scala/akka/actor/mailbox/FileBasedMailboxSpec.scala index bfb9a6e0e3..8b17ec9662 100644 --- 
a/akka-durable-mailboxes/akka-file-mailbox/src/test/scala/akka/actor/mailbox/FileBasedMailboxSpec.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/test/scala/akka/actor/mailbox/FileBasedMailboxSpec.scala @@ -6,7 +6,7 @@ import org.apache.commons.io.FileUtils class FileBasedMailboxSpec extends DurableMailboxSpec("File", FileDurableMailboxType) { def clean { - val queuePath = FileBasedMailbox.queuePath(app.config) + val queuePath = FileBasedMailboxExtension(system).settings.QueuePath FileUtils.deleteDirectory(new java.io.File(queuePath)) } diff --git a/akka-durable-mailboxes/akka-mailboxes-common/src/main/scala/akka/actor/mailbox/DurableMailbox.scala b/akka-durable-mailboxes/akka-mailboxes-common/src/main/scala/akka/actor/mailbox/DurableMailbox.scala index 2473d75e39..b5d832d443 100644 --- a/akka-durable-mailboxes/akka-mailboxes-common/src/main/scala/akka/actor/mailbox/DurableMailbox.scala +++ b/akka-durable-mailboxes/akka-mailboxes-common/src/main/scala/akka/actor/mailbox/DurableMailbox.scala @@ -5,12 +5,10 @@ package akka.actor.mailbox import akka.util.ReflectiveAccess import java.lang.reflect.InvocationTargetException - import akka.AkkaException import akka.actor.ActorCell import akka.actor.ActorRef import akka.actor.SerializedActorRef -import akka.config.Configuration import akka.dispatch.Envelope import akka.dispatch.DefaultSystemMessageQueue import akka.dispatch.Dispatcher @@ -25,6 +23,7 @@ import akka.remote.RemoteProtocol.RemoteMessageProtocol import akka.remote.RemoteActorRefProvider import akka.remote.netty.NettyRemoteServer import akka.serialization.Serialization +import com.typesafe.config.Config private[akka] object DurableExecutableMailboxConfig { val Name = "[\\.\\/\\$\\s]".r @@ -40,7 +39,7 @@ class DurableMailboxException private[akka] (message: String, cause: Throwable) abstract class DurableMailbox(owner: ActorCell) extends Mailbox(owner) with DefaultSystemMessageQueue { import DurableExecutableMailboxConfig._ - def app = owner.app + def 
system = owner.system def ownerPath = owner.self.path val ownerPathString = ownerPath.path.mkString("/") val name = "mailbox_" + Name.replaceAllIn(ownerPathString, "_") @@ -54,11 +53,11 @@ trait DurableMessageSerialization { def serialize(durableMessage: Envelope): Array[Byte] = { def serializeActorRef(ref: ActorRef): ActorRefProtocol = { - val serRef = owner.app.provider.serialize(ref) + val serRef = owner.system.provider.serialize(ref) ActorRefProtocol.newBuilder.setPath(serRef.path).setHost(serRef.hostname).setPort(serRef.port).build } - val message = MessageSerializer.serialize(owner.app, durableMessage.message.asInstanceOf[AnyRef]) + val message = MessageSerializer.serialize(owner.system, durableMessage.message.asInstanceOf[AnyRef]) val builder = RemoteMessageProtocol.newBuilder .setMessage(message) .setRecipient(serializeActorRef(owner.self)) @@ -71,11 +70,11 @@ trait DurableMessageSerialization { def deserializeActorRef(refProtocol: ActorRefProtocol): ActorRef = { val serRef = SerializedActorRef(refProtocol.getHost, refProtocol.getPort, refProtocol.getPath) - owner.app.provider.deserialize(serRef).getOrElse(owner.app.deadLetters) + owner.system.provider.deserialize(serRef).getOrElse(owner.system.deadLetters) } val durableMessage = RemoteMessageProtocol.parseFrom(bytes) - val message = MessageSerializer.deserialize(owner.app, durableMessage.getMessage) + val message = MessageSerializer.deserialize(owner.system, durableMessage.getMessage) val sender = deserializeActorRef(durableMessage.getSender) new Envelope(message, sender) @@ -130,8 +129,9 @@ case class FqnDurableMailboxType(mailboxFQN: String) extends DurableMailboxType( class DurableMailboxConfigurator { // TODO PN #896: when and how is this class supposed to be used? Can we remove it? 
- def mailboxType(config: Configuration): MailboxType = { - val storage = config.getString("storage") map { + def mailboxType(config: Config): MailboxType = { + if (!config.hasPath("storage")) throw new DurableMailboxException("No 'storage' defined for durable mailbox") + config.getString("storage") match { case "redis" ⇒ RedisDurableMailboxType case "mongodb" ⇒ MongoDurableMailboxType case "beanstalk" ⇒ BeanstalkDurableMailboxType @@ -139,7 +139,5 @@ class DurableMailboxConfigurator { case "file" ⇒ FileDurableMailboxType case fqn ⇒ FqnDurableMailboxType(fqn) } - - storage.getOrElse(throw new DurableMailboxException("No 'storage' defined for durable mailbox")) } } diff --git a/akka-durable-mailboxes/akka-mailboxes-common/src/test/scala/akka/actor/mailbox/DurableMailboxSpec.scala b/akka-durable-mailboxes/akka-mailboxes-common/src/test/scala/akka/actor/mailbox/DurableMailboxSpec.scala index fa17bfd299..ab0f0206d3 100644 --- a/akka-durable-mailboxes/akka-mailboxes-common/src/test/scala/akka/actor/mailbox/DurableMailboxSpec.scala +++ b/akka-durable-mailboxes/akka-mailboxes-common/src/test/scala/akka/actor/mailbox/DurableMailboxSpec.scala @@ -26,7 +26,7 @@ object DurableMailboxSpecActorFactory { abstract class DurableMailboxSpec(val backendName: String, val mailboxType: DurableMailboxType) extends AkkaSpec with BeforeAndAfterEach { import DurableMailboxSpecActorFactory._ - implicit val dispatcher = new Dispatchers(app).newDispatcher(backendName, throughput = 1, mailboxType = mailboxType).build + implicit val dispatcher = system.dispatcherFactory.newDispatcher(backendName, throughput = 1, mailboxType = mailboxType).build def createMailboxTestActor(id: String)(implicit dispatcher: MessageDispatcher): ActorRef = actorOf(Props(new MailboxTestActor).withDispatcher(dispatcher)) diff --git a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/resources/akka-mongo-mailbox-reference.conf 
b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/resources/akka-mongo-mailbox-reference.conf new file mode 100644 index 0000000000..09a0c316ec --- /dev/null +++ b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/resources/akka-mongo-mailbox-reference.conf @@ -0,0 +1,23 @@ +################################################ +# Akka MongoDB Mailboxes Reference Config File # +################################################ + +# This the reference config file has all the default settings. +# Make your edits/overrides in your akka.conf. + +akka { + actor { + mailbox { + mongodb { + # Any specified collection name will be used as a prefix for collections that use durable mongo mailboxes + uri = "mongodb://localhost/akka.mailbox" # Follow Mongo URI Spec - http://www.mongodb.org/display/DOCS/Connections + + # Configurable timeouts for certain ops + timeout { + read = 3000ms # time to wait for a read to succeed before timing out the future + write = 3000ms # time to wait for a write to succeed before timing out the future + } + } + } + } +} diff --git a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/BSONSerialization.scala b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/BSONSerialization.scala index 5a38135ed4..8882b2738e 100644 --- a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/BSONSerialization.scala +++ b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/BSONSerialization.scala @@ -17,9 +17,11 @@ import org.bson.DefaultBSONSerializer import akka.actor.SerializedActorRef import akka.remote.RemoteProtocol.MessageProtocol import akka.remote.MessageSerializer -import akka.actor.ActorSystem +import akka.actor.{ ActorSystem, ActorSystemImpl } -class BSONSerializableMailbox(app: ActorSystem) extends SerializableBSONObject[MongoDurableMessage] with Logging { +class BSONSerializableMailbox(system: ActorSystem) extends SerializableBSONObject[MongoDurableMessage] 
with Logging { + + val systemImpl = system.asInstanceOf[ActorSystemImpl] protected[akka] def serializeDurableMsg(msg: MongoDurableMessage)(implicit serializer: BSONSerializer) = { @@ -28,7 +30,7 @@ class BSONSerializableMailbox(app: ActorSystem) extends SerializableBSONObject[M b += "_id" -> msg._id b += "ownerPath" -> msg.ownerPath - val sender = app.provider.serialize(msg.sender) + val sender = systemImpl.provider.serialize(msg.sender) b += "senderPath" -> sender.path b += "senderHostname" -> sender.hostname b += "senderPort" -> sender.port @@ -37,10 +39,10 @@ class BSONSerializableMailbox(app: ActorSystem) extends SerializableBSONObject[M * TODO - Figure out a way for custom serialization of the message instance * TODO - Test if a serializer is registered for the message and if not, use toByteString */ - val msgData = MessageSerializer.serialize(app, msg.message.asInstanceOf[AnyRef]) + val msgData = MessageSerializer.serialize(system, msg.message.asInstanceOf[AnyRef]) b += "message" -> new org.bson.types.Binary(0, msgData.toByteArray) val doc = b.result - app.log.debug("Serialized Document: {}", doc) + system.log.debug("Serialized Document: {}", doc) serializer.putObject(doc) } @@ -68,15 +70,15 @@ class BSONSerializableMailbox(app: ActorSystem) extends SerializableBSONObject[M val deserializer = new DefaultBSONDeserializer // TODO - Skip the whole doc step for performance, fun, and profit! 
(Needs Salat / custom Deser) val doc = deserializer.decodeAndFetch(in).asInstanceOf[BSONDocument] - app.log.debug("Deserializing a durable message from MongoDB: {}", doc) + system.log.debug("Deserializing a durable message from MongoDB: {}", doc) val msgData = MessageProtocol.parseFrom(doc.as[org.bson.types.Binary]("message").getData) - val msg = MessageSerializer.deserialize(app, msgData) + val msg = MessageSerializer.deserialize(system, msgData) val ownerPath = doc.as[String]("ownerPath") val senderPath = doc.as[String]("senderPath") val senderHostname = doc.as[String]("senderHostname") val senderPort = doc.as[Int]("senderPort") - val sender = app.provider.deserialize(SerializedActorRef(senderHostname, senderPort, senderPath)). - getOrElse(app.deadLetters) + val sender = systemImpl.provider.deserialize(SerializedActorRef(senderHostname, senderPort, senderPath)). + getOrElse(system.deadLetters) MongoDurableMessage(ownerPath, msg, sender) } diff --git a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailbox.scala b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailbox.scala index 6ada60bd4b..65fda89354 100644 --- a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailbox.scala +++ b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailbox.scala @@ -28,17 +28,12 @@ class MongoBasedMailboxException(message: String) extends AkkaException(message) */ class MongoBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) { // this implicit object provides the context for reading/writing things as MongoDurableMessage - implicit val mailboxBSONSer = new BSONSerializableMailbox(app) + implicit val mailboxBSONSer = new BSONSerializableMailbox(system) implicit val safeWrite = WriteConcern.Safe // TODO - Replica Safe when appropriate! 
- val URI_CONFIG_KEY = "akka.actor.mailbox.mongodb.uri" - val WRITE_TIMEOUT_KEY = "akka.actor.mailbox.mongodb.timeout.write" - val READ_TIMEOUT_KEY = "akka.actor.mailbox.mongodb.timeout.read" - val mongoURI = app.config.getString(URI_CONFIG_KEY) - val writeTimeout = app.config.getInt(WRITE_TIMEOUT_KEY, 3000) - val readTimeout = app.config.getInt(READ_TIMEOUT_KEY, 3000) + private val settings = MongoBasedMailboxExtension(owner.system).settings - val log = Logging(app, this) + val log = Logging(system, "MongoBasedMailbox") @volatile private var mongo = connect() @@ -48,7 +43,7 @@ class MongoBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) { /* TODO - Test if a BSON serializer is registered for the message and only if not, use toByteString? */ val durableMessage = MongoDurableMessage(ownerPathString, envelope.message, envelope.sender) // todo - do we need to filter the actor name at all for safe collection naming? - val result = new DefaultPromise[Boolean](writeTimeout)(dispatcher) + val result = new DefaultPromise[Boolean](settings.WriteTimeout)(dispatcher) mongo.insert(durableMessage, false)(RequestFutures.write { wr: Either[Throwable, (Option[AnyRef], WriteResult)] ⇒ wr match { case Right((oid, wr)) ⇒ result.completeWithResult(true) @@ -67,7 +62,7 @@ class MongoBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) { * TODO - Should we have a specific query in place? Which way do we sort? * TODO - Error handling version! 
*/ - val envelopePromise = new DefaultPromise[Envelope](readTimeout)(dispatcher) + val envelopePromise = new DefaultPromise[Envelope](settings.ReadTimeout)(dispatcher) mongo.findAndRemove(Document.empty) { doc: Option[MongoDurableMessage] ⇒ doc match { case Some(msg) ⇒ { @@ -87,7 +82,7 @@ class MongoBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) { } def numberOfMessages: Int = { - val count = new DefaultPromise[Int](readTimeout)(dispatcher) + val count = new DefaultPromise[Int](settings.ReadTimeout)(dispatcher) mongo.count()(count.completeWithResult) count.as[Int].getOrElse(-1) } @@ -96,9 +91,9 @@ class MongoBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) { def hasMessages: Boolean = numberOfMessages > 0 private[akka] def connect() = { - require(mongoURI.isDefined, "Mongo URI (%s) must be explicitly defined in akka.conf; will not assume defaults for safety sake.".format(URI_CONFIG_KEY)) - log.info("CONNECTING mongodb uri : [{}]", mongoURI) - val _dbh = MongoConnection.fromURI(mongoURI.get) match { + require(settings.MongoURI.isDefined, "Mongo URI (%s) must be explicitly defined in akka.conf; will not assume defaults for safety sake.".format(settings.UriConfigKey)) + log.info("CONNECTING mongodb uri : [{}]", settings.MongoURI) + val _dbh = MongoConnection.fromURI(settings.MongoURI.get) match { case (conn, None, None) ⇒ { throw new UnsupportedOperationException("You must specify a database name to use with MongoDB; please see the MongoDB Connection URI Spec: 'http://www.mongodb.org/display/DOCS/Connections'") } diff --git a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailboxExtension.scala b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailboxExtension.scala new file mode 100644 index 0000000000..e6ca3443e0 --- /dev/null +++ b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailboxExtension.scala @@ -0,0 +1,56 @@ 
+/** + * Copyright (C) 2009-2011 Typesafe Inc. + */ +package akka.actor.mailbox + +import akka.actor.ActorSystem +import akka.actor.ExtensionKey +import akka.actor.Extension +import akka.actor.ActorSystemImpl +import com.typesafe.config.Config +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigParseOptions +import com.typesafe.config.ConfigRoot +import akka.util.Duration +import java.util.concurrent.TimeUnit.MILLISECONDS + +object MongoBasedMailboxExtensionKey extends ExtensionKey[MongoBasedMailboxExtension] + +object MongoBasedMailboxExtension { + def apply(system: ActorSystem): MongoBasedMailboxExtension = { + if (!system.hasExtension(MongoBasedMailboxExtensionKey)) { + system.registerExtension(new MongoBasedMailboxExtension) + } + system.extension(MongoBasedMailboxExtensionKey) + } + + class Settings(cfg: Config) { + private def referenceConfig: Config = + ConfigFactory.parseResource(classOf[ActorSystem], "/akka-mongo-mailbox-reference.conf", + ConfigParseOptions.defaults.setAllowMissing(false)) + val config: ConfigRoot = ConfigFactory.emptyRoot("akka-mongo-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve() + + import config._ + + val UriConfigKey = "akka.actor.mailbox.mongodb.uri" + val MongoURI = if (config.hasPath(UriConfigKey)) Some(config.getString(UriConfigKey)) else None + val WriteTimeout = Duration(config.getMilliseconds("akka.actor.mailbox.mongodb.timeout.write"), MILLISECONDS) + val ReadTimeout = Duration(config.getMilliseconds("akka.actor.mailbox.mongodb.timeout.read"), MILLISECONDS) + + } +} + +class MongoBasedMailboxExtension extends Extension[MongoBasedMailboxExtension] { + import MongoBasedMailboxExtension._ + @volatile + private var _settings: Settings = _ + + def key = MongoBasedMailboxExtensionKey + + def init(system: ActorSystemImpl) { + _settings = new Settings(system.applicationConfig) + } + + def settings: Settings = _settings + +} \ No newline at end of file diff --git 
a/akka-durable-mailboxes/akka-redis-mailbox/src/main/resources/akka-redis-mailbox-reference.conf b/akka-durable-mailboxes/akka-redis-mailbox/src/main/resources/akka-redis-mailbox-reference.conf new file mode 100644 index 0000000000..20f1d03abd --- /dev/null +++ b/akka-durable-mailboxes/akka-redis-mailbox/src/main/resources/akka-redis-mailbox-reference.conf @@ -0,0 +1,17 @@ +############################################## +# Akka Redis Mailboxes Reference Config File # +############################################## + +# This is the reference config file that has all the default settings. +# Make your edits/overrides in your akka.conf. + +akka { + actor { + mailbox { + redis { + hostname = "127.0.0.1" + port = 6379 + } + } + } +} diff --git a/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailbox.scala b/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailbox.scala index 060b083a2c..d005d5675f 100644 --- a/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailbox.scala +++ b/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailbox.scala @@ -17,10 +17,13 @@ class RedisBasedMailboxException(message: String) extends AkkaException(message) * @author Jonas Bonér */ class RedisBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) with DurableMessageSerialization { + + private val settings = RedisBasedMailboxExtension(owner.system).settings + @volatile private var clients = connect() // returns a RedisClientPool for multiple asynchronous message handling - val log = Logging(app, this) + val log = Logging(system, "RedisBasedMailbox") def enqueue(receiver: ActorRef, envelope: Envelope) { log.debug("ENQUEUING message in redis-based mailbox [%s]".format(envelope)) @@ -57,9 +60,7 @@ class RedisBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) with def hasMessages: Boolean = numberOfMessages > 0 //TODO review find
other solution, this will be very expensive private[akka] def connect() = { - new RedisClientPool( - app.config.getString("akka.actor.mailbox.redis.hostname", "127.0.0.1"), - app.config.getInt("akka.actor.mailbox.redis.port", 6379)) + new RedisClientPool(settings.Hostname, settings.Port) } private def withErrorHandling[T](body: ⇒ T): T = { diff --git a/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailboxExtension.scala b/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailboxExtension.scala new file mode 100644 index 0000000000..4b3d424e0f --- /dev/null +++ b/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailboxExtension.scala @@ -0,0 +1,52 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. + */ +package akka.actor.mailbox + +import akka.actor.ActorSystem +import akka.actor.ExtensionKey +import akka.actor.Extension +import akka.actor.ActorSystemImpl +import com.typesafe.config.Config +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigParseOptions +import com.typesafe.config.ConfigRoot + +object RedisBasedMailboxExtensionKey extends ExtensionKey[RedisBasedMailboxExtension] + +object RedisBasedMailboxExtension { + def apply(system: ActorSystem): RedisBasedMailboxExtension = { + if (!system.hasExtension(RedisBasedMailboxExtensionKey)) { + system.registerExtension(new RedisBasedMailboxExtension) + } + system.extension(RedisBasedMailboxExtensionKey) + } + + class Settings(cfg: Config) { + private def referenceConfig: Config = + ConfigFactory.parseResource(classOf[ActorSystem], "/akka-redis-mailbox-reference.conf", + ConfigParseOptions.defaults.setAllowMissing(false)) + val config: ConfigRoot = ConfigFactory.emptyRoot("akka-redis-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve() + + import config._ + + val Hostname = getString("akka.actor.mailbox.redis.hostname") + val Port = 
getInt("akka.actor.mailbox.redis.port") + + } +} + +class RedisBasedMailboxExtension extends Extension[RedisBasedMailboxExtension] { + import RedisBasedMailboxExtension._ + @volatile + private var _settings: Settings = _ + + def key = RedisBasedMailboxExtensionKey + + def init(system: ActorSystemImpl) { + _settings = new Settings(system.applicationConfig) + } + + def settings: Settings = _settings + +} \ No newline at end of file diff --git a/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/resources/akka-zookeeper-mailbox-reference.conf b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/resources/akka-zookeeper-mailbox-reference.conf new file mode 100644 index 0000000000..b31de45f76 --- /dev/null +++ b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/resources/akka-zookeeper-mailbox-reference.conf @@ -0,0 +1,19 @@ +################################################## +# Akka ZooKeeper Mailboxes Reference Config File # +################################################## + +# This reference config file has all the default settings. +# Make your edits/overrides in your akka.conf. 
+ +akka { + actor { + mailbox { + zookeeper { + server-addresses = "127.0.0.1:2181" + session-timeout = 60s + connection-timeout = 60s + blocking-queue = on + } + } + } +} diff --git a/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailbox.scala b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailbox.scala index c70b5972e4..3979dfdf36 100644 --- a/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailbox.scala +++ b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailbox.scala @@ -3,6 +3,7 @@ */ package akka.actor.mailbox +import java.util.concurrent.TimeUnit.MILLISECONDS import akka.actor.LocalActorRef import akka.util.Duration import akka.AkkaException @@ -21,19 +22,17 @@ class ZooKeeperBasedMailboxException(message: String) extends AkkaException(mess */ class ZooKeeperBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) with DurableMessageSerialization { - val zkServerAddresses = app.config.getString("akka.actor.mailbox.zookeeper.server-addresses", "localhost:2181") - def defaultTimeUnit = app.AkkaConfig.DefaultTimeUnit - val sessionTimeout = Duration(app.config.getInt("akka.actor.mailbox.zookeeper.session-timeout", 60), defaultTimeUnit).toMillis.toInt - val connectionTimeout = Duration(app.config.getInt("akka.actor.mailbox.zookeeper.connection-timeout", 60), defaultTimeUnit).toMillis.toInt - val blockingQueue = app.config.getBool("akka.actor.mailbox.zookeeper.blocking-queue", true) - + private val settings = ZooKeeperBasedMailboxExtension(owner.system).settings val queueNode = "/queues" val queuePathTemplate = queueNode + "/%s" - val log = Logging(app, this) + val log = Logging(system, "ZooKeeperBasedMailbox") - private val zkClient = new AkkaZkClient(zkServerAddresses, sessionTimeout, connectionTimeout) - private val queue = new ZooKeeperQueue[Array[Byte]](zkClient, 
queuePathTemplate.format(name), blockingQueue) + private val zkClient = new AkkaZkClient( + settings.ZkServerAddresses, + settings.SessionTimeout, + settings.ConnectionTimeout) + private val queue = new ZooKeeperQueue[Array[Byte]](zkClient, queuePathTemplate.format(name), settings.BlockingQueue) def enqueue(receiver: ActorRef, envelope: Envelope) { log.debug("ENQUEUING message in zookeeper-based mailbox [%s]".format(envelope)) diff --git a/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailboxExtension.scala b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailboxExtension.scala new file mode 100644 index 0000000000..a08df43bf5 --- /dev/null +++ b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailboxExtension.scala @@ -0,0 +1,56 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. + */ +package akka.actor.mailbox + +import akka.actor.ActorSystem +import akka.actor.ExtensionKey +import akka.actor.Extension +import akka.actor.ActorSystemImpl +import com.typesafe.config.Config +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigParseOptions +import com.typesafe.config.ConfigRoot +import akka.util.Duration +import java.util.concurrent.TimeUnit.MILLISECONDS + +object ZooKeeperBasedMailboxExtensionKey extends ExtensionKey[ZooKeeperBasedMailboxExtension] + +object ZooKeeperBasedMailboxExtension { + def apply(system: ActorSystem): ZooKeeperBasedMailboxExtension = { + if (!system.hasExtension(ZooKeeperBasedMailboxExtensionKey)) { + system.registerExtension(new ZooKeeperBasedMailboxExtension) + } + system.extension(ZooKeeperBasedMailboxExtensionKey) + } + + class Settings(cfg: Config) { + private def referenceConfig: Config = + ConfigFactory.parseResource(classOf[ActorSystem], "/akka-zookeeper-mailbox-reference.conf", + ConfigParseOptions.defaults.setAllowMissing(false)) + val config: ConfigRoot = 
ConfigFactory.emptyRoot("akka-zookeeper-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve() + + import config._ + + val ZkServerAddresses = getString("akka.actor.mailbox.zookeeper.server-addresses") + val SessionTimeout = Duration(getMilliseconds("akka.actor.mailbox.zookeeper.session-timeout"), MILLISECONDS) + val ConnectionTimeout = Duration(getMilliseconds("akka.actor.mailbox.zookeeper.connection-timeout"), MILLISECONDS) + val BlockingQueue = getBoolean("akka.actor.mailbox.zookeeper.blocking-queue") + + } +} + +class ZooKeeperBasedMailboxExtension extends Extension[ZooKeeperBasedMailboxExtension] { + import ZooKeeperBasedMailboxExtension._ + @volatile + private var _settings: Settings = _ + + def key = ZooKeeperBasedMailboxExtensionKey + + def init(system: ActorSystemImpl) { + _settings = new Settings(system.applicationConfig) + } + + def settings: Settings = _settings + +} \ No newline at end of file diff --git a/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/cluster/zookeeper/AkkaZkClient.scala b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/cluster/zookeeper/AkkaZkClient.scala index fd27d894bf..4dbafdf6f1 100644 --- a/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/cluster/zookeeper/AkkaZkClient.scala +++ b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/cluster/zookeeper/AkkaZkClient.scala @@ -6,15 +6,16 @@ package akka.cluster.zookeeper import org.I0Itec.zkclient._ import org.I0Itec.zkclient.serialize._ import org.I0Itec.zkclient.exception._ +import akka.util.Duration /** * ZooKeeper client. Holds the ZooKeeper connection and manages its session. 
*/ class AkkaZkClient(zkServers: String, - sessionTimeout: Int, - connectionTimeout: Int, + sessionTimeout: Duration, + connectionTimeout: Duration, zkSerializer: ZkSerializer = new SerializableSerializer) - extends ZkClient(zkServers, sessionTimeout, connectionTimeout, zkSerializer) { + extends ZkClient(zkServers, sessionTimeout.toMillis.toInt, connectionTimeout.toMillis.toInt, zkSerializer) { def connection: ZkConnection = _connection.asInstanceOf[ZkConnection] diff --git a/akka-durable-mailboxes/akka-mailboxes-common/src/test/resources/zoo.cfg b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/test/resources/zoo.cfg similarity index 100% rename from akka-durable-mailboxes/akka-mailboxes-common/src/test/resources/zoo.cfg rename to akka-durable-mailboxes/akka-zookeeper-mailbox/src/test/resources/zoo.cfg diff --git a/akka-kernel/src/main/scala/akka/servlet/Initializer.scala b/akka-kernel/src/main/scala/akka/servlet/Initializer.scala index 5eeb74cce2..b91e5ae439 100644 --- a/akka-kernel/src/main/scala/akka/servlet/Initializer.scala +++ b/akka-kernel/src/main/scala/akka/servlet/Initializer.scala @@ -14,13 +14,13 @@ import javax.servlet.{ ServletContextListener, ServletContextEvent } /** * This class can be added to web.xml mappings as a listener to start and postStop Akka. * - * + * * ... * * akka.servlet.Initializer * * ... - * + * */ class Initializer extends ServletContextListener { lazy val loader = new AkkaLoader diff --git a/akka-remote/src/main/resources/akka-remote-reference.conf b/akka-remote/src/main/resources/akka-remote-reference.conf new file mode 100644 index 0000000000..4d31549b73 --- /dev/null +++ b/akka-remote/src/main/resources/akka-remote-reference.conf @@ -0,0 +1,77 @@ +##################################### +# Akka Remote Reference Config File # +##################################### + +# This the reference config file has all the default settings. +# Make your edits/overrides in your akka.conf. 
+ +akka { + + remote { + # FIXME rename to transport + layer = "akka.cluster.netty.NettyRemoteSupport" + + use-compression = off + + secure-cookie = "" # Generate your own with '$AKKA_HOME/scripts/generate_config_with_secure_cookie.sh' + # or using 'akka.util.Crypt.generateSecureCookie' + + remote-daemon-ack-timeout = 30s # Timeout for ACK of cluster operations, like checking actor out etc. + + use-passive-connections = on # Reuse inbound connections for outbound messages + + failure-detector { # accrual failure detection config + threshold = 8 # defines the failure detector threshold + # A low threshold is prone to generate many wrong suspicions but ensures a + # quick detection in the event of a real crash. Conversely, a high threshold + # generates fewer mistakes but needs more time to detect actual crashes + max-sample-size = 1000 + } + + server { + hostname = "" # The hostname or ip to bind the remoting to, InetAddress.getLocalHost.getHostAddress is used if empty + port = 2552 # The default remote server port clients should connect to. Default is 2552 (AKKA) + message-frame-size = 1048576 # Increase this if you want to be able to send messages with large payloads + connection-timeout = 120s # Timeout duration + require-cookie = off # Should the remote server require that its peers share the same secure-cookie (defined in the 'remote' section)? + untrusted-mode = off # Enable untrusted mode for full security of server managed actors, allows untrusted clients to connect. 
+ backlog = 4096 # Sets the size of the connection backlog + } + + client { + buffering { + retry-message-send-on-failure = off # Should message buffering on remote client error be used (buffer flushed on successful reconnect) + capacity = -1 # If negative (or zero) then an unbounded mailbox is used (default) + # If positive then a bounded mailbox is used and the capacity is set using the property + } + reconnect-delay = 5s + read-timeout = 3600s + message-frame-size = 1048576 + reconnection-time-window = 600s # Maximum time window that a client should try to reconnect for + } + } + + // TODO cluster config will go into akka-cluster-reference.conf when we enable that module + cluster { + name = "test-cluster" + nodename = "" + zookeeper-server-addresses = "localhost:2181" # comma-separated list of ':' elements + max-time-to-wait-until-connected = 30s + session-timeout = 60s + connection-timeout = 60s + include-ref-node-in-replica-set = on # Can a replica be instantiated on the same node as the cluster reference to the actor + # Default: on + log-directory = "_akka_cluster" # Where ZooKeeper should store the logs and data files + + replication { + digest-type = "MAC" # Options: CRC32 (cheap & unsafe), MAC (expensive & secure using password) + password = "secret" # FIXME: store open in file? 
+ ensemble-size = 3 + quorum-size = 2 + snapshot-frequency = 1000 # The number of messages that should be logged between every actor snapshot + timeout = 30s # Timeout for asynchronous (write-behind) operations + } + } + + +} diff --git a/akka-remote/src/main/scala/akka/remote/AccrualFailureDetector.scala b/akka-remote/src/main/scala/akka/remote/AccrualFailureDetector.scala index 8a6d1fb8fa..01244a5fad 100644 --- a/akka-remote/src/main/scala/akka/remote/AccrualFailureDetector.scala +++ b/akka-remote/src/main/scala/akka/remote/AccrualFailureDetector.scala @@ -25,10 +25,10 @@ import akka.actor.ActorSystem */ class AccrualFailureDetector(val threshold: Int = 8, val maxSampleSize: Int = 1000) { - def this(app: ActorSystem) { + def this(system: ActorSystem) { this( - app.config.getInt("akka.remote.failure-detector.theshold", 8), - app.config.getInt("akka.remote.failure-detector.max-sample-size", 1000)) + RemoteExtension(system).settings.FailureDetectorThreshold, + RemoteExtension(system).settings.FailureDetectorMaxSampleSize) } private final val PhiFactor = 1.0 / math.log(10.0) diff --git a/akka-remote/src/main/scala/akka/remote/BootableRemoteActorService.scala b/akka-remote/src/main/scala/akka/remote/BootableRemoteActorService.scala index 07bc74ad30..e3bd903c07 100644 --- a/akka-remote/src/main/scala/akka/remote/BootableRemoteActorService.scala +++ b/akka-remote/src/main/scala/akka/remote/BootableRemoteActorService.scala @@ -21,26 +21,26 @@ trait BootableRemoteActorService extends Bootable { def settings: RemoteServerSettings protected lazy val remoteServerThread = new Thread(new Runnable() { - def run = app.remote.start(self.applicationLoader.getOrElse(null)) //Use config host/port + def run = system.remote.start(self.applicationLoader.getOrElse(null)) //Use config host/port }, "Akka RemoteModule Service") def startRemoteService() { remoteServerThread.start() } abstract override def onLoad() { - if (app.reflective.ClusterModule.isEnabled && settings.isRemotingEnabled) 
{ - app.eventHandler.info(this, "Initializing Remote Actors Service...") + if (system.reflective.ClusterModule.isEnabled && settings.isRemotingEnabled) { + system.eventHandler.info(this, "Initializing Remote Actors Service...") startRemoteService() - app.eventHandler.info(this, "Remote Actors Service initialized") + system.eventHandler.info(this, "Remote Actors Service initialized") } super.onLoad() } abstract override def onUnload() { - app.eventHandler.info(this, "Shutting down Remote Actors Service") + system.eventHandler.info(this, "Shutting down Remote Actors Service") - app.remote.shutdown() + system.remote.shutdown() if (remoteServerThread.isAlive) remoteServerThread.join(1000) - app.eventHandler.info(this, "Remote Actors Service has been shut down") + system.eventHandler.info(this, "Remote Actors Service has been shut down") super.onUnload() } } diff --git a/akka-remote/src/main/scala/akka/remote/Gossiper.scala b/akka-remote/src/main/scala/akka/remote/Gossiper.scala index 475f146e3d..3735f6ceaf 100644 --- a/akka-remote/src/main/scala/akka/remote/Gossiper.scala +++ b/akka-remote/src/main/scala/akka/remote/Gossiper.scala @@ -8,17 +8,16 @@ import akka.actor._ import akka.actor.Status._ import akka.event.Logging import akka.util.duration._ +import akka.util.Duration import akka.remote.RemoteProtocol._ import akka.remote.RemoteProtocol.RemoteSystemDaemonMessageType._ - import java.util.concurrent.atomic.AtomicReference import java.security.SecureRandom import System.{ currentTimeMillis ⇒ newTimestamp } - import scala.collection.immutable.Map import scala.annotation.tailrec - import com.google.protobuf.ByteString +import akka.serialization.SerializationExtension /** * Interface for node membership change listener. 
@@ -101,13 +100,15 @@ class Gossiper(remote: Remote) { currentGossip: Gossip, nodeMembershipChangeListeners: Set[NodeMembershipChangeListener] = Set.empty[NodeMembershipChangeListener]) - private val app = remote.app - private val log = Logging(app, this) + private val system = remote.system + private val remoteExtension = RemoteExtension(system) + private val serializationExtension = SerializationExtension(system) + private val log = Logging(system, "Gossiper") private val failureDetector = remote.failureDetector - private val connectionManager = new RemoteConnectionManager(app, remote, Map.empty[RemoteAddress, ActorRef]) + private val connectionManager = new RemoteConnectionManager(system, remote, Map.empty[RemoteAddress, ActorRef]) private val seeds = Set(address) // FIXME read in list of seeds from config - private val address = app.address + private val address = system.asInstanceOf[ActorSystemImpl].provider.rootPath.remoteAddress private val nodeFingerprint = address.## private val random = SecureRandom.getInstance("SHA1PRNG") @@ -122,8 +123,8 @@ class Gossiper(remote: Remote) { { // start periodic gossip and cluster scrutinization - default is run them every second with 1/2 second in between - app.scheduler schedule (() ⇒ initateGossip(), initalDelayForGossip.toSeconds, gossipFrequency.toSeconds, timeUnit) - app.scheduler schedule (() ⇒ scrutinize(), initalDelayForGossip.toSeconds, gossipFrequency.toSeconds, timeUnit) + system.scheduler schedule (() ⇒ initateGossip(), Duration(initalDelayForGossip.toSeconds, timeUnit), Duration(gossipFrequency.toSeconds, timeUnit)) + system.scheduler schedule (() ⇒ scrutinize(), Duration(initalDelayForGossip.toSeconds, timeUnit), Duration(gossipFrequency.toSeconds, timeUnit)) } /** @@ -153,7 +154,7 @@ class Gossiper(remote: Remote) { node ← oldAvailableNodes if connectionManager.connectionFor(node).isEmpty } { - val connectionFactory = () ⇒ RemoteActorRef(remote.server, gossipingNode, remote.remoteDaemon.path, None) + val 
connectionFactory = () ⇒ RemoteActorRef(remote.system.provider, remote.server, gossipingNode, remote.remoteDaemon.path, None) connectionManager.putIfAbsent(node, connectionFactory) // create a new remote connection to the new node oldState.nodeMembershipChangeListeners foreach (_ nodeConnected node) // notify listeners about the new nodes } @@ -237,7 +238,7 @@ class Gossiper(remote: Remote) { throw new IllegalStateException("Connection for [" + peer + "] is not set up")) try { - (connection ? (toRemoteMessage(newGossip), remote.remoteSystemDaemonAckTimeout)).as[Status] match { + (connection ? (toRemoteMessage(newGossip), remoteExtension.settings.RemoteSystemDaemonAckTimeout)).as[Status] match { case Some(Success(receiver)) ⇒ log.debug("Gossip sent to [{}] was successfully received", receiver) @@ -299,7 +300,7 @@ class Gossiper(remote: Remote) { } private def toRemoteMessage(gossip: Gossip): RemoteProtocol.RemoteSystemDaemonMessageProtocol = { - val gossipAsBytes = app.serialization.serialize(gossip) match { + val gossipAsBytes = serializationExtension.serialization.serialize(gossip) match { case Left(error) ⇒ throw error case Right(bytes) ⇒ bytes } diff --git a/akka-remote/src/main/scala/akka/remote/MessageSerializer.scala b/akka-remote/src/main/scala/akka/remote/MessageSerializer.scala index 74f4073bde..628264b207 100644 --- a/akka-remote/src/main/scala/akka/remote/MessageSerializer.scala +++ b/akka-remote/src/main/scala/akka/remote/MessageSerializer.scala @@ -8,18 +8,19 @@ import akka.remote.RemoteProtocol._ import akka.serialization.Serialization import com.google.protobuf.ByteString import akka.actor.ActorSystem +import akka.serialization.SerializationExtension object MessageSerializer { - def deserialize(app: ActorSystem, messageProtocol: MessageProtocol, classLoader: Option[ClassLoader] = None): AnyRef = { + def deserialize(system: ActorSystem, messageProtocol: MessageProtocol, classLoader: Option[ClassLoader] = None): AnyRef = { val clazz = 
loadManifest(classLoader, messageProtocol) - app.serialization.deserialize(messageProtocol.getMessage.toByteArray, + SerializationExtension(system).serialization.deserialize(messageProtocol.getMessage.toByteArray, clazz, classLoader).fold(x ⇒ throw x, identity) } - def serialize(app: ActorSystem, message: AnyRef): MessageProtocol = { + def serialize(system: ActorSystem, message: AnyRef): MessageProtocol = { val builder = MessageProtocol.newBuilder - val bytes = app.serialization.serialize(message).fold(x ⇒ throw x, identity) + val bytes = SerializationExtension(system).serialization.serialize(message).fold(x ⇒ throw x, identity) builder.setMessage(ByteString.copyFrom(bytes)) builder.setMessageManifest(ByteString.copyFromUtf8(message.getClass.getName)) builder.build diff --git a/akka-remote/src/main/scala/akka/remote/NetworkEventStream.scala b/akka-remote/src/main/scala/akka/remote/NetworkEventStream.scala index 2763080121..3376ad9416 100644 --- a/akka-remote/src/main/scala/akka/remote/NetworkEventStream.scala +++ b/akka-remote/src/main/scala/akka/remote/NetworkEventStream.scala @@ -7,7 +7,7 @@ package akka.remote import scala.collection.mutable import akka.actor.{ LocalActorRef, Actor, ActorRef, Props, newUuid } import akka.actor.Actor._ -import akka.actor.ActorSystem +import akka.actor.ActorSystemImpl /** * Stream of all kinds of network events, remote failure and connection events, cluster failure and connection events etc. 
@@ -58,14 +58,14 @@ object NetworkEventStream { } } -class NetworkEventStream(val app: ActorSystem) { +class NetworkEventStream(system: ActorSystemImpl) { import NetworkEventStream._ // FIXME: check that this supervision is correct - private[akka] val sender = app.provider.actorOf( - Props[Channel].copy(dispatcher = app.dispatcherFactory.newPinnedDispatcher("NetworkEventStream")), - app.systemGuardian, "network-event-sender", systemService = true) + private[akka] val sender = system.provider.actorOf(system, + Props[Channel].copy(dispatcher = system.dispatcherFactory.newPinnedDispatcher("NetworkEventStream")), + system.systemGuardian, "network-event-sender", systemService = true) /** * Registers a network event stream listener (asyncronously). diff --git a/akka-remote/src/main/scala/akka/remote/Remote.scala b/akka-remote/src/main/scala/akka/remote/Remote.scala index ef29a8de00..123304c314 100644 --- a/akka-remote/src/main/scala/akka/remote/Remote.scala +++ b/akka-remote/src/main/scala/akka/remote/Remote.scala @@ -15,33 +15,33 @@ import akka.actor.DeploymentConfig._ import akka.serialization.Compression.LZF import akka.remote.RemoteProtocol._ import akka.remote.RemoteProtocol.RemoteSystemDaemonMessageType._ - import java.net.InetSocketAddress - import com.eaio.uuid.UUID import akka.serialization.{ JavaSerializer, Serialization, Serializer, Compression } import akka.dispatch.{ Terminate, Dispatchers, Future, PinnedDispatcher } +import java.util.concurrent.atomic.AtomicLong +import java.util.concurrent.TimeUnit.MILLISECONDS +import akka.serialization.SerializationExtension /** * Remote module - contains remote client and server config, remote server instance, remote daemon, remote dispatchers etc. 
* * @author Jonas Bonér */ -class Remote(val app: ActorSystem) { +class Remote(val system: ActorSystemImpl, val nodename: String) { - val log = Logging(app, this) + val log = Logging(system, "Remote") - import app._ - import app.config - import app.AkkaConfig._ + import system._ + import settings._ - val nodename = app.nodename + private[remote] val remoteExtension = RemoteExtension(system) + private[remote] val serializationExtension = SerializationExtension(system) + private[remote] val remoteAddress = { + RemoteAddress(remoteExtension.settings.serverSettings.Hostname, remoteExtension.settings.serverSettings.Port) + } - // TODO move to AkkaConfig? - val shouldCompressData = config.getBool("akka.remote.use-compression", false) - val remoteSystemDaemonAckTimeout = Duration(config.getInt("akka.remote.remote-daemon-ack-timeout", 30), DefaultTimeUnit).toMillis.toInt - - val failureDetector = new AccrualFailureDetector(app) + val failureDetector = new AccrualFailureDetector(system) // val gossiper = new Gossiper(this) @@ -50,17 +50,18 @@ class Remote(val app: ActorSystem) { // FIXME configure computeGridDispatcher to what? val computeGridDispatcher = dispatcherFactory.newDispatcher("akka:compute-grid").build - private[remote] lazy val remoteDaemonSupervisor = app.actorOf(Props( + // FIXME it is probably better to create another supervisor for handling the children created by handle_* + private[remote] lazy val remoteDaemonSupervisor = system.actorOf(Props( OneForOneStrategy(List(classOf[Exception]), None, None)), "akka-system-remote-supervisor") // is infinite restart what we want? 
private[remote] lazy val remoteDaemon = - app.provider.actorOf( + system.provider.actorOf(system, Props(new RemoteSystemDaemon(this)).withDispatcher(dispatcherFactory.newPinnedDispatcher(remoteDaemonServiceName)), remoteDaemonSupervisor, remoteDaemonServiceName, systemService = true) - private[remote] lazy val remoteClientLifeCycleHandler = app.actorOf(Props(new Actor { + private[remote] lazy val remoteClientLifeCycleHandler = system.actorOf(Props(new Actor { def receive = { case RemoteClientError(cause, remote, address) ⇒ remote.shutdownClientConnection(address) case RemoteClientDisconnected(remote, address) ⇒ remote.shutdownClientConnection(address) @@ -68,22 +69,22 @@ class Remote(val app: ActorSystem) { } }), "akka.remote.RemoteClientLifeCycleListener") - lazy val eventStream = new NetworkEventStream(app) + lazy val eventStream = new NetworkEventStream(system) lazy val server: RemoteSupport = { - val remote = new akka.remote.netty.NettyRemoteSupport(app) + val remote = new akka.remote.netty.NettyRemoteSupport(system) remote.start() //TODO FIXME Any application loader here? 
- app.eventStream.subscribe(eventStream.sender, classOf[RemoteLifeCycleEvent]) - app.eventStream.subscribe(remoteClientLifeCycleHandler, classOf[RemoteLifeCycleEvent]) + system.eventStream.subscribe(eventStream.sender, classOf[RemoteLifeCycleEvent]) + system.eventStream.subscribe(remoteClientLifeCycleHandler, classOf[RemoteLifeCycleEvent]) - // TODO actually register this provider in app in remote mode + // TODO actually register this provider in system in remote mode //provider.register(ActorRefProvider.RemoteProvider, new RemoteActorRefProvider) remote } def start(): Unit = { - val serverAddress = server.app.address //Force init of server + val serverAddress = server.system.asInstanceOf[ActorSystemImpl].provider.rootPath.remoteAddress //Force init of server val daemonAddress = remoteDaemon.address //Force init of daemon log.info("Starting remote server on [{}] and starting remoteDaemon with address [{}]", serverAddress, daemonAddress) } @@ -99,6 +100,7 @@ class Remote(val app: ActorSystem) { class RemoteSystemDaemon(remote: Remote) extends Actor { import remote._ + import remote.{ system ⇒ systemImpl } override def preRestart(reason: Throwable, msg: Option[Any]) { log.debug("RemoteSystemDaemon failed due to [{}] - restarting...", reason) @@ -132,19 +134,19 @@ class RemoteSystemDaemon(remote: Remote) extends Actor { if (message.hasActorPath) { val actorFactoryBytes = - if (shouldCompressData) LZF.uncompress(message.getPayload.toByteArray) else message.getPayload.toByteArray + if (remoteExtension.settings.ShouldCompressData) LZF.uncompress(message.getPayload.toByteArray) else message.getPayload.toByteArray val actorFactory = - app.serialization.deserialize(actorFactoryBytes, classOf[() ⇒ Actor], None) match { + serializationExtension.serialization.deserialize(actorFactoryBytes, classOf[() ⇒ Actor], None) match { case Left(error) ⇒ throw error case Right(instance) ⇒ instance.asInstanceOf[() ⇒ Actor] } - val actorPath = ActorPath(remote.app, message.getActorPath) - 
val parent = actorPath.parent.ref + val actorPath = ActorPath(systemImpl, message.getActorPath) + val parent = system.actorFor(actorPath.parent) if (parent.isDefined) { - app.provider.actorOf(Props(creator = actorFactory), parent.get, actorPath.name) + systemImpl.provider.actorOf(systemImpl, Props(creator = actorFactory), parent.get, actorPath.name) } else { log.error("Parent actor does not exist, ignoring remote system daemon command [{}]", message) } @@ -153,12 +155,13 @@ class RemoteSystemDaemon(remote: Remote) extends Actor { log.error("Actor 'address' for actor to instantiate is not defined, ignoring remote system daemon command [{}]", message) } - sender ! Success(app.address) + sender ! Success(remoteAddress) } catch { case error: Throwable ⇒ //FIXME doesn't seem sensible sender ! Failure(error) throw error } + } // FIXME implement handleRelease @@ -182,40 +185,47 @@ class RemoteSystemDaemon(remote: Remote) extends Actor { // } } + /* + * generate name for temporary actor refs + */ + private val tempNumber = new AtomicLong + def tempName = "$_" + Helpers.base64(tempNumber.getAndIncrement()) + def tempPath = remoteDaemon.path / tempName + // FIXME: handle real remote supervision def handle_fun0_unit(message: RemoteSystemDaemonMessageProtocol) { - new LocalActorRef(app, + new LocalActorRef(systemImpl, Props( context ⇒ { case f: Function0[_] ⇒ try { f() } finally { context.self.stop() } - }).copy(dispatcher = computeGridDispatcher), app.guardian, app.guardian.path / app.provider.tempPath, systemService = true) ! payloadFor(message, classOf[Function0[Unit]]) + }).copy(dispatcher = computeGridDispatcher), remoteDaemon, tempPath, systemService = true) ! payloadFor(message, classOf[Function0[Unit]]) } // FIXME: handle real remote supervision def handle_fun0_any(message: RemoteSystemDaemonMessageProtocol) { - new LocalActorRef(app, + new LocalActorRef(systemImpl, Props( context ⇒ { case f: Function0[_] ⇒ try { sender ! 
f() } finally { context.self.stop() } - }).copy(dispatcher = computeGridDispatcher), app.guardian, app.guardian.path / app.provider.tempPath, systemService = true) forward payloadFor(message, classOf[Function0[Any]]) + }).copy(dispatcher = computeGridDispatcher), remoteDaemon, tempPath, systemService = true) forward payloadFor(message, classOf[Function0[Any]]) } // FIXME: handle real remote supervision def handle_fun1_arg_unit(message: RemoteSystemDaemonMessageProtocol) { - new LocalActorRef(app, + new LocalActorRef(systemImpl, Props( context ⇒ { case (fun: Function[_, _], param: Any) ⇒ try { fun.asInstanceOf[Any ⇒ Unit].apply(param) } finally { context.self.stop() } - }).copy(dispatcher = computeGridDispatcher), app.guardian, app.guardian.path / app.provider.tempPath, systemService = true) ! payloadFor(message, classOf[Tuple2[Function1[Any, Unit], Any]]) + }).copy(dispatcher = computeGridDispatcher), remoteDaemon, tempPath, systemService = true) ! payloadFor(message, classOf[Tuple2[Function1[Any, Unit], Any]]) } // FIXME: handle real remote supervision def handle_fun1_arg_any(message: RemoteSystemDaemonMessageProtocol) { - new LocalActorRef(app, + new LocalActorRef(systemImpl, Props( context ⇒ { case (fun: Function[_, _], param: Any) ⇒ try { sender ! 
fun.asInstanceOf[Any ⇒ Any](param) } finally { context.self.stop() } - }).copy(dispatcher = computeGridDispatcher), app.guardian, app.guardian.path / app.provider.tempPath, systemService = true) forward payloadFor(message, classOf[Tuple2[Function1[Any, Any], Any]]) + }).copy(dispatcher = computeGridDispatcher), remoteDaemon, tempPath, systemService = true) forward payloadFor(message, classOf[Tuple2[Function1[Any, Any], Any]]) } def handleFailover(message: RemoteSystemDaemonMessageProtocol) { @@ -224,7 +234,7 @@ class RemoteSystemDaemon(remote: Remote) extends Actor { } private def payloadFor[T](message: RemoteSystemDaemonMessageProtocol, clazz: Class[T]): T = { - app.serialization.deserialize(message.getPayload.toByteArray, clazz, None) match { + serializationExtension.serialization.deserialize(message.getPayload.toByteArray, clazz, None) match { case Left(error) ⇒ throw error case Right(instance) ⇒ instance.asInstanceOf[T] } @@ -232,18 +242,21 @@ class RemoteSystemDaemon(remote: Remote) extends Actor { } class RemoteMessage(input: RemoteMessageProtocol, remote: RemoteSupport, classLoader: Option[ClassLoader] = None) { + + val provider = remote.system.asInstanceOf[ActorSystemImpl].provider + lazy val sender: ActorRef = if (input.hasSender) - remote.app.provider.deserialize( + provider.deserialize( SerializedActorRef(input.getSender.getHost, input.getSender.getPort, input.getSender.getPath)).getOrElse(throw new IllegalStateException("OHNOES")) else - remote.app.deadLetters + remote.system.deadLetters - lazy val recipient: ActorRef = remote.app.actorFor(input.getRecipient.getPath).getOrElse(remote.app.deadLetters) + lazy val recipient: ActorRef = remote.system.actorFor(input.getRecipient.getPath).getOrElse(remote.system.deadLetters) lazy val payload: Either[Throwable, AnyRef] = if (input.hasException) Left(parseException()) - else Right(MessageSerializer.deserialize(remote.app, input.getMessage, classLoader)) + else Right(MessageSerializer.deserialize(remote.system, 
input.getMessage, classLoader)) protected def parseException(): Throwable = { val exception = input.getException @@ -256,7 +269,7 @@ class RemoteMessage(input: RemoteMessageProtocol, remote: RemoteSupport, classLo .newInstance(exception.getMessage).asInstanceOf[Throwable] } catch { case problem: Exception ⇒ - remote.app.eventStream.publish(Logging.Error(problem, remote, problem.getMessage)) + remote.system.eventStream.publish(Logging.Error(problem, "RemoteMessage", problem.getMessage)) CannotInstantiateRemoteExceptionDueToRemoteProtocolParsingErrorException(problem, classname, exception.getMessage) } } @@ -266,7 +279,9 @@ class RemoteMessage(input: RemoteMessageProtocol, remote: RemoteSupport, classLo trait RemoteMarshallingOps { - def app: ActorSystem + def system: ActorSystem + + protected def useUntrustedMode: Boolean def createMessageSendEnvelope(rmp: RemoteMessageProtocol): AkkaRemoteProtocol = { val arp = AkkaRemoteProtocol.newBuilder @@ -284,7 +299,7 @@ trait RemoteMarshallingOps { * Serializes the ActorRef instance into a Protocol Buffers (protobuf) Message. 
*/ def toRemoteActorRefProtocol(actor: ActorRef): ActorRefProtocol = { - val rep = app.provider.serialize(actor) + val rep = system.asInstanceOf[ActorSystemImpl].provider.serialize(actor) ActorRefProtocol.newBuilder.setHost(rep.hostname).setPort(rep.port).setPath(rep.path).build } @@ -297,7 +312,7 @@ trait RemoteMarshallingOps { message match { case Right(message) ⇒ - messageBuilder.setMessage(MessageSerializer.serialize(app, message.asInstanceOf[AnyRef])) + messageBuilder.setMessage(MessageSerializer.serialize(system, message.asInstanceOf[AnyRef])) case Left(exception) ⇒ messageBuilder.setException(ExceptionProtocol.newBuilder .setClassname(exception.getClass.getName) @@ -310,15 +325,15 @@ trait RemoteMarshallingOps { messageBuilder } - def receiveMessage(remoteMessage: RemoteMessage, untrustedMode: Boolean) { + def receiveMessage(remoteMessage: RemoteMessage) { val recipient = remoteMessage.recipient remoteMessage.payload match { case Left(t) ⇒ throw t case Right(r) ⇒ r match { - case _: Terminate ⇒ if (untrustedMode) throw new SecurityException("RemoteModule server is operating is untrusted mode, can not stop the actor") else recipient.stop() - case _: AutoReceivedMessage if (untrustedMode) ⇒ throw new SecurityException("RemoteModule server is operating is untrusted mode, can not pass on a AutoReceivedMessage to the remote actor") - case m ⇒ recipient.!(m)(remoteMessage.sender) + case _: Terminate ⇒ if (useUntrustedMode) throw new SecurityException("RemoteModule server is operating is untrusted mode, can not stop the actor") else recipient.stop() + case _: AutoReceivedMessage if (useUntrustedMode) ⇒ throw new SecurityException("RemoteModule server is operating is untrusted mode, can not pass on a AutoReceivedMessage to the remote actor") + case m ⇒ recipient.!(m)(remoteMessage.sender) } } } diff --git a/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala b/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala index 
66189632a5..f388cd976a 100644 --- a/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala @@ -13,54 +13,80 @@ import akka.dispatch._ import akka.util.duration._ import akka.config.ConfigurationException import akka.event.{ DeathWatch, Logging } -import akka.serialization.{ Serialization, Serializer, Compression } import akka.serialization.Compression.LZF import akka.remote.RemoteProtocol._ import akka.remote.RemoteProtocol.RemoteSystemDaemonMessageType._ - -import java.net.InetSocketAddress -import java.util.concurrent.ConcurrentHashMap - import com.google.protobuf.ByteString import java.util.concurrent.atomic.AtomicBoolean +import akka.event.EventStream +import java.util.concurrent.ConcurrentHashMap +import akka.dispatch.Promise +import java.net.InetAddress +import akka.serialization.SerializationExtension /** * Remote ActorRefProvider. Starts up actor on remote node and creates a RemoteActorRef representing it. * * @author Jonas Bonér */ -class RemoteActorRefProvider(val app: ActorSystem) extends ActorRefProvider { +class RemoteActorRefProvider( + val settings: ActorSystem.Settings, + val eventStream: EventStream, + val scheduler: Scheduler) extends ActorRefProvider { - val log = Logging(app, this) + val log = Logging(eventStream, "RemoteActorRefProvider") - import java.util.concurrent.ConcurrentHashMap - import akka.dispatch.Promise - - val local = new LocalActorRefProvider(app) - val remote = new Remote(app) + def deathWatch = local.deathWatch + def guardian = local.guardian + def systemGuardian = local.systemGuardian + def nodename = local.nodename + def clustername = local.clustername + def tempName = local.tempName private val actors = new ConcurrentHashMap[String, AnyRef] - private val remoteDaemonConnectionManager = new RemoteConnectionManager(app, remote) + /* + * The problem is that ActorRefs need a reference to the ActorSystem to + * provide their service. 
Hence they cannot be created while the + * constructors of ActorSystem and ActorRefProvider are still running. + * The solution is to split out that last part into an init() method, + * but it also requires these references to be @volatile and lazy. + */ + @volatile + private var system: ActorSystemImpl = _ + private lazy val remoteExtension = RemoteExtension(system) + private lazy val serializationExtension = SerializationExtension(system) + lazy val rootPath: ActorPath = { + val remoteAddress = RemoteAddress(remoteExtension.settings.serverSettings.Hostname, remoteExtension.settings.serverSettings.Port) + new RootActorPath(remoteAddress) + } + private lazy val local = new LocalActorRefProvider(settings, eventStream, scheduler, rootPath, + remoteExtension.settings.NodeName, remoteExtension.settings.ClusterName) + private[akka] lazy val remote = new Remote(system, nodename) + private lazy val remoteDaemonConnectionManager = new RemoteConnectionManager(system, remote) + + def init(_system: ActorSystemImpl) { + system = _system + local.init(_system) + terminationFuture.onComplete(_ ⇒ remote.server.shutdown()) + } private[akka] def theOneWhoWalksTheBubblesOfSpaceTime: ActorRef = local.theOneWhoWalksTheBubblesOfSpaceTime private[akka] def terminationFuture = local.terminationFuture private[akka] def deployer: Deployer = local.deployer - def defaultDispatcher = app.dispatcher - def defaultTimeout = app.AkkaConfig.ActorTimeout + def dispatcher = local.dispatcher + def defaultTimeout = settings.ActorTimeout - def scheduler: Scheduler = local.scheduler + private[akka] def actorOf(system: ActorSystemImpl, props: Props, supervisor: ActorRef, name: String, systemService: Boolean): ActorRef = + actorOf(system, props, supervisor, supervisor.path / name, systemService) - private[akka] def actorOf(props: Props, supervisor: ActorRef, name: String, systemService: Boolean): ActorRef = - actorOf(props, supervisor, supervisor.path / name, systemService) - - private[akka] def 
actorOf(props: Props, supervisor: ActorRef, path: ActorPath, systemService: Boolean): ActorRef = - if (systemService) local.actorOf(props, supervisor, path, systemService) + private[akka] def actorOf(system: ActorSystemImpl, props: Props, supervisor: ActorRef, path: ActorPath, systemService: Boolean): ActorRef = + if (systemService) local.actorOf(system, props, supervisor, path, systemService) else { val name = path.name - val newFuture = Promise[ActorRef](5000)(defaultDispatcher) // FIXME is this proper timeout? + val newFuture = Promise[ActorRef](5000)(dispatcher) // FIXME is this proper timeout? actors.putIfAbsent(path.toString, newFuture) match { // we won the race -- create the actor and resolve the future case null ⇒ @@ -68,17 +94,17 @@ class RemoteActorRefProvider(val app: ActorSystem) extends ActorRefProvider { deployer.lookupDeploymentFor(path.toString) match { case Some(DeploymentConfig.Deploy(_, _, routerType, nrOfInstances, DeploymentConfig.RemoteScope(remoteAddresses))) ⇒ - def isReplicaNode: Boolean = remoteAddresses exists { _ == app.address } + def isReplicaNode: Boolean = remoteAddresses exists { _ == rootPath.remoteAddress } - //app.eventHandler.debug(this, "%s: Deploy Remote Actor with address [%s] connected to [%s]: isReplica(%s)".format(app.defaultAddress, address, remoteAddresses.mkString, isReplicaNode)) + //system.eventHandler.debug(this, "%s: Deploy Remote Actor with address [%s] connected to [%s]: isReplica(%s)".format(system.defaultAddress, address, remoteAddresses.mkString, isReplicaNode)) if (isReplicaNode) { // we are on one of the replica node for this remote actor - local.actorOf(props, supervisor, name, true) //FIXME systemService = true here to bypass Deploy, should be fixed when create-or-get is replaced by get-or-create + local.actorOf(system, props, supervisor, name, true) //FIXME systemService = true here to bypass Deploy, should be fixed when create-or-get is replaced by get-or-create } else { - implicit val dispatcher = if 
(props.dispatcher == Props.defaultDispatcher) app.dispatcher else props.dispatcher - implicit val timeout = app.AkkaConfig.ActorTimeout + implicit val dispatcher = if (props.dispatcher == Props.defaultDispatcher) system.dispatcher else props.dispatcher + implicit val timeout = system.settings.ActorTimeout // we are on the single "reference" node uses the remote actors on the replica nodes val routerFactory: () ⇒ Router = DeploymentConfig.routerTypeFor(routerType) match { @@ -110,7 +136,7 @@ class RemoteActorRefProvider(val app: ActorSystem) extends ActorRefProvider { if (remoteAddresses.size < 1) throw new ConfigurationException( "Actor [%s] configured with ScatterGather router must have at least 1 remote node configured. Found [%s]" .format(name, remoteAddresses.mkString(", "))) - () ⇒ new ScatterGatherFirstCompletedRouter()(defaultDispatcher, defaultTimeout) + () ⇒ new ScatterGatherFirstCompletedRouter()(dispatcher, defaultTimeout) case RouterType.LeastCPU ⇒ sys.error("Router LeastCPU not supported yet") case RouterType.LeastRAM ⇒ sys.error("Router LeastRAM not supported yet") @@ -120,17 +146,17 @@ class RemoteActorRefProvider(val app: ActorSystem) extends ActorRefProvider { val connections = (Map.empty[RemoteAddress, ActorRef] /: remoteAddresses) { (conns, a) ⇒ val remoteAddress = RemoteAddress(a.hostname, a.port) - conns + (remoteAddress -> RemoteActorRef(remote.server, remoteAddress, path, None)) + conns + (remoteAddress -> RemoteActorRef(remote.system.provider, remote.server, remoteAddress, path, None)) } - val connectionManager = new RemoteConnectionManager(app, remote, connections) + val connectionManager = new RemoteConnectionManager(system, remote, connections) - connections.keys foreach { useActorOnNode(_, path.toString, props.creator) } + connections.keys foreach { useActorOnNode(system, _, path.toString, props.creator) } - actorOf(RoutedProps(routerFactory = routerFactory, connectionManager = connectionManager), supervisor, name) + actorOf(system, 
RoutedProps(routerFactory = routerFactory, connectionManager = connectionManager), supervisor, name) } - case deploy ⇒ local.actorOf(props, supervisor, name, systemService) + case deploy ⇒ local.actorOf(system, props, supervisor, name, systemService) } } catch { case e: Exception ⇒ @@ -138,7 +164,7 @@ class RemoteActorRefProvider(val app: ActorSystem) extends ActorRefProvider { throw e } - // actor foreach app.registry.register // only for ActorRegistry backward compat, will be removed later + // actor foreach system.registry.register // only for ActorRegistry backward compat, will be removed later newFuture completeWithResult actor actors.replace(path.toString, newFuture, actor) @@ -152,9 +178,9 @@ class RemoteActorRefProvider(val app: ActorSystem) extends ActorRefProvider { * Copied from LocalActorRefProvider... */ // FIXME: implement supervision - def actorOf(props: RoutedProps, supervisor: ActorRef, name: String): ActorRef = { + def actorOf(system: ActorSystem, props: RoutedProps, supervisor: ActorRef, name: String): ActorRef = { if (props.connectionManager.isEmpty) throw new ConfigurationException("RoutedProps used for creating actor [" + name + "] has zero connections configured; can't create a router") - new RoutedActorRef(app, props, supervisor, name) + new RoutedActorRef(system, props, supervisor, name) } def actorFor(path: Iterable[String]): Option[ActorRef] = actors.get(ActorPath.join(path)) match { @@ -163,6 +189,7 @@ class RemoteActorRefProvider(val app: ActorSystem) extends ActorRefProvider { case future: Future[_] ⇒ Some(future.get.asInstanceOf[ActorRef]) } + // TODO remove me val optimizeLocal = new AtomicBoolean(true) def optimizeLocalScoped_?() = optimizeLocal.get @@ -178,24 +205,24 @@ class RemoteActorRefProvider(val app: ActorSystem) extends ActorRefProvider { private[akka] def deserialize(actor: SerializedActorRef): Option[ActorRef] = { val remoteAddress = RemoteAddress(actor.hostname, actor.port) - if (optimizeLocalScoped_? 
&& remoteAddress == app.address) { + if (optimizeLocalScoped_? && remoteAddress == rootPath.remoteAddress) { local.actorFor(ActorPath.split(actor.path)) } else { - log.debug("{}: Creating RemoteActorRef with address [{}] connected to [{}]", app.address, actor.path, remoteAddress) - Some(RemoteActorRef(remote.server, remoteAddress, ActorPath(app, actor.path), None)) //Should it be None here + log.debug("{}: Creating RemoteActorRef with address [{}] connected to [{}]", rootPath.remoteAddress, actor.path, remoteAddress) + Some(RemoteActorRef(remote.system.provider, remote.server, remoteAddress, rootPath / ActorPath.split(actor.path), None)) //Should it be None here } } /** * Using (checking out) actor on a specific node. */ - def useActorOnNode(remoteAddress: RemoteAddress, actorPath: String, actorFactory: () ⇒ Actor) { - log.debug("[{}] Instantiating Actor [{}] on node [{}]", app.address, actorPath, remoteAddress) + def useActorOnNode(system: ActorSystem, remoteAddress: RemoteAddress, actorPath: String, actorFactory: () ⇒ Actor) { + log.debug("[{}] Instantiating Actor [{}] on node [{}]", rootPath, actorPath, remoteAddress) val actorFactoryBytes = - app.serialization.serialize(actorFactory) match { + serializationExtension.serialization.serialize(actorFactory) match { case Left(error) ⇒ throw error - case Right(bytes) ⇒ if (remote.shouldCompressData) LZF.compress(bytes) else bytes + case Right(bytes) ⇒ if (remoteExtension.settings.ShouldCompressData) LZF.compress(bytes) else bytes } val command = RemoteSystemDaemonMessageProtocol.newBuilder @@ -215,7 +242,7 @@ class RemoteActorRefProvider(val app: ActorSystem) extends ActorRefProvider { private def sendCommandToRemoteNode(connection: ActorRef, command: RemoteSystemDaemonMessageProtocol, withACK: Boolean) { if (withACK) { try { - val f = connection ? (command, remote.remoteSystemDaemonAckTimeout) + val f = connection ? 
(command, remoteExtension.settings.RemoteSystemDaemonAckTimeout) (try f.await.value catch { case _: FutureTimeoutException ⇒ None }) match { case Some(Right(receiver)) ⇒ log.debug("Remote system command sent to [{}] successfully received", receiver) @@ -243,8 +270,6 @@ class RemoteActorRefProvider(val app: ActorSystem) extends ActorRefProvider { private[akka] def ask(message: Any, recipient: ActorRef, within: Timeout): Future[Any] = local.ask(message, recipient, within) - private[akka] def dummyAskSender = local.dummyAskSender - private[akka] def tempPath = local.tempPath } @@ -255,6 +280,7 @@ class RemoteActorRefProvider(val app: ActorSystem) extends ActorRefProvider { * @author Jonas Bonér */ private[akka] case class RemoteActorRef private[akka] ( + provider: ActorRefProvider, remote: RemoteSupport, remoteAddress: RemoteAddress, path: ActorPath, @@ -268,13 +294,13 @@ private[akka] case class RemoteActorRef private[akka] ( def address = remoteAddress + path.toString - def isShutdown: Boolean = !running + def isTerminated: Boolean = !running protected[akka] def sendSystemMessage(message: SystemMessage): Unit = unsupported override def !(message: Any)(implicit sender: ActorRef = null): Unit = remote.send(message, Option(sender), remoteAddress, this, loader) - override def ?(message: Any)(implicit timeout: Timeout): Future[Any] = remote.app.provider.ask(message, this, timeout) + override def ?(message: Any)(implicit timeout: Timeout): Future[Any] = provider.ask(message, this, timeout) def suspend(): Unit = () @@ -290,7 +316,7 @@ private[akka] case class RemoteActorRef private[akka] ( } @throws(classOf[java.io.ObjectStreamException]) - private def writeReplace(): AnyRef = remote.app.provider.serialize(this) + private def writeReplace(): AnyRef = provider.serialize(this) def startsWatching(actorRef: ActorRef): ActorRef = unsupported //FIXME Implement diff --git a/akka-remote/src/main/scala/akka/remote/RemoteConfig.scala 
b/akka-remote/src/main/scala/akka/remote/RemoteConfig.scala deleted file mode 100644 index 4f1d161dc1..0000000000 --- a/akka-remote/src/main/scala/akka/remote/RemoteConfig.scala +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - */ - -package akka.remote - -import akka.util.Duration -import akka.config.{ Configuration, ConfigurationException } -import java.util.concurrent.TimeUnit - -class RemoteClientSettings(config: Configuration, defaultTimeUnit: TimeUnit) { - val SECURE_COOKIE: Option[String] = config.getString("akka.remote.secure-cookie", "") match { - case "" ⇒ None - case cookie ⇒ Some(cookie) - } - - val RECONNECTION_TIME_WINDOW = Duration(config.getInt("akka.remote.client.reconnection-time-window", 600), defaultTimeUnit).toMillis - val READ_TIMEOUT = Duration(config.getInt("akka.remote.client.read-timeout", 3600), defaultTimeUnit) - val RECONNECT_DELAY = Duration(config.getInt("akka.remote.client.reconnect-delay", 5), defaultTimeUnit) - val MESSAGE_FRAME_SIZE = config.getInt("akka.remote.client.message-frame-size", 1048576) -} - -class RemoteServerSettings(config: Configuration, defaultTimeUnit: TimeUnit) { - val isRemotingEnabled = config.getList("akka.enabled-modules").exists(_ == "cluster") //TODO FIXME Shouldn't this be "remote"? 
- val MESSAGE_FRAME_SIZE = config.getInt("akka.remote.server.message-frame-size", 1048576) - val SECURE_COOKIE = config.getString("akka.remote.secure-cookie") - val REQUIRE_COOKIE = { - val requireCookie = config.getBool("akka.remote.server.require-cookie", false) - if (isRemotingEnabled && requireCookie && SECURE_COOKIE.isEmpty) throw new ConfigurationException( - "Configuration option 'akka.remote.server.require-cookie' is turned on but no secure cookie is defined in 'akka.remote.secure-cookie'.") - requireCookie - } - - val USE_PASSIVE_CONNECTIONS = config.getBool("akka.remote.use-passive-connections", false) - - val UNTRUSTED_MODE = config.getBool("akka.remote.server.untrusted-mode", false) - val PORT = config.getInt("akka.remote.server.port", 2552) - val CONNECTION_TIMEOUT = Duration(config.getInt("akka.remote.server.connection-timeout", 100), defaultTimeUnit) - - val BACKLOG = config.getInt("akka.remote.server.backlog", 4096) -} diff --git a/akka-remote/src/main/scala/akka/remote/RemoteConnectionManager.scala b/akka-remote/src/main/scala/akka/remote/RemoteConnectionManager.scala index dab5c579e6..7b739b6199 100644 --- a/akka-remote/src/main/scala/akka/remote/RemoteConnectionManager.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteConnectionManager.scala @@ -20,12 +20,12 @@ import java.util.concurrent.atomic.AtomicReference * @author Jonas Bonér */ class RemoteConnectionManager( - app: ActorSystem, + system: ActorSystem, remote: Remote, initialConnections: Map[RemoteAddress, ActorRef] = Map.empty[RemoteAddress, ActorRef]) extends ConnectionManager { - val log = Logging(app, this) + val log = Logging(system, "RemoteConnectionManager") // FIXME is this VersionedIterable really needed? It is not used I think. Complicates API. See 'def connections' etc. 
case class State(version: Long, connections: Map[RemoteAddress, ActorRef]) @@ -149,5 +149,5 @@ class RemoteConnectionManager( } private[remote] def newConnection(remoteAddress: RemoteAddress, actorPath: ActorPath) = - RemoteActorRef(remote.server, remoteAddress, actorPath, None) + RemoteActorRef(remote.system.provider, remote.server, remoteAddress, actorPath, None) } diff --git a/akka-remote/src/main/scala/akka/remote/RemoteExtension.scala b/akka-remote/src/main/scala/akka/remote/RemoteExtension.scala new file mode 100644 index 0000000000..e897bcf713 --- /dev/null +++ b/akka-remote/src/main/scala/akka/remote/RemoteExtension.scala @@ -0,0 +1,110 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. + */ +package akka.remote + +import akka.actor.ActorSystem +import akka.actor.ExtensionKey +import akka.actor.Extension +import akka.actor.ActorSystemImpl +import com.typesafe.config.Config +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigParseOptions +import com.typesafe.config.ConfigRoot +import akka.util.Duration +import java.util.concurrent.TimeUnit.MILLISECONDS +import java.net.InetAddress +import akka.config.ConfigurationException +import com.eaio.uuid.UUID + +object RemoteExtensionKey extends ExtensionKey[RemoteExtension] + +object RemoteExtension { + def apply(system: ActorSystem): RemoteExtension = { + if (!system.hasExtension(RemoteExtensionKey)) { + system.registerExtension(new RemoteExtension) + } + system.extension(RemoteExtensionKey) + } + + class Settings(cfg: Config) { + private def referenceConfig: Config = + ConfigFactory.parseResource(classOf[ActorSystem], "/akka-remote-reference.conf", + ConfigParseOptions.defaults.setAllowMissing(false)) + val config: ConfigRoot = ConfigFactory.emptyRoot("akka-remote").withFallback(cfg).withFallback(referenceConfig).resolve() + + import config._ + + val RemoteTransport = getString("akka.remote.layer") + val FailureDetectorThreshold = getInt("akka.remote.failure-detector.threshold") + val 
FailureDetectorMaxSampleSize = getInt("akka.remote.failure-detector.max-sample-size") + val ShouldCompressData = config.getBoolean("akka.remote.use-compression") + val RemoteSystemDaemonAckTimeout = Duration(config.getMilliseconds("akka.remote.remote-daemon-ack-timeout"), MILLISECONDS) + + // TODO cluster config will go into akka-cluster-reference.conf when we enable that module + val ClusterName = getString("akka.cluster.name") + + val NodeName: String = config.getString("akka.cluster.nodename") match { + case "" ⇒ new UUID().toString + case value ⇒ value + } + + val serverSettings = new RemoteServerSettings + val clientSettings = new RemoteClientSettings + + class RemoteClientSettings { + val SecureCookie: Option[String] = config.getString("akka.remote.secure-cookie") match { + case "" ⇒ None + case cookie ⇒ Some(cookie) + } + + val ReconnectionTimeWindow = Duration(config.getMilliseconds("akka.remote.client.reconnection-time-window"), MILLISECONDS) + val ReadTimeout = Duration(config.getMilliseconds("akka.remote.client.read-timeout"), MILLISECONDS) + val ReconnectDelay = Duration(config.getMilliseconds("akka.remote.client.reconnect-delay"), MILLISECONDS) + val MessageFrameSize = config.getInt("akka.remote.client.message-frame-size") + } + + class RemoteServerSettings { + import scala.collection.JavaConverters._ + val MessageFrameSize = config.getInt("akka.remote.server.message-frame-size") + val SecureCookie: Option[String] = config.getString("akka.remote.secure-cookie") match { + case "" ⇒ None + case cookie ⇒ Some(cookie) + } + val RequireCookie = { + val requireCookie = config.getBoolean("akka.remote.server.require-cookie") + if (requireCookie && SecureCookie.isEmpty) throw new ConfigurationException( + "Configuration option 'akka.remote.server.require-cookie' is turned on but no secure cookie is defined in 'akka.remote.secure-cookie'.") + requireCookie + } + + val UsePassiveConnections = config.getBoolean("akka.remote.use-passive-connections") + + val 
UntrustedMode = config.getBoolean("akka.remote.server.untrusted-mode") + val Hostname = config.getString("akka.remote.server.hostname") match { + case "" ⇒ InetAddress.getLocalHost.getHostAddress + case value ⇒ value + } + val Port = config.getInt("akka.remote.server.port") + val ConnectionTimeout = Duration(config.getMilliseconds("akka.remote.server.connection-timeout"), MILLISECONDS) + + val Backlog = config.getInt("akka.remote.server.backlog") + } + + } +} + +class RemoteExtension extends Extension[RemoteExtension] { + import RemoteExtension._ + @volatile + private var _settings: Settings = _ + + def key = RemoteExtensionKey + + def init(system: ActorSystemImpl) { + _settings = new Settings(system.applicationConfig) + } + + def settings: Settings = _settings + +} \ No newline at end of file diff --git a/akka-remote/src/main/scala/akka/remote/netty/NettyRemoteSupport.scala b/akka-remote/src/main/scala/akka/remote/netty/NettyRemoteSupport.scala index 7545a45f1c..9424b93372 100644 --- a/akka-remote/src/main/scala/akka/remote/netty/NettyRemoteSupport.scala +++ b/akka-remote/src/main/scala/akka/remote/netty/NettyRemoteSupport.scala @@ -25,6 +25,7 @@ import akka.actor.ActorSystem import akka.event.Logging import locks.ReentrantReadWriteLock import org.jboss.netty.channel._ +import akka.actor.ActorSystemImpl class RemoteClientMessageBufferException(message: String, cause: Throwable = null) extends AkkaException(message, cause) { def this(msg: String) = this(msg, null) @@ -39,7 +40,7 @@ abstract class RemoteClient private[akka] ( val remoteSupport: NettyRemoteSupport, val remoteAddress: RemoteAddress) { - val log = Logging(remoteSupport.app, this) + val log = Logging(remoteSupport.system, "RemoteClient") val name = simpleName(this) + "@" + remoteAddress @@ -140,6 +141,8 @@ class ActiveRemoteClient private[akka] ( def currentChannel = connection.getChannel + private val senderRemoteAddress = 
remoteSupport.system.asInstanceOf[ActorSystemImpl].provider.rootPath.remoteAddress + /** * Connect to remote server. */ @@ -147,8 +150,11 @@ class ActiveRemoteClient private[akka] ( def sendSecureCookie(connection: ChannelFuture) { val handshake = RemoteControlProtocol.newBuilder.setCommandType(CommandType.CONNECT) - if (SECURE_COOKIE.nonEmpty) handshake.setCookie(SECURE_COOKIE.get) - handshake.setOrigin(RemoteProtocol.AddressProtocol.newBuilder.setHostname(remoteSupport.app.address.hostname).setPort(remoteSupport.app.address.port).build) + if (SecureCookie.nonEmpty) handshake.setCookie(SecureCookie.get) + handshake.setOrigin(RemoteProtocol.AddressProtocol.newBuilder + .setHostname(senderRemoteAddress.hostname) + .setPort(senderRemoteAddress.port) + .build) connection.getChannel.write(remoteSupport.createControlEnvelope(handshake.build)) } @@ -229,7 +235,7 @@ class ActiveRemoteClient private[akka] ( reconnectionTimeWindowStart = System.currentTimeMillis true } else { - val timeLeft = (RECONNECTION_TIME_WINDOW - (System.currentTimeMillis - reconnectionTimeWindowStart)) > 0 + val timeLeft = (ReconnectionTimeWindow.toMillis - (System.currentTimeMillis - reconnectionTimeWindowStart)) > 0 if (timeLeft) log.info("Will try to reconnect to remote server for another [{}] milliseconds", timeLeft) @@ -253,8 +259,8 @@ class ActiveRemoteClientPipelineFactory( import client.remoteSupport.clientSettings._ def getPipeline: ChannelPipeline = { - val timeout = new ReadTimeoutHandler(timer, READ_TIMEOUT.length, READ_TIMEOUT.unit) - val lenDec = new LengthFieldBasedFrameDecoder(MESSAGE_FRAME_SIZE, 0, 4, 0, 4) + val timeout = new ReadTimeoutHandler(timer, ReadTimeout.length, ReadTimeout.unit) + val lenDec = new LengthFieldBasedFrameDecoder(MessageFrameSize, 0, 4, 0, 4) val lenPrep = new LengthFieldPrepender(4) val protobufDec = new ProtobufDecoder(AkkaRemoteProtocol.getDefaultInstance) val protobufEnc = new ProtobufEncoder @@ -291,7 +297,7 @@ class ActiveRemoteClientHandler( } case 
arp: AkkaRemoteProtocol if arp.hasMessage ⇒ - client.remoteSupport.receiveMessage(new RemoteMessage(arp.getMessage, client.remoteSupport, client.loader), untrustedMode = false) //TODO FIXME Sensible or not? + client.remoteSupport.receiveMessage(new RemoteMessage(arp.getMessage, client.remoteSupport, client.loader)) case other ⇒ throw new RemoteClientException("Unknown message received in remote client handler: " + other, client.remoteSupport, client.remoteAddress) @@ -310,7 +316,7 @@ class ActiveRemoteClientHandler( client.connect(reconnectIfAlreadyConnected = true) } } - }, client.remoteSupport.clientSettings.RECONNECT_DELAY.toMillis, TimeUnit.MILLISECONDS) + }, client.remoteSupport.clientSettings.ReconnectDelay.toMillis, TimeUnit.MILLISECONDS) } else runOnceNow { client.remoteSupport.shutdownClientConnection(remoteAddress) // spawn in another thread } @@ -349,19 +355,24 @@ class ActiveRemoteClientHandler( /** * Provides the implementation of the Netty remote support */ -class NettyRemoteSupport(_app: ActorSystem) extends RemoteSupport(_app) with RemoteMarshallingOps { +class NettyRemoteSupport(_system: ActorSystem) extends RemoteSupport(_system) with RemoteMarshallingOps { + val log = Logging(system, "NettyRemoteSupport") - val serverSettings = new RemoteServerSettings(app.config, app.AkkaConfig.DefaultTimeUnit) - val clientSettings = new RemoteClientSettings(app.config, app.AkkaConfig.DefaultTimeUnit) + val serverSettings = RemoteExtension(system).settings.serverSettings + val clientSettings = RemoteExtension(system).settings.clientSettings private val remoteClients = new HashMap[RemoteAddress, RemoteClient] private val clientsLock = new ReentrantReadWriteLock - protected[akka] def send(message: Any, - senderOption: Option[ActorRef], - recipientAddress: RemoteAddress, - recipient: ActorRef, - loader: Option[ClassLoader]): Unit = { + override protected def useUntrustedMode = serverSettings.UntrustedMode + + protected[akka] def send( + message: Any, + 
senderOption: Option[ActorRef], + recipientAddress: RemoteAddress, + recipient: ActorRef, + loader: Option[ClassLoader]): Unit = { + clientsLock.readLock.lock try { val client = remoteClients.get(recipientAddress) match { @@ -447,7 +458,7 @@ class NettyRemoteSupport(_app: ActorSystem) extends RemoteSupport(_app) with Rem def name = currentServer.get match { case Some(server) ⇒ server.name - case None ⇒ "Non-running NettyRemoteServer@" + app.address + case None ⇒ "Non-running NettyRemoteServer@" + system.asInstanceOf[ActorSystemImpl].provider.rootPath.remoteAddress } private val _isRunning = new Switch(false) @@ -473,15 +484,16 @@ class NettyRemoteSupport(_app: ActorSystem) extends RemoteSupport(_app) with Rem remoteClients.clear() } finally { clientsLock.writeLock().unlock() + currentServer.getAndSet(None) foreach { _.shutdown() } } - currentServer.getAndSet(None) foreach { _.shutdown() } } } class NettyRemoteServer(val remoteSupport: NettyRemoteSupport, val loader: Option[ClassLoader]) { - val log = Logging(remoteSupport.app, this) + val log = Logging(remoteSupport.system, "NettyRemoteServer") import remoteSupport.serverSettings._ - import remoteSupport.app.address + + val address = remoteSupport.system.asInstanceOf[ActorSystemImpl].provider.rootPath.remoteAddress val name = "NettyRemoteServer@" + address @@ -494,11 +506,11 @@ class NettyRemoteServer(val remoteSupport: NettyRemoteSupport, val loader: Optio val pipelineFactory = new RemoteServerPipelineFactory(name, openChannels, loader, remoteSupport) bootstrap.setPipelineFactory(pipelineFactory) - bootstrap.setOption("backlog", BACKLOG) + bootstrap.setOption("backlog", Backlog) bootstrap.setOption("child.tcpNoDelay", true) bootstrap.setOption("child.keepAlive", true) bootstrap.setOption("child.reuseAddress", true) - bootstrap.setOption("child.connectTimeoutMillis", CONNECTION_TIMEOUT.toMillis) + bootstrap.setOption("child.connectTimeoutMillis", ConnectionTimeout.toMillis) openChannels.add(bootstrap.bind(new 
InetSocketAddress(address.hostname, address.port))) remoteSupport.notifyListeners(RemoteServerStarted(remoteSupport)) @@ -507,8 +519,8 @@ class NettyRemoteServer(val remoteSupport: NettyRemoteSupport, val loader: Optio try { val shutdownSignal = { val b = RemoteControlProtocol.newBuilder.setCommandType(CommandType.SHUTDOWN) - if (SECURE_COOKIE.nonEmpty) - b.setCookie(SECURE_COOKIE.get) + if (SecureCookie.nonEmpty) + b.setCookie(SecureCookie.get) b.build } openChannels.write(remoteSupport.createControlEnvelope(shutdownSignal)).awaitUninterruptibly @@ -534,12 +546,12 @@ class RemoteServerPipelineFactory( import remoteSupport.serverSettings._ def getPipeline: ChannelPipeline = { - val lenDec = new LengthFieldBasedFrameDecoder(MESSAGE_FRAME_SIZE, 0, 4, 0, 4) + val lenDec = new LengthFieldBasedFrameDecoder(MessageFrameSize, 0, 4, 0, 4) val lenPrep = new LengthFieldPrepender(4) val protobufDec = new ProtobufDecoder(AkkaRemoteProtocol.getDefaultInstance) val protobufEnc = new ProtobufEncoder - val authenticator = if (REQUIRE_COOKIE) new RemoteServerAuthenticationHandler(SECURE_COOKIE) :: Nil else Nil + val authenticator = if (RequireCookie) new RemoteServerAuthenticationHandler(SecureCookie) :: Nil else Nil val remoteServer = new RemoteServerHandler(name, openChannels, loader, remoteSupport) val stages: List[ChannelHandler] = lenDec :: protobufDec :: lenPrep :: protobufEnc :: authenticator ::: remoteServer :: Nil new StaticChannelPipeline(stages: _*) @@ -583,7 +595,7 @@ class RemoteServerHandler( val applicationLoader: Option[ClassLoader], val remoteSupport: NettyRemoteSupport) extends SimpleChannelUpstreamHandler { - val log = Logging(remoteSupport.app, this) + val log = Logging(remoteSupport.system, "RemoteServerHandler") import remoteSupport.serverSettings._ @@ -616,7 +628,7 @@ class RemoteServerHandler( override def channelClosed(ctx: ChannelHandlerContext, event: ChannelStateEvent) = getClientAddress(ctx.getChannel) match { case s @ Some(address) ⇒ - if 
(USE_PASSIVE_CONNECTIONS) + if (UsePassiveConnections) remoteSupport.unbindClient(address) remoteSupport.notifyListeners(RemoteServerClientClosed(remoteSupport, s)) case None ⇒ @@ -626,12 +638,12 @@ class RemoteServerHandler( override def messageReceived(ctx: ChannelHandlerContext, event: MessageEvent) = try { event.getMessage match { case remote: AkkaRemoteProtocol if remote.hasMessage ⇒ - remoteSupport.receiveMessage(new RemoteMessage(remote.getMessage, remoteSupport, applicationLoader), UNTRUSTED_MODE) + remoteSupport.receiveMessage(new RemoteMessage(remote.getMessage, remoteSupport, applicationLoader)) case remote: AkkaRemoteProtocol if remote.hasInstruction ⇒ val instruction = remote.getInstruction instruction.getCommandType match { - case CommandType.CONNECT if USE_PASSIVE_CONNECTIONS ⇒ + case CommandType.CONNECT if UsePassiveConnections ⇒ val origin = instruction.getOrigin val inbound = RemoteAddress(origin.getHostname, origin.getPort) val client = new PassiveRemoteClient(event.getChannel, remoteSupport, inbound) diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/AkkaRemoteSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/AkkaRemoteSpec.scala index 003f324217..f41ea0e855 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/AkkaRemoteSpec.scala +++ b/akka-remote/src/multi-jvm/scala/akka/remote/AkkaRemoteSpec.scala @@ -5,6 +5,7 @@ package akka.remote import akka.testkit._ +import akka.actor.ActorSystemImpl abstract class AkkaRemoteSpec extends AkkaSpec with MultiJvmSync { @@ -12,7 +13,7 @@ abstract class AkkaRemoteSpec extends AkkaSpec with MultiJvmSync { * Helper function for accessing the underlying remoting. 
*/ def remote: Remote = { - app.provider match { + system.asInstanceOf[ActorSystemImpl].provider match { case r: RemoteActorRefProvider ⇒ r.remote case _ ⇒ throw new Exception("Remoting is not enabled") } diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode1.conf b/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode1.conf index 732cd00d48..1b1c7b398c 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode1.conf +++ b/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode1.conf @@ -1,5 +1,11 @@ -akka.actor.provider = "akka.remote.RemoteActorRefProvider" -akka.loglevel = "WARNING" -akka.actor.deployment./app/service-hello.router = "direct" -akka.actor.deployment./app/service-hello.nr-of-instances = 1 -akka.actor.deployment./app/service-hello.remote.nodes = ["localhost:9991"] +akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "direct" + /app/service-hello.nr-of-instances = 1 + /app/service-hello.remote.nodes = ["localhost:9991"] + } + } +} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode1.opts b/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode1.opts index 6562171945..a3218fe698 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode1.opts +++ b/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode1.opts @@ -1 +1 @@ --Dakka.cluster.nodename=node1 -Dakka.remote.hostname=localhost -Dakka.remote.port=9991 +-Dakka.cluster.nodename=node1 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9991 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode2.conf 
b/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode2.conf index 732cd00d48..1b1c7b398c 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode2.conf +++ b/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode2.conf @@ -1,5 +1,11 @@ -akka.actor.provider = "akka.remote.RemoteActorRefProvider" -akka.loglevel = "WARNING" -akka.actor.deployment./app/service-hello.router = "direct" -akka.actor.deployment./app/service-hello.nr-of-instances = 1 -akka.actor.deployment./app/service-hello.remote.nodes = ["localhost:9991"] +akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "direct" + /app/service-hello.nr-of-instances = 1 + /app/service-hello.remote.nodes = ["localhost:9991"] + } + } +} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode2.opts b/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode2.opts index ba38f5b2ce..dcecc85ffb 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode2.opts +++ b/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmNode2.opts @@ -1 +1 @@ --Dakka.cluster.nodename=node2 -Dakka.remote.hostname=localhost -Dakka.remote.port=9992 +-Dakka.cluster.nodename=node2 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9992 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmSpec.scala index 1577066d67..f1a6745d91 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmSpec.scala +++ 
b/akka-remote/src/multi-jvm/scala/akka/remote/direct_routed/DirectRoutedRemoteActorMultiJvmSpec.scala @@ -10,7 +10,7 @@ object DirectRoutedRemoteActorMultiJvmSpec { class SomeActor extends Actor with Serializable { def receive = { - case "identify" ⇒ sender ! app.nodename + case "identify" ⇒ sender ! system.nodename } } } @@ -47,7 +47,7 @@ class DirectRoutedRemoteActorMultiJvmNode2 extends AkkaRemoteSpec { barrier("start") - val actor = app.actorOf[SomeActor]("service-hello") + val actor = system.actorOf[SomeActor]("service-hello") actor.isInstanceOf[RoutedActorRef] must be(true) val result = (actor ? "identify").get diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode1.conf b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode1.conf index 727d892e66..9073ed4ed3 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode1.conf +++ b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode1.conf @@ -1,3 +1,9 @@ -akka.actor.provider = "akka.remote.RemoteActorRefProvider" -akka.loglevel = "WARNING" -akka.actor.deployment./app/service-hello.remote.nodes = ["localhost:9991"] +akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.remote.nodes = ["localhost:9991"] + } + } +} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode1.opts b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode1.opts index 6562171945..a3218fe698 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode1.opts +++ b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode1.opts @@ -1 +1 @@ --Dakka.cluster.nodename=node1 -Dakka.remote.hostname=localhost -Dakka.remote.port=9991 +-Dakka.cluster.nodename=node1 
-Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9991 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode2.conf b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode2.conf index 727d892e66..9073ed4ed3 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode2.conf +++ b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode2.conf @@ -1,3 +1,9 @@ -akka.actor.provider = "akka.remote.RemoteActorRefProvider" -akka.loglevel = "WARNING" -akka.actor.deployment./app/service-hello.remote.nodes = ["localhost:9991"] +akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.remote.nodes = ["localhost:9991"] + } + } +} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode2.opts b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode2.opts index ba38f5b2ce..dcecc85ffb 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode2.opts +++ b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmNode2.opts @@ -1 +1 @@ --Dakka.cluster.nodename=node2 -Dakka.remote.hostname=localhost -Dakka.remote.port=9992 +-Dakka.cluster.nodename=node2 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9992 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmSpec.scala index b1e8f793b9..3be4979964 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmSpec.scala +++ b/akka-remote/src/multi-jvm/scala/akka/remote/new_remote_actor/NewRemoteActorMultiJvmSpec.scala @@ -8,7 +8,7 @@ object NewRemoteActorMultiJvmSpec { class 
SomeActor extends Actor with Serializable { def receive = { - case "identify" ⇒ sender ! app.nodename + case "identify" ⇒ sender ! system.nodename } } } @@ -46,7 +46,7 @@ class NewRemoteActorMultiJvmNode2 extends AkkaRemoteSpec { barrier("start") - val actor = app.actorOf[SomeActor]("service-hello") + val actor = system.actorOf[SomeActor]("service-hello") val result = (actor ? "identify").get result must equal("node1") diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode1.conf b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode1.conf index 7b9fdcd84b..e373bc9c0e 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode1.conf +++ b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode1.conf @@ -1,5 +1,11 @@ -akka.actor.provider = "akka.remote.RemoteActorRefProvider" -akka.loglevel = "WARNING" -akka.actor.deployment./app/service-hello.router = "random" -akka.actor.deployment./app/service-hello.nr-of-instances = 3 -akka.actor.deployment./app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] +akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "random" + /app/service-hello.nr-of-instances = 3 + /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] + } + } +} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode1.opts b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode1.opts index 6562171945..a3218fe698 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode1.opts +++ b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode1.opts @@ -1 +1 @@ 
--Dakka.cluster.nodename=node1 -Dakka.remote.hostname=localhost -Dakka.remote.port=9991 +-Dakka.cluster.nodename=node1 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9991 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode2.conf b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode2.conf index 7b9fdcd84b..b6d6e7b3f9 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode2.conf +++ b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode2.conf @@ -1,5 +1,9 @@ -akka.actor.provider = "akka.remote.RemoteActorRefProvider" -akka.loglevel = "WARNING" -akka.actor.deployment./app/service-hello.router = "random" -akka.actor.deployment./app/service-hello.nr-of-instances = 3 -akka.actor.deployment./app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] +akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment./app/service-hello.router = "random" + deployment./app/service-hello.nr-of-instances = 3 + deployment./app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] + } +} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode2.opts b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode2.opts index ba38f5b2ce..dcecc85ffb 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode2.opts +++ b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode2.opts @@ -1 +1 @@ --Dakka.cluster.nodename=node2 -Dakka.remote.hostname=localhost -Dakka.remote.port=9992 +-Dakka.cluster.nodename=node2 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9992 diff --git 
a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode3.conf b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode3.conf index 7b9fdcd84b..e373bc9c0e 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode3.conf +++ b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode3.conf @@ -1,5 +1,11 @@ -akka.actor.provider = "akka.remote.RemoteActorRefProvider" -akka.loglevel = "WARNING" -akka.actor.deployment./app/service-hello.router = "random" -akka.actor.deployment./app/service-hello.nr-of-instances = 3 -akka.actor.deployment./app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] +akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "random" + /app/service-hello.nr-of-instances = 3 + /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] + } + } +} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode3.opts b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode3.opts index b23510ba4a..cabc575688 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode3.opts +++ b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode3.opts @@ -1 +1 @@ --Dakka.cluster.nodename=node3 -Dakka.remote.hostname=localhost -Dakka.remote.port=9993 +-Dakka.cluster.nodename=node3 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9993 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode4.conf b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode4.conf index 7b9fdcd84b..e373bc9c0e 100644 --- 
a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode4.conf +++ b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode4.conf @@ -1,5 +1,11 @@ -akka.actor.provider = "akka.remote.RemoteActorRefProvider" -akka.loglevel = "WARNING" -akka.actor.deployment./app/service-hello.router = "random" -akka.actor.deployment./app/service-hello.nr-of-instances = 3 -akka.actor.deployment./app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] +akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "random" + /app/service-hello.nr-of-instances = 3 + /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] + } + } +} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode4.opts b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode4.opts index 0d61591255..4c7670d733 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode4.opts +++ b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmNode4.opts @@ -1 +1 @@ --Dakka.cluster.nodename=node4 -Dakka.remote.hostname=localhost -Dakka.remote.port=9994 +-Dakka.cluster.nodename=node4 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9994 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmSpec.scala index 380f4d1712..a5701cccd4 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmSpec.scala +++ b/akka-remote/src/multi-jvm/scala/akka/remote/random_routed/RandomRoutedRemoteActorMultiJvmSpec.scala @@ -9,7 +9,7 @@ object 
RandomRoutedRemoteActorMultiJvmSpec { val NrOfNodes = 4 class SomeActor extends Actor with Serializable { def receive = { - case "hit" ⇒ sender ! app.nodename + case "hit" ⇒ sender ! system.nodename case "end" ⇒ self.stop() } } @@ -70,7 +70,7 @@ class RandomRoutedRemoteActorMultiJvmNode4 extends AkkaRemoteSpec { remote.start() barrier("start") - val actor = app.actorOf[SomeActor]("service-hello") + val actor = system.actorOf[SomeActor]("service-hello") actor.isInstanceOf[RoutedActorRef] must be(true) val connectionCount = NrOfNodes - 1 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode1.conf b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode1.conf index d660a824de..a0ec833383 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode1.conf +++ b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode1.conf @@ -1,5 +1,11 @@ -akka.actor.provider = "akka.remote.RemoteActorRefProvider" -akka.loglevel = "WARNING" -akka.actor.deployment./app/service-hello.router = "round-robin" -akka.actor.deployment./app/service-hello.nr-of-instances = 3 -akka.actor.deployment./app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] +akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "round-robin" + /app/service-hello.nr-of-instances = 3 + /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] + } + } +} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode1.opts b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode1.opts index 6562171945..a3218fe698 100644 --- 
a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode1.opts +++ b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode1.opts @@ -1 +1 @@ --Dakka.cluster.nodename=node1 -Dakka.remote.hostname=localhost -Dakka.remote.port=9991 +-Dakka.cluster.nodename=node1 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9991 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode2.conf b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode2.conf index d660a824de..a0ec833383 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode2.conf +++ b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode2.conf @@ -1,5 +1,11 @@ -akka.actor.provider = "akka.remote.RemoteActorRefProvider" -akka.loglevel = "WARNING" -akka.actor.deployment./app/service-hello.router = "round-robin" -akka.actor.deployment./app/service-hello.nr-of-instances = 3 -akka.actor.deployment./app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] +akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "round-robin" + /app/service-hello.nr-of-instances = 3 + /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] + } + } +} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode2.opts b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode2.opts index ba38f5b2ce..dcecc85ffb 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode2.opts +++ 
b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode2.opts @@ -1 +1 @@ --Dakka.cluster.nodename=node2 -Dakka.remote.hostname=localhost -Dakka.remote.port=9992 +-Dakka.cluster.nodename=node2 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9992 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode3.conf b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode3.conf index d660a824de..a0ec833383 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode3.conf +++ b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode3.conf @@ -1,5 +1,11 @@ -akka.actor.provider = "akka.remote.RemoteActorRefProvider" -akka.loglevel = "WARNING" -akka.actor.deployment./app/service-hello.router = "round-robin" -akka.actor.deployment./app/service-hello.nr-of-instances = 3 -akka.actor.deployment./app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] +akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "round-robin" + /app/service-hello.nr-of-instances = 3 + /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] + } + } +} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode3.opts b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode3.opts index b23510ba4a..cabc575688 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode3.opts +++ b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode3.opts @@ -1 +1 @@ --Dakka.cluster.nodename=node3 
-Dakka.remote.hostname=localhost -Dakka.remote.port=9993 +-Dakka.cluster.nodename=node3 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9993 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode4.conf b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode4.conf index d660a824de..a0ec833383 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode4.conf +++ b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode4.conf @@ -1,5 +1,11 @@ -akka.actor.provider = "akka.remote.RemoteActorRefProvider" -akka.loglevel = "WARNING" -akka.actor.deployment./app/service-hello.router = "round-robin" -akka.actor.deployment./app/service-hello.nr-of-instances = 3 -akka.actor.deployment./app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] +akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "round-robin" + /app/service-hello.nr-of-instances = 3 + /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] + } + } +} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode4.opts b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode4.opts index 0d61591255..4c7670d733 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode4.opts +++ b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmNode4.opts @@ -1 +1 @@ --Dakka.cluster.nodename=node4 -Dakka.remote.hostname=localhost -Dakka.remote.port=9994 +-Dakka.cluster.nodename=node4 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9994 diff 
--git a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmSpec.scala index a076a91786..413d7814a5 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmSpec.scala +++ b/akka-remote/src/multi-jvm/scala/akka/remote/round_robin_routed/RoundRobinRoutedRemoteActorMultiJvmSpec.scala @@ -9,7 +9,7 @@ object RoundRobinRoutedRemoteActorMultiJvmSpec { val NrOfNodes = 4 class SomeActor extends Actor with Serializable { def receive = { - case "hit" ⇒ sender ! app.nodename + case "hit" ⇒ sender ! system.nodename case "end" ⇒ self.stop() } } @@ -70,7 +70,7 @@ class RoundRobinRoutedRemoteActorMultiJvmNode4 extends AkkaRemoteSpec { remote.start() barrier("start") - val actor = app.actorOf[SomeActor]("service-hello") + val actor = system.actorOf[SomeActor]("service-hello") actor.isInstanceOf[RoutedActorRef] must be(true) val connectionCount = NrOfNodes - 1 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode1.conf b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode1.conf index ae28dee91e..80ad72e3de 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode1.conf +++ b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode1.conf @@ -1,5 +1,11 @@ -akka.actor.provider = "akka.remote.RemoteActorRefProvider" -akka.loglevel = "WARNING" -akka.actor.deployment./app/service-hello.router = "scatter-gather" -akka.actor.deployment./app/service-hello.nr-of-instances = 3 -akka.actor.deployment./app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] +akka { + loglevel = "WARNING" + actor { + provider = 
"akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "scatter-gather" + /app/service-hello.nr-of-instances = 3 + /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] + } + } +} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode1.opts b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode1.opts index 6562171945..a3218fe698 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode1.opts +++ b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode1.opts @@ -1 +1 @@ --Dakka.cluster.nodename=node1 -Dakka.remote.hostname=localhost -Dakka.remote.port=9991 +-Dakka.cluster.nodename=node1 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9991 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode2.conf b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode2.conf index ae28dee91e..80ad72e3de 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode2.conf +++ b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode2.conf @@ -1,5 +1,11 @@ -akka.actor.provider = "akka.remote.RemoteActorRefProvider" -akka.loglevel = "WARNING" -akka.actor.deployment./app/service-hello.router = "scatter-gather" -akka.actor.deployment./app/service-hello.nr-of-instances = 3 -akka.actor.deployment./app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] +akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "scatter-gather" + 
/app/service-hello.nr-of-instances = 3 + /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] + } + } +} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode2.opts b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode2.opts index ba38f5b2ce..dcecc85ffb 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode2.opts +++ b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode2.opts @@ -1 +1 @@ --Dakka.cluster.nodename=node2 -Dakka.remote.hostname=localhost -Dakka.remote.port=9992 +-Dakka.cluster.nodename=node2 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9992 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode3.conf b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode3.conf index ae28dee91e..80ad72e3de 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode3.conf +++ b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode3.conf @@ -1,5 +1,11 @@ -akka.actor.provider = "akka.remote.RemoteActorRefProvider" -akka.loglevel = "WARNING" -akka.actor.deployment./app/service-hello.router = "scatter-gather" -akka.actor.deployment./app/service-hello.nr-of-instances = 3 -akka.actor.deployment./app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] +akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "scatter-gather" + /app/service-hello.nr-of-instances = 3 + /app/service-hello.remote.nodes = 
["localhost:9991","localhost:9992","localhost:9993"] + } + } +} diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode3.opts b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode3.opts index b23510ba4a..cabc575688 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode3.opts +++ b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode3.opts @@ -1 +1 @@ --Dakka.cluster.nodename=node3 -Dakka.remote.hostname=localhost -Dakka.remote.port=9993 +-Dakka.cluster.nodename=node3 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9993 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode4.conf b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode4.conf index ae28dee91e..80ad72e3de 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode4.conf +++ b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode4.conf @@ -1,5 +1,11 @@ -akka.actor.provider = "akka.remote.RemoteActorRefProvider" -akka.loglevel = "WARNING" -akka.actor.deployment./app/service-hello.router = "scatter-gather" -akka.actor.deployment./app/service-hello.nr-of-instances = 3 -akka.actor.deployment./app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] +akka { + loglevel = "WARNING" + actor { + provider = "akka.remote.RemoteActorRefProvider" + deployment { + /app/service-hello.router = "scatter-gather" + /app/service-hello.nr-of-instances = 3 + /app/service-hello.remote.nodes = ["localhost:9991","localhost:9992","localhost:9993"] + } + } +} diff --git 
a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode4.opts b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode4.opts index 0d61591255..4c7670d733 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode4.opts +++ b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmNode4.opts @@ -1 +1 @@ --Dakka.cluster.nodename=node4 -Dakka.remote.hostname=localhost -Dakka.remote.port=9994 +-Dakka.cluster.nodename=node4 -Dakka.remote.server.hostname=localhost -Dakka.remote.server.port=9994 diff --git a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmSpec.scala b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmSpec.scala index a73fc5b908..95c5037e8f 100644 --- a/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmSpec.scala +++ b/akka-remote/src/multi-jvm/scala/akka/remote/scatter_gather_routed/ScatterGatherRoutedRemoteActorMultiJvmSpec.scala @@ -9,7 +9,7 @@ object ScatterGatherRoutedRemoteActorMultiJvmSpec { val NrOfNodes = 4 class SomeActor extends Actor with Serializable { def receive = { - case "hit" ⇒ sender ! app.nodename + case "hit" ⇒ sender ! 
system.nodename case "end" ⇒ self.stop() } } @@ -70,7 +70,7 @@ class ScatterGatherRoutedRemoteActorMultiJvmNode4 extends AkkaRemoteSpec { remote.start() barrier("start") - val actor = app.actorOf[SomeActor]("service-hello") + val actor = system.actorOf[SomeActor]("service-hello") actor.isInstanceOf[RoutedActorRef] must be(true) actor.asInstanceOf[RoutedActorRef].router.isInstanceOf[ScatterGatherFirstCompletedRouter] must be(true) diff --git a/akka-remote/src/test/scala/akka/remote/AccrualFailureDetectorSpec.scala b/akka-remote/src/test/scala/akka/remote/AccrualFailureDetectorSpec.scala index 5b94895756..38d18ac6c5 100644 --- a/akka-remote/src/test/scala/akka/remote/AccrualFailureDetectorSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/AccrualFailureDetectorSpec.scala @@ -1,13 +1,11 @@ package akka.remote -import org.scalatest.WordSpec -import org.scalatest.matchers.MustMatchers - import java.net.InetSocketAddress +import akka.testkit.AkkaSpec -class AccrualFailureDetectorSpec extends WordSpec with MustMatchers { +class AccrualFailureDetectorSpec extends AkkaSpec { - "An AccrualFailureDetector" should { + "An AccrualFailureDetector" must { val conn = RemoteAddress(new InetSocketAddress("localhost", 2552)) "mark node as available after a series of successful heartbeats" in { diff --git a/akka-remote/src/test/scala/akka/remote/GossiperSpec.scala b/akka-remote/src/test/scala/akka/remote/GossiperSpec.scala index 06d0b73c5b..12e2925b26 100644 --- a/akka-remote/src/test/scala/akka/remote/GossiperSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/GossiperSpec.scala @@ -1,13 +1,11 @@ package akka.remote -import org.scalatest.WordSpec -import org.scalatest.matchers.MustMatchers - import java.net.InetSocketAddress +import akka.testkit.AkkaSpec -class GossiperSpec extends WordSpec with MustMatchers { +class GossiperSpec extends AkkaSpec { - "An Gossiper" should { + "An Gossiper" must { "..." 
in { } diff --git a/akka-remote/src/test/scala/akka/remote/RemoteConfigSpec.scala b/akka-remote/src/test/scala/akka/remote/RemoteConfigSpec.scala new file mode 100644 index 0000000000..d4e4b6b3bc --- /dev/null +++ b/akka-remote/src/test/scala/akka/remote/RemoteConfigSpec.scala @@ -0,0 +1,54 @@ +package akka.remote + +import akka.testkit.AkkaSpec + +@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) +class RemoteConfigSpec extends AkkaSpec { + + "ClusterSpec: A Deployer" must { + "be able to parse 'akka.actor.cluster._' config elements" in { + + val config = RemoteExtension(system).settings.config + import config._ + + //akka.remote.server + getInt("akka.remote.server.port") must equal(2552) + getInt("akka.remote.server.message-frame-size") must equal(1048576) + getMilliseconds("akka.remote.server.connection-timeout") must equal(120 * 1000) + getBoolean("akka.remote.server.require-cookie") must equal(false) + getBoolean("akka.remote.server.untrusted-mode") must equal(false) + getInt("akka.remote.server.backlog") must equal(4096) + + //akka.remote.client + getBoolean("akka.remote.client.buffering.retry-message-send-on-failure") must equal(false) + getInt("akka.remote.client.buffering.capacity") must equal(-1) + getMilliseconds("akka.remote.client.reconnect-delay") must equal(5 * 1000) + getMilliseconds("akka.remote.client.read-timeout") must equal(3600 * 1000) + getMilliseconds("akka.remote.client.reconnection-time-window") must equal(600 * 1000) + + // TODO cluster config will go into akka-cluster-reference.conf when we enable that module + //akka.cluster + getString("akka.cluster.name") must equal("test-cluster") + getString("akka.cluster.zookeeper-server-addresses") must equal("localhost:2181") + getInt("akka.remote.server.port") must equal(2552) + getMilliseconds("akka.cluster.max-time-to-wait-until-connected") must equal(30 * 1000) + getMilliseconds("akka.cluster.session-timeout") must equal(60 * 1000) + 
getMilliseconds("akka.cluster.connection-timeout") must equal(60 * 1000) + getMilliseconds("akka.remote.remote-daemon-ack-timeout") must equal(30 * 1000) + getBoolean("akka.cluster.include-ref-node-in-replica-set") must equal(true) + getString("akka.remote.layer") must equal("akka.cluster.netty.NettyRemoteSupport") + getString("akka.remote.secure-cookie") must equal("") + getBoolean("akka.remote.use-passive-connections") must equal(true) + getString("akka.cluster.log-directory") must equal("_akka_cluster") + + //akka.cluster.replication + getString("akka.cluster.replication.digest-type") must equal("MAC") + getString("akka.cluster.replication.password") must equal("secret") + getInt("akka.cluster.replication.ensemble-size") must equal(3) + getInt("akka.cluster.replication.quorum-size") must equal(2) + getInt("akka.cluster.replication.snapshot-frequency") must equal(1000) + getMilliseconds("akka.cluster.replication.timeout") must equal(30 * 1000) + + } + } +} diff --git a/akka-remote/src/test/scala/akka/remote/VectorClockSpec.scala b/akka-remote/src/test/scala/akka/remote/VectorClockSpec.scala index 6cb4414b6c..5bfda16666 100644 --- a/akka-remote/src/test/scala/akka/remote/VectorClockSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/VectorClockSpec.scala @@ -1,11 +1,9 @@ package akka.remote -import org.scalatest.WordSpec -import org.scalatest.matchers.MustMatchers - import java.net.InetSocketAddress +import akka.testkit.AkkaSpec -class VectorClockSpec extends WordSpec with MustMatchers { +class VectorClockSpec extends AkkaSpec { import VectorClock._ "An VectorClock" must { diff --git a/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnBecome.scala b/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnBecome.scala index f322e62144..2c23940c9f 100644 --- a/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnBecome.scala +++ b/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnBecome.scala @@ -4,7 +4,7 @@ package 
sample.fsm.dining.become //http://www.dalnefre.com/wp/2010/08/dining-philosophers-in-humus/ import akka.actor.{ ActorRef, Actor, ActorSystem } -import java.util.concurrent.TimeUnit +import akka.util.duration._ /* * First we define our messages, they basically speak for themselves @@ -77,7 +77,7 @@ class Hakker(name: String, left: ActorRef, right: ActorRef) extends Actor { case Taken(`chopstickToWaitFor`) ⇒ println("%s has picked up %s and %s, and starts to eat", name, left.address, right.address) become(eating) - app.scheduler.scheduleOnce(self, Think, 5, TimeUnit.SECONDS) + system.scheduler.scheduleOnce(self, Think, 5 seconds) case Busy(chopstick) ⇒ become(thinking) @@ -106,7 +106,7 @@ class Hakker(name: String, left: ActorRef, right: ActorRef) extends Actor { left ! Put(self) right ! Put(self) println("%s puts down his chopsticks and starts to think", name) - app.scheduler.scheduleOnce(self, Eat, 5, TimeUnit.SECONDS) + system.scheduler.scheduleOnce(self, Eat, 5 seconds) } //All hakkers start in a non-eating state @@ -114,7 +114,7 @@ class Hakker(name: String, left: ActorRef, right: ActorRef) extends Actor { case Think ⇒ println("%s starts to think", name) become(thinking) - app.scheduler.scheduleOnce(self, Eat, 5, TimeUnit.SECONDS) + system.scheduler.scheduleOnce(self, Eat, 5 seconds) } } @@ -122,14 +122,14 @@ class Hakker(name: String, left: ActorRef, right: ActorRef) extends Actor { * Alright, here's our test-harness */ object DiningHakkers { - val app = ActorSystem() + val system = ActorSystem() def run { //Create 5 chopsticks - val chopsticks = for (i ← 1 to 5) yield app.actorOf(new Chopstick("Chopstick " + i)) + val chopsticks = for (i ← 1 to 5) yield system.actorOf(new Chopstick("Chopstick " + i)) //Create 5 awesome hakkers and assign them their left and right chopstick val hakkers = for { (name, i) ← List("Ghosh", "Bonér", "Klang", "Krasser", "Manie").zipWithIndex - } yield app.actorOf(new Hakker(name, chopsticks(i), chopsticks((i + 1) % 5))) + } yield 
system.actorOf(new Hakker(name, chopsticks(i), chopsticks((i + 1) % 5))) //Signal all hakkers that they should start thinking, and watch the show hakkers.foreach(_ ! Think) diff --git a/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnFsm.scala b/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnFsm.scala index 97696e0dbe..987f630784 100644 --- a/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnFsm.scala +++ b/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnFsm.scala @@ -32,7 +32,7 @@ case class TakenBy(hakker: ActorRef) class Chopstick(name: String) extends Actor with FSM[ChopstickState, TakenBy] { // A chopstick begins its existence as available and taken by no one - startWith(Available, TakenBy(app.deadLetters)) + startWith(Available, TakenBy(system.deadLetters)) // When a chopstick is available, it can be taken by a some hakker when(Available) { @@ -47,7 +47,7 @@ class Chopstick(name: String) extends Actor with FSM[ChopstickState, TakenBy] { case Event(Take, currentState) ⇒ stay replying Busy(self) case Event(Put, TakenBy(hakker)) if sender == hakker ⇒ - goto(Available) using TakenBy(app.deadLetters) + goto(Available) using TakenBy(system.deadLetters) } // Initialze the chopstick @@ -163,15 +163,15 @@ class FSMHakker(name: String, left: ActorRef, right: ActorRef) extends Actor wit */ object DiningHakkersOnFsm { - val app = ActorSystem() + val system = ActorSystem() def run = { // Create 5 chopsticks - val chopsticks = for (i ← 1 to 5) yield app.actorOf(new Chopstick("Chopstick " + i)) + val chopsticks = for (i ← 1 to 5) yield system.actorOf(new Chopstick("Chopstick " + i)) // Create 5 awesome fsm hakkers and assign them their left and right chopstick val hakkers = for { (name, i) ← List("Ghosh", "Bonér", "Klang", "Krasser", "Manie").zipWithIndex - } yield app.actorOf(new FSMHakker(name, chopsticks(i), chopsticks((i + 1) % 5))) + } yield system.actorOf(new FSMHakker(name, chopsticks(i), chopsticks((i + 1) % 5))) 
hakkers.foreach(_ ! Think) } diff --git a/akka-slf4j/src/main/scala/akka/event/slf4j/SLF4J.scala b/akka-slf4j/src/main/scala/akka/event/slf4j/SLF4J.scala index 80d913425a..dc175e4e82 100644 --- a/akka-slf4j/src/main/scala/akka/event/slf4j/SLF4J.scala +++ b/akka-slf4j/src/main/scala/akka/event/slf4j/SLF4J.scala @@ -21,7 +21,6 @@ trait SLF4JLogging { object Logger { def apply(logger: String): SLFLogger = SLFLoggerFactory getLogger logger - def apply(clazz: Class[_]): SLFLogger = apply(clazz.getName) def root: SLFLogger = apply(SLFLogger.ROOT_LOGGER_NAME) } @@ -33,29 +32,26 @@ object Logger { class Slf4jEventHandler extends Actor with SLF4JLogging { def receive = { - case event @ Error(cause, instance, message) ⇒ - logger(instance).error("[{}] [{}] [{}]", + case event @ Error(cause, logSource, message) ⇒ + Logger(logSource).error("[{}] [{}] [{}]", Array[AnyRef](event.thread.getName, message.asInstanceOf[AnyRef], stackTraceFor(cause))) - case event @ Warning(instance, message) ⇒ - logger(instance).warn("[{}] [{}]", + case event @ Warning(logSource, message) ⇒ + Logger(logSource).warn("[{}] [{}]", event.thread.getName, message.asInstanceOf[AnyRef]) - case event @ Info(instance, message) ⇒ - logger(instance).info("[{}] [{}]", + case event @ Info(logSource, message) ⇒ + Logger(logSource).info("[{}] [{}]", event.thread.getName, message.asInstanceOf[AnyRef]) - case event @ Debug(instance, message) ⇒ - logger(instance).debug("[{}] [{}]", + case event @ Debug(logSource, message) ⇒ + Logger(logSource).debug("[{}] [{}]", event.thread.getName, message.asInstanceOf[AnyRef]) - case InitializeLogger(_) ⇒ log.info("Slf4jEventHandler started"); sender ! LoggerInitialized + case InitializeLogger(_) ⇒ + log.info("Slf4jEventHandler started") + sender ! LoggerInitialized } - def logger(instance: AnyRef): SLFLogger = instance match { - // TODO make sure that this makes sense (i.e. 
should be the full path after Peter’s changes) - case a: ActorRef ⇒ Logger(a.address) - case _ ⇒ Logger(instance.getClass) - } } diff --git a/akka-spring/src/main/scala/akka/spring/ConfiggyPropertyPlaceholderConfigurer.scala b/akka-spring/src/main/scala/akka/spring/ConfiggyPropertyPlaceholderConfigurer.scala index c346d94263..1d78b0c14b 100644 --- a/akka-spring/src/main/scala/akka/spring/ConfiggyPropertyPlaceholderConfigurer.scala +++ b/akka-spring/src/main/scala/akka/spring/ConfiggyPropertyPlaceholderConfigurer.scala @@ -19,14 +19,14 @@ class ConfiggyPropertyPlaceholderConfigurer extends PropertyPlaceholderConfigure */ override def setLocation(configgyResource: Resource) { if (configgyResource eq null) throw new IllegalArgumentException("Property 'config' must be set") - val properties = loadAkkaConfig(configgyResource) + val properties = loadSettings(configgyResource) setProperties(properties) } /** * Load the akka.conf and transform to properties. */ - private def loadAkkaConfig(configgyResource: Resource): Properties = { + private def loadSettings(configgyResource: Resource): Properties = { val config = Configuration.fromFile(configgyResource.getFile.getPath) val properties = new Properties() config.map.foreach { case (k, v) ⇒ properties.put(k, v.asInstanceOf[AnyRef]); println("(k,v)=" + k + ", " + v) } diff --git a/akka-spring/src/test/java/akka/spring/foo/IFoo.java b/akka-spring/src/test/java/akka/spring/foo/IFoo.java index e47809f3af..0e5a294811 100644 --- a/akka-spring/src/test/java/akka/spring/foo/IFoo.java +++ b/akka-spring/src/test/java/akka/spring/foo/IFoo.java @@ -5,7 +5,7 @@ package akka.spring.foo; * User: michaelkober * Date: Aug 11, 2010 * Time: 12:49:58 PM - * To change this template use File | Settings | File Templates. + * To change this template use File | settings | File Templates. 
*/ public interface IFoo { public String foo(); diff --git a/akka-spring/src/test/java/akka/spring/foo/IMyPojo.java b/akka-spring/src/test/java/akka/spring/foo/IMyPojo.java index 825d797cf2..24e673a0c3 100644 --- a/akka-spring/src/test/java/akka/spring/foo/IMyPojo.java +++ b/akka-spring/src/test/java/akka/spring/foo/IMyPojo.java @@ -5,7 +5,7 @@ package akka.spring.foo; * User: michaelkober * Date: Aug 11, 2010 * Time: 12:01:00 PM - * To change this template use File | Settings | File Templates. + * To change this template use File | settings | File Templates. */ public interface IMyPojo { public void oneWay(String message); diff --git a/akka-spring/src/test/scala/ActorFactoryBeanTest.scala b/akka-spring/src/test/scala/ActorFactoryBeanTest.scala index 58a112de83..ff7dd71f47 100644 --- a/akka-spring/src/test/scala/ActorFactoryBeanTest.scala +++ b/akka-spring/src/test/scala/ActorFactoryBeanTest.scala @@ -65,7 +65,7 @@ class ActorFactoryBeanTest extends Spec with ShouldMatchers with BeforeAndAfterA assert(target.getStringFromVal === entry.value) } - it("should create an app context and verify dependency injection for typed") { + it("should create an system context and verify dependency injection for typed") { var ctx = new ClassPathXmlApplicationContext("appContext.xml"); val ta = ctx.getBean("typedActor").asInstanceOf[PojoInf]; assert(ta.isPreStartInvoked) @@ -75,7 +75,7 @@ class ActorFactoryBeanTest extends Spec with ShouldMatchers with BeforeAndAfterA ctx.close } - it("should create an app context and verify dependency injection for untyped actors") { + it("should create an system context and verify dependency injection for untyped actors") { var ctx = new ClassPathXmlApplicationContext("appContext.xml") val uta = ctx.getBean("untypedActor").asInstanceOf[ActorRef] val ping = uta.actor.asInstanceOf[PingActor] diff --git a/akka-spring/src/test/scala/CamelServiceSpringFeatureTest.scala b/akka-spring/src/test/scala/CamelServiceSpringFeatureTest.scala index 
967d94dd33..5975ba60e3 100644 --- a/akka-spring/src/test/scala/CamelServiceSpringFeatureTest.scala +++ b/akka-spring/src/test/scala/CamelServiceSpringFeatureTest.scala @@ -17,7 +17,7 @@ class CamelServiceSpringFeatureTest extends FeatureSpec with BeforeAndAfterEach Actor.registry.shutdownAll } - feature("start CamelService from Spring app context") { + feature("start CamelService from Spring system context") { import CamelContextManager._ scenario("with a custom CamelContext and access a registered typed actor") { val appctx = new ClassPathXmlApplicationContext("/appContextCamelServiceCustom.xml") diff --git a/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala b/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala index 69037bd148..9b9f428d3d 100644 --- a/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala +++ b/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala @@ -60,7 +60,7 @@ class TypedActorSpringFeatureTest extends FeatureSpec with ShouldMatchers with B myPojo } - feature("parse Spring app context") { + feature("parse Spring system context") { scenario("akka:typed-actor and akka:supervision and akka:dispatcher can be used as top level elements") { val context = new ClassPathResource("/typed-actor-config.xml") diff --git a/akka-spring/src/test/scala/UntypedActorSpringFeatureTest.scala b/akka-spring/src/test/scala/UntypedActorSpringFeatureTest.scala index ef111c57ab..6c7a0156e7 100644 --- a/akka-spring/src/test/scala/UntypedActorSpringFeatureTest.scala +++ b/akka-spring/src/test/scala/UntypedActorSpringFeatureTest.scala @@ -45,7 +45,7 @@ class UntypedActorSpringFeatureTest extends FeatureSpec with ShouldMatchers with pingActor } - feature("parse Spring app context") { + feature("parse Spring system context") { scenario("get a untyped actor") { val myactor = getPingActorFromContext("/untyped-actor-config.xml", "simple-untyped-actor") diff --git a/akka-stm/src/main/resources/akka-stm-reference.conf 
b/akka-stm/src/main/resources/akka-stm-reference.conf new file mode 100644 index 0000000000..98a3e70d5d --- /dev/null +++ b/akka-stm/src/main/resources/akka-stm-reference.conf @@ -0,0 +1,23 @@ +################################## +# Akka STM Reference Config File # +################################## + +# This the reference config file has all the default settings. +# Make your edits/overrides in your akka.conf. + +akka { + + stm { + fair = on # Should global transactions be fair or non-fair (non fair yield better performance) + max-retries = 1000 + timeout = 5s # Default timeout for blocking transactions and transaction set + write-skew = on + blocking-allowed = off + interruptible = off + speculative = on + quick-release = on + propagation = "requires" + trace-level = "none" + } + +} diff --git a/akka-stm/src/main/scala/akka/agent/Agent.scala b/akka-stm/src/main/scala/akka/agent/Agent.scala index 29bda7cea6..cfe618ce47 100644 --- a/akka-stm/src/main/scala/akka/agent/Agent.scala +++ b/akka-stm/src/main/scala/akka/agent/Agent.scala @@ -20,7 +20,7 @@ private[akka] case object Get * Factory method for creating an Agent. */ object Agent { - def apply[T](initialValue: T)(implicit app: ActorSystem) = new Agent(initialValue, app) + def apply[T](initialValue: T)(implicit system: ActorSystem) = new Agent(initialValue, system) } /** @@ -93,9 +93,9 @@ object Agent { * agent4.close * }}} */ -class Agent[T](initialValue: T, app: ActorSystem) { +class Agent[T](initialValue: T, system: ActorSystem) { private[akka] val ref = Ref(initialValue) - private[akka] val updater = app.actorOf(Props(new AgentUpdater(this))).asInstanceOf[LocalActorRef] //TODO can we avoid this somehow? + private[akka] val updater = system.actorOf(Props(new AgentUpdater(this))).asInstanceOf[LocalActorRef] //TODO can we avoid this somehow? /** * Read the internal state of the agent. 
@@ -123,7 +123,7 @@ class Agent[T](initialValue: T, app: ActorSystem) { def alter(f: T ⇒ T)(timeout: Timeout): Future[T] = { def dispatch = updater.?(Update(f), timeout).asInstanceOf[Future[T]] if (Stm.activeTransaction) { - val result = new DefaultPromise[T](timeout)(app.dispatcher) + val result = new DefaultPromise[T](timeout)(system.dispatcher) get //Join xa deferred { result completeWith dispatch } //Attach deferred-block to current transaction result @@ -151,8 +151,8 @@ class Agent[T](initialValue: T, app: ActorSystem) { def sendOff(f: T ⇒ T): Unit = { send((value: T) ⇒ { suspend() - val pinnedDispatcher = new PinnedDispatcher(app, null, "agent-send-off", UnboundedMailbox(), app.AkkaConfig.ActorTimeoutMillis) - val threadBased = app.actorOf(Props(new ThreadBasedAgentUpdater(this)).withDispatcher(pinnedDispatcher)) + val pinnedDispatcher = new PinnedDispatcher(system.dispatcherFactory.prerequisites, null, "agent-send-off", UnboundedMailbox(), system.settings.ActorTimeout.duration) + val threadBased = system.actorOf(Props(new ThreadBasedAgentUpdater(this)).withDispatcher(pinnedDispatcher)) threadBased ! Update(f) value }) @@ -166,11 +166,11 @@ class Agent[T](initialValue: T, app: ActorSystem) { * still be executed in order. 
*/ def alterOff(f: T ⇒ T)(timeout: Timeout): Future[T] = { - val result = new DefaultPromise[T](timeout)(app.dispatcher) + val result = new DefaultPromise[T](timeout)(system.dispatcher) send((value: T) ⇒ { suspend() - val pinnedDispatcher = new PinnedDispatcher(app, null, "agent-alter-off", UnboundedMailbox(), app.AkkaConfig.ActorTimeoutMillis) - val threadBased = app.actorOf(Props(new ThreadBasedAgentUpdater(this)).withDispatcher(pinnedDispatcher)) + val pinnedDispatcher = new PinnedDispatcher(system.dispatcherFactory.prerequisites, null, "agent-alter-off", UnboundedMailbox(), system.settings.ActorTimeout.duration) + val threadBased = system.actorOf(Props(new ThreadBasedAgentUpdater(this)).withDispatcher(pinnedDispatcher)) result completeWith threadBased.?(Update(f), timeout).asInstanceOf[Future[T]] value }) @@ -192,7 +192,7 @@ class Agent[T](initialValue: T, app: ActorSystem) { * Map this agent to a new agent, applying the function to the internal state. * Does not change the value of this agent. */ - def map[B](f: T ⇒ B): Agent[B] = Agent(f(get))(app) + def map[B](f: T ⇒ B): Agent[B] = Agent(f(get))(system) /** * Flatmap this agent to a new agent, applying the function to the internal state. @@ -262,7 +262,7 @@ class Agent[T](initialValue: T, app: ActorSystem) { * Map this agent to a new agent, applying the function to the internal state. * Does not change the value of this agent. 
*/ - def map[B](f: JFunc[T, B]): Agent[B] = Agent(f(get))(app) + def map[B](f: JFunc[T, B]): Agent[B] = Agent(f(get))(system) /** * Java API: diff --git a/akka-stm/src/test/java/akka/stm/example/EitherOrElseExample.java b/akka-stm/src/test/java/akka/stm/example/EitherOrElseExample.java index 2c76ef90f3..a8f3fd475c 100644 --- a/akka-stm/src/test/java/akka/stm/example/EitherOrElseExample.java +++ b/akka-stm/src/test/java/akka/stm/example/EitherOrElseExample.java @@ -9,7 +9,7 @@ public class EitherOrElseExample { System.out.println("EitherOrElse example"); System.out.println(); - ActorSystem application = new ActorSystem("UntypedTransactorExample"); + ActorSystem application = ActorSystem.create("UntypedTransactorExample"); final Ref left = new Ref(100); final Ref right = new Ref(100); diff --git a/akka-stm/src/test/java/akka/stm/example/RetryExample.java b/akka-stm/src/test/java/akka/stm/example/RetryExample.java index f0c55d35df..ad86126deb 100644 --- a/akka-stm/src/test/java/akka/stm/example/RetryExample.java +++ b/akka-stm/src/test/java/akka/stm/example/RetryExample.java @@ -3,48 +3,49 @@ package akka.stm.example; import akka.actor.ActorSystem; import akka.stm.*; import akka.actor.*; +import akka.testkit.AkkaSpec; public class RetryExample { - public static void main(String[] args) { - System.out.println(); - System.out.println("Retry example"); - System.out.println(); + public static void main(String[] args) { + System.out.println(); + System.out.println("Retry example"); + System.out.println(); - ActorSystem application = new ActorSystem("RetryExample"); + ActorSystem application = ActorSystem.create("RetryExample", AkkaSpec.testConf()); - final Ref account1 = new Ref(100.0); - final Ref account2 = new Ref(100.0); + final Ref account1 = new Ref(100.0); + final Ref account2 = new Ref(100.0); - ActorRef transferer = application.actorOf(new Props().withCreator(Transferer.class)); + ActorRef transferer = application.actorOf(new 
Props().withCreator(Transferer.class)); - transferer.tell(new Transfer(account1, account2, 500.0)); - // Transferer: not enough money - retrying + transferer.tell(new Transfer(account1, account2, 500.0)); + // Transferer: not enough money - retrying - new Atomic() { - public Object atomically() { - return account1.set(account1.get() + 2000); - } - }.execute(); - // Transferer: transferring + new Atomic() { + public Object atomically() { + return account1.set(account1.get() + 2000); + } + }.execute(); + // Transferer: transferring - Double acc1 = new Atomic() { - public Double atomically() { - return account1.get(); - } - }.execute(); + Double acc1 = new Atomic() { + public Double atomically() { + return account1.get(); + } + }.execute(); - Double acc2 = new Atomic() { - public Double atomically() { - return account2.get(); - } - }.execute(); + Double acc2 = new Atomic() { + public Double atomically() { + return account2.get(); + } + }.execute(); - System.out.println("Account 1: " + acc1); - // Account 1: 1600.0 + System.out.println("Account 1: " + acc1); + // Account 1: 1600.0 - System.out.println("Account 2: " + acc2); - // Account 2: 600.0 + System.out.println("Account 2: " + acc2); + // Account 2: 600.0 - transferer.stop(); - } + transferer.stop(); + } } diff --git a/akka-stm/src/test/java/akka/transactor/example/UntypedCoordinatedExample.java b/akka-stm/src/test/java/akka/transactor/example/UntypedCoordinatedExample.java index d6874ab6e6..344c98dfee 100644 --- a/akka-stm/src/test/java/akka/transactor/example/UntypedCoordinatedExample.java +++ b/akka-stm/src/test/java/akka/transactor/example/UntypedCoordinatedExample.java @@ -4,45 +4,46 @@ import akka.actor.ActorSystem; import akka.actor.ActorRef; import akka.actor.Props; import akka.dispatch.Future; +import akka.testkit.AkkaSpec; import akka.transactor.Coordinated; public class UntypedCoordinatedExample { - public static void main(String[] args) throws InterruptedException { - System.out.println(); - 
System.out.println("Untyped transactor example"); - System.out.println(); + public static void main(String[] args) throws InterruptedException { + System.out.println(); + System.out.println("Untyped transactor example"); + System.out.println(); - ActorSystem application = new ActorSystem("UntypedCoordinatedExample"); + ActorSystem application = ActorSystem.create("UntypedCoordinatedExample", AkkaSpec.testConf()); - ActorRef counter1 = application.actorOf(new Props().withCreator(UntypedCoordinatedCounter.class)); - ActorRef counter2 = application.actorOf(new Props().withCreator(UntypedCoordinatedCounter.class)); + ActorRef counter1 = application.actorOf(new Props().withCreator(UntypedCoordinatedCounter.class)); + ActorRef counter2 = application.actorOf(new Props().withCreator(UntypedCoordinatedCounter.class)); - counter1.tell(new Coordinated(new Increment(counter2))); + counter1.tell(new Coordinated(new Increment(counter2))); - Thread.sleep(3000); + Thread.sleep(3000); - long timeout = 5000; + long timeout = 5000; - Future future1 = counter1.ask("GetCount", timeout); - Future future2 = counter2.ask("GetCount", timeout); + Future future1 = counter1.ask("GetCount", timeout); + Future future2 = counter2.ask("GetCount", timeout); - future1.await(); - if (future1.isCompleted()) { - if (future1.result().isDefined()) { - int result = (Integer) future1.result().get(); - System.out.println("counter 1: " + result); - } - } - - future2.await(); - if (future2.isCompleted()) { - if (future2.result().isDefined()) { - int result = (Integer) future2.result().get(); - System.out.println("counter 2: " + result); - } - } - - counter1.stop(); - counter2.stop(); + future1.await(); + if (future1.isCompleted()) { + if (future1.result().isDefined()) { + int result = (Integer) future1.result().get(); + System.out.println("counter 1: " + result); + } } + + future2.await(); + if (future2.isCompleted()) { + if (future2.result().isDefined()) { + int result = (Integer) future2.result().get(); + 
System.out.println("counter 2: " + result); + } + } + + counter1.stop(); + counter2.stop(); + } } diff --git a/akka-stm/src/test/java/akka/transactor/example/UntypedTransactorExample.java b/akka-stm/src/test/java/akka/transactor/example/UntypedTransactorExample.java index 1e3a5caf3c..882d5b7b1f 100644 --- a/akka-stm/src/test/java/akka/transactor/example/UntypedTransactorExample.java +++ b/akka-stm/src/test/java/akka/transactor/example/UntypedTransactorExample.java @@ -4,44 +4,45 @@ import akka.actor.ActorSystem; import akka.actor.ActorRef; import akka.actor.Props; import akka.dispatch.Future; +import akka.testkit.AkkaSpec; public class UntypedTransactorExample { - public static void main(String[] args) throws InterruptedException { - System.out.println(); - System.out.println("Untyped transactor example"); - System.out.println(); + public static void main(String[] args) throws InterruptedException { + System.out.println(); + System.out.println("Untyped transactor example"); + System.out.println(); - ActorSystem application = new ActorSystem("UntypedTransactorExample"); + ActorSystem application = ActorSystem.create("UntypedTransactorExample", AkkaSpec.testConf()); - ActorRef counter1 = application.actorOf(new Props().withCreator(UntypedCounter.class)); - ActorRef counter2 = application.actorOf(new Props().withCreator(UntypedCounter.class)); + ActorRef counter1 = application.actorOf(new Props().withCreator(UntypedCounter.class)); + ActorRef counter2 = application.actorOf(new Props().withCreator(UntypedCounter.class)); - counter1.tell(new Increment(counter2)); + counter1.tell(new Increment(counter2)); - Thread.sleep(3000); + Thread.sleep(3000); - long timeout = 5000; + long timeout = 5000; - Future future1 = counter1.ask("GetCount", timeout); - Future future2 = counter2.ask("GetCount", timeout); + Future future1 = counter1.ask("GetCount", timeout); + Future future2 = counter2.ask("GetCount", timeout); - future1.await(); - if (future1.isCompleted()) { - if 
(future1.result().isDefined()) { - int result = (Integer) future1.result().get(); - System.out.println("counter 1: " + result); - } - } - - future2.await(); - if (future2.isCompleted()) { - if (future2.result().isDefined()) { - int result = (Integer) future2.result().get(); - System.out.println("counter 2: " + result); - } - } - - counter1.stop(); - counter2.stop(); + future1.await(); + if (future1.isCompleted()) { + if (future1.result().isDefined()) { + int result = (Integer) future1.result().get(); + System.out.println("counter 1: " + result); + } } + + future2.await(); + if (future2.isCompleted()) { + if (future2.result().isDefined()) { + int result = (Integer) future2.result().get(); + System.out.println("counter 2: " + result); + } + } + + counter1.stop(); + counter2.stop(); + } } diff --git a/akka-stm/src/test/java/akka/transactor/test/UntypedCoordinatedIncrementTest.java b/akka-stm/src/test/java/akka/transactor/test/UntypedCoordinatedIncrementTest.java index 7c201ca103..0d44d16496 100644 --- a/akka-stm/src/test/java/akka/transactor/test/UntypedCoordinatedIncrementTest.java +++ b/akka-stm/src/test/java/akka/transactor/test/UntypedCoordinatedIncrementTest.java @@ -14,6 +14,7 @@ import akka.actor.Props; import akka.actor.UntypedActor; import akka.actor.UntypedActorFactory; import akka.dispatch.Future; +import akka.testkit.AkkaSpec; import akka.testkit.EventFilter; import akka.testkit.ErrorFilter; import akka.testkit.TestEvent; @@ -30,70 +31,73 @@ import scala.collection.JavaConverters; import scala.collection.Seq; public class UntypedCoordinatedIncrementTest { - ActorSystem application = new ActorSystem("UntypedCoordinatedIncrementTest"); + ActorSystem application = ActorSystem.create("UntypedCoordinatedIncrementTest", AkkaSpec.testConf()); - List counters; - ActorRef failer; + List counters; + ActorRef failer; - int numCounters = 3; - int timeout = 5; - int askTimeout = 5000; + int numCounters = 3; + int timeout = 5; + int askTimeout = 5000; - @Before public 
void initialise() { - Props p = new Props().withCreator(UntypedFailer.class); - counters = new ArrayList(); - for (int i = 1; i <= numCounters; i++) { - final String name = "counter" + i; - ActorRef counter = application.actorOf(new Props().withCreator(new UntypedActorFactory() { - public UntypedActor create() { - return new UntypedCoordinatedCounter(name); - } - })); - counters.add(counter); + @Before + public void initialise() { + Props p = new Props().withCreator(UntypedFailer.class); + counters = new ArrayList(); + for (int i = 1; i <= numCounters; i++) { + final String name = "counter" + i; + ActorRef counter = application.actorOf(new Props().withCreator(new UntypedActorFactory() { + public UntypedActor create() { + return new UntypedCoordinatedCounter(name); } - failer = application.actorOf(p); + })); + counters.add(counter); } + failer = application.actorOf(p); + } - @Test public void incrementAllCountersWithSuccessfulTransaction() { - CountDownLatch incrementLatch = new CountDownLatch(numCounters); - Increment message = new Increment(counters.subList(1, counters.size()), incrementLatch); - counters.get(0).tell(new Coordinated(message)); - try { - incrementLatch.await(timeout, TimeUnit.SECONDS); - } catch (InterruptedException exception) {} - for (ActorRef counter : counters) { - Future future = counter.ask("GetCount", askTimeout); - assertEquals(1, ((Integer)future.get()).intValue()); - } + @Test + public void incrementAllCountersWithSuccessfulTransaction() { + CountDownLatch incrementLatch = new CountDownLatch(numCounters); + Increment message = new Increment(counters.subList(1, counters.size()), incrementLatch); + counters.get(0).tell(new Coordinated(message)); + try { + incrementLatch.await(timeout, TimeUnit.SECONDS); + } catch (InterruptedException exception) { } + for (ActorRef counter : counters) { + Future future = counter.ask("GetCount", askTimeout); + assertEquals(1, ((Integer) future.get()).intValue()); + } + } - @Test public void 
incrementNoCountersWithFailingTransaction() { - EventFilter expectedFailureFilter = (EventFilter) new ErrorFilter(ExpectedFailureException.class); - EventFilter coordinatedFilter = (EventFilter) new ErrorFilter(CoordinatedTransactionException.class); - Seq ignoreExceptions = seq(expectedFailureFilter, coordinatedFilter); - application.eventStream().publish(new TestEvent.Mute(ignoreExceptions)); - CountDownLatch incrementLatch = new CountDownLatch(numCounters); - List actors = new ArrayList(counters); - actors.add(failer); - Increment message = new Increment(actors.subList(1, actors.size()), incrementLatch); - actors.get(0).tell(new Coordinated(message)); - try { - incrementLatch.await(timeout, TimeUnit.SECONDS); - } catch (InterruptedException exception) {} - for (ActorRef counter : counters) { - Future future = counter.ask("GetCount", askTimeout); - assertEquals(0, ((Integer)future.get()).intValue()); - } + @Test + public void incrementNoCountersWithFailingTransaction() { + EventFilter expectedFailureFilter = (EventFilter) new ErrorFilter(ExpectedFailureException.class); + EventFilter coordinatedFilter = (EventFilter) new ErrorFilter(CoordinatedTransactionException.class); + Seq ignoreExceptions = seq(expectedFailureFilter, coordinatedFilter); + application.eventStream().publish(new TestEvent.Mute(ignoreExceptions)); + CountDownLatch incrementLatch = new CountDownLatch(numCounters); + List actors = new ArrayList(counters); + actors.add(failer); + Increment message = new Increment(actors.subList(1, actors.size()), incrementLatch); + actors.get(0).tell(new Coordinated(message)); + try { + incrementLatch.await(timeout, TimeUnit.SECONDS); + } catch (InterruptedException exception) { } + for (ActorRef counter : counters) { + Future future = counter.ask("GetCount", askTimeout); + assertEquals(0, ((Integer) future.get()).intValue()); + } + } - public Seq seq(A... 
args) { - return JavaConverters.collectionAsScalaIterableConverter(Arrays.asList(args)).asScala().toSeq(); - } + public Seq seq(A... args) { + return JavaConverters.collectionAsScalaIterableConverter(Arrays.asList(args)).asScala().toSeq(); + } - @After - public void stop() { - application.stop(); - } + @After + public void stop() { + application.stop(); + } } - - diff --git a/akka-stm/src/test/java/akka/transactor/test/UntypedTransactorTest.java b/akka-stm/src/test/java/akka/transactor/test/UntypedTransactorTest.java index 9c54a38b74..8d2a3e4db8 100644 --- a/akka-stm/src/test/java/akka/transactor/test/UntypedTransactorTest.java +++ b/akka-stm/src/test/java/akka/transactor/test/UntypedTransactorTest.java @@ -25,82 +25,86 @@ import java.util.concurrent.TimeUnit; import scala.Option; import scala.collection.JavaConverters; import scala.collection.Seq; +import akka.testkit.AkkaSpec; public class UntypedTransactorTest { - ActorSystem application = new ActorSystem("UntypedTransactorTest"); + ActorSystem application = ActorSystem.create("UntypedTransactorTest", AkkaSpec.testConf()); - List counters; - ActorRef failer; + List counters; + ActorRef failer; - int numCounters = 3; - int timeout = 5; - int askTimeout = 5000; + int numCounters = 3; + int timeout = 5; + int askTimeout = 5000; - @Before public void initialise() { - counters = new ArrayList(); - for (int i = 1; i <= numCounters; i++) { - final String name = "counter" + i; - ActorRef counter = application.actorOf(new Props().withCreator(new UntypedActorFactory() { - public UntypedActor create() { - return new UntypedCounter(name); - } - })); - counters.add(counter); + @Before + public void initialise() { + counters = new ArrayList(); + for (int i = 1; i <= numCounters; i++) { + final String name = "counter" + i; + ActorRef counter = application.actorOf(new Props().withCreator(new UntypedActorFactory() { + public UntypedActor create() { + return new UntypedCounter(name); } - failer = application.actorOf(new 
Props().withCreator(UntypedFailer.class)); + })); + counters.add(counter); } + failer = application.actorOf(new Props().withCreator(UntypedFailer.class)); + } - @Test public void incrementAllCountersWithSuccessfulTransaction() { - CountDownLatch incrementLatch = new CountDownLatch(numCounters); - Increment message = new Increment(counters.subList(1, counters.size()), incrementLatch); - counters.get(0).tell(message); - try { - incrementLatch.await(timeout, TimeUnit.SECONDS); - } catch (InterruptedException exception) {} - for (ActorRef counter : counters) { - Future future = counter.ask("GetCount", askTimeout); - future.await(); - if (future.isCompleted()) { - Option resultOption = future.result(); - if (resultOption.isDefined()) { - Object result = resultOption.get(); - int count = (Integer) result; - assertEquals(1, count); - } - } + @Test + public void incrementAllCountersWithSuccessfulTransaction() { + CountDownLatch incrementLatch = new CountDownLatch(numCounters); + Increment message = new Increment(counters.subList(1, counters.size()), incrementLatch); + counters.get(0).tell(message); + try { + incrementLatch.await(timeout, TimeUnit.SECONDS); + } catch (InterruptedException exception) { + } + for (ActorRef counter : counters) { + Future future = counter.ask("GetCount", askTimeout); + future.await(); + if (future.isCompleted()) { + Option resultOption = future.result(); + if (resultOption.isDefined()) { + Object result = resultOption.get(); + int count = (Integer) result; + assertEquals(1, count); } + } } + } - @Test public void incrementNoCountersWithFailingTransaction() { - EventFilter expectedFailureFilter = (EventFilter) new ErrorFilter(ExpectedFailureException.class); - EventFilter coordinatedFilter = (EventFilter) new ErrorFilter(CoordinatedTransactionException.class); - Seq ignoreExceptions = seq(expectedFailureFilter, coordinatedFilter); - application.eventStream().publish(new TestEvent.Mute(ignoreExceptions)); - CountDownLatch incrementLatch = new 
CountDownLatch(numCounters); - List actors = new ArrayList(counters); - actors.add(failer); - Increment message = new Increment(actors.subList(1, actors.size()), incrementLatch); - actors.get(0).tell(message); - try { - incrementLatch.await(timeout, TimeUnit.SECONDS); - } catch (InterruptedException exception) {} - for (ActorRef counter : counters) { - Future future = counter.ask("GetCount", askTimeout); - future.await(); - if (future.isCompleted()) { - Option resultOption = future.result(); - if (resultOption.isDefined()) { - Object result = resultOption.get(); - int count = (Integer) result; - assertEquals(0, count); - } - } + @Test + public void incrementNoCountersWithFailingTransaction() { + EventFilter expectedFailureFilter = (EventFilter) new ErrorFilter(ExpectedFailureException.class); + EventFilter coordinatedFilter = (EventFilter) new ErrorFilter(CoordinatedTransactionException.class); + Seq ignoreExceptions = seq(expectedFailureFilter, coordinatedFilter); + application.eventStream().publish(new TestEvent.Mute(ignoreExceptions)); + CountDownLatch incrementLatch = new CountDownLatch(numCounters); + List actors = new ArrayList(counters); + actors.add(failer); + Increment message = new Increment(actors.subList(1, actors.size()), incrementLatch); + actors.get(0).tell(message); + try { + incrementLatch.await(timeout, TimeUnit.SECONDS); + } catch (InterruptedException exception) { + } + for (ActorRef counter : counters) { + Future future = counter.ask("GetCount", askTimeout); + future.await(); + if (future.isCompleted()) { + Option resultOption = future.result(); + if (resultOption.isDefined()) { + Object result = resultOption.get(); + int count = (Integer) result; + assertEquals(0, count); } + } } + } - public Seq seq(A... args) { - return JavaConverters.collectionAsScalaIterableConverter(Arrays.asList(args)).asScala().toSeq(); - } + public Seq seq(A... 
args) { + return JavaConverters.collectionAsScalaIterableConverter(Arrays.asList(args)).asScala().toSeq(); + } } - - diff --git a/akka-stm/src/test/scala/agent/AgentSpec.scala b/akka-stm/src/test/scala/akka/agent/test/AgentSpec.scala similarity index 97% rename from akka-stm/src/test/scala/agent/AgentSpec.scala rename to akka-stm/src/test/scala/akka/agent/test/AgentSpec.scala index 6110ff78c5..9ef95594be 100644 --- a/akka-stm/src/test/scala/agent/AgentSpec.scala +++ b/akka-stm/src/test/scala/akka/agent/test/AgentSpec.scala @@ -2,15 +2,15 @@ package akka.agent.test import org.scalatest.WordSpec import org.scalatest.matchers.MustMatchers - import akka.actor.ActorSystem import akka.actor.Timeout import akka.agent.Agent import akka.stm._ import akka.util.Duration import akka.util.duration._ - import java.util.concurrent.CountDownLatch +import akka.testkit.AkkaSpec +import akka.testkit._ class CountDownFunction[A](num: Int = 1) extends Function1[A, A] { val latch = new CountDownLatch(num) @@ -18,12 +18,11 @@ class CountDownFunction[A](num: Int = 1) extends Function1[A, A] { def await(timeout: Duration) = latch.await(timeout.length, timeout.unit) } -class AgentSpec extends WordSpec with MustMatchers { +class AgentSpec extends AkkaSpec { - implicit val app = ActorSystem("AgentSpec") implicit val timeout = Timeout(5.seconds.dilated) - "Agent" should { + "Agent" must { "update with send dispatches in order sent" in { val countDown = new CountDownFunction[String] diff --git a/akka-stm/src/test/scala/akka/stm/test/ConfigSpec.scala b/akka-stm/src/test/scala/akka/stm/test/ConfigSpec.scala new file mode 100644 index 0000000000..19a4450cf3 --- /dev/null +++ b/akka-stm/src/test/scala/akka/stm/test/ConfigSpec.scala @@ -0,0 +1,38 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. 
+ */ + +package akka.stm.test + +import org.junit.runner.RunWith +import org.scalatest.WordSpec +import org.scalatest.junit.JUnitRunner +import org.scalatest.matchers.MustMatchers +import akka.actor.ActorSystem +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigParseOptions +import akka.testkit.AkkaSpec + +@RunWith(classOf[JUnitRunner]) +class ConfigSpec extends AkkaSpec(ConfigFactory.parseResource(classOf[ConfigSpec], "/akka-stm-reference.conf", ConfigParseOptions.defaults)) { + + "The default configuration file (i.e. akka-stm-reference.conf)" should { + "contain all configuration properties for akka-stm that are used in code with their correct defaults" in { + val config = system.settings.config + import config._ + + // TODO are these config values used anywhere? + + getBoolean("akka.stm.blocking-allowed") must equal(false) + getBoolean("akka.stm.fair") must equal(true) + getBoolean("akka.stm.interruptible") must equal(false) + getInt("akka.stm.max-retries") must equal(1000) + getString("akka.stm.propagation") must equal("requires") + getBoolean("akka.stm.quick-release") must equal(true) + getBoolean("akka.stm.speculative") must equal(true) + getMilliseconds("akka.stm.timeout") must equal(5 * 1000) + getString("akka.stm.trace-level") must equal("none") + getBoolean("akka.stm.write-skew") must equal(true) + } + } +} diff --git a/akka-stm/src/test/scala/stm/JavaStmSpec.scala b/akka-stm/src/test/scala/akka/stm/test/JavaStmSpec.scala similarity index 100% rename from akka-stm/src/test/scala/stm/JavaStmSpec.scala rename to akka-stm/src/test/scala/akka/stm/test/JavaStmSpec.scala diff --git a/akka-stm/src/test/scala/stm/RefSpec.scala b/akka-stm/src/test/scala/akka/stm/test/RefSpec.scala similarity index 100% rename from akka-stm/src/test/scala/stm/RefSpec.scala rename to akka-stm/src/test/scala/akka/stm/test/RefSpec.scala diff --git a/akka-stm/src/test/scala/stm/StmSpec.scala b/akka-stm/src/test/scala/akka/stm/test/StmSpec.scala similarity index 
99% rename from akka-stm/src/test/scala/stm/StmSpec.scala rename to akka-stm/src/test/scala/akka/stm/test/StmSpec.scala index e7890b138d..1f547fc1ae 100644 --- a/akka-stm/src/test/scala/stm/StmSpec.scala +++ b/akka-stm/src/test/scala/akka/stm/test/StmSpec.scala @@ -1,7 +1,7 @@ package akka.stm.test import akka.actor.Actor -import Actor._ +import akka.actor.Actor._ import org.multiverse.api.exceptions.ReadonlyException diff --git a/akka-stm/src/test/scala/transactor/CoordinatedIncrementSpec.scala b/akka-stm/src/test/scala/akka/transactor/test/CoordinatedIncrementSpec.scala similarity index 91% rename from akka-stm/src/test/scala/transactor/CoordinatedIncrementSpec.scala rename to akka-stm/src/test/scala/akka/transactor/test/CoordinatedIncrementSpec.scala index c6dcb8fd31..eda336b78e 100644 --- a/akka-stm/src/test/scala/transactor/CoordinatedIncrementSpec.scala +++ b/akka-stm/src/test/scala/akka/transactor/test/CoordinatedIncrementSpec.scala @@ -1,13 +1,11 @@ -package akka.transactor.test +package akka.transactor import org.scalatest.BeforeAndAfterAll import akka.actor.ActorSystem -import akka.transactor.Coordinated import akka.actor._ import akka.stm.{ Ref, TransactionFactory } import akka.util.duration._ -import akka.transactor.CoordinatedTransactionException import akka.testkit._ object CoordinatedIncrement { @@ -61,9 +59,9 @@ class CoordinatedIncrementSpec extends AkkaSpec with BeforeAndAfterAll { val numCounters = 4 def actorOfs = { - def createCounter(i: Int) = app.actorOf(Props(new Counter("counter" + i))) + def createCounter(i: Int) = system.actorOf(Props(new Counter("counter" + i))) val counters = (1 to numCounters) map createCounter - val failer = app.actorOf(Props(new Failer)) + val failer = system.actorOf(Props(new Failer)) (counters, failer) } diff --git a/akka-stm/src/test/scala/transactor/FickleFriendsSpec.scala b/akka-stm/src/test/scala/akka/transactor/test/FickleFriendsSpec.scala similarity index 91% rename from 
akka-stm/src/test/scala/transactor/FickleFriendsSpec.scala rename to akka-stm/src/test/scala/akka/transactor/test/FickleFriendsSpec.scala index ed4da08623..a74490b410 100644 --- a/akka-stm/src/test/scala/transactor/FickleFriendsSpec.scala +++ b/akka-stm/src/test/scala/akka/transactor/test/FickleFriendsSpec.scala @@ -1,14 +1,12 @@ -package akka.transactor.test +package akka.transactor import org.scalatest.WordSpec import org.scalatest.matchers.MustMatchers import org.scalatest.BeforeAndAfterAll import akka.actor.ActorSystem -import akka.transactor.Coordinated import akka.actor._ import akka.stm._ import akka.util.duration._ -import akka.transactor.CoordinatedTransactionException import akka.testkit._ import scala.util.Random.{ nextInt ⇒ random } import java.util.concurrent.CountDownLatch @@ -104,9 +102,9 @@ class FickleFriendsSpec extends AkkaSpec with BeforeAndAfterAll { val numCounters = 2 def actorOfs = { - def createCounter(i: Int) = app.actorOf(Props(new FickleCounter("counter" + i))) + def createCounter(i: Int) = system.actorOf(Props(new FickleCounter("counter" + i))) val counters = (1 to numCounters) map createCounter - val coordinator = app.actorOf(Props(new Coordinator("coordinator"))) + val coordinator = system.actorOf(Props(new Coordinator("coordinator"))) (counters, coordinator) } @@ -116,7 +114,7 @@ class FickleFriendsSpec extends AkkaSpec with BeforeAndAfterAll { EventFilter[ExpectedFailureException](), EventFilter[CoordinatedTransactionException](), EventFilter[ActorTimeoutException]()) - app.eventStream.publish(Mute(ignoreExceptions)) + system.eventStream.publish(Mute(ignoreExceptions)) val (counters, coordinator) = actorOfs val latch = new CountDownLatch(1) coordinator ! 
FriendlyIncrement(counters, latch) diff --git a/akka-stm/src/test/scala/transactor/JavaUntypedCoordinatedSpec.scala b/akka-stm/src/test/scala/akka/transactor/test/JavaUntypedCoordinatedSpec.scala similarity index 87% rename from akka-stm/src/test/scala/transactor/JavaUntypedCoordinatedSpec.scala rename to akka-stm/src/test/scala/akka/transactor/test/JavaUntypedCoordinatedSpec.scala index c2a96f436c..f48705469c 100644 --- a/akka-stm/src/test/scala/transactor/JavaUntypedCoordinatedSpec.scala +++ b/akka-stm/src/test/scala/akka/transactor/test/JavaUntypedCoordinatedSpec.scala @@ -1,4 +1,4 @@ -package akka.transactor.test +package akka.transactor import org.scalatest.junit.JUnitWrapperSuite diff --git a/akka-stm/src/test/scala/transactor/JavaUntypedTransactorSpec.scala b/akka-stm/src/test/scala/akka/transactor/test/JavaUntypedTransactorSpec.scala similarity index 87% rename from akka-stm/src/test/scala/transactor/JavaUntypedTransactorSpec.scala rename to akka-stm/src/test/scala/akka/transactor/test/JavaUntypedTransactorSpec.scala index a643fb141d..d4da5f0545 100644 --- a/akka-stm/src/test/scala/transactor/JavaUntypedTransactorSpec.scala +++ b/akka-stm/src/test/scala/akka/transactor/test/JavaUntypedTransactorSpec.scala @@ -1,4 +1,4 @@ -package akka.transactor.test +package akka.transactor import org.scalatest.junit.JUnitWrapperSuite diff --git a/akka-stm/src/test/scala/transactor/TransactorSpec.scala b/akka-stm/src/test/scala/akka/transactor/test/TransactorSpec.scala similarity index 91% rename from akka-stm/src/test/scala/transactor/TransactorSpec.scala rename to akka-stm/src/test/scala/akka/transactor/test/TransactorSpec.scala index 84dc33c431..43ee399196 100644 --- a/akka-stm/src/test/scala/transactor/TransactorSpec.scala +++ b/akka-stm/src/test/scala/akka/transactor/test/TransactorSpec.scala @@ -1,14 +1,12 @@ -package akka.transactor.test +package akka.transactor import org.scalatest.WordSpec import org.scalatest.matchers.MustMatchers import akka.actor.ActorSystem 
-import akka.transactor.Transactor import akka.actor._ import akka.stm._ import akka.util.duration._ -import akka.transactor.CoordinatedTransactionException import akka.testkit._ object TransactorIncrement { @@ -84,9 +82,9 @@ class TransactorSpec extends AkkaSpec { val numCounters = 3 def createTransactors = { - def createCounter(i: Int) = app.actorOf(Props(new Counter("counter" + i))) + def createCounter(i: Int) = system.actorOf(Props(new Counter("counter" + i))) val counters = (1 to numCounters) map createCounter - val failer = app.actorOf(Props(new Failer)) + val failer = system.actorOf(Props(new Failer)) (counters, failer) } @@ -124,7 +122,7 @@ class TransactorSpec extends AkkaSpec { "Transactor" should { "be usable without overriding normally" in { - val transactor = app.actorOf(Props(new Setter)) + val transactor = system.actorOf(Props(new Setter)) val ref = Ref(0) val latch = TestLatch(1) transactor ! Set(ref, 5, latch) diff --git a/akka-stm/src/test/scala/config/ConfigSpec.scala b/akka-stm/src/test/scala/config/ConfigSpec.scala deleted file mode 100644 index 895605395a..0000000000 --- a/akka-stm/src/test/scala/config/ConfigSpec.scala +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - */ - -package akka.config - -import org.junit.runner.RunWith -import org.scalatest.WordSpec -import org.scalatest.junit.JUnitRunner -import org.scalatest.matchers.MustMatchers - -import akka.actor.ActorSystem - -@RunWith(classOf[JUnitRunner]) -class ConfigSpec extends WordSpec with MustMatchers { - - "The default configuration file (i.e. 
akka-reference.conf)" should { - "contain all configuration properties for akka-stm that are used in code with their correct defaults" in { - val config = ActorSystem("ConfigSpec").config - - import config._ - - getBool("akka.stm.blocking-allowed") must equal(Some(false)) - getBool("akka.stm.fair") must equal(Some(true)) - getBool("akka.stm.interruptible") must equal(Some(false)) - getInt("akka.stm.max-retries") must equal(Some(1000)) - getString("akka.stm.propagation") must equal(Some("requires")) - getBool("akka.stm.quick-release") must equal(Some(true)) - getBool("akka.stm.speculative") must equal(Some(true)) - getLong("akka.stm.timeout") must equal(Some(5)) - getString("akka.stm.trace-level") must equal(Some("none")) - getBool("akka.stm.write-skew") must equal(Some(true)) - } - } -} diff --git a/akka-testkit/src/main/resources/akka-testkit-reference.conf b/akka-testkit/src/main/resources/akka-testkit-reference.conf new file mode 100644 index 0000000000..0aa150e4b5 --- /dev/null +++ b/akka-testkit/src/main/resources/akka-testkit-reference.conf @@ -0,0 +1,14 @@ +###################################### +# Akka Testkit Reference Config File # +###################################### + +# This reference config file has all the default settings. +# Make your edits/overrides in your akka.conf. + +akka { + test { + timefactor = 1.0 # factor by which to scale timeouts during tests, e.g.
to account for shared build system load + filter-leeway = 3s # duration of EventFilter.intercept waits after the block is finished until all required messages are received + single-expect-default = 3s # duration to wait in expectMsg and friends outside of within() block by default + } +} diff --git a/akka-testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala b/akka-testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala index 32705ae58e..382c25523d 100644 --- a/akka-testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala +++ b/akka-testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala @@ -12,6 +12,10 @@ import java.lang.ref.WeakReference import scala.annotation.tailrec import akka.actor.{ ActorCell, ActorRef, ActorSystem } import akka.dispatch._ +import akka.actor.Scheduler +import akka.event.EventStream +import akka.util.Duration +import java.util.concurrent.TimeUnit /* * Locking rules: @@ -103,9 +107,13 @@ private[testkit] object CallingThreadDispatcher { * @author Roland Kuhn * @since 1.1 */ -class CallingThreadDispatcher(_app: ActorSystem, val name: String = "calling-thread") extends MessageDispatcher(_app) { +class CallingThreadDispatcher( + _prerequisites: DispatcherPrerequisites, + val name: String = "calling-thread") extends MessageDispatcher(_prerequisites) { import CallingThreadDispatcher._ + val log = akka.event.Logging(prerequisites.eventStream, "CallingThreadDispatcher") + protected[akka] override def createMailbox(actor: ActorCell) = new CallingThreadMailbox(actor) private def getMailbox(actor: ActorCell): Option[CallingThreadMailbox] = actor.mailbox match { @@ -116,10 +124,10 @@ class CallingThreadDispatcher(_app: ActorSystem, val name: String = "calling-thr protected[akka] override def shutdown() {} protected[akka] override def throughput = 0 - protected[akka] override def throughputDeadlineTime = 0 + protected[akka] override def throughputDeadlineTime = Duration.Zero protected[akka] override def 
registerForExecution(mbox: Mailbox, hasMessageHint: Boolean, hasSystemMessageHint: Boolean): Boolean = false - protected[akka] override def timeoutMs = 100L + protected[akka] override def shutdownTimeout = Duration(100L, TimeUnit.MILLISECONDS) override def suspend(actor: ActorCell) { getMailbox(actor) foreach (_.suspendSwitch.switchOn) @@ -211,12 +219,12 @@ class CallingThreadDispatcher(_app: ActorSystem, val name: String = "calling-thr true } catch { case ie: InterruptedException ⇒ - app.eventStream.publish(Error(this, ie)) + log.error(ie, "Interrupted during message processing") Thread.currentThread().interrupt() intex = ie true case e ⇒ - app.eventStream.publish(Error(this, e)) + log.error(e, "Error during message processing") queue.leave false } diff --git a/akka-testkit/src/main/scala/akka/testkit/TestActorRef.scala b/akka-testkit/src/main/scala/akka/testkit/TestActorRef.scala index 8843330c03..6f03df59b2 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestActorRef.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestActorRef.scala @@ -10,6 +10,8 @@ import com.eaio.uuid.UUID import akka.actor.Props._ import akka.actor.ActorSystem import java.util.concurrent.atomic.AtomicLong +import akka.event.EventStream +import akka.dispatch.{ DefaultDispatcherPrerequisites, DispatcherPrerequisites, Mailbox } /** * This special ActorRef is exclusively for use during unit testing in a single-threaded environment. 
Therefore, it @@ -19,8 +21,13 @@ import java.util.concurrent.atomic.AtomicLong * @author Roland Kuhn * @since 1.1 */ -class TestActorRef[T <: Actor](_app: ActorSystem, _props: Props, _supervisor: ActorRef, name: String) - extends LocalActorRef(_app, _props.withDispatcher(new CallingThreadDispatcher(_app)), _supervisor, _supervisor.path / name, false) { +class TestActorRef[T <: Actor]( + _system: ActorSystemImpl, + _prerequisites: DispatcherPrerequisites, + _props: Props, + _supervisor: ActorRef, + name: String) + extends LocalActorRef(_system, _props.withDispatcher(new CallingThreadDispatcher(_prerequisites)), _supervisor, _supervisor.path / name, false) { /** * Directly inject messages into actor receive behavior. Any exceptions * thrown will be available to you, while still being able to use @@ -48,21 +55,21 @@ object TestActorRef { "$" + akka.util.Helpers.base64(l) } - def apply[T <: Actor](factory: ⇒ T)(implicit app: ActorSystem): TestActorRef[T] = apply[T](Props(factory), randomName) + def apply[T <: Actor](factory: ⇒ T)(implicit system: ActorSystem): TestActorRef[T] = apply[T](Props(factory), randomName) - def apply[T <: Actor](factory: ⇒ T, name: String)(implicit app: ActorSystem): TestActorRef[T] = apply[T](Props(factory), name) + def apply[T <: Actor](factory: ⇒ T, name: String)(implicit system: ActorSystem): TestActorRef[T] = apply[T](Props(factory), name) - def apply[T <: Actor](props: Props)(implicit app: ActorSystem): TestActorRef[T] = apply[T](props, randomName) + def apply[T <: Actor](props: Props)(implicit system: ActorSystem): TestActorRef[T] = apply[T](props, randomName) - def apply[T <: Actor](props: Props, name: String)(implicit app: ActorSystem): TestActorRef[T] = apply[T](props, app.guardian, name) + def apply[T <: Actor](props: Props, name: String)(implicit system: ActorSystem): TestActorRef[T] = + apply[T](props, system.asInstanceOf[ActorSystemImpl].guardian, name) - def apply[T <: Actor](props: Props, supervisor: ActorRef, name: 
String)(implicit app: ActorSystem): TestActorRef[T] = { - new TestActorRef(app, props, supervisor, name) - } + def apply[T <: Actor](props: Props, supervisor: ActorRef, name: String)(implicit system: ActorSystem): TestActorRef[T] = + new TestActorRef(system.asInstanceOf[ActorSystemImpl], system.dispatcherFactory.prerequisites, props, supervisor, name) - def apply[T <: Actor](implicit m: Manifest[T], app: ActorSystem): TestActorRef[T] = apply[T](randomName) + def apply[T <: Actor](implicit m: Manifest[T], system: ActorSystem): TestActorRef[T] = apply[T](randomName) - def apply[T <: Actor](name: String)(implicit m: Manifest[T], app: ActorSystem): TestActorRef[T] = apply[T](Props({ + def apply[T <: Actor](name: String)(implicit m: Manifest[T], system: ActorSystem): TestActorRef[T] = apply[T](Props({ import ReflectiveAccess.{ createInstance, noParams, noArgs } createInstance[T](m.erasure, noParams, noArgs) match { case Right(value) ⇒ value diff --git a/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala b/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala index 5b37d0573a..22c10271b7 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala @@ -25,15 +25,15 @@ object TestBarrier { class TestBarrier(count: Int) { private val barrier = new CyclicBarrier(count) - def await()(implicit app: ActorSystem): Unit = await(TestBarrier.DefaultTimeout) + def await()(implicit system: ActorSystem): Unit = await(TestBarrier.DefaultTimeout) - def await(timeout: Duration)(implicit app: ActorSystem) { + def await(timeout: Duration)(implicit system: ActorSystem) { try { barrier.await(timeout.dilated.toNanos, TimeUnit.NANOSECONDS) } catch { case e: TimeoutException ⇒ throw new TestBarrierTimeoutException("Timeout of %s and time factor of %s" - format (timeout.toString, app.AkkaConfig.TestTimeFactor)) + format (timeout.toString, TestKitExtension(system).settings.TestTimeFactor)) } } diff --git 
a/akka-testkit/src/main/scala/akka/testkit/TestEventListener.scala b/akka-testkit/src/main/scala/akka/testkit/TestEventListener.scala index 27f9574b43..675bdfe8c1 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestEventListener.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestEventListener.scala @@ -79,42 +79,38 @@ abstract class EventFilter(occurrences: Int) { * Apply this filter while executing the given code block. Care is taken to * remove the filter when the block is finished or aborted. */ - def intercept[T](code: ⇒ T)(implicit app: ActorSystem): T = { - app.eventStream publish TestEvent.Mute(this) + def intercept[T](code: ⇒ T)(implicit system: ActorSystem): T = { + system.eventStream publish TestEvent.Mute(this) + val testKitExtension = TestKitExtension(system) + val leeway = testKitExtension.settings.TestEventFilterLeeway try { val result = code - if (!awaitDone(app.AkkaConfig.TestEventFilterLeeway)) + if (!awaitDone(leeway)) if (todo > 0) - throw new AssertionError("Timeout (" + app.AkkaConfig.TestEventFilterLeeway + ") waiting for " + todo + " messages on " + this) + throw new AssertionError("Timeout (" + leeway + ") waiting for " + todo + " messages on " + this) else throw new AssertionError("Received " + (-todo) + " messages too many on " + this) result - } finally app.eventStream publish TestEvent.UnMute(this) + } finally system.eventStream publish TestEvent.UnMute(this) } /* * these default values are just there for easier subclassing */ - protected val source: Option[AnyRef] = None + protected val source: Option[String] = None protected val message: Either[String, Regex] = Left("") protected val complete: Boolean = false /** * internal implementation helper, no guaranteed API */ - protected def doMatch(src: AnyRef, msg: Any) = { + protected def doMatch(src: String, msg: Any) = { val msgstr = if (msg != null) msg.toString else "null" - (source.isDefined && sourceMatch(src) || source.isEmpty) && + (source.isDefined && source.get == src 
|| source.isEmpty) && (message match { case Left(s) ⇒ if (complete) msgstr == s else msgstr.startsWith(s) case Right(p) ⇒ p.findFirstIn(msgstr).isDefined }) } - private def sourceMatch(src: AnyRef) = { - source.get match { - case c: Class[_] ⇒ c isInstance src - case s ⇒ src == s - } - } } /** @@ -151,7 +147,7 @@ object EventFilter { * `null` does NOT work (passing `null` disables the * source filter).'' */ - def apply[A <: Throwable: Manifest](message: String = null, source: AnyRef = null, start: String = "", pattern: String = null, occurrences: Int = Int.MaxValue): EventFilter = + def apply[A <: Throwable: Manifest](message: String = null, source: String = null, start: String = "", pattern: String = null, occurrences: Int = Int.MaxValue): EventFilter = ErrorFilter(manifest[A].erasure, Option(source), if (message ne null) Left(message) else Option(pattern) map (new Regex(_)) toRight start, message ne null)(occurrences) @@ -170,7 +166,7 @@ object EventFilter { * `null` does NOT work (passing `null` disables the * source filter).'' */ - def warning(message: String = null, source: AnyRef = null, start: String = "", pattern: String = null, occurrences: Int = Int.MaxValue): EventFilter = + def warning(message: String = null, source: String = null, start: String = "", pattern: String = null, occurrences: Int = Int.MaxValue): EventFilter = WarningFilter(Option(source), if (message ne null) Left(message) else Option(pattern) map (new Regex(_)) toRight start, message ne null)(occurrences) @@ -189,7 +185,7 @@ object EventFilter { * `null` does NOT work (passing `null` disables the * source filter).'' */ - def info(message: String = null, source: AnyRef = null, start: String = "", pattern: String = null, occurrences: Int = Int.MaxValue): EventFilter = + def info(message: String = null, source: String = null, start: String = "", pattern: String = null, occurrences: Int = Int.MaxValue): EventFilter = InfoFilter(Option(source), if (message ne null) Left(message) else 
Option(pattern) map (new Regex(_)) toRight start, message ne null)(occurrences) @@ -208,7 +204,7 @@ object EventFilter { * `null` does NOT work (passing `null` disables the * source filter).'' */ - def debug(message: String = null, source: AnyRef = null, start: String = "", pattern: String = null, occurrences: Int = Int.MaxValue): EventFilter = + def debug(message: String = null, source: String = null, start: String = "", pattern: String = null, occurrences: Int = Int.MaxValue): EventFilter = DebugFilter(Option(source), if (message ne null) Left(message) else Option(pattern) map (new Regex(_)) toRight start, message ne null)(occurrences) @@ -244,7 +240,7 @@ object EventFilter { */ case class ErrorFilter( throwable: Class[_], - override val source: Option[AnyRef], + override val source: Option[String], override val message: Either[String, Regex], override val complete: Boolean)(occurrences: Int) extends EventFilter(occurrences) { @@ -272,7 +268,7 @@ case class ErrorFilter( * @param complete * whether the event’s message must match the given message string or pattern completely */ - def this(throwable: Class[_], source: AnyRef, message: String, pattern: Boolean, complete: Boolean, occurrences: Int) = + def this(throwable: Class[_], source: String, message: String, pattern: Boolean, complete: Boolean, occurrences: Int) = this(throwable, Option(source), if (message eq null) Left("") else if (pattern) Right(new Regex(message)) @@ -295,7 +291,7 @@ case class ErrorFilter( * If you want to match all Warning events, the most efficient is to use Left(""). 
*/ case class WarningFilter( - override val source: Option[AnyRef], + override val source: Option[String], override val message: Either[String, Regex], override val complete: Boolean)(occurrences: Int) extends EventFilter(occurrences) { @@ -321,7 +317,7 @@ case class WarningFilter( * @param complete * whether the event’s message must match the given message string or pattern completely */ - def this(source: AnyRef, message: String, pattern: Boolean, complete: Boolean, occurrences: Int) = + def this(source: String, message: String, pattern: Boolean, complete: Boolean, occurrences: Int) = this(Option(source), if (message eq null) Left("") else if (pattern) Right(new Regex(message)) @@ -338,7 +334,7 @@ case class WarningFilter( * If you want to match all Info events, the most efficient is to use Left(""). */ case class InfoFilter( - override val source: Option[AnyRef], + override val source: Option[String], override val message: Either[String, Regex], override val complete: Boolean)(occurrences: Int) extends EventFilter(occurrences) { @@ -364,7 +360,7 @@ case class InfoFilter( * @param complete * whether the event’s message must match the given message string or pattern completely */ - def this(source: AnyRef, message: String, pattern: Boolean, complete: Boolean, occurrences: Int) = + def this(source: String, message: String, pattern: Boolean, complete: Boolean, occurrences: Int) = this(Option(source), if (message eq null) Left("") else if (pattern) Right(new Regex(message)) @@ -381,7 +377,7 @@ case class InfoFilter( * If you want to match all Debug events, the most efficient is to use Left(""). 
*/ case class DebugFilter( - override val source: Option[AnyRef], + override val source: Option[String], override val message: Either[String, Regex], override val complete: Boolean)(occurrences: Int) extends EventFilter(occurrences) { @@ -407,7 +403,7 @@ case class DebugFilter( * @param complete * whether the event’s message must match the given message string or pattern completely */ - def this(source: AnyRef, message: String, pattern: Boolean, complete: Boolean, occurrences: Int) = + def this(source: String, message: String, pattern: Boolean, complete: Boolean, occurrences: Int) = this(Option(source), if (message eq null) Left("") else if (pattern) Right(new Regex(message)) @@ -452,12 +448,12 @@ class TestEventListener extends Logging.DefaultLogger { case event: LogEvent ⇒ if (!filter(event)) print(event) case DeadLetter(msg: SystemMessage, _, rcp) ⇒ if (!msg.isInstanceOf[Terminate]) { - val event = Warning(rcp, "received dead system message: " + msg) + val event = Warning(rcp.toString, "received dead system message: " + msg) if (!filter(event)) print(event) } case DeadLetter(msg, snd, rcp) ⇒ if (!msg.isInstanceOf[Terminated]) { - val event = Warning(rcp, "received dead letter from " + snd + ": " + msg) + val event = Warning(rcp.toString, "received dead letter from " + snd + ": " + msg) if (!filter(event)) print(event) } } diff --git a/akka-testkit/src/main/scala/akka/testkit/TestFSMRef.scala b/akka-testkit/src/main/scala/akka/testkit/TestFSMRef.scala index 87b6aa6765..36143965c3 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestFSMRef.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestFSMRef.scala @@ -8,6 +8,8 @@ import akka.actor._ import akka.util._ import com.eaio.uuid.UUID import akka.actor.ActorSystem +import akka.event.EventStream +import akka.dispatch.{ DispatcherPrerequisites, Mailbox } /** * This is a specialised form of the TestActorRef with support for querying and @@ -34,8 +36,13 @@ import akka.actor.ActorSystem * @author Roland Kuhn * 
@since 1.2 */ -class TestFSMRef[S, D, T <: Actor](app: ActorSystem, props: Props, supervisor: ActorRef, name: String)(implicit ev: T <:< FSM[S, D]) - extends TestActorRef(app, props, supervisor, name) { +class TestFSMRef[S, D, T <: Actor]( + system: ActorSystemImpl, + _prerequisites: DispatcherPrerequisites, + props: Props, + supervisor: ActorRef, + name: String)(implicit ev: T <:< FSM[S, D]) + extends TestActorRef(system, _prerequisites, props, supervisor, name) { private def fsm: T = underlyingActor @@ -80,9 +87,13 @@ class TestFSMRef[S, D, T <: Actor](app: ActorSystem, props: Props, supervisor: A object TestFSMRef { - def apply[S, D, T <: Actor](factory: ⇒ T)(implicit ev: T <:< FSM[S, D], app: ActorSystem): TestFSMRef[S, D, T] = - new TestFSMRef(app, Props(creator = () ⇒ factory), app.guardian, TestActorRef.randomName) + def apply[S, D, T <: Actor](factory: ⇒ T)(implicit ev: T <:< FSM[S, D], system: ActorSystem): TestFSMRef[S, D, T] = { + val impl = system.asInstanceOf[ActorSystemImpl] + new TestFSMRef(impl, system.dispatcherFactory.prerequisites, Props(creator = () ⇒ factory), impl.guardian, TestActorRef.randomName) + } - def apply[S, D, T <: Actor](factory: ⇒ T, name: String)(implicit ev: T <:< FSM[S, D], app: ActorSystem): TestFSMRef[S, D, T] = - new TestFSMRef(app, Props(creator = () ⇒ factory), app.guardian, name) + def apply[S, D, T <: Actor](factory: ⇒ T, name: String)(implicit ev: T <:< FSM[S, D], system: ActorSystem): TestFSMRef[S, D, T] = { + val impl = system.asInstanceOf[ActorSystemImpl] + new TestFSMRef(impl, system.dispatcherFactory.prerequisites, Props(creator = () ⇒ factory), impl.guardian, name) + } } diff --git a/akka-testkit/src/main/scala/akka/testkit/TestKit.scala b/akka-testkit/src/main/scala/akka/testkit/TestKit.scala index 0815942e81..88548e9cb2 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestKit.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestKit.scala @@ -71,16 +71,17 @@ class TestActor(queue: 
BlockingDeque[TestActor.Message]) extends Actor { * * It should be noted that for CI servers and the like all maximum Durations * are scaled using their Duration.dilated method, which uses the - * Duration.timeFactor settable via akka.conf entry "akka.test.timefactor". + * TestKitExtension.Settings.TestTimeFactor settable via akka.conf entry "akka.test.timefactor". * * @author Roland Kuhn * @since 1.1 */ -class TestKit(_app: ActorSystem) { +class TestKit(_system: ActorSystem) { import TestActor.{ Message, RealMessage, NullMessage } - implicit val app = _app + implicit val system = _system + val testKitExtension = TestKitExtension(system) private val queue = new LinkedBlockingDeque[Message]() private[akka] var lastMessage: Message = NullMessage @@ -91,8 +92,12 @@ class TestKit(_app: ActorSystem) { * ActorRef of the test actor. Access is provided to enable e.g. * registration as message target. */ - val testActor: ActorRef = app.systemActorOf(Props(new TestActor(queue)).copy(dispatcher = new CallingThreadDispatcher(app)), - "testActor" + TestKit.testActorId.incrementAndGet) + val testActor: ActorRef = { + val impl = system.asInstanceOf[ActorSystemImpl] + impl.systemActorOf(Props(new TestActor(queue)) + .copy(dispatcher = new CallingThreadDispatcher(system.dispatcherFactory.prerequisites)), + "testActor" + TestKit.testActorId.incrementAndGet) + } private var end: Duration = Duration.Undefined @@ -121,9 +126,9 @@ class TestKit(_app: ActorSystem) { /** * Obtain time remaining for execution of the innermost enclosing `within` * block or missing that it returns the properly dilated default for this - * case from AkkaConfig (key "akka.test.single-expect-default"). + * case from settings (key "akka.test.single-expect-default"). 
*/ - def remaining: Duration = if (end == Duration.Undefined) app.AkkaConfig.SingleExpectDefaultTimeout.dilated else end - now + def remaining: Duration = if (end == Duration.Undefined) testKitExtension.settings.SingleExpectDefaultTimeout.dilated else end - now /** * Query queue status. @@ -137,7 +142,8 @@ class TestKit(_app: ActorSystem) { * If no timeout is given, take it from the innermost enclosing `within` * block. * - * Note that the timeout is scaled using Duration.timeFactor. + * Note that the timeout is scaled using Duration.dilated, + * which uses the configuration entry "akka.test.timefactor". */ def awaitCond(p: ⇒ Boolean, max: Duration = Duration.Undefined, interval: Duration = 100.millis) { val _max = if (max eq Duration.Undefined) remaining else max.dilated @@ -161,8 +167,8 @@ class TestKit(_app: ActorSystem) { * take maximum wait times are available in a version which implicitly uses * the remaining time governed by the innermost enclosing `within` block. * - * Note that the max Duration is scaled by Duration.timeFactor while the min - * Duration is not. + * Note that the timeout is scaled using Duration.dilated, which uses the + * configuration entry "akka.test.timefactor", while the min Duration is not. * *
    * val ret = within(50 millis) {
@@ -531,7 +537,8 @@ object TestKit {
    * If no timeout is given, take it from the innermost enclosing `within`
    * block.
    *
-   * Note that the timeout is scaled using Duration.timeFactor.
+   * Note that the timeout is scaled using Duration.dilated, which uses the
+   * configuration entry "akka.test.timefactor".
    */
   def awaitCond(p: ⇒ Boolean, max: Duration, interval: Duration = 100.millis, noThrow: Boolean = false): Boolean = {
     val stop = now + max
@@ -558,6 +565,14 @@ object TestKit {
    */
   def now: Duration = System.nanoTime().nanos
 
+  /**
+   * Java API. Scale timeouts (durations) during tests with the configured
+   * 'akka.test.timefactor'.
+   */
+  def dilated(duration: Duration, system: ActorSystem): Duration = {
+    duration * TestKitExtension(system).settings.TestTimeFactor
+  }
+
 }
 
 /**
@@ -594,9 +609,9 @@ class TestProbe(_application: ActorSystem) extends TestKit(_application) {
 }
 
 object TestProbe {
-  def apply()(implicit app: ActorSystem) = new TestProbe(app)
+  def apply()(implicit system: ActorSystem) = new TestProbe(system)
 }
 
 trait ImplicitSender { this: TestKit ⇒
-  implicit def implicitSenderTestActor = testActor
+  implicit def self = testActor
 }
diff --git a/akka-testkit/src/main/scala/akka/testkit/TestKitExtension.scala b/akka-testkit/src/main/scala/akka/testkit/TestKitExtension.scala
new file mode 100644
index 0000000000..d1ef60065f
--- /dev/null
+++ b/akka-testkit/src/main/scala/akka/testkit/TestKitExtension.scala
@@ -0,0 +1,55 @@
+/**
+ * Copyright (C) 2009-2011 Typesafe Inc. 
+ */
+package akka.testkit
+
+import akka.actor.ActorSystem
+import akka.actor.ExtensionKey
+import akka.actor.Extension
+import akka.actor.ActorSystemImpl
+import com.typesafe.config.Config
+import com.typesafe.config.ConfigFactory
+import com.typesafe.config.ConfigParseOptions
+import com.typesafe.config.ConfigRoot
+import akka.util.Duration
+import java.util.concurrent.TimeUnit.MILLISECONDS
+
+object TestKitExtensionKey extends ExtensionKey[TestKitExtension]
+
+object TestKitExtension {
+  def apply(system: ActorSystem): TestKitExtension = {
+    if (!system.hasExtension(TestKitExtensionKey)) {
+      system.registerExtension(new TestKitExtension)
+    }
+    system.extension(TestKitExtensionKey)
+  }
+
+  class Settings(cfg: Config) {
+    private def referenceConfig: Config =
+      ConfigFactory.parseResource(classOf[ActorSystem], "/akka-testkit-reference.conf",
+        ConfigParseOptions.defaults.setAllowMissing(false))
+    val config: ConfigRoot = ConfigFactory.emptyRoot("akka-testkit").withFallback(cfg).withFallback(referenceConfig).resolve()
+
+    import config._
+
+    val TestTimeFactor = getDouble("akka.test.timefactor")
+    val SingleExpectDefaultTimeout = Duration(getMilliseconds("akka.test.single-expect-default"), MILLISECONDS)
+    val TestEventFilterLeeway = Duration(getMilliseconds("akka.test.filter-leeway"), MILLISECONDS)
+
+  }
+}
+
+class TestKitExtension extends Extension[TestKitExtension] {
+  import TestKitExtension._
+  @volatile
+  private var _settings: Settings = _
+
+  def key = TestKitExtensionKey
+
+  def init(system: ActorSystemImpl) {
+    _settings = new Settings(system.applicationConfig)
+  }
+
+  def settings: Settings = _settings
+
+}
\ No newline at end of file
diff --git a/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala b/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala
index f8a0cf3578..2cfb2edc85 100644
--- a/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala
+++ b/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala
@@ -21,10 +21,10 @@ class TestLatchNoTimeoutException(message: String) extends RuntimeException(mess
 object TestLatch {
   val DefaultTimeout = Duration(5, TimeUnit.SECONDS)
 
-  def apply(count: Int = 1)(implicit app: ActorSystem) = new TestLatch(count)
+  def apply(count: Int = 1)(implicit system: ActorSystem) = new TestLatch(count)
 }
 
-class TestLatch(count: Int = 1)(implicit app: ActorSystem) {
+class TestLatch(count: Int = 1)(implicit system: ActorSystem) {
   private var latch = new CountDownLatch(count)
 
   def countDown() = latch.countDown()
@@ -34,9 +34,10 @@ class TestLatch(count: Int = 1)(implicit app: ActorSystem) {
   def await(): Boolean = await(TestLatch.DefaultTimeout)
 
   def await(timeout: Duration): Boolean = {
+    val testKitExtension = TestKitExtension(system)
     val opened = latch.await(timeout.dilated.toNanos, TimeUnit.NANOSECONDS)
     if (!opened) throw new TestLatchTimeoutException(
-      "Timeout of %s with time factor of %s" format (timeout.toString, app.AkkaConfig.TestTimeFactor))
+      "Timeout of %s with time factor of %s" format (timeout.toString, testKitExtension.settings.TestTimeFactor))
     opened
   }
 
@@ -44,9 +45,10 @@ class TestLatch(count: Int = 1)(implicit app: ActorSystem) {
    * Timeout is expected. Throws exception if latch is opened before timeout.
    */
   def awaitTimeout(timeout: Duration = TestLatch.DefaultTimeout) = {
+    val testKitExtension = TestKitExtension(system)
     val opened = latch.await(timeout.dilated.toNanos, TimeUnit.NANOSECONDS)
     if (opened) throw new TestLatchNoTimeoutException(
-      "Latch opened before timeout of %s with time factor of %s" format (timeout.toString, app.AkkaConfig.TestTimeFactor))
+      "Latch opened before timeout of %s with time factor of %s" format (timeout.toString, testKitExtension.settings.TestTimeFactor))
     opened
   }
 
diff --git a/akka-testkit/src/main/scala/akka/testkit/package.scala b/akka-testkit/src/main/scala/akka/testkit/package.scala
index 8a96b1b839..86b404c67d 100644
--- a/akka-testkit/src/main/scala/akka/testkit/package.scala
+++ b/akka-testkit/src/main/scala/akka/testkit/package.scala
@@ -5,25 +5,47 @@ import akka.util.Duration
 import java.util.concurrent.TimeUnit.MILLISECONDS
 
 package object testkit {
-  def filterEvents[T](eventFilters: Iterable[EventFilter])(block: ⇒ T)(implicit app: ActorSystem): T = {
+  def filterEvents[T](eventFilters: Iterable[EventFilter])(block: ⇒ T)(implicit system: ActorSystem): T = {
     def now = System.currentTimeMillis
 
-    app.eventStream.publish(TestEvent.Mute(eventFilters.toSeq))
+    system.eventStream.publish(TestEvent.Mute(eventFilters.toSeq))
     try {
       val result = block
 
-      val stop = now + app.AkkaConfig.TestEventFilterLeeway.toMillis
-      val failed = eventFilters filterNot (_.awaitDone(Duration(stop - now, MILLISECONDS))) map ("Timeout (" + app.AkkaConfig.TestEventFilterLeeway + ") waiting for " + _)
+      val testKitExtension = TestKitExtension(system)
+      val stop = now + testKitExtension.settings.TestEventFilterLeeway.toMillis
+      val failed = eventFilters filterNot (_.awaitDone(Duration(stop - now, MILLISECONDS))) map ("Timeout (" + testKitExtension.settings.TestEventFilterLeeway + ") waiting for " + _)
       if (failed.nonEmpty)
         throw new AssertionError("Filter completion error:\n" + failed.mkString("\n"))
 
       result
     } finally {
-      app.eventStream.publish(TestEvent.UnMute(eventFilters.toSeq))
+      system.eventStream.publish(TestEvent.UnMute(eventFilters.toSeq))
     }
   }
 
-  def filterEvents[T](eventFilters: EventFilter*)(block: ⇒ T)(implicit app: ActorSystem): T = filterEvents(eventFilters.toSeq)(block)
+  def filterEvents[T](eventFilters: EventFilter*)(block: ⇒ T)(implicit system: ActorSystem): T = filterEvents(eventFilters.toSeq)(block)
 
-  def filterException[T <: Throwable](block: ⇒ Unit)(implicit app: ActorSystem, m: Manifest[T]): Unit = EventFilter[T]() intercept (block)
+  def filterException[T <: Throwable](block: ⇒ Unit)(implicit system: ActorSystem, m: Manifest[T]): Unit = EventFilter[T]() intercept (block)
+
+  /**
+   * Scala API. Scale timeouts (durations) during tests with the configured
+   * 'akka.test.timefactor'.
+   * Implicit conversion to add dilated function to Duration.
+   * import akka.util.duration._
+   * import akka.testkit._
+   * 10.milliseconds.dilated
+   *
+   * Corresponding Java API is available in TestKit.dilated
+   */
+  implicit def duration2TestDuration(duration: Duration) = new TestDuration(duration)
+
+  /**
+   * Wrapper for implicit conversion to add dilated function to Duration.
+   */
+  class TestDuration(duration: Duration) {
+    def dilated(implicit system: ActorSystem): Duration = {
+      duration * TestKitExtension(system).settings.TestTimeFactor
+    }
+  }
 }
diff --git a/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala b/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala
index 74a9e57d3b..367efbbc88 100644
--- a/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala
+++ b/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala
@@ -3,29 +3,56 @@
  */
 package akka.testkit
 
-import akka.config.Configuration
-import org.scalatest.{ WordSpec, BeforeAndAfterAll }
+import org.scalatest.{ WordSpec, BeforeAndAfterAll, Tag }
 import org.scalatest.matchers.MustMatchers
-import akka.actor.ActorSystem
+import akka.actor.{ ActorSystem, ActorSystemImpl }
 import akka.actor.{ Actor, ActorRef, Props }
 import akka.dispatch.MessageDispatcher
 import akka.event.{ Logging, LoggingAdapter }
 import akka.util.duration._
 import akka.dispatch.FutureTimeoutException
+import com.typesafe.config.Config
+import com.typesafe.config.ConfigFactory
+import com.typesafe.config.ConfigParseOptions
 
-abstract class AkkaSpec(_application: ActorSystem = ActorSystem())
-  extends TestKit(_application) with WordSpec with MustMatchers with BeforeAndAfterAll {
+object TimingTest extends Tag("timing")
 
-  val log: LoggingAdapter = Logging(app.eventStream, this)
+object AkkaSpec {
+  val testConf =
+    ActorSystem.DefaultConfigurationLoader.defaultConfig.withFallback(
+      ConfigFactory.parseString("""
+      akka {
+        event-handlers = ["akka.testkit.TestEventListener"]
+        loglevel = "WARNING"
+        actor {
+          default-dispatcher {
+            core-pool-size = 4
+            max-pool-size  = 32
+          }
+        }
+      }
+      """, ConfigParseOptions.defaults))
+
+  def mapToConfig(map: Map[String, Any]): Config = {
+    import scala.collection.JavaConverters._
+    ConfigFactory.parseMap(map.asJava)
+  }
+
+}
+
+abstract class AkkaSpec(_system: ActorSystem = ActorSystem(getClass.getSimpleName, AkkaSpec.testConf))
+  extends TestKit(_system) with WordSpec with MustMatchers with BeforeAndAfterAll {
+
+  val log: LoggingAdapter = Logging(system, this.getClass)
 
   final override def beforeAll {
     atStartup()
   }
 
   final override def afterAll {
-    app.stop()
-    try app.terminationFuture.await(5 seconds) catch {
-      case _: FutureTimeoutException ⇒ app.log.warning("failed to stop within 5 seconds")
+    system.stop()
+    try system.asInstanceOf[ActorSystemImpl].terminationFuture.await(5 seconds) catch {
+      case _: FutureTimeoutException ⇒ system.log.warning("failed to stop within 5 seconds")
     }
     atTermination()
   }
@@ -34,9 +61,15 @@ abstract class AkkaSpec(_application: ActorSystem = ActorSystem())
 
   protected def atTermination() {}
 
-  def this(config: Configuration) = this(new ActorSystem(getClass.getSimpleName, ActorSystem.defaultConfig ++ config))
+  def this(config: Config) = this(ActorSystem(getClass.getSimpleName, config.withFallback(AkkaSpec.testConf)))
 
-  def actorOf(props: Props): ActorRef = app.actorOf(props)
+  def this(s: String) = this(ConfigFactory.parseString(s, ConfigParseOptions.defaults))
+
+  def this(configMap: Map[String, _]) = {
+    this(AkkaSpec.mapToConfig(configMap))
+  }
+
+  def actorOf(props: Props): ActorRef = system.actorOf(props)
 
   def actorOf[T <: Actor](clazz: Class[T]): ActorRef = actorOf(Props(clazz))
 
@@ -53,16 +86,18 @@ abstract class AkkaSpec(_application: ActorSystem = ActorSystem())
 class AkkaSpecSpec extends WordSpec with MustMatchers {
   "An AkkaSpec" must {
     "terminate all actors" in {
-      import ActorSystem.defaultConfig
-      val app = ActorSystem("test", defaultConfig ++ Configuration(
+      import ActorSystem.DefaultConfigurationLoader.defaultConfig
+      import scala.collection.JavaConverters._
+      val conf = Map(
         "akka.actor.debug.lifecycle" -> true, "akka.actor.debug.event-stream" -> true,
-        "akka.loglevel" -> "DEBUG", "akka.stdout-loglevel" -> "DEBUG"))
-      val spec = new AkkaSpec(app) {
-        val ref = Seq(testActor, app.actorOf(Props.empty, "name"))
+        "akka.loglevel" -> "DEBUG", "akka.stdout-loglevel" -> "DEBUG")
+      val system = ActorSystem("test", ConfigFactory.parseMap(conf.asJava).withFallback(defaultConfig))
+      val spec = new AkkaSpec(system) {
+        val ref = Seq(testActor, system.actorOf(Props.empty, "name"))
       }
-      spec.ref foreach (_ must not be 'shutdown)
-      app.stop()
-      spec.awaitCond(spec.ref forall (_.isShutdown), 2 seconds)
+      spec.ref foreach (_.isTerminated must not be true)
+      system.stop()
+      spec.awaitCond(spec.ref forall (_.isTerminated), 2 seconds)
     }
   }
 }
diff --git a/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala b/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala
index 3aa210d2b3..a6248ff63c 100644
--- a/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala
+++ b/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala
@@ -160,7 +160,7 @@ class TestActorRefSpec extends AkkaSpec with BeforeAndAfterEach {
         expectMsgPF(5 seconds) {
           case Terminated(`a`) ⇒ true
         }
-        a must be('shutdown)
+        a.isTerminated must be(true)
         assertThread
       }
     }
@@ -170,8 +170,8 @@ class TestActorRefSpec extends AkkaSpec with BeforeAndAfterEach {
         counter = 2
 
         val boss = TestActorRef(Props(new TActor {
-
-          val ref = new TestActorRef(app, Props(new TActor {
+          val impl = system.asInstanceOf[ActorSystemImpl]
+          val ref = new TestActorRef(impl, impl.dispatcherFactory.prerequisites, Props(new TActor {
             def receiveT = { case _ ⇒ }
             override def preRestart(reason: Throwable, msg: Option[Any]) { counter -= 1 }
             override def postRestart(reason: Throwable) { counter -= 1 }
@@ -224,7 +224,7 @@ class TestActorRefSpec extends AkkaSpec with BeforeAndAfterEach {
     "proxy apply for the underlying actor" in {
       val ref = TestActorRef[WorkerActor]
       ref("work")
-      ref.isShutdown must be(true)
+      ref.isTerminated must be(true)
     }
 
   }
diff --git a/akka-testkit/src/test/scala/akka/testkit/TestTimeSpec.scala b/akka-testkit/src/test/scala/akka/testkit/TestTimeSpec.scala
index 3dd2415e20..ff02a6c82b 100644
--- a/akka-testkit/src/test/scala/akka/testkit/TestTimeSpec.scala
+++ b/akka-testkit/src/test/scala/akka/testkit/TestTimeSpec.scala
@@ -3,21 +3,21 @@ package akka.testkit
 import org.scalatest.matchers.MustMatchers
 import org.scalatest.{ BeforeAndAfterEach, WordSpec }
 import akka.util.Duration
-import akka.config.Configuration
+import com.typesafe.config.Config
 
 @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
-class TestTimeSpec extends AkkaSpec(Configuration("akka.test.timefactor" -> 2.0)) with BeforeAndAfterEach {
+class TestTimeSpec extends AkkaSpec(Map("akka.test.timefactor" -> 2.0)) with BeforeAndAfterEach {
 
   "A TestKit" must {
 
-    "correctly dilate times" in {
+    "correctly dilate times" taggedAs TimingTest in {
       val probe = TestProbe()
       val now = System.nanoTime
       intercept[AssertionError] { probe.awaitCond(false, Duration("1 second")) }
       val diff = System.nanoTime - now
-      val target = (1000000000l * app.AkkaConfig.TestTimeFactor).toLong
+      val target = (1000000000l * testKitExtension.settings.TestTimeFactor).toLong
       diff must be > (target - 300000000l)
-      diff must be < (target + 1000000000l)
+      diff must be < (target + 300000000l)
     }
 
   }
diff --git a/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java b/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java
index f73550f16a..ca8fe597f7 100644
--- a/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java
+++ b/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java
@@ -24,7 +24,7 @@
 
 // public class Pi {
 
-//   private static final ActorSystem app = new ActorSystem();
+//   private static final ActorSystem system = new ActorSystem();
 
 //   public static void main(String[] args) throws Exception {
 //     Pi pi = new Pi();
@@ -109,11 +109,11 @@
 
 //       LinkedList workers = new LinkedList();
 //       for (int i = 0; i < nrOfWorkers; i++) {
-//           ActorRef worker = app.actorOf(Worker.class);
+//           ActorRef worker = system.actorOf(Worker.class);
 //           workers.add(worker);
 //       }
 
-//       router = app.actorOf(new RoutedProps().withRoundRobinRouter().withLocalConnections(workers), "pi");
+//       router = system.actorOf(new RoutedProps().withRoundRobinRouter().withLocalConnections(workers), "pi");
 //     }
 
 //     // message handler
@@ -167,7 +167,7 @@
 //     final CountDownLatch latch = new CountDownLatch(1);
 
 //     // create the master
-//     ActorRef master = app.actorOf(new UntypedActorFactory() {
+//     ActorRef master = system.actorOf(new UntypedActorFactory() {
 //       public UntypedActor create() {
 //         return new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch);
 //       }
diff --git a/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala b/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala
index 3ea86ae6d6..836f766e12 100644
--- a/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala
+++ b/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala
@@ -12,7 +12,7 @@
 
 // object Pi extends App {
 
-//   val app = ActorSystem()
+//   val system = ActorSystem()
 
 //   calculate(nrOfWorkers = 4, nrOfElements = 10000, nrOfMessages = 10000)
 
@@ -56,10 +56,10 @@
 //     var start: Long = _
 
 //     // create the workers
-//     val workers = Vector.fill(nrOfWorkers)(app.actorOf[Worker])
+//     val workers = Vector.fill(nrOfWorkers)(system.actorOf[Worker])
 
 //     // wrap them with a load-balancing router
-//     val router = app.actorOf(RoutedProps().withRoundRobinRouter.withLocalConnections(workers), "pi")
+//     val router = system.actorOf(RoutedProps().withRoundRobinRouter.withLocalConnections(workers), "pi")
 
 //     // message handler
 //     def receive = {
@@ -102,7 +102,7 @@
 //     val latch = new CountDownLatch(1)
 
 //     // create the master
-//     val master = app.actorOf(new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch))
+//     val master = system.actorOf(new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch))
 
 //     // start the calculation
 //     master ! Calculate
diff --git a/config/akka-reference.conf b/config/akka-reference.conf
deleted file mode 100644
index 695bdb04cf..0000000000
--- a/config/akka-reference.conf
+++ /dev/null
@@ -1,299 +0,0 @@
-##############################
-# Akka Reference Config File #
-##############################
-
-# This the reference config file has all the default settings.
-# All these could be removed with no visible effect.
-# Modify as needed.
-# This file is imported in the 'akka.conf' file. Make your edits/overrides there.
-
-akka {
-  version = "2.0-SNAPSHOT" # Akka version, checked against the runtime version of Akka.
-
-  enabled-modules = []     # Comma separated list of the enabled modules. Options: ["cluster", "camel", "http"]
-
-  time-unit = "seconds"    # Time unit for all timeout properties throughout the config
-
-  event-handlers = ["akka.event.Logging$DefaultLogger"] # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT)
-  loglevel        = "WARNING"                           # Options: ERROR, WARNING, INFO, DEBUG
-                                                        # this level is used by the configured loggers (see "event-handlers") as soon
-                                                        # as they have been started; before that, see "stdout-loglevel"
-  stdout-loglevel = "WARNING"                           # Loglevel for the very basic logger activated during AkkaApplication startup
-
-  event-handler-dispatcher {
-      type = "Dispatcher"              # Must be one of the following
-                                       # Dispatcher, (BalancingDispatcher, only valid when all actors using it are of the same type),
-                                       # A FQCN to a class inheriting MessageDispatcherConfigurator with a no-arg visible constructor
-      name = "EventHandlerDispatcher"  # Optional, will be a generated UUID if omitted
-      keep-alive-time = 60             # Keep alive time for threads
-      core-pool-size = 1               # No of core threads
-      max-pool-size  = 8               # Max no of threads
-      executor-bounds = -1             # Makes the Executor bounded, -1 is unbounded
-      task-queue-size = -1             # Specifies the bounded capacity of the task queue (< 1 == unbounded)
-      task-queue-type = "linked"       # Specifies which type of task queue will be used, can be "array" or "linked" (default)
-      allow-core-timeout = on          # Allow core threads to time out
-      throughput = 5                   # Throughput for Dispatcher, set to 1 for complete fairness
-      throughput-deadline-time = -1    # Throughput deadline for Dispatcher, set to 0 or negative for no deadline
-      mailbox-capacity = -1            # If negative (or zero) then an unbounded mailbox is used (default)
-                                       # If positive then a bounded mailbox is used and the capacity is set using the property
-                                       # NOTE: setting a mailbox to 'blocking' can be a bit dangerous, could lead to deadlock, use with care
-                                       # The following are only used for Dispatcher and only if mailbox-capacity > 0
-      mailbox-push-timeout-time = 10   # Specifies the timeout to add a new message to a mailbox that is full - negative number means infinite timeout
-                                       #       (in unit defined by the time-unit property)
-    }
-
-  # These boot classes are loaded (and created) automatically when the Akka Microkernel boots up
-  #     Can be used to bootstrap your application(s)
-  #     Should be the FQN (Fully Qualified Name) of the boot class which needs to have a default constructor
-  # boot = ["sample.camel.Boot",
-  #         "sample.rest.java.Boot",
-  #         "sample.rest.scala.Boot",
-  #         "sample.security.Boot"]
-  boot = []
-
-  actor {
-    timeout = 5                     # Default timeout for Future based invocations
-                                    #    - Actor:        ask && ?
-                                    #    - UntypedActor: ask
-                                    #    - TypedActor:   methods with non-void return type
-    serialize-messages = off        # Does a deep clone of (non-primitive) messages to ensure immutability
-    throughput = 5                  # Default throughput for all Dispatcher, set to 1 for complete fairness
-    throughput-deadline-time = -1   # Default throughput deadline for all Dispatcher, set to 0 or negative for no deadline
-    dispatcher-shutdown-timeout = 1 # Using the akka.time-unit, how long dispatchers by default will wait for new actors until they shut down
-
-    deployment {
-
-      /app/service-ping {                                               # deployment id pattern
-
-        router = "round-robin"                                          # routing (load-balance) scheme to use
-                                                                        #     available: "direct", "round-robin", "random", "scatter-gather"
-                                                                        #                "least-cpu", "least-ram", "least-messages"
-                                                                        #     or:        fully qualified class name of the router class
-                                                                        #     default is "direct";
-                                                                        #     if 'replication' is used then the only available router is "direct"
-
-        nr-of-instances = 3                                             # number of actor instances in the cluster
-                                                                        #     available: positive integer (1-N) or the string "auto" for auto-scaling
-                                                                        #     default is '1'
-                                                                        #     if the "direct" router is used then this element is ignored (always '1')
-
-        #create-as {
-        #  class = "com.biz.app.MyActor"                                # FIXME document 'create-as'
-        #}
-
-        remote {
-          nodes = ["wallace:2552", "gromit:2552"]                       # A list of hostnames and ports for instantiating the remote actor instances
-                                                                        #     The format should be on "hostname:port", where:
-                                                                        #         - hostname can be either hostname or IP address the remote actor should connect to
-                                                                        #         - port should be the port for the remote server on the other node
-        }
-
-        #cluster {                                                      # defines the actor as a clustered actor
-                                                                        #     default (if omitted) is local non-clustered actor
-
-        #  preferred-nodes = ["node:node1"]                             # a list of preferred nodes for instantiating the actor instances on
-                                                                        #     defined as node name
-                                                                        #     available: "node:"
-
-
-        #  replication {                                                # use replication or not? only makes sense for a stateful actor
-
-            # FIXME should we have this config option here? If so, implement it all through.
-        #    serialize-mailbox = off                                    # should the actor mailbox be part of the serialized snapshot?
-                                                                        #     default is 'off'
-
-        #    storage = "transaction-log"                                # storage model for replication
-                                                                        #     available: "transaction-log" and "data-grid"
-                                                                        #     default is "transaction-log"
-
-        #    strategy = "write-through"                                 # guaranteees for replication
-                                                                        #     available: "write-through" and "write-behind"
-                                                                        #     default is "write-through"
-
-        #  }
-        #}
-      }
-    }
-
-    default-dispatcher {
-      type = "Dispatcher"              # Must be one of the following
-                                       # Dispatcher, (BalancingDispatcher, only valid when all actors using it are of the same type),
-                                       # A FQCN to a class inheriting MessageDispatcherConfigurator with a no-arg visible constructor
-      name = "MyDispatcher"            # Optional, will be a generated UUID if omitted
-      keep-alive-time = 60             # Keep alive time for threads
-      core-pool-size-factor = 8.0      # No of core threads ... ceil(available processors * factor)
-      max-pool-size-factor  = 8.0      # Max no of threads ... ceil(available processors * factor)
-      task-queue-size = -1             # Specifies the bounded capacity of the task queue (< 1 == unbounded)
-      task-queue-type = "linked"       # Specifies which type of task queue will be used, can be "array" or "linked" (default)
-      allow-core-timeout = on          # Allow core threads to time out
-      throughput = 5                   # Throughput for Dispatcher, set to 1 for complete fairness
-      throughput-deadline-time = -1    # Throughput deadline for Dispatcher, set to 0 or negative for no deadline
-      mailbox-capacity = -1            # If negative (or zero) then an unbounded mailbox is used (default)
-                                       # If positive then a bounded mailbox is used and the capacity is set using the property
-                                       # NOTE: setting a mailbox to 'blocking' can be a bit dangerous, could lead to deadlock, use with care
-                                       # The following are only used for Dispatcher and only if mailbox-capacity > 0
-      mailbox-push-timeout-time = 10   # Specifies the timeout to add a new message to a mailbox that is full - negative number means infinite timeout
-                                       #       (in unit defined by the time-unit property)
-    }
-
-    debug {
-      receive = off      # enable function of Actor.loggable(), which is to log any received message at DEBUG level
-      autoreceive = off  # enable DEBUG logging of all AutoReceiveMessages (Kill, PoisonPill and the like)
-      lifecycle = off    # enable DEBUG logging of actor lifecycle changes
-      fsm = off          # enable DEBUG logging of all LoggingFSMs for events, transitions and timers
-      event-stream = off # enable DEBUG logging of subscription changes on the eventStream
-    }
-
-    mailbox {
-
-      file-based {
-        directory-path = "./_mb"
-        max-items = 2147483647
-        max-size = 2147483647
-        max-items = 2147483647
-        max-age = 0
-        max-journal-size = 16777216 # 16 * 1024 * 1024
-        max-memory-size = 134217728 # 128 * 1024 * 1024
-        max-journal-overflow = 10
-        max-journal-size-absolute = 9223372036854775807
-        discard-old-when-full = on
-        keep-journal = on
-        sync-journal = off
-      }
-
-      redis {
-        hostname = "127.0.0.1"
-        port = 6379
-      }
-
-      mongodb {
-        # Any specified collection name will be used as a prefix for collections that use durable mongo mailboxes
-        uri = "mongodb://localhost/akka.mailbox" # Follow Mongo URI Spec - http://www.mongodb.org/display/DOCS/Connections
-
-        # Configurable timeouts for certain ops
-        timeout {
-          read = 3000 # number of milliseconds to wait for a read to succeed before timing out the future
-          write = 3000 # number of milliseconds to wait for a write to succeed before timing out the future
-        }
-      }
-
-      zookeeper {
-        server-addresses = "localhost:2181"
-        session-timeout = 60
-        connection-timeout = 60
-        blocking-queue = on
-      }
-
-      beanstalk {
-        hostname = "127.0.0.1"
-        port = 11300
-        reconnect-window = 5
-        message-submit-delay = 0
-        message-submit-timeout = 5
-        message-time-to-live = 120
-      }
-    }
-
-    # Entries for pluggable serializers and their bindings. If a binding for a specific class is not found,
-    # then the default serializer (Java serialization) is used.
-    #
-    # serializers {
-    #   java = "akka.serialization.JavaSerializer"
-    #   proto = "akka.testing.ProtobufSerializer"
-    #   sjson = "akka.testing.SJSONSerializer"
-    #   default = "akka.serialization.JavaSerializer"
-    # }
-
-    # serialization-bindings {
-    #   java = ["akka.serialization.SerializeSpec$Address",
-    #           "akka.serialization.MyJavaSerializableActor",
-    #           "akka.serialization.MyStatelessActorWithMessagesInMailbox",
-    #           "akka.serialization.MyActorWithProtobufMessagesInMailbox"]
-    #   sjson = ["akka.serialization.SerializeSpec$Person"]
-    #   proto = ["com.google.protobuf.Message",
-    #            "akka.actor.ProtobufProtocol$MyMessage"]
-    # }
-  }
-
-  remote {
-    # FIXME rename to transport
-    layer = "akka.cluster.netty.NettyRemoteSupport"
-
-    secure-cookie = ""                            # Generate your own with '$AKKA_HOME/scripts/generate_config_with_secure_cookie.sh'
-                                                  #     or using 'akka.util.Crypt.generateSecureCookie'
-
-    remote-daemon-ack-timeout = 30                # Timeout for ACK of cluster operations, lik checking actor out etc.
-
-    use-passive-connections = on                  # Reuse inbound connections for outbound messages
-
-    failure-detector {                            # accrual failure detection config
-      threshold = 8                               # defines the failure detector threshold
-                                                  #     A low threshold is prone to generate many wrong suspicions but ensures a
-                                                  #     quick detection in the event of a real crash. Conversely, a high threshold
-                                                  #     generates fewer mistakes but needs more time to detect actual crashes
-      max-sample-size = 1000
-    }
-
-    server {
-      port = 2552                                 # The default remote server port clients should connect to. Default is 2552 (AKKA)
-      message-frame-size = 1048576                # Increase this if you want to be able to send messages with large payloads
-      connection-timeout = 120                    # Length in time-unit
-      require-cookie = off                        # Should the remote server require that it peers share the same secure-cookie (defined in the 'remote' section)?
-      untrusted-mode = off                        # Enable untrusted mode for full security of server managed actors, allows untrusted clients to connect.
-      backlog = 4096                              # Sets the size of the connection backlog
-    }
-
-    client {
-      buffering {
-        retry-message-send-on-failure = false     # Should message buffering on remote client error be used (buffer flushed on successful reconnect)
-        capacity = -1                             # If negative (or zero) then an unbounded mailbox is used (default)
-                                                  #     If positive then a bounded mailbox is used and the capacity is set using the property
-      }
-      reconnect-delay = 5
-      read-timeout = 3600
-      message-frame-size = 1048576
-      reap-futures-delay = 5
-      reconnection-time-window = 600              # Maximum time window that a client should try to reconnect for
-    }
-  }
-
-  cluster {
-    name = "test-cluster"
-    zookeeper-server-addresses = "localhost:2181" # comma-separated list of ':' elements
-    max-time-to-wait-until-connected = 30
-    session-timeout = 60
-    connection-timeout = 60
-    include-ref-node-in-replica-set = on          # Can a replica be instantiated on the same node as the cluster reference to the actor
-                                                  #     Default: on
-    log-directory = "_akka_cluster"               # Where ZooKeeper should store the logs and data files
-
-    replication {
-      digest-type = "MAC"                         # Options: CRC32 (cheap & unsafe), MAC (expensive & secure using password)
-      password = "secret"                         # FIXME: store open in file?
-      ensemble-size = 3
-      quorum-size = 2
-      snapshot-frequency = 1000                   # The number of messages that should be logged between every actor snapshot
-      timeout = 30                                # Timeout for asyncronous (write-behind) operations
-    }
-  }
-
-  stm {
-    fair             = on     # Should global transactions be fair or non-fair (non fair yield better performance)
-    max-retries      = 1000
-    timeout          = 5      # Default timeout for blocking transactions and transaction set (in unit defined by
-                              #     the time-unit property)
-    write-skew       = true
-    blocking-allowed = false
-    interruptible    = false
-    speculative      = true
-    quick-release    = true
-    propagation      = "requires"
-    trace-level      = "none"
-  }
-
-  test {
-    timefactor = "1.0"        # factor by which to scale timeouts during tests, e.g. to account for shared build system load
-    filter-leeway = 3         # time-units EventFilter.intercept waits after the block is finished until all required messages are received
-    single-expect-default = 3 # time-units to wait in expectMsg and friends outside of within() block by default
-  }
-}
diff --git a/config/akka.conf b/config/akka.conf
index 84b9bfbbcf..64883cf7c1 100644
--- a/config/akka.conf
+++ b/config/akka.conf
@@ -1,5 +1,2 @@
-# This config imports the Akka reference configuration.
-include "akka-reference.conf"
-
 # In this file you can override any option defined in the 'akka-reference.conf' file.
 # Copy in all or parts of the 'akka-reference.conf' file and modify as you please.
diff --git a/config/akka.test.conf b/config/akka.test.conf
deleted file mode 100644
index 8e21a7d184..0000000000
--- a/config/akka.test.conf
+++ /dev/null
@@ -1,16 +0,0 @@
-# This config imports the Akka reference configuration.
-include "akka-reference.conf"
-
-# In this file you can override any option defined in the 'akka-reference.conf' file.
-# Copy in all or parts of the 'akka-reference.conf' file and modify as you please.
-
-akka {
-  event-handlers = ["akka.testkit.TestEventListener"]
-  loglevel = "WARNING"
-  actor {
-    default-dispatcher {
-      core-pool-size = 4
-      max-pool-size  = 32
-    }
-  }
-}