diff --git a/.gitignore b/.gitignore index 857686b6aa..91eba2fc6b 100755 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +*.vim *~ *# src_managed diff --git a/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java b/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java index 533fba8164..fefec7640a 100644 --- a/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java +++ b/akka-actor-tests/src/test/java/akka/actor/JavaExtension.java @@ -13,33 +13,34 @@ import static org.junit.Assert.*; public class JavaExtension { - static class TestExtension implements Extension { - private ActorSystemImpl system; - public static ExtensionKey key = new ExtensionKey() { - }; + static class Provider implements ExtensionIdProvider { + public ExtensionId lookup() { return defaultInstance; } + } - public ExtensionKey key() { - return key; - } + public final static TestExtensionId defaultInstance = new TestExtensionId(); - public void init(ActorSystemImpl system) { - this.system = system; - } - - public ActorSystemImpl getSystem() { - return system; + static class TestExtensionId extends AbstractExtensionId { + public TestExtension createExtension(ActorSystemImpl i) { + return new TestExtension(i); } } - private Config c = ConfigFactory.parseString("akka.extensions = [ \"akka.actor.JavaExtension$TestExtension\" ]", + static class TestExtension implements Extension { + public final ActorSystemImpl system; + public TestExtension(ActorSystemImpl i) { + system = i; + } + } + + private Config c = ConfigFactory.parseString("akka.extensions = [ \"akka.actor.JavaExtension$Provider\" ]", ConfigParseOptions.defaults()); private ActorSystem system = ActorSystem.create("JavaExtension", c); @Test public void mustBeAccessible() { - final ActorSystemImpl s = system.extension(TestExtension.key).getSystem(); - assertSame(s, system); + assertSame(system.extension(defaultInstance).system, system); + assertSame(defaultInstance.apply(system).system, system); } } diff --git a/akka-actor-tests/src/test/scala/ConfigDocSpec.scala b/akka-actor-tests/src/test/scala/ConfigDocSpec.scala deleted file mode 100644 index 5f65cce91e..0000000000 --- a/akka-actor-tests/src/test/scala/ConfigDocSpec.scala +++ /dev/null @@ -1,30 +0,0 @@ -package akka.docs.config - -import org.scalatest.WordSpec -import org.scalatest.matchers.MustMatchers -import com.typesafe.config.ConfigFactory -import com.typesafe.config.ConfigParseOptions -import akka.actor.ActorSystem - -//#imports - -class ConfigDocSpec extends WordSpec { - - "programmatically configure ActorSystem" in { - //#custom-config - val customConf = ConfigFactory.parseString(""" - akka.actor.deployment { - /app/my-service { - router = round-robin - nr-of-instances = 3 - } - } - """, ConfigParseOptions.defaults) - val system = ActorSystem("MySystem", ConfigFactory.systemProperties.withFallback(customConf)) - //#custom-config - - system.stop() - - } - -} diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala index 938204b993..7fd324a33a 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala @@ -247,7 +247,7 @@ class ActorRefSpec extends AkkaSpec { out.flush out.close - Serialization.system.withValue(system.asInstanceOf[ActorSystemImpl]) { + Serialization.currentSystem.withValue(system.asInstanceOf[ActorSystemImpl]) { val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray)) val readA = in.readObject 
@@ -275,7 +275,7 @@ class ActorRefSpec extends AkkaSpec { (intercept[java.lang.IllegalStateException] { in.readObject }).getMessage must be === "Trying to deserialize a serialized ActorRef without an ActorSystem in scope." + - " Use akka.serialization.Serialization.system.withValue(system) { ... }" + " Use akka.serialization.Serialization.currentSystem.withValue(system) { ... }" } "must throw exception on deserialize if not present in actor hierarchy (and remoting is not enabled)" in { @@ -292,7 +292,7 @@ class ActorRefSpec extends AkkaSpec { out.flush out.close - Serialization.system.withValue(system.asInstanceOf[ActorSystemImpl]) { + Serialization.currentSystem.withValue(system.asInstanceOf[ActorSystemImpl]) { val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray)) (intercept[java.lang.IllegalStateException] { in.readObject diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala index d472387f13..3565cde2fb 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/ActorSystemSpec.scala @@ -10,28 +10,23 @@ import com.typesafe.config.ConfigFactory class JavaExtensionSpec extends JavaExtension with JUnitSuite object ActorSystemSpec { - - class TestExtension extends Extension[TestExtension] { - var system: ActorSystemImpl = _ - - def key = TestExtension - - def init(system: ActorSystemImpl) { - this.system = system - } + object TestExtension extends ExtensionId[TestExtension] with ExtensionIdProvider { + def lookup = this + def createExtension(s: ActorSystemImpl) = new TestExtension(s) } - object TestExtension extends ExtensionKey[TestExtension] - + class TestExtension(val system: ActorSystemImpl) extends Extension } -class ActorSystemSpec extends AkkaSpec("""akka.extensions = ["akka.actor.ActorSystemSpec$TestExtension"]""") { +class ActorSystemSpec extends AkkaSpec("""akka.extensions = ["akka.actor.ActorSystemSpec$TestExtension$"]""") { import ActorSystemSpec._ "An ActorSystem" must { "support extensions" in { + TestExtension(system).system must be === system system.extension(TestExtension).system must be === system + system.hasExtension(TestExtension) must be(true) } } diff --git a/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala index 6293f9c876..4640951322 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala @@ -28,7 +28,7 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach { val tickActor = actorOf(new Actor { def receive = { case Tick ⇒ countDownLatch.countDown() } }) - // run every 50 millisec + // run every 50 milliseconds collectCancellable(system.scheduler.schedule(tickActor, Tick, 0 milliseconds, 50 milliseconds)) // after max 1 second it should be executed at least the 3 times already @@ -42,6 +42,16 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach { assert(countDownLatch2.await(2, TimeUnit.SECONDS)) } + "should stop continuous scheduling if the receiving actor has been terminated" in { + // run immediately and then every 100 milliseconds + collectCancellable(system.scheduler.schedule(testActor, "msg", 0 milliseconds, 100 milliseconds)) + + // stop the actor and, hence, the continuous messaging from happening + testActor ! 
PoisonPill + + expectNoMsg(500 milliseconds) + } + "schedule once" in { case object Tick val countDownLatch = new CountDownLatch(3) diff --git a/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala index d29627e443..00fb05561d 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala @@ -5,7 +5,6 @@ package akka.actor */ import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach } -import akka.japi.{ Option ⇒ JOption } import akka.util.Duration import akka.util.duration._ import akka.dispatch.{ Dispatchers, Future, KeptPromise } @@ -14,6 +13,9 @@ import java.util.concurrent.atomic.AtomicReference import annotation.tailrec import akka.testkit.{ EventFilter, filterEvents, AkkaSpec } import akka.serialization.SerializationExtension +import akka.actor.TypedActor.{ PostRestart, PreRestart, PostStop, PreStart } +import java.util.concurrent.{ TimeUnit, CountDownLatch } +import akka.japi.{ Creator, Option ⇒ JOption } object TypedActorSpec { @@ -135,6 +137,23 @@ object TypedActorSpec { class StackedImpl extends Stacked { override def stacked: String = "FOOBAR" //Uppercase } + + trait LifeCycles { + def crash(): Unit + } + + class LifeCyclesImpl(val latch: CountDownLatch) extends PreStart with PostStop with PreRestart with PostRestart with LifeCycles { + + override def crash(): Unit = throw new IllegalStateException("Crash!") + + override def preStart(): Unit = latch.countDown() + + override def postStop(): Unit = for (i ← 1 to 3) latch.countDown() + + override def preRestart(reason: Throwable, message: Option[Any]): Unit = for (i ← 1 to 5) latch.countDown() + + override def postRestart(reason: Throwable): Unit = for (i ← 1 to 7) latch.countDown() + } } @org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) @@ -148,18 +167,18 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte newFooBar(Props().withTimeout(Timeout(d))) def newFooBar(props: Props): Foo = - system.typedActorOf(classOf[Foo], classOf[Bar], props) + TypedActor(system).typedActorOf(classOf[Foo], classOf[Bar], props) def newStacked(props: Props = Props().withTimeout(Timeout(2000))): Stacked = - system.typedActorOf(classOf[Stacked], classOf[StackedImpl], props) + TypedActor(system).typedActorOf(classOf[Stacked], classOf[StackedImpl], props) - def mustStop(typedActor: AnyRef) = system.typedActor.stop(typedActor) must be(true) + def mustStop(typedActor: AnyRef) = TypedActor(system).stop(typedActor) must be(true) "TypedActors" must { "be able to instantiate" in { val t = newFooBar - system.typedActor.isTypedActor(t) must be(true) + TypedActor(system).isTypedActor(t) must be(true) mustStop(t) } @@ -169,7 +188,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte } "not stop non-started ones" in { - system.typedActor.stop(null) must be(false) + TypedActor(system).stop(null) must be(false) } "throw an IllegalStateExcpetion when TypedActor.self is called in the wrong scope" in { @@ -188,7 +207,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte "be able to call toString" in { val t = newFooBar - t.toString must be(system.typedActor.getActorRefFor(t).toString) + t.toString must be(TypedActor(system).getActorRefFor(t).toString) mustStop(t) } @@ -201,7 +220,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte "be able to call hashCode" in { val 
t = newFooBar - t.hashCode must be(system.typedActor.getActorRefFor(t).hashCode) + t.hashCode must be(TypedActor(system).getActorRefFor(t).hashCode) mustStop(t) } @@ -264,7 +283,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte "be able to handle exceptions when calling methods" in { filterEvents(EventFilter[IllegalStateException]("expected")) { val boss = actorOf(Props(context ⇒ { - case p: Props ⇒ context.sender ! context.typedActorOf(classOf[Foo], classOf[Bar], p) + case p: Props ⇒ context.sender ! TypedActor(context).typedActorOf(classOf[Foo], classOf[Bar], p) }).withFaultHandler(OneForOneStrategy { case e: IllegalStateException if e.getMessage == "expected" ⇒ FaultHandlingStrategy.Resume })) @@ -296,7 +315,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte } "be able to support implementation only typed actors" in { - val t = system.typedActorOf[Foo, Bar](Props()) + val t = TypedActor(system).typedActorOf[Foo, Bar](Props()) val f = t.futurePigdog(200) val f2 = t.futurePigdog(0) f2.isCompleted must be(false) @@ -306,7 +325,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte } "be able to support implementation only typed actors with complex interfaces" in { - val t = system.typedActorOf[Stackable1 with Stackable2, StackedImpl]() + val t = TypedActor(system).typedActorOf[Stackable1 with Stackable2, StackedImpl]() t.stackable1 must be("foo") t.stackable2 must be("bar") mustStop(t) @@ -333,17 +352,16 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte "be able to serialize and deserialize invocations" in { import java.io._ - val serialization = SerializationExtension(system).serialization - val m = TypedActor.MethodCall(serialization, classOf[Foo].getDeclaredMethod("pigdog"), Array[AnyRef]()) - val baos = new ByteArrayOutputStream(8192 * 4) - val out = new ObjectOutputStream(baos) + Serialization.currentSystem.withValue(system.asInstanceOf[ActorSystemImpl]) { + val m = TypedActor.MethodCall(classOf[Foo].getDeclaredMethod("pigdog"), Array[AnyRef]()) + val baos = new ByteArrayOutputStream(8192 * 4) + val out = new ObjectOutputStream(baos) - out.writeObject(m) - out.close() + out.writeObject(m) + out.close() - val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray)) + val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray)) - Serialization.system.withValue(system.asInstanceOf[ActorSystemImpl]) { val mNew = in.readObject().asInstanceOf[TypedActor.MethodCall] mNew.method must be(m.method) @@ -353,17 +371,16 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte "be able to serialize and deserialize invocations' parameters" in { import java.io._ val someFoo: Foo = new Bar - val serialization = SerializationExtension(system).serialization - val m = TypedActor.MethodCall(serialization, classOf[Foo].getDeclaredMethod("testMethodCallSerialization", Array[Class[_]](classOf[Foo], classOf[String], classOf[Int]): _*), Array[AnyRef](someFoo, null, 1.asInstanceOf[AnyRef])) - val baos = new ByteArrayOutputStream(8192 * 4) - val out = new ObjectOutputStream(baos) + Serialization.currentSystem.withValue(system.asInstanceOf[ActorSystemImpl]) { + val m = TypedActor.MethodCall(classOf[Foo].getDeclaredMethod("testMethodCallSerialization", Array[Class[_]](classOf[Foo], classOf[String], classOf[Int]): _*), Array[AnyRef](someFoo, null, 1.asInstanceOf[AnyRef])) + val baos = new ByteArrayOutputStream(8192 * 4) + val 
out = new ObjectOutputStream(baos) - out.writeObject(m) - out.close() + out.writeObject(m) + out.close() - val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray)) + val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray)) - Serialization.system.withValue(system.asInstanceOf[ActorSystemImpl]) { val mNew = in.readObject().asInstanceOf[TypedActor.MethodCall] mNew.method must be(m.method) @@ -375,5 +392,14 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte mNew.parameters(2).asInstanceOf[Int] must be === 1 } } + + "be able to override lifecycle callbacks" in { + val latch = new CountDownLatch(16) + val ta = TypedActor(system) + val t: LifeCycles = ta.typedActorOf(classOf[LifeCycles], new Creator[LifeCyclesImpl] { def create = new LifeCyclesImpl(latch) }, Props()) + t.crash() + ta.poisonPill(t) + latch.await(10, TimeUnit.SECONDS) must be === true + } } } diff --git a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughput10000PerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughput10000PerformanceSpec.scala new file mode 100644 index 0000000000..e47d7987bd --- /dev/null +++ b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughput10000PerformanceSpec.scala @@ -0,0 +1,228 @@ +package akka.performance.microbench + +import akka.performance.workbench.PerformanceSpec +import org.apache.commons.math.stat.descriptive.DescriptiveStatistics +import akka.actor._ +import java.util.concurrent.{ ThreadPoolExecutor, CountDownLatch, TimeUnit } +import akka.dispatch._ +import java.util.concurrent.ThreadPoolExecutor.AbortPolicy +import java.util.concurrent.BlockingQueue +import java.util.concurrent.LinkedBlockingQueue +import akka.util.Duration +import akka.util.duration._ + +// -server -Xms512M -Xmx1024M -XX:+UseParallelGC -Dbenchmark=true -Dbenchmark.repeatFactor=500 +@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) +class TellThroughput10000PerformanceSpec extends PerformanceSpec { + import TellThroughput10000PerformanceSpec._ + + /* Experiment with java 7 LinkedTransferQueue + def linkedTransferQueue(): () ⇒ BlockingQueue[Runnable] = + () ⇒ new java.util.concurrent.LinkedTransferQueue[Runnable]() + + def createDispatcher(name: String) = { + val threadPoolConfig = ThreadPoolConfig() + ThreadPoolConfigDispatcherBuilder(config ⇒ + new Dispatcher(system.dispatcherFactory.prerequisites, name, 5, + 0, UnboundedMailbox(), config, 60000), threadPoolConfig) + //.withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity + .copy(config = threadPoolConfig.copy(queueFactory = linkedTransferQueue())) + .setCorePoolSize(maxClients * 2) + .build + } +*/ + + def createDispatcher(name: String) = ThreadPoolConfigDispatcherBuilder(config ⇒ + new Dispatcher(system.dispatcherFactory.prerequisites, name, 10000, + Duration.Zero, UnboundedMailbox(), config, Duration(60, TimeUnit.SECONDS)), ThreadPoolConfig()) + .withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity + .setCorePoolSize(maxClients * 2) + .build + + val clientDispatcher = createDispatcher("client-dispatcher") + //val destinationDispatcher = createDispatcher("destination-dispatcher") + + override def atTermination { + super.atTermination() + System.out.println("Cleaning up after TellThroughputPerformanceSpec") + clientDispatcher.shutdown() + //destinationDispatcher.shutdown() + } + + val repeat = 30000L * repeatFactor + + "Tell" must { + "warmup" in { + runScenario(4, warmup = true) + } + 
"warmup more" in { + runScenario(4, warmup = true) + } + "perform with load 1" in { + runScenario(1) + } + "perform with load 2" in { + runScenario(2) + } + "perform with load 4" in { + runScenario(4) + } + "perform with load 6" in { + runScenario(6) + } + "perform with load 8" in { + runScenario(8) + } + "perform with load 10" in { + runScenario(10) + } + "perform with load 12" in { + runScenario(12) + } + "perform with load 14" in { + runScenario(14) + } + "perform with load 16" in { + runScenario(16) + } + "perform with load 18" in { + runScenario(18) + } + "perform with load 20" in { + runScenario(20) + } + "perform with load 22" in { + runScenario(22) + } + "perform with load 24" in { + runScenario(24) + } + "perform with load 26" in { + runScenario(26) + } + "perform with load 28" in { + runScenario(28) + } + "perform with load 30" in { + runScenario(30) + } + "perform with load 32" in { + runScenario(32) + } + "perform with load 34" in { + runScenario(34) + } + "perform with load 36" in { + runScenario(36) + } + "perform with load 38" in { + runScenario(38) + } + "perform with load 40" in { + runScenario(40) + } + "perform with load 42" in { + runScenario(42) + } + "perform with load 44" in { + runScenario(44) + } + "perform with load 46" in { + runScenario(46) + } + "perform with load 48" in { + runScenario(48) + } + + def runScenario(numberOfClients: Int, warmup: Boolean = false) { + if (acceptClients(numberOfClients)) { + + val latch = new CountDownLatch(numberOfClients) + val repeatsPerClient = repeat / numberOfClients + /* + val destinations = for (i ← 0 until numberOfClients) + yield system.actorOf(Props(new Destination).withDispatcher(createDispatcher("destination-" + i))) + val clients = for ((dest, j) ← destinations.zipWithIndex) + yield system.actorOf(Props(new Client(dest, latch, repeatsPerClient)).withDispatcher(createDispatcher("client-" + j))) + */ + val destinations = for (i ← 0 until numberOfClients) + yield system.actorOf(Props(new Destination).withDispatcher(clientDispatcher)) + val clients = for ((dest, j) ← destinations.zipWithIndex) + yield system.actorOf(Props(new Client(dest, latch, repeatsPerClient)).withDispatcher(clientDispatcher)) + + val start = System.nanoTime + clients.foreach(_ ! Run) + val ok = latch.await((5000000 + 500 * repeat) * timeDilation, TimeUnit.MICROSECONDS) + val durationNs = (System.nanoTime - start) + + if (!ok) { + System.err.println("Destinations: ") + destinations.foreach { + case l: LocalActorRef ⇒ + val m = l.underlying.mailbox + System.err.println(" -" + l + " mbox(" + m.status + ")" + " containing [" + Stream.continually(m.dequeue()).takeWhile(_ != null).mkString(", ") + "] and has systemMsgs: " + m.hasSystemMessages) + } + System.err.println("") + System.err.println("Clients: ") + + clients.foreach { + case l: LocalActorRef ⇒ + val m = l.underlying.mailbox + System.err.println(" -" + l + " mbox(" + m.status + ")" + " containing [" + Stream.continually(m.dequeue()).takeWhile(_ != null).mkString(", ") + "] and has systemMsgs: " + m.hasSystemMessages) + } + + //val e = clientDispatcher.asInstanceOf[Dispatcher].executorService.get().asInstanceOf[ExecutorServiceDelegate].executor.asInstanceOf[ThreadPoolExecutor] + //val q = e.getQueue + //System.err.println("Client Dispatcher: " + e.getActiveCount + " " + Stream.continually(q.poll()).takeWhile(_ != null).mkString(", ")) + } + + if (!warmup) { + ok must be(true) + logMeasurement(numberOfClients, durationNs, repeat) + } + clients.foreach(_ ! PoisonPill) + destinations.foreach(_ ! 
PoisonPill) + + } + } + } +} + +object TellThroughput10000PerformanceSpec { + + case object Run + case object Msg + + class Destination extends Actor { + def receive = { + case Msg ⇒ sender ! Msg + } + } + + class Client( + actor: ActorRef, + latch: CountDownLatch, + repeat: Long) extends Actor { + + var sent = 0L + var received = 0L + + def receive = { + case Msg ⇒ + received += 1 + if (sent < repeat) { + actor ! Msg + sent += 1 + } else if (received >= repeat) { + latch.countDown() + } + case Run ⇒ + for (i ← 0L until math.min(20000L, repeat)) { + actor ! Msg + sent += 1 + } + } + + } + +} diff --git a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputComputationPerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputComputationPerformanceSpec.scala new file mode 100644 index 0000000000..52bb3d169b --- /dev/null +++ b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputComputationPerformanceSpec.scala @@ -0,0 +1,226 @@ +package akka.performance.microbench + +import akka.performance.workbench.PerformanceSpec +import akka.actor._ +import java.util.concurrent.{ ThreadPoolExecutor, CountDownLatch, TimeUnit } +import akka.dispatch._ +import akka.util.Duration +import akka.util.duration._ + +// -server -Xms512M -Xmx1024M -XX:+UseParallelGC -Dbenchmark=true -Dbenchmark.repeatFactor=500 +@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) +class TellThroughputComputationPerformanceSpec extends PerformanceSpec { + import TellThroughputComputationPerformanceSpec._ + + def createDispatcher(name: String) = ThreadPoolConfigDispatcherBuilder(config ⇒ + new Dispatcher(system.dispatcherFactory.prerequisites, name, 5, + Duration.Zero, UnboundedMailbox(), config, 60 seconds), ThreadPoolConfig()) + .withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity + .setCorePoolSize(maxClients) + .build + + val clientDispatcher = createDispatcher("client-dispatcher") + val destinationDispatcher = createDispatcher("destination-dispatcher") + + override def atTermination { + super.atTermination() + System.out.println("Cleaning up after TellThroughputComputationPerformanceSpec") + clientDispatcher.shutdown() + destinationDispatcher.shutdown() + } + + val repeat = 500L * repeatFactor + + "Tell" must { + "warmup" in { + runScenario(4, warmup = true) + } + // "warmup more" in { + // runScenario(4, warmup = true) + // } + "perform with load 1" in { + runScenario(1) + } + "perform with load 2" in { + runScenario(2) + } + "perform with load 4" in { + runScenario(4) + } + "perform with load 6" in { + runScenario(6) + } + "perform with load 8" in { + runScenario(8) + } + "perform with load 10" in { + runScenario(10) + } + "perform with load 12" in { + runScenario(12) + } + "perform with load 14" in { + runScenario(14) + } + "perform with load 16" in { + runScenario(16) + } + "perform with load 18" in { + runScenario(18) + } + "perform with load 20" in { + runScenario(20) + } + "perform with load 22" in { + runScenario(22) + } + "perform with load 24" in { + runScenario(24) + } + "perform with load 26" in { + runScenario(26) + } + "perform with load 28" in { + runScenario(28) + } + "perform with load 30" in { + runScenario(30) + } + "perform with load 32" in { + runScenario(32) + } + "perform with load 34" in { + runScenario(34) + } + "perform with load 36" in { + runScenario(36) + } + "perform with load 38" in { + runScenario(38) + } + "perform with load 40" in { + runScenario(40) + } + "perform with load 42" in { + 
runScenario(42) + } + "perform with load 44" in { + runScenario(44) + } + "perform with load 46" in { + runScenario(46) + } + "perform with load 48" in { + runScenario(48) + } + + def runScenario(numberOfClients: Int, warmup: Boolean = false) { + if (acceptClients(numberOfClients)) { + + val latch = new CountDownLatch(numberOfClients) + val repeatsPerClient = repeat / numberOfClients + val destinations = for (i ← 0 until numberOfClients) + yield system.actorOf(Props(new Destination).withDispatcher(destinationDispatcher)) + val clients = for (dest ← destinations) + yield system.actorOf(Props(new Client(dest, latch, repeatsPerClient)).withDispatcher(clientDispatcher)) + + val start = System.nanoTime + clients.foreach(_ ! Run) + val ok = latch.await((5000000 + 500 * repeat) * timeDilation, TimeUnit.MICROSECONDS) + val durationNs = (System.nanoTime - start) + + if (!ok) { + System.err.println("Destinations: ") + destinations.foreach { + case l: LocalActorRef ⇒ + val m = l.underlying.mailbox + System.err.println(" -" + l + " mbox(" + m.status + ")" + " containing [" + Stream.continually(m.dequeue()).takeWhile(_ != null).mkString(", ") + "] and has systemMsgs: " + m.hasSystemMessages) + } + System.err.println("") + System.err.println("Clients: ") + + clients.foreach { + case l: LocalActorRef ⇒ + val m = l.underlying.mailbox + System.err.println(" -" + l + " mbox(" + m.status + ")" + " containing [" + Stream.continually(m.dequeue()).takeWhile(_ != null).mkString(", ") + "] and has systemMsgs: " + m.hasSystemMessages) + } + + val e = clientDispatcher.asInstanceOf[Dispatcher].executorService.get().asInstanceOf[ExecutorServiceDelegate].executor.asInstanceOf[ThreadPoolExecutor] + val q = e.getQueue + System.err.println("Client Dispatcher: " + e.getActiveCount + " " + Stream.continually(q.poll()).takeWhile(_ != null).mkString(", ")) + } + + if (!warmup) { + ok must be(true) + logMeasurement(numberOfClients, durationNs, repeat) + } + clients.foreach(_ ! PoisonPill) + destinations.foreach(_ ! PoisonPill) + + } + } + } +} + +object TellThroughputComputationPerformanceSpec { + + case object Run + case object Msg + + trait PiComputation { + private var _pi: Double = 0.0 + def pi: Double = _pi + private var currentPosition = 0L + def nrOfElements = 1000 + + def calculatePi(): Unit = { + _pi += calculateDecimals(currentPosition) + currentPosition += nrOfElements + } + + private def calculateDecimals(start: Long): Double = { + var acc = 0.0 + for (i ← start until (start + nrOfElements)) + acc += 4.0 * (1 - (i % 2) * 2) / (2 * i + 1) + acc + } + + } + + class Destination extends Actor with PiComputation { + def receive = { + case Msg ⇒ + calculatePi() + sender ! Msg + } + } + + class Client( + actor: ActorRef, + latch: CountDownLatch, + repeat: Long) extends Actor with PiComputation { + + var sent = 0L + var received = 0L + + def receive = { + case Msg ⇒ + received += 1 + calculatePi() + if (sent < repeat) { + actor ! Msg + sent += 1 + } else if (received >= repeat) { + println("PI: " + pi) + latch.countDown() + } + case Run ⇒ + for (i ← 0L until math.min(1000L, repeat)) { + actor ! 
Msg + sent += 1 + } + } + + } + +} diff --git a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputSeparateDispatchersPerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputSeparateDispatchersPerformanceSpec.scala new file mode 100644 index 0000000000..0de1e1be2d --- /dev/null +++ b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputSeparateDispatchersPerformanceSpec.scala @@ -0,0 +1,214 @@ +package akka.performance.microbench + +import akka.performance.workbench.PerformanceSpec +import org.apache.commons.math.stat.descriptive.DescriptiveStatistics +import akka.actor._ +import java.util.concurrent.{ ThreadPoolExecutor, CountDownLatch, TimeUnit } +import akka.dispatch._ +import java.util.concurrent.ThreadPoolExecutor.AbortPolicy +import java.util.concurrent.BlockingQueue +import java.util.concurrent.LinkedBlockingQueue +import akka.util.Duration +import akka.util.duration._ + +// -server -Xms512M -Xmx1024M -XX:+UseParallelGC -Dbenchmark=true -Dbenchmark.repeatFactor=500 +@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner]) +class TellThroughputSeparateDispatchersPerformanceSpec extends PerformanceSpec { + import TellThroughputSeparateDispatchersPerformanceSpec._ + + def createDispatcher(name: String) = ThreadPoolConfigDispatcherBuilder(config ⇒ + new Dispatcher(system.dispatcherFactory.prerequisites, name, 5, + Duration.Zero, UnboundedMailbox(), config, Duration(60, TimeUnit.SECONDS)), ThreadPoolConfig()) + .withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity + .setCorePoolSize(1) + .build + + //val clientDispatcher = createDispatcher("client-dispatcher") + //val destinationDispatcher = createDispatcher("destination-dispatcher") + + override def atTermination { + super.atTermination() + System.out.println("Cleaning up after TellThroughputPerformanceSpec") + //clientDispatcher.shutdown() + //destinationDispatcher.shutdown() + } + + val repeat = 30000L * repeatFactor + + "Tell" must { + "warmup" in { + runScenario(4, warmup = true) + } + "warmup more" in { + runScenario(4, warmup = true) + } + "perform with load 1" in { + runScenario(1) + } + "perform with load 2" in { + runScenario(2) + } + "perform with load 4" in { + runScenario(4) + } + "perform with load 6" in { + runScenario(6) + } + "perform with load 8" in { + runScenario(8) + } + "perform with load 10" in { + runScenario(10) + } + "perform with load 12" in { + runScenario(12) + } + "perform with load 14" in { + runScenario(14) + } + "perform with load 16" in { + runScenario(16) + } + "perform with load 18" in { + runScenario(18) + } + "perform with load 20" in { + runScenario(20) + } + "perform with load 22" in { + runScenario(22) + } + "perform with load 24" in { + runScenario(24) + } + "perform with load 26" in { + runScenario(26) + } + "perform with load 28" in { + runScenario(28) + } + "perform with load 30" in { + runScenario(30) + } + "perform with load 32" in { + runScenario(32) + } + "perform with load 34" in { + runScenario(34) + } + "perform with load 36" in { + runScenario(36) + } + "perform with load 38" in { + runScenario(38) + } + "perform with load 40" in { + runScenario(40) + } + "perform with load 42" in { + runScenario(42) + } + "perform with load 44" in { + runScenario(44) + } + "perform with load 46" in { + runScenario(46) + } + "perform with load 48" in { + runScenario(48) + } + + def runScenario(numberOfClients: Int, warmup: Boolean = false) { + if (acceptClients(numberOfClients)) { + + val latch = 
new CountDownLatch(numberOfClients) + val repeatsPerClient = repeat / numberOfClients + + val destinations = for (i ← 0 until numberOfClients) + yield system.actorOf(Props(new Destination).withDispatcher(createDispatcher("destination-" + i))) + val clients = for ((dest, j) ← destinations.zipWithIndex) + yield system.actorOf(Props(new Client(dest, latch, repeatsPerClient)).withDispatcher(createDispatcher("client-" + j))) + + /* + val destinations = for (i ← 0 until numberOfClients) + yield system.actorOf(Props(new Destination).withDispatcher(clientDispatcher)) + val clients = for ((dest, j) ← destinations.zipWithIndex) + yield system.actorOf(Props(new Client(dest, latch, repeatsPerClient)).withDispatcher(clientDispatcher)) + */ + + val start = System.nanoTime + clients.foreach(_ ! Run) + val ok = latch.await((5000000 + 500 * repeat) * timeDilation, TimeUnit.MICROSECONDS) + val durationNs = (System.nanoTime - start) + + if (!ok) { + System.err.println("Destinations: ") + destinations.foreach { + case l: LocalActorRef ⇒ + val m = l.underlying.mailbox + System.err.println(" -" + l + " mbox(" + m.status + ")" + " containing [" + Stream.continually(m.dequeue()).takeWhile(_ != null).mkString(", ") + "] and has systemMsgs: " + m.hasSystemMessages) + } + System.err.println("") + System.err.println("Clients: ") + + clients.foreach { + case l: LocalActorRef ⇒ + val m = l.underlying.mailbox + System.err.println(" -" + l + " mbox(" + m.status + ")" + " containing [" + Stream.continually(m.dequeue()).takeWhile(_ != null).mkString(", ") + "] and has systemMsgs: " + m.hasSystemMessages) + } + + //val e = clientDispatcher.asInstanceOf[Dispatcher].executorService.get().asInstanceOf[ExecutorServiceDelegate].executor.asInstanceOf[ThreadPoolExecutor] + //val q = e.getQueue + //System.err.println("Client Dispatcher: " + e.getActiveCount + " " + Stream.continually(q.poll()).takeWhile(_ != null).mkString(", ")) + } + + if (!warmup) { + ok must be(true) + logMeasurement(numberOfClients, durationNs, repeat) + } + clients.foreach(_ ! PoisonPill) + destinations.foreach(_ ! PoisonPill) + + } + } + } +} + +object TellThroughputSeparateDispatchersPerformanceSpec { + + case object Run + case object Msg + + class Destination extends Actor { + def receive = { + case Msg ⇒ sender ! Msg + } + } + + class Client( + actor: ActorRef, + latch: CountDownLatch, + repeat: Long) extends Actor { + + var sent = 0L + var received = 0L + + def receive = { + case Msg ⇒ + received += 1 + if (sent < repeat) { + actor ! Msg + sent += 1 + } else if (received >= repeat) { + latch.countDown() + } + case Run ⇒ + for (i ← 0L until math.min(1000L, repeat)) { + actor ! 
Msg + sent += 1 + } + } + + } + +} diff --git a/akka-actor-tests/src/test/scala/akka/performance/trading/domain/Orderbook.scala b/akka-actor-tests/src/test/scala/akka/performance/trading/domain/Orderbook.scala index a3bd2febc0..e1541d7a03 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/trading/domain/Orderbook.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/trading/domain/Orderbook.scala @@ -51,11 +51,10 @@ object Orderbook { val useDummyOrderbook = System.getProperty("benchmark.useDummyOrderbook", "false").toBoolean - def apply(symbol: String, standby: Boolean): Orderbook = standby match { - case false if !useDummyOrderbook ⇒ new Orderbook(symbol) with SimpleTradeObserver - case true if !useDummyOrderbook ⇒ new Orderbook(symbol) with StandbyTradeObserver - case false if useDummyOrderbook ⇒ new DummyOrderbook(symbol) with SimpleTradeObserver - case true if useDummyOrderbook ⇒ new DummyOrderbook(symbol) with StandbyTradeObserver + def apply(symbol: String, standby: Boolean): Orderbook = (useDummyOrderbook, standby) match { + case (false, false) ⇒ new Orderbook(symbol) with NopTradeObserver + case (false, true) ⇒ new Orderbook(symbol) with TotalTradeObserver + case (true, _) ⇒ new DummyOrderbook(symbol) with NopTradeObserver } } diff --git a/akka-actor-tests/src/test/scala/akka/performance/trading/domain/TradeObserver.scala b/akka-actor-tests/src/test/scala/akka/performance/trading/domain/TradeObserver.scala index 123d785e17..c7de6ddcc0 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/trading/domain/TradeObserver.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/trading/domain/TradeObserver.scala @@ -6,15 +6,13 @@ abstract trait TradeObserver { def trade(bid: Bid, ask: Ask) } -trait SimpleTradeObserver extends TradeObserver { +trait TotalTradeObserver extends TradeObserver { override def trade(bid: Bid, ask: Ask) { - if (!Orderbook.useDummyOrderbook) { - TotalTradeCounter.counter.incrementAndGet - } + TotalTradeCounter.counter.incrementAndGet } } -trait StandbyTradeObserver extends TradeObserver { +trait NopTradeObserver extends TradeObserver { override def trade(bid: Bid, ask: Ask) { } } diff --git a/akka-actor-tests/src/test/scala/akka/performance/workbench/PerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/workbench/PerformanceSpec.scala index b17bb5913c..17ca27b3e2 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/workbench/PerformanceSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/workbench/PerformanceSpec.scala @@ -15,7 +15,10 @@ trait PerformanceSpec extends AkkaSpec with BeforeAndAfterEach { def minClients() = System.getProperty("benchmark.minClients", "1").toInt; - def maxClients() = System.getProperty("benchmark.maxClients", "40").toInt; + def maxClients() = { + val default = if (isBenchmark) "48" else "4" + System.getProperty("benchmark.maxClients", default).toInt; + } def repeatFactor() = { val defaultRepeatFactor = if (isBenchmark) "150" else "2" diff --git a/akka-actor-tests/src/test/scala/akka/routing/ActorPoolSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/ActorPoolSpec.scala index e4ccf34768..d9503e31b7 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/ActorPoolSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/ActorPoolSpec.scala @@ -29,7 +29,9 @@ class TypedActorPoolSpec extends AkkaSpec { import ActorPoolSpec._ "Actor Pool (2)" must { "support typed actors" in { - val pool = system.createProxy[Foo](new Actor with DefaultActorPool with 
BoundedCapacityStrategy with MailboxPressureCapacitor with SmallestMailboxSelector with Filter with RunningMeanBackoff with BasicRampup { + val ta = TypedActor(system) + val pool = ta.createProxy[Foo](new Actor with DefaultActorPool with BoundedCapacityStrategy with MailboxPressureCapacitor with SmallestMailboxSelector with Filter with RunningMeanBackoff with BasicRampup { + val typedActor = TypedActor(context) def lowerBound = 1 def upperBound = 5 def pressureThreshold = 1 @@ -38,7 +40,7 @@ class TypedActorPoolSpec extends AkkaSpec { def rampupRate = 0.1 def backoffRate = 0.50 def backoffThreshold = 0.50 - def instance(p: Props) = system.typedActor.getActorRefFor(context.typedActorOf[Foo, FooImpl](props = p.withTimeout(10 seconds))) + def instance(p: Props) = typedActor.getActorRefFor(typedActor.typedActorOf[Foo, FooImpl](props = p.withTimeout(10 seconds))) def receive = _route }, Props().withTimeout(10 seconds).withFaultHandler(faultHandler)) @@ -47,7 +49,7 @@ class TypedActorPoolSpec extends AkkaSpec { for ((i, r) ← results) r.get must equal(i * i) - system.typedActor.stop(pool) + ta.stop(pool) } } } diff --git a/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala index a51993d873..6dd5f56577 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala @@ -402,4 +402,79 @@ class RoutingSpec extends AkkaSpec { } }) } + + "broadcast router" must { + + "be started when constructed" in { + val actor1 = actorOf[TestActor] + + val props = RoutedProps(routerFactory = () ⇒ new BroadcastRouter, connectionManager = new LocalConnectionManager(List(actor1))) + val actor = new RoutedActorRef(system, props, system.asInstanceOf[ActorSystemImpl].guardian, "foo") + actor.isTerminated must be(false) + } + + "broadcast message using !" in { + val doneLatch = new CountDownLatch(2) + + val counter1 = new AtomicInteger + val connection1 = actorOf(new Actor { + def receive = { + case "end" ⇒ doneLatch.countDown() + case msg: Int ⇒ counter1.addAndGet(msg) + } + }) + + val counter2 = new AtomicInteger + val connection2 = actorOf(new Actor { + def receive = { + case "end" ⇒ doneLatch.countDown() + case msg: Int ⇒ counter2.addAndGet(msg) + } + }) + + val props = RoutedProps(routerFactory = () ⇒ new BroadcastRouter, connectionManager = new LocalConnectionManager(List(connection1, connection2))) + val actor = new RoutedActorRef(system, props, system.asInstanceOf[ActorSystemImpl].guardian, "foo") + + actor ! 1 + actor ! "end" + + doneLatch.await(5, TimeUnit.SECONDS) must be(true) + + counter1.get must be(1) + counter2.get must be(1) + } + + "broadcast message using ?" in { + val doneLatch = new CountDownLatch(2) + + val counter1 = new AtomicInteger + val connection1 = actorOf(new Actor { + def receive = { + case "end" ⇒ doneLatch.countDown() + case msg: Int ⇒ + counter1.addAndGet(msg) + sender ! "ack" + } + }) + + val counter2 = new AtomicInteger + val connection2 = actorOf(new Actor { + def receive = { + case "end" ⇒ doneLatch.countDown() + case msg: Int ⇒ counter2.addAndGet(msg) + } + }) + + val props = RoutedProps(routerFactory = () ⇒ new BroadcastRouter, connectionManager = new LocalConnectionManager(List(connection1, connection2))) + val actor = new RoutedActorRef(system, props, system.asInstanceOf[ActorSystemImpl].guardian, "foo") + + actor ? 1 + actor ! 
"end" + + doneLatch.await(5, TimeUnit.SECONDS) must be(true) + + counter1.get must be(1) + counter2.get must be(1) + } + } } diff --git a/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala b/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala index 8022edcc62..0e2f43a3d8 100644 --- a/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala @@ -46,7 +46,7 @@ object SerializeSpec { class SerializeSpec extends AkkaSpec(SerializeSpec.serializationConf) { import SerializeSpec._ - val ser = SerializationExtension(system).serialization + val ser = SerializationExtension(system) import ser._ val addr = Address("120", "Monroe Street", "Santa Clara", "95050") @@ -104,7 +104,7 @@ class SerializeSpec extends AkkaSpec(SerializeSpec.serializationConf) { out.close() val in = new ObjectInputStream(new ByteArrayInputStream(outbuf.toByteArray)) - Serialization.system.withValue(a.asInstanceOf[ActorSystemImpl]) { + Serialization.currentSystem.withValue(a.asInstanceOf[ActorSystemImpl]) { val deadLetters = in.readObject().asInstanceOf[DeadLetterActorRef] (deadLetters eq a.deadLetters) must be(true) } diff --git a/akka-actor/src/main/resources/akka-actor-reference.conf b/akka-actor/src/main/resources/akka-actor-reference.conf index a4b74ce474..fbfd7d7e9c 100644 --- a/akka-actor/src/main/resources/akka-actor-reference.conf +++ b/akka-actor/src/main/resources/akka-actor-reference.conf @@ -13,12 +13,17 @@ akka { enabled-modules = [] # Comma separated list of the enabled modules. Options: ["cluster", "camel", "http"] event-handlers = ["akka.event.Logging$DefaultLogger"] # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT) - loglevel = "WARNING" # Options: ERROR, WARNING, INFO, DEBUG + loglevel = "INFO" # Options: ERROR, WARNING, INFO, DEBUG # this level is used by the configured loggers (see "event-handlers") as soon # as they have been started; before that, see "stdout-loglevel" - stdout-loglevel = "WARNING" # Loglevel for the very basic logger activated during AkkaApplication startup + stdout-loglevel = "INFO" # Loglevel for the very basic logger activated during AkkaApplication startup + # FIXME: Is there any sensible reason why we have 2 different log levels? - extensions = [] # list FQCN of extensions which shall be loaded at actor system startup + logConfigOnStart = off # Log the complete configuration at INFO level when the actor system is started. + # This is useful when you are uncertain of what configuration is used. + + extensions = [] # List FQCN of extensions which shall be loaded at actor system startup. + # FIXME: clarify "extensions" here, "Akka Extensions ()" # These boot classes are loaded (and created) automatically when the Akka Microkernel boots up # Can be used to bootstrap your application(s) diff --git a/akka-actor/src/main/scala/akka/actor/ActorCell.scala b/akka-actor/src/main/scala/akka/actor/ActorCell.scala index 8fc25d84bf..4bb623cc68 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorCell.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorCell.scala @@ -16,7 +16,7 @@ import akka.util.{ Duration, Helpers } * Exposes contextual information for the actor and the current message. 
* TODO: everything here for current compatibility - could be limited more */ -trait ActorContext extends ActorRefFactory with TypedActorFactory { +trait ActorContext extends ActorRefFactory { def self: ActorRef @@ -81,8 +81,6 @@ private[akka] class ActorCell( protected final def guardian = self - protected def typedActor = system.typedActor - final def provider = system.provider override def receiveTimeout: Option[Long] = if (receiveTimeoutData._1 > 0) Some(receiveTimeoutData._1) else None diff --git a/akka-actor/src/main/scala/akka/actor/ActorRef.scala b/akka-actor/src/main/scala/akka/actor/ActorRef.scala index fc8c6f950a..398aae0039 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorRef.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorRef.scala @@ -176,8 +176,8 @@ class LocalActorRef private[akka] ( def address: String = path.toString /* - * actorCell.start() publishes actorCell & this to the dispatcher, which - * means that messages may be processed theoretically before the constructor + * actorCell.start() publishes actorCell & this to the dispatcher, which + * means that messages may be processed theoretically before the constructor * ends. The JMM guarantees visibility for final fields only after the end * of the constructor, so publish the actorCell safely by making it a * @volatile var which is NOT TO BE WRITTEN TO. The alternative would be to @@ -305,7 +305,7 @@ trait ScalaActorRef { ref: ActorRef ⇒ */ // FIXME: remove and replace by ActorPath.toString case class SerializedActorRef(hostname: String, port: Int, path: String) { - import akka.serialization.Serialization.system + import akka.serialization.Serialization.currentSystem // FIXME this is broken, but see above def this(address: Address, path: String) = this(address.hostPort, 0, path) @@ -313,11 +313,11 @@ case class SerializedActorRef(hostname: String, port: Int, path: String) { def this(remoteAddress: InetSocketAddress, path: String) = this(remoteAddress.getAddress.getHostAddress, remoteAddress.getPort, path) //TODO FIXME REMOVE @throws(classOf[java.io.ObjectStreamException]) - def readResolve(): AnyRef = { - if (system.value eq null) throw new IllegalStateException( + def readResolve(): AnyRef = currentSystem.value match { + case null ⇒ throw new IllegalStateException( "Trying to deserialize a serialized ActorRef without an ActorSystem in scope." + - " Use akka.serialization.Serialization.system.withValue(system) { ... }") - system.value.provider.deserialize(this) match { + " Use akka.serialization.Serialization.currentSystem.withValue(system) { ... }") + case someSystem ⇒ someSystem.provider.deserialize(this) match { case Some(actor) ⇒ actor case None ⇒ throw new IllegalStateException("Could not deserialize ActorRef") } @@ -365,7 +365,7 @@ case class DeadLetter(message: Any, sender: ActorRef, recipient: ActorRef) object DeadLetterActorRef { class SerializedDeadLetterActorRef extends Serializable { //TODO implement as Protobuf for performance? 
@throws(classOf[java.io.ObjectStreamException]) - private def readResolve(): AnyRef = Serialization.system.value.deadLetters + private def readResolve(): AnyRef = Serialization.currentSystem.value.deadLetters } val serialized = new SerializedDeadLetterActorRef @@ -392,7 +392,7 @@ class DeadLetterActorRef(val eventStream: EventStream) extends MinimalActorRef { override def isTerminated(): Boolean = true - override def !(message: Any)(implicit sender: ActorRef = null): Unit = message match { + override def !(message: Any)(implicit sender: ActorRef = this): Unit = message match { case d: DeadLetter ⇒ eventStream.publish(d) case _ ⇒ eventStream.publish(DeadLetter(message, sender, this)) } diff --git a/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala b/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala index 63d74b9db7..1e96d70395 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala @@ -11,13 +11,15 @@ import org.jboss.netty.akka.util.{ TimerTask, HashedWheelTimer } import akka.actor.Timeout.intToTimeout import akka.config.ConfigurationException import akka.dispatch.{ SystemMessage, Supervise, Promise, MessageDispatcher, Future, DefaultPromise, Dispatcher, Mailbox, Envelope } -import akka.event.{ Logging, DeathWatch, ActorClassification, EventStream } -import akka.routing.{ ScatterGatherFirstCompletedRouter, Routing, RouterType, Router, RoutedProps, RoutedActorRef, RoundRobinRouter, RandomRouter, LocalConnectionManager, DirectRouter } +import akka.routing.{ ScatterGatherFirstCompletedRouter, Routing, RouterType, Router, RoutedProps, RoutedActorRef, RoundRobinRouter, RandomRouter, LocalConnectionManager, DirectRouter, BroadcastRouter } import akka.AkkaException import com.eaio.uuid.UUID import akka.util.{ Duration, Switch, Helpers } import akka.remote.RemoteAddress import org.jboss.netty.akka.util.internal.ConcurrentIdentityHashMap +import akka.event._ +import akka.event.Logging.Error._ +import akka.event.Logging.Warning /** * Interface for all ActorRef providers to implement. @@ -41,6 +43,7 @@ trait ActorRefProvider { // FIXME: remove/replace? def nodename: String + // FIXME: remove/replace? 
def clustername: String @@ -192,8 +195,11 @@ class LocalActorRefProvider( * generate name for temporary actor refs */ private val tempNumber = new AtomicLong + private def tempName = "$_" + Helpers.base64(tempNumber.getAndIncrement()) + private val tempNode = rootPath / "tmp" + def tempPath = tempNode / tempName /** @@ -215,7 +221,9 @@ class LocalActorRefProvider( override def toString = name - override def stop() = stopped switchOn { terminationFuture.complete(causeOfTermination.toLeft(())) } + override def stop() = stopped switchOn { + terminationFuture.complete(causeOfTermination.toLeft(())) + } override def isTerminated = stopped.isOn @@ -238,6 +246,7 @@ class LocalActorRefProvider( case Terminated(_) ⇒ context.self.stop() } } + private class SystemGuardian extends Actor { def receive = { case Terminated(_) ⇒ @@ -245,6 +254,7 @@ class LocalActorRefProvider( context.self.stop() } } + private val guardianFaultHandlingStrategy = { import akka.actor.FaultHandlingStrategy._ OneForOneStrategy { @@ -256,7 +266,7 @@ class LocalActorRefProvider( private val guardianProps = Props(new Guardian).withFaultHandler(guardianFaultHandlingStrategy) /* - * The problem is that ActorRefs need a reference to the ActorSystem to + * The problem is that ActorRefs need a reference to the ActorSystem to * provide their service. Hence they cannot be created while the * constructors of ActorSystem and ActorRefProvider are still running. * The solution is to split out that last part into an init() method, @@ -264,7 +274,9 @@ class LocalActorRefProvider( */ @volatile private var system: ActorSystemImpl = _ + def dispatcher: MessageDispatcher = system.dispatcher + lazy val terminationFuture: DefaultPromise[Unit] = new DefaultPromise[Unit](Timeout.never)(dispatcher) lazy val rootGuardian: ActorRef = new LocalActorRef(system, guardianProps, theOneWhoWalksTheBubblesOfSpaceTime, rootPath, true) lazy val guardian: ActorRef = actorOf(system, guardianProps, rootGuardian, "app", true) @@ -309,11 +321,13 @@ class LocalActorRefProvider( // create a routed actor ref case deploy @ Some(DeploymentConfig.Deploy(_, _, routerType, nrOfInstances, DeploymentConfig.LocalScope)) ⇒ - + implicit val dispatcher = if (props.dispatcher == Props.defaultDispatcher) system.dispatcher else props.dispatcher + implicit val timeout = system.settings.ActorTimeout val routerFactory: () ⇒ Router = DeploymentConfig.routerTypeFor(routerType) match { case RouterType.Direct ⇒ () ⇒ new DirectRouter case RouterType.Random ⇒ () ⇒ new RandomRouter case RouterType.RoundRobin ⇒ () ⇒ new RoundRobinRouter + case RouterType.Broadcast ⇒ () ⇒ new BroadcastRouter case RouterType.ScatterGather ⇒ () ⇒ new ScatterGatherFirstCompletedRouter()( if (props.dispatcher == Props.defaultDispatcher) dispatcher else props.dispatcher, settings.ActorTimeout) case RouterType.LeastCPU ⇒ sys.error("Router LeastCPU not supported yet") @@ -353,6 +367,7 @@ class LocalActorRefProvider( } private[akka] def deserialize(actor: SerializedActorRef): Option[ActorRef] = Some(actorFor(actor.path)) + private[akka] def serialize(actor: ActorRef): SerializedActorRef = new SerializedActorRef(rootPath.address, actor.path.toString) private[akka] def createDeathWatch(): DeathWatch = new LocalDeathWatch @@ -387,7 +402,7 @@ class LocalDeathWatch extends DeathWatch with ActorClassification { } } -class DefaultScheduler(hashedWheelTimer: HashedWheelTimer) extends Scheduler { +class DefaultScheduler(hashedWheelTimer: HashedWheelTimer, system: ActorSystem) extends Scheduler { def schedule(receiver: ActorRef, 
message: Any, initialDelay: Duration, delay: Duration): Cancellable = new DefaultCancellable(hashedWheelTimer.newTimeout(createContinuousTask(receiver, message, delay), initialDelay)) @@ -405,19 +420,37 @@ class DefaultScheduler(hashedWheelTimer: HashedWheelTimer) extends Scheduler { new DefaultCancellable(hashedWheelTimer.newTimeout(createSingleTask(f), delay)) private def createSingleTask(runnable: Runnable): TimerTask = - new TimerTask() { def run(timeout: org.jboss.netty.akka.util.Timeout) { runnable.run() } } + new TimerTask() { + def run(timeout: org.jboss.netty.akka.util.Timeout) { + // FIXME: consider executing runnable inside main dispatcher to prevent blocking of scheduler + runnable.run() + } + } private def createSingleTask(receiver: ActorRef, message: Any): TimerTask = - new TimerTask { def run(timeout: org.jboss.netty.akka.util.Timeout) { receiver ! message } } + new TimerTask { + def run(timeout: org.jboss.netty.akka.util.Timeout) { + receiver ! message + } + } private def createSingleTask(f: () ⇒ Unit): TimerTask = - new TimerTask { def run(timeout: org.jboss.netty.akka.util.Timeout) { f() } } + new TimerTask { + def run(timeout: org.jboss.netty.akka.util.Timeout) { + f() + } + } private def createContinuousTask(receiver: ActorRef, message: Any, delay: Duration): TimerTask = { new TimerTask { def run(timeout: org.jboss.netty.akka.util.Timeout) { - receiver ! message - timeout.getTimer.newTimeout(this, delay) + // Check if the receiver is still alive and kicking before sending it a message and reschedule the task + if (!receiver.isTerminated) { + receiver ! message + timeout.getTimer.newTimeout(this, delay) + } else { + system.eventStream.publish(Warning(this.getClass.getSimpleName, "Could not reschedule message to be sent because receiving actor has been terminated.")) + } } } } @@ -434,9 +467,13 @@ class DefaultScheduler(hashedWheelTimer: HashedWheelTimer) extends Scheduler { private[akka] def stop() = hashedWheelTimer.stop() } -class DefaultCancellable(timeout: org.jboss.netty.akka.util.Timeout) extends Cancellable { - def cancel() { timeout.cancel() } +class DefaultCancellable(val timeout: org.jboss.netty.akka.util.Timeout) extends Cancellable { + def cancel() { + timeout.cancel() + } - def isCancelled: Boolean = { timeout.isCancelled } + def isCancelled: Boolean = { + timeout.isCancelled + } } diff --git a/akka-actor/src/main/scala/akka/actor/ActorSystem.scala b/akka-actor/src/main/scala/akka/actor/ActorSystem.scala index 1aec26cfc2..97c0d16e4d 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorSystem.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorSystem.scala @@ -8,12 +8,7 @@ import akka.actor._ import akka.event._ import akka.dispatch._ import akka.util.duration._ -import java.net.InetAddress -import com.eaio.uuid.UUID -import akka.serialization.Serialization -import akka.remote.RemoteAddress import org.jboss.netty.akka.util.HashedWheelTimer -import java.util.concurrent.TimeUnit.SECONDS import java.util.concurrent.TimeUnit.MILLISECONDS import java.util.concurrent.TimeUnit.NANOSECONDS import java.io.File @@ -24,11 +19,9 @@ import com.typesafe.config.ConfigFactory import java.lang.reflect.InvocationTargetException import akka.util.{ Helpers, Duration, ReflectiveAccess } import java.util.concurrent.atomic.AtomicLong -import java.util.concurrent.CountDownLatch -import java.util.concurrent.ConcurrentHashMap -import java.util.concurrent.Executors +import java.util.concurrent.{ CountDownLatch, Executors, ConcurrentHashMap } import scala.annotation.tailrec -import 
akka.serialization.SerializationExtension +import org.jboss.netty.akka.util.internal.ConcurrentIdentityHashMap object ActorSystem { @@ -63,17 +56,18 @@ object ActorSystem { import scala.collection.JavaConverters._ import config._ + val ConfigVersion = getString("akka.version") val ProviderClass = getString("akka.actor.provider") val ActorTimeout = Timeout(Duration(getMilliseconds("akka.actor.timeout"), MILLISECONDS)) - // TODO This isn't used anywhere. Remove? val SerializeAllMessages = getBoolean("akka.actor.serialize-messages") val LogLevel = getString("akka.loglevel") val StdoutLogLevel = getString("akka.stdout-loglevel") val EventHandlers: Seq[String] = getStringList("akka.event-handlers").asScala + val LogConfigOnStart = config.getBoolean("akka.logConfigOnStart") val AddLoggingReceive = getBoolean("akka.actor.debug.receive") val DebugAutoReceive = getBoolean("akka.actor.debug.autoreceive") val DebugLifecycle = getBoolean("akka.actor.debug.lifecycle") @@ -101,6 +95,10 @@ object ActorSystem { throw new ConfigurationException("Akka JAR version [" + Version + "] does not match the provided config version [" + ConfigVersion + "]") + override def toString: String = { + config.toString + } + } object DefaultConfigurationLoader { @@ -150,7 +148,7 @@ object ActorSystem { * configuration, e.g. dispatchers, deployments, remote capabilities and * addresses. It is also the entry point for creating or looking up actors. */ -abstract class ActorSystem extends ActorRefFactory with TypedActorFactory { +abstract class ActorSystem extends ActorRefFactory { import ActorSystem._ /** @@ -164,6 +162,11 @@ abstract class ActorSystem extends ActorRefFactory with TypedActorFactory { */ def settings: Settings + /** + * Log the configuration. + */ + def logConfiguration(): Unit + /** * The logical node name where this actor system resides. */ @@ -208,9 +211,6 @@ abstract class ActorSystem extends ActorRefFactory with TypedActorFactory { // FIXME: do not publish this def deadLetterMailbox: Mailbox - // FIXME: TypedActor should be an extension - def typedActor: TypedActor - /** * Light-weight scheduler for running asynchronous tasks after some deadline * in the future. Not terribly precise but cheap. @@ -251,36 +251,25 @@ abstract class ActorSystem extends ActorRefFactory with TypedActorFactory { def stop() /** - * Register an [[akka.actor.Extension]] within this actor system. The supplied - * object is interrogated for the extension’s key with which the extension is - * accessible from anywhere you have a reference to this actor system in - * scope, e.g. within actors (see [[ActorSystem.extension]]). - * - * Extensions can be registered automatically by adding their fully-qualified - * class name to the `akka.extensions` configuration key. 
+ * Registers the provided extension and creates its payload, if this extension isn't already registered. + * This method has putIfAbsent semantics and can potentially block, waiting for the initialization + * of the payload, if it is in the process of being registered by another thread of execution */ - def registerExtension[T <: AnyRef](ext: Extension[T]): Extension[T] + def registerExtension[T <: Extension](ext: ExtensionId[T]): T /** - * Obtain a reference to a registered extension by passing in the key which - * the extension object returned from its init method (typically a static - * field or Scala `object`): - * - * {{{ - * class MyActor extends Actor { - * val ext: MyExtension = context.app.extension(MyExtension.key) - * } - * }}} - * - * Throws IllegalArgumentException if the extension key is not found. + * Returns the payload that is associated with the provided extension, + * throws an IllegalArgumentException if it is not registered. + * This method can potentially block, waiting for the initialization + * of the payload, if it is in the process of being registered by another thread of execution */ - def extension[T <: AnyRef](key: ExtensionKey[T]): T + def extension[T <: Extension](ext: ExtensionId[T]): T /** - * Query presence of a specific extension. Beware that this key needs to be - * “the same” as the one used for registration (it is using a HashMap). + * Returns whether the specified extension is already registered. This method can potentially block, waiting for the initialization + * of the payload, if it is in the process of being registered by another thread of execution */ - def hasExtension(key: ExtensionKey[_]): Boolean + def hasExtension(ext: ExtensionId[_ <: Extension]): Boolean } class ActorSystemImpl(val name: String, val applicationConfig: Config) extends ActorSystem { @@ -289,6 +278,8 @@ class ActorSystemImpl(val name: String, val applicationConfig: Config) extends A val settings = new Settings(applicationConfig, name) + def logConfiguration(): Unit = log.info(settings.toString) + protected def systemImpl = this private val systemActors = new ConcurrentHashMap[String, ActorRef] @@ -323,7 +314,7 @@ class ActorSystemImpl(val name: String, val applicationConfig: Config) extends A eventStream.startStdoutLogger(settings) val log = new BusLogging(eventStream, "ActorSystem") // “this” used only for .getClass in tagging messages - val scheduler = new DefaultScheduler(new HashedWheelTimer(log, Executors.defaultThreadFactory, settings.SchedulerTickDuration, settings.SchedulerTicksPerWheel)) + val scheduler = new DefaultScheduler(new HashedWheelTimer(log, Executors.defaultThreadFactory, settings.SchedulerTickDuration, settings.SchedulerTicksPerWheel), this) val deadLetters = new DeadLetterActorRef(eventStream) val deadLetterMailbox = new Mailbox(null) { @@ -358,11 +349,14 @@ class ActorSystemImpl(val name: String, val applicationConfig: Config) extends A } } - val actorsJanitor = MinimalActorRef(provider.rootPath) { + /* + * these cannot be initialized before the provider is started because root + * Path may depend on the ability to register extensions for reading config + */ + lazy val actorsJanitor = MinimalActorRef(provider.rootPath) { case Terminated(x) ⇒ actors.remove(x.path.name) } - - val systemActorsJanitor = MinimalActorRef(provider.rootPath) { + lazy val systemActorsJanitor = MinimalActorRef(provider.rootPath) { case Terminated(x) ⇒ systemActors.remove(x.path.name) } @@ -380,21 +374,16 @@ 
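// Illustrative sketch, not part of the patch: the akka.logConfigOnStart setting and the
// logConfiguration() method added above both log Settings.toString, i.e. the resolved
// configuration. Given any started system, the dump can also be triggered on demand:
def dumpConfiguration(system: akka.actor.ActorSystem): Unit =
  system.logConfiguration() // logs the merged settings at INFO via the system's logging bus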
private final val nextName = new AtomicLong override protected def randomName(): String = Helpers.base64(nextName.incrementAndGet()) - @volatile - private var _typedActor: TypedActor = _ - def typedActor = _typedActor - def /(actorName: String): ActorPath = guardian.path / actorName private lazy val _start: this.type = { - // TODO can we do something better than loading SerializationExtension from here? - _typedActor = new TypedActor(settings, SerializationExtension(this).serialization) provider.init(this) deadLetters.init(dispatcher, provider.rootPath) // this starts the reaper actor and the user-configured logging subscribers, which are also actors eventStream.start(this) eventStream.startDefaultLoggers(this) loadExtensions() + if (LogConfigOnStart) logConfiguration() this } @@ -410,65 +399,61 @@ class ActorSystemImpl(val name: String, val applicationConfig: Config) extends A terminationFuture onComplete (_ ⇒ dispatcher.shutdown()) } - private val extensions = new ConcurrentHashMap[ExtensionKey[_], AnyRef] + private val extensions = new ConcurrentIdentityHashMap[ExtensionId[_], AnyRef] /** - * Attempts to initialize and register this extension if the key associated with it isn't already registered. - * The extension will only be initialized if it isn't already registered. - * Rethrows anything thrown when initializing the extension (doesn't register in that case) - * Returns the registered extension, might be another already registered instance. + * Returns any extension registered to the specified Extension or returns null if not registered */ @tailrec - final def registerExtension[T <: AnyRef](ext: Extension[T]): Extension[T] = { - /** - * Returns any extension registered to the specified key or returns null if not registered - */ - @tailrec - def findExtension[T <: AnyRef](key: ExtensionKey[T]): Option[T] = extensions.get(key) match { - case c: CountDownLatch ⇒ c.await(); findExtension(key) //Registration in process, await completion and retry - case e: Extension[_] ⇒ Some(e.asInstanceOf[T]) //Profit! - case null ⇒ None //Doesn't exist - } + private def findExtension[T <: Extension](ext: ExtensionId[T]): T = extensions.get(ext) match { + case c: CountDownLatch ⇒ c.await(); findExtension(ext) //Registration in process, await completion and retry + case other ⇒ other.asInstanceOf[T] //could be a T or null, in which case we return the null as T + } - findExtension(ext.key) match { - case Some(e: Extension[_]) ⇒ e.asInstanceOf[Extension[T]] //Profit! - case None ⇒ //Doesn't already exist, commence registration + @tailrec + final def registerExtension[T <: Extension](ext: ExtensionId[T]): T = { + findExtension(ext) match { + case null ⇒ //Doesn't already exist, commence registration val inProcessOfRegistration = new CountDownLatch(1) - extensions.putIfAbsent(ext.key, inProcessOfRegistration) match { // Signal that registration is in process + extensions.putIfAbsent(ext, inProcessOfRegistration) match { // Signal that registration is in process case null ⇒ try { // Signal was successfully sent - ext.init(this) //Initialize the new extension - extensions.replace(ext.key, inProcessOfRegistration, ext) //Replace our in process signal with the initialized extension - ext //Profit! 
+ ext.createExtension(this) match { // Create and initialize the extension + case null ⇒ throw new IllegalStateException("Extension instance created as null for Extension: " + ext) + case instance ⇒ + extensions.replace(ext, inProcessOfRegistration, instance) //Replace our in process signal with the initialized extension + instance //Profit! + } } catch { case t ⇒ - extensions.remove(ext.key, inProcessOfRegistration) //In case shit hits the fan, remove the inProcess signal + extensions.remove(ext, inProcessOfRegistration) //In case shit hits the fan, remove the inProcess signal throw t //Escalate to caller } finally { inProcessOfRegistration.countDown //Always notify listeners of the inProcess signal } - case other ⇒ registerExtension(ext) //Someone else is in process of registering an extension for this key, retry + case other ⇒ registerExtension(ext) //Someone else is in process of registering an extension for this Extension, retry } + case existing ⇒ existing.asInstanceOf[T] } } - def extension[T <: AnyRef](key: ExtensionKey[T]): T = extensions.get(key) match { - case x: Extension[_] ⇒ x.asInstanceOf[T] - case _ ⇒ throw new IllegalArgumentException("trying to get non-registered extension " + key) + def extension[T <: Extension](ext: ExtensionId[T]): T = findExtension(ext) match { + case null ⇒ throw new IllegalArgumentException("Trying to get non-registered extension " + ext) + case some ⇒ some.asInstanceOf[T] } - def hasExtension(key: ExtensionKey[_]): Boolean = extensions.get(key) match { - case x: Extension[_] ⇒ true - case _ ⇒ false - } + def hasExtension(ext: ExtensionId[_ <: Extension]): Boolean = findExtension(ext) != null private def loadExtensions() { import scala.collection.JavaConversions._ settings.config.getStringList("akka.extensions") foreach { fqcn ⇒ import ReflectiveAccess._ - createInstance[Extension[_ <: AnyRef]](fqcn, noParams, noArgs) match { - case Left(ex) ⇒ log.error(ex, "Exception trying to load extension " + fqcn) - case Right(ext) ⇒ if (ext.isInstanceOf[Extension[_]]) registerExtension(ext) else log.error("Class {} is not an Extension", fqcn) + getObjectFor[AnyRef](fqcn).fold(_ ⇒ createInstance[AnyRef](fqcn, noParams, noArgs), Right(_)) match { + case Right(p: ExtensionIdProvider) ⇒ registerExtension(p.lookup()); + case Right(p: ExtensionId[_]) ⇒ registerExtension(p); + case Right(other) ⇒ log.error("'{}' is not an ExtensionIdProvider or ExtensionId, skipping...", fqcn) + case Left(problem) ⇒ log.error(problem, "While trying to load extension '{}', skipping...", fqcn) } + } } } diff --git a/akka-actor/src/main/scala/akka/actor/Extension.scala b/akka-actor/src/main/scala/akka/actor/Extension.scala index 7c582fa8c4..bfd4ab6a52 100644 --- a/akka-actor/src/main/scala/akka/actor/Extension.scala +++ b/akka-actor/src/main/scala/akka/actor/Extension.scala @@ -16,53 +16,51 @@ package akka.actor * The extension itself can be created in any way desired and has full access * to the ActorSystem implementation. * - * Scala example: - * - * {{{ - * class MyExtension extends Extension[MyExtension] { - * def key = MyExtension - * def init(system: ActorSystemImpl) { - * ... // initialize here - * } - * } - * object MyExtension extends ExtensionKey[MyExtension] - * }}} - * - * Java example: - * - * {{{ - * static class MyExtension implements Extension { - * public static ExtensionKey key = new ExtensionKey() {}; - * - * public ExtensionKey key() { - * return key; - * } - * public void init(ActorSystemImpl system) { - * ... 
// initialize here - * } - * } - * }}} */ -trait Extension[T <: AnyRef] { + +/** + * Marker interface to signify an Akka Extension + */ +trait Extension + +/** + * Identifies an Extension. + * Lookup of Extensions is done by object identity, so the Id must be the same wherever it's used, + * otherwise you'll get the same extension loaded multiple times. + */ +trait ExtensionId[T <: Extension] { /** - * This method is called by the ActorSystem upon registering this extension. - * The key returned is used for looking up extensions, hence it must be a - * suitable hash key and available to all clients of the extension. This is - * best achieved by storing it in a static field (Java) or as/in an object - * (Scala). + * Returns an instance of the extension identified by this ExtensionId instance. */ - def key: ExtensionKey[T] + def apply(system: ActorSystem): T = system.registerExtension(this) - // FIXME ActorSystemImpl exposed to user API. We might well choose to introduce a new interface for this level of access, just so we can shuffle around the implementation /** - * This method is called by the ActorSystem when the extension is registered - * to trigger initialization of the extension. + * Returns an instance of the extension identified by this ExtensionId instance. + * Java API */ - def init(system: ActorSystemImpl): Unit + def get(system: ActorSystem): T = apply(system) + + /** + * Is used by Akka to instantiate the Extension identified by this ExtensionId, + * internal use only. + */ + def createExtension(system: ActorSystemImpl): T } /** - * Marker trait identifying a registered [[akka.actor.Extension]]. + * Java API for ExtensionId */ -trait ExtensionKey[T <: AnyRef] +abstract class AbstractExtensionId[T <: Extension] extends ExtensionId[T] + +/** + * To be able to load an ExtensionId from the configuration, + * a class that implements ExtensionIdProvider must be specified. + * The lookup method should return the canonical reference to the extension. + */ +trait ExtensionIdProvider { + /** + * Returns the canonical ExtensionId for this Extension + */ + def lookup(): ExtensionId[_ <: Extension] +} diff --git a/akka-actor/src/main/scala/akka/actor/TypedActor.scala b/akka-actor/src/main/scala/akka/actor/TypedActor.scala index 3409aa05e4..3ae639e95f 100644 --- a/akka-actor/src/main/scala/akka/actor/TypedActor.scala +++ b/akka-actor/src/main/scala/akka/actor/TypedActor.scala @@ -12,12 +12,189 @@ import akka.serialization.{ Serializer, Serialization } import akka.dispatch._ import akka.serialization.SerializationExtension -object TypedActor { +trait TypedActorFactory { + + protected def actorFactory: ActorRefFactory + + protected def typedActor: TypedActorExtension + + /** + * Stops the underlying ActorRef for the supplied TypedActor proxy, + * if any, returns whether it could find the ActorRef or not + */ + def stop(proxy: AnyRef): Boolean = getActorRefFor(proxy) match { + case null ⇒ false + case ref ⇒ ref.stop; true + } + + /** + * Sends a PoisonPill to the underlying ActorRef for the supplied TypedActor proxy, + * if any, returns whether it could find the ActorRef or not + */ + def poisonPill(proxy: AnyRef): Boolean = getActorRefFor(proxy) match { + case null ⇒ false + case ref ⇒ ref ! 
PoisonPill; true + } + + /** + * Returns whether the supplied AnyRef is a TypedActor proxy or not + */ + def isTypedActor(proxyOrNot: AnyRef): Boolean + + /** + * Retrieves the underlying ActorRef for the supplied TypedActor proxy, or null if none found + */ + def getActorRefFor(proxy: AnyRef): ActorRef + + /** + * Creates a new TypedActor proxy using the supplied Props, + * the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or + * all interfaces (Class.getInterfaces) if it's not an interface class + */ + def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Class[T], props: Props): R = + typedActor.createProxyAndTypedActor(actorFactory, interface, impl.newInstance, props, None, interface.getClassLoader) + + /** + * Creates a new TypedActor proxy using the supplied Props, + * the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or + * all interfaces (Class.getInterfaces) if it's not an interface class + */ + def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Class[T], props: Props, name: String): R = + typedActor.createProxyAndTypedActor(actorFactory, interface, impl.newInstance, props, Some(name), interface.getClassLoader) + + /** + * Creates a new TypedActor proxy using the supplied Props, + * the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or + * all interfaces (Class.getInterfaces) if it's not an interface class + */ + def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Creator[T], props: Props): R = + typedActor.createProxyAndTypedActor(actorFactory, interface, impl.create, props, None, interface.getClassLoader) + + /** + * Creates a new TypedActor proxy using the supplied Props, + * the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or + * all interfaces (Class.getInterfaces) if it's not an interface class + */ + def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Creator[T], props: Props, name: String): R = + typedActor.createProxyAndTypedActor(actorFactory, interface, impl.create, props, Some(name), interface.getClassLoader) + + /** + * Creates a new TypedActor proxy using the supplied Props, + * the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or + * all interfaces (Class.getInterfaces) if it's not an interface class + */ + def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Class[T], props: Props, loader: ClassLoader): R = + typedActor.createProxyAndTypedActor(actorFactory, interface, impl.newInstance, props, None, loader) + + /** + * Creates a new TypedActor proxy using the supplied Props, + * the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or + * all interfaces (Class.getInterfaces) if it's not an interface class + */ + def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Class[T], props: Props, name: String, loader: ClassLoader): R = + typedActor.createProxyAndTypedActor(actorFactory, interface, impl.newInstance, props, Some(name), loader) + + /** + * Creates a new TypedActor proxy using the supplied Props, + * the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or + * all interfaces (Class.getInterfaces) if it's not an interface class + */ + def 
typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Creator[T], props: Props, loader: ClassLoader): R = + typedActor.createProxyAndTypedActor(actorFactory, interface, impl.create, props, None, loader) + + /** + * Creates a new TypedActor proxy using the supplied Props, + * the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or + * all interfaces (Class.getInterfaces) if it's not an interface class + */ + def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Creator[T], props: Props, name: String, loader: ClassLoader): R = + typedActor.createProxyAndTypedActor(actorFactory, interface, impl.create, props, Some(name), loader) + + /** + * Creates a new TypedActor proxy using the supplied Props, + * the interfaces usable by the returned proxy is the supplied implementation class' interfaces (Class.getInterfaces) + */ + def typedActorOf[R <: AnyRef, T <: R](impl: Class[T], props: Props, loader: ClassLoader): R = + typedActor.createProxyAndTypedActor(actorFactory, impl, impl.newInstance, props, None, loader) + + /** + * Creates a new TypedActor proxy using the supplied Props, + * the interfaces usable by the returned proxy is the supplied implementation class' interfaces (Class.getInterfaces) + */ + def typedActorOf[R <: AnyRef, T <: R](impl: Class[T], props: Props, name: String, loader: ClassLoader): R = + typedActor.createProxyAndTypedActor(actorFactory, impl, impl.newInstance, props, Some(name), loader) + + /** + * Creates a new TypedActor proxy using the supplied Props, + * the interfaces usable by the returned proxy is the supplied implementation class' interfaces (Class.getInterfaces) + */ + def typedActorOf[R <: AnyRef, T <: R](props: Props = Props(), name: String = null, loader: ClassLoader = null)(implicit m: Manifest[T]): R = { + val clazz = m.erasure.asInstanceOf[Class[T]] + typedActor.createProxyAndTypedActor(actorFactory, clazz, clazz.newInstance, props, Option(name), if (loader eq null) clazz.getClassLoader else loader) + } + + /** + * Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself, + * to create TypedActor proxies, use typedActorOf + */ + def createProxy[R <: AnyRef](constructor: ⇒ Actor, props: Props = Props(), name: String = null, loader: ClassLoader = null)(implicit m: Manifest[R]): R = + typedActor.createProxy[R](actorFactory, typedActor.extractInterfaces(m.erasure), (ref: AtomVar[R]) ⇒ constructor, props, Option(name), if (loader eq null) m.erasure.getClassLoader else loader) + + /** + * Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself, + * to create TypedActor proxies, use typedActorOf + */ + def createProxy[R <: AnyRef](interfaces: Array[Class[_]], constructor: Creator[Actor], props: Props, loader: ClassLoader): R = + typedActor.createProxy(actorFactory, interfaces, (ref: AtomVar[R]) ⇒ constructor.create, props, None, loader) + + /** + * Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself, + * to create TypedActor proxies, use typedActorOf + */ + def createProxy[R <: AnyRef](interfaces: Array[Class[_]], constructor: Creator[Actor], props: Props, name: String, loader: ClassLoader): R = + typedActor.createProxy(actorFactory, interfaces, (ref: AtomVar[R]) ⇒ constructor.create, props, Some(name), loader) + + /** + * Creates a proxy given the supplied Props, this 
is not a TypedActor, so you'll need to implement the MethodCall handling yourself, + * to create TypedActor proxies, use typedActorOf + */ + def createProxy[R <: AnyRef](interfaces: Array[Class[_]], constructor: ⇒ Actor, props: Props, loader: ClassLoader): R = + typedActor.createProxy[R](actorFactory, interfaces, (ref: AtomVar[R]) ⇒ constructor, props, None, loader) + + /** + * Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself, + * to create TypedActor proxies, use typedActorOf + */ + def createProxy[R <: AnyRef](interfaces: Array[Class[_]], constructor: ⇒ Actor, props: Props, name: String, loader: ClassLoader): R = + typedActor.createProxy[R](actorFactory, interfaces, (ref: AtomVar[R]) ⇒ constructor, props, Some(name), loader) + +} + +object TypedActor extends ExtensionId[TypedActorExtension] with ExtensionIdProvider { + def lookup() = this + def createExtension(system: ActorSystemImpl): TypedActorExtension = new TypedActorExtension(system) + + /** + * Returns a contextual TypedActorFactory of this extension, this means that any TypedActors created by this TypedActorExtension + * will be children to the specified context, this allows for creating hierarchies of TypedActors. + * Do _not_ let this instance escape the TypedActor since that will not be thread-safe. + */ + def apply(context: ActorContext): TypedActorFactory = ContextualTypedActorFactory(apply(context.system), context) + + /** + * Returns a contextual TypedActorFactory of this extension, this means that any TypedActors created by this TypedActorExtension + * will be children to the specified context, this allows for creating hierarchies of TypedActors. + * Do _not_ let this instance escape the TypedActor since that will not be thread-safe. + * + * Java API + */ + def get(context: ActorContext): TypedActorFactory = apply(context) + /** * This class represents a Method call, and has a reference to the Method to be called and the parameters to supply * It's sent to the ActorRef backing the TypedActor and can be serialized and deserialized */ - case class MethodCall(ser: Serialization, method: Method, parameters: Array[AnyRef]) { + case class MethodCall(method: Method, parameters: Array[AnyRef]) { def isOneWay = method.getReturnType == java.lang.Void.TYPE def returnsFuture_? 
= classOf[Future[_]].isAssignableFrom(method.getReturnType) @@ -41,7 +218,7 @@ object TypedActor { case null ⇒ SerializedMethodCall(method.getDeclaringClass, method.getName, method.getParameterTypes, null, null) case ps if ps.length == 0 ⇒ SerializedMethodCall(method.getDeclaringClass, method.getName, method.getParameterTypes, Array[Serializer.Identifier](), Array[Array[Byte]]()) case ps ⇒ - val serializers: Array[Serializer] = ps map ser.findSerializerFor + val serializers: Array[Serializer] = ps map SerializationExtension(Serialization.currentSystem.value).findSerializerFor val serializedParameters: Array[Array[Byte]] = Array.ofDim[Array[Byte]](serializers.length) for (i ← 0 until serializers.length) serializedParameters(i) = serializers(i) toBinary parameters(i) //Mutable for the sake of sanity @@ -58,26 +235,26 @@ object TypedActor { //TODO implement writeObject and readObject to serialize //TODO Possible optimization is to special encode the parameter-types to conserve space private def readResolve(): AnyRef = { - val system = akka.serialization.Serialization.system.value + val system = akka.serialization.Serialization.currentSystem.value if (system eq null) throw new IllegalStateException( "Trying to deserialize a SerializedMethodCall without an ActorSystem in scope." + - " Use akka.serialization.Serialization.system.withValue(system) { ... }") - val serialization = SerializationExtension(system).serialization - MethodCall(serialization, ownerType.getDeclaredMethod(methodName, parameterTypes: _*), serializedParameters match { + " Use akka.serialization.Serialization.currentSystem.withValue(system) { ... }") + val serialization = SerializationExtension(system) + MethodCall(ownerType.getDeclaredMethod(methodName, parameterTypes: _*), serializedParameters match { case null ⇒ null case a if a.length == 0 ⇒ Array[AnyRef]() case a ⇒ val deserializedParameters: Array[AnyRef] = Array.ofDim[AnyRef](a.length) //Mutable for the sake of sanity - for (i ← 0 until a.length) { + for (i ← 0 until a.length) deserializedParameters(i) = serialization.serializerByIdentity(serializerIdentifiers(i)).fromBinary(serializedParameters(i)) - } + deserializedParameters }) } } private val selfReference = new ThreadLocal[AnyRef] - private val appReference = new ThreadLocal[ActorSystem] + private val currentSystem = new ThreadLocal[ActorSystem] /** * Returns the reference to the proxy when called inside a method call in a TypedActor @@ -105,7 +282,7 @@ object TypedActor { /** * Returns the akka system (for a TypedActor) when inside a method call in a TypedActor. */ - def system = appReference.get match { + def system = currentSystem.get match { case null ⇒ throw new IllegalStateException("Calling TypedActor.system outside of a TypedActor implementation method!") case some ⇒ some } @@ -119,220 +296,37 @@ object TypedActor { * Returns the default timeout (for a TypedActor) when inside a method call in a TypedActor. 
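// Illustrative sketch, not part of the patch: as the readResolve error message above states,
// deserialising a serialized MethodCall (or ActorRef) requires the owning system to be bound on
// the calling thread via Serialization.currentSystem; the helper name and byte source are examples.
import java.io.{ ByteArrayInputStream, ObjectInputStream }
import akka.actor.ActorSystemImpl
import akka.serialization.Serialization

def readBack(bytes: Array[Byte], system: ActorSystemImpl): AnyRef =
  Serialization.currentSystem.withValue(system) {
    new ObjectInputStream(new ByteArrayInputStream(bytes)).readObject()
  }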
*/ implicit def timeout = system.settings.ActorTimeout -} - -trait TypedActorFactory { this: ActorRefFactory ⇒ - - protected def typedActor: TypedActor /** - * Creates a new TypedActor proxy using the supplied Props, - * the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or - * all interfaces (Class.getInterfaces) if it's not an interface class + * Implementation of TypedActor as an Actor */ - def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Class[T], props: Props): R = - typedActor.createProxyAndTypedActor(this, interface, impl.newInstance, props, None, interface.getClassLoader) - - /** - * Creates a new TypedActor proxy using the supplied Props, - * the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or - * all interfaces (Class.getInterfaces) if it's not an interface class - */ - def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Class[T], props: Props, name: String): R = - typedActor.createProxyAndTypedActor(this, interface, impl.newInstance, props, Some(name), interface.getClassLoader) - - /** - * Creates a new TypedActor proxy using the supplied Props, - * the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or - * all interfaces (Class.getInterfaces) if it's not an interface class - */ - def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Creator[T], props: Props): R = - typedActor.createProxyAndTypedActor(this, interface, impl.create, props, None, interface.getClassLoader) - - /** - * Creates a new TypedActor proxy using the supplied Props, - * the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or - * all interfaces (Class.getInterfaces) if it's not an interface class - */ - def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Creator[T], props: Props, name: String): R = - typedActor.createProxyAndTypedActor(this, interface, impl.create, props, Some(name), interface.getClassLoader) - - /** - * Creates a new TypedActor proxy using the supplied Props, - * the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or - * all interfaces (Class.getInterfaces) if it's not an interface class - */ - def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Class[T], props: Props, loader: ClassLoader): R = - typedActor.createProxyAndTypedActor(this, interface, impl.newInstance, props, None, loader) - - /** - * Creates a new TypedActor proxy using the supplied Props, - * the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or - * all interfaces (Class.getInterfaces) if it's not an interface class - */ - def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Class[T], props: Props, name: String, loader: ClassLoader): R = - typedActor.createProxyAndTypedActor(this, interface, impl.newInstance, props, Some(name), loader) - - /** - * Creates a new TypedActor proxy using the supplied Props, - * the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or - * all interfaces (Class.getInterfaces) if it's not an interface class - */ - def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Creator[T], props: Props, loader: ClassLoader): R = - typedActor.createProxyAndTypedActor(this, interface, 
impl.create, props, None, loader) - - /** - * Creates a new TypedActor proxy using the supplied Props, - * the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or - * all interfaces (Class.getInterfaces) if it's not an interface class - */ - def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Creator[T], props: Props, name: String, loader: ClassLoader): R = - typedActor.createProxyAndTypedActor(this, interface, impl.create, props, Some(name), loader) - - /** - * Creates a new TypedActor proxy using the supplied Props, - * the interfaces usable by the returned proxy is the supplied implementation class' interfaces (Class.getInterfaces) - */ - def typedActorOf[R <: AnyRef, T <: R](impl: Class[T], props: Props, loader: ClassLoader): R = - typedActor.createProxyAndTypedActor(this, impl, impl.newInstance, props, None, loader) - - /** - * Creates a new TypedActor proxy using the supplied Props, - * the interfaces usable by the returned proxy is the supplied implementation class' interfaces (Class.getInterfaces) - */ - def typedActorOf[R <: AnyRef, T <: R](impl: Class[T], props: Props, name: String, loader: ClassLoader): R = - typedActor.createProxyAndTypedActor(this, impl, impl.newInstance, props, Some(name), loader) - - /** - * Creates a new TypedActor proxy using the supplied Props, - * the interfaces usable by the returned proxy is the supplied implementation class' interfaces (Class.getInterfaces) - */ - def typedActorOf[R <: AnyRef, T <: R](props: Props = Props(), name: String = null, loader: ClassLoader = null)(implicit m: Manifest[T]): R = { - val clazz = m.erasure.asInstanceOf[Class[T]] - typedActor.createProxyAndTypedActor(this, clazz, clazz.newInstance, props, Option(name), if (loader eq null) clazz.getClassLoader else loader) - } - - /** - * Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself, - * to create TypedActor proxies, use typedActorOf - */ - def createProxy[R <: AnyRef](constructor: ⇒ Actor, props: Props = Props(), name: String = null, loader: ClassLoader = null)(implicit m: Manifest[R]): R = - typedActor.createProxy[R](this, typedActor.extractInterfaces(m.erasure), (ref: AtomVar[R]) ⇒ constructor, props, Option(name), if (loader eq null) m.erasure.getClassLoader else loader) - - /** - * Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself, - * to create TypedActor proxies, use typedActorOf - */ - def createProxy[R <: AnyRef](interfaces: Array[Class[_]], constructor: Creator[Actor], props: Props, loader: ClassLoader): R = - typedActor.createProxy(this, interfaces, (ref: AtomVar[R]) ⇒ constructor.create, props, None, loader) - - /** - * Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself, - * to create TypedActor proxies, use typedActorOf - */ - def createProxy[R <: AnyRef](interfaces: Array[Class[_]], constructor: Creator[Actor], props: Props, name: String, loader: ClassLoader): R = - typedActor.createProxy(this, interfaces, (ref: AtomVar[R]) ⇒ constructor.create, props, Some(name), loader) - - /** - * Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself, - * to create TypedActor proxies, use typedActorOf - */ - def createProxy[R <: AnyRef](interfaces: Array[Class[_]], constructor: ⇒ Actor, props: 
Props, loader: ClassLoader): R = - typedActor.createProxy[R](this, interfaces, (ref: AtomVar[R]) ⇒ constructor, props, None, loader) - - /** - * Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself, - * to create TypedActor proxies, use typedActorOf - */ - def createProxy[R <: AnyRef](interfaces: Array[Class[_]], constructor: ⇒ Actor, props: Props, name: String, loader: ClassLoader): R = - typedActor.createProxy[R](this, interfaces, (ref: AtomVar[R]) ⇒ constructor, props, Some(name), loader) - -} - -//TODO Document this class, not only in Scaladoc, but also in a dedicated typed-actor.rst, for both java and scala -/** - * A TypedActor in Akka is an implementation of the Active Objects Pattern, i.e. an object with asynchronous method dispatch - * - * It consists of 2 parts: - * The Interface - * The Implementation - * - * Given a combination of Interface and Implementation, a JDK Dynamic Proxy object with the Interface will be returned - * - * The semantics is as follows, - * any methods in the Interface that returns Unit/void will use fire-and-forget semantics (same as Actor !) - * any methods in the Interface that returns Option/JOption will use ask + block-with-timeout-return-none-if-timeout semantics - * any methods in the Interface that returns anything else will use ask + block-with-timeout-throw-if-timeout semantics - * - * TypedActors needs, just like Actors, to be Stopped when they are no longer needed, use TypedActor.stop(proxy) - */ -class TypedActor(val settings: ActorSystem.Settings, var ser: Serialization) { - - import TypedActor.MethodCall - /** - * Stops the underlying ActorRef for the supplied TypedActor proxy, if any, returns whether it could stop it or not - */ - def stop(proxy: AnyRef): Boolean = getActorRefFor(proxy) match { - case null ⇒ false - case ref ⇒ ref.stop; true - } - - /** - * Retrieves the underlying ActorRef for the supplied TypedActor proxy, or null if none found - */ - def getActorRefFor(proxy: AnyRef): ActorRef = invocationHandlerFor(proxy) match { - case null ⇒ null - case handler ⇒ handler.actor - } - - /** - * Returns wether the supplied AnyRef is a TypedActor proxy or not - */ - def isTypedActor(proxyOrNot: AnyRef): Boolean = invocationHandlerFor(proxyOrNot) ne null - - /* Internal API */ - - private[akka] def invocationHandlerFor(typedActor_? : AnyRef): TypedActorInvocationHandler = - if ((typedActor_? ne null) && Proxy.isProxyClass(typedActor_?.getClass)) typedActor_? 
match { - case null ⇒ null - case other ⇒ Proxy.getInvocationHandler(other) match { - case null ⇒ null - case handler: TypedActorInvocationHandler ⇒ handler - case _ ⇒ null - } - } - else null - - private[akka] def createProxy[R <: AnyRef](supervisor: ActorRefFactory, interfaces: Array[Class[_]], constructor: (AtomVar[R]) ⇒ Actor, props: Props, name: Option[String], loader: ClassLoader): R = { - val proxyVar = new AtomVar[R] - configureAndProxyLocalActorRef[R](supervisor, interfaces, proxyVar, props.withCreator(constructor(proxyVar)), name, loader) - } - - private[akka] def createProxyAndTypedActor[R <: AnyRef, T <: R](supervisor: ActorRefFactory, interface: Class[_], constructor: ⇒ T, props: Props, name: Option[String], loader: ClassLoader): R = - createProxy[R](supervisor, extractInterfaces(interface), (ref: AtomVar[R]) ⇒ new TypedActor[R, T](ref, constructor), props, name, loader) - - private[akka] def configureAndProxyLocalActorRef[T <: AnyRef](supervisor: ActorRefFactory, interfaces: Array[Class[_]], proxyVar: AtomVar[T], props: Props, name: Option[String], loader: ClassLoader): T = { - //Warning, do not change order of the following statements, it's some elaborate chicken-n-egg handling - val actorVar = new AtomVar[ActorRef](null) - val timeout = props.timeout match { - case Props.`defaultTimeout` ⇒ settings.ActorTimeout - case x ⇒ x - } - val proxy: T = Proxy.newProxyInstance(loader, interfaces, new TypedActorInvocationHandler(actorVar, timeout)).asInstanceOf[T] - proxyVar.set(proxy) // Chicken and egg situation we needed to solve, set the proxy so that we can set the self-reference inside each receive - val ref = if (name.isDefined) supervisor.actorOf(props, name.get) else supervisor.actorOf(props) - actorVar.set(ref) //Make sure the InvocationHandler gets ahold of the actor reference, this is not a problem since the proxy hasn't escaped this method yet - proxyVar.get - } - - private[akka] def extractInterfaces(clazz: Class[_]): Array[Class[_]] = if (clazz.isInterface) Array[Class[_]](clazz) else clazz.getInterfaces - private[akka] class TypedActor[R <: AnyRef, T <: R](val proxyVar: AtomVar[R], createInstance: ⇒ T) extends Actor { val me = createInstance + + override def preStart(): Unit = me match { + case l: PreStart ⇒ l.preStart() + case _ ⇒ super.preStart() + } + + override def postStop(): Unit = me match { + case l: PostStop ⇒ l.postStop() + case _ ⇒ super.postStop() + } + + override def preRestart(reason: Throwable, message: Option[Any]): Unit = me match { + case l: PreRestart ⇒ l.preRestart(reason, message) + case _ ⇒ super.preRestart(reason, message) + } + + override def postRestart(reason: Throwable): Unit = me match { + case l: PostRestart ⇒ l.postRestart(reason) + case _ ⇒ super.postRestart(reason) + } + def receive = { case m: MethodCall ⇒ TypedActor.selfReference set proxyVar.get - TypedActor.appReference set system + TypedActor.currentSystem set system try { if (m.isOneWay) m(me) else { @@ -349,25 +343,73 @@ class TypedActor(val settings: ActorSystem.Settings, var ser: Serialization) { sender ! m(me) } } catch { - case e: Exception ⇒ sender ! Status.Failure(e) + case t: Throwable ⇒ sender ! 
Status.Failure(t); throw t } } } finally { TypedActor.selfReference set null - TypedActor.appReference set null + TypedActor.currentSystem set null } } } - private[akka] class TypedActorInvocationHandler(actorVar: AtomVar[ActorRef], timeout: Timeout) extends InvocationHandler { + /** + * Mix this into your TypedActor to be able to hook into its lifecycle + */ + trait PreStart { + /** + * User overridable callback. + *

+ * Is called when an Actor is started by invoking 'actor'. + */ + def preStart(): Unit = () + } + + /** + * Mix this into your TypedActor to be able to hook into its lifecycle + */ + trait PostStop { + /** + * User overridable callback. + *

+ * Is called when 'actor.stop()' is invoked. + */ + def postStop(): Unit = () + } + + /** + * Mix this into your TypedActor to be able to hook into its lifecycle + */ + trait PreRestart { + /** + * User overridable callback. + *

+ * Is called on a crashed Actor right BEFORE it is restarted to allow clean + * up of resources before the Actor is terminated. + * The default implementation does nothing. + */ + def preRestart(reason: Throwable, message: Option[Any]): Unit = () + } + + /** + * Mix this into your TypedActor to be able to hook into its lifecycle + */ + trait PostRestart { + /** + * User overridable callback. + *

+ * Is called right AFTER restart on the newly created Actor to allow reinitialization after an Actor crash. + * The default implementation does nothing. + */ + def postRestart(reason: Throwable): Unit = () + } + + private[akka] class TypedActorInvocationHandler(extension: TypedActorExtension, actorVar: AtomVar[ActorRef], timeout: Timeout) extends InvocationHandler { def actor = actorVar.get def invoke(proxy: AnyRef, method: Method, args: Array[AnyRef]): AnyRef = method.getName match { case "toString" ⇒ actor.toString - case "equals" ⇒ (args.length == 1 && (proxy eq args(0)) || actor == getActorRefFor(args(0))).asInstanceOf[AnyRef] //Force boxing of the boolean + case "equals" ⇒ (args.length == 1 && (proxy eq args(0)) || actor == extension.getActorRefFor(args(0))).asInstanceOf[AnyRef] //Force boxing of the boolean case "hashCode" ⇒ actor.hashCode.asInstanceOf[AnyRef] case _ ⇒ - MethodCall(ser, method, args) match { + MethodCall(method, args) match { case m if m.isOneWay ⇒ actor ! m; null //Null return value case m if m.returnsFuture_? ⇒ actor.?(m, timeout) case m if m.returnsJOption_? || m.returnsOption_? ⇒ @@ -382,3 +424,67 @@ class TypedActor(val settings: ActorSystem.Settings, var ser: Serialization) { } } } + +case class ContextualTypedActorFactory(typedActor: TypedActorExtension, actorFactory: ActorContext) extends TypedActorFactory { + override def getActorRefFor(proxy: AnyRef): ActorRef = typedActor.getActorRefFor(proxy) + override def isTypedActor(proxyOrNot: AnyRef): Boolean = typedActor.isTypedActor(proxyOrNot) +} + +class TypedActorExtension(system: ActorSystemImpl) extends TypedActorFactory with Extension { + import TypedActor._ //Import the goodies from the companion object + protected def actorFactory: ActorRefFactory = system + protected def typedActor = this + + val serialization = SerializationExtension(system) + val settings = system.settings + + /** + * Retrieves the underlying ActorRef for the supplied TypedActor proxy, or null if none found + */ + def getActorRefFor(proxy: AnyRef): ActorRef = invocationHandlerFor(proxy) match { + case null ⇒ null + case handler ⇒ handler.actor + } + + /** + * Returns whether the supplied AnyRef is a TypedActor proxy or not + */ + def isTypedActor(proxyOrNot: AnyRef): Boolean = invocationHandlerFor(proxyOrNot) ne null + + // Private API + + private[akka] def createProxy[R <: AnyRef](supervisor: ActorRefFactory, interfaces: Array[Class[_]], constructor: (AtomVar[R]) ⇒ Actor, props: Props, name: Option[String], loader: ClassLoader): R = { + val proxyVar = new AtomVar[R] + configureAndProxyLocalActorRef[R](supervisor, interfaces, proxyVar, props.withCreator(constructor(proxyVar)), name, loader) + } + + private[akka] def createProxyAndTypedActor[R <: AnyRef, T <: R](supervisor: ActorRefFactory, interface: Class[_], constructor: ⇒ T, props: Props, name: Option[String], loader: ClassLoader): R = + createProxy[R](supervisor, extractInterfaces(interface), (ref: AtomVar[R]) ⇒ new TypedActor[R, T](ref, constructor), props, name, loader) + + private[akka] def configureAndProxyLocalActorRef[T <: AnyRef](supervisor: ActorRefFactory, interfaces: Array[Class[_]], proxyVar: AtomVar[T], props: Props, name: Option[String], loader: ClassLoader): T = { + //Warning, do not change order of the following statements, it's some elaborate chicken-n-egg handling + val actorVar = new AtomVar[ActorRef](null) + val timeout = props.timeout match { + case Props.`defaultTimeout` ⇒ settings.ActorTimeout + case x ⇒ x + } + val proxy: T = Proxy.newProxyInstance(loader, interfaces, 
new TypedActorInvocationHandler(this, actorVar, timeout)).asInstanceOf[T] + proxyVar.set(proxy) // Chicken and egg situation we needed to solve, set the proxy so that we can set the self-reference inside each receive + val ref = if (name.isDefined) supervisor.actorOf(props, name.get) else supervisor.actorOf(props) + actorVar.set(ref) //Make sure the InvocationHandler gets ahold of the actor reference, this is not a problem since the proxy hasn't escaped this method yet + proxyVar.get + } + + private[akka] def extractInterfaces(clazz: Class[_]): Array[Class[_]] = if (clazz.isInterface) Array[Class[_]](clazz) else clazz.getInterfaces + + private[akka] def invocationHandlerFor(typedActor_? : AnyRef): TypedActorInvocationHandler = + if ((typedActor_? ne null) && Proxy.isProxyClass(typedActor_?.getClass)) typedActor_? match { + case null ⇒ null + case other ⇒ Proxy.getInvocationHandler(other) match { + case null ⇒ null + case handler: TypedActorInvocationHandler ⇒ handler + case _ ⇒ null + } + } + else null +} \ No newline at end of file diff --git a/akka-actor/src/main/scala/akka/event/Logging.scala b/akka-actor/src/main/scala/akka/event/Logging.scala index 5b3ae4b801..bae930cb17 100644 --- a/akka-actor/src/main/scala/akka/event/Logging.scala +++ b/akka-actor/src/main/scala/akka/event/Logging.scala @@ -14,6 +14,7 @@ import akka.actor.Timeout import akka.dispatch.FutureTimeoutException import java.util.concurrent.atomic.AtomicInteger import akka.actor.ActorRefProvider +import scala.util.control.NoStackTrace object LoggingBus { implicit def fromActorSystem(system: ActorSystem): LoggingBus = system.eventStream @@ -268,6 +269,7 @@ object Logging { val AllLogLevels = Seq(ErrorLevel: AnyRef, WarningLevel, InfoLevel, DebugLevel).asInstanceOf[Seq[LogLevel]] val errorFormat = "[ERROR] [%s] [%s] [%s] %s\n%s".intern + val errorFormatWithoutCause = "[ERROR] [%s] [%s] [%s] %s".intern val warningFormat = "[WARN] [%s] [%s] [%s] %s".intern val infoFormat = "[INFO] [%s] [%s] [%s] %s".intern val debugFormat = "[DEBUG] [%s] [%s] [%s] %s".intern @@ -311,7 +313,10 @@ object Logging { def level = ErrorLevel } object Error { - def apply(logSource: String, message: Any) = new Error(new EventHandlerException, logSource, message) + def apply(logSource: String, message: Any) = new Error(NoCause, logSource, message) + + /** Null Object used for errors without cause Throwable */ + object NoCause extends NoStackTrace } case class Warning(logSource: String, message: Any = "") extends LogEvent { @@ -363,13 +368,15 @@ object Logging { } } - def error(event: Error) = - println(errorFormat.format( + def error(event: Error) = { + val f = if (event.cause == Error.NoCause) errorFormatWithoutCause else errorFormat + println(f.format( timestamp, event.thread.getName, event.logSource, event.message, stackTraceFor(event.cause))) + } def warning(event: Warning) = println(warningFormat.format( @@ -429,14 +436,14 @@ object Logging { } def stackTraceFor(e: Throwable) = { - if (e ne null) { + if ((e eq null) || e == Error.NoCause) { + "" + } else { import java.io.{ StringWriter, PrintWriter } val sw = new StringWriter val pw = new PrintWriter(sw) e.printStackTrace(pw) sw.toString - } else { - "[NO STACK TRACE]" } } diff --git a/akka-actor/src/main/scala/akka/remote/RemoteInterface.scala b/akka-actor/src/main/scala/akka/remote/RemoteInterface.scala index 64a45fc9d7..6b5a0b374e 100644 --- a/akka-actor/src/main/scala/akka/remote/RemoteInterface.scala +++ b/akka-actor/src/main/scala/akka/remote/RemoteInterface.scala @@ -12,12 +12,23 @@ import 
java.net.InetSocketAddress import java.net.URI import java.net.URISyntaxException import java.net.InetAddress +import java.net.UnknownHostException object RemoteAddress { - def apply(system: String, host: String, port: Int) = { - val ip = InetAddress.getByName(host) + def apply(system: String, host: String, port: Int): RemoteAddress = { + // TODO check whether we should not rather bail out early + val ip = try InetAddress.getByName(host) catch { case _: UnknownHostException ⇒ null } new RemoteAddress(system, host, ip, port) } + + val RE = """(?:(\w+)@)?(\w+):(\d+)""".r + object Int { + def unapply(s: String) = Some(Integer.parseInt(s)) + } + def apply(stringRep: String, defaultSystem: String): RemoteAddress = stringRep match { + case RE(sys, host, Int(port)) ⇒ apply(if (sys != null) sys else defaultSystem, host, port) + case _ ⇒ throw new IllegalArgumentException(stringRep + " is not a valid remote address [system@host:port]") + } } case class RemoteAddress(system: String, host: String, ip: InetAddress, port: Int) extends Address { diff --git a/akka-actor/src/main/scala/akka/routing/Routing.scala b/akka-actor/src/main/scala/akka/routing/Routing.scala index 0dc6366cb1..293c1abb4b 100644 --- a/akka-actor/src/main/scala/akka/routing/Routing.scala +++ b/akka-actor/src/main/scala/akka/routing/Routing.scala @@ -41,6 +41,11 @@ object RouterType { */ object ScatterGather extends RouterType + /** + * A RouterType that broadcasts the messages to all connections. + */ + object Broadcast extends RouterType + /** * A RouterType that selects the connection based on the least amount of cpu usage */ @@ -67,15 +72,14 @@ object RouterType { * Routed ActorRef configuration object, this is thread safe and fully sharable. */ case class RoutedProps private[akka] ( - routerFactory: () ⇒ Router = RoutedProps.defaultRouterFactory, - connectionManager: ConnectionManager = new LocalConnectionManager(List()), + routerFactory: () ⇒ Router, + connectionManager: ConnectionManager, timeout: Timeout = RoutedProps.defaultTimeout, localOnly: Boolean = RoutedProps.defaultLocalOnly) { } object RoutedProps { final val defaultTimeout = Timeout(Duration.MinusInf) - final val defaultRouterFactory = () ⇒ new RoundRobinRouter final val defaultLocalOnly = false } @@ -257,12 +261,41 @@ trait BasicRouter extends Router { private def throwNoConnectionsError = throw new RoutingException("No replica connections for router") } +/** + * A Router that broadcasts a message to all its connections. + */ +class BroadcastRouter(implicit val dispatcher: MessageDispatcher, timeout: Timeout) extends BasicRouter with Serializable { + override def route(message: Any)(implicit sender: ActorRef) = { + connectionManager.connections.iterable foreach { connection ⇒ + try { + connection.!(message)(sender) // we use original sender, so this is essentially a 'forward' + } catch { + case e: Exception ⇒ + connectionManager.remove(connection) + throw e + } + } + } + + //protected def gather[S, G >: S](results: Iterable[Future[S]]): Future[G] = + override def route[T](message: Any, timeout: Timeout): Future[T] = { + import Future._ + implicit val t = timeout + val futures = connectionManager.connections.iterable map { connection ⇒ + connection.?(message, timeout).asInstanceOf[Future[T]] + } + Future.firstCompletedOf(futures) + } + + protected def next: Option[ActorRef] = None +} + /** * A DirectRouter is a Router that only has a single connected actorRef and forwards all requests to that actorRef. 
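// Illustrative sketch of the string form accepted by the new RemoteAddress parser above
// ([system@]host:port with \w+ tokens and a numeric port); the host names are examples and need
// not resolve, since unknown hosts simply yield a null InetAddress.
import akka.remote.RemoteAddress

object RemoteAddressExamples {
  val withSystem = RemoteAddress("MySystem@hostA:2552", "default") // system = "MySystem", host = "hostA", port = 2552
  val withDefault = RemoteAddress("hostB:2553", "MySystem")        // no system part, falls back to defaultSystem = "MySystem"
  // anything else, e.g. "hostC" without a port, is rejected with an IllegalArgumentException
}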
* * @author Jonas Bonér */ -class DirectRouter extends BasicRouter { +class DirectRouter(implicit val dispatcher: MessageDispatcher, timeout: Timeout) extends BasicRouter { private val state = new AtomicReference[DirectRouterState] @@ -304,7 +337,7 @@ class DirectRouter extends BasicRouter { * * @author Jonas Bonér */ -class RandomRouter extends BasicRouter { +class RandomRouter(implicit val dispatcher: MessageDispatcher, timeout: Timeout) extends BasicRouter { import java.security.SecureRandom private val state = new AtomicReference[RandomRouterState] @@ -346,7 +379,7 @@ class RandomRouter extends BasicRouter { * * @author Jonas Bonér */ -class RoundRobinRouter extends BasicRouter { +class RoundRobinRouter(implicit val dispatcher: MessageDispatcher, timeout: Timeout) extends BasicRouter { private val state = new AtomicReference[RoundRobinState] @@ -437,7 +470,7 @@ trait ScatterGatherRouter extends BasicRouter with Serializable { * (wrapped into {@link Routing.Broadcast} and sent with "?" method). For the messages sent in a fire-forget * mode, the router would behave as {@link RoundRobinRouter} */ -class ScatterGatherFirstCompletedRouter(implicit val dispatcher: MessageDispatcher, timeout: Timeout) extends RoundRobinRouter with ScatterGatherRouter { +class ScatterGatherFirstCompletedRouter(implicit dispatcher: MessageDispatcher, timeout: Timeout) extends RoundRobinRouter with ScatterGatherRouter { protected def gather[S, G >: S](results: Iterable[Future[S]]): Future[G] = Future.firstCompletedOf(results) } diff --git a/akka-actor/src/main/scala/akka/serialization/Serialization.scala b/akka-actor/src/main/scala/akka/serialization/Serialization.scala index 2e10bf22af..7232375fa8 100644 --- a/akka-actor/src/main/scala/akka/serialization/Serialization.scala +++ b/akka-actor/src/main/scala/akka/serialization/Serialization.scala @@ -6,16 +6,59 @@ package akka.serialization import akka.AkkaException import akka.util.ReflectiveAccess -import akka.actor.{ ActorSystem, ActorSystemImpl } import scala.util.DynamicVariable +import com.typesafe.config.{ ConfigRoot, ConfigParseOptions, ConfigFactory, Config } +import com.typesafe.config.Config._ +import akka.config.ConfigurationException +import akka.actor.{ Extension, ActorSystem, ActorSystemImpl } case class NoSerializerFoundException(m: String) extends AkkaException(m) +object Serialization { + + // TODO ensure that these are always set (i.e. 
withValue()) when doing deserialization + val currentSystem = new DynamicVariable[ActorSystemImpl](null) + + class Settings(cfg: Config) { + private def referenceConfig: Config = + ConfigFactory.parseResource(classOf[ActorSystem], "/akka-serialization-reference.conf", + ConfigParseOptions.defaults.setAllowMissing(false)) + val config: ConfigRoot = ConfigFactory.emptyRoot("akka-serialization").withFallback(cfg).withFallback(referenceConfig).resolve() + + import scala.collection.JavaConverters._ + import config._ + + val Serializers: Map[String, String] = { + toStringMap(getConfig("akka.actor.serializers")) + } + + val SerializationBindings: Map[String, Seq[String]] = { + val configPath = "akka.actor.serialization-bindings" + hasPath(configPath) match { + case false ⇒ Map() + case true ⇒ + val serializationBindings: Map[String, Seq[String]] = getConfig(configPath).toObject.unwrapped.asScala.toMap.map { + case (k: String, v: java.util.Collection[_]) ⇒ (k -> v.asScala.toSeq.asInstanceOf[Seq[String]]) + case invalid ⇒ throw new ConfigurationException("Invalid serialization-bindings [%s]".format(invalid)) + } + serializationBindings + + } + } + + private def toStringMap(mapConfig: Config): Map[String, String] = + mapConfig.toObject.unwrapped.asScala.toMap.map { case (k, v) ⇒ (k, v.toString) } + } +} + /** * Serialization module. Contains methods for serialization and deserialization as well as * locating a Serializer for a particular class as defined in the mapping in the 'akka.conf' file. */ -class Serialization(val system: ActorSystemImpl) { +class Serialization(val system: ActorSystemImpl) extends Extension { + import Serialization._ + + val settings = new Settings(system.applicationConfig) //TODO document me def serialize(o: AnyRef): Either[Exception, Array[Byte]] = @@ -27,7 +70,7 @@ class Serialization(val system: ActorSystemImpl) { clazz: Class[_], classLoader: Option[ClassLoader]): Either[Exception, AnyRef] = try { - Serialization.system.withValue(system) { + currentSystem.withValue(system) { Right(serializerFor(clazz).fromBinary(bytes, Some(clazz), classLoader)) } } catch { case e: Exception ⇒ Left(e) } @@ -63,15 +106,13 @@ class Serialization(val system: ActorSystemImpl) { } } - // serializers and bindings needs to be lazy because Serialization is initialized from SerializationExtension, which is needed here - /** * A Map of serializer from alias to implementation (class implementing akka.serialization.Serializer) * By default always contains the following mapping: "default" -> akka.serialization.JavaSerializer * But "default" can be overridden in config */ lazy val serializers: Map[String, Serializer] = { - val serializersConf = SerializationExtension(system).settings.Serializers + val serializersConf = settings.Serializers for ((k: String, v: String) ← serializersConf) yield k -> serializerOf(v).fold(throw _, identity) } @@ -80,7 +121,7 @@ class Serialization(val system: ActorSystemImpl) { * bindings is a Map whose keys = FQN of class that is serializable and values = the alias of the serializer to be used */ lazy val bindings: Map[String, String] = { - val configBindings = SerializationExtension(system).settings.SerializationBindings + val configBindings = settings.SerializationBindings configBindings.foldLeft(Map[String, String]()) { case (result, (k: String, vs: Seq[_])) ⇒ //All keys which are lists, take the Strings from them and Map them @@ -103,8 +144,3 @@ class Serialization(val system: ActorSystemImpl) { Map(NullSerializer.identifier -> NullSerializer) ++ serializers map { 
case (_, v) ⇒ (v.identifier, v) } } -object Serialization { - // TODO ensure that these are always set (i.e. withValue()) when doing deserialization - val system = new DynamicVariable[ActorSystemImpl](null) -} - diff --git a/akka-actor/src/main/scala/akka/serialization/SerializationExtension.scala b/akka-actor/src/main/scala/akka/serialization/SerializationExtension.scala index 4fc0b1be3c..a53ba832c7 100644 --- a/akka-actor/src/main/scala/akka/serialization/SerializationExtension.scala +++ b/akka-actor/src/main/scala/akka/serialization/SerializationExtension.scala @@ -3,77 +3,9 @@ */ package akka.serialization -import akka.actor.ActorSystem -import akka.actor.ExtensionKey -import akka.actor.Extension -import akka.actor.ActorSystemImpl -import com.typesafe.config.Config -import com.typesafe.config.ConfigFactory -import com.typesafe.config.ConfigParseOptions -import com.typesafe.config.ConfigRoot -import akka.config.ConfigurationException - -object SerializationExtensionKey extends ExtensionKey[SerializationExtension] - -object SerializationExtension { - def apply(system: ActorSystem): SerializationExtension = { - if (!system.hasExtension(SerializationExtensionKey)) { - system.registerExtension(new SerializationExtension) - } - system.extension(SerializationExtensionKey) - } - - class Settings(cfg: Config) { - private def referenceConfig: Config = - ConfigFactory.parseResource(classOf[ActorSystem], "/akka-serialization-reference.conf", - ConfigParseOptions.defaults.setAllowMissing(false)) - val config: ConfigRoot = ConfigFactory.emptyRoot("akka-serialization").withFallback(cfg).withFallback(referenceConfig).resolve() - - import scala.collection.JavaConverters._ - import config._ - - val Serializers: Map[String, String] = { - toStringMap(getConfig("akka.actor.serializers")) - } - - val SerializationBindings: Map[String, Seq[String]] = { - val configPath = "akka.actor.serialization-bindings" - hasPath(configPath) match { - case false ⇒ Map() - case true ⇒ - val serializationBindings: Map[String, Seq[String]] = getConfig(configPath).toObject.unwrapped.asScala.toMap.map { - case (k: String, v: java.util.Collection[_]) ⇒ (k -> v.asScala.toSeq.asInstanceOf[Seq[String]]) - case invalid ⇒ throw new ConfigurationException("Invalid serialization-bindings [%s]".format(invalid)) - } - serializationBindings - - } - } - - private def toStringMap(mapConfig: Config): Map[String, String] = { - mapConfig.toObject.unwrapped.asScala.toMap.map { entry ⇒ - (entry._1 -> entry._2.toString) - } - } - - } -} - -class SerializationExtension extends Extension[SerializationExtension] { - import SerializationExtension._ - @volatile - private var _settings: Settings = _ - @volatile - private var _serialization: Serialization = _ - def serialization = _serialization - - def key = SerializationExtensionKey - - def init(system: ActorSystemImpl) { - _settings = new Settings(system.applicationConfig) - _serialization = new Serialization(system) - } - - def settings: Settings = _settings +import akka.actor.{ ExtensionId, ExtensionIdProvider, ActorSystemImpl } +object SerializationExtension extends ExtensionId[Serialization] with ExtensionIdProvider { + override def lookup = SerializationExtension + override def createExtension(system: ActorSystemImpl): Serialization = new Serialization(system) } \ No newline at end of file diff --git a/akka-docs/general/configuration.rst b/akka-docs/general/configuration.rst index 5e8c3e9344..cffd15b4dd 100644 --- a/akka-docs/general/configuration.rst +++ b/akka-docs/general/configuration.rst 
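// Illustrative sketch, not part of the patch: obtaining the Serialization payload through the
// rewritten SerializationExtension above and round-tripping a message; assumes the
// serialize/deserialize signatures shown in Serialization.scala, and `msg` is any serializable value.
import akka.actor.ActorSystem
import akka.serialization.SerializationExtension

def roundTrip(system: ActorSystem, msg: AnyRef): AnyRef = {
  val serialization = SerializationExtension(system) // registers the extension on first access
  val bytes = serialization.serialize(msg).fold(throw _, identity)
  serialization.deserialize(bytes, msg.getClass, None).fold(throw _, identity)
}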
@@ -142,7 +142,7 @@ akka.dev.conf: :: akka { - event-handler-level = "DEBUG" + loglevel = "DEBUG" } The mode option works in the same way when using configuration files in ``AKKA_HOME/config/`` directory. @@ -162,7 +162,7 @@ akka.dev.conf: include "akka.conf" akka { - event-handler-level = "DEBUG" + loglevel = "DEBUG" } .. _-Dakka.output.config.source: diff --git a/akka-docs/general/event-handler.rst b/akka-docs/general/event-handler.rst index e4396faef5..c23911939e 100644 --- a/akka-docs/general/event-handler.rst +++ b/akka-docs/general/event-handler.rst @@ -16,7 +16,7 @@ You can configure which event handlers should be registered at boot time. That i akka { # event handlers to register at boot time (EventHandler$DefaultListener logs to STDOUT) event-handlers = ["akka.event.EventHandler$DefaultListener"] - event-handler-level = "DEBUG" # Options: ERROR, WARNING, INFO, DEBUG + loglevel = "DEBUG" # Options: ERROR, WARNING, INFO, DEBUG } The default one logs to STDOUT and is registered by default. It is not intended to be used for production. There is also an :ref:`slf4j` event handler available in the 'akka-slf4j' module. diff --git a/akka-docs/general/slf4j.rst b/akka-docs/general/slf4j.rst index a49b731771..876b139d65 100644 --- a/akka-docs/general/slf4j.rst +++ b/akka-docs/general/slf4j.rst @@ -4,11 +4,11 @@ SLF4J ===== This module is available in the 'akka-slf4j.jar'. It has one single dependency; the slf4j-api jar. In runtime you -also need a SLF4J backend, we recommend: +also need a SLF4J backend, we recommend `Logback `_: .. code-block:: scala - lazy val logback = "ch.qos.logback" % "logback-classic" % "0.9.28" % "runtime" + lazy val logback = "ch.qos.logback" % "logback-classic" % "1.0.0" % "runtime" Event Handler @@ -20,8 +20,22 @@ This module includes a SLF4J Event Handler that works with Akka's standard Event akka { event-handlers = ["akka.event.slf4j.Slf4jEventHandler"] - event-handler-level = "DEBUG" + loglevel = "DEBUG" } Read more about how to use the :ref:`event-handler`. +Logging thread in MDC +--------------------- + +Since the logging is done asynchronously the thread in which the logging was performed is captured in +Mapped Diagnostic Context (MDC) with attribute name ``sourceThread``. +With Logback the thread name is available with ``%X{sourceThread}`` specifier within the pattern layout configuration:: + +    <encoder> +      <pattern>%date{ISO8601} %-5level %logger{36} %X{sourceThread} - %msg%n</pattern> +    </encoder> + + \ No newline at end of file diff --git a/akka-docs/scala/testing.rst b/akka-docs/scala/testing.rst index 5eca79a784..ca1469217c 100644 --- a/akka-docs/scala/testing.rst +++ b/akka-docs/scala/testing.rst @@ -750,7 +750,7 @@ All these messages are logged at ``DEBUG`` level.
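Before the testing.rst fragment that follows, an illustrative aside (not part of the patch): a minimal sketch of the renamed ``loglevel`` setting combined with the SLF4J event handler, supplied programmatically rather than through akka.conf. It assumes akka-slf4j and a Logback configuration are on the classpath at this revision; the object name and the standalone-App shape are made up for the example:

.. code-block:: scala

   import akka.actor.ActorSystem
   import com.typesafe.config.{ ConfigFactory, ConfigParseOptions }

   object Slf4jLoglevelSketch extends App {
     val conf = ConfigFactory.parseString("""
       akka {
         event-handlers = ["akka.event.slf4j.Slf4jEventHandler"]
         loglevel = "DEBUG"   # was `event-handler-level` before this change
       }
       """, ConfigParseOptions.defaults)

     val system = ActorSystem("Slf4jLoglevelSketch", conf)
     // Logging now flows through Logback; the emitting thread is available to the
     // pattern layout as %X{sourceThread}, as documented in the slf4j.rst hunk above.
     system.stop()
   }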
To summarize, you can enable full logging of actor activities using this configuration fragment:: akka { - event-handler-level = "DEBUG" + loglevel = "DEBUG" actor { debug { receive = "true" diff --git a/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailbox.scala b/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailbox.scala index aa0ee6645d..c680511697 100644 --- a/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailbox.scala +++ b/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailbox.scala @@ -21,7 +21,7 @@ class BeanstalkBasedMailboxException(message: String) extends AkkaException(mess */ class BeanstalkBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) with DurableMessageSerialization { - private val settings = BeanstalkBasedMailboxExtension(owner.system).settings + private val settings = BeanstalkBasedMailboxExtension(owner.system) private val messageSubmitDelaySeconds = settings.MessageSubmitDelay.toSeconds.toInt private val messageTimeToLiveSeconds = settings.MessageTimeToLive.toSeconds.toInt diff --git a/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailboxExtension.scala b/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailboxExtension.scala index 539b5b45e1..5f6fd40708 100644 --- a/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailboxExtension.scala +++ b/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailboxExtension.scala @@ -3,56 +3,32 @@ */ package akka.actor.mailbox -import akka.actor.ActorSystem -import akka.actor.ExtensionKey -import akka.actor.Extension -import akka.actor.ActorSystemImpl import com.typesafe.config.Config import com.typesafe.config.ConfigFactory import com.typesafe.config.ConfigParseOptions import com.typesafe.config.ConfigRoot import akka.util.Duration import java.util.concurrent.TimeUnit.MILLISECONDS +import akka.actor._ -object BeanstalkBasedMailboxExtensionKey extends ExtensionKey[BeanstalkBasedMailboxExtension] - -object BeanstalkBasedMailboxExtension { - def apply(system: ActorSystem): BeanstalkBasedMailboxExtension = { - if (!system.hasExtension(BeanstalkBasedMailboxExtensionKey)) { - system.registerExtension(new BeanstalkBasedMailboxExtension) - } - system.extension(BeanstalkBasedMailboxExtensionKey) - } - - class Settings(cfg: Config) { - private def referenceConfig: Config = - ConfigFactory.parseResource(classOf[ActorSystem], "/akka-beanstalk-mailbox-reference.conf", - ConfigParseOptions.defaults.setAllowMissing(false)) - val config: ConfigRoot = ConfigFactory.emptyRoot("akka-beanstalk-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve() - - import config._ - - val Hostname = getString("akka.actor.mailbox.beanstalk.hostname") - val Port = getInt("akka.actor.mailbox.beanstalk.port") - val ReconnectWindow = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.reconnect-window"), MILLISECONDS) - val MessageSubmitDelay = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.message-submit-delay"), MILLISECONDS) - val MessageSubmitTimeout = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.message-submit-timeout"), MILLISECONDS) - val MessageTimeToLive = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.message-time-to-live"), 
MILLISECONDS) - - } +object BeanstalkBasedMailboxExtension extends ExtensionId[BeanstalkMailboxSettings] with ExtensionIdProvider { + def lookup() = this + def createExtension(system: ActorSystemImpl) = new BeanstalkMailboxSettings(system.applicationConfig) } -class BeanstalkBasedMailboxExtension extends Extension[BeanstalkBasedMailboxExtension] { - import BeanstalkBasedMailboxExtension._ - @volatile - private var _settings: Settings = _ +class BeanstalkMailboxSettings(cfg: Config) extends Extension { + private def referenceConfig: Config = + ConfigFactory.parseResource(classOf[ActorSystem], "/akka-beanstalk-mailbox-reference.conf", + ConfigParseOptions.defaults.setAllowMissing(false)) + val config: ConfigRoot = ConfigFactory.emptyRoot("akka-beanstalk-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve() - def key = BeanstalkBasedMailboxExtensionKey + import config._ - def init(system: ActorSystemImpl) { - _settings = new Settings(system.applicationConfig) - } - - def settings: Settings = _settings + val Hostname = getString("akka.actor.mailbox.beanstalk.hostname") + val Port = getInt("akka.actor.mailbox.beanstalk.port") + val ReconnectWindow = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.reconnect-window"), MILLISECONDS) + val MessageSubmitDelay = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.message-submit-delay"), MILLISECONDS) + val MessageSubmitTimeout = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.message-submit-timeout"), MILLISECONDS) + val MessageTimeToLive = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.message-time-to-live"), MILLISECONDS) } \ No newline at end of file diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FileBasedMailboxExtension.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FileBasedMailboxExtension.scala index f6ca730a1c..1bdf9ae958 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FileBasedMailboxExtension.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FileBasedMailboxExtension.scala @@ -3,63 +3,39 @@ */ package akka.actor.mailbox -import akka.actor.ActorSystem -import akka.actor.ExtensionKey -import akka.actor.Extension -import akka.actor.ActorSystemImpl import com.typesafe.config.Config import com.typesafe.config.ConfigFactory import com.typesafe.config.ConfigParseOptions import com.typesafe.config.ConfigRoot import akka.util.Duration import java.util.concurrent.TimeUnit.MILLISECONDS +import akka.actor._ -object FileBasedMailboxExtensionKey extends ExtensionKey[FileBasedMailboxExtension] - -object FileBasedMailboxExtension { - def apply(system: ActorSystem): FileBasedMailboxExtension = { - if (!system.hasExtension(FileBasedMailboxExtensionKey)) { - system.registerExtension(new FileBasedMailboxExtension) - } - system.extension(FileBasedMailboxExtensionKey) - } - - class Settings(cfg: Config) { - private def referenceConfig: Config = - ConfigFactory.parseResource(classOf[ActorSystem], "/akka-file-mailbox-reference.conf", - ConfigParseOptions.defaults.setAllowMissing(false)) - val config: ConfigRoot = ConfigFactory.emptyRoot("akka-file-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve() - - import config._ - - val QueuePath = getString("akka.actor.mailbox.file-based.directory-path") - - val MaxItems = getInt("akka.actor.mailbox.file-based.max-items") - val MaxSize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-size") - val MaxItemSize 
= getMemorySizeInBytes("akka.actor.mailbox.file-based.max-item-size") - val MaxAge = Duration(getMilliseconds("akka.actor.mailbox.file-based.max-age"), MILLISECONDS) - val MaxJournalSize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-journal-size") - val MaxMemorySize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-memory-size") - val MaxJournalOverflow = getInt("akka.actor.mailbox.file-based.max-journal-overflow") - val MaxJournalSizeAbsolute = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-journal-size-absolute") - val DiscardOldWhenFull = getBoolean("akka.actor.mailbox.file-based.discard-old-when-full") - val KeepJournal = getBoolean("akka.actor.mailbox.file-based.keep-journal") - val SyncJournal = getBoolean("akka.actor.mailbox.file-based.sync-journal") - - } +object FileBasedMailboxExtension extends ExtensionId[FileBasedMailboxSettings] with ExtensionIdProvider { + def lookup() = this + def createExtension(system: ActorSystemImpl) = new FileBasedMailboxSettings(system.applicationConfig) } -class FileBasedMailboxExtension extends Extension[FileBasedMailboxExtension] { - import FileBasedMailboxExtension._ - @volatile - private var _settings: Settings = _ +class FileBasedMailboxSettings(cfg: Config) extends Extension { + private def referenceConfig: Config = + ConfigFactory.parseResource(classOf[ActorSystem], "/akka-file-mailbox-reference.conf", + ConfigParseOptions.defaults.setAllowMissing(false)) + val config: ConfigRoot = ConfigFactory.emptyRoot("akka-file-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve() - def key = FileBasedMailboxExtensionKey + import config._ - def init(system: ActorSystemImpl) { - _settings = new Settings(system.applicationConfig) - } + val QueuePath = getString("akka.actor.mailbox.file-based.directory-path") - def settings: Settings = _settings + val MaxItems = getInt("akka.actor.mailbox.file-based.max-items") + val MaxSize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-size") + val MaxItemSize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-item-size") + val MaxAge = Duration(getMilliseconds("akka.actor.mailbox.file-based.max-age"), MILLISECONDS) + val MaxJournalSize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-journal-size") + val MaxMemorySize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-memory-size") + val MaxJournalOverflow = getInt("akka.actor.mailbox.file-based.max-journal-overflow") + val MaxJournalSizeAbsolute = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-journal-size-absolute") + val DiscardOldWhenFull = getBoolean("akka.actor.mailbox.file-based.discard-old-when-full") + val KeepJournal = getBoolean("akka.actor.mailbox.file-based.keep-journal") + val SyncJournal = getBoolean("akka.actor.mailbox.file-based.sync-journal") } \ No newline at end of file diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FiledBasedMailbox.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FiledBasedMailbox.scala index e167a88f27..8a81b2f8e4 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FiledBasedMailbox.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FiledBasedMailbox.scala @@ -14,7 +14,7 @@ class FileBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) with val log = Logging(system, "FileBasedMailbox") - private val settings = FileBasedMailboxExtension(owner.system).settings + private val settings = 
FileBasedMailboxExtension(owner.system) val queuePath = settings.QueuePath private val queue = try { diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/PersistentQueue.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/PersistentQueue.scala index 1ae3cd9e06..1a5ddf4a8c 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/PersistentQueue.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/PersistentQueue.scala @@ -22,7 +22,7 @@ import scala.collection.mutable import akka.event.LoggingAdapter import akka.util.Duration import java.util.concurrent.TimeUnit -import akka.actor.mailbox.FileBasedMailboxExtension +import akka.actor.mailbox.FileBasedMailboxSettings // a config value that's backed by a global setting but may be locally overridden class OverlaySetting[T](base: ⇒ T) { @@ -34,7 +34,7 @@ class OverlaySetting[T](base: ⇒ T) { def apply() = local.getOrElse(base) } -class PersistentQueue(persistencePath: String, val name: String, val settings: FileBasedMailboxExtension.Settings, log: LoggingAdapter) { +class PersistentQueue(persistencePath: String, val name: String, val settings: FileBasedMailboxSettings, log: LoggingAdapter) { private case object ItemArrived @@ -127,7 +127,7 @@ class PersistentQueue(persistencePath: String, val name: String, val settings: F configure(settings) - def configure(settings: FileBasedMailboxExtension.Settings) = synchronized { + def configure(settings: FileBasedMailboxSettings) = synchronized { maxItems set Some(settings.MaxItems) maxSize set Some(settings.MaxSize) maxItemSize set Some(settings.MaxItemSize) diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/QueueCollection.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/QueueCollection.scala index ff5e12c86e..568428dfc6 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/QueueCollection.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/filequeue/QueueCollection.scala @@ -21,11 +21,11 @@ import java.io.File import java.util.concurrent.CountDownLatch import scala.collection.mutable import akka.event.LoggingAdapter -import akka.actor.mailbox.FileBasedMailboxExtension +import akka.actor.mailbox.FileBasedMailboxSettings class InaccessibleQueuePath extends Exception("Inaccessible queue path: Must be a directory and writable") -class QueueCollection(queueFolder: String, settings: FileBasedMailboxExtension.Settings, log: LoggingAdapter) { +class QueueCollection(queueFolder: String, settings: FileBasedMailboxSettings, log: LoggingAdapter) { private val path = new File(queueFolder) if (!path.isDirectory) { diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/test/scala/akka/actor/mailbox/FileBasedMailboxSpec.scala b/akka-durable-mailboxes/akka-file-mailbox/src/test/scala/akka/actor/mailbox/FileBasedMailboxSpec.scala index 8b17ec9662..d1a36d14eb 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/test/scala/akka/actor/mailbox/FileBasedMailboxSpec.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/test/scala/akka/actor/mailbox/FileBasedMailboxSpec.scala @@ -6,7 +6,7 @@ import org.apache.commons.io.FileUtils class FileBasedMailboxSpec extends DurableMailboxSpec("File", FileDurableMailboxType) { def clean { - val queuePath = 
FileBasedMailboxExtension(system).settings.QueuePath + val queuePath = FileBasedMailboxExtension(system).QueuePath FileUtils.deleteDirectory(new java.io.File(queuePath)) } diff --git a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailbox.scala b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailbox.scala index 65fda89354..d010a1ef6a 100644 --- a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailbox.scala +++ b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailbox.scala @@ -31,7 +31,7 @@ class MongoBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) { implicit val mailboxBSONSer = new BSONSerializableMailbox(system) implicit val safeWrite = WriteConcern.Safe // TODO - Replica Safe when appropriate! - private val settings = MongoBasedMailboxExtension(owner.system).settings + private val settings = MongoBasedMailboxExtension(owner.system) val log = Logging(system, "MongoBasedMailbox") diff --git a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailboxExtension.scala b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailboxExtension.scala index e6ca3443e0..88eb95438c 100644 --- a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailboxExtension.scala +++ b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailboxExtension.scala @@ -3,54 +3,30 @@ */ package akka.actor.mailbox -import akka.actor.ActorSystem -import akka.actor.ExtensionKey -import akka.actor.Extension -import akka.actor.ActorSystemImpl import com.typesafe.config.Config import com.typesafe.config.ConfigFactory import com.typesafe.config.ConfigParseOptions import com.typesafe.config.ConfigRoot import akka.util.Duration import java.util.concurrent.TimeUnit.MILLISECONDS +import akka.actor._ -object MongoBasedMailboxExtensionKey extends ExtensionKey[MongoBasedMailboxExtension] - -object MongoBasedMailboxExtension { - def apply(system: ActorSystem): MongoBasedMailboxExtension = { - if (!system.hasExtension(MongoBasedMailboxExtensionKey)) { - system.registerExtension(new MongoBasedMailboxExtension) - } - system.extension(MongoBasedMailboxExtensionKey) - } - - class Settings(cfg: Config) { - private def referenceConfig: Config = - ConfigFactory.parseResource(classOf[ActorSystem], "/akka-mongo-mailbox-reference.conf", - ConfigParseOptions.defaults.setAllowMissing(false)) - val config: ConfigRoot = ConfigFactory.emptyRoot("akka-mongo-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve() - - import config._ - - val UriConfigKey = "akka.actor.mailbox.mongodb.uri" - val MongoURI = if (config.hasPath(UriConfigKey)) Some(config.getString(UriConfigKey)) else None - val WriteTimeout = Duration(config.getMilliseconds("akka.actor.mailbox.mongodb.timeout.write"), MILLISECONDS) - val ReadTimeout = Duration(config.getMilliseconds("akka.actor.mailbox.mongodb.timeout.read"), MILLISECONDS) - - } +object MongoBasedMailboxExtension extends ExtensionId[MongoBasedMailboxSettings] with ExtensionIdProvider { + def lookup() = this + def createExtension(system: ActorSystemImpl) = new MongoBasedMailboxSettings(system.applicationConfig) } -class MongoBasedMailboxExtension extends Extension[MongoBasedMailboxExtension] { - import MongoBasedMailboxExtension._ - @volatile - private var _settings: Settings = _ +class 
MongoBasedMailboxSettings(cfg: Config) extends Extension { + private def referenceConfig: Config = + ConfigFactory.parseResource(classOf[ActorSystem], "/akka-mongo-mailbox-reference.conf", + ConfigParseOptions.defaults.setAllowMissing(false)) + val config: ConfigRoot = ConfigFactory.emptyRoot("akka-mongo-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve() - def key = MongoBasedMailboxExtensionKey + import config._ - def init(system: ActorSystemImpl) { - _settings = new Settings(system.applicationConfig) - } - - def settings: Settings = _settings + val UriConfigKey = "akka.actor.mailbox.mongodb.uri" + val MongoURI = if (config.hasPath(UriConfigKey)) Some(config.getString(UriConfigKey)) else None + val WriteTimeout = Duration(config.getMilliseconds("akka.actor.mailbox.mongodb.timeout.write"), MILLISECONDS) + val ReadTimeout = Duration(config.getMilliseconds("akka.actor.mailbox.mongodb.timeout.read"), MILLISECONDS) } \ No newline at end of file diff --git a/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailbox.scala b/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailbox.scala index d005d5675f..7bb1c5a5dc 100644 --- a/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailbox.scala +++ b/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailbox.scala @@ -18,7 +18,7 @@ class RedisBasedMailboxException(message: String) extends AkkaException(message) */ class RedisBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) with DurableMessageSerialization { - private val settings = RedisBasedMailboxExtension(owner.system).settings + private val settings = RedisBasedMailboxExtension(owner.system) @volatile private var clients = connect() // returns a RedisClientPool for multiple asynchronous message handling diff --git a/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailboxExtension.scala b/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailboxExtension.scala index 4b3d424e0f..beccf4051f 100644 --- a/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailboxExtension.scala +++ b/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailboxExtension.scala @@ -3,50 +3,25 @@ */ package akka.actor.mailbox -import akka.actor.ActorSystem -import akka.actor.ExtensionKey -import akka.actor.Extension -import akka.actor.ActorSystemImpl import com.typesafe.config.Config import com.typesafe.config.ConfigFactory import com.typesafe.config.ConfigParseOptions import com.typesafe.config.ConfigRoot +import akka.actor._ -object RedisBasedMailboxExtensionKey extends ExtensionKey[RedisBasedMailboxExtension] - -object RedisBasedMailboxExtension { - def apply(system: ActorSystem): RedisBasedMailboxExtension = { - if (!system.hasExtension(RedisBasedMailboxExtensionKey)) { - system.registerExtension(new RedisBasedMailboxExtension) - } - system.extension(RedisBasedMailboxExtensionKey) - } - - class Settings(cfg: Config) { - private def referenceConfig: Config = - ConfigFactory.parseResource(classOf[ActorSystem], "/akka-redis-mailbox-reference.conf", - ConfigParseOptions.defaults.setAllowMissing(false)) - val config: ConfigRoot = ConfigFactory.emptyRoot("akka-redis-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve() - - import config._ - - val Hostname = 
getString("akka.actor.mailbox.redis.hostname") - val Port = getInt("akka.actor.mailbox.redis.port") - - } +object RedisBasedMailboxExtension extends ExtensionId[RedisBasedMailboxSettings] with ExtensionIdProvider { + def lookup() = this + def createExtension(system: ActorSystemImpl) = new RedisBasedMailboxSettings(system.applicationConfig) } -class RedisBasedMailboxExtension extends Extension[RedisBasedMailboxExtension] { - import RedisBasedMailboxExtension._ - @volatile - private var _settings: Settings = _ +class RedisBasedMailboxSettings(cfg: Config) extends Extension { + private def referenceConfig: Config = + ConfigFactory.parseResource(classOf[ActorSystem], "/akka-redis-mailbox-reference.conf", + ConfigParseOptions.defaults.setAllowMissing(false)) + val config: ConfigRoot = ConfigFactory.emptyRoot("akka-redis-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve() - def key = RedisBasedMailboxExtensionKey - - def init(system: ActorSystemImpl) { - _settings = new Settings(system.applicationConfig) - } - - def settings: Settings = _settings + import config._ + val Hostname = getString("akka.actor.mailbox.redis.hostname") + val Port = getInt("akka.actor.mailbox.redis.port") } \ No newline at end of file diff --git a/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailbox.scala b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailbox.scala index 3979dfdf36..8350f743d5 100644 --- a/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailbox.scala +++ b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailbox.scala @@ -22,7 +22,7 @@ class ZooKeeperBasedMailboxException(message: String) extends AkkaException(mess */ class ZooKeeperBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) with DurableMessageSerialization { - private val settings = ZooKeeperBasedMailboxExtension(owner.system).settings + private val settings = ZooKeeperBasedMailboxExtension(owner.system) val queueNode = "/queues" val queuePathTemplate = queueNode + "/%s" diff --git a/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailboxExtension.scala b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailboxExtension.scala index a08df43bf5..e2b0ad45f7 100644 --- a/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailboxExtension.scala +++ b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailboxExtension.scala @@ -3,54 +3,29 @@ */ package akka.actor.mailbox -import akka.actor.ActorSystem -import akka.actor.ExtensionKey -import akka.actor.Extension -import akka.actor.ActorSystemImpl import com.typesafe.config.Config import com.typesafe.config.ConfigFactory import com.typesafe.config.ConfigParseOptions import com.typesafe.config.ConfigRoot import akka.util.Duration import java.util.concurrent.TimeUnit.MILLISECONDS +import akka.actor._ -object ZooKeeperBasedMailboxExtensionKey extends ExtensionKey[ZooKeeperBasedMailboxExtension] - -object ZooKeeperBasedMailboxExtension { - def apply(system: ActorSystem): ZooKeeperBasedMailboxExtension = { - if (!system.hasExtension(ZooKeeperBasedMailboxExtensionKey)) { - system.registerExtension(new ZooKeeperBasedMailboxExtension) - } - system.extension(ZooKeeperBasedMailboxExtensionKey) - } - - class Settings(cfg: 
Config) { - private def referenceConfig: Config = - ConfigFactory.parseResource(classOf[ActorSystem], "/akka-zookeeper-mailbox-reference.conf", - ConfigParseOptions.defaults.setAllowMissing(false)) - val config: ConfigRoot = ConfigFactory.emptyRoot("akka-zookeeper-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve() - - import config._ - - val ZkServerAddresses = getString("akka.actor.mailbox.zookeeper.server-addresses") - val SessionTimeout = Duration(getMilliseconds("akka.actor.mailbox.zookeeper.session-timeout"), MILLISECONDS) - val ConnectionTimeout = Duration(getMilliseconds("akka.actor.mailbox.zookeeper.connection-timeout"), MILLISECONDS) - val BlockingQueue = getBoolean("akka.actor.mailbox.zookeeper.blocking-queue") - - } +object ZooKeeperBasedMailboxExtension extends ExtensionId[ZooKeeperBasedMailboxSettings] with ExtensionIdProvider { + def lookup() = this + def createExtension(system: ActorSystemImpl) = new ZooKeeperBasedMailboxSettings(system.applicationConfig) } +class ZooKeeperBasedMailboxSettings(cfg: Config) extends Extension { + private def referenceConfig: Config = + ConfigFactory.parseResource(classOf[ActorSystem], "/akka-zookeeper-mailbox-reference.conf", + ConfigParseOptions.defaults.setAllowMissing(false)) + val config: ConfigRoot = ConfigFactory.emptyRoot("akka-zookeeper-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve() -class ZooKeeperBasedMailboxExtension extends Extension[ZooKeeperBasedMailboxExtension] { - import ZooKeeperBasedMailboxExtension._ - @volatile - private var _settings: Settings = _ + import config._ - def key = ZooKeeperBasedMailboxExtensionKey - - def init(system: ActorSystemImpl) { - _settings = new Settings(system.applicationConfig) - } - - def settings: Settings = _settings + val ZkServerAddresses = getString("akka.actor.mailbox.zookeeper.server-addresses") + val SessionTimeout = Duration(getMilliseconds("akka.actor.mailbox.zookeeper.session-timeout"), MILLISECONDS) + val ConnectionTimeout = Duration(getMilliseconds("akka.actor.mailbox.zookeeper.connection-timeout"), MILLISECONDS) + val BlockingQueue = getBoolean("akka.actor.mailbox.zookeeper.blocking-queue") } \ No newline at end of file diff --git a/akka-remote/src/main/resources/akka-remote-reference.conf b/akka-remote/src/main/resources/akka-remote-reference.conf index 4d31549b73..39fcda8cd7 100644 --- a/akka-remote/src/main/resources/akka-remote-reference.conf +++ b/akka-remote/src/main/resources/akka-remote-reference.conf @@ -10,8 +10,8 @@ akka { remote { # FIXME rename to transport layer = "akka.cluster.netty.NettyRemoteSupport" - - use-compression = off + + use-compression = off secure-cookie = "" # Generate your own with '$AKKA_HOME/scripts/generate_config_with_secure_cookie.sh' # or using 'akka.util.Crypt.generateSecureCookie' @@ -50,28 +50,10 @@ akka { reconnection-time-window = 600s # Maximum time window that a client should try to reconnect for } } - - // TODO cluster config will go into akka-cluster-reference.conf when we enable that module + cluster { - name = "test-cluster" + name = "default-cluster" nodename = "" - zookeeper-server-addresses = "localhost:2181" # comma-separated list of ':' elements - max-time-to-wait-until-connected = 30s - session-timeout = 60s - connection-timeout = 60s - include-ref-node-in-replica-set = on # Can a replica be instantiated on the same node as the cluster reference to the actor - # Default: on - log-directory = "_akka_cluster" # Where ZooKeeper should store the logs and data files - - replication { - digest-type = 
"MAC" # Options: CRC32 (cheap & unsafe), MAC (expensive & secure using password) - password = "secret" # FIXME: store open in file? - ensemble-size = 3 - quorum-size = 2 - snapshot-frequency = 1000 # The number of messages that should be logged between every actor snapshot - timeout = 30s # Timeout for asyncronous (write-behind) operations - } + seed-nodes = [] } - - } diff --git a/akka-remote/src/main/scala/akka/remote/AccrualFailureDetector.scala b/akka-remote/src/main/scala/akka/remote/AccrualFailureDetector.scala index 01244a5fad..87dda83b71 100644 --- a/akka-remote/src/main/scala/akka/remote/AccrualFailureDetector.scala +++ b/akka-remote/src/main/scala/akka/remote/AccrualFailureDetector.scala @@ -27,8 +27,8 @@ class AccrualFailureDetector(val threshold: Int = 8, val maxSampleSize: Int = 10 def this(system: ActorSystem) { this( - RemoteExtension(system).settings.FailureDetectorThreshold, - RemoteExtension(system).settings.FailureDetectorMaxSampleSize) + RemoteExtension(system).FailureDetectorThreshold, + RemoteExtension(system).FailureDetectorMaxSampleSize) } private final val PhiFactor = 1.0 / math.log(10.0) diff --git a/akka-remote/src/main/scala/akka/remote/Gossiper.scala b/akka-remote/src/main/scala/akka/remote/Gossiper.scala index 755fc08df0..250fbc727c 100644 --- a/akka-remote/src/main/scala/akka/remote/Gossiper.scala +++ b/akka-remote/src/main/scala/akka/remote/Gossiper.scala @@ -11,13 +11,17 @@ import akka.util.duration._ import akka.util.Duration import akka.remote.RemoteProtocol._ import akka.remote.RemoteProtocol.RemoteSystemDaemonMessageType._ +import akka.config.ConfigurationException +import akka.serialization.SerializationExtension + import java.util.concurrent.atomic.AtomicReference import java.security.SecureRandom import System.{ currentTimeMillis ⇒ newTimestamp } + import scala.collection.immutable.Map import scala.annotation.tailrec + import com.google.protobuf.ByteString -import akka.serialization.SerializationExtension /** * Interface for node membership change listener. @@ -36,9 +40,8 @@ case class Gossip( availableNodes: Set[RemoteAddress] = Set.empty[RemoteAddress], unavailableNodes: Set[RemoteAddress] = Set.empty[RemoteAddress]) +// ====== START - NEW GOSSIP IMPLEMENTATION ====== /* - // ====== NEW GOSSIP IMPLEMENTATION ====== - case class Gossip( version: VectorClock, node: RemoteAddress, @@ -74,6 +77,7 @@ case class Gossip( changes: Vector[VNodeMod], status: PendingPartitioningStatus) */ +// ====== END - NEW GOSSIP IMPLEMENTATION ====== /** * This module is responsible for Gossiping cluster information. 
The abstraction maintains the list of live @@ -102,11 +106,17 @@ class Gossiper(remote: Remote) { private val system = remote.system private val remoteExtension = RemoteExtension(system) - private val serializationExtension = SerializationExtension(system) + private val serialization = SerializationExtension(system) private val log = Logging(system, "Gossiper") private val failureDetector = remote.failureDetector private val connectionManager = new RemoteConnectionManager(system, remote, Map.empty[RemoteAddress, ActorRef]) - private val seeds = Set(address) // FIXME read in list of seeds from config + + private val seeds = { + val seeds = remoteExtension.SeedNodes + if (seeds.isEmpty) throw new ConfigurationException( + "At least one seed node must be defined in the configuration [akka.cluster.seed-nodes]") + else seeds + } private val address = remote.remoteAddress private val nodeFingerprint = address.## @@ -238,7 +248,7 @@ class Gossiper(remote: Remote) { throw new IllegalStateException("Connection for [" + peer + "] is not set up")) try { - (connection ? (toRemoteMessage(newGossip), remoteExtension.settings.RemoteSystemDaemonAckTimeout)).as[Status] match { + (connection ? (toRemoteMessage(newGossip), remoteExtension.RemoteSystemDaemonAckTimeout)).as[Status] match { case Some(Success(receiver)) ⇒ log.debug("Gossip sent to [{}] was successfully received", receiver) @@ -300,7 +310,7 @@ class Gossiper(remote: Remote) { } private def toRemoteMessage(gossip: Gossip): RemoteProtocol.RemoteSystemDaemonMessageProtocol = { - val gossipAsBytes = serializationExtension.serialization.serialize(gossip) match { + val gossipAsBytes = serialization.serialize(gossip) match { case Left(error) ⇒ throw error case Right(bytes) ⇒ bytes } diff --git a/akka-remote/src/main/scala/akka/remote/MessageSerializer.scala b/akka-remote/src/main/scala/akka/remote/MessageSerializer.scala index 628264b207..03e10e770b 100644 --- a/akka-remote/src/main/scala/akka/remote/MessageSerializer.scala +++ b/akka-remote/src/main/scala/akka/remote/MessageSerializer.scala @@ -14,13 +14,13 @@ object MessageSerializer { def deserialize(system: ActorSystem, messageProtocol: MessageProtocol, classLoader: Option[ClassLoader] = None): AnyRef = { val clazz = loadManifest(classLoader, messageProtocol) - SerializationExtension(system).serialization.deserialize(messageProtocol.getMessage.toByteArray, + SerializationExtension(system).deserialize(messageProtocol.getMessage.toByteArray, clazz, classLoader).fold(x ⇒ throw x, identity) } def serialize(system: ActorSystem, message: AnyRef): MessageProtocol = { val builder = MessageProtocol.newBuilder - val bytes = SerializationExtension(system).serialization.serialize(message).fold(x ⇒ throw x, identity) + val bytes = SerializationExtension(system).serialize(message).fold(x ⇒ throw x, identity) builder.setMessage(ByteString.copyFrom(bytes)) builder.setMessageManifest(ByteString.copyFromUtf8(message.getClass.getName)) builder.build diff --git a/akka-remote/src/main/scala/akka/remote/Remote.scala b/akka-remote/src/main/scala/akka/remote/Remote.scala index 1a3bc27b0b..4eccce6b33 100644 --- a/akka-remote/src/main/scala/akka/remote/Remote.scala +++ b/akka-remote/src/main/scala/akka/remote/Remote.scala @@ -36,9 +36,9 @@ class Remote(val system: ActorSystemImpl, val nodename: String) { import settings._ private[remote] val remoteExtension = RemoteExtension(system) - private[remote] val serializationExtension = SerializationExtension(system) + private[remote] val serialization = 
SerializationExtension(system) private[remote] val remoteAddress = { - RemoteAddress(system.name, remoteExtension.settings.serverSettings.Hostname, remoteExtension.settings.serverSettings.Port) + RemoteAddress(system.name, remoteExtension.serverSettings.Hostname, remoteExtension.serverSettings.Port) } val failureDetector = new AccrualFailureDetector(system) @@ -133,10 +133,10 @@ class RemoteSystemDaemon(remote: Remote) extends Actor { if (message.hasActorPath) { val actorFactoryBytes = - if (remoteExtension.settings.ShouldCompressData) LZF.uncompress(message.getPayload.toByteArray) else message.getPayload.toByteArray + if (remoteExtension.ShouldCompressData) LZF.uncompress(message.getPayload.toByteArray) else message.getPayload.toByteArray val actorFactory = - serializationExtension.serialization.deserialize(actorFactoryBytes, classOf[() ⇒ Actor], None) match { + serialization.deserialize(actorFactoryBytes, classOf[() ⇒ Actor], None) match { case Left(error) ⇒ throw error case Right(instance) ⇒ instance.asInstanceOf[() ⇒ Actor] } @@ -237,7 +237,7 @@ class RemoteSystemDaemon(remote: Remote) extends Actor { } private def payloadFor[T](message: RemoteSystemDaemonMessageProtocol, clazz: Class[T]): T = { - serializationExtension.serialization.deserialize(message.getPayload.toByteArray, clazz, None) match { + serialization.deserialize(message.getPayload.toByteArray, clazz, None) match { case Left(error) ⇒ throw error case Right(instance) ⇒ instance.asInstanceOf[T] } diff --git a/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala b/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala index 4d5e6f7adc..6240ed1b97 100644 --- a/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteActorRefProvider.scala @@ -41,13 +41,13 @@ class RemoteActorRefProvider( def deathWatch = local.deathWatch def guardian = local.guardian def systemGuardian = local.systemGuardian - def nodename = remoteExtension.settings.NodeName - def clustername = remoteExtension.settings.ClusterName + def nodename = remoteExtension.NodeName + def clustername = remoteExtension.ClusterName private val actors = new ConcurrentHashMap[String, AnyRef] /* - * The problem is that ActorRefs need a reference to the ActorSystem to + * The problem is that ActorRefs need a reference to the ActorSystem to * provide their service. Hence they cannot be created while the * constructors of ActorSystem and ActorRefProvider are still running. 
* The solution is to split out that last part into an init() method, @@ -56,9 +56,9 @@ class RemoteActorRefProvider( @volatile private var system: ActorSystemImpl = _ private lazy val remoteExtension = RemoteExtension(system) - private lazy val serializationExtension = SerializationExtension(system) + private lazy val serialization = SerializationExtension(system) lazy val rootPath: ActorPath = { - val remoteAddress = RemoteAddress(system.name, remoteExtension.settings.serverSettings.Hostname, remoteExtension.settings.serverSettings.Port) + val remoteAddress = RemoteAddress(system.name, remoteExtension.serverSettings.Hostname, remoteExtension.serverSettings.Port) new RootActorPath(remoteAddress) } private lazy val local = new LocalActorRefProvider(systemName, settings, eventStream, scheduler, _deadLetters) @@ -91,14 +91,6 @@ class RemoteActorRefProvider( deployer.lookupDeploymentFor(path.toString) match { case Some(DeploymentConfig.Deploy(_, _, routerType, nrOfInstances, DeploymentConfig.RemoteScope(remoteAddresses))) ⇒ - // FIXME move to AccrualFailureDetector as soon as we have the Gossiper up and running and remove the option to select impl in the akka.conf file since we only have one - // val failureDetector = DeploymentConfig.failureDetectorTypeFor(failureDetectorType) match { - // case FailureDetectorType.NoOp ⇒ new NoOpFailureDetector - // case FailureDetectorType.RemoveConnectionOnFirstFailure ⇒ new RemoveConnectionOnFirstFailureFailureDetector - // case FailureDetectorType.BannagePeriod(timeToBan) ⇒ new BannagePeriodFailureDetector(timeToBan) - // case FailureDetectorType.Custom(implClass) ⇒ FailureDetector.createCustomFailureDetector(implClass) - // } - def isReplicaNode: Boolean = remoteAddresses exists { _ == remote.remoteAddress } //system.eventHandler.debug(this, "%s: Deploy Remote Actor with address [%s] connected to [%s]: isReplica(%s)".format(system.defaultAddress, address, remoteAddresses.mkString, isReplicaNode)) @@ -108,6 +100,9 @@ class RemoteActorRefProvider( local.actorOf(system, props, supervisor, name, true) //FIXME systemService = true here to bypass Deploy, should be fixed when create-or-get is replaced by get-or-create } else { + implicit val dispatcher = if (props.dispatcher == Props.defaultDispatcher) system.dispatcher else props.dispatcher + implicit val timeout = system.settings.ActorTimeout + // we are on the single "reference" node uses the remote actors on the replica nodes val routerFactory: () ⇒ Router = DeploymentConfig.routerTypeFor(routerType) match { case RouterType.Direct ⇒ @@ -116,6 +111,12 @@ class RemoteActorRefProvider( .format(name, remoteAddresses.mkString(", "))) () ⇒ new DirectRouter + case RouterType.Broadcast ⇒ + if (remoteAddresses.size != 1) throw new ConfigurationException( + "Actor [%s] configured with Broadcast router must have exactly 1 remote node configured. Found [%s]" + .format(name, remoteAddresses.mkString(", "))) + () ⇒ new BroadcastRouter + case RouterType.Random ⇒ if (remoteAddresses.size < 1) throw new ConfigurationException( "Actor [%s] configured with Random router must have at least 1 remote node configured. 
Found [%s]" @@ -214,9 +215,9 @@ class RemoteActorRefProvider( log.debug("[{}] Instantiating Actor [{}] on node [{}]", rootPath, actorPath, remoteAddress) val actorFactoryBytes = - serializationExtension.serialization.serialize(actorFactory) match { + serialization.serialize(actorFactory) match { case Left(error) ⇒ throw error - case Right(bytes) ⇒ if (remoteExtension.settings.ShouldCompressData) LZF.compress(bytes) else bytes + case Right(bytes) ⇒ if (remoteExtension.ShouldCompressData) LZF.compress(bytes) else bytes } val command = RemoteSystemDaemonMessageProtocol.newBuilder @@ -236,7 +237,7 @@ class RemoteActorRefProvider( private def sendCommandToRemoteNode(connection: ActorRef, command: RemoteSystemDaemonMessageProtocol, withACK: Boolean) { if (withACK) { try { - val f = connection ? (command, remoteExtension.settings.RemoteSystemDaemonAckTimeout) + val f = connection ? (command, remoteExtension.RemoteSystemDaemonAckTimeout) (try f.await.value catch { case _: FutureTimeoutException ⇒ None }) match { case Some(Right(receiver)) ⇒ log.debug("Remote system command sent to [{}] successfully received", receiver) diff --git a/akka-remote/src/main/scala/akka/remote/RemoteExtension.scala b/akka-remote/src/main/scala/akka/remote/RemoteExtension.scala index e897bcf713..9c44f18462 100644 --- a/akka-remote/src/main/scala/akka/remote/RemoteExtension.scala +++ b/akka-remote/src/main/scala/akka/remote/RemoteExtension.scala @@ -3,10 +3,6 @@ */ package akka.remote -import akka.actor.ActorSystem -import akka.actor.ExtensionKey -import akka.actor.Extension -import akka.actor.ActorSystemImpl import com.typesafe.config.Config import com.typesafe.config.ConfigFactory import com.typesafe.config.ConfigParseOptions @@ -16,95 +12,78 @@ import java.util.concurrent.TimeUnit.MILLISECONDS import java.net.InetAddress import akka.config.ConfigurationException import com.eaio.uuid.UUID +import akka.actor._ -object RemoteExtensionKey extends ExtensionKey[RemoteExtension] +import scala.collection.JavaConverters._ -object RemoteExtension { - def apply(system: ActorSystem): RemoteExtension = { - if (!system.hasExtension(RemoteExtensionKey)) { - system.registerExtension(new RemoteExtension) - } - system.extension(RemoteExtensionKey) - } - - class Settings(cfg: Config) { - private def referenceConfig: Config = - ConfigFactory.parseResource(classOf[ActorSystem], "/akka-remote-reference.conf", - ConfigParseOptions.defaults.setAllowMissing(false)) - val config: ConfigRoot = ConfigFactory.emptyRoot("akka-remote").withFallback(cfg).withFallback(referenceConfig).resolve() - - import config._ - - val RemoteTransport = getString("akka.remote.layer") - val FailureDetectorThreshold = getInt("akka.remote.failure-detector.threshold") - val FailureDetectorMaxSampleSize = getInt("akka.remote.failure-detector.max-sample-size") - val ShouldCompressData = config.getBoolean("akka.remote.use-compression") - val RemoteSystemDaemonAckTimeout = Duration(config.getMilliseconds("akka.remote.remote-daemon-ack-timeout"), MILLISECONDS) - - // TODO cluster config will go into akka-cluster-reference.conf when we enable that module - val ClusterName = getString("akka.cluster.name") - - val NodeName: String = config.getString("akka.cluster.nodename") match { - case "" ⇒ new UUID().toString - case value ⇒ value - } - - val serverSettings = new RemoteServerSettings - val clientSettings = new RemoteClientSettings - - class RemoteClientSettings { - val SecureCookie: Option[String] = config.getString("akka.remote.secure-cookie") match { - case "" ⇒ None - 
case cookie ⇒ Some(cookie) - } - - val ReconnectionTimeWindow = Duration(config.getMilliseconds("akka.remote.client.reconnection-time-window"), MILLISECONDS) - val ReadTimeout = Duration(config.getMilliseconds("akka.remote.client.read-timeout"), MILLISECONDS) - val ReconnectDelay = Duration(config.getMilliseconds("akka.remote.client.reconnect-delay"), MILLISECONDS) - val MessageFrameSize = config.getInt("akka.remote.client.message-frame-size") - } - - class RemoteServerSettings { - import scala.collection.JavaConverters._ - val MessageFrameSize = config.getInt("akka.remote.server.message-frame-size") - val SecureCookie: Option[String] = config.getString("akka.remote.secure-cookie") match { - case "" ⇒ None - case cookie ⇒ Some(cookie) - } - val RequireCookie = { - val requireCookie = config.getBoolean("akka.remote.server.require-cookie") - if (requireCookie && SecureCookie.isEmpty) throw new ConfigurationException( - "Configuration option 'akka.remote.server.require-cookie' is turned on but no secure cookie is defined in 'akka.remote.secure-cookie'.") - requireCookie - } - - val UsePassiveConnections = config.getBoolean("akka.remote.use-passive-connections") - - val UntrustedMode = config.getBoolean("akka.remote.server.untrusted-mode") - val Hostname = config.getString("akka.remote.server.hostname") match { - case "" ⇒ InetAddress.getLocalHost.getHostAddress - case value ⇒ value - } - val Port = config.getInt("akka.remote.server.port") - val ConnectionTimeout = Duration(config.getMilliseconds("akka.remote.server.connection-timeout"), MILLISECONDS) - - val Backlog = config.getInt("akka.remote.server.backlog") - } - - } +object RemoteExtension extends ExtensionId[RemoteExtensionSettings] with ExtensionIdProvider { + def lookup() = this + def createExtension(system: ActorSystemImpl) = new RemoteExtensionSettings(system.applicationConfig, system.name) } -class RemoteExtension extends Extension[RemoteExtension] { - import RemoteExtension._ - @volatile - private var _settings: Settings = _ +class RemoteExtensionSettings(cfg: Config, val systemName: String) extends Extension { + private def referenceConfig: Config = + ConfigFactory.parseResource(classOf[ActorSystem], "/akka-remote-reference.conf", + ConfigParseOptions.defaults.setAllowMissing(false)) + val config: ConfigRoot = ConfigFactory.emptyRoot("akka-remote").withFallback(cfg).withFallback(referenceConfig).resolve() - def key = RemoteExtensionKey + import config._ - def init(system: ActorSystemImpl) { - _settings = new Settings(system.applicationConfig) + val RemoteTransport = getString("akka.remote.layer") + val FailureDetectorThreshold = getInt("akka.remote.failure-detector.threshold") + val FailureDetectorMaxSampleSize = getInt("akka.remote.failure-detector.max-sample-size") + val ShouldCompressData = config.getBoolean("akka.remote.use-compression") + val RemoteSystemDaemonAckTimeout = Duration(config.getMilliseconds("akka.remote.remote-daemon-ack-timeout"), MILLISECONDS) + + // TODO cluster config will go into akka-cluster-reference.conf when we enable that module + val ClusterName = getString("akka.cluster.name") + val SeedNodes = Set.empty[RemoteAddress] ++ getStringList("akka.cluster.seed-nodes").asScala.toSeq.map(RemoteAddress(_, systemName)) + + // FIXME remove nodename from config - should only be passed as command line arg or read from properties file etc. 
+ val NodeName: String = config.getString("akka.cluster.nodename") match { + case "" ⇒ new UUID().toString + case value ⇒ value } - def settings: Settings = _settings + val serverSettings = new RemoteServerSettings + val clientSettings = new RemoteClientSettings + class RemoteClientSettings { + val SecureCookie: Option[String] = config.getString("akka.remote.secure-cookie") match { + case "" ⇒ None + case cookie ⇒ Some(cookie) + } + + val ReconnectionTimeWindow = Duration(config.getMilliseconds("akka.remote.client.reconnection-time-window"), MILLISECONDS) + val ReadTimeout = Duration(config.getMilliseconds("akka.remote.client.read-timeout"), MILLISECONDS) + val ReconnectDelay = Duration(config.getMilliseconds("akka.remote.client.reconnect-delay"), MILLISECONDS) + val MessageFrameSize = config.getInt("akka.remote.client.message-frame-size") + } + + class RemoteServerSettings { + import scala.collection.JavaConverters._ + val MessageFrameSize = config.getInt("akka.remote.server.message-frame-size") + val SecureCookie: Option[String] = config.getString("akka.remote.secure-cookie") match { + case "" ⇒ None + case cookie ⇒ Some(cookie) + } + val RequireCookie = { + val requireCookie = config.getBoolean("akka.remote.server.require-cookie") + if (requireCookie && SecureCookie.isEmpty) throw new ConfigurationException( + "Configuration option 'akka.remote.server.require-cookie' is turned on but no secure cookie is defined in 'akka.remote.secure-cookie'.") + requireCookie + } + + val UsePassiveConnections = config.getBoolean("akka.remote.use-passive-connections") + + val UntrustedMode = config.getBoolean("akka.remote.server.untrusted-mode") + val Hostname = config.getString("akka.remote.server.hostname") match { + case "" ⇒ InetAddress.getLocalHost.getHostAddress + case value ⇒ value + } + val Port = config.getInt("akka.remote.server.port") + val ConnectionTimeout = Duration(config.getMilliseconds("akka.remote.server.connection-timeout"), MILLISECONDS) + + val Backlog = config.getInt("akka.remote.server.backlog") + } } \ No newline at end of file diff --git a/akka-remote/src/main/scala/akka/remote/netty/NettyRemoteSupport.scala b/akka-remote/src/main/scala/akka/remote/netty/NettyRemoteSupport.scala index 43b9cd45b6..04abcdc038 100644 --- a/akka-remote/src/main/scala/akka/remote/netty/NettyRemoteSupport.scala +++ b/akka-remote/src/main/scala/akka/remote/netty/NettyRemoteSupport.scala @@ -282,7 +282,7 @@ class ActiveRemoteClientHandler( val client: ActiveRemoteClient) extends SimpleChannelUpstreamHandler { - def runOnceNow(thunk: ⇒ Unit) = timer.newTimeout(new TimerTask() { + def runOnceNow(thunk: ⇒ Unit): Unit = timer.newTimeout(new TimerTask() { def run(timeout: Timeout) = try { thunk } finally { timeout.cancel() } }, 0, TimeUnit.MILLISECONDS) @@ -358,8 +358,8 @@ class ActiveRemoteClientHandler( class NettyRemoteSupport(_system: ActorSystem, val remote: Remote) extends RemoteSupport(_system) with RemoteMarshallingOps { val log = Logging(system, "NettyRemoteSupport") - val serverSettings = RemoteExtension(system).settings.serverSettings - val clientSettings = RemoteExtension(system).settings.clientSettings + val serverSettings = RemoteExtension(system).serverSettings + val clientSettings = RemoteExtension(system).clientSettings private val remoteClients = new HashMap[RemoteAddress, RemoteClient] private val clientsLock = new ReentrantReadWriteLock diff --git a/akka-remote/src/test/scala/akka/remote/RemoteConfigSpec.scala b/akka-remote/src/test/scala/akka/remote/RemoteConfigSpec.scala index 
d4e4b6b3bc..f72904fc3f 100644 --- a/akka-remote/src/test/scala/akka/remote/RemoteConfigSpec.scala +++ b/akka-remote/src/test/scala/akka/remote/RemoteConfigSpec.scala @@ -8,9 +8,15 @@ class RemoteConfigSpec extends AkkaSpec { "ClusterSpec: A Deployer" must { "be able to parse 'akka.actor.cluster._' config elements" in { - val config = RemoteExtension(system).settings.config + val config = RemoteExtension(system).config import config._ + //akka.remote + getString("akka.remote.layer") must equal("akka.cluster.netty.NettyRemoteSupport") + getString("akka.remote.secure-cookie") must equal("") + getBoolean("akka.remote.use-passive-connections") must equal(true) + // getMilliseconds("akka.remote.remote-daemon-ack-timeout") must equal(30 * 1000) + //akka.remote.server getInt("akka.remote.server.port") must equal(2552) getInt("akka.remote.server.message-frame-size") must equal(1048576) @@ -28,27 +34,23 @@ class RemoteConfigSpec extends AkkaSpec { // TODO cluster config will go into akka-cluster-reference.conf when we enable that module //akka.cluster - getString("akka.cluster.name") must equal("test-cluster") - getString("akka.cluster.zookeeper-server-addresses") must equal("localhost:2181") - getInt("akka.remote.server.port") must equal(2552) - getMilliseconds("akka.cluster.max-time-to-wait-until-connected") must equal(30 * 1000) - getMilliseconds("akka.cluster.session-timeout") must equal(60 * 1000) - getMilliseconds("akka.cluster.connection-timeout") must equal(60 * 1000) - getMilliseconds("akka.remote.remote-daemon-ack-timeout") must equal(30 * 1000) - getBoolean("akka.cluster.include-ref-node-in-replica-set") must equal(true) - getString("akka.remote.layer") must equal("akka.cluster.netty.NettyRemoteSupport") - getString("akka.remote.secure-cookie") must equal("") - getBoolean("akka.remote.use-passive-connections") must equal(true) - getString("akka.cluster.log-directory") must equal("_akka_cluster") + getString("akka.cluster.name") must equal("default-cluster") + getString("akka.cluster.nodename") must equal("") + getStringList("akka.cluster.seed-nodes") must equal(new java.util.ArrayList[String]) - //akka.cluster.replication - getString("akka.cluster.replication.digest-type") must equal("MAC") - getString("akka.cluster.replication.password") must equal("secret") - getInt("akka.cluster.replication.ensemble-size") must equal(3) - getInt("akka.cluster.replication.quorum-size") must equal(2) - getInt("akka.cluster.replication.snapshot-frequency") must equal(1000) - getMilliseconds("akka.cluster.replication.timeout") must equal(30 * 1000) + // getMilliseconds("akka.cluster.max-time-to-wait-until-connected") must equal(30 * 1000) + // getMilliseconds("akka.cluster.session-timeout") must equal(60 * 1000) + // getMilliseconds("akka.cluster.connection-timeout") must equal(60 * 1000) + // getBoolean("akka.cluster.include-ref-node-in-replica-set") must equal(true) + // getString("akka.cluster.log-directory") must equal("_akka_cluster") + // //akka.cluster.replication + // getString("akka.cluster.replication.digest-type") must equal("MAC") + // getString("akka.cluster.replication.password") must equal("secret") + // getInt("akka.cluster.replication.ensemble-size") must equal(3) + // getInt("akka.cluster.replication.quorum-size") must equal(2) + // getInt("akka.cluster.replication.snapshot-frequency") must equal(1000) + // getMilliseconds("akka.cluster.replication.timeout") must equal(30 * 1000) } } } diff --git a/akka-samples/akka-sample-ants/README.md b/akka-samples/akka-sample-ants/README.md deleted 
file mode 100644 index 3c559834cb..0000000000 --- a/akka-samples/akka-sample-ants/README.md +++ /dev/null @@ -1,46 +0,0 @@ -Ants -==== - -Ants is written by Peter Vlugter. - -Ants is roughly based on the Clojure [ants simulation][ants.clj] by Rich Hickey, and ported to Scala using [Akka][akka] and [Spde][spde]. - -Requirements ------------- - -To build and run Ants you need [Simple Build Tool][sbt] (sbt). - -Running -------- - -First time, 'sbt update' to get dependencies, then to run Ants use 'sbt run'. -Here is an example. First type 'sbt' to start SBT interactively, the run 'update' and 'run': -> cd $AKKA_HOME - -> % sbt - -> > update - -> > project akka-sample-ants - -> > run - - -Notice ------- - -Ants is roughly based on the Clojure ants simulation by Rich Hickey. - -Copyright (c) Rich Hickey. All rights reserved. -The use and distribution terms for this software are covered by the -Common Public License 1.0 ([http://opensource.org/licenses/cpl1.0.php][cpl]) -which can be found in the file cpl.txt at the root of this distribution. -By using this software in any fashion, you are agreeing to be bound by -the terms of this license. -You must not remove this notice, or any other, from this software. - -[ants.clj]:http://clojure.googlegroups.com/web/ants.clj -[akka]:http://akkasource.org -[spde]:http://technically.us/spde/ -[sbt]: http://code.google.com/p/simple-build-tool/ -[cpl]: http://opensource.org/licenses/cpl1.0.php \ No newline at end of file diff --git a/akka-samples/akka-sample-ants/src/main/scala/Ants.scala b/akka-samples/akka-sample-ants/src/main/scala/Ants.scala deleted file mode 100644 index a6aae6f8bd..0000000000 --- a/akka-samples/akka-sample-ants/src/main/scala/Ants.scala +++ /dev/null @@ -1,219 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. 
- */ - -package sample.ants - -import java.util.concurrent.TimeUnit -import scala.util.Random.{nextInt => randomInt} -import akka.actor.{Actor, ActorRef, Scheduler} -import akka.actor.Actor.actorOf -import akka.stm._ - -object Config { - val Dim = 80 // dimensions of square world - val AntsSqrt = 20 // number of ants = AntsSqrt^2 - val FoodPlaces = 35 // number of places with food - val FoodRange = 100 // range of amount of food at a place - val PherScale = 10 // scale factor for pheromone drawing - val AntMillis = 100 // how often an ant behaves (milliseconds) - val EvapMillis = 1000 // how often pheromone evaporation occurs (milliseconds) - val EvapRate = 0.99f // pheromone evaporation rate - val StartDelay = 1000 // delay before everything kicks off (milliseconds) -} - -case class Ant(dir: Int, food: Boolean = false) { - def turn(i: Int) = copy(dir = Util.dirBound(dir + i)) - def turnAround = turn(4) - def pickUp = copy(food = true) - def dropOff = copy(food = false) -} - -case class Cell(food: Int = 0, pher: Float = 0, ant: Option[Ant] = None, home: Boolean = false) { - def addFood(i: Int) = copy(food = food + i) - def addPher(x: Float) = copy(pher = pher + x) - def alterPher(f: Float => Float) = copy(pher = f(pher)) - def putAnt(antOpt: Option[Ant]) = copy(ant = antOpt) - def makeHome = copy(home = true) -} - -object EmptyCell extends Cell - -class Place(initCell: Cell = EmptyCell) extends Ref(initCell) { - def cell: Cell = getOrElse(EmptyCell) - def food: Int = cell.food - def food(i: Int) = alter(_.addFood(i)) - def hasFood = food > 0 - def pher: Float = cell.pher - def pher(f: Float => Float) = alter(_.alterPher(f)) - def trail = alter(_.addPher(1)) - def ant: Option[Ant] = cell.ant - def ant(f: Ant => Ant): Cell = alter(_.putAnt(ant map f)) - def enter(antOpt: Option[Ant]): Cell = alter(_.putAnt(antOpt)) - def enter(ant: Ant): Cell = enter(Some(ant)) - def leave = enter(None) - def occupied: Boolean = ant.isDefined - def makeHome = alter(_.makeHome) - def home: Boolean = cell.home -} - -case object Ping - -object World { - import Config._ - - val homeOff = Dim / 4 - lazy val places = Vector.fill(Dim, Dim)(new Place) - lazy val ants = setup - lazy val evaporator = actorOf[Evaporator] - - private val snapshotFactory = TransactionFactory(readonly = true, familyName = "snapshot") - - def snapshot = atomic(snapshotFactory) { Array.tabulate(Dim, Dim)(place(_, _).opt) } - - def place(loc: (Int, Int)) = places(loc._1)(loc._2) - - private def setup = atomic { - for (i <- 1 to FoodPlaces) { - place(randomInt(Dim), randomInt(Dim)) food (randomInt(FoodRange)) - } - val homeRange = homeOff until (AntsSqrt + homeOff) - for (x <- homeRange; y <- homeRange) yield { - place(x, y).makeHome - place(x, y) enter Ant(randomInt(8)) - actorOf(new AntActor(x, y)) - } - } - - def start = { - ants foreach pingEvery(AntMillis) - pingEvery(EvapMillis)(evaporator) - } - - private def pingEvery(millis: Long)(actor: ActorRef) = - Scheduler.schedule(actor, Ping, Config.StartDelay, millis, TimeUnit.MILLISECONDS) -} - -object Util { - import Config._ - - def bound(b: Int, n: Int) = { - val x = n % b - if (x < 0) x + b else x - } - - def dirBound(n: Int) = bound(8, n) - def dimBound(n: Int) = bound(Dim, n) - - val dirDelta = Map(0 -> (0, -1), 1 -> (1, -1), 2 -> (1, 0), 3 -> (1, 1), - 4 -> (0, 1), 5 -> (-1, 1), 6 -> (-1, 0), 7 -> (-1, -1)) - def deltaLoc(x: Int, y: Int, dir: Int) = { - val (dx, dy) = dirDelta(dirBound(dir)) - (dimBound(x + dx), dimBound(y + dy)) - } - - def rankBy[A, B: Ordering](xs: Seq[A], f: A => 
B) = Map(xs.sortBy(f).zip(Stream from 1): _*) - - def roulette(slices: Seq[Int]) = { - val total = slices.sum - val r = randomInt(total) - var i, sum = 0 - while ((sum + slices(i)) <= r) { - sum += slices(i) - i += 1 - } - i - } -} - -trait WorldActor extends Actor { - def act - def receive = { case Ping => act } -} - -class AntActor(initLoc: (Int, Int)) extends WorldActor { - import World._ - import Util._ - - val locRef = Ref(initLoc) - - val name = "ant-from-" + initLoc._1 + "-" + initLoc._2 - implicit val txFactory = TransactionFactory(familyName = name) - - val homing = (p: Place) => p.pher + (100 * (if (p.home) 0 else 1)) - val foraging = (p: Place) => p.pher + p.food - - def loc = locRef.getOrElse(initLoc) - def newLoc(l: (Int, Int)) = locRef swap l - - def act = atomic { - val (x, y) = loc - val current = place(x, y) - for (ant <- current.ant) { - val ahead = place(deltaLoc(x, y, ant.dir)) - if (ant.food) { // homing - if (current.home) dropFood - else if (ahead.home && !ahead.occupied) move - else random(homing) - } else { // foraging - if (!current.home && current.hasFood) pickUpFood - else if (!ahead.home && ahead.hasFood && !ahead.occupied) move - else random(foraging) - } - } - } - - def move = { - val (x, y) = loc - val from = place(x, y) - for (ant <- from.ant) { - val toLoc = deltaLoc(x, y, ant.dir) - val to = place(toLoc) - to enter ant - from.leave - if (!from.home) from.trail - newLoc(toLoc) - } - } - - def pickUpFood = { - val current = place(loc) - current food -1 - current ant (_.pickUp.turnAround) - } - - def dropFood = { - val current = place(loc) - current food +1 - current ant (_.dropOff.turnAround) - } - - def random[A: Ordering](ranking: Place => A) = { - val (x, y) = loc - val current = place(x, y) - for (ant <- current.ant) { - val delta = (turn: Int) => place(deltaLoc(x, y, ant.dir + turn)) - val ahead = delta(0) - val aheadLeft = delta(-1) - val aheadRight = delta(+1) - val locations = Seq(ahead, aheadLeft, aheadRight) - val ranks = rankBy(locations, ranking) - val ranked = Seq(ranks(aheadLeft), (if (ahead.occupied) 0 else ranks(ahead)), ranks(aheadRight)) - val dir = roulette(ranked) - 1 - if (dir == 0) move - else current ant (_.turn(dir)) - } - } -} - -class Evaporator extends WorldActor { - import Config._ - import World._ - - implicit val txFactory = TransactionFactory(familyName = "evaporator") - val evaporate = (pher: Float) => pher * EvapRate - - def act = for (x <- 0 until Dim; y <- 0 until Dim) { - atomic { place(x, y) pher evaporate } - } -} diff --git a/akka-samples/akka-sample-ants/src/main/spde/Ants.spde b/akka-samples/akka-sample-ants/src/main/spde/Ants.spde deleted file mode 100644 index df0da84b25..0000000000 --- a/akka-samples/akka-sample-ants/src/main/spde/Ants.spde +++ /dev/null @@ -1,45 +0,0 @@ -import sample.ants._ -import sample.ants.Config._ - -val scale = 5 - -size(Dim * scale, Dim * scale) -smooth() - -override def setup() { - background(255) - World.start -} - -def draw() { - val world = World.snapshot - for (x <- 0 until Dim; y <- 0 until Dim; cell <- world(x)(y)) { - val (rx, ry, rw, rh) = (x * scale, y * scale, scale, scale) - noStroke() - fill(255) - rect(rx, ry, rw, rh) - if (cell.pher > 0) fill(0, 255, 0, cell.pher * PherScale) - if (cell.food > 0) fill(255, 0, 0, 255 * (cell.food / FoodRange.floatValue)) - rect(rx, ry, rw, rh) - for (ant <- cell.ant) { - if (ant.food) stroke(255, 0, 0) else stroke(0) - val (hx, hy, tx, ty) = antLine(ant.dir) - line(rx + hx, ry + hy, rx + tx, ry + ty) - } - stroke(0, 0, 255) - noFill() - val 
homeStart = World.homeOff * scale - val homeWidth = AntsSqrt * scale - rect(homeStart, homeStart, homeWidth, homeWidth) - } -} - -val s = scale - 1 -val m = s / 2 - -def antLine(dir: Int) = dir match { - case 0|4 => (m, 0, m, s) - case 1|5 => (s, 0, 0, s) - case 2|6 => (s, m, 0, m) - case _ => (s, s, 0, 0) -} diff --git a/akka-samples/akka-sample-camel/config/akka.conf b/akka-samples/akka-sample-camel/config/akka.conf deleted file mode 100644 index 574df278c4..0000000000 --- a/akka-samples/akka-sample-camel/config/akka.conf +++ /dev/null @@ -1,20 +0,0 @@ -#################### -# Akka Config File # -#################### - -akka { - version = "2.0-SNAPSHOT" - - enabled-modules = ["camel", "http"] - - time-unit = "seconds" - - event-handlers = ["akka.event.EventHandler$DefaultListener"] - - boot = ["sample.camel.Boot"] - - http { - hostname = "localhost" - port = 9998 - } -} diff --git a/akka-samples/akka-sample-camel/config/microkernel-server.xml b/akka-samples/akka-sample-camel/config/microkernel-server.xml deleted file mode 100644 index 8a5be7f833..0000000000 --- a/akka-samples/akka-sample-camel/config/microkernel-server.xml +++ /dev/null @@ -1,65 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - 300000 - 2 - false - 8443 - 20000 - 5000 - - - - - - - - - - - - - - / - - akka.http.AkkaRestServlet - /* - - - - - - - - - - - - - - - true - true - true - 1000 - - diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/BeanImpl.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/BeanImpl.java deleted file mode 100644 index 6a5a064629..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/BeanImpl.java +++ /dev/null @@ -1,12 +0,0 @@ -package sample.camel; - -/** - * @author Martin Krasser - */ -public class BeanImpl implements BeanIntf { - - public String foo(String s) { - return "hello " + s; - } - -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/BeanIntf.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/BeanIntf.java deleted file mode 100644 index a7b2e6e6a4..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/BeanIntf.java +++ /dev/null @@ -1,10 +0,0 @@ -package sample.camel; - -/** - * @author Martin Krasser - */ -public interface BeanIntf { - - public String foo(String s); - -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer1.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer1.java deleted file mode 100644 index 3e8ce1e20f..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer1.java +++ /dev/null @@ -1,15 +0,0 @@ -package sample.camel; - -import org.apache.camel.Body; -import org.apache.camel.Header; - -import akka.camel.consume; - -/** - * @author Martin Krasser - */ -public interface RemoteTypedConsumer1 { - - @consume("jetty:http://localhost:6644/camel/remote-typed-actor-1") - public String foo(@Body String body, @Header("name") String header); -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer1Impl.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer1Impl.java deleted file mode 100644 index 3321ea08c0..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer1Impl.java +++ /dev/null @@ -1,13 +0,0 @@ -package sample.camel; - -import akka.actor.TypedActor; - -/** - * @author Martin Krasser - */ -public class RemoteTypedConsumer1Impl implements 
RemoteTypedConsumer1 { - - public String foo(String body, String header) { - return String.format("remote1: body=%s header=%s", body, header); - } -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer2.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer2.java deleted file mode 100644 index ba093a1d96..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer2.java +++ /dev/null @@ -1,15 +0,0 @@ -package sample.camel; - -import org.apache.camel.Body; -import org.apache.camel.Header; -import akka.camel.consume; - -/** - * @author Martin Krasser - */ -public interface RemoteTypedConsumer2 { - - @consume("jetty:http://localhost:6644/camel/remote-typed-actor-2") - public String foo(@Body String body, @Header("name") String header); - -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer2Impl.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer2Impl.java deleted file mode 100644 index 01420ffbee..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer2Impl.java +++ /dev/null @@ -1,12 +0,0 @@ -package sample.camel; - -/** - * @author Martin Krasser - */ -public class RemoteTypedConsumer2Impl implements RemoteTypedConsumer2 { - - public String foo(String body, String header) { - return String.format("remote2: body=%s header=%s", body, header); - } - -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1.java deleted file mode 100644 index 6213fb8f09..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1.java +++ /dev/null @@ -1,17 +0,0 @@ -package sample.camel; - -import org.apache.camel.Body; -import org.apache.camel.Header; - -import akka.camel.consume; - -/** - * @author Martin Krasser - */ -public interface TypedConsumer1 { - @consume("file:data/input/typed-actor") - public void foo(String body); - - @consume("jetty:http://0.0.0.0:8877/camel/typed-actor") - public String bar(@Body String body, @Header("name") String header); -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1Impl.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1Impl.java deleted file mode 100644 index b354872a27..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1Impl.java +++ /dev/null @@ -1,21 +0,0 @@ -package sample.camel; - -import org.apache.camel.Body; -import org.apache.camel.Header; - -import akka.actor.TypedActor; - -/** - * @author Martin Krasser - */ -public class TypedConsumer1Impl implements TypedConsumer1 { - - public void foo(String body) { - System.out.println("Received message:"); - System.out.println(body); - } - - public String bar(@Body String body, @Header("name") String header) { - return String.format("body=%s header=%s", body, header); - } -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2.java deleted file mode 100644 index 9a39b534b5..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2.java +++ /dev/null @@ -1,14 +0,0 @@ -package sample.camel; - -import org.apache.camel.Body; -import org.apache.camel.Header; -import akka.camel.consume; - -/** - * @author Martin Krasser - */ -public 
interface TypedConsumer2 { - - @consume("direct:default") - public String foo(String body); -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2Impl.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2Impl.java deleted file mode 100644 index 603c32b803..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2Impl.java +++ /dev/null @@ -1,11 +0,0 @@ -package sample.camel; - -/** - * @author Martin Krasser - */ -public class TypedConsumer2Impl implements TypedConsumer2 { - - public String foo(String body) { - return String.format("default: %s", body); - } -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/UntypedConsumer1.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/UntypedConsumer1.java deleted file mode 100644 index 718f8f9606..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/UntypedConsumer1.java +++ /dev/null @@ -1,20 +0,0 @@ -package sample.camel; - -import akka.camel.Message; -import akka.camel.UntypedConsumerActor; - -/** - * @author Martin Krasser - */ -public class UntypedConsumer1 extends UntypedConsumerActor { - - public String getEndpointUri() { - return "direct:untyped-consumer-1"; - } - - public void onReceive(Object message) { - Message msg = (Message)message; - String body = msg.getBodyAs(String.class); - sender.tell(String.format("received %s", body)); - } -} diff --git a/akka-samples/akka-sample-camel/src/main/resources/context-jms.xml b/akka-samples/akka-sample-camel/src/main/resources/context-jms.xml deleted file mode 100644 index 12e4541be3..0000000000 --- a/akka-samples/akka-sample-camel/src/main/resources/context-jms.xml +++ /dev/null @@ -1,27 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - diff --git a/akka-samples/akka-sample-camel/src/main/resources/context-standalone.xml b/akka-samples/akka-sample-camel/src/main/resources/context-standalone.xml deleted file mode 100644 index e4edcbc350..0000000000 --- a/akka-samples/akka-sample-camel/src/main/resources/context-standalone.xml +++ /dev/null @@ -1,26 +0,0 @@ - - - - - - - - - - - - - - - - diff --git a/akka-samples/akka-sample-camel/src/main/scala/sample/camel/Actors.scala b/akka-samples/akka-sample-camel/src/main/scala/sample/camel/Actors.scala deleted file mode 100644 index f4655c3985..0000000000 --- a/akka-samples/akka-sample-camel/src/main/scala/sample/camel/Actors.scala +++ /dev/null @@ -1,161 +0,0 @@ -package sample.camel - -import org.apache.camel.Exchange - -import akka.actor.{ Actor, ActorRef, ActorRegistry } -import akka.camel.{ Ack, Failure, Producer, Message, Consumer } - -/** - * Client-initiated remote actor. - */ -class RemoteActor1 extends Actor with Consumer { - def endpointUri = "jetty:http://localhost:6644/camel/remote-actor-1" - - protected def receive = { - case msg: Message ⇒ sender ! Message("hello %s" format msg.bodyAs[String], Map("sender" -> "remote1")) - } -} - -/** - * Server-initiated remote actor. - */ -class RemoteActor2 extends Actor with Consumer { - def endpointUri = "jetty:http://localhost:6644/camel/remote-actor-2" - - protected def receive = { - case msg: Message ⇒ sender ! 
Message("hello %s" format msg.bodyAs[String], Map("sender" -> "remote2")) - } -} - -class Producer1 extends Actor with Producer { - def endpointUri = "direct:welcome" - override def oneway = false // default -} - -class Consumer1 extends Actor with Consumer { - def endpointUri = "file:data/input/actor" - - def receive = { - case msg: Message ⇒ println("received %s" format msg.bodyAs[String]) - } -} - -class Consumer2 extends Actor with Consumer { - def endpointUri = "jetty:http://0.0.0.0:8877/camel/default" - - def receive = { - case msg: Message ⇒ sender ! ("Hello %s" format msg.bodyAs[String]) - } -} - -class Consumer3(transformer: ActorRef) extends Actor with Consumer { - def endpointUri = "jetty:http://0.0.0.0:8877/camel/welcome" - - def receive = { - case msg: Message ⇒ transformer.forward(msg.setBodyAs[String]) - } -} - -class Consumer4 extends Actor with Consumer { - def endpointUri = "jetty:http://0.0.0.0:8877/camel/stop" - - def receive = { - case msg: Message ⇒ msg.bodyAs[String] match { - case "stop" ⇒ { - sender ! "Consumer4 stopped" - self.stop - } - case body ⇒ sender ! body - } - } -} - -class Consumer5 extends Actor with Consumer { - def endpointUri = "jetty:http://0.0.0.0:8877/camel/start" - - def receive = { - case _ ⇒ { - Actor.actorOf[Consumer4] - sender ! "Consumer4 started" - } - } -} - -class Transformer(producer: ActorRef) extends Actor { - protected def receive = { - case msg: Message ⇒ producer.forward(msg.transformBody((body: String) ⇒ "- %s -" format body)) - } -} - -class Subscriber(name: String, uri: String) extends Actor with Consumer { - def endpointUri = uri - - protected def receive = { - case msg: Message ⇒ println("%s received: %s" format (name, msg.body)) - } -} - -class Publisher(uri: String) extends Actor with Producer { - def endpointUri = uri - override def oneway = true -} - -class PublisherBridge(uri: String, publisher: ActorRef) extends Actor with Consumer { - def endpointUri = uri - - protected def receive = { - case msg: Message ⇒ { - publisher ! msg.bodyAs[String] - sender ! "message published" - } - } -} - -class HttpConsumer(producer: ActorRef) extends Actor with Consumer { - def endpointUri = "jetty:http://0.0.0.0:8875/" - - protected def receive = { - case msg ⇒ producer forward msg - } -} - -class HttpProducer(transformer: ActorRef) extends Actor with Producer { - def endpointUri = "jetty://http://akka.io/?bridgeEndpoint=true" - - override protected def receiveBeforeProduce = { - // only keep Exchange.HTTP_PATH message header (which needed by bridge endpoint) - case msg: Message ⇒ msg.setHeaders(msg.headers(Set(Exchange.HTTP_PATH))) - } - - override protected def receiveAfterProduce = { - // do not reply but forward result to transformer - case msg ⇒ transformer forward msg - } -} - -class HttpTransformer extends Actor { - protected def receive = { - case msg: Message ⇒ sender ! (msg.transformBody { body: String ⇒ body replaceAll ("Akka ", "AKKA ") }) - case msg: Failure ⇒ sender ! msg - } -} - -class FileConsumer extends Actor with Consumer { - def endpointUri = "file:data/input/actor?delete=true" - override def autoack = false - - var counter = 0 - - def receive = { - case msg: Message ⇒ { - if (counter == 2) { - println("received %s" format msg.bodyAs[String]) - sender ! Ack - } else { - println("rejected %s" format msg.bodyAs[String]) - counter += 1 - sender ! 
Failure(new Exception("message number %s not accepted" format counter)) - } - } - } -} diff --git a/akka-samples/akka-sample-camel/src/main/scala/sample/camel/Boot.scala b/akka-samples/akka-sample-camel/src/main/scala/sample/camel/Boot.scala deleted file mode 100644 index b84dd9c1c9..0000000000 --- a/akka-samples/akka-sample-camel/src/main/scala/sample/camel/Boot.scala +++ /dev/null @@ -1,98 +0,0 @@ -package sample.camel - -import org.apache.camel.{ Exchange, Processor } -import org.apache.camel.builder.RouteBuilder -import org.apache.camel.impl.DefaultCamelContext -import org.apache.camel.spring.spi.ApplicationContextRegistry -import org.springframework.context.support.ClassPathXmlApplicationContext - -import akka.actor.Actor._ -import akka.actor.Props -import akka.actor.TypedActor -import akka.camel.CamelContextManager - -/** - * @author Martin Krasser - */ -class Boot { - - // ----------------------------------------------------------------------- - // Basic example - // ----------------------------------------------------------------------- - - actorOf[Consumer1] - actorOf[Consumer2] - - // ----------------------------------------------------------------------- - // Custom Camel route example - // ----------------------------------------------------------------------- - - // Create CamelContext and a Spring-based registry - val context = new ClassPathXmlApplicationContext("/context-jms.xml", getClass) - val registry = new ApplicationContextRegistry(context) - - // Use a custom Camel context and a custom touter builder - CamelContextManager.init(new DefaultCamelContext(registry)) - CamelContextManager.mandatoryContext.addRoutes(new CustomRouteBuilder) - - val producer = actorOf[Producer1] - val mediator = actorOf(new Transformer(producer)) - val consumer = actorOf(new Consumer3(mediator)) - - // ----------------------------------------------------------------------- - // Asynchronous consumer-producer example (Akka homepage transformation) - // ----------------------------------------------------------------------- - - val httpTransformer = actorOf(new HttpTransformer) - val httpProducer = actorOf(new HttpProducer(httpTransformer)) - val httpConsumer = actorOf(new HttpConsumer(httpProducer)) - - // ----------------------------------------------------------------------- - // Publish subscribe examples - // ----------------------------------------------------------------------- - - // - // Cometd example commented out because camel-cometd is broken since Camel 2.3 - // - - //val cometdUri = "cometd://localhost:8111/test/abc?baseResource=file:target" - //val cometdSubscriber = actorOf(new Subscriber("cometd-subscriber", cometdUri)) - //val cometdPublisher = actorOf(new Publisher("cometd-publisher", cometdUri)) - - val jmsUri = "jms:topic:test" - val jmsSubscriber1 = actorOf(new Subscriber("jms-subscriber-1", jmsUri)) - val jmsSubscriber2 = actorOf(new Subscriber("jms-subscriber-2", jmsUri)) - val jmsPublisher = actorOf(new Publisher(jmsUri), "jms-publisher") - - //val cometdPublisherBridge = actorOf(new PublisherBridge("jetty:http://0.0.0.0:8877/camel/pub/cometd", cometdPublisher)) - val jmsPublisherBridge = actorOf(new PublisherBridge("jetty:http://0.0.0.0:8877/camel/pub/jms", jmsPublisher)) - - // ----------------------------------------------------------------------- - // Actor un-publishing and re-publishing example - // ----------------------------------------------------------------------- - - actorOf[Consumer4] // POSTing "stop" to http://0.0.0.0:8877/camel/stop stops and 
unpublishes this actor - actorOf[Consumer5] // POSTing any msg to http://0.0.0.0:8877/camel/start starts and published Consumer4 again. - - // ----------------------------------------------------------------------- - // Active object example - // ----------------------------------------------------------------------- - - // TODO: investigate why this consumer is not published - TypedActor.typedActorOf(classOf[TypedConsumer1], classOf[TypedConsumer1Impl], Props()) -} - -/** - * @author Martin Krasser - */ -class CustomRouteBuilder extends RouteBuilder { - def configure { - val actorUri = "actor:%s" format classOf[Consumer2].getName - from("jetty:http://0.0.0.0:8877/camel/custom").to(actorUri) - from("direct:welcome").process(new Processor() { - def process(exchange: Exchange) { - exchange.getOut.setBody("Welcome %s" format exchange.getIn.getBody) - } - }) - } -} diff --git a/akka-samples/akka-sample-camel/src/main/scala/sample/camel/ClientApplication.scala b/akka-samples/akka-sample-camel/src/main/scala/sample/camel/ClientApplication.scala deleted file mode 100644 index c5662ea3b6..0000000000 --- a/akka-samples/akka-sample-camel/src/main/scala/sample/camel/ClientApplication.scala +++ /dev/null @@ -1,29 +0,0 @@ -package sample.camel - -import akka.actor.Actor._ -import akka.actor.TypedActor -import akka.camel.Message - -/** - * @author Martin Krasser - */ -object ClientApplication extends App { - - /* TODO: fix remote example - - val actor1 = remote.actorOf[RemoteActor1]("localhost", 7777) - val actor2 = remote.actorFor("remote2", "localhost", 7777) - - val typedActor1 = - TypedActor.newRemoteInstance(classOf[RemoteTypedConsumer1],classOf[RemoteTypedConsumer1Impl], "localhost", 7777) - - val typedActor2 = remote.typedActorFor(classOf[RemoteTypedConsumer2], "remote3", "localhost", 7777) - - println(actor1 !! Message("actor1")) // activates and publishes actor remotely - println(actor2 !! 
Message("actor2")) // actor already activated and published remotely - - println(typedActor1.foo("x1", "y1")) // activates and publishes typed actor methods remotely - println(typedActor2.foo("x2", "y2")) // typed actor methods already activated and published remotely - - */ -} diff --git a/akka-samples/akka-sample-camel/src/main/scala/sample/camel/ServerApplication.scala b/akka-samples/akka-sample-camel/src/main/scala/sample/camel/ServerApplication.scala deleted file mode 100644 index aae7a61d99..0000000000 --- a/akka-samples/akka-sample-camel/src/main/scala/sample/camel/ServerApplication.scala +++ /dev/null @@ -1,27 +0,0 @@ -package sample.camel - -import akka.actor.Actor._ -import akka.camel.CamelServiceManager -import akka.actor.{ TypedActor, Props } - -/** - * @author Martin Krasser - */ -object ServerApplication extends App { - import CamelServiceManager._ - - /* TODO: fix remote example - - startCamelService - - val ua = actorOf[RemoteActor2] - val ta = TypedActor.typedActorOf( - classOf[RemoteTypedConsumer2], - classOf[RemoteTypedConsumer2Impl], Props()) - - remote.start("localhost", 7777) - remote.register("remote2", ua) - remote.registerTypedActor("remote3", ta) - - */ -} diff --git a/akka-samples/akka-sample-camel/src/main/scala/sample/camel/StandaloneApplication.scala b/akka-samples/akka-sample-camel/src/main/scala/sample/camel/StandaloneApplication.scala deleted file mode 100644 index 62506c5c5a..0000000000 --- a/akka-samples/akka-sample-camel/src/main/scala/sample/camel/StandaloneApplication.scala +++ /dev/null @@ -1,128 +0,0 @@ -package sample.camel - -import org.apache.camel.impl.{ DefaultCamelContext, SimpleRegistry } -import org.apache.camel.builder.RouteBuilder -import org.apache.camel.spring.spi.ApplicationContextRegistry -import org.springframework.context.support.ClassPathXmlApplicationContext - -import akka.actor.{ Actor, TypedActor, Props } -import akka.camel._ - -/** - * @author Martin Krasser - */ -object StandaloneApplication extends App { - import CamelContextManager._ - import CamelServiceManager._ - - // 'externally' register typed actors - val registry = new SimpleRegistry - registry.put("sample", TypedActor.typedActorOf(classOf[BeanIntf], classOf[BeanImpl], Props())) - - // customize CamelContext - CamelContextManager.init(new DefaultCamelContext(registry)) - CamelContextManager.mandatoryContext.addRoutes(new StandaloneApplicationRoute) - - startCamelService - - // access 'externally' registered typed actors - assert("hello msg1" == mandatoryContext.createProducerTemplate.requestBody("direct:test", "msg1")) - - mandatoryService.awaitEndpointActivation(1) { - // 'internally' register typed actor (requires CamelService) - TypedActor.typedActorOf(classOf[TypedConsumer2], classOf[TypedConsumer2Impl], Props()) - } - - // access 'internally' (automatically) registered typed-actors - // (see @consume annotation value at TypedConsumer2.foo method) - assert("default: msg3" == mandatoryContext.createProducerTemplate.requestBody("direct:default", "msg3")) - - stopCamelService - - Actor.registry.local.shutdownAll -} - -class StandaloneApplicationRoute extends RouteBuilder { - def configure = { - // route to typed actors (in SimpleRegistry) - from("direct:test").to("typed-actor:sample?method=foo") - } -} - -object StandaloneSpringApplication extends App { - import CamelContextManager._ - - // load Spring application context - val appctx = new ClassPathXmlApplicationContext("/context-standalone.xml") - - // We cannot use the CamelServiceManager to wait for endpoint 
activation - // because CamelServiceManager is started by the Spring application context. - // (and hence is not available for setting expectations on activations). This - // will be improved/enabled in upcoming releases. - Thread.sleep(1000) - - // access 'externally' registered typed actors with typed-actor component - assert("hello msg3" == mandatoryTemplate.requestBody("direct:test3", "msg3")) - - // access auto-started untyped consumer - assert("received msg3" == mandatoryTemplate.requestBody("direct:untyped-consumer-1", "msg3")) - - appctx.close - - Actor.registry.local.shutdownAll -} - -class StandaloneSpringApplicationRoute extends RouteBuilder { - def configure = { - // routes to typed actor (in ApplicationContextRegistry) - from("direct:test3").to("typed-actor:ta?method=foo") - } -} - -object StandaloneJmsApplication extends App { - import CamelServiceManager._ - - val context = new ClassPathXmlApplicationContext("/context-jms.xml") - val registry = new ApplicationContextRegistry(context) - - // Init CamelContextManager with custom CamelContext - CamelContextManager.init(new DefaultCamelContext(registry)) - - startCamelService - - val jmsUri = "jms:topic:test" - val jmsPublisher = Actor.actorOf(new Publisher(jmsUri), "jms-publisher") - - mandatoryService.awaitEndpointActivation(2) { - Actor.actorOf(new Subscriber("jms-subscriber-1", jmsUri)) - Actor.actorOf(new Subscriber("jms-subscriber-2", jmsUri)) - } - - // Send 10 messages to via publisher actor - for (i ← 1 to 10) { - jmsPublisher ! ("Akka rocks (%d)" format i) - } - - // Send 10 messages to JMS topic directly - for (i ← 1 to 10) { - CamelContextManager.mandatoryTemplate.sendBody(jmsUri, "Camel rocks (%d)" format i) - } - - // Wait a bit for subscribes to receive messages - Thread.sleep(1000) - - stopCamelService - Actor.registry.local.shutdownAll -} - -object StandaloneFileApplication { - import CamelServiceManager._ - - def main(args: Array[String]) { - startCamelService - mandatoryService.awaitEndpointActivation(1) { - Actor.actorOf(new FileConsumer) - } - } -} - diff --git a/akka-samples/akka-sample-camel/src/test/java/sample/camel/SampleRemoteUntypedConsumer.java b/akka-samples/akka-sample-camel/src/test/java/sample/camel/SampleRemoteUntypedConsumer.java deleted file mode 100644 index 4d90518a11..0000000000 --- a/akka-samples/akka-sample-camel/src/test/java/sample/camel/SampleRemoteUntypedConsumer.java +++ /dev/null @@ -1,21 +0,0 @@ -package sample.camel; - -import akka.camel.Message; -import akka.camel.UntypedConsumerActor; - -/** - * @author Martin Krasser - */ -public class SampleRemoteUntypedConsumer extends UntypedConsumerActor { - public String getEndpointUri() { - return "direct:remote-untyped-consumer"; - } - - public void onReceive(Object message) { - Message msg = (Message)message; - String body = msg.getBodyAs(String.class); - String header = msg.getHeaderAs("test", String.class); - sender().tell(String.format("%s %s", body, header)); - } - -} diff --git a/akka-samples/akka-sample-camel/src/test/resources/logback.xml b/akka-samples/akka-sample-camel/src/test/resources/logback.xml deleted file mode 100644 index 023f2cd317..0000000000 --- a/akka-samples/akka-sample-camel/src/test/resources/logback.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - [%4p] [%d{ISO8601}] [%t] %c{1}: %m%n - - - - - - diff --git a/akka-samples/akka-sample-camel/src/test/scala/sample/camel/HttpConcurrencyTestStress.scala b/akka-samples/akka-sample-camel/src/test/scala/sample/camel/HttpConcurrencyTestStress.scala deleted file mode 100644 
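
// For reference, the standalone Camel applications deleted above all follow the same
// lifecycle. This is a condensed sketch built only from calls that appear in the removed
// code; the object name is illustrative and it is assumed to sit in package sample.camel
// next to the FileConsumer defined earlier in this diff:

import org.apache.camel.impl.DefaultCamelContext
import akka.actor.Actor
import akka.camel.CamelContextManager
import akka.camel.CamelServiceManager._

object CamelLifecycleSketch extends App {
  CamelContextManager.init(new DefaultCamelContext)   // plug in a custom context/registry here if needed
  startCamelService

  // consumers created inside this block are counted towards endpoint activation
  mandatoryService.awaitEndpointActivation(1) {
    Actor.actorOf(new FileConsumer)
  }

  // ... exchange messages via CamelContextManager.mandatoryTemplate ...

  stopCamelService
  Actor.registry.local.shutdownAll
}
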
index 6fca5b42f8..0000000000 --- a/akka-samples/akka-sample-camel/src/test/scala/sample/camel/HttpConcurrencyTestStress.scala +++ /dev/null @@ -1,99 +0,0 @@ -package sample.camel - -import _root_.akka.routing.{ RoutedProps, Routing } -import collection.mutable.Set - -import java.util.concurrent.CountDownLatch - -import org.junit._ -import org.scalatest.junit.JUnitSuite - -import akka.actor.Actor._ -import akka.actor.{ ActorRegistry, ActorRef, Actor } -import akka.camel._ -import akka.camel.CamelServiceManager._ -/** - * @author Martin Krasser - */ -class HttpConcurrencyTestStress extends JUnitSuite { - import HttpConcurrencyTestStress._ - - @Test - def shouldProcessMessagesConcurrently = { - /* TODO: fix stress test - - val num = 50 - val latch1 = new CountDownLatch(num) - val latch2 = new CountDownLatch(num) - val latch3 = new CountDownLatch(num) - val client1 = actorOf(new HttpClientActor("client1", latch1)) - val client2 = actorOf(new HttpClientActor("client2", latch2)) - val client3 = actorOf(new HttpClientActor("client3", latch3)) - for (i <- 1 to num) { - client1 ! Message("client1", Map(Message.MessageExchangeId -> i)) - client2 ! Message("client2", Map(Message.MessageExchangeId -> i)) - client3 ! Message("client3", Map(Message.MessageExchangeId -> i)) - } - latch1.await - latch2.await - latch3.await - assert(num == (client1 ? "getCorrelationIdCount").as[Int].get) - assert(num == (client2 ? "getCorrelationIdCount").as[Int].get) - assert(num == (client3 ? "getCorrelationIdCount").as[Int].get)*/ - } -} - -object HttpConcurrencyTestStress { - @BeforeClass - def beforeClass{ - startCamelService - - val workers = for (i ← 1 to 8) yield actorOf[HttpServerWorker] - val balancer = Routing.actorOf(RoutedProps().withRoundRobinRouter.withConnections(workers), "loadbalancer") - //service.get.awaitEndpointActivation(1) { - // actorOf(new HttpServerActor(balancer)) - //} - } - - @AfterClass - def afterClass = { - stopCamelService - Actor.registry.local.shutdownAll - } - - class HttpClientActor(label: String, latch: CountDownLatch) extends Actor with Producer { - def endpointUri = "jetty:http://0.0.0.0:8855/echo" - var correlationIds = Set[Any]() - - override protected def receive = { - case "getCorrelationIdCount" ⇒ sender ! correlationIds.size - case msg ⇒ super.receive(msg) - } - - override protected def receiveAfterProduce = { - case msg: Message ⇒ { - val corr = msg.headers(Message.MessageExchangeId) - val body = msg.bodyAs[String] - correlationIds += corr - assert(label == body) - latch.countDown - print(".") - } - } - } - - class HttpServerActor(balancer: ActorRef) extends Actor with Consumer { - def endpointUri = "jetty:http://0.0.0.0:8855/echo" - var counter = 0 - - def receive = { - case msg ⇒ balancer forward msg - } - } - - class HttpServerWorker extends Actor { - protected def receive = { - case msg ⇒ sender ! 
msg - } - } -} diff --git a/akka-samples/akka-sample-camel/src/test/scala/sample/camel/RemoteConsumerTest.scala b/akka-samples/akka-sample-camel/src/test/scala/sample/camel/RemoteConsumerTest.scala deleted file mode 100644 index aec3a92804..0000000000 --- a/akka-samples/akka-sample-camel/src/test/scala/sample/camel/RemoteConsumerTest.scala +++ /dev/null @@ -1,101 +0,0 @@ -package sample.camel - -import org.scalatest.{ GivenWhenThen, BeforeAndAfterAll, FeatureSpec } - -import akka.actor.Actor._ -import akka.actor._ -import akka.camel._ -//import akka.cluster.netty.NettyRemoteSupport -//import akka.cluster.RemoteServerModule - -/** - * @author Martin Krasser - */ -class RemoteConsumerTest /*extends FeatureSpec with BeforeAndAfterAll with GivenWhenThen*/ { - /* TODO: fix remote test - - import CamelServiceManager._ - import RemoteConsumerTest._ - - var server: RemoteServerModule = _ - - override protected def beforeAll = { - registry.shutdownAll - - startCamelService - - remote.shutdown - remote.asInstanceOf[NettyRemoteSupport].optimizeLocal.set(false) - - server = remote.start(host,port) - } - - override protected def afterAll = { - remote.shutdown - - stopCamelService - - registry.shutdownAll - remote.asInstanceOf[NettyRemoteSupport].optimizeLocal.set(true) - } - - feature("Publish consumer on remote node") { - scenario("access published remote consumer") { - given("a consumer actor") - val consumer = Actor.actorOf[RemoteConsumer] - - when("registered at the server") - assert(mandatoryService.awaitEndpointActivation(1) { - remote.register(consumer) - }) - - then("the published consumer is accessible via its endpoint URI") - val response = CamelContextManager.mandatoryTemplate.requestBody("direct:remote-consumer", "test") - assert(response === "remote actor: test") - } - } - - feature("Publish typed consumer on remote node") { - scenario("access published remote consumer method") { - given("a typed consumer actor") - when("registered at the server") - assert(mandatoryService.awaitEndpointActivation(1) { - remote.registerTypedActor("whatever", TypedActor.newInstance( - classOf[SampleRemoteTypedConsumer], - classOf[SampleRemoteTypedConsumerImpl])) - }) - then("the published method is accessible via its endpoint URI") - val response = CamelContextManager.mandatoryTemplate.requestBody("direct:remote-typed-consumer", "test") - assert(response === "remote typed actor: test") - } - } - - feature("Publish untyped consumer on remote node") { - scenario("access published remote untyped consumer") { - given("an untyped consumer actor") - val consumer = Actor.actorOf(classOf[SampleRemoteUntypedConsumer]) - - when("registered at the server") - assert(mandatoryService.awaitEndpointActivation(1) { - remote.register(consumer) - }) - then("the published untyped consumer is accessible via its endpoint URI") - val response = CamelContextManager.mandatoryTemplate.requestBodyAndHeader("direct:remote-untyped-consumer", "a", "test", "b") - assert(response === "a b") - } - }*/ -} - -object RemoteConsumerTest { - val host = "localhost" - val port = 7774 - - class RemoteConsumer extends Actor with Consumer { - def endpointUri = "direct:remote-consumer" - - protected def receive = { - case "init" ⇒ sender ! "done" - case m: Message ⇒ sender ! 
("remote actor: %s" format m.body) - } - } -} diff --git a/akka-samples/akka-sample-chat/README b/akka-samples/akka-sample-chat/README deleted file mode 100644 index 965d3d5f75..0000000000 --- a/akka-samples/akka-sample-chat/README +++ /dev/null @@ -1,26 +0,0 @@ -Akka Chat Client/Server Sample Application - -How to run the sample: - -1. Fire up two shells. For each of them: - - Step down into to the root of the Akka distribution. - - Set 'export AKKA_HOME=. - - Run 'sbt console' to start up a REPL (interpreter). -2. In the first REPL you get execute: - - scala> import sample.chat._ - - scala> import akka.actor.Actor._ - - scala> val chatService = actorOf[ChatService] -3. In the second REPL you get execute: - - scala> import sample.chat._ - - scala> ClientRunner.run -4. See the chat simulation run. -5. Run it again to see full speed after first initialization. -6. In the client REPL, or in a new REPL, you can also create your own client - - scala> import sample.chat._ - - scala> val myClient = new ChatClient("") - - scala> myClient.login - - scala> myClient.post("Can I join?") - - scala> println("CHAT LOG:\n\t" + myClient.chatLog.log.mkString("\n\t")) - -That’s it. Have fun. - diff --git a/akka-samples/akka-sample-chat/src/main/scala/ChatServer.scala b/akka-samples/akka-sample-chat/src/main/scala/ChatServer.scala deleted file mode 100644 index d9b58ef771..0000000000 --- a/akka-samples/akka-sample-chat/src/main/scala/ChatServer.scala +++ /dev/null @@ -1,253 +0,0 @@ - /** - * Copyright (C) 2009-2010 Typesafe Inc. . - */ - - package sample.chat - - import scala.collection.mutable.HashMap - - import akka.actor.{Actor, ActorRef, Props} - import akka.stm._ - import akka.actor.Actor._ - import akka.event.EventHandler - - /****************************************************************************** - Akka Chat Client/Server Sample Application - - How to run the sample: - - 1. Fire up two shells. For each of them: - - Step down into to the root of the Akka distribution. - - Set 'export AKKA_HOME=. - - Run 'sbt console' to start up a REPL (interpreter). - 2. In the first REPL you get execute: - - scala> import sample.chat._ - - scala> import akka.actor.Actor._ - - scala> val chatService = actorOf[ChatService] - 3. In the second REPL you get execute: - - scala> import sample.chat._ - - scala> ClientRunner.run - 4. See the chat simulation run. - 5. Run it again to see full speed after first initialization. - 6. In the client REPL, or in a new REPL, you can also create your own client - - scala> import sample.chat._ - - scala> val myClient = new ChatClient("") - - scala> myClient.login - - scala> myClient.post("Can I join?") - - scala> println("CHAT LOG:\n\t" + myClient.chatLog.log.mkString("\n\t")) - - - That’s it. Have fun. - - ******************************************************************************/ - - /** - * ChatServer's internal events. - */ - sealed trait Event - case class Login(user: String) extends Event - case class Logout(user: String) extends Event - case class GetChatLog(from: String) extends Event - case class ChatLog(log: List[String]) extends Event - case class ChatMessage(from: String, message: String) extends Event - - /** - * Chat client. - */ - class ChatClient(val name: String) { - val chat = Actor.remote.actorFor("chat:service", "localhost", 2552) - - def login = chat ! Login(name) - def logout = chat ! Logout(name) - def post(message: String) = chat ! ChatMessage(name, name + ": " + message) - def chatLog = (chat !! 
GetChatLog(name)).as[ChatLog].getOrElse(throw new Exception("Couldn't get the chat log from ChatServer")) - } - - /** - * Internal chat client session. - */ - class Session(user: String, storage: ActorRef) extends Actor { - private val loginTime = System.currentTimeMillis - private var userLog: List[String] = Nil - - EventHandler.info(this, "New session for user [%s] has been created at [%s]".format(user, loginTime)) - - def receive = { - case msg @ ChatMessage(from, message) => - userLog ::= message - storage ! msg - - case msg @ GetChatLog(_) => - storage forward msg - } - } - - /** - * Abstraction of chat storage holding the chat log. - */ - trait ChatStorage extends Actor - - /** - * Memory-backed chat storage implementation. - */ - class MemoryChatStorage extends ChatStorage { - private var chatLog = TransactionalVector[Array[Byte]]() - - EventHandler.info(this, "Memory-based chat storage is starting up...") - - def receive = { - case msg @ ChatMessage(from, message) => - EventHandler.debug(this, "New chat message [%s]".format(message)) - atomic { chatLog + message.getBytes("UTF-8") } - - case GetChatLog(_) => - val messageList = atomic { chatLog.map(bytes => new String(bytes, "UTF-8")).toList } - reply(ChatLog(messageList)) - } - - override def postRestart(reason: Throwable) { - chatLog = TransactionalVector() - } - } - - /** - * Implements user session management. - *
<p/>
- * Uses self-type annotation (this: Actor =>) to declare that it needs to be mixed in with an Actor. - */ - trait SessionManagement { this: Actor => - - val storage: ActorRef // needs someone to provide the ChatStorage - val sessions = new HashMap[String, ActorRef] - - protected def sessionManagement: Receive = { - case Login(username) => - EventHandler.info(this, "User [%s] has logged in".format(username)) - val session = actorOf(new Session(username, storage)) - session - sessions += (username -> session) - - case Logout(username) => - EventHandler.info(this, "User [%s] has logged out".format(username)) - val session = sessions(username) - session.stop() - sessions -= username - } - - protected def shutdownSessions() { - sessions.foreach { case (_, session) => session.stop() } - } - } - - /** - * Implements chat management, e.g. chat message dispatch. - *
<p/>
- * Uses self-type annotation (this: Actor =>) to declare that it needs to be mixed in with an Actor. - */ - trait ChatManagement { this: Actor => - val sessions: HashMap[String, ActorRef] // needs someone to provide the Session map - - protected def chatManagement: Receive = { - case msg @ ChatMessage(from, _) => getSession(from).foreach(_ ! msg) - case msg @ GetChatLog(from) => getSession(from).foreach(_ forward msg) - } - - private def getSession(from: String) : Option[ActorRef] = { - if (sessions.contains(from)) - Some(sessions(from)) - else { - EventHandler.info(this, "Session expired for %s".format(from)) - None - } - } - } - - /** - * Creates and links a MemoryChatStorage. - */ - trait MemoryChatStorageFactory { this: Actor => - val storage = actorOf(Props[MemoryChatStorage].withSupervisor(this.self)) // starts and links ChatStorage - } - - /** - * Chat server. Manages sessions and redirects all other messages to the Session for the client. - */ - trait ChatServer extends Actor { - //faultHandler = OneForOneStrategy(List(classOf[Exception]),5, 5000) - val storage: ActorRef - - EventHandler.info(this, "Chat server is starting up...") - - // actor message handler - def receive: Receive = sessionManagement orElse chatManagement - - // abstract methods to be defined somewhere else - protected def chatManagement: Receive - protected def sessionManagement: Receive - protected def shutdownSessions() - - override def postStop() { - EventHandler.info(this, "Chat server is shutting down...") - shutdownSessions() - storage.stop() - } - } - - /** - * Class encapsulating the full Chat Service. - * Start service by invoking: - *
<pre>
-   * val chatService = Actor.actorOf[ChatService]
-   * </pre>
- */ - class ChatService extends - ChatServer with - SessionManagement with - ChatManagement with - MemoryChatStorageFactory { - override def preStart() { - remote.start("localhost", 2552); - remote.register("chat:service", self) //Register the actor with the specified service id - } - } - - /** - * Test runner starting ChatService. - */ - object ServerRunner { - - def main(args: Array[String]) { ServerRunner.run() } - - def run() { - actorOf[ChatService] - } - } - - /** - * Test runner emulating a chat session. - */ - object ClientRunner { - - def main(args: Array[String]) { ClientRunner.run() } - - def run() { - - val client1 = new ChatClient("jonas") - client1.login - val client2 = new ChatClient("patrik") - client2.login - - client1.post("Hi there") - println("CHAT LOG:\n\t" + client1.chatLog.log.mkString("\n\t")) - - client2.post("Hello") - println("CHAT LOG:\n\t" + client2.chatLog.log.mkString("\n\t")) - - client1.post("Hi again") - println("CHAT LOG:\n\t" + client1.chatLog.log.mkString("\n\t")) - - client1.logout - client2.logout - } - } - diff --git a/akka-samples/akka-sample-hello/config/akka.conf b/akka-samples/akka-sample-hello/config/akka.conf deleted file mode 100644 index 5b8920874f..0000000000 --- a/akka-samples/akka-sample-hello/config/akka.conf +++ /dev/null @@ -1,27 +0,0 @@ -#################### -# Akka Config File # -#################### - -akka { - version = "2.0-SNAPSHOT" - - enabled-modules = ["http"] - - time-unit = "seconds" - - event-handlers = ["akka.event.EventHandler$DefaultListener"] - - boot = ["sample.hello.Boot"] - - http { - hostname = "localhost" - port = 9998 - - connection-close = true - root-actor-id = "_httproot" - root-actor-builtin = true - timeout = 1000 - expired-header-name = "Async-Timeout" - expired-header-value = "expired" - } -} diff --git a/akka-samples/akka-sample-hello/config/microkernel-server.xml b/akka-samples/akka-sample-hello/config/microkernel-server.xml deleted file mode 100644 index 4f86dab23c..0000000000 --- a/akka-samples/akka-sample-hello/config/microkernel-server.xml +++ /dev/null @@ -1,65 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - 300000 - 2 - false - 8443 - 20000 - 5000 - - - - - - - - - - - - - - / - - akka.http.AkkaMistServlet - /* - - - - - - - - - - - - - - - true - true - true - 1000 - - diff --git a/akka-samples/akka-sample-hello/src/main/scala/sample/hello/Boot.scala b/akka-samples/akka-sample-hello/src/main/scala/sample/hello/Boot.scala deleted file mode 100644 index 149c6a3ee4..0000000000 --- a/akka-samples/akka-sample-hello/src/main/scala/sample/hello/Boot.scala +++ /dev/null @@ -1,14 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - */ - -package sample.hello - -import akka.actor._ -import akka.http._ - -class Boot { - val supervisor = Supervisor(OneForOneStrategy(List(classOf[Exception]), 3, 100)) - Actor.actorOf(Props[RootEndpoint].withSupervisor(supervisor)) - Actor.actorOf(Props[HelloEndpoint].withSupervisor(supervisor)) -} diff --git a/akka-samples/akka-sample-hello/src/main/scala/sample/hello/HelloEndpoint.scala b/akka-samples/akka-sample-hello/src/main/scala/sample/hello/HelloEndpoint.scala deleted file mode 100644 index 2ea8c1fe83..0000000000 --- a/akka-samples/akka-sample-hello/src/main/scala/sample/hello/HelloEndpoint.scala +++ /dev/null @@ -1,29 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. 
- */ - -package sample.hello - -import akka.actor._ -import akka.http._ - -import java.text.DateFormat -import java.util.Date - -class HelloEndpoint extends Actor with Endpoint { - self.dispatcher = Endpoint.Dispatcher - - lazy val hello = Actor.actorOf( - new Actor { - def time = DateFormat.getTimeInstance.format(new Date) - def receive = { - case get: Get => get OK "Hello at " + time - } - }) - - def hook: Endpoint.Hook = { case _ => hello } - - override def preStart = Actor.registry.actorFor(MistSettings.RootActorID).get ! Endpoint.Attach(hook) - - def receive = handleHttpRequest -} diff --git a/akka-samples/akka-sample-osgi/src/main/scala/OsgiExample.scala b/akka-samples/akka-sample-osgi/src/main/scala/OsgiExample.scala deleted file mode 100644 index 3e2d7af049..0000000000 --- a/akka-samples/akka-sample-osgi/src/main/scala/OsgiExample.scala +++ /dev/null @@ -1,31 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - */ -package sample.osgi - -import akka.actor.Actor -import akka.actor.Actor._ - -import org.osgi.framework.{ BundleActivator, BundleContext } - -class Activator extends BundleActivator { - - def start(context: BundleContext) { - println("Starting the OSGi example ...") - val echo = actorOf[EchoActor] - val answer = (echo ? "OSGi example").as[String] - println(answer getOrElse "No answer!") - } - - def stop(context: BundleContext) { - Actor.registry.local.shutdownAll() - println("Stopped the OSGi example.") - } -} - -class EchoActor extends Actor { - - override def receive = { - case x => reply(x) - } -} diff --git a/akka-samples/akka-sample-remote/README b/akka-samples/akka-sample-remote/README deleted file mode 100644 index f19386e1e3..0000000000 --- a/akka-samples/akka-sample-remote/README +++ /dev/null @@ -1,27 +0,0 @@ ---------------------------------------------------------- -== Akka Remote Sample Application == ---------------------------------------------------------- -= Server Managed Remote Actors Sample = - -To run the sample: - -1. Fire up two shells. For each of them: - - Step down into to the root of the Akka distribution. - - Set 'export AKKA_HOME=. - - Run 'sbt' - - Run 'update' followed by 'compile' if you have not done that before. - - Run 'project akka-sample-remote' - - Run 'console' to start up a REPL (interpreter). -2. In the first REPL you get execute: - - scala> import sample.remote._ - - scala> ServerManagedRemoteActorServer.run - This starts up the RemoteNode and registers the remote actor -3. In the second REPL you get execute: - - scala> import sample.remote._ - - scala> ServerManagedRemoteActorClient.run -4. See the actor conversation. -5. Run it again to see full speed after first initialization. - -Now you could test client reconnect by killing the console running the ServerManagedRemoteActorClient and start it up again. See the client reconnect take place in the REPL shell. - -That’s it. Have fun. \ No newline at end of file diff --git a/akka-samples/akka-sample-remote/src/main/scala/ServerManagedRemoteActorSample.scala b/akka-samples/akka-sample-remote/src/main/scala/ServerManagedRemoteActorSample.scala deleted file mode 100644 index 84a201f530..0000000000 --- a/akka-samples/akka-sample-remote/src/main/scala/ServerManagedRemoteActorSample.scala +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - */ - -package sample.remote - -import akka.actor.Actor._ -import akka.actor. 
{ActorRegistry, Actor} - -class HelloWorldActor extends Actor { - def receive = { - case "Hello" => - reply("World") - } -} - -object ServerManagedRemoteActorServer { - - def run = { - Actor.remote.start("localhost", 2552) - Actor.remote.register("hello-service", actorOf[HelloWorldActor]) - } - - def main(args: Array[String]) = run -} - -object ServerManagedRemoteActorClient { - - def run = { - val actor = Actor.remote.actorFor("hello-service", "localhost", 2552) - val result = actor !! "Hello" - } - - def main(args: Array[String]) = run -} - diff --git a/akka-slf4j/src/main/scala/akka/event/slf4j/SLF4J.scala b/akka-slf4j/src/main/scala/akka/event/slf4j/SLF4J.scala index dc175e4e82..73d37a838b 100644 --- a/akka-slf4j/src/main/scala/akka/event/slf4j/SLF4J.scala +++ b/akka-slf4j/src/main/scala/akka/event/slf4j/SLF4J.scala @@ -5,7 +5,7 @@ package akka.event.slf4j import org.slf4j.{ Logger ⇒ SLFLogger, LoggerFactory ⇒ SLFLoggerFactory } - +import org.slf4j.MDC import akka.event.Logging._ import akka.actor._ @@ -27,31 +27,54 @@ object Logger { /** * SLF4J Event Handler. * + * The thread in which the logging was performed is captured in + * Mapped Diagnostic Context (MDC) with attribute name "sourceThread". + * * @author Jonas Bonér */ class Slf4jEventHandler extends Actor with SLF4JLogging { + val mdcThreadAttributeName = "sourceThread" + def receive = { + case event @ Error(cause, logSource, message) ⇒ - Logger(logSource).error("[{}] [{}] [{}]", - Array[AnyRef](event.thread.getName, message.asInstanceOf[AnyRef], stackTraceFor(cause))) + withMdc(mdcThreadAttributeName, event.thread.getName) { + cause match { + case Error.NoCause ⇒ Logger(logSource).error(message.toString) + case _ ⇒ Logger(logSource).error(message.toString, cause) + } + } case event @ Warning(logSource, message) ⇒ - Logger(logSource).warn("[{}] [{}]", - event.thread.getName, message.asInstanceOf[AnyRef]) + withMdc(mdcThreadAttributeName, event.thread.getName) { + Logger(logSource).warn("{}", message.asInstanceOf[AnyRef]) + } case event @ Info(logSource, message) ⇒ - Logger(logSource).info("[{}] [{}]", - event.thread.getName, message.asInstanceOf[AnyRef]) + withMdc(mdcThreadAttributeName, event.thread.getName) { + Logger(logSource).info("{}", message.asInstanceOf[AnyRef]) + } case event @ Debug(logSource, message) ⇒ - Logger(logSource).debug("[{}] [{}]", - event.thread.getName, message.asInstanceOf[AnyRef]) + withMdc(mdcThreadAttributeName, event.thread.getName) { + Logger(logSource).debug("{}", message.asInstanceOf[AnyRef]) + } case InitializeLogger(_) ⇒ log.info("Slf4jEventHandler started") sender ! 
LoggerInitialized } + @inline + final def withMdc(name: String, value: String)(logStatement: ⇒ Unit) { + MDC.put(name, value) + try { + logStatement + } finally { + MDC.remove(name) + } + } + } diff --git a/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala b/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala index 22c10271b7..e027620b61 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala @@ -33,7 +33,7 @@ class TestBarrier(count: Int) { } catch { case e: TimeoutException ⇒ throw new TestBarrierTimeoutException("Timeout of %s and time factor of %s" - format (timeout.toString, TestKitExtension(system).settings.TestTimeFactor)) + format (timeout.toString, TestKitExtension(system).TestTimeFactor)) } } diff --git a/akka-testkit/src/main/scala/akka/testkit/TestEventListener.scala b/akka-testkit/src/main/scala/akka/testkit/TestEventListener.scala index 675bdfe8c1..543d443da6 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestEventListener.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestEventListener.scala @@ -81,8 +81,7 @@ abstract class EventFilter(occurrences: Int) { */ def intercept[T](code: ⇒ T)(implicit system: ActorSystem): T = { system.eventStream publish TestEvent.Mute(this) - val testKitExtension = TestKitExtension(system) - val leeway = testKitExtension.settings.TestEventFilterLeeway + val leeway = TestKitExtension(system).TestEventFilterLeeway try { val result = code if (!awaitDone(leeway)) diff --git a/akka-testkit/src/main/scala/akka/testkit/TestKit.scala b/akka-testkit/src/main/scala/akka/testkit/TestKit.scala index 88548e9cb2..dba8437ef6 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestKit.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestKit.scala @@ -81,7 +81,7 @@ class TestKit(_system: ActorSystem) { import TestActor.{ Message, RealMessage, NullMessage } implicit val system = _system - val testKitExtension = TestKitExtension(system) + val testKitSettings = TestKitExtension(system) private val queue = new LinkedBlockingDeque[Message]() private[akka] var lastMessage: Message = NullMessage @@ -128,7 +128,7 @@ class TestKit(_system: ActorSystem) { * block or missing that it returns the properly dilated default for this * case from settings (key "akka.test.single-expect-default"). */ - def remaining: Duration = if (end == Duration.Undefined) testKitExtension.settings.SingleExpectDefaultTimeout.dilated else end - now + def remaining: Duration = if (end == Duration.Undefined) testKitSettings.SingleExpectDefaultTimeout.dilated else end - now /** * Query queue status. @@ -569,10 +569,8 @@ object TestKit { * Java API. Scale timeouts (durations) during tests with the configured * 'akka.test.timefactor'. 
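
// Note on the Slf4jEventHandler hunk above: the logging thread is no longer prefixed
// into the message text, it is published through SLF4J's Mapped Diagnostic Context
// under the key "sourceThread", so the backend layout has to pull it out explicitly.
// The Logback pattern below is illustrative, not part of this change:
//
//   <pattern>%date{ISO8601} %-5level %logger{36} [%X{sourceThread}] - %msg%n</pattern>
//
// The put/try/finally idiom of withMdc is plain org.slf4j.MDC API and can be reused
// outside the handler; this is a hypothetical helper, not part of the patch:

import org.slf4j.MDC

object MdcSupport {
  def withSourceThread[T](threadName: String)(body: => T): T = {
    MDC.put("sourceThread", threadName)           // same attribute name the handler uses
    try body finally MDC.remove("sourceThread")   // always clean up, log threads are reused
  }
}
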
*/ - def dilated(duration: Duration, system: ActorSystem): Duration = { - duration * TestKitExtension(system).settings.TestTimeFactor - } - + def dilated(duration: Duration, system: ActorSystem): Duration = + duration * TestKitExtension(system).TestTimeFactor } /** diff --git a/akka-testkit/src/main/scala/akka/testkit/TestKitExtension.scala b/akka-testkit/src/main/scala/akka/testkit/TestKitExtension.scala index d1ef60065f..5af1bde50a 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestKitExtension.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestKitExtension.scala @@ -3,53 +3,27 @@ */ package akka.testkit -import akka.actor.ActorSystem -import akka.actor.ExtensionKey -import akka.actor.Extension -import akka.actor.ActorSystemImpl import com.typesafe.config.Config import com.typesafe.config.ConfigFactory import com.typesafe.config.ConfigParseOptions import com.typesafe.config.ConfigRoot import akka.util.Duration import java.util.concurrent.TimeUnit.MILLISECONDS +import akka.actor.{ ExtensionId, ActorSystem, Extension, ActorSystemImpl } -object TestKitExtensionKey extends ExtensionKey[TestKitExtension] - -object TestKitExtension { - def apply(system: ActorSystem): TestKitExtension = { - if (!system.hasExtension(TestKitExtensionKey)) { - system.registerExtension(new TestKitExtension) - } - system.extension(TestKitExtensionKey) - } - - class Settings(cfg: Config) { - private def referenceConfig: Config = - ConfigFactory.parseResource(classOf[ActorSystem], "/akka-testkit-reference.conf", - ConfigParseOptions.defaults.setAllowMissing(false)) - val config: ConfigRoot = ConfigFactory.emptyRoot("akka-testkit").withFallback(cfg).withFallback(referenceConfig).resolve() - - import config._ - - val TestTimeFactor = getDouble("akka.test.timefactor") - val SingleExpectDefaultTimeout = Duration(getMilliseconds("akka.test.single-expect-default"), MILLISECONDS) - val TestEventFilterLeeway = Duration(getMilliseconds("akka.test.filter-leeway"), MILLISECONDS) - - } +object TestKitExtension extends ExtensionId[TestKitSettings] { + def createExtension(system: ActorSystemImpl): TestKitSettings = new TestKitSettings(system.applicationConfig) } -class TestKitExtension extends Extension[TestKitExtension] { - import TestKitExtension._ - @volatile - private var _settings: Settings = _ +class TestKitSettings(cfg: Config) extends Extension { + private def referenceConfig: Config = + ConfigFactory.parseResource(classOf[ActorSystem], "/akka-testkit-reference.conf", + ConfigParseOptions.defaults.setAllowMissing(false)) + val config: ConfigRoot = ConfigFactory.emptyRoot("akka-testkit").withFallback(cfg).withFallback(referenceConfig).resolve() - def key = TestKitExtensionKey - - def init(system: ActorSystemImpl) { - _settings = new Settings(system.applicationConfig) - } - - def settings: Settings = _settings + import config._ + val TestTimeFactor = getDouble("akka.test.timefactor") + val SingleExpectDefaultTimeout = Duration(getMilliseconds("akka.test.single-expect-default"), MILLISECONDS) + val TestEventFilterLeeway = Duration(getMilliseconds("akka.test.filter-leeway"), MILLISECONDS) } \ No newline at end of file diff --git a/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala b/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala index 2cfb2edc85..1af4785525 100644 --- a/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala +++ b/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala @@ -34,10 +34,9 @@ class TestLatch(count: Int = 1)(implicit system: ActorSystem) { def await(): Boolean = 
await(TestLatch.DefaultTimeout) def await(timeout: Duration): Boolean = { - val testKitExtension = TestKitExtension(system) val opened = latch.await(timeout.dilated.toNanos, TimeUnit.NANOSECONDS) if (!opened) throw new TestLatchTimeoutException( - "Timeout of %s with time factor of %s" format (timeout.toString, testKitExtension.settings.TestTimeFactor)) + "Timeout of %s with time factor of %s" format (timeout.toString, TestKitExtension(system).TestTimeFactor)) opened } @@ -45,10 +44,9 @@ class TestLatch(count: Int = 1)(implicit system: ActorSystem) { * Timeout is expected. Throws exception if latch is opened before timeout. */ def awaitTimeout(timeout: Duration = TestLatch.DefaultTimeout) = { - val testKitExtension = TestKitExtension(system) val opened = latch.await(timeout.dilated.toNanos, TimeUnit.NANOSECONDS) if (opened) throw new TestLatchNoTimeoutException( - "Latch opened before timeout of %s with time factor of %s" format (timeout.toString, testKitExtension.settings.TestTimeFactor)) + "Latch opened before timeout of %s with time factor of %s" format (timeout.toString, TestKitExtension(system).TestTimeFactor)) opened } diff --git a/akka-testkit/src/main/scala/akka/testkit/package.scala b/akka-testkit/src/main/scala/akka/testkit/package.scala index 86b404c67d..6f7bf965ca 100644 --- a/akka-testkit/src/main/scala/akka/testkit/package.scala +++ b/akka-testkit/src/main/scala/akka/testkit/package.scala @@ -12,9 +12,9 @@ package object testkit { try { val result = block - val testKitExtension = TestKitExtension(system) - val stop = now + testKitExtension.settings.TestEventFilterLeeway.toMillis - val failed = eventFilters filterNot (_.awaitDone(Duration(stop - now, MILLISECONDS))) map ("Timeout (" + testKitExtension.settings.TestEventFilterLeeway + ") waiting for " + _) + val testKitSettings = TestKitExtension(system) + val stop = now + testKitSettings.TestEventFilterLeeway.toMillis + val failed = eventFilters filterNot (_.awaitDone(Duration(stop - now, MILLISECONDS))) map ("Timeout (" + testKitSettings.TestEventFilterLeeway + ") waiting for " + _) if (failed.nonEmpty) throw new AssertionError("Filter completion error:\n" + failed.mkString("\n")) @@ -45,7 +45,7 @@ package object testkit { */ class TestDuration(duration: Duration) { def dilated(implicit system: ActorSystem): Duration = { - duration * TestKitExtension(system).settings.TestTimeFactor + duration * TestKitExtension(system).TestTimeFactor } } } diff --git a/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala b/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala index 367efbbc88..7b2df6efab 100644 --- a/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala +++ b/akka-testkit/src/test/scala/akka/testkit/AkkaSpec.scala @@ -24,6 +24,7 @@ object AkkaSpec { akka { event-handlers = ["akka.testkit.TestEventListener"] loglevel = "WARNING" + stdout-loglevel = "WARNING" actor { default-dispatcher { core-pool-size = 4 diff --git a/akka-testkit/src/test/scala/akka/testkit/TestTimeSpec.scala b/akka-testkit/src/test/scala/akka/testkit/TestTimeSpec.scala index ff02a6c82b..0d0bab20b6 100644 --- a/akka-testkit/src/test/scala/akka/testkit/TestTimeSpec.scala +++ b/akka-testkit/src/test/scala/akka/testkit/TestTimeSpec.scala @@ -15,7 +15,7 @@ class TestTimeSpec extends AkkaSpec(Map("akka.test.timefactor" -> 2.0)) with Bef val now = System.nanoTime intercept[AssertionError] { probe.awaitCond(false, Duration("1 second")) } val diff = System.nanoTime - now - val target = (1000000000l * testKitExtension.settings.TestTimeFactor).toLong + val 
target = (1000000000l * testKitSettings.TestTimeFactor).toLong diff must be > (target - 300000000l) diff must be < (target + 300000000l) } diff --git a/build.sbt b/build.sbt new file mode 100644 index 0000000000..13467e1654 --- /dev/null +++ b/build.sbt @@ -0,0 +1,7 @@ + seq(lsSettings:_*) + + (LsKeys.tags in LsKeys.lsync) := Seq("actors", "stm", "concurrency", "distributed", "fault-tolerance", "scala", "java", "futures", "dataflow", "remoting") + + (externalResolvers in LsKeys.lsync) := Seq("Akka Repository" at "http://akka.io/repository/") + + (description in LsKeys.lsync) := "Akka is the platform for the next generation of event-driven, scalable and fault-tolerant architectures on the JVM." diff --git a/project/AkkaBuild.scala b/project/AkkaBuild.scala index dd09fe3c80..fdc8ecd877 100644 --- a/project/AkkaBuild.scala +++ b/project/AkkaBuild.scala @@ -1,11 +1,18 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. + */ + package akka import sbt._ import Keys._ + import com.typesafe.sbtmultijvm.MultiJvmPlugin -import MultiJvmPlugin.{ MultiJvm, extraOptions, jvmOptions, scalatestOptions } import com.typesafe.sbtscalariform.ScalariformPlugin + +import MultiJvmPlugin.{ MultiJvm, extraOptions, jvmOptions, scalatestOptions } import ScalariformPlugin.{ format, formatPreferences, formatSourceDirectories } + import java.lang.Boolean.getBoolean object AkkaBuild extends Build { @@ -26,7 +33,6 @@ object AkkaBuild extends Build { rstdocDirectory <<= baseDirectory / "akka-docs" ), aggregate = Seq(actor, testkit, actorTests, stm, remote, slf4j, amqp, mailboxes, akkaSbtPlugin, samples, tutorials, docs) - //aggregate = Seq(cluster, mailboxes, camel, camelTyped) ) lazy val actor = Project( @@ -88,23 +94,6 @@ object AkkaBuild extends Build { ) ) configs (MultiJvm) - // lazy val cluster = Project( - // id = "akka-cluster", - // base = file("akka-cluster"), - // dependencies = Seq(stm, actorTests % "test->test", testkit % "test"), - // settings = defaultSettings ++ multiJvmSettings ++ Seq( - // libraryDependencies ++= Dependencies.cluster, - // extraOptions in MultiJvm <<= (sourceDirectory in MultiJvm) { src => - // (name: String) => (src ** (name + ".conf")).get.headOption.map("-Dakka.config=" + _.absolutePath).toSeq - // }, - // scalatestOptions in MultiJvm := Seq("-r", "org.scalatest.akka.QuietReporter"), - // jvmOptions in MultiJvm := { - // if (getBoolean("sbt.log.noformat")) Seq("-Dakka.test.nocolor=true") else Nil - // }, - // test in Test <<= (test in Test) dependsOn (test in MultiJvm) - // ) - // ) configs (MultiJvm) - lazy val slf4j = Project( id = "akka-slf4j", base = file("akka-slf4j"), @@ -173,7 +162,7 @@ object AkkaBuild extends Build { testOptions in Test <+= testRedisMailbox map { test => Tests.Filter(s => test) } ) ) - + lazy val zookeeperMailbox = Project( id = "akka-zookeeper-mailbox", base = file("akka-durable-mailboxes/akka-zookeeper-mailbox"), @@ -196,23 +185,6 @@ object AkkaBuild extends Build { ) ) - // lazy val camel = Project( - // id = "akka-camel", - // base = file("akka-camel"), - // dependencies = Seq(actor, slf4j, testkit % "test"), - // settings = defaultSettings ++ Seq( - // libraryDependencies ++= Dependencies.camel - // ) - // ) - - // can be merged back into akka-camel - // lazy val camelTyped = Project( - // id = "akka-camel-typed", - // base = file("akka-camel-typed"), - // dependencies = Seq(camel % "compile;test->test", testkit % "test"), - // settings = defaultSettings - // ) - // lazy val spring = Project( // id = "akka-spring", // base = file("akka-spring"), @@ -244,23 
+216,8 @@ object AkkaBuild extends Build { base = file("akka-samples"), settings = parentSettings, aggregate = Seq(fsmSample) - // aggregate = Seq(fsmSample, camelSample) ) - // lazy val antsSample = Project( - // id = "akka-sample-ants", - // base = file("akka-samples/akka-sample-ants"), - // dependencies = Seq(stm), - // settings = defaultSettings - // ) - - // lazy val chatSample = Project( - // id = "akka-sample-chat", - // base = file("akka-samples/akka-sample-chat"), - // dependencies = Seq(cluster), - // settings = defaultSettings - // ) - lazy val fsmSample = Project( id = "akka-sample-fsm", base = file("akka-samples/akka-sample-fsm"), @@ -268,29 +225,6 @@ object AkkaBuild extends Build { settings = defaultSettings ) - // lazy val camelSample = Project( - // id = "akka-sample-camel", - // base = file("akka-samples/akka-sample-camel"), - // dependencies = Seq(actor, camelTyped, testkit % "test"), - // settings = defaultSettings ++ Seq( - // libraryDependencies ++= Dependencies.sampleCamel - // ) - // ) - - // lazy val helloSample = Project( - // id = "akka-sample-hello", - // base = file("akka-samples/akka-sample-hello"), - // dependencies = Seq(kernel), - // settings = defaultSettings - // ) - - // lazy val remoteSample = Project( - // id = "akka-sample-remote", - // base = file("akka-samples/akka-sample-remote"), - // dependencies = Seq(cluster), - // settings = defaultSettings - // ) - lazy val tutorials = Project( id = "akka-tutorials", base = file("akka-tutorials"), @@ -333,12 +267,11 @@ object AkkaBuild extends Build { publishArtifact in Compile := false ) - val testExcludes = SettingKey[Seq[String]]("test-excludes") + val excludeTestNames = SettingKey[Seq[String]]("exclude-test-names") + val excludeTestTags = SettingKey[Seq[String]]("exclude-test-tags") + val includeTestTags = SettingKey[Seq[String]]("include-test-tags") - def akkaTestExcludes: Seq[String] = { - val exclude = System.getProperty("akka.test.exclude", "") - if (exclude.isEmpty) Seq.empty else exclude.split(",").toSeq - } + val defaultExcludedTags = Seq("timing") lazy val defaultSettings = baseSettings ++ formatSettings ++ Seq( resolvers += "Typesafe Repo" at "http://repo.typesafe.com/typesafe/releases/", @@ -356,9 +289,37 @@ object AkkaBuild extends Build { // disable parallel tests parallelExecution in Test := false, - // for excluding tests in jenkins builds (-Dakka.test.exclude=TimingSpec) - testExcludes := akkaTestExcludes, - testOptions in Test <++= testExcludes map { _.map(exclude => Tests.Filter(test => !test.contains(exclude))) }, + // for excluding tests by name (or use system property: -Dakka.test.names.exclude=TimingSpec) + excludeTestNames := { + val exclude = System.getProperty("akka.test.names.exclude", "") + if (exclude.isEmpty) Seq.empty else exclude.split(",").toSeq + }, + + // for excluding tests by tag (or use system property: -Dakka.test.tags.exclude=timing) + excludeTestTags := { + val exclude = System.getProperty("akka.test.tags.exclude", "") + if (exclude.isEmpty) defaultExcludedTags else exclude.split(",").toSeq + }, + + // for including tests by tag (or use system property: -Dakka.test.tags.include=timing) + includeTestTags := { + val include = System.getProperty("akka.test.tags.include", "") + if (include.isEmpty) Seq.empty else include.split(",").toSeq + }, + + // add filters for tests excluded by name + testOptions in Test <++= excludeTestNames map { _.map(exclude => Tests.Filter(test => !test.contains(exclude))) }, + + // add arguments for tests excluded by tag - includes override 
excludes (opposite to scalatest) + testOptions in Test <++= (excludeTestTags, includeTestTags) map { (excludes, includes) => + val tags = (excludes.toSet -- includes.toSet).toSeq + if (tags.isEmpty) Seq.empty else Seq(Tests.Argument("-l", tags.mkString(" "))) + }, + + // add arguments for tests included by tag + testOptions in Test <++= includeTestTags map { tags => + if (tags.isEmpty) Seq.empty else Seq(Tests.Argument("-n", tags.mkString(" "))) + }, // show full stack traces testOptions in Test += Tests.Argument("-oF") @@ -424,7 +385,7 @@ object Dependencies { val amqp = Seq(rabbit, commonsIo, protobuf) val mailboxes = Seq(Test.scalatest, Test.junit) - + val fileMailbox = Seq(Test.scalatest, Test.junit) val beanstalkMailbox = Seq(beanstalk, Test.junit) @@ -432,13 +393,10 @@ object Dependencies { val redisMailbox = Seq(redis, Test.junit) val mongoMailbox = Seq(mongoAsync, twttrUtilCore, Test.junit) - + val zookeeperMailbox = Seq(zookeeper, Test.junit) -// val camel = Seq(camelCore, Test.junit, Test.scalatest, Test.logback) - val spring = Seq(springBeans, springContext, Test.junit, Test.scalatest) -// val spring = Seq(springBeans, springContext, camelSpring, Test.junit, Test.scalatest) val kernel = Seq( jettyUtil, jettyXml, jettyServlet, jacksonCore, staxApi @@ -466,7 +424,7 @@ object Dependency { val Netty = "3.2.5.Final" val Protobuf = "2.4.1" val Scalatest = "1.6.1" - val Slf4j = "1.6.0" + val Slf4j = "1.6.4" val Spring = "3.0.5.RELEASE" val Zookeeper = "3.4.0" val Rabbit = "2.3.1" diff --git a/project/plugins.sbt b/project/plugins.sbt index 8b6d17a0c3..7140718543 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -4,3 +4,9 @@ resolvers += Classpaths.typesafeResolver addSbtPlugin("com.typesafe.sbtmultijvm" % "sbt-multi-jvm" % "0.1.7") addSbtPlugin("com.typesafe.sbtscalariform" % "sbt-scalariform" % "0.1.4") + +resolvers ++= Seq( + "less is" at "http://repo.lessis.me", + "coda" at "http://repo.codahale.com") + +addSbtPlugin("me.lessis" % "ls-sbt" % "0.1.0")
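
For reference, a minimal standalone sketch (not part of this patch) of the MDC capture pattern that the reworked Slf4jEventHandler uses: the logging thread's name is stored under the "sourceThread" key for the duration of the log statement, so a logback pattern such as %X{sourceThread} can render it. The object and logger names below are illustrative only.

import org.slf4j.{ Logger, LoggerFactory, MDC }

object MdcExample {
  private val log: Logger = LoggerFactory.getLogger("MdcExample")

  // Put the attribute into SLF4J's Mapped Diagnostic Context only while the
  // log statement runs, then remove it again (mirrors withMdc in the handler).
  def withMdc(name: String, value: String)(logStatement: => Unit): Unit = {
    MDC.put(name, value)
    try logStatement
    finally MDC.remove(name)
  }

  def main(args: Array[String]): Unit =
    withMdc("sourceThread", Thread.currentThread.getName) {
      log.info("message logged with sourceThread available in the MDC")
    }
}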
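
Likewise, a small usage sketch (assuming an ActorSystem is already in scope) of the new ExtensionId-based TestKitExtension: callers now read the settings fields directly from the looked-up TestKitSettings instead of going through an intermediate settings member. The helper object below is hypothetical.

import akka.actor.ActorSystem
import akka.testkit.TestKitExtension

object TestKitSettingsExample {
  // Looks up the TestKitSettings extension for the given system and reports
  // the configuration values exposed by this patch.
  def describe(system: ActorSystem): String = {
    val settings = TestKitExtension(system) // yields TestKitSettings directly
    "timefactor=" + settings.TestTimeFactor +
      ", single-expect-default=" + settings.SingleExpectDefaultTimeout +
      ", filter-leeway=" + settings.TestEventFilterLeeway
  }
}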