Merge branch 'master' into samples-henrikengstrom

Conflicts:
	akka-actor/src/main/scala/akka/routing/Routing.scala
	akka-samples/akka-sample-ants/README.md
	akka-samples/akka-sample-ants/src/main/scala/Ants.scala
	akka-samples/akka-sample-camel/src/main/scala/sample/camel/Actors.scala
	akka-samples/akka-sample-camel/src/main/scala/sample/camel/Boot.scala
	akka-samples/akka-sample-camel/src/main/scala/sample/camel/ClientApplication.scala
	akka-samples/akka-sample-camel/src/main/scala/sample/camel/ServerApplication.scala
	akka-samples/akka-sample-camel/src/main/scala/sample/camel/StandaloneApplication.scala
	akka-samples/akka-sample-chat/src/main/scala/ChatServer.scala
	akka-samples/akka-sample-osgi/src/main/scala/OsgiExample.scala
	akka-samples/akka-sample-remote/src/main/scala/ServerManagedRemoteActorSample.scala
	project/AkkaBuild.scala
Henrik Engstrom, 2011-11-30 11:05:13 +01:00
commit e4ea7ac5d6
98 changed files with 1860 additions and 2546 deletions

.gitignore (vendored)

@ -1,3 +1,4 @@
*.vim
*~
*#
src_managed


@ -13,33 +13,34 @@ import static org.junit.Assert.*;
public class JavaExtension {
static class TestExtension implements Extension<TestExtension> {
private ActorSystemImpl system;
public static ExtensionKey<TestExtension> key = new ExtensionKey<TestExtension>() {
};
static class Provider implements ExtensionIdProvider {
public ExtensionId<TestExtension> lookup() { return defaultInstance; }
}
public ExtensionKey<TestExtension> key() {
return key;
}
public final static TestExtensionId defaultInstance = new TestExtensionId();
public void init(ActorSystemImpl system) {
this.system = system;
}
public ActorSystemImpl getSystem() {
return system;
static class TestExtensionId extends AbstractExtensionId<TestExtension> {
public TestExtension createExtension(ActorSystemImpl i) {
return new TestExtension(i);
}
}
private Config c = ConfigFactory.parseString("akka.extensions = [ \"akka.actor.JavaExtension$TestExtension\" ]",
static class TestExtension implements Extension {
public final ActorSystemImpl system;
public TestExtension(ActorSystemImpl i) {
system = i;
}
}
private Config c = ConfigFactory.parseString("akka.extensions = [ \"akka.actor.JavaExtension$Provider\" ]",
ConfigParseOptions.defaults());
private ActorSystem system = ActorSystem.create("JavaExtension", c);
@Test
public void mustBeAccessible() {
final ActorSystemImpl s = system.extension(TestExtension.key).getSystem();
assertSame(s, system);
assertSame(system.extension(defaultInstance).system, system);
assertSame(defaultInstance.apply(system).system, system);
}
}


@ -1,30 +0,0 @@
package akka.docs.config
import org.scalatest.WordSpec
import org.scalatest.matchers.MustMatchers
import com.typesafe.config.ConfigFactory
import com.typesafe.config.ConfigParseOptions
import akka.actor.ActorSystem
//#imports
class ConfigDocSpec extends WordSpec {
"programmatically configure ActorSystem" in {
//#custom-config
val customConf = ConfigFactory.parseString("""
akka.actor.deployment {
/app/my-service {
router = round-robin
nr-of-instances = 3
}
}
""", ConfigParseOptions.defaults)
val system = ActorSystem("MySystem", ConfigFactory.systemProperties.withFallback(customConf))
//#custom-config
system.stop()
}
}


@ -247,7 +247,7 @@ class ActorRefSpec extends AkkaSpec {
out.flush
out.close
Serialization.system.withValue(system.asInstanceOf[ActorSystemImpl]) {
Serialization.currentSystem.withValue(system.asInstanceOf[ActorSystemImpl]) {
val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray))
val readA = in.readObject
@ -275,7 +275,7 @@ class ActorRefSpec extends AkkaSpec {
(intercept[java.lang.IllegalStateException] {
in.readObject
}).getMessage must be === "Trying to deserialize a serialized ActorRef without an ActorSystem in scope." +
" Use akka.serialization.Serialization.system.withValue(system) { ... }"
" Use akka.serialization.Serialization.currentSystem.withValue(system) { ... }"
}
"must throw exception on deserialize if not present in actor hierarchy (and remoting is not enabled)" in {
@ -292,7 +292,7 @@ class ActorRefSpec extends AkkaSpec {
out.flush
out.close
Serialization.system.withValue(system.asInstanceOf[ActorSystemImpl]) {
Serialization.currentSystem.withValue(system.asInstanceOf[ActorSystemImpl]) {
val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray))
(intercept[java.lang.IllegalStateException] {
in.readObject
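
The rename from Serialization.system to Serialization.currentSystem recurs in several specs below. As a rough sketch of the round-trip these tests perform (plain JDK serialization, with the owning system put in scope so readResolve can resolve the ref again; actorRef and system here stand for any local ref and its ActorSystemImpl):

    import java.io._
    import akka.actor.{ ActorRef, ActorSystemImpl }
    import akka.serialization.Serialization

    val baos = new ByteArrayOutputStream(8192)
    val out = new ObjectOutputStream(baos)
    out.writeObject(actorRef)   // presumably replaced by a SerializedActorRef on the wire
    out.close()

    // Deserializing without an ActorSystem in scope throws IllegalStateException,
    // as the test above asserts; withValue provides that scope.
    Serialization.currentSystem.withValue(system.asInstanceOf[ActorSystemImpl]) {
      val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray))
      val readBack = in.readObject().asInstanceOf[ActorRef]
    }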


@ -10,28 +10,23 @@ import com.typesafe.config.ConfigFactory
class JavaExtensionSpec extends JavaExtension with JUnitSuite
object ActorSystemSpec {
class TestExtension extends Extension[TestExtension] {
var system: ActorSystemImpl = _
def key = TestExtension
def init(system: ActorSystemImpl) {
this.system = system
}
object TestExtension extends ExtensionId[TestExtension] with ExtensionIdProvider {
def lookup = this
def createExtension(s: ActorSystemImpl) = new TestExtension(s)
}
object TestExtension extends ExtensionKey[TestExtension]
class TestExtension(val system: ActorSystemImpl) extends Extension
}
class ActorSystemSpec extends AkkaSpec("""akka.extensions = ["akka.actor.ActorSystemSpec$TestExtension"]""") {
class ActorSystemSpec extends AkkaSpec("""akka.extensions = ["akka.actor.ActorSystemSpec$TestExtension$"]""") {
import ActorSystemSpec._
"An ActorSystem" must {
"support extensions" in {
TestExtension(system).system must be === system
system.extension(TestExtension).system must be === system
system.hasExtension(TestExtension) must be(true)
}
}
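
For reference, this commit replaces the old ExtensionKey/Extension[T] protocol with ExtensionId plus ExtensionIdProvider, as exercised by the spec above. A minimal sketch of the new shape, using the pre-2.0 ActorSystemImpl signature that appears in this diff (CounterExtension and Counter are illustrative names, not part of the commit):

    import java.util.concurrent.atomic.AtomicLong
    import akka.actor._

    // One object acts as both the lookup key (ExtensionId) and the
    // config-loadable provider (ExtensionIdProvider).
    object CounterExtension extends ExtensionId[Counter] with ExtensionIdProvider {
      def lookup = CounterExtension
      def createExtension(s: ActorSystemImpl) = new Counter(s)
    }

    // The payload, created once per ActorSystem.
    class Counter(val system: ActorSystemImpl) extends Extension {
      private val n = new AtomicLong
      def next(): Long = n.incrementAndGet()
    }

    // Usage mirrors the assertions above:
    //   CounterExtension(system).next()
    //   system.extension(CounterExtension)
    //   system.hasExtension(CounterExtension)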


@ -28,7 +28,7 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach {
val tickActor = actorOf(new Actor {
def receive = { case Tick countDownLatch.countDown() }
})
// run every 50 millisec
// run every 50 milliseconds
collectCancellable(system.scheduler.schedule(tickActor, Tick, 0 milliseconds, 50 milliseconds))
// after max 1 second it should be executed at least the 3 times already
@ -42,6 +42,16 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach {
assert(countDownLatch2.await(2, TimeUnit.SECONDS))
}
"should stop continuous scheduling if the receiving actor has been terminated" in {
// run immediately and then every 100 milliseconds
collectCancellable(system.scheduler.schedule(testActor, "msg", 0 milliseconds, 100 milliseconds))
// stop the actor and, hence, the continuous messaging from happening
testActor ! PoisonPill
expectNoMsg(500 milliseconds)
}
"schedule once" in {
case object Tick
val countDownLatch = new CountDownLatch(3)
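
The new test relies on continuous scheduling stopping once the target actor terminates. A small usage sketch of the scheduler calls involved (testActor stands for whatever ActorRef should receive the ticks):

    import akka.actor.PoisonPill
    import akka.util.duration._

    // Send "msg" immediately and then every 100 milliseconds until cancelled,
    // or until the receiver terminates (the behaviour this commit adds).
    val cancellable = system.scheduler.schedule(testActor, "msg", 0 milliseconds, 100 milliseconds)

    // Terminate the receiver; the scheduler then stops rescheduling and
    // publishes a warning instead of sending to a dead actor.
    testActor ! PoisonPill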


@ -5,7 +5,6 @@ package akka.actor
*/
import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach }
import akka.japi.{ Option JOption }
import akka.util.Duration
import akka.util.duration._
import akka.dispatch.{ Dispatchers, Future, KeptPromise }
@ -14,6 +13,9 @@ import java.util.concurrent.atomic.AtomicReference
import annotation.tailrec
import akka.testkit.{ EventFilter, filterEvents, AkkaSpec }
import akka.serialization.SerializationExtension
import akka.actor.TypedActor.{ PostRestart, PreRestart, PostStop, PreStart }
import java.util.concurrent.{ TimeUnit, CountDownLatch }
import akka.japi.{ Creator, Option JOption }
object TypedActorSpec {
@ -135,6 +137,23 @@ object TypedActorSpec {
class StackedImpl extends Stacked {
override def stacked: String = "FOOBAR" //Uppercase
}
trait LifeCycles {
def crash(): Unit
}
class LifeCyclesImpl(val latch: CountDownLatch) extends PreStart with PostStop with PreRestart with PostRestart with LifeCycles {
override def crash(): Unit = throw new IllegalStateException("Crash!")
override def preStart(): Unit = latch.countDown()
override def postStop(): Unit = for (i 1 to 3) latch.countDown()
override def preRestart(reason: Throwable, message: Option[Any]): Unit = for (i 1 to 5) latch.countDown()
override def postRestart(reason: Throwable): Unit = for (i 1 to 7) latch.countDown()
}
}
@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
@ -148,18 +167,18 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte
newFooBar(Props().withTimeout(Timeout(d)))
def newFooBar(props: Props): Foo =
system.typedActorOf(classOf[Foo], classOf[Bar], props)
TypedActor(system).typedActorOf(classOf[Foo], classOf[Bar], props)
def newStacked(props: Props = Props().withTimeout(Timeout(2000))): Stacked =
system.typedActorOf(classOf[Stacked], classOf[StackedImpl], props)
TypedActor(system).typedActorOf(classOf[Stacked], classOf[StackedImpl], props)
def mustStop(typedActor: AnyRef) = system.typedActor.stop(typedActor) must be(true)
def mustStop(typedActor: AnyRef) = TypedActor(system).stop(typedActor) must be(true)
"TypedActors" must {
"be able to instantiate" in {
val t = newFooBar
system.typedActor.isTypedActor(t) must be(true)
TypedActor(system).isTypedActor(t) must be(true)
mustStop(t)
}
@ -169,7 +188,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte
}
"not stop non-started ones" in {
system.typedActor.stop(null) must be(false)
TypedActor(system).stop(null) must be(false)
}
"throw an IllegalStateExcpetion when TypedActor.self is called in the wrong scope" in {
@ -188,7 +207,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte
"be able to call toString" in {
val t = newFooBar
t.toString must be(system.typedActor.getActorRefFor(t).toString)
t.toString must be(TypedActor(system).getActorRefFor(t).toString)
mustStop(t)
}
@ -201,7 +220,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte
"be able to call hashCode" in {
val t = newFooBar
t.hashCode must be(system.typedActor.getActorRefFor(t).hashCode)
t.hashCode must be(TypedActor(system).getActorRefFor(t).hashCode)
mustStop(t)
}
@ -264,7 +283,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte
"be able to handle exceptions when calling methods" in {
filterEvents(EventFilter[IllegalStateException]("expected")) {
val boss = actorOf(Props(context {
case p: Props context.sender ! context.typedActorOf(classOf[Foo], classOf[Bar], p)
case p: Props context.sender ! TypedActor(context).typedActorOf(classOf[Foo], classOf[Bar], p)
}).withFaultHandler(OneForOneStrategy {
case e: IllegalStateException if e.getMessage == "expected" FaultHandlingStrategy.Resume
}))
@ -296,7 +315,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte
}
"be able to support implementation only typed actors" in {
val t = system.typedActorOf[Foo, Bar](Props())
val t = TypedActor(system).typedActorOf[Foo, Bar](Props())
val f = t.futurePigdog(200)
val f2 = t.futurePigdog(0)
f2.isCompleted must be(false)
@ -306,7 +325,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte
}
"be able to support implementation only typed actors with complex interfaces" in {
val t = system.typedActorOf[Stackable1 with Stackable2, StackedImpl]()
val t = TypedActor(system).typedActorOf[Stackable1 with Stackable2, StackedImpl]()
t.stackable1 must be("foo")
t.stackable2 must be("bar")
mustStop(t)
@ -333,17 +352,16 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte
"be able to serialize and deserialize invocations" in {
import java.io._
val serialization = SerializationExtension(system).serialization
val m = TypedActor.MethodCall(serialization, classOf[Foo].getDeclaredMethod("pigdog"), Array[AnyRef]())
val baos = new ByteArrayOutputStream(8192 * 4)
val out = new ObjectOutputStream(baos)
Serialization.currentSystem.withValue(system.asInstanceOf[ActorSystemImpl]) {
val m = TypedActor.MethodCall(classOf[Foo].getDeclaredMethod("pigdog"), Array[AnyRef]())
val baos = new ByteArrayOutputStream(8192 * 4)
val out = new ObjectOutputStream(baos)
out.writeObject(m)
out.close()
out.writeObject(m)
out.close()
val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray))
val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray))
Serialization.system.withValue(system.asInstanceOf[ActorSystemImpl]) {
val mNew = in.readObject().asInstanceOf[TypedActor.MethodCall]
mNew.method must be(m.method)
@ -353,17 +371,16 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte
"be able to serialize and deserialize invocations' parameters" in {
import java.io._
val someFoo: Foo = new Bar
val serialization = SerializationExtension(system).serialization
val m = TypedActor.MethodCall(serialization, classOf[Foo].getDeclaredMethod("testMethodCallSerialization", Array[Class[_]](classOf[Foo], classOf[String], classOf[Int]): _*), Array[AnyRef](someFoo, null, 1.asInstanceOf[AnyRef]))
val baos = new ByteArrayOutputStream(8192 * 4)
val out = new ObjectOutputStream(baos)
Serialization.currentSystem.withValue(system.asInstanceOf[ActorSystemImpl]) {
val m = TypedActor.MethodCall(classOf[Foo].getDeclaredMethod("testMethodCallSerialization", Array[Class[_]](classOf[Foo], classOf[String], classOf[Int]): _*), Array[AnyRef](someFoo, null, 1.asInstanceOf[AnyRef]))
val baos = new ByteArrayOutputStream(8192 * 4)
val out = new ObjectOutputStream(baos)
out.writeObject(m)
out.close()
out.writeObject(m)
out.close()
val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray))
val in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray))
Serialization.system.withValue(system.asInstanceOf[ActorSystemImpl]) {
val mNew = in.readObject().asInstanceOf[TypedActor.MethodCall]
mNew.method must be(m.method)
@ -375,5 +392,14 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte
mNew.parameters(2).asInstanceOf[Int] must be === 1
}
}
"be able to override lifecycle callbacks" in {
val latch = new CountDownLatch(16)
val ta = TypedActor(system)
val t: LifeCycles = ta.typedActorOf(classOf[LifeCycles], new Creator[LifeCyclesImpl] { def create = new LifeCyclesImpl(latch) }, Props())
t.crash()
ta.poisonPill(t)
latch.await(10, TimeUnit.SECONDS) must be === true
}
}
}
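
Throughout this spec the commit swaps system.typedActor and system.typedActorOf for the TypedActor extension. A condensed sketch of the new call sites, following the patterns above (Foo and Bar are the spec's own interface and implementation):

    val ta = TypedActor(system)            // TypedActor is now looked up as an extension
    val t: Foo = ta.typedActorOf(classOf[Foo], classOf[Bar], Props())

    ta.isTypedActor(t)      // true for proxies created this way
    ta.getActorRefFor(t)    // the backing ActorRef
    ta.stop(t)              // true if it was a started typed actor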


@ -0,0 +1,228 @@
package akka.performance.microbench
import akka.performance.workbench.PerformanceSpec
import org.apache.commons.math.stat.descriptive.DescriptiveStatistics
import akka.actor._
import java.util.concurrent.{ ThreadPoolExecutor, CountDownLatch, TimeUnit }
import akka.dispatch._
import java.util.concurrent.ThreadPoolExecutor.AbortPolicy
import java.util.concurrent.BlockingQueue
import java.util.concurrent.LinkedBlockingQueue
import akka.util.Duration
import akka.util.duration._
// -server -Xms512M -Xmx1024M -XX:+UseParallelGC -Dbenchmark=true -Dbenchmark.repeatFactor=500
@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
class TellThroughput10000PerformanceSpec extends PerformanceSpec {
import TellThroughput10000PerformanceSpec._
/* Experiment with java 7 LinkedTransferQueue
def linkedTransferQueue(): () BlockingQueue[Runnable] =
() new java.util.concurrent.LinkedTransferQueue[Runnable]()
def createDispatcher(name: String) = {
val threadPoolConfig = ThreadPoolConfig()
ThreadPoolConfigDispatcherBuilder(config
new Dispatcher(system.dispatcherFactory.prerequisites, name, 5,
0, UnboundedMailbox(), config, 60000), threadPoolConfig)
//.withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity
.copy(config = threadPoolConfig.copy(queueFactory = linkedTransferQueue()))
.setCorePoolSize(maxClients * 2)
.build
}
*/
def createDispatcher(name: String) = ThreadPoolConfigDispatcherBuilder(config
new Dispatcher(system.dispatcherFactory.prerequisites, name, 10000,
Duration.Zero, UnboundedMailbox(), config, Duration(60, TimeUnit.SECONDS)), ThreadPoolConfig())
.withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity
.setCorePoolSize(maxClients * 2)
.build
val clientDispatcher = createDispatcher("client-dispatcher")
//val destinationDispatcher = createDispatcher("destination-dispatcher")
override def atTermination {
super.atTermination()
System.out.println("Cleaning up after TellThroughputPerformanceSpec")
clientDispatcher.shutdown()
//destinationDispatcher.shutdown()
}
val repeat = 30000L * repeatFactor
"Tell" must {
"warmup" in {
runScenario(4, warmup = true)
}
"warmup more" in {
runScenario(4, warmup = true)
}
"perform with load 1" in {
runScenario(1)
}
"perform with load 2" in {
runScenario(2)
}
"perform with load 4" in {
runScenario(4)
}
"perform with load 6" in {
runScenario(6)
}
"perform with load 8" in {
runScenario(8)
}
"perform with load 10" in {
runScenario(10)
}
"perform with load 12" in {
runScenario(12)
}
"perform with load 14" in {
runScenario(14)
}
"perform with load 16" in {
runScenario(16)
}
"perform with load 18" in {
runScenario(18)
}
"perform with load 20" in {
runScenario(20)
}
"perform with load 22" in {
runScenario(22)
}
"perform with load 24" in {
runScenario(24)
}
"perform with load 26" in {
runScenario(26)
}
"perform with load 28" in {
runScenario(28)
}
"perform with load 30" in {
runScenario(30)
}
"perform with load 32" in {
runScenario(32)
}
"perform with load 34" in {
runScenario(34)
}
"perform with load 36" in {
runScenario(36)
}
"perform with load 38" in {
runScenario(38)
}
"perform with load 40" in {
runScenario(40)
}
"perform with load 42" in {
runScenario(42)
}
"perform with load 44" in {
runScenario(44)
}
"perform with load 46" in {
runScenario(46)
}
"perform with load 48" in {
runScenario(48)
}
def runScenario(numberOfClients: Int, warmup: Boolean = false) {
if (acceptClients(numberOfClients)) {
val latch = new CountDownLatch(numberOfClients)
val repeatsPerClient = repeat / numberOfClients
/*
val destinations = for (i 0 until numberOfClients)
yield system.actorOf(Props(new Destination).withDispatcher(createDispatcher("destination-" + i)))
val clients = for ((dest, j) destinations.zipWithIndex)
yield system.actorOf(Props(new Client(dest, latch, repeatsPerClient)).withDispatcher(createDispatcher("client-" + j)))
*/
val destinations = for (i 0 until numberOfClients)
yield system.actorOf(Props(new Destination).withDispatcher(clientDispatcher))
val clients = for ((dest, j) destinations.zipWithIndex)
yield system.actorOf(Props(new Client(dest, latch, repeatsPerClient)).withDispatcher(clientDispatcher))
val start = System.nanoTime
clients.foreach(_ ! Run)
val ok = latch.await((5000000 + 500 * repeat) * timeDilation, TimeUnit.MICROSECONDS)
val durationNs = (System.nanoTime - start)
if (!ok) {
System.err.println("Destinations: ")
destinations.foreach {
case l: LocalActorRef
val m = l.underlying.mailbox
System.err.println(" -" + l + " mbox(" + m.status + ")" + " containing [" + Stream.continually(m.dequeue()).takeWhile(_ != null).mkString(", ") + "] and has systemMsgs: " + m.hasSystemMessages)
}
System.err.println("")
System.err.println("Clients: ")
clients.foreach {
case l: LocalActorRef
val m = l.underlying.mailbox
System.err.println(" -" + l + " mbox(" + m.status + ")" + " containing [" + Stream.continually(m.dequeue()).takeWhile(_ != null).mkString(", ") + "] and has systemMsgs: " + m.hasSystemMessages)
}
//val e = clientDispatcher.asInstanceOf[Dispatcher].executorService.get().asInstanceOf[ExecutorServiceDelegate].executor.asInstanceOf[ThreadPoolExecutor]
//val q = e.getQueue
//System.err.println("Client Dispatcher: " + e.getActiveCount + " " + Stream.continually(q.poll()).takeWhile(_ != null).mkString(", "))
}
if (!warmup) {
ok must be(true)
logMeasurement(numberOfClients, durationNs, repeat)
}
clients.foreach(_ ! PoisonPill)
destinations.foreach(_ ! PoisonPill)
}
}
}
}
object TellThroughput10000PerformanceSpec {
case object Run
case object Msg
class Destination extends Actor {
def receive = {
case Msg sender ! Msg
}
}
class Client(
actor: ActorRef,
latch: CountDownLatch,
repeat: Long) extends Actor {
var sent = 0L
var received = 0L
def receive = {
case Msg
received += 1
if (sent < repeat) {
actor ! Msg
sent += 1
} else if (received >= repeat) {
latch.countDown()
}
case Run
for (i 0L until math.min(20000L, repeat)) {
actor ! Msg
sent += 1
}
}
}
}


@ -0,0 +1,226 @@
package akka.performance.microbench
import akka.performance.workbench.PerformanceSpec
import akka.actor._
import java.util.concurrent.{ ThreadPoolExecutor, CountDownLatch, TimeUnit }
import akka.dispatch._
import akka.util.Duration
import akka.util.duration._
// -server -Xms512M -Xmx1024M -XX:+UseParallelGC -Dbenchmark=true -Dbenchmark.repeatFactor=500
@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
class TellThroughputComputationPerformanceSpec extends PerformanceSpec {
import TellThroughputComputationPerformanceSpec._
def createDispatcher(name: String) = ThreadPoolConfigDispatcherBuilder(config
new Dispatcher(system.dispatcherFactory.prerequisites, name, 5,
Duration.Zero, UnboundedMailbox(), config, 60 seconds), ThreadPoolConfig())
.withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity
.setCorePoolSize(maxClients)
.build
val clientDispatcher = createDispatcher("client-dispatcher")
val destinationDispatcher = createDispatcher("destination-dispatcher")
override def atTermination {
super.atTermination()
System.out.println("Cleaning up after TellThroughputComputationPerformanceSpec")
clientDispatcher.shutdown()
destinationDispatcher.shutdown()
}
val repeat = 500L * repeatFactor
"Tell" must {
"warmup" in {
runScenario(4, warmup = true)
}
// "warmup more" in {
// runScenario(4, warmup = true)
// }
"perform with load 1" in {
runScenario(1)
}
"perform with load 2" in {
runScenario(2)
}
"perform with load 4" in {
runScenario(4)
}
"perform with load 6" in {
runScenario(6)
}
"perform with load 8" in {
runScenario(8)
}
"perform with load 10" in {
runScenario(10)
}
"perform with load 12" in {
runScenario(12)
}
"perform with load 14" in {
runScenario(14)
}
"perform with load 16" in {
runScenario(16)
}
"perform with load 18" in {
runScenario(18)
}
"perform with load 20" in {
runScenario(20)
}
"perform with load 22" in {
runScenario(22)
}
"perform with load 24" in {
runScenario(24)
}
"perform with load 26" in {
runScenario(26)
}
"perform with load 28" in {
runScenario(28)
}
"perform with load 30" in {
runScenario(30)
}
"perform with load 32" in {
runScenario(32)
}
"perform with load 34" in {
runScenario(34)
}
"perform with load 36" in {
runScenario(36)
}
"perform with load 38" in {
runScenario(38)
}
"perform with load 40" in {
runScenario(40)
}
"perform with load 42" in {
runScenario(42)
}
"perform with load 44" in {
runScenario(44)
}
"perform with load 46" in {
runScenario(46)
}
"perform with load 48" in {
runScenario(48)
}
def runScenario(numberOfClients: Int, warmup: Boolean = false) {
if (acceptClients(numberOfClients)) {
val latch = new CountDownLatch(numberOfClients)
val repeatsPerClient = repeat / numberOfClients
val destinations = for (i 0 until numberOfClients)
yield system.actorOf(Props(new Destination).withDispatcher(destinationDispatcher))
val clients = for (dest destinations)
yield system.actorOf(Props(new Client(dest, latch, repeatsPerClient)).withDispatcher(clientDispatcher))
val start = System.nanoTime
clients.foreach(_ ! Run)
val ok = latch.await((5000000 + 500 * repeat) * timeDilation, TimeUnit.MICROSECONDS)
val durationNs = (System.nanoTime - start)
if (!ok) {
System.err.println("Destinations: ")
destinations.foreach {
case l: LocalActorRef
val m = l.underlying.mailbox
System.err.println(" -" + l + " mbox(" + m.status + ")" + " containing [" + Stream.continually(m.dequeue()).takeWhile(_ != null).mkString(", ") + "] and has systemMsgs: " + m.hasSystemMessages)
}
System.err.println("")
System.err.println("Clients: ")
clients.foreach {
case l: LocalActorRef
val m = l.underlying.mailbox
System.err.println(" -" + l + " mbox(" + m.status + ")" + " containing [" + Stream.continually(m.dequeue()).takeWhile(_ != null).mkString(", ") + "] and has systemMsgs: " + m.hasSystemMessages)
}
val e = clientDispatcher.asInstanceOf[Dispatcher].executorService.get().asInstanceOf[ExecutorServiceDelegate].executor.asInstanceOf[ThreadPoolExecutor]
val q = e.getQueue
System.err.println("Client Dispatcher: " + e.getActiveCount + " " + Stream.continually(q.poll()).takeWhile(_ != null).mkString(", "))
}
if (!warmup) {
ok must be(true)
logMeasurement(numberOfClients, durationNs, repeat)
}
clients.foreach(_ ! PoisonPill)
destinations.foreach(_ ! PoisonPill)
}
}
}
}
object TellThroughputComputationPerformanceSpec {
case object Run
case object Msg
trait PiComputation {
private var _pi: Double = 0.0
def pi: Double = _pi
private var currentPosition = 0L
def nrOfElements = 1000
def calculatePi(): Unit = {
_pi += calculateDecimals(currentPosition)
currentPosition += nrOfElements
}
private def calculateDecimals(start: Long): Double = {
var acc = 0.0
for (i start until (start + nrOfElements))
acc += 4.0 * (1 - (i % 2) * 2) / (2 * i + 1)
acc
}
}
class Destination extends Actor with PiComputation {
def receive = {
case Msg
calculatePi()
sender ! Msg
}
}
class Client(
actor: ActorRef,
latch: CountDownLatch,
repeat: Long) extends Actor with PiComputation {
var sent = 0L
var received = 0L
def receive = {
case Msg
received += 1
calculatePi()
if (sent < repeat) {
actor ! Msg
sent += 1
} else if (received >= repeat) {
println("PI: " + pi)
latch.countDown()
}
case Run
for (i 0L until math.min(1000L, repeat)) {
actor ! Msg
sent += 1
}
}
}
}


@ -0,0 +1,214 @@
package akka.performance.microbench
import akka.performance.workbench.PerformanceSpec
import org.apache.commons.math.stat.descriptive.DescriptiveStatistics
import akka.actor._
import java.util.concurrent.{ ThreadPoolExecutor, CountDownLatch, TimeUnit }
import akka.dispatch._
import java.util.concurrent.ThreadPoolExecutor.AbortPolicy
import java.util.concurrent.BlockingQueue
import java.util.concurrent.LinkedBlockingQueue
import akka.util.Duration
import akka.util.duration._
// -server -Xms512M -Xmx1024M -XX:+UseParallelGC -Dbenchmark=true -Dbenchmark.repeatFactor=500
@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
class TellThroughputSeparateDispatchersPerformanceSpec extends PerformanceSpec {
import TellThroughputSeparateDispatchersPerformanceSpec._
def createDispatcher(name: String) = ThreadPoolConfigDispatcherBuilder(config
new Dispatcher(system.dispatcherFactory.prerequisites, name, 5,
Duration.Zero, UnboundedMailbox(), config, Duration(60, TimeUnit.SECONDS)), ThreadPoolConfig())
.withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity
.setCorePoolSize(1)
.build
//val clientDispatcher = createDispatcher("client-dispatcher")
//val destinationDispatcher = createDispatcher("destination-dispatcher")
override def atTermination {
super.atTermination()
System.out.println("Cleaning up after TellThroughputPerformanceSpec")
//clientDispatcher.shutdown()
//destinationDispatcher.shutdown()
}
val repeat = 30000L * repeatFactor
"Tell" must {
"warmup" in {
runScenario(4, warmup = true)
}
"warmup more" in {
runScenario(4, warmup = true)
}
"perform with load 1" in {
runScenario(1)
}
"perform with load 2" in {
runScenario(2)
}
"perform with load 4" in {
runScenario(4)
}
"perform with load 6" in {
runScenario(6)
}
"perform with load 8" in {
runScenario(8)
}
"perform with load 10" in {
runScenario(10)
}
"perform with load 12" in {
runScenario(12)
}
"perform with load 14" in {
runScenario(14)
}
"perform with load 16" in {
runScenario(16)
}
"perform with load 18" in {
runScenario(18)
}
"perform with load 20" in {
runScenario(20)
}
"perform with load 22" in {
runScenario(22)
}
"perform with load 24" in {
runScenario(24)
}
"perform with load 26" in {
runScenario(26)
}
"perform with load 28" in {
runScenario(28)
}
"perform with load 30" in {
runScenario(30)
}
"perform with load 32" in {
runScenario(32)
}
"perform with load 34" in {
runScenario(34)
}
"perform with load 36" in {
runScenario(36)
}
"perform with load 38" in {
runScenario(38)
}
"perform with load 40" in {
runScenario(40)
}
"perform with load 42" in {
runScenario(42)
}
"perform with load 44" in {
runScenario(44)
}
"perform with load 46" in {
runScenario(46)
}
"perform with load 48" in {
runScenario(48)
}
def runScenario(numberOfClients: Int, warmup: Boolean = false) {
if (acceptClients(numberOfClients)) {
val latch = new CountDownLatch(numberOfClients)
val repeatsPerClient = repeat / numberOfClients
val destinations = for (i 0 until numberOfClients)
yield system.actorOf(Props(new Destination).withDispatcher(createDispatcher("destination-" + i)))
val clients = for ((dest, j) destinations.zipWithIndex)
yield system.actorOf(Props(new Client(dest, latch, repeatsPerClient)).withDispatcher(createDispatcher("client-" + j)))
/*
val destinations = for (i 0 until numberOfClients)
yield system.actorOf(Props(new Destination).withDispatcher(clientDispatcher))
val clients = for ((dest, j) destinations.zipWithIndex)
yield system.actorOf(Props(new Client(dest, latch, repeatsPerClient)).withDispatcher(clientDispatcher))
*/
val start = System.nanoTime
clients.foreach(_ ! Run)
val ok = latch.await((5000000 + 500 * repeat) * timeDilation, TimeUnit.MICROSECONDS)
val durationNs = (System.nanoTime - start)
if (!ok) {
System.err.println("Destinations: ")
destinations.foreach {
case l: LocalActorRef
val m = l.underlying.mailbox
System.err.println(" -" + l + " mbox(" + m.status + ")" + " containing [" + Stream.continually(m.dequeue()).takeWhile(_ != null).mkString(", ") + "] and has systemMsgs: " + m.hasSystemMessages)
}
System.err.println("")
System.err.println("Clients: ")
clients.foreach {
case l: LocalActorRef
val m = l.underlying.mailbox
System.err.println(" -" + l + " mbox(" + m.status + ")" + " containing [" + Stream.continually(m.dequeue()).takeWhile(_ != null).mkString(", ") + "] and has systemMsgs: " + m.hasSystemMessages)
}
//val e = clientDispatcher.asInstanceOf[Dispatcher].executorService.get().asInstanceOf[ExecutorServiceDelegate].executor.asInstanceOf[ThreadPoolExecutor]
//val q = e.getQueue
//System.err.println("Client Dispatcher: " + e.getActiveCount + " " + Stream.continually(q.poll()).takeWhile(_ != null).mkString(", "))
}
if (!warmup) {
ok must be(true)
logMeasurement(numberOfClients, durationNs, repeat)
}
clients.foreach(_ ! PoisonPill)
destinations.foreach(_ ! PoisonPill)
}
}
}
}
object TellThroughputSeparateDispatchersPerformanceSpec {
case object Run
case object Msg
class Destination extends Actor {
def receive = {
case Msg sender ! Msg
}
}
class Client(
actor: ActorRef,
latch: CountDownLatch,
repeat: Long) extends Actor {
var sent = 0L
var received = 0L
def receive = {
case Msg
received += 1
if (sent < repeat) {
actor ! Msg
sent += 1
} else if (received >= repeat) {
latch.countDown()
}
case Run
for (i 0L until math.min(1000L, repeat)) {
actor ! Msg
sent += 1
}
}
}
}


@ -51,11 +51,10 @@ object Orderbook {
val useDummyOrderbook = System.getProperty("benchmark.useDummyOrderbook", "false").toBoolean
def apply(symbol: String, standby: Boolean): Orderbook = standby match {
case false if !useDummyOrderbook new Orderbook(symbol) with SimpleTradeObserver
case true if !useDummyOrderbook new Orderbook(symbol) with StandbyTradeObserver
case false if useDummyOrderbook new DummyOrderbook(symbol) with SimpleTradeObserver
case true if useDummyOrderbook new DummyOrderbook(symbol) with StandbyTradeObserver
def apply(symbol: String, standby: Boolean): Orderbook = (useDummyOrderbook, standby) match {
case (false, false) new Orderbook(symbol) with NopTradeObserver
case (false, true) new Orderbook(symbol) with TotalTradeObserver
case (true, _) new DummyOrderbook(symbol) with NopTradeObserver
}
}


@ -6,15 +6,13 @@ abstract trait TradeObserver {
def trade(bid: Bid, ask: Ask)
}
trait SimpleTradeObserver extends TradeObserver {
trait TotalTradeObserver extends TradeObserver {
override def trade(bid: Bid, ask: Ask) {
if (!Orderbook.useDummyOrderbook) {
TotalTradeCounter.counter.incrementAndGet
}
TotalTradeCounter.counter.incrementAndGet
}
}
trait StandbyTradeObserver extends TradeObserver {
trait NopTradeObserver extends TradeObserver {
override def trade(bid: Bid, ask: Ask) {
}
}


@ -15,7 +15,10 @@ trait PerformanceSpec extends AkkaSpec with BeforeAndAfterEach {
def minClients() = System.getProperty("benchmark.minClients", "1").toInt;
def maxClients() = System.getProperty("benchmark.maxClients", "40").toInt;
def maxClients() = {
val default = if (isBenchmark) "48" else "4"
System.getProperty("benchmark.maxClients", default).toInt;
}
def repeatFactor() = {
val defaultRepeatFactor = if (isBenchmark) "150" else "2"


@ -29,7 +29,9 @@ class TypedActorPoolSpec extends AkkaSpec {
import ActorPoolSpec._
"Actor Pool (2)" must {
"support typed actors" in {
val pool = system.createProxy[Foo](new Actor with DefaultActorPool with BoundedCapacityStrategy with MailboxPressureCapacitor with SmallestMailboxSelector with Filter with RunningMeanBackoff with BasicRampup {
val ta = TypedActor(system)
val pool = ta.createProxy[Foo](new Actor with DefaultActorPool with BoundedCapacityStrategy with MailboxPressureCapacitor with SmallestMailboxSelector with Filter with RunningMeanBackoff with BasicRampup {
val typedActor = TypedActor(context)
def lowerBound = 1
def upperBound = 5
def pressureThreshold = 1
@ -38,7 +40,7 @@ class TypedActorPoolSpec extends AkkaSpec {
def rampupRate = 0.1
def backoffRate = 0.50
def backoffThreshold = 0.50
def instance(p: Props) = system.typedActor.getActorRefFor(context.typedActorOf[Foo, FooImpl](props = p.withTimeout(10 seconds)))
def instance(p: Props) = typedActor.getActorRefFor(typedActor.typedActorOf[Foo, FooImpl](props = p.withTimeout(10 seconds)))
def receive = _route
}, Props().withTimeout(10 seconds).withFaultHandler(faultHandler))
@ -47,7 +49,7 @@ class TypedActorPoolSpec extends AkkaSpec {
for ((i, r) results)
r.get must equal(i * i)
system.typedActor.stop(pool)
ta.stop(pool)
}
}
}


@ -402,4 +402,79 @@ class RoutingSpec extends AkkaSpec {
}
})
}
"broadcast router" must {
"be started when constructed" in {
val actor1 = actorOf[TestActor]
val props = RoutedProps(routerFactory = () new BroadcastRouter, connectionManager = new LocalConnectionManager(List(actor1)))
val actor = new RoutedActorRef(system, props, system.asInstanceOf[ActorSystemImpl].guardian, "foo")
actor.isTerminated must be(false)
}
"broadcast message using !" in {
val doneLatch = new CountDownLatch(2)
val counter1 = new AtomicInteger
val connection1 = actorOf(new Actor {
def receive = {
case "end" doneLatch.countDown()
case msg: Int counter1.addAndGet(msg)
}
})
val counter2 = new AtomicInteger
val connection2 = actorOf(new Actor {
def receive = {
case "end" doneLatch.countDown()
case msg: Int counter2.addAndGet(msg)
}
})
val props = RoutedProps(routerFactory = () new BroadcastRouter, connectionManager = new LocalConnectionManager(List(connection1, connection2)))
val actor = new RoutedActorRef(system, props, system.asInstanceOf[ActorSystemImpl].guardian, "foo")
actor ! 1
actor ! "end"
doneLatch.await(5, TimeUnit.SECONDS) must be(true)
counter1.get must be(1)
counter2.get must be(1)
}
"broadcast message using ?" in {
val doneLatch = new CountDownLatch(2)
val counter1 = new AtomicInteger
val connection1 = actorOf(new Actor {
def receive = {
case "end" doneLatch.countDown()
case msg: Int
counter1.addAndGet(msg)
sender ! "ack"
}
})
val counter2 = new AtomicInteger
val connection2 = actorOf(new Actor {
def receive = {
case "end" doneLatch.countDown()
case msg: Int counter2.addAndGet(msg)
}
})
val props = RoutedProps(routerFactory = () new BroadcastRouter, connectionManager = new LocalConnectionManager(List(connection1, connection2)))
val actor = new RoutedActorRef(system, props, system.asInstanceOf[ActorSystemImpl].guardian, "foo")
actor ? 1
actor ! "end"
doneLatch.await(5, TimeUnit.SECONDS) must be(true)
counter1.get must be(1)
counter2.get must be(1)
}
}
}
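
The new broadcast-router tests build the routed ref by hand rather than through deployment config. A condensed sketch of that setup, following the test code above (connection1 and connection2 are ordinary actors that count what they receive):

    val props = RoutedProps(
      routerFactory = () => new BroadcastRouter,
      connectionManager = new LocalConnectionManager(List(connection1, connection2)))

    // "foo" is simply the name the tests give the routed ref.
    val router = new RoutedActorRef(system, props, system.asInstanceOf[ActorSystemImpl].guardian, "foo")

    router ! 1       // every connection receives 1
    router ! "end"   // every connection receives "end"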


@ -46,7 +46,7 @@ object SerializeSpec {
class SerializeSpec extends AkkaSpec(SerializeSpec.serializationConf) {
import SerializeSpec._
val ser = SerializationExtension(system).serialization
val ser = SerializationExtension(system)
import ser._
val addr = Address("120", "Monroe Street", "Santa Clara", "95050")
@ -104,7 +104,7 @@ class SerializeSpec extends AkkaSpec(SerializeSpec.serializationConf) {
out.close()
val in = new ObjectInputStream(new ByteArrayInputStream(outbuf.toByteArray))
Serialization.system.withValue(a.asInstanceOf[ActorSystemImpl]) {
Serialization.currentSystem.withValue(a.asInstanceOf[ActorSystemImpl]) {
val deadLetters = in.readObject().asInstanceOf[DeadLetterActorRef]
(deadLetters eq a.deadLetters) must be(true)
}


@ -12,8 +12,8 @@ final class AbstractMailbox {
static {
try {
mailboxStatusOffset = Unsafe.instance.objectFieldOffset(Mailbox.class.getDeclaredField("_status"));
systemMessageOffset = Unsafe.instance.objectFieldOffset(Mailbox.class.getDeclaredField("_systemQueue"));
mailboxStatusOffset = Unsafe.instance.objectFieldOffset(Mailbox.class.getDeclaredField("_statusDoNotCallMeDirectly"));
systemMessageOffset = Unsafe.instance.objectFieldOffset(Mailbox.class.getDeclaredField("_systemQueueDoNotCallMeDirectly"));
} catch(Throwable t){
throw new ExceptionInInitializerError(t);
}


@ -13,12 +13,17 @@ akka {
enabled-modules = [] # Comma separated list of the enabled modules. Options: ["cluster", "camel", "http"]
event-handlers = ["akka.event.Logging$DefaultLogger"] # Event handlers to register at boot time (Logging$DefaultLogger logs to STDOUT)
loglevel = "WARNING" # Options: ERROR, WARNING, INFO, DEBUG
loglevel = "INFO" # Options: ERROR, WARNING, INFO, DEBUG
# this level is used by the configured loggers (see "event-handlers") as soon
# as they have been started; before that, see "stdout-loglevel"
stdout-loglevel = "WARNING" # Loglevel for the very basic logger activated during AkkaApplication startup
stdout-loglevel = "INFO" # Loglevel for the very basic logger activated during AkkaApplication startup
# FIXME: Is there any sensible reason why we have 2 different log levels?
extensions = [] # list FQCN of extensions which shall be loaded at actor system startup
logConfigOnStart = off # Log the complete configuration at INFO level when the actor system is started.
# This is useful when you are uncertain of what configuration is used.
extensions = [] # List FQCN of extensions which shall be loaded at actor system startup.
# FIXME: clarify "extensions" here, "Akka Extensions (<link to docs>)"
# These boot classes are loaded (and created) automatically when the Akka Microkernel boots up
# Can be used to bootstrap your application(s)


@ -16,7 +16,7 @@ import akka.util.{ Duration, Helpers }
* Exposes contextual information for the actor and the current message.
* TODO: everything here for current compatibility - could be limited more
*/
trait ActorContext extends ActorRefFactory with TypedActorFactory {
trait ActorContext extends ActorRefFactory {
def self: ActorRef
@ -81,8 +81,6 @@ private[akka] class ActorCell(
protected final def guardian = self
protected def typedActor = system.typedActor
final def provider = system.provider
override def receiveTimeout: Option[Long] = if (receiveTimeoutData._1 > 0) Some(receiveTimeoutData._1) else None


@ -176,8 +176,8 @@ class LocalActorRef private[akka] (
def address: String = path.toString
/*
* actorCell.start() publishes actorCell & this to the dispatcher, which
* means that messages may be processed theoretically before the constructor
* actorCell.start() publishes actorCell & this to the dispatcher, which
* means that messages may be processed theoretically before the constructor
* ends. The JMM guarantees visibility for final fields only after the end
* of the constructor, so publish the actorCell safely by making it a
* @volatile var which is NOT TO BE WRITTEN TO. The alternative would be to
@ -305,17 +305,17 @@ trait ScalaActorRef { ref: ActorRef ⇒
*/
case class SerializedActorRef(hostname: String, port: Int, path: String) {
import akka.serialization.Serialization.system
import akka.serialization.Serialization.currentSystem
def this(remoteAddress: RemoteAddress, path: String) = this(remoteAddress.hostname, remoteAddress.port, path)
def this(remoteAddress: InetSocketAddress, path: String) = this(remoteAddress.getAddress.getHostAddress, remoteAddress.getPort, path) //TODO FIXME REMOVE
@throws(classOf[java.io.ObjectStreamException])
def readResolve(): AnyRef = {
if (system.value eq null) throw new IllegalStateException(
def readResolve(): AnyRef = currentSystem.value match {
case null throw new IllegalStateException(
"Trying to deserialize a serialized ActorRef without an ActorSystem in scope." +
" Use akka.serialization.Serialization.system.withValue(system) { ... }")
system.value.provider.deserialize(this) match {
" Use akka.serialization.Serialization.currentSystem.withValue(system) { ... }")
case someSystem someSystem.provider.deserialize(this) match {
case Some(actor) actor
case None throw new IllegalStateException("Could not deserialize ActorRef")
}
@ -354,7 +354,7 @@ case class DeadLetter(message: Any, sender: ActorRef, recipient: ActorRef)
object DeadLetterActorRef {
class SerializedDeadLetterActorRef extends Serializable { //TODO implement as Protobuf for performance?
@throws(classOf[java.io.ObjectStreamException])
private def readResolve(): AnyRef = Serialization.system.value.deadLetters
private def readResolve(): AnyRef = Serialization.currentSystem.value.deadLetters
}
val serialized = new SerializedDeadLetterActorRef
@ -381,7 +381,7 @@ class DeadLetterActorRef(val eventStream: EventStream) extends MinimalActorRef {
override def isTerminated(): Boolean = true
override def !(message: Any)(implicit sender: ActorRef = null): Unit = message match {
override def !(message: Any)(implicit sender: ActorRef = this): Unit = message match {
case d: DeadLetter eventStream.publish(d)
case _ eventStream.publish(DeadLetter(message, sender, this))
}


@ -11,13 +11,15 @@ import org.jboss.netty.akka.util.{ TimerTask, HashedWheelTimer }
import akka.actor.Timeout.intToTimeout
import akka.config.ConfigurationException
import akka.dispatch.{ SystemMessage, Supervise, Promise, MessageDispatcher, Future, DefaultPromise, Dispatcher, Mailbox, Envelope }
import akka.event.{ Logging, DeathWatch, ActorClassification, EventStream }
import akka.routing.{ ScatterGatherFirstCompletedRouter, Routing, RouterType, Router, RoutedProps, RoutedActorRef, RoundRobinRouter, RandomRouter, LocalConnectionManager, DirectRouter }
import akka.routing.{ ScatterGatherFirstCompletedRouter, Routing, RouterType, Router, RoutedProps, RoutedActorRef, RoundRobinRouter, RandomRouter, LocalConnectionManager, DirectRouter, BroadcastRouter }
import akka.AkkaException
import com.eaio.uuid.UUID
import akka.util.{ Duration, Switch, Helpers }
import akka.remote.RemoteAddress
import akka.remote.LocalOnly
import akka.event._
import akka.event.Logging.Error._
import akka.event.Logging.Warning
/**
* Interface for all ActorRef providers to implement.
@ -36,6 +38,7 @@ trait ActorRefProvider {
// FIXME: remove/replace?
def nodename: String
// FIXME: remove/replace?
def clustername: String
@ -162,8 +165,11 @@ class LocalActorRefProvider(
* generate name for temporary actor refs
*/
private val tempNumber = new AtomicLong
def tempName = "$_" + Helpers.base64(tempNumber.getAndIncrement())
private val tempNode = rootPath / "tmp"
def tempPath = tempNode / tempName
// FIXME (actor path): this could become a cache for the new tree traversal actorFor
@ -189,7 +195,9 @@ class LocalActorRefProvider(
override def toString = name
override def stop() = stopped switchOn { terminationFuture.complete(causeOfTermination.toLeft(())) }
override def stop() = stopped switchOn {
terminationFuture.complete(causeOfTermination.toLeft(()))
}
override def isTerminated = stopped.isOn
@ -212,6 +220,7 @@ class LocalActorRefProvider(
case Terminated(_) context.self.stop()
}
}
private class SystemGuardian extends Actor {
def receive = {
case Terminated(_)
@ -219,6 +228,7 @@ class LocalActorRefProvider(
context.self.stop()
}
}
private val guardianFaultHandlingStrategy = {
import akka.actor.FaultHandlingStrategy._
OneForOneStrategy {
@ -230,7 +240,7 @@ class LocalActorRefProvider(
private val guardianProps = Props(new Guardian).withFaultHandler(guardianFaultHandlingStrategy)
/*
* The problem is that ActorRefs need a reference to the ActorSystem to
* The problem is that ActorRefs need a reference to the ActorSystem to
* provide their service. Hence they cannot be created while the
* constructors of ActorSystem and ActorRefProvider are still running.
* The solution is to split out that last part into an init() method,
@ -238,7 +248,9 @@ class LocalActorRefProvider(
*/
@volatile
private var system: ActorSystemImpl = _
def dispatcher: MessageDispatcher = system.dispatcher
lazy val terminationFuture: DefaultPromise[Unit] = new DefaultPromise[Unit](Timeout.never)(dispatcher)
lazy val rootGuardian: ActorRef = actorOf(system, guardianProps, theOneWhoWalksTheBubblesOfSpaceTime, rootPath, true)
lazy val guardian: ActorRef = actorOf(system, guardianProps, rootGuardian, "app", true)
@ -289,7 +301,8 @@ class LocalActorRefProvider(
actors.putIfAbsent(path.toString, newFuture) match {
case null
val actor: ActorRef = try {
(if (systemService) None else deployer.lookupDeployment(path.toString)) match { // see if the deployment already exists, if so use it, if not create actor
(if (systemService) None else deployer.lookupDeployment(path.toString)) match {
// see if the deployment already exists, if so use it, if not create actor
// create a local actor
case None | Some(DeploymentConfig.Deploy(_, _, DeploymentConfig.Direct, _, DeploymentConfig.LocalScope))
@ -297,13 +310,14 @@ class LocalActorRefProvider(
// create a routed actor ref
case deploy @ Some(DeploymentConfig.Deploy(_, _, routerType, nrOfInstances, DeploymentConfig.LocalScope))
implicit val dispatcher = if (props.dispatcher == Props.defaultDispatcher) system.dispatcher else props.dispatcher
implicit val timeout = system.settings.ActorTimeout
val routerFactory: () Router = DeploymentConfig.routerTypeFor(routerType) match {
case RouterType.Direct () new DirectRouter
case RouterType.Random () new RandomRouter
case RouterType.RoundRobin () new RoundRobinRouter
case RouterType.ScatterGather () new ScatterGatherFirstCompletedRouter()(
if (props.dispatcher == Props.defaultDispatcher) dispatcher else props.dispatcher, settings.ActorTimeout)
case RouterType.Direct () new DirectRouter
case RouterType.Random () new RandomRouter
case RouterType.RoundRobin () new RoundRobinRouter
case RouterType.Broadcast () new BroadcastRouter
case RouterType.ScatterGather () new ScatterGatherFirstCompletedRouter
case RouterType.LeastCPU sys.error("Router LeastCPU not supported yet")
case RouterType.LeastRAM sys.error("Router LeastRAM not supported yet")
case RouterType.LeastMessages sys.error("Router LeastMessages not supported yet")
@ -357,6 +371,7 @@ class LocalActorRefProvider(
}
private[akka] def deserialize(actor: SerializedActorRef): Option[ActorRef] = actorFor(ActorPath.split(actor.path))
private[akka] def serialize(actor: ActorRef): SerializedActorRef = new SerializedActorRef(rootPath.remoteAddress, actor.path.toString)
private[akka] def createDeathWatch(): DeathWatch = new LocalDeathWatch
@ -367,7 +382,9 @@ class LocalActorRefProvider(
case t if t.duration.length <= 0
new DefaultPromise[Any](0)(dispatcher) //Abort early if nonsensical timeout
case t
val a = new AskActorRef(tempPath, this, deathWatch, t, dispatcher) { def whenDone() = actors.remove(this) }
val a = new AskActorRef(tempPath, this, deathWatch, t, dispatcher) {
def whenDone() = actors.remove(this)
}
assert(actors.putIfAbsent(a.path.toString, a) eq null) //If this fails, we're in deep trouble
recipient.tell(message, a)
a.result
@ -392,7 +409,7 @@ class LocalDeathWatch extends DeathWatch with ActorClassification {
}
}
class DefaultScheduler(hashedWheelTimer: HashedWheelTimer) extends Scheduler {
class DefaultScheduler(hashedWheelTimer: HashedWheelTimer, system: ActorSystem) extends Scheduler {
def schedule(receiver: ActorRef, message: Any, initialDelay: Duration, delay: Duration): Cancellable =
new DefaultCancellable(hashedWheelTimer.newTimeout(createContinuousTask(receiver, message, delay), initialDelay))
@ -410,19 +427,37 @@ class DefaultScheduler(hashedWheelTimer: HashedWheelTimer) extends Scheduler {
new DefaultCancellable(hashedWheelTimer.newTimeout(createSingleTask(f), delay))
private def createSingleTask(runnable: Runnable): TimerTask =
new TimerTask() { def run(timeout: org.jboss.netty.akka.util.Timeout) { runnable.run() } }
new TimerTask() {
def run(timeout: org.jboss.netty.akka.util.Timeout) {
// FIXME: consider executing runnable inside main dispatcher to prevent blocking of scheduler
runnable.run()
}
}
private def createSingleTask(receiver: ActorRef, message: Any): TimerTask =
new TimerTask { def run(timeout: org.jboss.netty.akka.util.Timeout) { receiver ! message } }
new TimerTask {
def run(timeout: org.jboss.netty.akka.util.Timeout) {
receiver ! message
}
}
private def createSingleTask(f: () Unit): TimerTask =
new TimerTask { def run(timeout: org.jboss.netty.akka.util.Timeout) { f() } }
new TimerTask {
def run(timeout: org.jboss.netty.akka.util.Timeout) {
f()
}
}
private def createContinuousTask(receiver: ActorRef, message: Any, delay: Duration): TimerTask = {
new TimerTask {
def run(timeout: org.jboss.netty.akka.util.Timeout) {
receiver ! message
timeout.getTimer.newTimeout(this, delay)
// Check if the receiver is still alive and kicking before sending it a message and reschedule the task
if (!receiver.isTerminated) {
receiver ! message
timeout.getTimer.newTimeout(this, delay)
} else {
system.eventStream.publish(Warning(this.getClass.getSimpleName, "Could not reschedule message to be sent because receiving actor has been terminated."))
}
}
}
}
@ -439,9 +474,13 @@ class DefaultScheduler(hashedWheelTimer: HashedWheelTimer) extends Scheduler {
private[akka] def stop() = hashedWheelTimer.stop()
}
class DefaultCancellable(timeout: org.jboss.netty.akka.util.Timeout) extends Cancellable {
def cancel() { timeout.cancel() }
class DefaultCancellable(val timeout: org.jboss.netty.akka.util.Timeout) extends Cancellable {
def cancel() {
timeout.cancel()
}
def isCancelled: Boolean = { timeout.isCancelled }
def isCancelled: Boolean = {
timeout.isCancelled
}
}


@ -8,12 +8,7 @@ import akka.actor._
import akka.event._
import akka.dispatch._
import akka.util.duration._
import java.net.InetAddress
import com.eaio.uuid.UUID
import akka.serialization.Serialization
import akka.remote.RemoteAddress
import org.jboss.netty.akka.util.HashedWheelTimer
import java.util.concurrent.TimeUnit.SECONDS
import java.util.concurrent.TimeUnit.MILLISECONDS
import java.util.concurrent.TimeUnit.NANOSECONDS
import java.io.File
@ -25,10 +20,9 @@ import java.lang.reflect.InvocationTargetException
import akka.util.{ Helpers, Duration, ReflectiveAccess }
import java.util.concurrent.atomic.AtomicLong
import java.util.concurrent.CountDownLatch
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.Executors
import scala.annotation.tailrec
import akka.serialization.SerializationExtension
import org.jboss.netty.akka.util.internal.ConcurrentIdentityHashMap
object ActorSystem {
@ -63,17 +57,18 @@ object ActorSystem {
import scala.collection.JavaConverters._
import config._
val ConfigVersion = getString("akka.version")
val ProviderClass = getString("akka.actor.provider")
val ActorTimeout = Timeout(Duration(getMilliseconds("akka.actor.timeout"), MILLISECONDS))
// TODO This isn't used anywhere. Remove?
val SerializeAllMessages = getBoolean("akka.actor.serialize-messages")
val LogLevel = getString("akka.loglevel")
val StdoutLogLevel = getString("akka.stdout-loglevel")
val EventHandlers: Seq[String] = getStringList("akka.event-handlers").asScala
val LogConfigOnStart = config.getBoolean("akka.logConfigOnStart")
val AddLoggingReceive = getBoolean("akka.actor.debug.receive")
val DebugAutoReceive = getBoolean("akka.actor.debug.autoreceive")
val DebugLifecycle = getBoolean("akka.actor.debug.lifecycle")
@ -101,6 +96,10 @@ object ActorSystem {
throw new ConfigurationException("Akka JAR version [" + Version +
"] does not match the provided config version [" + ConfigVersion + "]")
override def toString: String = {
config.toString
}
}
object DefaultConfigurationLoader {
@ -150,7 +149,7 @@ object ActorSystem {
* configuration, e.g. dispatchers, deployments, remote capabilities and
* addresses. It is also the entry point for creating or looking up actors.
*/
abstract class ActorSystem extends ActorRefFactory with TypedActorFactory {
abstract class ActorSystem extends ActorRefFactory {
import ActorSystem._
/**
@ -164,6 +163,11 @@ abstract class ActorSystem extends ActorRefFactory with TypedActorFactory {
*/
def settings: Settings
/**
* Log the configuration.
*/
def logConfiguration(): Unit
/**
* The logical node name where this actor system resides.
*/
@ -208,9 +212,6 @@ abstract class ActorSystem extends ActorRefFactory with TypedActorFactory {
// FIXME: do not publish this
def deadLetterMailbox: Mailbox
// FIXME: TypedActor should be an extension
def typedActor: TypedActor
/**
* Light-weight scheduler for running asynchronous tasks after some deadline
* in the future. Not terribly precise but cheap.
@ -251,36 +252,25 @@ abstract class ActorSystem extends ActorRefFactory with TypedActorFactory {
def stop()
/**
* Register an [[akka.actor.Extension]] within this actor system. The supplied
* object is interrogated for the extensions key with which the extension is
* accessible from anywhere you have a reference to this actor system in
* scope, e.g. within actors (see [[ActorSystem.extension]]).
*
* Extensions can be registered automatically by adding their fully-qualified
* class name to the `akka.extensions` configuration key.
* Registers the provided extension and creates its payload, if this extension isn't already registered
* This method has putIfAbsent-semantics, this method can potentially block, waiting for the initialization
* of the payload, if is in the process of registration from another Thread of execution
*/
def registerExtension[T <: AnyRef](ext: Extension[T]): Extension[T]
def registerExtension[T <: Extension](ext: ExtensionId[T]): T
/**
* Obtain a reference to a registered extension by passing in the key which
* the extension object returned from its init method (typically a static
* field or Scala `object`):
*
* {{{
* class MyActor extends Actor {
* val ext: MyExtension = context.app.extension(MyExtension.key)
* }
* }}}
*
* Throws IllegalArgumentException if the extension key is not found.
* Returns the payload that is associated with the provided extension
* throws an IllegalStateException if it is not registered.
* This method can potentially block, waiting for the initialization
* of the payload, if is in the process of registration from another Thread of execution
*/
def extension[T <: AnyRef](key: ExtensionKey[T]): T
def extension[T <: Extension](ext: ExtensionId[T]): T
/**
* Query presence of a specific extension. Beware that this key needs to be
* the same as the one used for registration (it is using a HashMap).
* Returns whether the specified extension is already registered, this method can potentially block, waiting for the initialization
* of the payload, if is in the process of registration from another Thread of execution
*/
def hasExtension(key: ExtensionKey[_]): Boolean
def hasExtension(ext: ExtensionId[_ <: Extension]): Boolean
}
class ActorSystemImpl(val name: String, val applicationConfig: Config) extends ActorSystem {
@ -289,6 +279,8 @@ class ActorSystemImpl(val name: String, val applicationConfig: Config) extends A
val settings = new Settings(applicationConfig)
def logConfiguration(): Unit = log.info(settings.toString)
protected def systemImpl = this
private[akka] def systemActorOf(props: Props, address: String): ActorRef = provider.actorOf(this, props, systemGuardian, address, true)
@ -300,7 +292,7 @@ class ActorSystemImpl(val name: String, val applicationConfig: Config) extends A
eventStream.startStdoutLogger(settings)
val log = new BusLogging(eventStream, "ActorSystem") // this used only for .getClass in tagging messages
val scheduler = new DefaultScheduler(new HashedWheelTimer(log, Executors.defaultThreadFactory, settings.SchedulerTickDuration, settings.SchedulerTicksPerWheel))
val scheduler = new DefaultScheduler(new HashedWheelTimer(log, Executors.defaultThreadFactory, settings.SchedulerTickDuration, settings.SchedulerTicksPerWheel), this)
val provider: ActorRefProvider = {
val providerClass = ReflectiveAccess.getClassFor(ProviderClass) match {
@ -347,21 +339,16 @@ class ActorSystemImpl(val name: String, val applicationConfig: Config) extends A
private final val nextName = new AtomicLong
override protected def randomName(): String = Helpers.base64(nextName.incrementAndGet())
@volatile
private var _typedActor: TypedActor = _
def typedActor = _typedActor
def /(actorName: String): ActorPath = guardian.path / actorName
private lazy val _start: this.type = {
// TODO can we do something better than loading SerializationExtension from here?
_typedActor = new TypedActor(settings, SerializationExtension(this).serialization)
provider.init(this)
deadLetters.init(dispatcher, provider.rootPath)
// this starts the reaper actor and the user-configured logging subscribers, which are also actors
eventStream.start(this)
eventStream.startDefaultLoggers(this)
loadExtensions()
if (LogConfigOnStart) logConfiguration()
this
}
@ -377,65 +364,61 @@ class ActorSystemImpl(val name: String, val applicationConfig: Config) extends A
terminationFuture onComplete (_ dispatcher.shutdown())
}
private val extensions = new ConcurrentHashMap[ExtensionKey[_], AnyRef]
private val extensions = new ConcurrentIdentityHashMap[ExtensionId[_], AnyRef]
/**
* Attempts to initialize and register this extension if the key associated with it isn't already registered.
* The extension will only be initialized if it isn't already registered.
* Rethrows anything thrown when initializing the extension (doesn't register in that case)
* Returns the registered extension, might be another already registered instance.
* Returns any extension registered to the specified Extension or returns null if not registered
*/
@tailrec
final def registerExtension[T <: AnyRef](ext: Extension[T]): Extension[T] = {
/**
* Returns any extension registered to the specified key or returns null if not registered
*/
@tailrec
def findExtension[T <: AnyRef](key: ExtensionKey[T]): Option[T] = extensions.get(key) match {
case c: CountDownLatch c.await(); findExtension(key) //Registration in process, await completion and retry
case e: Extension[_] Some(e.asInstanceOf[T]) //Profit!
case null None //Doesn't exist
}
private def findExtension[T <: Extension](ext: ExtensionId[T]): T = extensions.get(ext) match {
case c: CountDownLatch c.await(); findExtension(ext) //Registration in process, await completion and retry
case other other.asInstanceOf[T] //could be a T or null, in which case we return the null as T
}
findExtension(ext.key) match {
case Some(e: Extension[_]) e.asInstanceOf[Extension[T]] //Profit!
case None //Doesn't already exist, commence registration
@tailrec
final def registerExtension[T <: Extension](ext: ExtensionId[T]): T = {
findExtension(ext) match {
case null //Doesn't already exist, commence registration
val inProcessOfRegistration = new CountDownLatch(1)
extensions.putIfAbsent(ext.key, inProcessOfRegistration) match { // Signal that registration is in process
extensions.putIfAbsent(ext, inProcessOfRegistration) match { // Signal that registration is in process
case null try { // Signal was successfully sent
ext.init(this) //Initialize the new extension
extensions.replace(ext.key, inProcessOfRegistration, ext) //Replace our in process signal with the initialized extension
ext //Profit!
ext.createExtension(this) match { // Create and initialize the extension
case null throw new IllegalStateException("Extension instance created as null for Extension: " + ext)
case instance
extensions.replace(ext, inProcessOfRegistration, instance) //Replace our in process signal with the initialized extension
instance //Profit!
}
} catch {
case t
extensions.remove(ext.key, inProcessOfRegistration) //In case shit hits the fan, remove the inProcess signal
extensions.remove(ext, inProcessOfRegistration) //In case shit hits the fan, remove the inProcess signal
throw t //Escalate to caller
} finally {
inProcessOfRegistration.countDown //Always notify listeners of the inProcess signal
}
case other registerExtension(ext) //Someone else is in process of registering an extension for this key, retry
case other registerExtension(ext) //Someone else is in process of registering an extension for this Extension, retry
}
case existing existing.asInstanceOf[T]
}
}
def extension[T <: AnyRef](key: ExtensionKey[T]): T = extensions.get(key) match {
case x: Extension[_] x.asInstanceOf[T]
case _ throw new IllegalArgumentException("trying to get non-registered extension " + key)
def extension[T <: Extension](ext: ExtensionId[T]): T = findExtension(ext) match {
case null throw new IllegalArgumentException("Trying to get non-registered extension " + ext)
case some some.asInstanceOf[T]
}
def hasExtension(key: ExtensionKey[_]): Boolean = extensions.get(key) match {
case x: Extension[_] true
case _ false
}
def hasExtension(ext: ExtensionId[_ <: Extension]): Boolean = findExtension(ext) != null
private def loadExtensions() {
import scala.collection.JavaConversions._
settings.config.getStringList("akka.extensions") foreach { fqcn
import ReflectiveAccess._
createInstance[Extension[_ <: AnyRef]](fqcn, noParams, noArgs) match {
case Left(ex) log.error(ex, "Exception trying to load extension " + fqcn)
case Right(ext) if (ext.isInstanceOf[Extension[_]]) registerExtension(ext) else log.error("Class {} is not an Extension", fqcn)
getObjectFor[AnyRef](fqcn).fold(_ createInstance[AnyRef](fqcn, noParams, noArgs), Right(_)) match {
case Right(p: ExtensionIdProvider) registerExtension(p.lookup());
case Right(p: ExtensionId[_]) registerExtension(p);
case Right(other) log.error("'{}' is not an ExtensionIdProvider or ExtensionId, skipping...", fqcn)
case Left(problem) log.error(problem, "While trying to load extension '{}', skipping...", fqcn)
}
}
}
}
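loadExtensions resolves each fully-qualified class name listed under the `akka.extensions` config key into an ExtensionIdProvider or ExtensionId object and registers it during start-up. A rough sketch of supplying such a list programmatically (the provider class name is illustrative only):

import com.typesafe.config.{ ConfigFactory, ConfigParseOptions }
// the listed class must implement akka.actor.ExtensionIdProvider (or be an ExtensionId object)
val conf = ConfigFactory.parseString(
  """akka.extensions = ["com.example.CountExtensionProvider"]""", ConfigParseOptions.defaults)
val systemWithExtensions = ActorSystem("WithExtensions", conf)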

View file

@ -16,53 +16,51 @@ package akka.actor
* The extension itself can be created in any way desired and has full access
* to the ActorSystem implementation.
*
* Scala example:
*
* {{{
* class MyExtension extends Extension[MyExtension] {
* def key = MyExtension
* def init(system: ActorSystemImpl) {
* ... // initialize here
* }
* }
* object MyExtension extends ExtensionKey[MyExtension]
* }}}
*
* Java example:
*
* {{{
* static class MyExtension implements Extension<MyExtension> {
* public static ExtensionKey<MyExtension> key = new ExtensionKey<MyExtension>() {};
*
* public ExtensionKey<TestExtension> key() {
* return key;
* }
* public void init(ActorSystemImpl system) {
* ... // initialize here
* }
* }
* }}}
*/
trait Extension[T <: AnyRef] {
/**
* Marker interface to signify an Akka Extension
*/
trait Extension
/**
* Identifies an Extension
* Lookup of Extensions is done by object identity, so the Id must be the same wherever it's used,
* otherwise you'll get the same extension loaded multiple times.
*/
trait ExtensionId[T <: Extension] {
/**
* This method is called by the ActorSystem upon registering this extension.
* The key returned is used for looking up extensions, hence it must be a
* suitable hash key and available to all clients of the extension. This is
* best achieved by storing it in a static field (Java) or as/in an object
* (Scala).
* Returns an instance of the extension identified by this ExtensionId instance.
*/
def key: ExtensionKey[T]
def apply(system: ActorSystem): T = system.registerExtension(this)
// FIXME ActorSystemImpl exposed to user API. We might well choose to introduce a new interface for this level of access, just so we can shuffle around the implementation
/**
* This method is called by the ActorSystem when the extension is registered
* to trigger initialization of the extension.
* Returns an instance of the extension identified by this ExtensionId instance.
* Java API
*/
def init(system: ActorSystemImpl): Unit
def get(system: ActorSystem): T = apply(system)
/**
* Is used by Akka to instantiate the Extension identified by this ExtensionId,
* internal use only.
*/
def createExtension(system: ActorSystemImpl): T
}
/**
* Marker trait identifying a registered [[akka.actor.Extension]].
* Java API for ExtensionId
*/
trait ExtensionKey[T <: AnyRef]
abstract class AbstractExtensionId[T <: Extension] extends ExtensionId[T]
/**
* To be able to load an ExtensionId from the configuration,
* a class that implements ExtensionIdProvider must be specified.
* The lookup method should return the canonical reference to the extension.
*/
trait ExtensionIdProvider {
/**
* Returns the canonical ExtensionId for this Extension
*/
def lookup(): ExtensionId[_ <: Extension]
}
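The Scaladoc examples above still show the previous `Extension[T]`/`ExtensionKey` API; under the new contract a definition might look roughly like the following sketch (names are illustrative, not part of this patch):

class CountExtension(system: ActorSystemImpl) extends Extension {
  val counter = new java.util.concurrent.atomic.AtomicLong(0) // per-ActorSystem state lives in the payload
}

object CountExtension extends ExtensionId[CountExtension] with ExtensionIdProvider {
  def lookup() = CountExtension // the canonical ExtensionId, also used when loading from akka.extensions
  def createExtension(system: ActorSystemImpl) = new CountExtension(system)
}

// usage: CountExtension(system).counter.incrementAndGet()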

View file

@ -12,12 +12,189 @@ import akka.serialization.{ Serializer, Serialization }
import akka.dispatch._
import akka.serialization.SerializationExtension
object TypedActor {
trait TypedActorFactory {
protected def actorFactory: ActorRefFactory
protected def typedActor: TypedActorExtension
/**
* Stops the underlying ActorRef for the supplied TypedActor proxy,
* if any, returns whether it could find the ActorRef or not
*/
def stop(proxy: AnyRef): Boolean = getActorRefFor(proxy) match {
case null false
case ref ref.stop; true
}
/**
* Sends a PoisonPill to the underlying ActorRef for the supplied TypedActor proxy,
* if any, returns whether it could find the ActorRef or not
*/
def poisonPill(proxy: AnyRef): Boolean = getActorRefFor(proxy) match {
case null false
case ref ref ! PoisonPill; true
}
/**
* Returns whether the supplied AnyRef is a TypedActor proxy or not
*/
def isTypedActor(proxyOrNot: AnyRef): Boolean
/**
* Retrieves the underlying ActorRef for the supplied TypedActor proxy, or null if none found
*/
def getActorRefFor(proxy: AnyRef): ActorRef
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or
* all interfaces (Class.getInterfaces) if it's not an interface class
*/
def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Class[T], props: Props): R =
typedActor.createProxyAndTypedActor(actorFactory, interface, impl.newInstance, props, None, interface.getClassLoader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or
* all interfaces (Class.getInterfaces) if it's not an interface class
*/
def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Class[T], props: Props, name: String): R =
typedActor.createProxyAndTypedActor(actorFactory, interface, impl.newInstance, props, Some(name), interface.getClassLoader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or
* all interfaces (Class.getInterfaces) if it's not an interface class
*/
def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Creator[T], props: Props): R =
typedActor.createProxyAndTypedActor(actorFactory, interface, impl.create, props, None, interface.getClassLoader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or
* all interfaces (Class.getInterfaces) if it's not an interface class
*/
def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Creator[T], props: Props, name: String): R =
typedActor.createProxyAndTypedActor(actorFactory, interface, impl.create, props, Some(name), interface.getClassLoader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or
* all interfaces (Class.getInterfaces) if it's not an interface class
*/
def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Class[T], props: Props, loader: ClassLoader): R =
typedActor.createProxyAndTypedActor(actorFactory, interface, impl.newInstance, props, None, loader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or
* all interfaces (Class.getInterfaces) if it's not an interface class
*/
def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Class[T], props: Props, name: String, loader: ClassLoader): R =
typedActor.createProxyAndTypedActor(actorFactory, interface, impl.newInstance, props, Some(name), loader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or
* all interfaces (Class.getInterfaces) if it's not an interface class
*/
def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Creator[T], props: Props, loader: ClassLoader): R =
typedActor.createProxyAndTypedActor(actorFactory, interface, impl.create, props, None, loader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or
* all interfaces (Class.getInterfaces) if it's not an interface class
*/
def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Creator[T], props: Props, name: String, loader: ClassLoader): R =
typedActor.createProxyAndTypedActor(actorFactory, interface, impl.create, props, Some(name), loader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied implementation class' interfaces (Class.getInterfaces)
*/
def typedActorOf[R <: AnyRef, T <: R](impl: Class[T], props: Props, loader: ClassLoader): R =
typedActor.createProxyAndTypedActor(actorFactory, impl, impl.newInstance, props, None, loader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied implementation class' interfaces (Class.getInterfaces)
*/
def typedActorOf[R <: AnyRef, T <: R](impl: Class[T], props: Props, name: String, loader: ClassLoader): R =
typedActor.createProxyAndTypedActor(actorFactory, impl, impl.newInstance, props, Some(name), loader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied implementation class' interfaces (Class.getInterfaces)
*/
def typedActorOf[R <: AnyRef, T <: R](props: Props = Props(), name: String = null, loader: ClassLoader = null)(implicit m: Manifest[T]): R = {
val clazz = m.erasure.asInstanceOf[Class[T]]
typedActor.createProxyAndTypedActor(actorFactory, clazz, clazz.newInstance, props, Option(name), if (loader eq null) clazz.getClassLoader else loader)
}
/**
* Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself,
* to create TypedActor proxies, use typedActorOf
*/
def createProxy[R <: AnyRef](constructor: Actor, props: Props = Props(), name: String = null, loader: ClassLoader = null)(implicit m: Manifest[R]): R =
typedActor.createProxy[R](actorFactory, typedActor.extractInterfaces(m.erasure), (ref: AtomVar[R]) constructor, props, Option(name), if (loader eq null) m.erasure.getClassLoader else loader)
/**
* Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself,
* to create TypedActor proxies, use typedActorOf
*/
def createProxy[R <: AnyRef](interfaces: Array[Class[_]], constructor: Creator[Actor], props: Props, loader: ClassLoader): R =
typedActor.createProxy(actorFactory, interfaces, (ref: AtomVar[R]) constructor.create, props, None, loader)
/**
* Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself,
* to create TypedActor proxies, use typedActorOf
*/
def createProxy[R <: AnyRef](interfaces: Array[Class[_]], constructor: Creator[Actor], props: Props, name: String, loader: ClassLoader): R =
typedActor.createProxy(actorFactory, interfaces, (ref: AtomVar[R]) constructor.create, props, Some(name), loader)
/**
* Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself,
* to create TypedActor proxies, use typedActorOf
*/
def createProxy[R <: AnyRef](interfaces: Array[Class[_]], constructor: Actor, props: Props, loader: ClassLoader): R =
typedActor.createProxy[R](actorFactory, interfaces, (ref: AtomVar[R]) constructor, props, None, loader)
/**
* Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself,
* to create TypedActor proxies, use typedActorOf
*/
def createProxy[R <: AnyRef](interfaces: Array[Class[_]], constructor: Actor, props: Props, name: String, loader: ClassLoader): R =
typedActor.createProxy[R](actorFactory, interfaces, (ref: AtomVar[R]) constructor, props, Some(name), loader)
}
object TypedActor extends ExtensionId[TypedActorExtension] with ExtensionIdProvider {
def lookup() = this
def createExtension(system: ActorSystemImpl): TypedActorExtension = new TypedActorExtension(system)
/**
* Returns a contextual TypedActorFactory of this extension. This means that any TypedActors created by this TypedActorExtension
* will be children of the specified context, which allows for creating hierarchies of TypedActors.
* Do _not_ let this instance escape the TypedActor since that will not be thread-safe.
*/
def apply(context: ActorContext): TypedActorFactory = ContextualTypedActorFactory(apply(context.system), context)
/**
* Returns a contextual TypedActorFactory of this extension. This means that any TypedActors created by this TypedActorExtension
* will be children of the specified context, which allows for creating hierarchies of TypedActors.
* Do _not_ let this instance escape the TypedActor since that will not be thread-safe.
*
* Java API
*/
def get(context: ActorContext): TypedActorFactory = apply(context)
/**
* This class represents a Method call, and has a reference to the Method to be called and the parameters to supply.
* It's sent to the ActorRef backing the TypedActor and can be serialized and deserialized
*/
case class MethodCall(ser: Serialization, method: Method, parameters: Array[AnyRef]) {
case class MethodCall(method: Method, parameters: Array[AnyRef]) {
def isOneWay = method.getReturnType == java.lang.Void.TYPE
def returnsFuture_? = classOf[Future[_]].isAssignableFrom(method.getReturnType)
@ -41,7 +218,7 @@ object TypedActor {
case null SerializedMethodCall(method.getDeclaringClass, method.getName, method.getParameterTypes, null, null)
case ps if ps.length == 0 SerializedMethodCall(method.getDeclaringClass, method.getName, method.getParameterTypes, Array[Serializer.Identifier](), Array[Array[Byte]]())
case ps
val serializers: Array[Serializer] = ps map ser.findSerializerFor
val serializers: Array[Serializer] = ps map SerializationExtension(Serialization.currentSystem.value).findSerializerFor
val serializedParameters: Array[Array[Byte]] = Array.ofDim[Array[Byte]](serializers.length)
for (i 0 until serializers.length)
serializedParameters(i) = serializers(i) toBinary parameters(i) //Mutable for the sake of sanity
@ -58,26 +235,26 @@ object TypedActor {
//TODO implement writeObject and readObject to serialize
//TODO Possible optimization is to special encode the parameter-types to conserve space
private def readResolve(): AnyRef = {
val system = akka.serialization.Serialization.system.value
val system = akka.serialization.Serialization.currentSystem.value
if (system eq null) throw new IllegalStateException(
"Trying to deserialize a SerializedMethodCall without an ActorSystem in scope." +
" Use akka.serialization.Serialization.system.withValue(system) { ... }")
val serialization = SerializationExtension(system).serialization
MethodCall(serialization, ownerType.getDeclaredMethod(methodName, parameterTypes: _*), serializedParameters match {
" Use akka.serialization.Serialization.currentSystem.withValue(system) { ... }")
val serialization = SerializationExtension(system)
MethodCall(ownerType.getDeclaredMethod(methodName, parameterTypes: _*), serializedParameters match {
case null null
case a if a.length == 0 Array[AnyRef]()
case a
val deserializedParameters: Array[AnyRef] = Array.ofDim[AnyRef](a.length) //Mutable for the sake of sanity
for (i 0 until a.length) {
for (i 0 until a.length)
deserializedParameters(i) = serialization.serializerByIdentity(serializerIdentifiers(i)).fromBinary(serializedParameters(i))
}
deserializedParameters
})
}
}
private val selfReference = new ThreadLocal[AnyRef]
private val appReference = new ThreadLocal[ActorSystem]
private val currentSystem = new ThreadLocal[ActorSystem]
/**
* Returns the reference to the proxy when called inside a method call in a TypedActor
@ -105,7 +282,7 @@ object TypedActor {
/**
* Returns the akka system (for a TypedActor) when inside a method call in a TypedActor.
*/
def system = appReference.get match {
def system = currentSystem.get match {
case null throw new IllegalStateException("Calling TypedActor.system outside of a TypedActor implementation method!")
case some some
}
@ -119,220 +296,37 @@ object TypedActor {
* Returns the default timeout (for a TypedActor) when inside a method call in a TypedActor.
*/
implicit def timeout = system.settings.ActorTimeout
}
trait TypedActorFactory { this: ActorRefFactory
protected def typedActor: TypedActor
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or
* all interfaces (Class.getInterfaces) if it's not an interface class
* Implementation of TypedActor as an Actor
*/
def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Class[T], props: Props): R =
typedActor.createProxyAndTypedActor(this, interface, impl.newInstance, props, None, interface.getClassLoader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or
* all interfaces (Class.getInterfaces) if it's not an interface class
*/
def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Class[T], props: Props, name: String): R =
typedActor.createProxyAndTypedActor(this, interface, impl.newInstance, props, Some(name), interface.getClassLoader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or
* all interfaces (Class.getInterfaces) if it's not an interface class
*/
def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Creator[T], props: Props): R =
typedActor.createProxyAndTypedActor(this, interface, impl.create, props, None, interface.getClassLoader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or
* all interfaces (Class.getInterfaces) if it's not an interface class
*/
def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Creator[T], props: Props, name: String): R =
typedActor.createProxyAndTypedActor(this, interface, impl.create, props, Some(name), interface.getClassLoader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or
* all interfaces (Class.getInterfaces) if it's not an interface class
*/
def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Class[T], props: Props, loader: ClassLoader): R =
typedActor.createProxyAndTypedActor(this, interface, impl.newInstance, props, None, loader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or
* all interfaces (Class.getInterfaces) if it's not an interface class
*/
def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Class[T], props: Props, name: String, loader: ClassLoader): R =
typedActor.createProxyAndTypedActor(this, interface, impl.newInstance, props, Some(name), loader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or
* all interfaces (Class.getInterfaces) if it's not an interface class
*/
def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Creator[T], props: Props, loader: ClassLoader): R =
typedActor.createProxyAndTypedActor(this, interface, impl.create, props, None, loader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied interface class (if the class represents an interface) or
* all interfaces (Class.getInterfaces) if it's not an interface class
*/
def typedActorOf[R <: AnyRef, T <: R](interface: Class[R], impl: Creator[T], props: Props, name: String, loader: ClassLoader): R =
typedActor.createProxyAndTypedActor(this, interface, impl.create, props, Some(name), loader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied implementation class' interfaces (Class.getInterfaces)
*/
def typedActorOf[R <: AnyRef, T <: R](impl: Class[T], props: Props, loader: ClassLoader): R =
typedActor.createProxyAndTypedActor(this, impl, impl.newInstance, props, None, loader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied implementation class' interfaces (Class.getInterfaces)
*/
def typedActorOf[R <: AnyRef, T <: R](impl: Class[T], props: Props, name: String, loader: ClassLoader): R =
typedActor.createProxyAndTypedActor(this, impl, impl.newInstance, props, Some(name), loader)
/**
* Creates a new TypedActor proxy using the supplied Props,
* the interfaces usable by the returned proxy is the supplied implementation class' interfaces (Class.getInterfaces)
*/
def typedActorOf[R <: AnyRef, T <: R](props: Props = Props(), name: String = null, loader: ClassLoader = null)(implicit m: Manifest[T]): R = {
val clazz = m.erasure.asInstanceOf[Class[T]]
typedActor.createProxyAndTypedActor(this, clazz, clazz.newInstance, props, Option(name), if (loader eq null) clazz.getClassLoader else loader)
}
/**
* Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself,
* to create TypedActor proxies, use typedActorOf
*/
def createProxy[R <: AnyRef](constructor: Actor, props: Props = Props(), name: String = null, loader: ClassLoader = null)(implicit m: Manifest[R]): R =
typedActor.createProxy[R](this, typedActor.extractInterfaces(m.erasure), (ref: AtomVar[R]) constructor, props, Option(name), if (loader eq null) m.erasure.getClassLoader else loader)
/**
* Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself,
* to create TypedActor proxies, use typedActorOf
*/
def createProxy[R <: AnyRef](interfaces: Array[Class[_]], constructor: Creator[Actor], props: Props, loader: ClassLoader): R =
typedActor.createProxy(this, interfaces, (ref: AtomVar[R]) constructor.create, props, None, loader)
/**
* Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself,
* to create TypedActor proxies, use typedActorOf
*/
def createProxy[R <: AnyRef](interfaces: Array[Class[_]], constructor: Creator[Actor], props: Props, name: String, loader: ClassLoader): R =
typedActor.createProxy(this, interfaces, (ref: AtomVar[R]) constructor.create, props, Some(name), loader)
/**
* Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself,
* to create TypedActor proxies, use typedActorOf
*/
def createProxy[R <: AnyRef](interfaces: Array[Class[_]], constructor: Actor, props: Props, loader: ClassLoader): R =
typedActor.createProxy[R](this, interfaces, (ref: AtomVar[R]) constructor, props, None, loader)
/**
* Creates a proxy given the supplied Props, this is not a TypedActor, so you'll need to implement the MethodCall handling yourself,
* to create TypedActor proxies, use typedActorOf
*/
def createProxy[R <: AnyRef](interfaces: Array[Class[_]], constructor: Actor, props: Props, name: String, loader: ClassLoader): R =
typedActor.createProxy[R](this, interfaces, (ref: AtomVar[R]) constructor, props, Some(name), loader)
}
//TODO Document this class, not only in Scaladoc, but also in a dedicated typed-actor.rst, for both java and scala
/**
* A TypedActor in Akka is an implementation of the Active Objects Pattern, i.e. an object with asynchronous method dispatch
*
* It consists of 2 parts:
* The Interface
* The Implementation
*
* Given a combination of Interface and Implementation, a JDK Dynamic Proxy object with the Interface will be returned
*
* The semantics are as follows:
* any method in the Interface that returns Unit/void will use fire-and-forget semantics (same as Actor !)
* any method in the Interface that returns Option/JOption will use ask + block-with-timeout-return-none-if-timeout semantics
* any method in the Interface that returns anything else will use ask + block-with-timeout-throw-if-timeout semantics
*
* TypedActors need, just like Actors, to be stopped when they are no longer needed; use TypedActor.stop(proxy)
*/
class TypedActor(val settings: ActorSystem.Settings, var ser: Serialization) {
import TypedActor.MethodCall
/**
* Stops the underlying ActorRef for the supplied TypedActor proxy, if any, returns whether it could stop it or not
*/
def stop(proxy: AnyRef): Boolean = getActorRefFor(proxy) match {
case null false
case ref ref.stop; true
}
/**
* Retrieves the underlying ActorRef for the supplied TypedActor proxy, or null if none found
*/
def getActorRefFor(proxy: AnyRef): ActorRef = invocationHandlerFor(proxy) match {
case null null
case handler handler.actor
}
/**
* Returns whether the supplied AnyRef is a TypedActor proxy or not
*/
def isTypedActor(proxyOrNot: AnyRef): Boolean = invocationHandlerFor(proxyOrNot) ne null
/* Internal API */
private[akka] def invocationHandlerFor(typedActor_? : AnyRef): TypedActorInvocationHandler =
if ((typedActor_? ne null) && Proxy.isProxyClass(typedActor_?.getClass)) typedActor_? match {
case null null
case other Proxy.getInvocationHandler(other) match {
case null null
case handler: TypedActorInvocationHandler handler
case _ null
}
}
else null
private[akka] def createProxy[R <: AnyRef](supervisor: ActorRefFactory, interfaces: Array[Class[_]], constructor: (AtomVar[R]) Actor, props: Props, name: Option[String], loader: ClassLoader): R = {
val proxyVar = new AtomVar[R]
configureAndProxyLocalActorRef[R](supervisor, interfaces, proxyVar, props.withCreator(constructor(proxyVar)), name, loader)
}
private[akka] def createProxyAndTypedActor[R <: AnyRef, T <: R](supervisor: ActorRefFactory, interface: Class[_], constructor: T, props: Props, name: Option[String], loader: ClassLoader): R =
createProxy[R](supervisor, extractInterfaces(interface), (ref: AtomVar[R]) new TypedActor[R, T](ref, constructor), props, name, loader)
private[akka] def configureAndProxyLocalActorRef[T <: AnyRef](supervisor: ActorRefFactory, interfaces: Array[Class[_]], proxyVar: AtomVar[T], props: Props, name: Option[String], loader: ClassLoader): T = {
//Warning, do not change order of the following statements, it's some elaborate chicken-n-egg handling
val actorVar = new AtomVar[ActorRef](null)
val timeout = props.timeout match {
case Props.`defaultTimeout` settings.ActorTimeout
case x x
}
val proxy: T = Proxy.newProxyInstance(loader, interfaces, new TypedActorInvocationHandler(actorVar, timeout)).asInstanceOf[T]
proxyVar.set(proxy) // Chicken and egg situation we needed to solve, set the proxy so that we can set the self-reference inside each receive
val ref = if (name.isDefined) supervisor.actorOf(props, name.get) else supervisor.actorOf(props)
actorVar.set(ref) //Make sure the InvocationHandler gets ahold of the actor reference, this is not a problem since the proxy hasn't escaped this method yet
proxyVar.get
}
private[akka] def extractInterfaces(clazz: Class[_]): Array[Class[_]] = if (clazz.isInterface) Array[Class[_]](clazz) else clazz.getInterfaces
private[akka] class TypedActor[R <: AnyRef, T <: R](val proxyVar: AtomVar[R], createInstance: T) extends Actor {
val me = createInstance
override def preStart(): Unit = me match {
case l: PreStart l.preStart()
case _ super.preStart()
}
override def postStop(): Unit = me match {
case l: PostStop l.postStop()
case _ super.postStop()
}
override def preRestart(reason: Throwable, message: Option[Any]): Unit = me match {
case l: PreRestart l.preRestart(reason, message)
case _ super.preRestart(reason, message)
}
override def postRestart(reason: Throwable): Unit = me match {
case l: PostRestart l.postRestart(reason)
case _ super.postRestart(reason)
}
def receive = {
case m: MethodCall
TypedActor.selfReference set proxyVar.get
TypedActor.appReference set system
TypedActor.currentSystem set system
try {
if (m.isOneWay) m(me)
else {
@ -349,25 +343,73 @@ class TypedActor(val settings: ActorSystem.Settings, var ser: Serialization) {
sender ! m(me)
}
} catch {
case e: Exception sender ! Status.Failure(e)
case t: Throwable sender ! Status.Failure(t); throw t
}
}
} finally {
TypedActor.selfReference set null
TypedActor.appReference set null
TypedActor.currentSystem set null
}
}
}
private[akka] class TypedActorInvocationHandler(actorVar: AtomVar[ActorRef], timeout: Timeout) extends InvocationHandler {
/**
* Mix this into your TypedActor to be able to hook into its lifecycle
*/
trait PreStart {
/**
* User overridable callback.
* <p/>
* Is called when an Actor is started by invoking 'actor'.
*/
def preStart(): Unit = ()
}
/**
* Mix this into your TypedActor to be able to hook into its lifecycle
*/
trait PostStop {
/**
* User overridable callback.
* <p/>
* Is called when 'actor.stop()' is invoked.
*/
def postStop(): Unit = ()
}
/**
* Mix this into your TypedActor to be able to hook into its lifecycle
*/
trait PreRestart {
/**
* User overridable callback.
* <p/>
* Is called on a crashed Actor right BEFORE it is restarted to allow clean
* up of resources before Actor is terminated.
* By default it calls postStop()
*/
def preRestart(reason: Throwable, message: Option[Any]): Unit = ()
}
trait PostRestart {
/**
* User overridable callback.
* <p/>
* Is called right AFTER restart on the newly created Actor to allow reinitialization after an Actor crash.
* By default it calls preStart()
*/
def postRestart(reason: Throwable): Unit = ()
}
private[akka] class TypedActorInvocationHandler(extension: TypedActorExtension, actorVar: AtomVar[ActorRef], timeout: Timeout) extends InvocationHandler {
def actor = actorVar.get
def invoke(proxy: AnyRef, method: Method, args: Array[AnyRef]): AnyRef = method.getName match {
case "toString" actor.toString
case "equals" (args.length == 1 && (proxy eq args(0)) || actor == getActorRefFor(args(0))).asInstanceOf[AnyRef] //Force boxing of the boolean
case "equals" (args.length == 1 && (proxy eq args(0)) || actor == extension.getActorRefFor(args(0))).asInstanceOf[AnyRef] //Force boxing of the boolean
case "hashCode" actor.hashCode.asInstanceOf[AnyRef]
case _
MethodCall(ser, method, args) match {
MethodCall(method, args) match {
case m if m.isOneWay actor ! m; null //Null return value
case m if m.returnsFuture_? actor.?(m, timeout)
case m if m.returnsJOption_? || m.returnsOption_?
@ -382,3 +424,67 @@ class TypedActor(val settings: ActorSystem.Settings, var ser: Serialization) {
}
}
}
case class ContextualTypedActorFactory(typedActor: TypedActorExtension, actorFactory: ActorContext) extends TypedActorFactory {
override def getActorRefFor(proxy: AnyRef): ActorRef = typedActor.getActorRefFor(proxy)
override def isTypedActor(proxyOrNot: AnyRef): Boolean = typedActor.isTypedActor(proxyOrNot)
}
class TypedActorExtension(system: ActorSystemImpl) extends TypedActorFactory with Extension {
import TypedActor._ //Import the goodies from the companion object
protected def actorFactory: ActorRefFactory = system
protected def typedActor = this
val serialization = SerializationExtension(system)
val settings = system.settings
/**
* Retrieves the underlying ActorRef for the supplied TypedActor proxy, or null if none found
*/
def getActorRefFor(proxy: AnyRef): ActorRef = invocationHandlerFor(proxy) match {
case null null
case handler handler.actor
}
/**
* Returns whether the supplied AnyRef is a TypedActor proxy or not
*/
def isTypedActor(proxyOrNot: AnyRef): Boolean = invocationHandlerFor(proxyOrNot) ne null
// Private API
private[akka] def createProxy[R <: AnyRef](supervisor: ActorRefFactory, interfaces: Array[Class[_]], constructor: (AtomVar[R]) Actor, props: Props, name: Option[String], loader: ClassLoader): R = {
val proxyVar = new AtomVar[R]
configureAndProxyLocalActorRef[R](supervisor, interfaces, proxyVar, props.withCreator(constructor(proxyVar)), name, loader)
}
private[akka] def createProxyAndTypedActor[R <: AnyRef, T <: R](supervisor: ActorRefFactory, interface: Class[_], constructor: T, props: Props, name: Option[String], loader: ClassLoader): R =
createProxy[R](supervisor, extractInterfaces(interface), (ref: AtomVar[R]) new TypedActor[R, T](ref, constructor), props, name, loader)
private[akka] def configureAndProxyLocalActorRef[T <: AnyRef](supervisor: ActorRefFactory, interfaces: Array[Class[_]], proxyVar: AtomVar[T], props: Props, name: Option[String], loader: ClassLoader): T = {
//Warning, do not change order of the following statements, it's some elaborate chicken-n-egg handling
val actorVar = new AtomVar[ActorRef](null)
val timeout = props.timeout match {
case Props.`defaultTimeout` settings.ActorTimeout
case x x
}
val proxy: T = Proxy.newProxyInstance(loader, interfaces, new TypedActorInvocationHandler(this, actorVar, timeout)).asInstanceOf[T]
proxyVar.set(proxy) // Chicken and egg situation we needed to solve, set the proxy so that we can set the self-reference inside each receive
val ref = if (name.isDefined) supervisor.actorOf(props, name.get) else supervisor.actorOf(props)
actorVar.set(ref) //Make sure the InvocationHandler gets ahold of the actor reference, this is not a problem since the proxy hasn't escaped this method yet
proxyVar.get
}
private[akka] def extractInterfaces(clazz: Class[_]): Array[Class[_]] = if (clazz.isInterface) Array[Class[_]](clazz) else clazz.getInterfaces
private[akka] def invocationHandlerFor(typedActor_? : AnyRef): TypedActorInvocationHandler =
if ((typedActor_? ne null) && Proxy.isProxyClass(typedActor_?.getClass)) typedActor_? match {
case null null
case other Proxy.getInvocationHandler(other) match {
case null null
case handler: TypedActorInvocationHandler handler
case _ null
}
}
else null
}
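As a rough usage sketch of the factory and extension API above (the Squarer interface and its implementation are hypothetical; `system` is any ActorSystem in scope):

trait Squarer { def square(i: Int): Int }                        // hypothetical interface
class SquarerImpl extends Squarer { def square(i: Int) = i * i } // hypothetical implementation

val squarer: Squarer =
  TypedActor(system).typedActorOf(classOf[Squarer], classOf[SquarerImpl], Props())
val underlying = TypedActor(system).getActorRefFor(squarer)     // the backing ActorRef
TypedActor(system).stop(squarer)                                 // stop it when no longer needed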

View file

@ -40,13 +40,13 @@ abstract class Mailbox(val actor: ActorCell) extends MessageQueue with SystemMes
import Mailbox._
@volatile
protected var _status: Status = _ //0 by default
protected var _statusDoNotCallMeDirectly: Status = _ //0 by default
@volatile
protected var _systemQueue: SystemMessage = _ //null by default
protected var _systemQueueDoNotCallMeDirectly: SystemMessage = _ //null by default
@inline
final def status: Mailbox.Status = _status
final def status: Mailbox.Status = Unsafe.instance.getIntVolatile(this, AbstractMailbox.mailboxStatusOffset)
@inline
final def shouldProcessMessage: Boolean = (status & 3) == Open
@ -65,7 +65,8 @@ abstract class Mailbox(val actor: ActorCell) extends MessageQueue with SystemMes
Unsafe.instance.compareAndSwapInt(this, AbstractMailbox.mailboxStatusOffset, oldStatus, newStatus)
@inline
protected final def setStatus(newStatus: Status): Unit = _status = newStatus
protected final def setStatus(newStatus: Status): Unit =
Unsafe.instance.putIntVolatile(this, AbstractMailbox.mailboxStatusOffset, newStatus)
/**
* set new primary status Open. Caller does not need to worry about whether
@ -130,7 +131,8 @@ abstract class Mailbox(val actor: ActorCell) extends MessageQueue with SystemMes
/*
* AtomicReferenceFieldUpdater for system queue
*/
protected final def systemQueueGet: SystemMessage = _systemQueue
protected final def systemQueueGet: SystemMessage =
Unsafe.instance.getObjectVolatile(this, AbstractMailbox.systemMessageOffset).asInstanceOf[SystemMessage]
protected final def systemQueuePut(_old: SystemMessage, _new: SystemMessage): Boolean =
Unsafe.instance.compareAndSwapObject(this, AbstractMailbox.systemMessageOffset, _old, _new)

View file

@ -14,6 +14,7 @@ import akka.actor.Timeout
import akka.dispatch.FutureTimeoutException
import java.util.concurrent.atomic.AtomicInteger
import akka.actor.ActorRefProvider
import scala.util.control.NoStackTrace
object LoggingBus {
implicit def fromActorSystem(system: ActorSystem): LoggingBus = system.eventStream
@ -268,6 +269,7 @@ object Logging {
val AllLogLevels = Seq(ErrorLevel: AnyRef, WarningLevel, InfoLevel, DebugLevel).asInstanceOf[Seq[LogLevel]]
val errorFormat = "[ERROR] [%s] [%s] [%s] %s\n%s".intern
val errorFormatWithoutCause = "[ERROR] [%s] [%s] [%s] %s".intern
val warningFormat = "[WARN] [%s] [%s] [%s] %s".intern
val infoFormat = "[INFO] [%s] [%s] [%s] %s".intern
val debugFormat = "[DEBUG] [%s] [%s] [%s] %s".intern
@ -311,7 +313,10 @@ object Logging {
def level = ErrorLevel
}
object Error {
def apply(logSource: String, message: Any) = new Error(new EventHandlerException, logSource, message)
def apply(logSource: String, message: Any) = new Error(NoCause, logSource, message)
/** Null Object used for errors without cause Throwable */
object NoCause extends NoStackTrace
}
case class Warning(logSource: String, message: Any = "") extends LogEvent {
@ -363,13 +368,15 @@ object Logging {
}
}
def error(event: Error) =
println(errorFormat.format(
def error(event: Error) = {
val f = if (event.cause == Error.NoCause) errorFormatWithoutCause else errorFormat
println(f.format(
timestamp,
event.thread.getName,
event.logSource,
event.message,
stackTraceFor(event.cause)))
}
def warning(event: Warning) =
println(warningFormat.format(
@ -429,14 +436,14 @@ object Logging {
}
def stackTraceFor(e: Throwable) = {
if (e ne null) {
if ((e eq null) || e == Error.NoCause) {
""
} else {
import java.io.{ StringWriter, PrintWriter }
val sw = new StringWriter
val pw = new PrintWriter(sw)
e.printStackTrace(pw)
sw.toString
} else {
"[NO STACK TRACE]"
}
}
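A small sketch of how the NoCause null object interacts with the formatting above (log source and message are illustrative):

val event = Logging.Error("MySource", "something went wrong") // cause defaults to Error.NoCause
Logging.stackTraceFor(event.cause)                            // yields "" so no stack trace is printed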

View file

@ -14,6 +14,7 @@ import java.net.InetSocketAddress
object RemoteAddress {
def apply(host: String, port: Int): RemoteAddress = apply(new InetSocketAddress(host, port))
def apply(inetAddress: InetSocketAddress): RemoteAddress = inetAddress match {
case null null
case inet
@ -24,16 +25,24 @@ object RemoteAddress {
val portNo = inet.getPort
RemoteAddress(portNo, host)
}
def apply(address: String): RemoteAddress = {
val index = address.indexOf(":")
if (index < 1) throw new IllegalArgumentException(
"Remote address must be a string on the format [\"hostname:port\"], was [" + address + "]")
val hostname = address.substring(0, index)
val port = address.substring(index + 1, address.length).toInt
apply(new InetSocketAddress(hostname, port)) // want the fallback in this method
}
}
case class RemoteAddress private[remote] (port: Int, hostname: String) {
@transient
override lazy val toString = "" + hostname + ":" + port
}
object LocalOnly extends RemoteAddress(0, "local")
case class RemoteAddress private[akka] (port: Int, hostname: String) {
@transient
override lazy val toString = "" + hostname + ":" + port
}
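A quick sketch of the new string-parsing factory (values illustrative):

RemoteAddress("localhost:2552") // parses to RemoteAddress(2552, "localhost"); toString gives "localhost:2552"
RemoteAddress("localhost")      // throws IllegalArgumentException: the ":port" part is required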
class RemoteException(message: String) extends AkkaException(message)
trait RemoteModule {

View file

@ -9,7 +9,6 @@ import akka.actor._
import akka.config.ConfigurationException
import akka.dispatch.{ Future, MessageDispatcher }
import akka.util.{ ReflectiveAccess, Duration }
import java.net.InetSocketAddress
import java.lang.reflect.InvocationTargetException
import java.util.concurrent.atomic.{ AtomicReference, AtomicInteger }
@ -42,6 +41,11 @@ object RouterType {
*/
object ScatterGather extends RouterType
/**
* A RouterType that broadcasts the messages to all connections.
*/
object Broadcast extends RouterType
/**
* A RouterType that selects the connection based on the least amount of cpu usage
*/
@ -67,9 +71,9 @@ object RouterType {
* Contains the configuration to create local and clustered routed actor references.
* Routed ActorRef configuration object, this is thread safe and fully sharable.
*/
private[akka] case class RoutedProps(
routerFactory: () Router = RoutedProps.defaultRouterFactory,
connectionManager: ConnectionManager = new LocalConnectionManager(List()),
case class RoutedProps private[akka] (
routerFactory: () Router,
connectionManager: ConnectionManager,
timeout: Timeout = RoutedProps.defaultTimeout,
localOnly: Boolean = RoutedProps.defaultLocalOnly) {
@ -82,7 +86,6 @@ private[akka] case class RoutedProps(
object RoutedProps {
final val defaultTimeout = Timeout(Duration.MinusInf)
final val defaultRouterFactory = () new RoundRobinRouter
final val defaultLocalOnly = false
}
@ -264,12 +267,41 @@ trait BasicRouter extends Router {
private def throwNoConnectionsError = throw new RoutingException("No replica connections for router")
}
/**
* A Router that broadcasts a message to all its connections.
*/
class BroadcastRouter(implicit val dispatcher: MessageDispatcher, timeout: Timeout) extends BasicRouter with Serializable {
override def route(message: Any)(implicit sender: ActorRef) = {
connectionManager.connections.iterable foreach { connection
try {
connection.!(message)(sender) // we use original sender, so this is essentially a 'forward'
} catch {
case e: Exception
connectionManager.remove(connection)
throw e
}
}
}
//protected def gather[S, G >: S](results: Iterable[Future[S]]): Future[G] =
override def route[T](message: Any, timeout: Timeout): Future[T] = {
import Future._
implicit val t = timeout
val futures = connectionManager.connections.iterable map { connection
connection.?(message, timeout).asInstanceOf[Future[T]]
}
Future.firstCompletedOf(futures)
}
protected def next: Option[ActorRef] = None
}
/**
* A DirectRouter is a Router that only has a single connected actorRef and forwards all requests to that actorRef.
*
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
class DirectRouter extends BasicRouter {
class DirectRouter(implicit val dispatcher: MessageDispatcher, timeout: Timeout) extends BasicRouter {
private val state = new AtomicReference[DirectRouterState]
@ -311,7 +343,7 @@ class DirectRouter extends BasicRouter {
*
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
class RandomRouter extends BasicRouter {
class RandomRouter(implicit val dispatcher: MessageDispatcher, timeout: Timeout) extends BasicRouter {
import java.security.SecureRandom
private val state = new AtomicReference[RandomRouterState]
@ -353,7 +385,7 @@ class RandomRouter extends BasicRouter {
*
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
private[akka] class RoundRobinRouter extends BasicRouter {
class RoundRobinRouter(implicit val dispatcher: MessageDispatcher, timeout: Timeout) extends BasicRouter {
private val state = new AtomicReference[RoundRobinState]
@ -444,7 +476,7 @@ trait ScatterGatherRouter extends BasicRouter with Serializable {
* (wrapped into {@link Routing.Broadcast} and sent with "?" method). For the messages sent in a fire-forget
* mode, the router would behave as {@link RoundRobinRouter}
*/
class ScatterGatherFirstCompletedRouter(implicit val dispatcher: MessageDispatcher, timeout: Timeout) extends RoundRobinRouter with ScatterGatherRouter {
class ScatterGatherFirstCompletedRouter(implicit dispatcher: MessageDispatcher, timeout: Timeout) extends RoundRobinRouter with ScatterGatherRouter {
protected def gather[S, G >: S](results: Iterable[Future[S]]): Future[G] = Future.firstCompletedOf(results)
}

View file

@ -6,16 +6,59 @@ package akka.serialization
import akka.AkkaException
import akka.util.ReflectiveAccess
import akka.actor.{ ActorSystem, ActorSystemImpl }
import scala.util.DynamicVariable
import com.typesafe.config.{ ConfigRoot, ConfigParseOptions, ConfigFactory, Config }
import com.typesafe.config.Config._
import akka.config.ConfigurationException
import akka.actor.{ Extension, ActorSystem, ActorSystemImpl }
case class NoSerializerFoundException(m: String) extends AkkaException(m)
object Serialization {
// TODO ensure that these are always set (i.e. withValue()) when doing deserialization
val currentSystem = new DynamicVariable[ActorSystemImpl](null)
class Settings(cfg: Config) {
private def referenceConfig: Config =
ConfigFactory.parseResource(classOf[ActorSystem], "/akka-serialization-reference.conf",
ConfigParseOptions.defaults.setAllowMissing(false))
val config: ConfigRoot = ConfigFactory.emptyRoot("akka-serialization").withFallback(cfg).withFallback(referenceConfig).resolve()
import scala.collection.JavaConverters._
import config._
val Serializers: Map[String, String] = {
toStringMap(getConfig("akka.actor.serializers"))
}
val SerializationBindings: Map[String, Seq[String]] = {
val configPath = "akka.actor.serialization-bindings"
hasPath(configPath) match {
case false Map()
case true
val serializationBindings: Map[String, Seq[String]] = getConfig(configPath).toObject.unwrapped.asScala.toMap.map {
case (k: String, v: java.util.Collection[_]) (k -> v.asScala.toSeq.asInstanceOf[Seq[String]])
case invalid throw new ConfigurationException("Invalid serialization-bindings [%s]".format(invalid))
}
serializationBindings
}
}
private def toStringMap(mapConfig: Config): Map[String, String] =
mapConfig.toObject.unwrapped.asScala.toMap.map { case (k, v) (k, v.toString) }
}
}
/**
* Serialization module. Contains methods for serialization and deserialization as well as
* locating a Serializer for a particular class as defined in the mapping in the 'akka.conf' file.
*/
class Serialization(val system: ActorSystemImpl) {
class Serialization(val system: ActorSystemImpl) extends Extension {
import Serialization._
val settings = new Settings(system.applicationConfig)
//TODO document me
def serialize(o: AnyRef): Either[Exception, Array[Byte]] =
@ -27,7 +70,7 @@ class Serialization(val system: ActorSystemImpl) {
clazz: Class[_],
classLoader: Option[ClassLoader]): Either[Exception, AnyRef] =
try {
Serialization.system.withValue(system) {
currentSystem.withValue(system) {
Right(serializerFor(clazz).fromBinary(bytes, Some(clazz), classLoader))
}
} catch { case e: Exception Left(e) }
@ -63,15 +106,13 @@ class Serialization(val system: ActorSystemImpl) {
}
}
// serializers and bindings need to be lazy because Serialization is initialized from SerializationExtension, which is needed here
/**
* A Map of serializer from alias to implementation (class implementing akka.serialization.Serializer)
* By default always contains the following mapping: "default" -> akka.serialization.JavaSerializer
* But "default" can be overridden in config
*/
lazy val serializers: Map[String, Serializer] = {
val serializersConf = SerializationExtension(system).settings.Serializers
val serializersConf = settings.Serializers
for ((k: String, v: String) serializersConf)
yield k -> serializerOf(v).fold(throw _, identity)
}
@ -80,7 +121,7 @@ class Serialization(val system: ActorSystemImpl) {
* bindings is a Map whose keys = FQN of class that is serializable and values = the alias of the serializer to be used
*/
lazy val bindings: Map[String, String] = {
val configBindings = SerializationExtension(system).settings.SerializationBindings
val configBindings = settings.SerializationBindings
configBindings.foldLeft(Map[String, String]()) {
case (result, (k: String, vs: Seq[_]))
//All keys which are lists, take the Strings from them and Map them
@ -103,8 +144,3 @@ class Serialization(val system: ActorSystemImpl) {
Map(NullSerializer.identifier -> NullSerializer) ++ serializers map { case (_, v) (v.identifier, v) }
}
object Serialization {
// TODO ensure that these are always set (i.e. withValue()) when doing deserialization
val system = new DynamicVariable[ActorSystemImpl](null)
}

View file

@ -3,77 +3,9 @@
*/
package akka.serialization
import akka.actor.ActorSystem
import akka.actor.ExtensionKey
import akka.actor.Extension
import akka.actor.ActorSystemImpl
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import com.typesafe.config.ConfigParseOptions
import com.typesafe.config.ConfigRoot
import akka.config.ConfigurationException
object SerializationExtensionKey extends ExtensionKey[SerializationExtension]
object SerializationExtension {
def apply(system: ActorSystem): SerializationExtension = {
if (!system.hasExtension(SerializationExtensionKey)) {
system.registerExtension(new SerializationExtension)
}
system.extension(SerializationExtensionKey)
}
class Settings(cfg: Config) {
private def referenceConfig: Config =
ConfigFactory.parseResource(classOf[ActorSystem], "/akka-serialization-reference.conf",
ConfigParseOptions.defaults.setAllowMissing(false))
val config: ConfigRoot = ConfigFactory.emptyRoot("akka-serialization").withFallback(cfg).withFallback(referenceConfig).resolve()
import scala.collection.JavaConverters._
import config._
val Serializers: Map[String, String] = {
toStringMap(getConfig("akka.actor.serializers"))
}
val SerializationBindings: Map[String, Seq[String]] = {
val configPath = "akka.actor.serialization-bindings"
hasPath(configPath) match {
case false Map()
case true
val serializationBindings: Map[String, Seq[String]] = getConfig(configPath).toObject.unwrapped.asScala.toMap.map {
case (k: String, v: java.util.Collection[_]) (k -> v.asScala.toSeq.asInstanceOf[Seq[String]])
case invalid throw new ConfigurationException("Invalid serialization-bindings [%s]".format(invalid))
}
serializationBindings
}
}
private def toStringMap(mapConfig: Config): Map[String, String] = {
mapConfig.toObject.unwrapped.asScala.toMap.map { entry
(entry._1 -> entry._2.toString)
}
}
}
}
class SerializationExtension extends Extension[SerializationExtension] {
import SerializationExtension._
@volatile
private var _settings: Settings = _
@volatile
private var _serialization: Serialization = _
def serialization = _serialization
def key = SerializationExtensionKey
def init(system: ActorSystemImpl) {
_settings = new Settings(system.applicationConfig)
_serialization = new Serialization(system)
}
def settings: Settings = _settings
import akka.actor.{ ExtensionId, ExtensionIdProvider, ActorSystemImpl }
object SerializationExtension extends ExtensionId[Serialization] with ExtensionIdProvider {
override def lookup = SerializationExtension
override def createExtension(system: ActorSystemImpl): Serialization = new Serialization(system)
}
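With Serialization now being the extension payload itself, lookup and round-tripping might look like this sketch (message value illustrative; `system` is any ActorSystem in scope):

val serialization = SerializationExtension(system) // registers the extension on first access
val bytes = serialization.serialize("hello").fold(throw _, identity)
val back  = serialization.deserialize(bytes, classOf[String], None).fold(throw _, identity)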

View file

@ -142,7 +142,7 @@ akka.dev.conf:
::
akka {
event-handler-level = "DEBUG"
loglevel = "DEBUG"
}
The mode option works in the same way when using configuration files in ``AKKA_HOME/config/`` directory.
@ -162,7 +162,7 @@ akka.dev.conf:
include "akka.conf"
akka {
event-handler-level = "DEBUG"
loglevel = "DEBUG"
}
.. _-Dakka.output.config.source:

View file

@ -16,7 +16,7 @@ You can configure which event handlers should be registered at boot time. That i
akka {
# event handlers to register at boot time (EventHandler$DefaultListener logs to STDOUT)
event-handlers = ["akka.event.EventHandler$DefaultListener"]
event-handler-level = "DEBUG" # Options: ERROR, WARNING, INFO, DEBUG
loglevel = "DEBUG" # Options: ERROR, WARNING, INFO, DEBUG
}
The default one logs to STDOUT and is registered by default. It is not intended to be used for production. There is also an :ref:`slf4j` event handler available in the 'akka-slf4j' module.

View file

@ -4,11 +4,11 @@ SLF4J
=====
This module is available in the 'akka-slf4j.jar'. It has one single dependency: the slf4j-api jar. At runtime you
also need a SLF4J backend, we recommend:
also need a SLF4J backend, we recommend `Logback <http://logback.qos.ch/>`_:
.. code-block:: scala
lazy val logback = "ch.qos.logback" % "logback-classic" % "0.9.28" % "runtime"
lazy val logback = "ch.qos.logback" % "logback-classic" % "1.0.0" % "runtime"
Event Handler
@ -20,8 +20,22 @@ This module includes a SLF4J Event Handler that works with Akka's standard Event
akka {
event-handlers = ["akka.event.slf4j.Slf4jEventHandler"]
event-handler-level = "DEBUG"
loglevel = "DEBUG"
}
Read more about how to use the :ref:`event-handler`.
Logging thread in MDC
---------------------
Since the logging is done asynchronously the thread in which the logging was performed is captured in
Mapped Diagnostic Context (MDC) with attribute name ``sourceThread``.
With Logback the thread name is available with ``%X{sourceThread}`` specifier within the pattern layout configuration::
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<layout>
<pattern>%date{ISO8601} %-5level %logger{36} %X{sourceThread} - %msg%n</pattern>
</layout>
</appender>

View file

@ -750,7 +750,7 @@ All these messages are logged at ``DEBUG`` level. To summarize, you can enable
full logging of actor activities using this configuration fragment::
akka {
event-handler-level = "DEBUG"
loglevel = "DEBUG"
actor {
debug {
receive = "true"

View file

@ -21,7 +21,7 @@ class BeanstalkBasedMailboxException(message: String) extends AkkaException(mess
*/
class BeanstalkBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) with DurableMessageSerialization {
private val settings = BeanstalkBasedMailboxExtension(owner.system).settings
private val settings = BeanstalkBasedMailboxExtension(owner.system)
private val messageSubmitDelaySeconds = settings.MessageSubmitDelay.toSeconds.toInt
private val messageTimeToLiveSeconds = settings.MessageTimeToLive.toSeconds.toInt

View file

@ -3,56 +3,32 @@
*/
package akka.actor.mailbox
import akka.actor.ActorSystem
import akka.actor.ExtensionKey
import akka.actor.Extension
import akka.actor.ActorSystemImpl
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import com.typesafe.config.ConfigParseOptions
import com.typesafe.config.ConfigRoot
import akka.util.Duration
import java.util.concurrent.TimeUnit.MILLISECONDS
import akka.actor._
object BeanstalkBasedMailboxExtensionKey extends ExtensionKey[BeanstalkBasedMailboxExtension]
object BeanstalkBasedMailboxExtension {
def apply(system: ActorSystem): BeanstalkBasedMailboxExtension = {
if (!system.hasExtension(BeanstalkBasedMailboxExtensionKey)) {
system.registerExtension(new BeanstalkBasedMailboxExtension)
}
system.extension(BeanstalkBasedMailboxExtensionKey)
}
class Settings(cfg: Config) {
private def referenceConfig: Config =
ConfigFactory.parseResource(classOf[ActorSystem], "/akka-beanstalk-mailbox-reference.conf",
ConfigParseOptions.defaults.setAllowMissing(false))
val config: ConfigRoot = ConfigFactory.emptyRoot("akka-beanstalk-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve()
import config._
val Hostname = getString("akka.actor.mailbox.beanstalk.hostname")
val Port = getInt("akka.actor.mailbox.beanstalk.port")
val ReconnectWindow = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.reconnect-window"), MILLISECONDS)
val MessageSubmitDelay = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.message-submit-delay"), MILLISECONDS)
val MessageSubmitTimeout = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.message-submit-timeout"), MILLISECONDS)
val MessageTimeToLive = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.message-time-to-live"), MILLISECONDS)
}
object BeanstalkBasedMailboxExtension extends ExtensionId[BeanstalkMailboxSettings] with ExtensionIdProvider {
def lookup() = this
def createExtension(system: ActorSystemImpl) = new BeanstalkMailboxSettings(system.applicationConfig)
}
class BeanstalkBasedMailboxExtension extends Extension[BeanstalkBasedMailboxExtension] {
import BeanstalkBasedMailboxExtension._
@volatile
private var _settings: Settings = _
class BeanstalkMailboxSettings(cfg: Config) extends Extension {
private def referenceConfig: Config =
ConfigFactory.parseResource(classOf[ActorSystem], "/akka-beanstalk-mailbox-reference.conf",
ConfigParseOptions.defaults.setAllowMissing(false))
val config: ConfigRoot = ConfigFactory.emptyRoot("akka-beanstalk-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve()
def key = BeanstalkBasedMailboxExtensionKey
import config._
def init(system: ActorSystemImpl) {
_settings = new Settings(system.applicationConfig)
}
def settings: Settings = _settings
val Hostname = getString("akka.actor.mailbox.beanstalk.hostname")
val Port = getInt("akka.actor.mailbox.beanstalk.port")
val ReconnectWindow = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.reconnect-window"), MILLISECONDS)
val MessageSubmitDelay = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.message-submit-delay"), MILLISECONDS)
val MessageSubmitTimeout = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.message-submit-timeout"), MILLISECONDS)
val MessageTimeToLive = Duration(getMilliseconds("akka.actor.mailbox.beanstalk.message-time-to-live"), MILLISECONDS)
}
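As an aside (mirroring the call-site changes elsewhere in this diff, not new code in the commit): with the settings object now being the extension itself, lookups lose one level of indirection::

  // before this commit: BeanstalkBasedMailboxExtension(owner.system).settings.Hostname
  // after this commit:
  val settings = BeanstalkBasedMailboxExtension(owner.system)
  val host = settings.Hostname   // e.g. used when connecting the beanstalk client
  val port = settings.Port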

View file

@ -3,63 +3,39 @@
*/
package akka.actor.mailbox
import akka.actor.ActorSystem
import akka.actor.ExtensionKey
import akka.actor.Extension
import akka.actor.ActorSystemImpl
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import com.typesafe.config.ConfigParseOptions
import com.typesafe.config.ConfigRoot
import akka.util.Duration
import java.util.concurrent.TimeUnit.MILLISECONDS
import akka.actor._
object FileBasedMailboxExtensionKey extends ExtensionKey[FileBasedMailboxExtension]
object FileBasedMailboxExtension {
def apply(system: ActorSystem): FileBasedMailboxExtension = {
if (!system.hasExtension(FileBasedMailboxExtensionKey)) {
system.registerExtension(new FileBasedMailboxExtension)
}
system.extension(FileBasedMailboxExtensionKey)
}
class Settings(cfg: Config) {
private def referenceConfig: Config =
ConfigFactory.parseResource(classOf[ActorSystem], "/akka-file-mailbox-reference.conf",
ConfigParseOptions.defaults.setAllowMissing(false))
val config: ConfigRoot = ConfigFactory.emptyRoot("akka-file-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve()
import config._
val QueuePath = getString("akka.actor.mailbox.file-based.directory-path")
val MaxItems = getInt("akka.actor.mailbox.file-based.max-items")
val MaxSize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-size")
val MaxItemSize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-item-size")
val MaxAge = Duration(getMilliseconds("akka.actor.mailbox.file-based.max-age"), MILLISECONDS)
val MaxJournalSize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-journal-size")
val MaxMemorySize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-memory-size")
val MaxJournalOverflow = getInt("akka.actor.mailbox.file-based.max-journal-overflow")
val MaxJournalSizeAbsolute = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-journal-size-absolute")
val DiscardOldWhenFull = getBoolean("akka.actor.mailbox.file-based.discard-old-when-full")
val KeepJournal = getBoolean("akka.actor.mailbox.file-based.keep-journal")
val SyncJournal = getBoolean("akka.actor.mailbox.file-based.sync-journal")
}
object FileBasedMailboxExtension extends ExtensionId[FileBasedMailboxSettings] with ExtensionIdProvider {
def lookup() = this
def createExtension(system: ActorSystemImpl) = new FileBasedMailboxSettings(system.applicationConfig)
}
class FileBasedMailboxExtension extends Extension[FileBasedMailboxExtension] {
import FileBasedMailboxExtension._
@volatile
private var _settings: Settings = _
class FileBasedMailboxSettings(cfg: Config) extends Extension {
private def referenceConfig: Config =
ConfigFactory.parseResource(classOf[ActorSystem], "/akka-file-mailbox-reference.conf",
ConfigParseOptions.defaults.setAllowMissing(false))
val config: ConfigRoot = ConfigFactory.emptyRoot("akka-file-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve()
def key = FileBasedMailboxExtensionKey
import config._
def init(system: ActorSystemImpl) {
_settings = new Settings(system.applicationConfig)
}
val QueuePath = getString("akka.actor.mailbox.file-based.directory-path")
def settings: Settings = _settings
val MaxItems = getInt("akka.actor.mailbox.file-based.max-items")
val MaxSize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-size")
val MaxItemSize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-item-size")
val MaxAge = Duration(getMilliseconds("akka.actor.mailbox.file-based.max-age"), MILLISECONDS)
val MaxJournalSize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-journal-size")
val MaxMemorySize = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-memory-size")
val MaxJournalOverflow = getInt("akka.actor.mailbox.file-based.max-journal-overflow")
val MaxJournalSizeAbsolute = getMemorySizeInBytes("akka.actor.mailbox.file-based.max-journal-size-absolute")
val DiscardOldWhenFull = getBoolean("akka.actor.mailbox.file-based.discard-old-when-full")
val KeepJournal = getBoolean("akka.actor.mailbox.file-based.keep-journal")
val SyncJournal = getBoolean("akka.actor.mailbox.file-based.sync-journal")
}

View file

@ -14,7 +14,7 @@ class FileBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) with
val log = Logging(system, "FileBasedMailbox")
private val settings = FileBasedMailboxExtension(owner.system).settings
private val settings = FileBasedMailboxExtension(owner.system)
val queuePath = settings.QueuePath
private val queue = try {

View file

@ -22,7 +22,7 @@ import scala.collection.mutable
import akka.event.LoggingAdapter
import akka.util.Duration
import java.util.concurrent.TimeUnit
import akka.actor.mailbox.FileBasedMailboxExtension
import akka.actor.mailbox.FileBasedMailboxSettings
// a config value that's backed by a global setting but may be locally overridden
class OverlaySetting[T](base: ⇒ T) {
@ -34,7 +34,7 @@ class OverlaySetting[T](base: ⇒ T) {
def apply() = local.getOrElse(base)
}
class PersistentQueue(persistencePath: String, val name: String, val settings: FileBasedMailboxExtension.Settings, log: LoggingAdapter) {
class PersistentQueue(persistencePath: String, val name: String, val settings: FileBasedMailboxSettings, log: LoggingAdapter) {
private case object ItemArrived
@ -127,7 +127,7 @@ class PersistentQueue(persistencePath: String, val name: String, val settings: F
configure(settings)
def configure(settings: FileBasedMailboxExtension.Settings) = synchronized {
def configure(settings: FileBasedMailboxSettings) = synchronized {
maxItems set Some(settings.MaxItems)
maxSize set Some(settings.MaxSize)
maxItemSize set Some(settings.MaxItemSize)

View file

@ -21,11 +21,11 @@ import java.io.File
import java.util.concurrent.CountDownLatch
import scala.collection.mutable
import akka.event.LoggingAdapter
import akka.actor.mailbox.FileBasedMailboxExtension
import akka.actor.mailbox.FileBasedMailboxSettings
class InaccessibleQueuePath extends Exception("Inaccessible queue path: Must be a directory and writable")
class QueueCollection(queueFolder: String, settings: FileBasedMailboxExtension.Settings, log: LoggingAdapter) {
class QueueCollection(queueFolder: String, settings: FileBasedMailboxSettings, log: LoggingAdapter) {
private val path = new File(queueFolder)
if (!path.isDirectory) {

View file

@ -6,7 +6,7 @@ import org.apache.commons.io.FileUtils
class FileBasedMailboxSpec extends DurableMailboxSpec("File", FileDurableMailboxType) {
def clean {
val queuePath = FileBasedMailboxExtension(system).settings.QueuePath
val queuePath = FileBasedMailboxExtension(system).QueuePath
FileUtils.deleteDirectory(new java.io.File(queuePath))
}

View file

@ -31,7 +31,7 @@ class MongoBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) {
implicit val mailboxBSONSer = new BSONSerializableMailbox(system)
implicit val safeWrite = WriteConcern.Safe // TODO - Replica Safe when appropriate!
private val settings = MongoBasedMailboxExtension(owner.system).settings
private val settings = MongoBasedMailboxExtension(owner.system)
val log = Logging(system, "MongoBasedMailbox")

View file

@ -3,54 +3,30 @@
*/
package akka.actor.mailbox
import akka.actor.ActorSystem
import akka.actor.ExtensionKey
import akka.actor.Extension
import akka.actor.ActorSystemImpl
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import com.typesafe.config.ConfigParseOptions
import com.typesafe.config.ConfigRoot
import akka.util.Duration
import java.util.concurrent.TimeUnit.MILLISECONDS
import akka.actor._
object MongoBasedMailboxExtensionKey extends ExtensionKey[MongoBasedMailboxExtension]
object MongoBasedMailboxExtension {
def apply(system: ActorSystem): MongoBasedMailboxExtension = {
if (!system.hasExtension(MongoBasedMailboxExtensionKey)) {
system.registerExtension(new MongoBasedMailboxExtension)
}
system.extension(MongoBasedMailboxExtensionKey)
}
class Settings(cfg: Config) {
private def referenceConfig: Config =
ConfigFactory.parseResource(classOf[ActorSystem], "/akka-mongo-mailbox-reference.conf",
ConfigParseOptions.defaults.setAllowMissing(false))
val config: ConfigRoot = ConfigFactory.emptyRoot("akka-mongo-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve()
import config._
val UriConfigKey = "akka.actor.mailbox.mongodb.uri"
val MongoURI = if (config.hasPath(UriConfigKey)) Some(config.getString(UriConfigKey)) else None
val WriteTimeout = Duration(config.getMilliseconds("akka.actor.mailbox.mongodb.timeout.write"), MILLISECONDS)
val ReadTimeout = Duration(config.getMilliseconds("akka.actor.mailbox.mongodb.timeout.read"), MILLISECONDS)
}
object MongoBasedMailboxExtension extends ExtensionId[MongoBasedMailboxSettings] with ExtensionIdProvider {
def lookup() = this
def createExtension(system: ActorSystemImpl) = new MongoBasedMailboxSettings(system.applicationConfig)
}
class MongoBasedMailboxExtension extends Extension[MongoBasedMailboxExtension] {
import MongoBasedMailboxExtension._
@volatile
private var _settings: Settings = _
class MongoBasedMailboxSettings(cfg: Config) extends Extension {
private def referenceConfig: Config =
ConfigFactory.parseResource(classOf[ActorSystem], "/akka-mongo-mailbox-reference.conf",
ConfigParseOptions.defaults.setAllowMissing(false))
val config: ConfigRoot = ConfigFactory.emptyRoot("akka-mongo-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve()
def key = MongoBasedMailboxExtensionKey
import config._
def init(system: ActorSystemImpl) {
_settings = new Settings(system.applicationConfig)
}
def settings: Settings = _settings
val UriConfigKey = "akka.actor.mailbox.mongodb.uri"
val MongoURI = if (config.hasPath(UriConfigKey)) Some(config.getString(UriConfigKey)) else None
val WriteTimeout = Duration(config.getMilliseconds("akka.actor.mailbox.mongodb.timeout.write"), MILLISECONDS)
val ReadTimeout = Duration(config.getMilliseconds("akka.actor.mailbox.mongodb.timeout.read"), MILLISECONDS)
}

View file

@ -18,7 +18,7 @@ class RedisBasedMailboxException(message: String) extends AkkaException(message)
*/
class RedisBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) with DurableMessageSerialization {
private val settings = RedisBasedMailboxExtension(owner.system).settings
private val settings = RedisBasedMailboxExtension(owner.system)
@volatile
private var clients = connect() // returns a RedisClientPool for multiple asynchronous message handling

View file

@ -3,50 +3,25 @@
*/
package akka.actor.mailbox
import akka.actor.ActorSystem
import akka.actor.ExtensionKey
import akka.actor.Extension
import akka.actor.ActorSystemImpl
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import com.typesafe.config.ConfigParseOptions
import com.typesafe.config.ConfigRoot
import akka.actor._
object RedisBasedMailboxExtensionKey extends ExtensionKey[RedisBasedMailboxExtension]
object RedisBasedMailboxExtension {
def apply(system: ActorSystem): RedisBasedMailboxExtension = {
if (!system.hasExtension(RedisBasedMailboxExtensionKey)) {
system.registerExtension(new RedisBasedMailboxExtension)
}
system.extension(RedisBasedMailboxExtensionKey)
}
class Settings(cfg: Config) {
private def referenceConfig: Config =
ConfigFactory.parseResource(classOf[ActorSystem], "/akka-redis-mailbox-reference.conf",
ConfigParseOptions.defaults.setAllowMissing(false))
val config: ConfigRoot = ConfigFactory.emptyRoot("akka-redis-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve()
import config._
val Hostname = getString("akka.actor.mailbox.redis.hostname")
val Port = getInt("akka.actor.mailbox.redis.port")
}
object RedisBasedMailboxExtension extends ExtensionId[RedisBasedMailboxSettings] with ExtensionIdProvider {
def lookup() = this
def createExtension(system: ActorSystemImpl) = new RedisBasedMailboxSettings(system.applicationConfig)
}
class RedisBasedMailboxExtension extends Extension[RedisBasedMailboxExtension] {
import RedisBasedMailboxExtension._
@volatile
private var _settings: Settings = _
class RedisBasedMailboxSettings(cfg: Config) extends Extension {
private def referenceConfig: Config =
ConfigFactory.parseResource(classOf[ActorSystem], "/akka-redis-mailbox-reference.conf",
ConfigParseOptions.defaults.setAllowMissing(false))
val config: ConfigRoot = ConfigFactory.emptyRoot("akka-redis-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve()
def key = RedisBasedMailboxExtensionKey
def init(system: ActorSystemImpl) {
_settings = new Settings(system.applicationConfig)
}
def settings: Settings = _settings
import config._
val Hostname = getString("akka.actor.mailbox.redis.hostname")
val Port = getInt("akka.actor.mailbox.redis.port")
}

View file

@ -22,7 +22,7 @@ class ZooKeeperBasedMailboxException(message: String) extends AkkaException(mess
*/
class ZooKeeperBasedMailbox(val owner: ActorCell) extends DurableMailbox(owner) with DurableMessageSerialization {
private val settings = ZooKeeperBasedMailboxExtension(owner.system).settings
private val settings = ZooKeeperBasedMailboxExtension(owner.system)
val queueNode = "/queues"
val queuePathTemplate = queueNode + "/%s"

View file

@ -3,54 +3,29 @@
*/
package akka.actor.mailbox
import akka.actor.ActorSystem
import akka.actor.ExtensionKey
import akka.actor.Extension
import akka.actor.ActorSystemImpl
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import com.typesafe.config.ConfigParseOptions
import com.typesafe.config.ConfigRoot
import akka.util.Duration
import java.util.concurrent.TimeUnit.MILLISECONDS
import akka.actor._
object ZooKeeperBasedMailboxExtensionKey extends ExtensionKey[ZooKeeperBasedMailboxExtension]
object ZooKeeperBasedMailboxExtension {
def apply(system: ActorSystem): ZooKeeperBasedMailboxExtension = {
if (!system.hasExtension(ZooKeeperBasedMailboxExtensionKey)) {
system.registerExtension(new ZooKeeperBasedMailboxExtension)
}
system.extension(ZooKeeperBasedMailboxExtensionKey)
}
class Settings(cfg: Config) {
private def referenceConfig: Config =
ConfigFactory.parseResource(classOf[ActorSystem], "/akka-zookeeper-mailbox-reference.conf",
ConfigParseOptions.defaults.setAllowMissing(false))
val config: ConfigRoot = ConfigFactory.emptyRoot("akka-zookeeper-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve()
import config._
val ZkServerAddresses = getString("akka.actor.mailbox.zookeeper.server-addresses")
val SessionTimeout = Duration(getMilliseconds("akka.actor.mailbox.zookeeper.session-timeout"), MILLISECONDS)
val ConnectionTimeout = Duration(getMilliseconds("akka.actor.mailbox.zookeeper.connection-timeout"), MILLISECONDS)
val BlockingQueue = getBoolean("akka.actor.mailbox.zookeeper.blocking-queue")
}
object ZooKeeperBasedMailboxExtension extends ExtensionId[ZooKeeperBasedMailboxSettings] with ExtensionIdProvider {
def lookup() = this
def createExtension(system: ActorSystemImpl) = new ZooKeeperBasedMailboxSettings(system.applicationConfig)
}
class ZooKeeperBasedMailboxSettings(cfg: Config) extends Extension {
private def referenceConfig: Config =
ConfigFactory.parseResource(classOf[ActorSystem], "/akka-zookeeper-mailbox-reference.conf",
ConfigParseOptions.defaults.setAllowMissing(false))
val config: ConfigRoot = ConfigFactory.emptyRoot("akka-zookeeper-mailbox").withFallback(cfg).withFallback(referenceConfig).resolve()
class ZooKeeperBasedMailboxExtension extends Extension[ZooKeeperBasedMailboxExtension] {
import ZooKeeperBasedMailboxExtension._
@volatile
private var _settings: Settings = _
import config._
def key = ZooKeeperBasedMailboxExtensionKey
def init(system: ActorSystemImpl) {
_settings = new Settings(system.applicationConfig)
}
def settings: Settings = _settings
val ZkServerAddresses = getString("akka.actor.mailbox.zookeeper.server-addresses")
val SessionTimeout = Duration(getMilliseconds("akka.actor.mailbox.zookeeper.session-timeout"), MILLISECONDS)
val ConnectionTimeout = Duration(getMilliseconds("akka.actor.mailbox.zookeeper.connection-timeout"), MILLISECONDS)
val BlockingQueue = getBoolean("akka.actor.mailbox.zookeeper.blocking-queue")
}

View file

@ -10,8 +10,8 @@ akka {
remote {
# FIXME rename to transport
layer = "akka.cluster.netty.NettyRemoteSupport"
use-compression = off
use-compression = off
secure-cookie = "" # Generate your own with '$AKKA_HOME/scripts/generate_config_with_secure_cookie.sh'
# or using 'akka.util.Crypt.generateSecureCookie'
@ -50,28 +50,10 @@ akka {
reconnection-time-window = 600s # Maximum time window that a client should try to reconnect for
}
}
// TODO cluster config will go into akka-cluster-reference.conf when we enable that module
cluster {
name = "test-cluster"
name = "default-cluster"
nodename = ""
zookeeper-server-addresses = "localhost:2181" # comma-separated list of '<hostname>:<port>' elements
max-time-to-wait-until-connected = 30s
session-timeout = 60s
connection-timeout = 60s
include-ref-node-in-replica-set = on # Can a replica be instantiated on the same node as the cluster reference to the actor
# Default: on
log-directory = "_akka_cluster" # Where ZooKeeper should store the logs and data files
replication {
digest-type = "MAC" # Options: CRC32 (cheap & unsafe), MAC (expensive & secure using password)
password = "secret" # FIXME: store open in file?
ensemble-size = 3
quorum-size = 2
snapshot-frequency = 1000 # The number of messages that should be logged between every actor snapshot
timeout = 30s # Timeout for asynchronous (write-behind) operations
}
seed-nodes = []
}
}

View file

@ -27,8 +27,8 @@ class AccrualFailureDetector(val threshold: Int = 8, val maxSampleSize: Int = 10
def this(system: ActorSystem) {
this(
RemoteExtension(system).settings.FailureDetectorThreshold,
RemoteExtension(system).settings.FailureDetectorMaxSampleSize)
RemoteExtension(system).FailureDetectorThreshold,
RemoteExtension(system).FailureDetectorMaxSampleSize)
}
private final val PhiFactor = 1.0 / math.log(10.0)
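For context only (an assumption based on the exponential-distribution simplification commonly used for accrual failure detectors; the full method is not shown in this hunk): ``PhiFactor`` converts a natural-log exponent into a base-10 φ value::

  // assumed sketch: with heartbeat inter-arrival times modelled as exponential with
  // the observed mean, P(still no heartbeat after t) = exp(-t / mean), and
  // phi = -log10(exp(-t / mean)) = (t / mean) * (1 / ln 10) = PhiFactor * t / mean
  def phi(timeSinceLastHeartbeat: Double, meanInterval: Double): Double =
    (1.0 / math.log(10.0)) * timeSinceLastHeartbeat / meanInterval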

View file

@ -11,13 +11,17 @@ import akka.util.duration._
import akka.util.Duration
import akka.remote.RemoteProtocol._
import akka.remote.RemoteProtocol.RemoteSystemDaemonMessageType._
import akka.config.ConfigurationException
import akka.serialization.SerializationExtension
import java.util.concurrent.atomic.AtomicReference
import java.security.SecureRandom
import System.{ currentTimeMillis ⇒ newTimestamp }
import scala.collection.immutable.Map
import scala.annotation.tailrec
import com.google.protobuf.ByteString
import akka.serialization.SerializationExtension
/**
* Interface for node membership change listener.
@ -36,9 +40,8 @@ case class Gossip(
availableNodes: Set[RemoteAddress] = Set.empty[RemoteAddress],
unavailableNodes: Set[RemoteAddress] = Set.empty[RemoteAddress])
// ====== START - NEW GOSSIP IMPLEMENTATION ======
/*
// ====== NEW GOSSIP IMPLEMENTATION ======
case class Gossip(
version: VectorClock,
node: RemoteAddress,
@ -74,6 +77,7 @@ case class Gossip(
changes: Vector[VNodeMod],
status: PendingPartitioningStatus)
*/
// ====== END - NEW GOSSIP IMPLEMENTATION ======
/**
* This module is responsible for Gossiping cluster information. The abstraction maintains the list of live
@ -102,11 +106,17 @@ class Gossiper(remote: Remote) {
private val system = remote.system
private val remoteExtension = RemoteExtension(system)
private val serializationExtension = SerializationExtension(system)
private val serialization = SerializationExtension(system)
private val log = Logging(system, "Gossiper")
private val failureDetector = remote.failureDetector
private val connectionManager = new RemoteConnectionManager(system, remote, Map.empty[RemoteAddress, ActorRef])
private val seeds = Set(address) // FIXME read in list of seeds from config
private val seeds = {
val seeds = remoteExtension.SeedNodes
if (seeds.isEmpty) throw new ConfigurationException(
"At least one seed node must be defined in the configuration [akka.cluster.seed-nodes]")
else seeds
}
private val address = system.asInstanceOf[ActorSystemImpl].provider.rootPath.remoteAddress
private val nodeFingerprint = address.##
@ -238,7 +248,7 @@ class Gossiper(remote: Remote) {
throw new IllegalStateException("Connection for [" + peer + "] is not set up"))
try {
(connection ? (toRemoteMessage(newGossip), remoteExtension.settings.RemoteSystemDaemonAckTimeout)).as[Status] match {
(connection ? (toRemoteMessage(newGossip), remoteExtension.RemoteSystemDaemonAckTimeout)).as[Status] match {
case Some(Success(receiver)) ⇒
log.debug("Gossip sent to [{}] was successfully received", receiver)
@ -300,7 +310,7 @@ class Gossiper(remote: Remote) {
}
private def toRemoteMessage(gossip: Gossip): RemoteProtocol.RemoteSystemDaemonMessageProtocol = {
val gossipAsBytes = serializationExtension.serialization.serialize(gossip) match {
val gossipAsBytes = serialization.serialize(gossip) match {
case Left(error) ⇒ throw error
case Right(bytes) ⇒ bytes
}

View file

@ -14,13 +14,13 @@ object MessageSerializer {
def deserialize(system: ActorSystem, messageProtocol: MessageProtocol, classLoader: Option[ClassLoader] = None): AnyRef = {
val clazz = loadManifest(classLoader, messageProtocol)
SerializationExtension(system).serialization.deserialize(messageProtocol.getMessage.toByteArray,
SerializationExtension(system).deserialize(messageProtocol.getMessage.toByteArray,
clazz, classLoader).fold(x ⇒ throw x, identity)
}
def serialize(system: ActorSystem, message: AnyRef): MessageProtocol = {
val builder = MessageProtocol.newBuilder
val bytes = SerializationExtension(system).serialization.serialize(message).fold(x ⇒ throw x, identity)
val bytes = SerializationExtension(system).serialize(message).fold(x ⇒ throw x, identity)
builder.setMessage(ByteString.copyFrom(bytes))
builder.setMessageManifest(ByteString.copyFromUtf8(message.getClass.getName))
builder.build

View file

@ -36,9 +36,9 @@ class Remote(val system: ActorSystemImpl, val nodename: String) {
import settings._
private[remote] val remoteExtension = RemoteExtension(system)
private[remote] val serializationExtension = SerializationExtension(system)
private[remote] val serialization = SerializationExtension(system)
private[remote] val remoteAddress = {
RemoteAddress(remoteExtension.settings.serverSettings.Hostname, remoteExtension.settings.serverSettings.Port)
RemoteAddress(remoteExtension.serverSettings.Hostname, remoteExtension.serverSettings.Port)
}
val failureDetector = new AccrualFailureDetector(system)
@ -134,10 +134,10 @@ class RemoteSystemDaemon(remote: Remote) extends Actor {
if (message.hasActorPath) {
val actorFactoryBytes =
if (remoteExtension.settings.ShouldCompressData) LZF.uncompress(message.getPayload.toByteArray) else message.getPayload.toByteArray
if (remoteExtension.ShouldCompressData) LZF.uncompress(message.getPayload.toByteArray) else message.getPayload.toByteArray
val actorFactory =
serializationExtension.serialization.deserialize(actorFactoryBytes, classOf[() ⇒ Actor], None) match {
serialization.deserialize(actorFactoryBytes, classOf[() ⇒ Actor], None) match {
case Left(error) ⇒ throw error
case Right(instance) ⇒ instance.asInstanceOf[() ⇒ Actor]
}
@ -234,7 +234,7 @@ class RemoteSystemDaemon(remote: Remote) extends Actor {
}
private def payloadFor[T](message: RemoteSystemDaemonMessageProtocol, clazz: Class[T]): T = {
serializationExtension.serialization.deserialize(message.getPayload.toByteArray, clazz, None) match {
serialization.deserialize(message.getPayload.toByteArray, clazz, None) match {
case Left(error) ⇒ throw error
case Right(instance) ⇒ instance.asInstanceOf[T]
}

View file

@ -46,7 +46,7 @@ class RemoteActorRefProvider(
private val actors = new ConcurrentHashMap[String, AnyRef]
/*
* The problem is that ActorRefs need a reference to the ActorSystem to
* The problem is that ActorRefs need a reference to the ActorSystem to
* provide their service. Hence they cannot be created while the
* constructors of ActorSystem and ActorRefProvider are still running.
* The solution is to split out that last part into an init() method,
@ -55,13 +55,13 @@ class RemoteActorRefProvider(
@volatile
private var system: ActorSystemImpl = _
private lazy val remoteExtension = RemoteExtension(system)
private lazy val serializationExtension = SerializationExtension(system)
private lazy val serialization = SerializationExtension(system)
lazy val rootPath: ActorPath = {
val remoteAddress = RemoteAddress(remoteExtension.settings.serverSettings.Hostname, remoteExtension.settings.serverSettings.Port)
val remoteAddress = RemoteAddress(remoteExtension.serverSettings.Hostname, remoteExtension.serverSettings.Port)
new RootActorPath(remoteAddress)
}
private lazy val local = new LocalActorRefProvider(settings, eventStream, scheduler, rootPath,
remoteExtension.settings.NodeName, remoteExtension.settings.ClusterName)
remoteExtension.NodeName, remoteExtension.ClusterName)
private[akka] lazy val remote = new Remote(system, nodename)
private lazy val remoteDaemonConnectionManager = new RemoteConnectionManager(system, remote)
@ -94,14 +94,6 @@ class RemoteActorRefProvider(
deployer.lookupDeploymentFor(path.toString) match {
case Some(DeploymentConfig.Deploy(_, _, routerType, nrOfInstances, DeploymentConfig.RemoteScope(remoteAddresses))) ⇒
// FIXME move to AccrualFailureDetector as soon as we have the Gossiper up and running and remove the option to select impl in the akka.conf file since we only have one
// val failureDetector = DeploymentConfig.failureDetectorTypeFor(failureDetectorType) match {
// case FailureDetectorType.NoOp ⇒ new NoOpFailureDetector
// case FailureDetectorType.RemoveConnectionOnFirstFailure ⇒ new RemoveConnectionOnFirstFailureFailureDetector
// case FailureDetectorType.BannagePeriod(timeToBan) ⇒ new BannagePeriodFailureDetector(timeToBan)
// case FailureDetectorType.Custom(implClass) ⇒ FailureDetector.createCustomFailureDetector(implClass)
// }
def isReplicaNode: Boolean = remoteAddresses exists { _ == rootPath.remoteAddress }
//system.eventHandler.debug(this, "%s: Deploy Remote Actor with address [%s] connected to [%s]: isReplica(%s)".format(system.defaultAddress, address, remoteAddresses.mkString, isReplicaNode))
@ -111,6 +103,9 @@ class RemoteActorRefProvider(
local.actorOf(system, props, supervisor, name, true) //FIXME systemService = true here to bypass Deploy, should be fixed when create-or-get is replaced by get-or-create
} else {
implicit val dispatcher = if (props.dispatcher == Props.defaultDispatcher) system.dispatcher else props.dispatcher
implicit val timeout = system.settings.ActorTimeout
// we are on the single "reference" node which uses the remote actors on the replica nodes
val routerFactory: () ⇒ Router = DeploymentConfig.routerTypeFor(routerType) match {
case RouterType.Direct ⇒
if (remoteAddresses.size != 1) throw new ConfigurationException(
.format(name, remoteAddresses.mkString(", ")))
() ⇒ new DirectRouter
case RouterType.Broadcast ⇒
if (remoteAddresses.size != 1) throw new ConfigurationException(
"Actor [%s] configured with Broadcast router must have exactly 1 remote node configured. Found [%s]"
.format(name, remoteAddresses.mkString(", ")))
() ⇒ new BroadcastRouter
case RouterType.Random ⇒
if (remoteAddresses.size < 1) throw new ConfigurationException(
"Actor [%s] configured with Random router must have at least 1 remote node configured. Found [%s]"
@ -219,9 +220,9 @@ class RemoteActorRefProvider(
log.debug("[{}] Instantiating Actor [{}] on node [{}]", rootPath, actorPath, remoteAddress)
val actorFactoryBytes =
serializationExtension.serialization.serialize(actorFactory) match {
serialization.serialize(actorFactory) match {
case Left(error) ⇒ throw error
case Right(bytes) ⇒ if (remoteExtension.settings.ShouldCompressData) LZF.compress(bytes) else bytes
case Right(bytes) ⇒ if (remoteExtension.ShouldCompressData) LZF.compress(bytes) else bytes
}
val command = RemoteSystemDaemonMessageProtocol.newBuilder
@ -241,7 +242,7 @@ class RemoteActorRefProvider(
private def sendCommandToRemoteNode(connection: ActorRef, command: RemoteSystemDaemonMessageProtocol, withACK: Boolean) {
if (withACK) {
try {
val f = connection ? (command, remoteExtension.settings.RemoteSystemDaemonAckTimeout)
val f = connection ? (command, remoteExtension.RemoteSystemDaemonAckTimeout)
(try f.await.value catch { case _: FutureTimeoutException ⇒ None }) match {
case Some(Right(receiver)) ⇒
log.debug("Remote system command sent to [{}] successfully received", receiver)

View file

@ -3,10 +3,6 @@
*/
package akka.remote
import akka.actor.ActorSystem
import akka.actor.ExtensionKey
import akka.actor.Extension
import akka.actor.ActorSystemImpl
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import com.typesafe.config.ConfigParseOptions
@ -16,95 +12,78 @@ import java.util.concurrent.TimeUnit.MILLISECONDS
import java.net.InetAddress
import akka.config.ConfigurationException
import com.eaio.uuid.UUID
import akka.actor._
object RemoteExtensionKey extends ExtensionKey[RemoteExtension]
import scala.collection.JavaConverters._
object RemoteExtension {
def apply(system: ActorSystem): RemoteExtension = {
if (!system.hasExtension(RemoteExtensionKey)) {
system.registerExtension(new RemoteExtension)
}
system.extension(RemoteExtensionKey)
}
class Settings(cfg: Config) {
private def referenceConfig: Config =
ConfigFactory.parseResource(classOf[ActorSystem], "/akka-remote-reference.conf",
ConfigParseOptions.defaults.setAllowMissing(false))
val config: ConfigRoot = ConfigFactory.emptyRoot("akka-remote").withFallback(cfg).withFallback(referenceConfig).resolve()
import config._
val RemoteTransport = getString("akka.remote.layer")
val FailureDetectorThreshold = getInt("akka.remote.failure-detector.threshold")
val FailureDetectorMaxSampleSize = getInt("akka.remote.failure-detector.max-sample-size")
val ShouldCompressData = config.getBoolean("akka.remote.use-compression")
val RemoteSystemDaemonAckTimeout = Duration(config.getMilliseconds("akka.remote.remote-daemon-ack-timeout"), MILLISECONDS)
// TODO cluster config will go into akka-cluster-reference.conf when we enable that module
val ClusterName = getString("akka.cluster.name")
val NodeName: String = config.getString("akka.cluster.nodename") match {
case "" new UUID().toString
case value value
}
val serverSettings = new RemoteServerSettings
val clientSettings = new RemoteClientSettings
class RemoteClientSettings {
val SecureCookie: Option[String] = config.getString("akka.remote.secure-cookie") match {
case "" None
case cookie Some(cookie)
}
val ReconnectionTimeWindow = Duration(config.getMilliseconds("akka.remote.client.reconnection-time-window"), MILLISECONDS)
val ReadTimeout = Duration(config.getMilliseconds("akka.remote.client.read-timeout"), MILLISECONDS)
val ReconnectDelay = Duration(config.getMilliseconds("akka.remote.client.reconnect-delay"), MILLISECONDS)
val MessageFrameSize = config.getInt("akka.remote.client.message-frame-size")
}
class RemoteServerSettings {
import scala.collection.JavaConverters._
val MessageFrameSize = config.getInt("akka.remote.server.message-frame-size")
val SecureCookie: Option[String] = config.getString("akka.remote.secure-cookie") match {
case "" None
case cookie Some(cookie)
}
val RequireCookie = {
val requireCookie = config.getBoolean("akka.remote.server.require-cookie")
if (requireCookie && SecureCookie.isEmpty) throw new ConfigurationException(
"Configuration option 'akka.remote.server.require-cookie' is turned on but no secure cookie is defined in 'akka.remote.secure-cookie'.")
requireCookie
}
val UsePassiveConnections = config.getBoolean("akka.remote.use-passive-connections")
val UntrustedMode = config.getBoolean("akka.remote.server.untrusted-mode")
val Hostname = config.getString("akka.remote.server.hostname") match {
case "" InetAddress.getLocalHost.getHostAddress
case value value
}
val Port = config.getInt("akka.remote.server.port")
val ConnectionTimeout = Duration(config.getMilliseconds("akka.remote.server.connection-timeout"), MILLISECONDS)
val Backlog = config.getInt("akka.remote.server.backlog")
}
}
object RemoteExtension extends ExtensionId[RemoteExtensionSettings] with ExtensionIdProvider {
def lookup() = this
def createExtension(system: ActorSystemImpl) = new RemoteExtensionSettings(system.applicationConfig)
}
class RemoteExtension extends Extension[RemoteExtension] {
import RemoteExtension._
@volatile
private var _settings: Settings = _
class RemoteExtensionSettings(cfg: Config) extends Extension {
private def referenceConfig: Config =
ConfigFactory.parseResource(classOf[ActorSystem], "/akka-remote-reference.conf",
ConfigParseOptions.defaults.setAllowMissing(false))
val config: ConfigRoot = ConfigFactory.emptyRoot("akka-remote").withFallback(cfg).withFallback(referenceConfig).resolve()
def key = RemoteExtensionKey
import config._
def init(system: ActorSystemImpl) {
_settings = new Settings(system.applicationConfig)
val RemoteTransport = getString("akka.remote.layer")
val FailureDetectorThreshold = getInt("akka.remote.failure-detector.threshold")
val FailureDetectorMaxSampleSize = getInt("akka.remote.failure-detector.max-sample-size")
val ShouldCompressData = config.getBoolean("akka.remote.use-compression")
val RemoteSystemDaemonAckTimeout = Duration(config.getMilliseconds("akka.remote.remote-daemon-ack-timeout"), MILLISECONDS)
// TODO cluster config will go into akka-cluster-reference.conf when we enable that module
val ClusterName = getString("akka.cluster.name")
val SeedNodes = Set.empty[RemoteAddress] ++ getStringList("akka.cluster.seed-nodes").asScala.toSeq.map(RemoteAddress(_))
// FIXME remove nodename from config - should only be passed as command line arg or read from properties file etc.
val NodeName: String = config.getString("akka.cluster.nodename") match {
case "" new UUID().toString
case value value
}
def settings: Settings = _settings
val serverSettings = new RemoteServerSettings
val clientSettings = new RemoteClientSettings
class RemoteClientSettings {
val SecureCookie: Option[String] = config.getString("akka.remote.secure-cookie") match {
case "" None
case cookie Some(cookie)
}
val ReconnectionTimeWindow = Duration(config.getMilliseconds("akka.remote.client.reconnection-time-window"), MILLISECONDS)
val ReadTimeout = Duration(config.getMilliseconds("akka.remote.client.read-timeout"), MILLISECONDS)
val ReconnectDelay = Duration(config.getMilliseconds("akka.remote.client.reconnect-delay"), MILLISECONDS)
val MessageFrameSize = config.getInt("akka.remote.client.message-frame-size")
}
class RemoteServerSettings {
import scala.collection.JavaConverters._
val MessageFrameSize = config.getInt("akka.remote.server.message-frame-size")
val SecureCookie: Option[String] = config.getString("akka.remote.secure-cookie") match {
case "" None
case cookie Some(cookie)
}
val RequireCookie = {
val requireCookie = config.getBoolean("akka.remote.server.require-cookie")
if (requireCookie && SecureCookie.isEmpty) throw new ConfigurationException(
"Configuration option 'akka.remote.server.require-cookie' is turned on but no secure cookie is defined in 'akka.remote.secure-cookie'.")
requireCookie
}
val UsePassiveConnections = config.getBoolean("akka.remote.use-passive-connections")
val UntrustedMode = config.getBoolean("akka.remote.server.untrusted-mode")
val Hostname = config.getString("akka.remote.server.hostname") match {
case "" InetAddress.getLocalHost.getHostAddress
case value value
}
val Port = config.getInt("akka.remote.server.port")
val ConnectionTimeout = Duration(config.getMilliseconds("akka.remote.server.connection-timeout"), MILLISECONDS)
val Backlog = config.getInt("akka.remote.server.backlog")
}
}

View file

@ -282,7 +282,7 @@ class ActiveRemoteClientHandler(
val client: ActiveRemoteClient)
extends SimpleChannelUpstreamHandler {
def runOnceNow(thunk: ⇒ Unit) = timer.newTimeout(new TimerTask() {
def runOnceNow(thunk: ⇒ Unit): Unit = timer.newTimeout(new TimerTask() {
def run(timeout: Timeout) = try { thunk } finally { timeout.cancel() }
}, 0, TimeUnit.MILLISECONDS)
@ -358,8 +358,8 @@ class ActiveRemoteClientHandler(
class NettyRemoteSupport(_system: ActorSystem) extends RemoteSupport(_system) with RemoteMarshallingOps {
val log = Logging(system, "NettyRemoteSupport")
val serverSettings = RemoteExtension(system).settings.serverSettings
val clientSettings = RemoteExtension(system).settings.clientSettings
val serverSettings = RemoteExtension(system).serverSettings
val clientSettings = RemoteExtension(system).clientSettings
private val remoteClients = new HashMap[RemoteAddress, RemoteClient]
private val clientsLock = new ReentrantReadWriteLock

View file

@ -8,9 +8,15 @@ class RemoteConfigSpec extends AkkaSpec {
"ClusterSpec: A Deployer" must {
"be able to parse 'akka.actor.cluster._' config elements" in {
val config = RemoteExtension(system).settings.config
val config = RemoteExtension(system).config
import config._
//akka.remote
getString("akka.remote.layer") must equal("akka.cluster.netty.NettyRemoteSupport")
getString("akka.remote.secure-cookie") must equal("")
getBoolean("akka.remote.use-passive-connections") must equal(true)
// getMilliseconds("akka.remote.remote-daemon-ack-timeout") must equal(30 * 1000)
//akka.remote.server
getInt("akka.remote.server.port") must equal(2552)
getInt("akka.remote.server.message-frame-size") must equal(1048576)
@ -28,27 +34,23 @@ class RemoteConfigSpec extends AkkaSpec {
// TODO cluster config will go into akka-cluster-reference.conf when we enable that module
//akka.cluster
getString("akka.cluster.name") must equal("test-cluster")
getString("akka.cluster.zookeeper-server-addresses") must equal("localhost:2181")
getInt("akka.remote.server.port") must equal(2552)
getMilliseconds("akka.cluster.max-time-to-wait-until-connected") must equal(30 * 1000)
getMilliseconds("akka.cluster.session-timeout") must equal(60 * 1000)
getMilliseconds("akka.cluster.connection-timeout") must equal(60 * 1000)
getMilliseconds("akka.remote.remote-daemon-ack-timeout") must equal(30 * 1000)
getBoolean("akka.cluster.include-ref-node-in-replica-set") must equal(true)
getString("akka.remote.layer") must equal("akka.cluster.netty.NettyRemoteSupport")
getString("akka.remote.secure-cookie") must equal("")
getBoolean("akka.remote.use-passive-connections") must equal(true)
getString("akka.cluster.log-directory") must equal("_akka_cluster")
getString("akka.cluster.name") must equal("default-cluster")
getString("akka.cluster.nodename") must equal("")
getStringList("akka.cluster.seed-nodes") must equal(new java.util.ArrayList[String])
//akka.cluster.replication
getString("akka.cluster.replication.digest-type") must equal("MAC")
getString("akka.cluster.replication.password") must equal("secret")
getInt("akka.cluster.replication.ensemble-size") must equal(3)
getInt("akka.cluster.replication.quorum-size") must equal(2)
getInt("akka.cluster.replication.snapshot-frequency") must equal(1000)
getMilliseconds("akka.cluster.replication.timeout") must equal(30 * 1000)
// getMilliseconds("akka.cluster.max-time-to-wait-until-connected") must equal(30 * 1000)
// getMilliseconds("akka.cluster.session-timeout") must equal(60 * 1000)
// getMilliseconds("akka.cluster.connection-timeout") must equal(60 * 1000)
// getBoolean("akka.cluster.include-ref-node-in-replica-set") must equal(true)
// getString("akka.cluster.log-directory") must equal("_akka_cluster")
// //akka.cluster.replication
// getString("akka.cluster.replication.digest-type") must equal("MAC")
// getString("akka.cluster.replication.password") must equal("secret")
// getInt("akka.cluster.replication.ensemble-size") must equal(3)
// getInt("akka.cluster.replication.quorum-size") must equal(2)
// getInt("akka.cluster.replication.snapshot-frequency") must equal(1000)
// getMilliseconds("akka.cluster.replication.timeout") must equal(30 * 1000)
}
}
}

View file

@ -1,45 +0,0 @@
import sample.ants._
import sample.ants.Config._
val scale = 5
size(Dim * scale, Dim * scale)
smooth()
override def setup() {
background(255)
World.start
}
def draw() {
val world = World.snapshot
for (x <- 0 until Dim; y <- 0 until Dim; cell <- world(x)(y)) {
val (rx, ry, rw, rh) = (x * scale, y * scale, scale, scale)
noStroke()
fill(255)
rect(rx, ry, rw, rh)
if (cell.pher > 0) fill(0, 255, 0, cell.pher * PherScale)
if (cell.food > 0) fill(255, 0, 0, 255 * (cell.food / FoodRange.floatValue))
rect(rx, ry, rw, rh)
for (ant <- cell.ant) {
if (ant.food) stroke(255, 0, 0) else stroke(0)
val (hx, hy, tx, ty) = antLine(ant.dir)
line(rx + hx, ry + hy, rx + tx, ry + ty)
}
stroke(0, 0, 255)
noFill()
val homeStart = World.homeOff * scale
val homeWidth = AntsSqrt * scale
rect(homeStart, homeStart, homeWidth, homeWidth)
}
}
val s = scale - 1
val m = s / 2
def antLine(dir: Int) = dir match {
case 0|4 => (m, 0, m, s)
case 1|5 => (s, 0, 0, s)
case 2|6 => (s, m, 0, m)
case _ => (s, s, 0, 0)
}

View file

@ -1,20 +0,0 @@
####################
# Akka Config File #
####################
akka {
version = "2.0-SNAPSHOT"
enabled-modules = ["camel", "http"]
time-unit = "seconds"
event-handlers = ["akka.event.EventHandler$DefaultListener"]
boot = ["sample.camel.Boot"]
http {
hostname = "localhost"
port = 9998
}
}

View file

@ -1,65 +0,0 @@
<?xml version="1.0"?>
<!DOCTYPE Configure PUBLIC "-//Jetty//Configure//EN" "http://www.eclipse.org/jetty/configure.dtd">
<Configure id="Server" class="org.eclipse.jetty.server.Server">
<!-- =========================================================== -->
<!-- Server Thread Pool -->
<!-- =========================================================== -->
<Set name="ThreadPool">
<New class="org.eclipse.jetty.util.thread.ExecutorThreadPool">
</New>
</Set>
<!-- =========================================================== -->
<!-- Set connectors -->
<!-- =========================================================== -->
<Call name="addConnector">
<Arg>
<New class="org.eclipse.jetty.server.nio.SelectChannelConnector">
<Set name="host"><SystemProperty name="jetty.host" /></Set>
<Set name="port"><SystemProperty name="jetty.port" default="8080"/></Set>
<Set name="maxIdleTime">300000</Set>
<Set name="Acceptors">2</Set>
<Set name="statsOn">false</Set>
<Set name="confidentialPort">8443</Set>
<Set name="lowResourcesConnections">20000</Set>
<Set name="lowResourcesMaxIdleTime">5000</Set>
</New>
</Arg>
</Call>
<!-- =========================================================== -->
<!-- Set handler -->
<!-- =========================================================== -->
<Set name="handler">
<New id="Handlers" class="org.eclipse.jetty.server.handler.HandlerCollection">
<Set name="handlers">
<Array type="org.eclipse.jetty.server.Handler">
<Item>
<New id="AkkaRestHandler" class="org.eclipse.jetty.servlet.ServletContextHandler">
<Set name="contextPath">/</Set>
<Call name="addServlet">
<Arg>akka.http.AkkaRestServlet</Arg>
<Arg>/*</Arg>
</Call>
</New>
</Item>
<Item>
<New id="DefaultHandler" class="org.eclipse.jetty.server.handler.DefaultHandler"/>
</Item>
</Array>
</Set>
</New>
</Set>
<!-- =========================================================== -->
<!-- extra options -->
<!-- =========================================================== -->
<Set name="stopAtShutdown">true</Set>
<Set name="sendServerVersion">true</Set>
<Set name="sendDateHeader">true</Set>
<Set name="gracefulShutdown">1000</Set>
</Configure>

View file

@ -1,12 +0,0 @@
package sample.camel;
/**
* @author Martin Krasser
*/
public class BeanImpl implements BeanIntf {
public String foo(String s) {
return "hello " + s;
}
}

View file

@ -1,10 +0,0 @@
package sample.camel;
/**
* @author Martin Krasser
*/
public interface BeanIntf {
public String foo(String s);
}

View file

@ -1,15 +0,0 @@
package sample.camel;
import org.apache.camel.Body;
import org.apache.camel.Header;
import akka.camel.consume;
/**
* @author Martin Krasser
*/
public interface RemoteTypedConsumer1 {
@consume("jetty:http://localhost:6644/camel/remote-typed-actor-1")
public String foo(@Body String body, @Header("name") String header);
}

View file

@ -1,13 +0,0 @@
package sample.camel;
import akka.actor.TypedActor;
/**
* @author Martin Krasser
*/
public class RemoteTypedConsumer1Impl implements RemoteTypedConsumer1 {
public String foo(String body, String header) {
return String.format("remote1: body=%s header=%s", body, header);
}
}

View file

@ -1,15 +0,0 @@
package sample.camel;
import org.apache.camel.Body;
import org.apache.camel.Header;
import akka.camel.consume;
/**
* @author Martin Krasser
*/
public interface RemoteTypedConsumer2 {
@consume("jetty:http://localhost:6644/camel/remote-typed-actor-2")
public String foo(@Body String body, @Header("name") String header);
}

View file

@ -1,12 +0,0 @@
package sample.camel;
/**
* @author Martin Krasser
*/
public class RemoteTypedConsumer2Impl implements RemoteTypedConsumer2 {
public String foo(String body, String header) {
return String.format("remote2: body=%s header=%s", body, header);
}
}

View file

@ -1,17 +0,0 @@
package sample.camel;
import org.apache.camel.Body;
import org.apache.camel.Header;
import akka.camel.consume;
/**
* @author Martin Krasser
*/
public interface TypedConsumer1 {
@consume("file:data/input/typed-actor")
public void foo(String body);
@consume("jetty:http://0.0.0.0:8877/camel/typed-actor")
public String bar(@Body String body, @Header("name") String header);
}

View file

@ -1,21 +0,0 @@
package sample.camel;
import org.apache.camel.Body;
import org.apache.camel.Header;
import akka.actor.TypedActor;
/**
* @author Martin Krasser
*/
public class TypedConsumer1Impl implements TypedConsumer1 {
public void foo(String body) {
System.out.println("Received message:");
System.out.println(body);
}
public String bar(@Body String body, @Header("name") String header) {
return String.format("body=%s header=%s", body, header);
}
}

View file

@ -1,14 +0,0 @@
package sample.camel;
import org.apache.camel.Body;
import org.apache.camel.Header;
import akka.camel.consume;
/**
* @author Martin Krasser
*/
public interface TypedConsumer2 {
@consume("direct:default")
public String foo(String body);
}

View file

@ -1,11 +0,0 @@
package sample.camel;
/**
* @author Martin Krasser
*/
public class TypedConsumer2Impl implements TypedConsumer2 {
public String foo(String body) {
return String.format("default: %s", body);
}
}

View file

@ -1,20 +0,0 @@
package sample.camel;
import akka.camel.Message;
import akka.camel.UntypedConsumerActor;
/**
* @author Martin Krasser
*/
public class UntypedConsumer1 extends UntypedConsumerActor {
public String getEndpointUri() {
return "direct:untyped-consumer-1";
}
public void onReceive(Object message) {
Message msg = (Message)message;
String body = msg.getBodyAs(String.class);
sender.tell(String.format("received %s", body));
}
}

View file

@ -1,27 +0,0 @@
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="
http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-2.5.xsd">
<!-- ================================================================== -->
<!-- Camel JMS component and ActiveMQ setup -->
<!-- ================================================================== -->
<bean id="jms" class="org.apache.camel.component.jms.JmsComponent">
<property name="configuration" ref="jmsConfig"/>
</bean>
<bean id="jmsConfig" class="org.apache.camel.component.jms.JmsConfiguration">
<property name="connectionFactory" ref="singleConnectionFactory"/>
</bean>
<bean id="singleConnectionFactory" class="org.springframework.jms.connection.SingleConnectionFactory">
<property name="targetConnectionFactory" ref="jmsConnectionFactory"/>
</bean>
<bean id="jmsConnectionFactory" class="org.apache.activemq.ActiveMQConnectionFactory">
<property name="brokerURL" value="vm://testbroker"/>
</bean>
</beans>

View file

@ -1,26 +0,0 @@
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:akka="http://akka.io/schema/akka"
xmlns:camel="http://camel.apache.org/schema/spring"
xsi:schemaLocation="
http://www.springframework.org/schema/beans
http://www.springframework.org/schema/beans/spring-beans-2.5.xsd
http://akka.io/schema/akka
http://akka.io/akka-2.0-SNAPSHOT.xsd
http://camel.apache.org/schema/spring
http://camel.apache.org/schema/spring/camel-spring.xsd">
<bean id="routeBuilder" class="sample.camel.StandaloneSpringApplicationRoute" />
<camel:camelContext id="camelContext">
<camel:routeBuilder ref="routeBuilder" />
</camel:camelContext>
<akka:camel-service id="service">
<akka:camel-context ref="camelContext" />
</akka:camel-service>
<akka:typed-actor id="ta" interface="sample.camel.BeanIntf" implementation="sample.camel.BeanImpl" timeout="1000" />
<akka:untyped-actor id="ua" implementation="sample.camel.UntypedConsumer1" scope="singleton" autostart="true" />
</beans>

View file

@ -1,168 +0,0 @@
/**
* Copyright (C) 2009-2010 Typesafe Inc. <http://www.typesafe.com>.
*/
// CAMEL IS NOT PART OF MILESTONE 1 OF AKKA 2.0
// TODO FIXME 2.0
//package sample.camel
//
//import org.apache.camel.Exchange
//
//import akka.actor.{ Actor, ActorRef, ActorRegistry }
//import akka.camel.{ Ack, Failure, Producer, Message, Consumer }
//
///**
// * Client-initiated remote actor.
// */
//class RemoteActor1 extends Actor with Consumer {
// def endpointUri = "jetty:http://localhost:6644/camel/remote-actor-1"
//
// protected def receive = {
// case msg: Message ⇒ sender ! Message("hello %s" format msg.bodyAs[String], Map("sender" -> "remote1"))
// }
//}
//
///**
// * Server-initiated remote actor.
// */
//class RemoteActor2 extends Actor with Consumer {
// def endpointUri = "jetty:http://localhost:6644/camel/remote-actor-2"
//
// protected def receive = {
// case msg: Message ⇒ sender ! Message("hello %s" format msg.bodyAs[String], Map("sender" -> "remote2"))
// }
//}
//
//class Producer1 extends Actor with Producer {
// def endpointUri = "direct:welcome"
// override def oneway = false // default
//}
//
//class Consumer1 extends Actor with Consumer {
// def endpointUri = "file:data/input/actor"
//
// def receive = {
// case msg: Message ⇒ println("received %s" format msg.bodyAs[String])
// }
//}
//
//class Consumer2 extends Actor with Consumer {
// def endpointUri = "jetty:http://0.0.0.0:8877/camel/default"
//
// def receive = {
// case msg: Message sender ! ("Hello %s" format msg.bodyAs[String])
// }
//}
//
//class Consumer3(transformer: ActorRef) extends Actor with Consumer {
// def endpointUri = "jetty:http://0.0.0.0:8877/camel/welcome"
//
// def receive = {
// case msg: Message ⇒ transformer.forward(msg.setBodyAs[String])
// }
//}
//
//class Consumer4 extends Actor with Consumer {
// def endpointUri = "jetty:http://0.0.0.0:8877/camel/stop"
//
// def receive = {
// case msg: Message ⇒ msg.bodyAs[String] match {
// case "stop" ⇒ {
// sender ! "Consumer4 stopped"
// self.stop
// }
// case body ⇒ sender ! body
// }
// }
//}
//
//class Consumer5 extends Actor with Consumer {
// def endpointUri = "jetty:http://0.0.0.0:8877/camel/start"
//
// def receive = {
// case _ ⇒ {
// Actor.actorOf[Consumer4]
// sender ! "Consumer4 started"
// }
// }
//}
//
//class Transformer(producer: ActorRef) extends Actor {
// protected def receive = {
// case msg: Message ⇒ producer.forward(msg.transformBody((body: String) ⇒ "- %s -" format body))
// }
//}
//
//class Subscriber(name: String, uri: String) extends Actor with Consumer {
// def endpointUri = uri
//
// protected def receive = {
// case msg: Message ⇒ println("%s received: %s" format (name, msg.body))
// }
//}
//
//class Publisher(uri: String) extends Actor with Producer {
// def endpointUri = uri
// override def oneway = true
//}
//
//class PublisherBridge(uri: String, publisher: ActorRef) extends Actor with Consumer {
// def endpointUri = uri
//
// protected def receive = {
// case msg: Message ⇒ {
// publisher ! msg.bodyAs[String]
// sender ! "message published"
// }
// }
//}
//
//class HttpConsumer(producer: ActorRef) extends Actor with Consumer {
// def endpointUri = "jetty:http://0.0.0.0:8875/"
//
// protected def receive = {
// case msg ⇒ producer forward msg
// }
//}
//
//class HttpProducer(transformer: ActorRef) extends Actor with Producer {
// def endpointUri = "jetty://http://akka.io/?bridgeEndpoint=true"
//
// override protected def receiveBeforeProduce = {
// // only keep Exchange.HTTP_PATH message header (which needed by bridge endpoint)
// case msg: Message ⇒ msg.setHeaders(msg.headers(Set(Exchange.HTTP_PATH)))
// }
//
// override protected def receiveAfterProduce = {
// // do not reply but forward result to transformer
// case msg ⇒ transformer forward msg
// }
//}
//
//class HttpTransformer extends Actor {
// protected def receive = {
// case msg: Message ⇒ sender ! (msg.transformBody { body: String ⇒ body replaceAll ("Akka ", "AKKA ") })
// case msg: Failure ⇒ sender ! msg
// }
//}
//
//class FileConsumer extends Actor with Consumer {
// def endpointUri = "file:data/input/actor?delete=true"
// override def autoack = false
//
// var counter = 0
//
// def receive = {
// case msg: Message ⇒ {
// if (counter == 2) {
// println("received %s" format msg.bodyAs[String])
// sender ! Ack
// } else {
// println("rejected %s" format msg.bodyAs[String])
// counter += 1
// sender ! Failure(new Exception("message number %s not accepted" format counter))
// }
// }
// }
//}

View file

@ -1,105 +0,0 @@
/**
* Copyright (C) 2009-2010 Typesafe Inc. <http://www.typesafe.com>.
*/
// CAMEL IS NOT PART OF MILESTONE 1 OF AKKA 2.0
// TODO FIXME 2.0
//package sample.camel
//
//import org.apache.camel.{ Exchange, Processor }
//import org.apache.camel.builder.RouteBuilder
//import org.apache.camel.impl.DefaultCamelContext
//import org.apache.camel.spring.spi.ApplicationContextRegistry
//import org.springframework.context.support.ClassPathXmlApplicationContext
//
//import akka.actor.Actor._
//import akka.actor.Props
//import akka.actor.TypedActor
//import akka.camel.CamelContextManager
//
///**
// * @author Martin Krasser
// */
//class Boot {
//
// // -----------------------------------------------------------------------
// // Basic example
// // -----------------------------------------------------------------------
//
// actorOf[Consumer1]
// actorOf[Consumer2]
//
// // -----------------------------------------------------------------------
// // Custom Camel route example
// // -----------------------------------------------------------------------
//
// // Create CamelContext and a Spring-based registry
// val context = new ClassPathXmlApplicationContext("/context-jms.xml", getClass)
// val registry = new ApplicationContextRegistry(context)
//
// // Use a custom Camel context and a custom route builder
// CamelContextManager.init(new DefaultCamelContext(registry))
// CamelContextManager.mandatoryContext.addRoutes(new CustomRouteBuilder)
//
// val producer = actorOf[Producer1]
// val mediator = actorOf(new Transformer(producer))
// val consumer = actorOf(new Consumer3(mediator))
//
// // -----------------------------------------------------------------------
// // Asynchronous consumer-producer example (Akka homepage transformation)
// // -----------------------------------------------------------------------
//
// val httpTransformer = actorOf(new HttpTransformer)
// val httpProducer = actorOf(new HttpProducer(httpTransformer))
// val httpConsumer = actorOf(new HttpConsumer(httpProducer))
//
// // -----------------------------------------------------------------------
// // Publish subscribe examples
// // -----------------------------------------------------------------------
//
// //
// // Cometd example commented out because camel-cometd is broken since Camel 2.3
// //
//
// //val cometdUri = "cometd://localhost:8111/test/abc?baseResource=file:target"
// //val cometdSubscriber = actorOf(new Subscriber("cometd-subscriber", cometdUri))
// //val cometdPublisher = actorOf(new Publisher("cometd-publisher", cometdUri))
//
// val jmsUri = "jms:topic:test"
// val jmsSubscriber1 = actorOf(new Subscriber("jms-subscriber-1", jmsUri))
// val jmsSubscriber2 = actorOf(new Subscriber("jms-subscriber-2", jmsUri))
// val jmsPublisher = actorOf(new Publisher(jmsUri), "jms-publisher")
//
// //val cometdPublisherBridge = actorOf(new PublisherBridge("jetty:http://0.0.0.0:8877/camel/pub/cometd", cometdPublisher))
// val jmsPublisherBridge = actorOf(new PublisherBridge("jetty:http://0.0.0.0:8877/camel/pub/jms", jmsPublisher))
//
// // -----------------------------------------------------------------------
// // Actor un-publishing and re-publishing example
// // -----------------------------------------------------------------------
//
// actorOf[Consumer4] // POSTing "stop" to http://0.0.0.0:8877/camel/stop stops and unpublishes this actor
// actorOf[Consumer5] // POSTing any msg to http://0.0.0.0:8877/camel/start starts and publishes Consumer4 again.
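//  // e.g. (illustrative only, assuming the sample is running locally):
//  //   curl -X POST -d "stop" http://0.0.0.0:8877/camel/stop
//  //   curl -X POST -d "anything" http://0.0.0.0:8877/camel/start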
//
// // -----------------------------------------------------------------------
// // Active object example
// // -----------------------------------------------------------------------
//
// // TODO: investigate why this consumer is not published
// TypedActor.typedActorOf(classOf[TypedConsumer1], classOf[TypedConsumer1Impl], Props())
//}
//
///**
// * @author Martin Krasser
// */
//class CustomRouteBuilder extends RouteBuilder {
// def configure {
// val actorUri = "actor:%s" format classOf[Consumer2].getName
// from("jetty:http://0.0.0.0:8877/camel/custom").to(actorUri)
// from("direct:welcome").process(new Processor() {
// def process(exchange: Exchange) {
// exchange.getOut.setBody("Welcome %s" format exchange.getIn.getBody)
// }
// })
// }
//}

View file

@ -1,36 +0,0 @@
/**
* Copyright (C) 2009-2010 Typesafe Inc. <http://www.typesafe.com>.
*/
// CAMEL IS NOT PART OF MILESTONE 1 OF AKKA 2.0
// TODO FIXME 2.0
//package sample.camel
//
//import akka.actor.Actor._
//import akka.actor.TypedActor
//import akka.camel.Message
//
///**
// * @author Martin Krasser
// */
//object ClientApplication extends App {
//
// /* TODO: fix remote example
//
// val actor1 = remote.actorOf[RemoteActor1]("localhost", 7777)
// val actor2 = remote.actorFor("remote2", "localhost", 7777)
//
// val typedActor1 =
// TypedActor.newRemoteInstance(classOf[RemoteTypedConsumer1],classOf[RemoteTypedConsumer1Impl], "localhost", 7777)
//
// val typedActor2 = remote.typedActorFor(classOf[RemoteTypedConsumer2], "remote3", "localhost", 7777)
//
// println(actor1 !! Message("actor1")) // activates and publishes actor remotely
// println(actor2 !! Message("actor2")) // actor already activated and published remotely
//
// println(typedActor1.foo("x1", "y1")) // activates and publishes typed actor methods remotely
// println(typedActor2.foo("x2", "y2")) // typed actor methods already activated and published remotely
//
// */
//}

View file

@ -1,34 +0,0 @@
/**
* Copyright (C) 2009-2010 Typesafe Inc. <http://www.typesafe.com>.
*/
// CAMEL IS NOT PART OF MILESTONE 1 OF AKKA 2.0
// TODO FIXME 2.0
//package sample.camel
//
//import akka.actor.Actor._
//import akka.camel.CamelServiceManager
//import akka.actor.{ TypedActor, Props }
//
///**
// * @author Martin Krasser
// */
//object ServerApplication extends App {
// import CamelServiceManager._
//
// /* TODO: fix remote example
//
// startCamelService
//
// val ua = actorOf[RemoteActor2]
// val ta = TypedActor.typedActorOf(
// classOf[RemoteTypedConsumer2],
// classOf[RemoteTypedConsumer2Impl], Props())
//
// remote.start("localhost", 7777)
// remote.register("remote2", ua)
// remote.registerTypedActor("remote3", ta)
//
// */
//}

View file

@ -1,135 +0,0 @@
/**
* Copyright (C) 2009-2010 Typesafe Inc. <http://www.typesafe.com>.
*/
// CAMEL IS NOT PART OF MILESTONE 1 OF AKKA 2.0
// TODO FIXME 2.0
//package sample.camel
//
//import org.apache.camel.impl.{ DefaultCamelContext, SimpleRegistry }
//import org.apache.camel.builder.RouteBuilder
//import org.apache.camel.spring.spi.ApplicationContextRegistry
//import org.springframework.context.support.ClassPathXmlApplicationContext
//
//import akka.actor.{ Actor, TypedActor, Props }
//import akka.camel._
//
///**
// * @author Martin Krasser
// */
//object StandaloneApplication extends App {
// import CamelContextManager._
// import CamelServiceManager._
//
// // 'externally' register typed actors
// val registry = new SimpleRegistry
// registry.put("sample", TypedActor.typedActorOf(classOf[BeanIntf], classOf[BeanImpl], Props()))
//
// // customize CamelContext
// CamelContextManager.init(new DefaultCamelContext(registry))
// CamelContextManager.mandatoryContext.addRoutes(new StandaloneApplicationRoute)
//
// startCamelService
//
// // access 'externally' registered typed actors
// assert("hello msg1" == mandatoryContext.createProducerTemplate.requestBody("direct:test", "msg1"))
//
// mandatoryService.awaitEndpointActivation(1) {
// // 'internally' register typed actor (requires CamelService)
// TypedActor.typedActorOf(classOf[TypedConsumer2], classOf[TypedConsumer2Impl], Props())
// }
//
// // access 'internally' (automatically) registered typed-actors
// // (see @consume annotation value at TypedConsumer2.foo method)
// assert("default: msg3" == mandatoryContext.createProducerTemplate.requestBody("direct:default", "msg3"))
//
// stopCamelService
//
// Actor.registry.local.shutdownAll
//}
//
//class StandaloneApplicationRoute extends RouteBuilder {
// def configure = {
// // route to typed actors (in SimpleRegistry)
// from("direct:test").to("typed-actor:sample?method=foo")
// }
//}
//
//object StandaloneSpringApplication extends App {
// import CamelContextManager._
//
// // load Spring application context
// val appctx = new ClassPathXmlApplicationContext("/context-standalone.xml")
//
// // We cannot use the CamelServiceManager to wait for endpoint activation
// // because CamelServiceManager is started by the Spring application context.
// // (and hence is not available for setting expectations on activations). This
// // will be improved/enabled in upcoming releases.
// Thread.sleep(1000)
//
// // access 'externally' registered typed actors with typed-actor component
// assert("hello msg3" == mandatoryTemplate.requestBody("direct:test3", "msg3"))
//
// // access auto-started untyped consumer
// assert("received msg3" == mandatoryTemplate.requestBody("direct:untyped-consumer-1", "msg3"))
//
// appctx.close
//
// Actor.registry.local.shutdownAll
//}
//
//class StandaloneSpringApplicationRoute extends RouteBuilder {
// def configure = {
// // routes to typed actor (in ApplicationContextRegistry)
// from("direct:test3").to("typed-actor:ta?method=foo")
// }
//}
//
//object StandaloneJmsApplication extends App {
// import CamelServiceManager._
//
// val context = new ClassPathXmlApplicationContext("/context-jms.xml")
// val registry = new ApplicationContextRegistry(context)
//
// // Init CamelContextManager with custom CamelContext
// CamelContextManager.init(new DefaultCamelContext(registry))
//
// startCamelService
//
// val jmsUri = "jms:topic:test"
// val jmsPublisher = Actor.actorOf(new Publisher(jmsUri), "jms-publisher")
//
// mandatoryService.awaitEndpointActivation(2) {
// Actor.actorOf(new Subscriber("jms-subscriber-1", jmsUri))
// Actor.actorOf(new Subscriber("jms-subscriber-2", jmsUri))
// }
//
// // Send 10 messages via the publisher actor
// for (i ← 1 to 10) {
// jmsPublisher ! ("Akka rocks (%d)" format i)
// }
//
// // Send 10 messages to the JMS topic directly
// for (i ← 1 to 10) {
// CamelContextManager.mandatoryTemplate.sendBody(jmsUri, "Camel rocks (%d)" format i)
// }
//
// // Wait a bit for subscribers to receive messages
// Thread.sleep(1000)
//
// stopCamelService
// Actor.registry.local.shutdownAll
//}
//
//object StandaloneFileApplication {
// import CamelServiceManager._
//
// def main(args: Array[String]) {
// startCamelService
// mandatoryService.awaitEndpointActivation(1) {
// Actor.actorOf(new FileConsumer)
// }
// }
//}
//

View file

@ -1,21 +0,0 @@
package sample.camel;
import akka.camel.Message;
import akka.camel.UntypedConsumerActor;
/**
* @author Martin Krasser
*/
public class SampleRemoteUntypedConsumer extends UntypedConsumerActor {
public String getEndpointUri() {
return "direct:remote-untyped-consumer";
}
public void onReceive(Object message) {
Message msg = (Message)message;
String body = msg.getBodyAs(String.class);
String header = msg.getHeaderAs("test", String.class);
sender().tell(String.format("%s %s", body, header));
}
}

View file

@ -1,11 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="false" debug="false">
<appender name="stdout" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>[%4p] [%d{ISO8601}] [%t] %c{1}: %m%n</pattern>
</encoder>
</appender>
<root level="OFF">
<appender-ref ref="stdout"/>
</root>
</configuration>

View file

@ -1,99 +0,0 @@
package sample.camel
import _root_.akka.routing.{ RoutedProps, Routing }
import collection.mutable.Set
import java.util.concurrent.CountDownLatch
import org.junit._
import org.scalatest.junit.JUnitSuite
import akka.actor.Actor._
import akka.actor.{ ActorRegistry, ActorRef, Actor }
import akka.camel._
import akka.camel.CamelServiceManager._
/**
* @author Martin Krasser
*/
class HttpConcurrencyTestStress extends JUnitSuite {
import HttpConcurrencyTestStress._
@Test
def shouldProcessMessagesConcurrently = {
/* TODO: fix stress test
val num = 50
val latch1 = new CountDownLatch(num)
val latch2 = new CountDownLatch(num)
val latch3 = new CountDownLatch(num)
val client1 = actorOf(new HttpClientActor("client1", latch1))
val client2 = actorOf(new HttpClientActor("client2", latch2))
val client3 = actorOf(new HttpClientActor("client3", latch3))
for (i <- 1 to num) {
client1 ! Message("client1", Map(Message.MessageExchangeId -> i))
client2 ! Message("client2", Map(Message.MessageExchangeId -> i))
client3 ! Message("client3", Map(Message.MessageExchangeId -> i))
}
latch1.await
latch2.await
latch3.await
assert(num == (client1 ? "getCorrelationIdCount").as[Int].get)
assert(num == (client2 ? "getCorrelationIdCount").as[Int].get)
assert(num == (client3 ? "getCorrelationIdCount").as[Int].get)*/
}
}
object HttpConcurrencyTestStress {
@BeforeClass
def beforeClass {
startCamelService
val workers = for (i ← 1 to 8) yield actorOf[HttpServerWorker]
val balancer = Routing.actorOf(RoutedProps().withRoundRobinRouter.withConnections(workers), "loadbalancer")
//service.get.awaitEndpointActivation(1) {
// actorOf(new HttpServerActor(balancer))
//}
}
@AfterClass
def afterClass = {
stopCamelService
Actor.registry.local.shutdownAll
}
class HttpClientActor(label: String, latch: CountDownLatch) extends Actor with Producer {
def endpointUri = "jetty:http://0.0.0.0:8855/echo"
var correlationIds = Set[Any]()
override protected def receive = {
case "getCorrelationIdCount" sender ! correlationIds.size
case msg super.receive(msg)
}
override protected def receiveAfterProduce = {
case msg: Message ⇒ {
val corr = msg.headers(Message.MessageExchangeId)
val body = msg.bodyAs[String]
correlationIds += corr
assert(label == body)
latch.countDown
print(".")
}
}
}
class HttpServerActor(balancer: ActorRef) extends Actor with Consumer {
def endpointUri = "jetty:http://0.0.0.0:8855/echo"
var counter = 0
def receive = {
case msg ⇒ balancer forward msg
}
}
class HttpServerWorker extends Actor {
protected def receive = {
case msg ⇒ sender ! msg
}
}
}

View file

@ -1,101 +0,0 @@
package sample.camel
import org.scalatest.{ GivenWhenThen, BeforeAndAfterAll, FeatureSpec }
import akka.actor.Actor._
import akka.actor._
import akka.camel._
//import akka.cluster.netty.NettyRemoteSupport
//import akka.cluster.RemoteServerModule
/**
* @author Martin Krasser
*/
class RemoteConsumerTest /*extends FeatureSpec with BeforeAndAfterAll with GivenWhenThen*/ {
/* TODO: fix remote test
import CamelServiceManager._
import RemoteConsumerTest._
var server: RemoteServerModule = _
override protected def beforeAll = {
registry.shutdownAll
startCamelService
remote.shutdown
remote.asInstanceOf[NettyRemoteSupport].optimizeLocal.set(false)
server = remote.start(host,port)
}
override protected def afterAll = {
remote.shutdown
stopCamelService
registry.shutdownAll
remote.asInstanceOf[NettyRemoteSupport].optimizeLocal.set(true)
}
feature("Publish consumer on remote node") {
scenario("access published remote consumer") {
given("a consumer actor")
val consumer = Actor.actorOf[RemoteConsumer]
when("registered at the server")
assert(mandatoryService.awaitEndpointActivation(1) {
remote.register(consumer)
})
then("the published consumer is accessible via its endpoint URI")
val response = CamelContextManager.mandatoryTemplate.requestBody("direct:remote-consumer", "test")
assert(response === "remote actor: test")
}
}
feature("Publish typed consumer on remote node") {
scenario("access published remote consumer method") {
given("a typed consumer actor")
when("registered at the server")
assert(mandatoryService.awaitEndpointActivation(1) {
remote.registerTypedActor("whatever", TypedActor.newInstance(
classOf[SampleRemoteTypedConsumer],
classOf[SampleRemoteTypedConsumerImpl]))
})
then("the published method is accessible via its endpoint URI")
val response = CamelContextManager.mandatoryTemplate.requestBody("direct:remote-typed-consumer", "test")
assert(response === "remote typed actor: test")
}
}
feature("Publish untyped consumer on remote node") {
scenario("access published remote untyped consumer") {
given("an untyped consumer actor")
val consumer = Actor.actorOf(classOf[SampleRemoteUntypedConsumer])
when("registered at the server")
assert(mandatoryService.awaitEndpointActivation(1) {
remote.register(consumer)
})
then("the published untyped consumer is accessible via its endpoint URI")
val response = CamelContextManager.mandatoryTemplate.requestBodyAndHeader("direct:remote-untyped-consumer", "a", "test", "b")
assert(response === "a b")
}
}*/
}
object RemoteConsumerTest {
val host = "localhost"
val port = 7774
class RemoteConsumer extends Actor with Consumer {
def endpointUri = "direct:remote-consumer"
protected def receive = {
case "init" sender ! "done"
case m: Message sender ! ("remote actor: %s" format m.body)
}
}
}

View file

@ -1,26 +0,0 @@
Akka Chat Client/Server Sample Application
How to run the sample:
1. Fire up two shells. For each of them:
- Step down into the root of the Akka distribution.
- Set 'export AKKA_HOME=<root of distribution>'.
- Run 'sbt console' to start up a REPL (interpreter).
2. In the first REPL, execute:
- scala> import sample.chat._
- scala> import akka.actor.Actor._
- scala> val chatService = actorOf[ChatService]
3. In the second REPL, execute:
- scala> import sample.chat._
- scala> ClientRunner.run
4. See the chat simulation run.
5. Run it again to see full speed after first initialization.
6. In the client REPL, or in a new REPL, you can also create your own client
- scala> import sample.chat._
- scala> val myClient = new ChatClient("<your name>")
- scala> myClient.login
- scala> myClient.post("Can I join?")
- scala> println("CHAT LOG:\n\t" + myClient.chatLog.log.mkString("\n\t"))
That's it. Have fun.

View file

@ -1,257 +0,0 @@
/**
* Copyright (C) 2009-2010 Typesafe Inc. <http://www.typesafe.com>.
*/
// REMOTING IS NOT PART OF MILESTONE 1 OF AKKA 2.0
// TODO FIXME 2.0
//
// package sample.chat
//
// import scala.collection.mutable.HashMap
//
// import akka.actor.{Actor, ActorRef, Props}
// import akka.stm._
// import akka.actor.Actor._
// import akka.event.EventHandler
//
// /******************************************************************************
// Akka Chat Client/Server Sample Application
//
// How to run the sample:
//
// 1. Fire up two shells. For each of them:
// - Step down into the root of the Akka distribution.
// - Set 'export AKKA_HOME=<root of distribution>'.
// - Run 'sbt console' to start up a REPL (interpreter).
// 2. In the first REPL, execute:
// - scala> import sample.chat._
// - scala> import akka.actor.Actor._
// - scala> val chatService = actorOf[ChatService]
// 3. In the second REPL, execute:
// - scala> import sample.chat._
// - scala> ClientRunner.run
// 4. See the chat simulation run.
// 5. Run it again to see full speed after first initialization.
// 6. In the client REPL, or in a new REPL, you can also create your own client
// - scala> import sample.chat._
// - scala> val myClient = new ChatClient("<your name>")
// - scala> myClient.login
// - scala> myClient.post("Can I join?")
// - scala> println("CHAT LOG:\n\t" + myClient.chatLog.log.mkString("\n\t"))
//
//
// That's it. Have fun.
//
// ******************************************************************************/
//
// /**
// * ChatServer's internal events.
// */
// sealed trait Event
// case class Login(user: String) extends Event
// case class Logout(user: String) extends Event
// case class GetChatLog(from: String) extends Event
// case class ChatLog(log: List[String]) extends Event
// case class ChatMessage(from: String, message: String) extends Event
//
// /**
// * Chat client.
// */
// class ChatClient(val name: String) {
// val chat = Actor.remote.actorFor("chat:service", "localhost", 2552)
//
// def login = chat ! Login(name)
// def logout = chat ! Logout(name)
// def post(message: String) = chat ! ChatMessage(name, name + ": " + message)
// def chatLog = (chat !! GetChatLog(name)).as[ChatLog].getOrElse(throw new Exception("Couldn't get the chat log from ChatServer"))
// }
//
// /**
// * Internal chat client session.
// */
// class Session(user: String, storage: ActorRef) extends Actor {
// private val loginTime = System.currentTimeMillis
// private var userLog: List[String] = Nil
//
// EventHandler.info(this, "New session for user [%s] has been created at [%s]".format(user, loginTime))
//
// def receive = {
// case msg @ ChatMessage(from, message) =>
// userLog ::= message
// storage ! msg
//
// case msg @ GetChatLog(_) =>
// storage forward msg
// }
// }
//
// /**
// * Abstraction of chat storage holding the chat log.
// */
// trait ChatStorage extends Actor
//
// /**
// * Memory-backed chat storage implementation.
// */
// class MemoryChatStorage extends ChatStorage {
// private var chatLog = TransactionalVector[Array[Byte]]()
//
// EventHandler.info(this, "Memory-based chat storage is starting up...")
//
// def receive = {
// case msg @ ChatMessage(from, message) =>
// EventHandler.debug(this, "New chat message [%s]".format(message))
// atomic { chatLog + message.getBytes("UTF-8") }
//
// case GetChatLog(_) =>
// val messageList = atomic { chatLog.map(bytes => new String(bytes, "UTF-8")).toList }
// reply(ChatLog(messageList))
// }
//
// override def postRestart(reason: Throwable) {
// chatLog = TransactionalVector()
// }
// }
//
// /**
// * Implements user session management.
// * <p/>
// * Uses self-type annotation (this: Actor =>) to declare that it needs to be mixed in with an Actor.
// */
// trait SessionManagement { this: Actor =>
//
// val storage: ActorRef // needs someone to provide the ChatStorage
// val sessions = new HashMap[String, ActorRef]
//
// protected def sessionManagement: Receive = {
// case Login(username) =>
// EventHandler.info(this, "User [%s] has logged in".format(username))
// val session = actorOf(new Session(username, storage))
// session
// sessions += (username -> session)
//
// case Logout(username) =>
// EventHandler.info(this, "User [%s] has logged out".format(username))
// val session = sessions(username)
// session.stop()
// sessions -= username
// }
//
// protected def shutdownSessions() {
// sessions.foreach { case (_, session) => session.stop() }
// }
// }
//
// /**
// * Implements chat management, e.g. chat message dispatch.
// * <p/>
// * Uses self-type annotation (this: Actor =>) to declare that it needs to be mixed in with an Actor.
// */
// trait ChatManagement { this: Actor =>
// val sessions: HashMap[String, ActorRef] // needs someone to provide the Session map
//
// protected def chatManagement: Receive = {
// case msg @ ChatMessage(from, _) => getSession(from).foreach(_ ! msg)
// case msg @ GetChatLog(from) => getSession(from).foreach(_ forward msg)
// }
//
// private def getSession(from: String) : Option[ActorRef] = {
// if (sessions.contains(from))
// Some(sessions(from))
// else {
// EventHandler.info(this, "Session expired for %s".format(from))
// None
// }
// }
// }
//
// /**
// * Creates and links a MemoryChatStorage.
// */
// trait MemoryChatStorageFactory { this: Actor =>
// val storage = actorOf(Props[MemoryChatStorage].withSupervisor(this.self)) // starts and links ChatStorage
// }
//
// /**
// * Chat server. Manages sessions and redirects all other messages to the Session for the client.
// */
// trait ChatServer extends Actor {
// //faultHandler = OneForOneStrategy(List(classOf[Exception]),5, 5000)
// val storage: ActorRef
//
// EventHandler.info(this, "Chat server is starting up...")
//
// // actor message handler
// def receive: Receive = sessionManagement orElse chatManagement
//
// // abstract methods to be defined somewhere else
// protected def chatManagement: Receive
// protected def sessionManagement: Receive
// protected def shutdownSessions()
//
// override def postStop() {
// EventHandler.info(this, "Chat server is shutting down...")
// shutdownSessions()
// storage.stop()
// }
// }
//
// /**
// * Class encapsulating the full Chat Service.
// * Start service by invoking:
// * <pre>
// * val chatService = Actor.actorOf[ChatService]
// * </pre>
// */
// class ChatService extends
// ChatServer with
// SessionManagement with
// ChatManagement with
// MemoryChatStorageFactory {
// override def preStart() {
// remote.start("localhost", 2552);
// remote.register("chat:service", self) //Register the actor with the specified service id
// }
// }
//
// /**
// * Test runner starting ChatService.
// */
// object ServerRunner {
//
// def main(args: Array[String]) { ServerRunner.run() }
//
// def run() {
// actorOf[ChatService]
// }
// }
//
// /**
// * Test runner emulating a chat session.
// */
// object ClientRunner {
//
// def main(args: Array[String]) { ClientRunner.run() }
//
// def run() {
//
// val client1 = new ChatClient("jonas")
// client1.login
// val client2 = new ChatClient("patrik")
// client2.login
//
// client1.post("Hi there")
// println("CHAT LOG:\n\t" + client1.chatLog.log.mkString("\n\t"))
//
// client2.post("Hello")
// println("CHAT LOG:\n\t" + client2.chatLog.log.mkString("\n\t"))
//
// client1.post("Hi again")
// println("CHAT LOG:\n\t" + client1.chatLog.log.mkString("\n\t"))
//
// client1.logout
// client2.logout
// }
// }
//

View file

@ -1,27 +0,0 @@
---------------------------------------------------------
== Akka Remote Sample Application ==
---------------------------------------------------------
= Server Managed Remote Actors Sample =
To run the sample:
1. Fire up two shells. For each of them:
- Step down into the root of the Akka distribution.
- Set 'export AKKA_HOME=<root of distribution>'.
- Run 'sbt'
- Run 'update' followed by 'compile' if you have not done that before.
- Run 'project akka-sample-remote'
- Run 'console' to start up a REPL (interpreter).
2. In the first REPL, execute:
- scala> import sample.remote._
- scala> ServerManagedRemoteActorServer.run
This starts up the RemoteNode and registers the remote actor.
3. In the second REPL, execute:
- scala> import sample.remote._
- scala> ServerManagedRemoteActorClient.run
4. See the actor conversation.
5. Run it again to see full speed after first initialization.
Now you can test client reconnect by killing the console running the ServerManagedRemoteActorClient and starting it up again. Watch the client reconnect take place in the REPL shell.
That's it. Have fun.

View file

@ -1,39 +0,0 @@
/**
* Copyright (C) 2009-2011 Typesafe Inc. <http://www.typesafe.com>
*/
// REMOTING IS NOT PART OF MILESTONE 1 OF AKKA 2.0
// TODO FIXME 2.0
//package sample.remote
//
//import akka.actor.Actor._
//import akka.actor. {ActorRegistry, Actor}
//
//class HelloWorldActor extends Actor {
// def receive = {
// case "Hello" =>
// reply("World")
// }
//}
//
//object ServerManagedRemoteActorServer {
//
// def run = {
// Actor.remote.start("localhost", 2552)
// Actor.remote.register("hello-service", actorOf[HelloWorldActor])
// }
//
// def main(args: Array[String]) = run
//}
//
//object ServerManagedRemoteActorClient {
//
// def run = {
// val actor = Actor.remote.actorFor("hello-service", "localhost", 2552)
// val result = actor !! "Hello"
// }
//
// def main(args: Array[String]) = run
//}
//

View file

@ -5,7 +5,7 @@
package akka.event.slf4j
import org.slf4j.{ Logger ⇒ SLFLogger, LoggerFactory ⇒ SLFLoggerFactory }
import org.slf4j.MDC
import akka.event.Logging._
import akka.actor._
@ -27,31 +27,54 @@ object Logger {
/**
* SLF4J Event Handler.
*
* The thread in which the logging was performed is captured in
* Mapped Diagnostic Context (MDC) with attribute name "sourceThread".
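*
* A logging layout can surface this attribute, e.g. (illustrative, not part of
* this commit) a logback pattern containing %X{sourceThread}:
*   <pattern>%date{ISO8601} %-5level %logger [%X{sourceThread}] %msg%n</pattern>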
*
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
class Slf4jEventHandler extends Actor with SLF4JLogging {
val mdcThreadAttributeName = "sourceThread"
def receive = {
case event @ Error(cause, logSource, message) ⇒
Logger(logSource).error("[{}] [{}] [{}]",
Array[AnyRef](event.thread.getName, message.asInstanceOf[AnyRef], stackTraceFor(cause)))
withMdc(mdcThreadAttributeName, event.thread.getName) {
cause match {
case Error.NoCause ⇒ Logger(logSource).error(message.toString)
case _ ⇒ Logger(logSource).error(message.toString, cause)
}
}
case event @ Warning(logSource, message) ⇒
Logger(logSource).warn("[{}] [{}]",
event.thread.getName, message.asInstanceOf[AnyRef])
withMdc(mdcThreadAttributeName, event.thread.getName) {
Logger(logSource).warn("{}", message.asInstanceOf[AnyRef])
}
case event @ Info(logSource, message) ⇒
Logger(logSource).info("[{}] [{}]",
event.thread.getName, message.asInstanceOf[AnyRef])
withMdc(mdcThreadAttributeName, event.thread.getName) {
Logger(logSource).info("{}", message.asInstanceOf[AnyRef])
}
case event @ Debug(logSource, message) ⇒
Logger(logSource).debug("[{}] [{}]",
event.thread.getName, message.asInstanceOf[AnyRef])
withMdc(mdcThreadAttributeName, event.thread.getName) {
Logger(logSource).debug("{}", message.asInstanceOf[AnyRef])
}
case InitializeLogger(_) ⇒
log.info("Slf4jEventHandler started")
sender ! LoggerInitialized
}
@inline
final def withMdc(name: String, value: String)(logStatement: Unit) {
MDC.put(name, value)
try {
logStatement
} finally {
MDC.remove(name)
}
}
}

View file

@ -33,7 +33,7 @@ class TestBarrier(count: Int) {
} catch {
case e: TimeoutException ⇒
throw new TestBarrierTimeoutException("Timeout of %s and time factor of %s"
format (timeout.toString, TestKitExtension(system).settings.TestTimeFactor))
format (timeout.toString, TestKitExtension(system).TestTimeFactor))
}
}

View file

@ -81,8 +81,7 @@ abstract class EventFilter(occurrences: Int) {
*/
def intercept[T](code: T)(implicit system: ActorSystem): T = {
system.eventStream publish TestEvent.Mute(this)
val testKitExtension = TestKitExtension(system)
val leeway = testKitExtension.settings.TestEventFilterLeeway
val leeway = TestKitExtension(system).TestEventFilterLeeway
try {
val result = code
if (!awaitDone(leeway))

View file

@ -81,7 +81,7 @@ class TestKit(_system: ActorSystem) {
import TestActor.{ Message, RealMessage, NullMessage }
implicit val system = _system
val testKitExtension = TestKitExtension(system)
val testKitSettings = TestKitExtension(system)
private val queue = new LinkedBlockingDeque[Message]()
private[akka] var lastMessage: Message = NullMessage
@ -128,7 +128,7 @@ class TestKit(_system: ActorSystem) {
* block or missing that it returns the properly dilated default for this
* case from settings (key "akka.test.single-expect-default").
*/
def remaining: Duration = if (end == Duration.Undefined) testKitExtension.settings.SingleExpectDefaultTimeout.dilated else end - now
def remaining: Duration = if (end == Duration.Undefined) testKitSettings.SingleExpectDefaultTimeout.dilated else end - now
/**
* Query queue status.
@ -569,10 +569,8 @@ object TestKit {
* Java API. Scale timeouts (durations) during tests with the configured
* 'akka.test.timefactor'.
*/
def dilated(duration: Duration, system: ActorSystem): Duration = {
duration * TestKitExtension(system).settings.TestTimeFactor
}
def dilated(duration: Duration, system: ActorSystem): Duration =
duration * TestKitExtension(system).TestTimeFactor
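// Illustrative only (not part of this commit): with "akka.test.timefactor = 2.0",
//   TestKit.dilated(Duration(3, TimeUnit.SECONDS), system)
// yields a 6 second duration.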
}
/**

View file

@ -3,53 +3,27 @@
*/
package akka.testkit
import akka.actor.ActorSystem
import akka.actor.ExtensionKey
import akka.actor.Extension
import akka.actor.ActorSystemImpl
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import com.typesafe.config.ConfigParseOptions
import com.typesafe.config.ConfigRoot
import akka.util.Duration
import java.util.concurrent.TimeUnit.MILLISECONDS
import akka.actor.{ ExtensionId, ActorSystem, Extension, ActorSystemImpl }
object TestKitExtensionKey extends ExtensionKey[TestKitExtension]
object TestKitExtension {
def apply(system: ActorSystem): TestKitExtension = {
if (!system.hasExtension(TestKitExtensionKey)) {
system.registerExtension(new TestKitExtension)
}
system.extension(TestKitExtensionKey)
}
class Settings(cfg: Config) {
private def referenceConfig: Config =
ConfigFactory.parseResource(classOf[ActorSystem], "/akka-testkit-reference.conf",
ConfigParseOptions.defaults.setAllowMissing(false))
val config: ConfigRoot = ConfigFactory.emptyRoot("akka-testkit").withFallback(cfg).withFallback(referenceConfig).resolve()
import config._
val TestTimeFactor = getDouble("akka.test.timefactor")
val SingleExpectDefaultTimeout = Duration(getMilliseconds("akka.test.single-expect-default"), MILLISECONDS)
val TestEventFilterLeeway = Duration(getMilliseconds("akka.test.filter-leeway"), MILLISECONDS)
}
object TestKitExtension extends ExtensionId[TestKitSettings] {
def createExtension(system: ActorSystemImpl): TestKitSettings = new TestKitSettings(system.applicationConfig)
}
class TestKitExtension extends Extension[TestKitExtension] {
import TestKitExtension._
@volatile
private var _settings: Settings = _
class TestKitSettings(cfg: Config) extends Extension {
private def referenceConfig: Config =
ConfigFactory.parseResource(classOf[ActorSystem], "/akka-testkit-reference.conf",
ConfigParseOptions.defaults.setAllowMissing(false))
val config: ConfigRoot = ConfigFactory.emptyRoot("akka-testkit").withFallback(cfg).withFallback(referenceConfig).resolve()
def key = TestKitExtensionKey
def init(system: ActorSystemImpl) {
_settings = new Settings(system.applicationConfig)
}
def settings: Settings = _settings
import config._
val TestTimeFactor = getDouble("akka.test.timefactor")
val SingleExpectDefaultTimeout = Duration(getMilliseconds("akka.test.single-expect-default"), MILLISECONDS)
val TestEventFilterLeeway = Duration(getMilliseconds("akka.test.filter-leeway"), MILLISECONDS)
}
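With this change the test-kit settings object is itself the extension; a minimal usage sketch (assuming an ActorSystem value named system is in scope) might look like:

    val settings = TestKitExtension(system)        // created on first lookup, then cached by the system
    val factor   = settings.TestTimeFactor         // "akka.test.timefactor"
    val leeway   = settings.TestEventFilterLeeway  // "akka.test.filter-leeway"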

View file

@ -34,10 +34,9 @@ class TestLatch(count: Int = 1)(implicit system: ActorSystem) {
def await(): Boolean = await(TestLatch.DefaultTimeout)
def await(timeout: Duration): Boolean = {
val testKitExtension = TestKitExtension(system)
val opened = latch.await(timeout.dilated.toNanos, TimeUnit.NANOSECONDS)
if (!opened) throw new TestLatchTimeoutException(
"Timeout of %s with time factor of %s" format (timeout.toString, testKitExtension.settings.TestTimeFactor))
"Timeout of %s with time factor of %s" format (timeout.toString, TestKitExtension(system).TestTimeFactor))
opened
}
@ -45,10 +44,9 @@ class TestLatch(count: Int = 1)(implicit system: ActorSystem) {
* Timeout is expected. Throws exception if latch is opened before timeout.
*/
def awaitTimeout(timeout: Duration = TestLatch.DefaultTimeout) = {
val testKitExtension = TestKitExtension(system)
val opened = latch.await(timeout.dilated.toNanos, TimeUnit.NANOSECONDS)
if (opened) throw new TestLatchNoTimeoutException(
"Latch opened before timeout of %s with time factor of %s" format (timeout.toString, testKitExtension.settings.TestTimeFactor))
"Latch opened before timeout of %s with time factor of %s" format (timeout.toString, TestKitExtension(system).TestTimeFactor))
opened
}

View file

@ -12,9 +12,9 @@ package object testkit {
try {
val result = block
val testKitExtension = TestKitExtension(system)
val stop = now + testKitExtension.settings.TestEventFilterLeeway.toMillis
val failed = eventFilters filterNot (_.awaitDone(Duration(stop - now, MILLISECONDS))) map ("Timeout (" + testKitExtension.settings.TestEventFilterLeeway + ") waiting for " + _)
val testKitSettings = TestKitExtension(system)
val stop = now + testKitSettings.TestEventFilterLeeway.toMillis
val failed = eventFilters filterNot (_.awaitDone(Duration(stop - now, MILLISECONDS))) map ("Timeout (" + testKitSettings.TestEventFilterLeeway + ") waiting for " + _)
if (failed.nonEmpty)
throw new AssertionError("Filter completion error:\n" + failed.mkString("\n"))
@ -45,7 +45,7 @@ package object testkit {
*/
class TestDuration(duration: Duration) {
def dilated(implicit system: ActorSystem): Duration = {
duration * TestKitExtension(system).settings.TestTimeFactor
duration * TestKitExtension(system).TestTimeFactor
}
}
}

View file

@ -24,6 +24,7 @@ object AkkaSpec {
akka {
event-handlers = ["akka.testkit.TestEventListener"]
loglevel = "WARNING"
stdout-loglevel = "WARNING"
actor {
default-dispatcher {
core-pool-size = 4

View file

@ -15,7 +15,7 @@ class TestTimeSpec extends AkkaSpec(Map("akka.test.timefactor" -> 2.0)) with Bef
val now = System.nanoTime
intercept[AssertionError] { probe.awaitCond(false, Duration("1 second")) }
val diff = System.nanoTime - now
val target = (1000000000l * testKitExtension.settings.TestTimeFactor).toLong
val target = (1000000000l * testKitSettings.TestTimeFactor).toLong
diff must be > (target - 300000000l)
diff must be < (target + 300000000l)
}

View file

@ -106,7 +106,7 @@ public class Pi {
this.latch = latch;
Creator<Router> routerCreator = new Creator<Router>() {
public Router create() {
return new RoundRobinRouter();
return new RoundRobinRouter(dispatcher(), new akka.actor.Timeout(-1));
}
};
LinkedList<ActorRef> actors = new LinkedList<ActorRef>() {

7
build.sbt Normal file
View file

@ -0,0 +1,7 @@
seq(lsSettings:_*)
(LsKeys.tags in LsKeys.lsync) := Seq("actors", "stm", "concurrency", "distributed", "fault-tolerance", "scala", "java", "futures", "dataflow", "remoting")
(externalResolvers in LsKeys.lsync) := Seq("Akka Repository" at "http://akka.io/repository/")
(description in LsKeys.lsync) := "Akka is the platform for the next generation of event-driven, scalable and fault-tolerant architectures on the JVM."

View file

@ -1,11 +1,18 @@
/**
* Copyright (C) 2009-2011 Typesafe Inc. <http://www.typesafe.com>
*/
package akka
import sbt._
import Keys._
import com.typesafe.sbtmultijvm.MultiJvmPlugin
import MultiJvmPlugin.{ MultiJvm, extraOptions, jvmOptions, scalatestOptions }
import com.typesafe.sbtscalariform.ScalariformPlugin
import MultiJvmPlugin.{ MultiJvm, extraOptions, jvmOptions, scalatestOptions }
import ScalariformPlugin.{ format, formatPreferences, formatSourceDirectories }
import java.lang.Boolean.getBoolean
object AkkaBuild extends Build {
@ -26,7 +33,6 @@ object AkkaBuild extends Build {
rstdocDirectory <<= baseDirectory / "akka-docs"
),
aggregate = Seq(actor, testkit, actorTests, stm, remote, slf4j, amqp, mailboxes, akkaSbtPlugin, samples, tutorials, docs)
//aggregate = Seq(cluster, mailboxes, camel, camelTyped)
)
lazy val actor = Project(
@ -88,23 +94,6 @@ object AkkaBuild extends Build {
)
) configs (MultiJvm)
// lazy val cluster = Project(
// id = "akka-cluster",
// base = file("akka-cluster"),
// dependencies = Seq(stm, actorTests % "test->test", testkit % "test"),
// settings = defaultSettings ++ multiJvmSettings ++ Seq(
// libraryDependencies ++= Dependencies.cluster,
// extraOptions in MultiJvm <<= (sourceDirectory in MultiJvm) { src =>
// (name: String) => (src ** (name + ".conf")).get.headOption.map("-Dakka.config=" + _.absolutePath).toSeq
// },
// scalatestOptions in MultiJvm := Seq("-r", "org.scalatest.akka.QuietReporter"),
// jvmOptions in MultiJvm := {
// if (getBoolean("sbt.log.noformat")) Seq("-Dakka.test.nocolor=true") else Nil
// },
// test in Test <<= (test in Test) dependsOn (test in MultiJvm)
// )
// ) configs (MultiJvm)
lazy val slf4j = Project(
id = "akka-slf4j",
base = file("akka-slf4j"),
@ -173,7 +162,7 @@ object AkkaBuild extends Build {
testOptions in Test <+= testRedisMailbox map { test => Tests.Filter(s => test) }
)
)
lazy val zookeeperMailbox = Project(
id = "akka-zookeeper-mailbox",
base = file("akka-durable-mailboxes/akka-zookeeper-mailbox"),
@ -196,23 +185,6 @@ object AkkaBuild extends Build {
)
)
// lazy val camel = Project(
// id = "akka-camel",
// base = file("akka-camel"),
// dependencies = Seq(actor, slf4j, testkit % "test"),
// settings = defaultSettings ++ Seq(
// libraryDependencies ++= Dependencies.camel
// )
// )
// can be merged back into akka-camel
// lazy val camelTyped = Project(
// id = "akka-camel-typed",
// base = file("akka-camel-typed"),
// dependencies = Seq(camel % "compile;test->test", testkit % "test"),
// settings = defaultSettings
// )
// lazy val spring = Project(
// id = "akka-spring",
// base = file("akka-spring"),
@ -243,30 +215,7 @@ object AkkaBuild extends Build {
id = "akka-samples",
base = file("akka-samples"),
settings = parentSettings,
aggregate = Seq(antsSample, helloSample, osgiSample, fsmSample)
)
lazy val antsSample = Project(
id = "akka-sample-ants",
base = file("akka-samples/akka-sample-ants"),
dependencies = Seq(actor, stm),
settings = defaultSettings
)
lazy val helloSample = Project(
id = "akka-sample-hello",
base = file("akka-samples/akka-sample-hello"),
dependencies = Seq(actor),
settings = defaultSettings
)
lazy val osgiSample = Project(
id = "akka-sample-osgi",
base = file("akka-samples/akka-sample-osgi"),
dependencies = Seq(actor),
settings = defaultSettings ++ Seq(
libraryDependencies ++= Dependencies.sampleOsgi
)
aggregate = Seq(fsmSample, helloSample)
)
lazy val fsmSample = Project(
@ -275,38 +224,14 @@ object AkkaBuild extends Build {
dependencies = Seq(actor),
settings = defaultSettings
)
// lazy val chatSample = Project(
// id = "akka-sample-chat",
// base = file("akka-samples/akka-sample-chat"),
// dependencies = Seq(cluster),
// settings = defaultSettings
// )
// lazy val samples = Project(
// id = "akka-samples",
// base = file("akka-samples"),
// settings = parentSettings,
// aggregate = Seq(fsmSample)
// // aggregate = Seq(fsmSample, camelSample)
// )
// lazy val camelSample = Project(
// id = "akka-sample-camel",
// base = file("akka-samples/akka-sample-camel"),
// dependencies = Seq(actor, camelTyped, testkit % "test"),
// settings = defaultSettings ++ Seq(
// libraryDependencies ++= Dependencies.sampleCamel
// )
// )
// lazy val remoteSample = Project(
// id = "akka-sample-remote",
// base = file("akka-samples/akka-sample-remote"),
// dependencies = Seq(cluster),
// settings = defaultSettings
// )
lazy val helloSample = Project(
id = "akka-sample-hello",
base = file("akka-samples/akka-sample-hello"),
dependencies = Seq(actor),
settings = defaultSettings
)
lazy val tutorials = Project(
id = "akka-tutorials",
base = file("akka-tutorials"),
@ -351,12 +276,11 @@ object AkkaBuild extends Build {
publishArtifact in Compile := false
)
val testExcludes = SettingKey[Seq[String]]("test-excludes")
val excludeTestNames = SettingKey[Seq[String]]("exclude-test-names")
val excludeTestTags = SettingKey[Seq[String]]("exclude-test-tags")
val includeTestTags = SettingKey[Seq[String]]("include-test-tags")
def akkaTestExcludes: Seq[String] = {
val exclude = System.getProperty("akka.test.exclude", "")
if (exclude.isEmpty) Seq.empty else exclude.split(",").toSeq
}
val defaultExcludedTags = Seq("timing")
lazy val defaultSettings = baseSettings ++ formatSettings ++ Seq(
resolvers += "Typesafe Repo" at "http://repo.typesafe.com/typesafe/releases/",
@ -374,9 +298,37 @@ object AkkaBuild extends Build {
// disable parallel tests
parallelExecution in Test := false,
// for excluding tests in jenkins builds (-Dakka.test.exclude=TimingSpec)
testExcludes := akkaTestExcludes,
testOptions in Test <++= testExcludes map { _.map(exclude => Tests.Filter(test => !test.contains(exclude))) },
// for excluding tests by name (or use system property: -Dakka.test.names.exclude=TimingSpec)
excludeTestNames := {
val exclude = System.getProperty("akka.test.names.exclude", "")
if (exclude.isEmpty) Seq.empty else exclude.split(",").toSeq
},
// for excluding tests by tag (or use system property: -Dakka.test.tags.exclude=timing)
excludeTestTags := {
val exclude = System.getProperty("akka.test.tags.exclude", "")
if (exclude.isEmpty) defaultExcludedTags else exclude.split(",").toSeq
},
// for including tests by tag (or use system property: -Dakka.test.tags.include=timing)
includeTestTags := {
val include = System.getProperty("akka.test.tags.include", "")
if (include.isEmpty) Seq.empty else include.split(",").toSeq
},
// add filters for tests excluded by name
testOptions in Test <++= excludeTestNames map { _.map(exclude => Tests.Filter(test => !test.contains(exclude))) },
// add arguments for tests excluded by tag - includes override excludes (opposite to scalatest)
testOptions in Test <++= (excludeTestTags, includeTestTags) map { (excludes, includes) =>
val tags = (excludes.toSet -- includes.toSet).toSeq
if (tags.isEmpty) Seq.empty else Seq(Tests.Argument("-l", tags.mkString(" ")))
},
// add arguments for tests included by tag
testOptions in Test <++= includeTestTags map { tags =>
if (tags.isEmpty) Seq.empty else Seq(Tests.Argument("-n", tags.mkString(" ")))
},
// show full stack traces
testOptions in Test += Tests.Argument("-oF")
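For example (illustrative command lines, not part of this commit), the filters can be driven from the sbt invocation:
  sbt -Dakka.test.names.exclude=TimingSpec test
  sbt -Dakka.test.tags.exclude=timing test
  sbt -Dakka.test.tags.include=timing test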
@ -442,7 +394,7 @@ object Dependencies {
val amqp = Seq(rabbit, commonsIo, protobuf)
val mailboxes = Seq(Test.scalatest, Test.junit)
val fileMailbox = Seq(Test.scalatest, Test.junit)
val beanstalkMailbox = Seq(beanstalk, Test.junit)
@ -450,13 +402,10 @@ object Dependencies {
val redisMailbox = Seq(redis, Test.junit)
val mongoMailbox = Seq(mongoAsync, twttrUtilCore, Test.junit)
val zookeeperMailbox = Seq(zookeeper, Test.junit)
// val camel = Seq(camelCore, Test.junit, Test.scalatest, Test.logback)
val spring = Seq(springBeans, springContext, Test.junit, Test.scalatest)
// val spring = Seq(springBeans, springContext, camelSpring, Test.junit, Test.scalatest)
val kernel = Seq(
jettyUtil, jettyXml, jettyServlet, jacksonCore, staxApi
@ -488,7 +437,7 @@ object Dependency {
val Netty = "3.2.5.Final"
val Protobuf = "2.4.1"
val Scalatest = "1.6.1"
val Slf4j = "1.6.0"
val Slf4j = "1.6.4"
val Spring = "3.0.5.RELEASE"
val Zookeeper = "3.4.0"
val Rabbit = "2.3.1"

View file

@ -4,3 +4,9 @@ resolvers += Classpaths.typesafeResolver
addSbtPlugin("com.typesafe.sbtmultijvm" % "sbt-multi-jvm" % "0.1.7")
addSbtPlugin("com.typesafe.sbtscalariform" % "sbt-scalariform" % "0.1.4")
resolvers ++= Seq(
"less is" at "http://repo.lessis.me",
"coda" at "http://repo.codahale.com")
addSbtPlugin("me.lessis" % "ls-sbt" % "0.1.0")