diff --git a/.gitignore b/.gitignore index 55bcf5ee42..91a3be7969 100755 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ project/boot/* lib_managed etags TAGS +akka.tmproj reports dist build @@ -32,9 +33,11 @@ tm.out *.iws *.ipr *.iml +run-codefellow .project .settings .classpath .idea .scala_dependencies -multiverse.log \ No newline at end of file +multiverse.log +.eprj \ No newline at end of file diff --git a/akka-active-object-test/pom.xml b/akka-active-object-test/pom.xml index dffacd8db0..e498c34d32 100644 --- a/akka-active-object-test/pom.xml +++ b/akka-active-object-test/pom.xml @@ -3,7 +3,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> 4.0.0 - Akka Active Object Tests in Java + Akka TypedActor Tests in Java akka-active-object-test se.scalablesolutions.akka 0.9 diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/AllTest.java b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/AllTest.java index 465c9da182..77739f6ff1 100644 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/AllTest.java +++ b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/AllTest.java @@ -10,7 +10,7 @@ public class AllTest extends TestCase { suite.addTestSuite(InMemoryStateTest.class); suite.addTestSuite(InMemNestedStateTest.class); suite.addTestSuite(RemoteInMemoryStateTest.class); - suite.addTestSuite(ActiveObjectGuiceConfiguratorTest.class); + suite.addTestSuite(TypedActorGuiceConfiguratorTest.class); return suite; } diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemNestedStateTest.java b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemNestedStateTest.java index 746df950bf..db9d4d4146 100644 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemNestedStateTest.java +++ 
b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemNestedStateTest.java @@ -6,7 +6,7 @@ package se.scalablesolutions.akka.api; import se.scalablesolutions.akka.config.*; import se.scalablesolutions.akka.config.Config; -import se.scalablesolutions.akka.config.ActiveObjectConfigurator; +import se.scalablesolutions.akka.config.TypedActorConfigurator; import static se.scalablesolutions.akka.config.JavaConfig.*; import se.scalablesolutions.akka.actor.*; import junit.framework.TestCase; @@ -14,7 +14,7 @@ import junit.framework.TestCase; public class InMemNestedStateTest extends TestCase { static String messageLog = ""; - final private ActiveObjectConfigurator conf = new ActiveObjectConfigurator(); + final private TypedActorConfigurator conf = new TypedActorConfigurator(); public InMemNestedStateTest() { conf.configure( diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemoryStateTest.java b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemoryStateTest.java index 3708d58acc..6562d0d611 100644 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemoryStateTest.java +++ b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemoryStateTest.java @@ -8,7 +8,7 @@ import junit.framework.TestCase; import se.scalablesolutions.akka.config.Config; import se.scalablesolutions.akka.config.*; -import se.scalablesolutions.akka.config.ActiveObjectConfigurator; +import se.scalablesolutions.akka.config.TypedActorConfigurator; import static se.scalablesolutions.akka.config.JavaConfig.*; @@ -17,7 +17,7 @@ import se.scalablesolutions.akka.actor.*; public class InMemoryStateTest extends TestCase { static String messageLog = ""; - final private ActiveObjectConfigurator conf = new ActiveObjectConfigurator(); + final private TypedActorConfigurator conf = new TypedActorConfigurator(); public InMemoryStateTest() { Config.config(); diff --git 
a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/MiscActiveObjectTest.java b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/MiscActiveObjectTest.java index e61b8ac07d..aaa97d3587 100644 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/MiscActiveObjectTest.java +++ b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/MiscActiveObjectTest.java @@ -1,7 +1,7 @@ package se.scalablesolutions.akka.api; -import static se.scalablesolutions.akka.actor.ActiveObject.link; -import static se.scalablesolutions.akka.actor.ActiveObject.newInstance; +import static se.scalablesolutions.akka.actor.TypedActor.link; +import static se.scalablesolutions.akka.actor.TypedActor.newInstance; import org.junit.Assert; import org.junit.Test; @@ -15,7 +15,7 @@ import junit.framework.TestCase; * @author johanrask * */ -public class MiscActiveObjectTest extends TestCase { +public class MiscTypedActorTest extends TestCase { /** diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/RemoteInMemoryStateTest.java b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/RemoteInMemoryStateTest.java index d4b4fd7687..3ae8b647ab 100644 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/RemoteInMemoryStateTest.java +++ b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/RemoteInMemoryStateTest.java @@ -5,8 +5,8 @@ package se.scalablesolutions.akka.api; import se.scalablesolutions.akka.config.Config; -import se.scalablesolutions.akka.actor.ActiveObject; -import se.scalablesolutions.akka.config.ActiveObjectConfigurator; +import se.scalablesolutions.akka.actor.TypedActor; +import se.scalablesolutions.akka.config.TypedActorConfigurator; import se.scalablesolutions.akka.remote.RemoteNode; import junit.framework.TestCase; @@ -23,14 +23,14 @@ public class RemoteInMemoryStateTest extends TestCase { try { Thread.currentThread().sleep(1000); } catch 
(Exception e) {} Config.config(); } - final ActiveObjectConfigurator conf = new ActiveObjectConfigurator(); + final TypedActorConfigurator conf = new TypedActorConfigurator(); protected void tearDown() { conf.stop(); } public void testMapShouldNotRollbackStateForStatefulServerInCaseOfSuccess() { - InMemStateful stateful = ActiveObject.newRemoteInstance(InMemStateful.class, 1000, "localhost", 9999); + InMemStateful stateful = TypedActor.newRemoteInstance(InMemStateful.class, 1000, "localhost", 9999); stateful.init(); stateful.setMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init"); // set init state stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state"); // transactionrequired @@ -38,10 +38,10 @@ public class RemoteInMemoryStateTest extends TestCase { } public void testMapShouldRollbackStateForStatefulServerInCaseOfFailure() { - InMemStateful stateful = ActiveObject.newRemoteInstance(InMemStateful.class, 10000, "localhost", 9999); + InMemStateful stateful = TypedActor.newRemoteInstance(InMemStateful.class, 10000, "localhost", 9999); stateful.init(); stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init"); // set init state - InMemFailer failer = ActiveObject.newRemoteInstance(InMemFailer.class, 1000, "localhost", 9999); //conf.getInstance(InMemFailer.class); + InMemFailer failer = TypedActor.newRemoteInstance(InMemFailer.class, 1000, "localhost", 9999); //conf.getInstance(InMemFailer.class); try { stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer); // call failing transactionrequired method fail("should have thrown an exception"); @@ -51,7 +51,7 @@ public class RemoteInMemoryStateTest extends TestCase { } public void testVectorShouldNotRollbackStateForStatefulServerInCaseOfSuccess() { - InMemStateful stateful = ActiveObject.newRemoteInstance(InMemStateful.class, 10000, "localhost", 9999); + InMemStateful stateful = 
TypedActor.newRemoteInstance(InMemStateful.class, 10000, "localhost", 9999); stateful.init(); stateful.setVectorState("init"); // set init state stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state"); // transactionrequired @@ -59,10 +59,10 @@ public class RemoteInMemoryStateTest extends TestCase { } public void testVectorShouldRollbackStateForStatefulServerInCaseOfFailure() { - InMemStateful stateful = ActiveObject.newRemoteInstance(InMemStateful.class, 10000, "localhost", 9999); + InMemStateful stateful = TypedActor.newRemoteInstance(InMemStateful.class, 10000, "localhost", 9999); stateful.init(); stateful.setVectorState("init"); // set init state - InMemFailer failer = ActiveObject.newRemoteInstance(InMemFailer.class, 10000, "localhost", 9999); //conf.getInstance(InMemFailer.class); + InMemFailer failer = TypedActor.newRemoteInstance(InMemFailer.class, 10000, "localhost", 9999); //conf.getInstance(InMemFailer.class); try { stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer); // call failing transactionrequired method fail("should have thrown an exception"); @@ -72,7 +72,7 @@ public class RemoteInMemoryStateTest extends TestCase { } public void testRefShouldNotRollbackStateForStatefulServerInCaseOfSuccess() { - InMemStateful stateful = ActiveObject.newRemoteInstance(InMemStateful.class, 10000, "localhost", 9999); + InMemStateful stateful = TypedActor.newRemoteInstance(InMemStateful.class, 10000, "localhost", 9999); stateful.init(); stateful.setRefState("init"); // set init state stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state"); // transactionrequired @@ -80,10 +80,10 @@ public class RemoteInMemoryStateTest extends TestCase { } public void testRefShouldRollbackStateForStatefulServerInCaseOfFailure() { - InMemStateful stateful = ActiveObject.newRemoteInstance(InMemStateful.class, 10000, "localhost", 9999); + InMemStateful stateful = 
TypedActor.newRemoteInstance(InMemStateful.class, 10000, "localhost", 9999); stateful.init(); stateful.setRefState("init"); // set init state - InMemFailer failer = ActiveObject.newRemoteInstance(InMemFailer.class, 10000, "localhost", 9999); //conf.getInstance(InMemFailer.class); + InMemFailer failer = TypedActor.newRemoteInstance(InMemFailer.class, 10000, "localhost", 9999); //conf.getInstance(InMemFailer.class); try { stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer); // call failing transactionrequired method fail("should have thrown an exception"); diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/ActiveObjectGuiceConfiguratorTest.java b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/TypedActorGuiceConfiguratorTest.java similarity index 87% rename from akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/ActiveObjectGuiceConfiguratorTest.java rename to akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/TypedActorGuiceConfiguratorTest.java index 69f74ec537..e604b4da69 100644 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/ActiveObjectGuiceConfiguratorTest.java +++ b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/TypedActorGuiceConfiguratorTest.java @@ -10,14 +10,14 @@ import com.google.inject.Scopes; import junit.framework.TestCase; import se.scalablesolutions.akka.config.Config; -import se.scalablesolutions.akka.config.ActiveObjectConfigurator; +import se.scalablesolutions.akka.config.TypedActorConfigurator; import static se.scalablesolutions.akka.config.JavaConfig.*; import se.scalablesolutions.akka.dispatch.*; -public class ActiveObjectGuiceConfiguratorTest extends TestCase { +public class TypedActorGuiceConfiguratorTest extends TestCase { static String messageLog = ""; - final private ActiveObjectConfigurator conf = new ActiveObjectConfigurator(); + final private 
TypedActorConfigurator conf = new TypedActorConfigurator(); protected void setUp() { Config.config(); @@ -46,7 +46,7 @@ public class ActiveObjectGuiceConfiguratorTest extends TestCase { } - public void testGuiceActiveObjectInjection() { + public void testGuiceTypedActorInjection() { messageLog = ""; Foo foo = conf.getInstance(Foo.class); Bar bar = conf.getInstance(Bar.class); @@ -69,7 +69,7 @@ public class ActiveObjectGuiceConfiguratorTest extends TestCase { } } - public void testActiveObjectInvocation() throws InterruptedException { + public void testTypedActorInvocation() throws InterruptedException { messageLog = ""; Foo foo = conf.getInstance(Foo.class); messageLog += foo.foo("foo "); @@ -79,7 +79,7 @@ public class ActiveObjectGuiceConfiguratorTest extends TestCase { assertEquals("foo return_foo before_bar ", messageLog); } - public void testActiveObjectInvocationsInvocation() throws InterruptedException { + public void testTypedActorInvocationsInvocation() throws InterruptedException { messageLog = ""; Foo foo = conf.getInstance(Foo.class); Bar bar = conf.getInstance(Bar.class); diff --git a/akka-amqp/src/main/scala/AMQP.scala b/akka-amqp/src/main/scala/AMQP.scala index 8605401dbd..0bfbd93063 100644 --- a/akka-amqp/src/main/scala/AMQP.scala +++ b/akka-amqp/src/main/scala/AMQP.scala @@ -136,8 +136,8 @@ object AMQP { def toBinary(t: T): Array[Byte] } - + case class RpcClientSerializer[O,I](toBinary: ToBinary[O], fromBinary: FromBinary[I]) - + case class RpcServerSerializer[I,O](fromBinary: FromBinary[I], toBinary: ToBinary[O]) } diff --git a/akka-amqp/src/main/scala/ConsumerActor.scala b/akka-amqp/src/main/scala/ConsumerActor.scala index 26d1ac00db..d394e9d997 100644 --- a/akka-amqp/src/main/scala/ConsumerActor.scala +++ b/akka-amqp/src/main/scala/ConsumerActor.scala @@ -14,7 +14,7 @@ import java.lang.Throwable private[amqp] class ConsumerActor(consumerParameters: ConsumerParameters) extends FaultTolerantChannelActor(consumerParameters.exchangeParameters, 
consumerParameters.channelParameters) { - + import consumerParameters._ import exchangeParameters._ diff --git a/akka-amqp/src/main/scala/FaultTolerantChannelActor.scala b/akka-amqp/src/main/scala/FaultTolerantChannelActor.scala index 40bcd5de57..5ecae4c6d3 100644 --- a/akka-amqp/src/main/scala/FaultTolerantChannelActor.scala +++ b/akka-amqp/src/main/scala/FaultTolerantChannelActor.scala @@ -14,7 +14,7 @@ import se.scalablesolutions.akka.amqp.AMQP.{ExchangeParameters, ChannelParameter abstract private[amqp] class FaultTolerantChannelActor( exchangeParameters: ExchangeParameters, channelParameters: Option[ChannelParameters]) extends Actor { - + import exchangeParameters._ protected[amqp] var channel: Option[Channel] = None @@ -104,4 +104,4 @@ abstract private[amqp] class FaultTolerantChannelActor( } override def shutdown = closeChannel -} \ No newline at end of file +} diff --git a/akka-amqp/src/main/scala/ProducerActor.scala b/akka-amqp/src/main/scala/ProducerActor.scala index db290a5ac1..48a6be0a94 100644 --- a/akka-amqp/src/main/scala/ProducerActor.scala +++ b/akka-amqp/src/main/scala/ProducerActor.scala @@ -9,7 +9,7 @@ import se.scalablesolutions.akka.amqp.AMQP.ProducerParameters private[amqp] class ProducerActor(producerParameters: ProducerParameters) extends FaultTolerantChannelActor(producerParameters.exchangeParameters, producerParameters.channelParameters) { - + import producerParameters._ import exchangeParameters._ diff --git a/akka-amqp/src/main/scala/RpcClientActor.scala b/akka-amqp/src/main/scala/RpcClientActor.scala index f8c376be7e..2935982a67 100644 --- a/akka-amqp/src/main/scala/RpcClientActor.scala +++ b/akka-amqp/src/main/scala/RpcClientActor.scala @@ -4,6 +4,9 @@ package se.scalablesolutions.akka.amqp +import se.scalablesolutions.akka.serialization.Serializer +import se.scalablesolutions.akka.amqp.AMQP.{ChannelParameters, ExchangeParameters} + import com.rabbitmq.client.{Channel, RpcClient} import 
se.scalablesolutions.akka.amqp.AMQP.{RpcClientSerializer, ChannelParameters, ExchangeParameters} @@ -20,7 +23,6 @@ class RpcClientActor[I,O](exchangeParameters: ExchangeParameters, def specificMessageHandler = { case payload: I => { - rpcClient match { case Some(client) => val response: Array[Byte] = client.primitiveCall(serializer.toBinary.toBinary(payload)) @@ -30,18 +32,12 @@ class RpcClientActor[I,O](exchangeParameters: ExchangeParameters, } } - protected def setupChannel(ch: Channel) = { - rpcClient = Some(new RpcClient(ch, exchangeName, routingKey)) - } + protected def setupChannel(ch: Channel) = rpcClient = Some(new RpcClient(ch, exchangeName, routingKey)) override def preRestart(reason: Throwable) = { rpcClient = None super.preRestart(reason) } - - override def toString(): String = - "AMQP.RpcClient[exchange=" +exchangeName + - ", routingKey=" + routingKey+ "]" - -} \ No newline at end of file + override def toString = "AMQP.RpcClient[exchange=" +exchangeName + ", routingKey=" + routingKey+ "]" +} diff --git a/akka-amqp/src/main/scala/RpcServerActor.scala b/akka-amqp/src/main/scala/RpcServerActor.scala index 897c041c69..c64ef9058e 100644 --- a/akka-amqp/src/main/scala/RpcServerActor.scala +++ b/akka-amqp/src/main/scala/RpcServerActor.scala @@ -31,4 +31,4 @@ class RpcServerActor[I,O](producer: ActorRef, serializer: RpcServerSerializer[I, override def toString(): String = "AMQP.RpcServer[]" -} \ No newline at end of file +} diff --git a/akka-amqp/src/test/scala/AMQPConnectionRecoveryTest.scala b/akka-amqp/src/test/scala/AMQPConnectionRecoveryTest.scala index 3bc2cb20dd..c1af35546a 100644 --- a/akka-amqp/src/test/scala/AMQPConnectionRecoveryTest.scala +++ b/akka-amqp/src/test/scala/AMQPConnectionRecoveryTest.scala @@ -56,4 +56,4 @@ class AMQPConnectionRecoveryTest extends JUnitSuite with MustMatchers with Loggi // this dummy test makes sure that the whole test class doesn't fail because of missing tests assert(true) } -} \ No newline at end of file +} diff 
--git a/akka-amqp/src/test/scala/AMQPConsumerChannelRecoveryTest.scala b/akka-amqp/src/test/scala/AMQPConsumerChannelRecoveryTest.scala index 0f6fadfcc4..a0b44f4739 100644 --- a/akka-amqp/src/test/scala/AMQPConsumerChannelRecoveryTest.scala +++ b/akka-amqp/src/test/scala/AMQPConsumerChannelRecoveryTest.scala @@ -67,4 +67,4 @@ class AMQPConsumerChannelRecoveryTest extends JUnitSuite with MustMatchers with // this dummy test makes sure that the whole test class doesn't fail because of missing tests assert(true) } -} \ No newline at end of file +} diff --git a/akka-amqp/src/test/scala/AMQPConsumerConnectionRecoveryTest.scala b/akka-amqp/src/test/scala/AMQPConsumerConnectionRecoveryTest.scala index 9dccd43be8..bf4885fea5 100644 --- a/akka-amqp/src/test/scala/AMQPConsumerConnectionRecoveryTest.scala +++ b/akka-amqp/src/test/scala/AMQPConsumerConnectionRecoveryTest.scala @@ -86,4 +86,4 @@ class AMQPConsumerConnectionRecoveryTest extends JUnitSuite with MustMatchers wi // this dummy test makes sure that the whole test class doesn't fail because of missing tests assert(true) } -} \ No newline at end of file +} diff --git a/akka-amqp/src/test/scala/AMQPConsumerManualAcknowledgeTest.scala b/akka-amqp/src/test/scala/AMQPConsumerManualAcknowledgeTest.scala index d48f38afc5..2dc4ee939b 100644 --- a/akka-amqp/src/test/scala/AMQPConsumerManualAcknowledgeTest.scala +++ b/akka-amqp/src/test/scala/AMQPConsumerManualAcknowledgeTest.scala @@ -64,4 +64,4 @@ class AMQPConsumerManualAcknowledgeTest extends JUnitSuite with MustMatchers wit // this dummy test makes sure that the whole test class doesn't fail because of missing tests assert(true) } -} \ No newline at end of file +} diff --git a/akka-amqp/src/test/scala/AMQPConsumerMessageTest.scala b/akka-amqp/src/test/scala/AMQPConsumerMessageTest.scala index af94b0a515..5d34f867d6 100644 --- a/akka-amqp/src/test/scala/AMQPConsumerMessageTest.scala +++ b/akka-amqp/src/test/scala/AMQPConsumerMessageTest.scala @@ -38,7 +38,7 @@ class 
AMQPConsumerMessageTest extends JUnitSuite with MustMatchers with Logging val producer = AMQP.newProducer(connection, ProducerParameters(exchangeParameters, channelParameters = Some(channelParameters))) - + countDown.await(2, TimeUnit.SECONDS) must be (true) producer ! Message("some_payload".getBytes, "non.interesting.routing.key") payloadLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) @@ -53,4 +53,4 @@ class AMQPConsumerMessageTest extends JUnitSuite with MustMatchers with Logging // this dummy test makes sure that the whole test class doesn't fail because of missing tests assert(true) } -} \ No newline at end of file +} diff --git a/akka-amqp/src/test/scala/AMQPProducerChannelRecoveryTest.scala b/akka-amqp/src/test/scala/AMQPProducerChannelRecoveryTest.scala index 095a21fc86..26b2d78393 100644 --- a/akka-amqp/src/test/scala/AMQPProducerChannelRecoveryTest.scala +++ b/akka-amqp/src/test/scala/AMQPProducerChannelRecoveryTest.scala @@ -60,4 +60,4 @@ class AMQPProducerChannelRecoveryTest extends JUnitSuite with MustMatchers with // this dummy test makes sure that the whole test class doesn't fail because of missing tests assert(true) } -} \ No newline at end of file +} diff --git a/akka-amqp/src/test/scala/AMQPProducerConnectionRecoveryTest.scala b/akka-amqp/src/test/scala/AMQPProducerConnectionRecoveryTest.scala index 71bc08bdaa..fe8259b208 100644 --- a/akka-amqp/src/test/scala/AMQPProducerConnectionRecoveryTest.scala +++ b/akka-amqp/src/test/scala/AMQPProducerConnectionRecoveryTest.scala @@ -59,4 +59,4 @@ class AMQPProducerConnectionRecoveryTest extends JUnitSuite with MustMatchers wi // this dummy test makes sure that the whole test class doesn't fail because of missing tests assert(true) } -} \ No newline at end of file +} diff --git a/akka-amqp/src/test/scala/AMQPProducerMessageTest.scala b/akka-amqp/src/test/scala/AMQPProducerMessageTest.scala index ab9bb00e7c..5b19df351f 100644 --- a/akka-amqp/src/test/scala/AMQPProducerMessageTest.scala +++ 
b/akka-amqp/src/test/scala/AMQPProducerMessageTest.scala @@ -21,7 +21,7 @@ class AMQPProducerMessageTest extends JUnitSuite with MustMatchers with Logging @Test def producerMessage = if (AMQPTest.enabled) { - + val connection: ActorRef = AMQP.newConnection() try { val returnLatch = new StandardLatch @@ -48,4 +48,4 @@ class AMQPProducerMessageTest extends JUnitSuite with MustMatchers with Logging // this dummy test makes sure that the whole test class doesn't fail because of missing tests assert(true) } -} \ No newline at end of file +} diff --git a/akka-amqp/src/test/scala/AMQPRpcClientServerTest.scala b/akka-amqp/src/test/scala/AMQPRpcClientServerTest.scala index 7dbfb4becd..dcaec4cd06 100644 --- a/akka-amqp/src/test/scala/AMQPRpcClientServerTest.scala +++ b/akka-amqp/src/test/scala/AMQPRpcClientServerTest.scala @@ -68,4 +68,4 @@ class AMQPRpcClientServerTest extends JUnitSuite with MustMatchers with Logging // this dummy test makes sure that the whole test class doesn't fail because of missing tests assert(true) } -} \ No newline at end of file +} diff --git a/akka-amqp/src/test/scala/AMQPTest.scala b/akka-amqp/src/test/scala/AMQPTest.scala index e50ab673f6..5ff9157bc5 100644 --- a/akka-amqp/src/test/scala/AMQPTest.scala +++ b/akka-amqp/src/test/scala/AMQPTest.scala @@ -6,4 +6,4 @@ package se.scalablesolutions.akka.amqp.test object AMQPTest { def enabled = false -} \ No newline at end of file +} diff --git a/akka-core/src/main/java/se/scalablesolutions/akka/annotation/consume.java b/akka-camel/src/main/java/se/scalablesolutions/akka/annotation/consume.java similarity index 89% rename from akka-core/src/main/java/se/scalablesolutions/akka/annotation/consume.java rename to akka-camel/src/main/java/se/scalablesolutions/akka/annotation/consume.java index 1a8e5c8db6..34d42debab 100644 --- a/akka-core/src/main/java/se/scalablesolutions/akka/annotation/consume.java +++ b/akka-camel/src/main/java/se/scalablesolutions/akka/annotation/consume.java @@ -10,7 +10,7 @@ import 
java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.TYPE, ElementType.METHOD}) +@Target({ElementType.METHOD}) public @interface consume { public abstract String value(); diff --git a/akka-camel/src/main/resources/META-INF/services/org/apache/camel/component/active-object b/akka-camel/src/main/resources/META-INF/services/org/apache/camel/component/active-object deleted file mode 100644 index 5dd88a0671..0000000000 --- a/akka-camel/src/main/resources/META-INF/services/org/apache/camel/component/active-object +++ /dev/null @@ -1 +0,0 @@ -class=se.scalablesolutions.akka.camel.component.ActiveObjectComponent \ No newline at end of file diff --git a/akka-camel/src/main/resources/META-INF/services/org/apache/camel/component/typed-actor b/akka-camel/src/main/resources/META-INF/services/org/apache/camel/component/typed-actor new file mode 100644 index 0000000000..e004d887b3 --- /dev/null +++ b/akka-camel/src/main/resources/META-INF/services/org/apache/camel/component/typed-actor @@ -0,0 +1 @@ +class=se.scalablesolutions.akka.camel.component.TypedActorComponent \ No newline at end of file diff --git a/akka-camel/src/main/scala/CamelContextLifecycle.scala b/akka-camel/src/main/scala/CamelContextLifecycle.scala index 640ba14e36..05c18396b8 100644 --- a/akka-camel/src/main/scala/CamelContextLifecycle.scala +++ b/akka-camel/src/main/scala/CamelContextLifecycle.scala @@ -9,7 +9,7 @@ import java.util.Map import org.apache.camel.{ProducerTemplate, CamelContext} import org.apache.camel.impl.DefaultCamelContext -import se.scalablesolutions.akka.camel.component.ActiveObjectComponent +import se.scalablesolutions.akka.camel.component.TypedActorComponent import se.scalablesolutions.akka.util.Logging /** @@ -29,15 +29,15 @@ trait CamelContextLifecycle extends Logging { private var _started = false /** - * Camel component for accessing active objects. + * Camel component for accessing typed actors. 
*/ - private[camel] var activeObjectComponent: ActiveObjectComponent = _ + private[camel] var typedActorComponent: TypedActorComponent = _ /** - * Registry in which active objects are TEMPORARILY registered during - * creation of Camel routes to active objects. + * Registry in which typed actors are TEMPORARILY registered during + * creation of Camel routes to typed actors. */ - private[camel] var activeObjectRegistry: Map[String, AnyRef] = _ + private[camel] var typedActorRegistry: Map[String, AnyRef] = _ /** * Returns the managed CamelContext. @@ -93,15 +93,15 @@ trait CamelContextLifecycle extends Logging { * CamelContext stream-caching is enabled. If applications want to disable stream- * caching they can do so after this method returned and prior to calling start. * This method also registers a new - * {@link se.scalablesolutions.akka.camel.component.ActiveObjectComponent} at - * context under a name defined by ActiveObjectComponent.InternalSchema. + * {@link se.scalablesolutions.akka.camel.component.TypedActorComponent} at + * context under a name defined by TypedActorComponent.InternalSchema. 
*/ def init(context: CamelContext) { - this.activeObjectComponent = new ActiveObjectComponent - this.activeObjectRegistry = activeObjectComponent.activeObjectRegistry + this.typedActorComponent = new TypedActorComponent + this.typedActorRegistry = typedActorComponent.typedActorRegistry this.context = context this.context.setStreamCaching(true) - this.context.addComponent(ActiveObjectComponent.InternalSchema, activeObjectComponent) + this.context.addComponent(TypedActorComponent.InternalSchema, typedActorComponent) this.template = context.createProducerTemplate _initialized = true log.info("Camel context initialized") diff --git a/akka-camel/src/main/scala/CamelService.scala b/akka-camel/src/main/scala/CamelService.scala index 65a6a44fe5..163daecf0d 100644 --- a/akka-camel/src/main/scala/CamelService.scala +++ b/akka-camel/src/main/scala/CamelService.scala @@ -1,15 +1,18 @@ /** * Copyright (C) 2009-2010 Scalable Solutions AB */ - package se.scalablesolutions.akka.camel +import java.util.concurrent.CountDownLatch + +import org.apache.camel.CamelContext + import se.scalablesolutions.akka.actor.Actor._ import se.scalablesolutions.akka.actor.{AspectInitRegistry, ActorRegistry} import se.scalablesolutions.akka.util.{Bootable, Logging} /** - * Used by applications (and the Kernel) to publish consumer actors and active objects via + * Used by applications (and the Kernel) to publish consumer actors and typed actors via * Camel endpoints and to manage the life cycle of a a global CamelContext which can be * accessed via se.scalablesolutions.akka.camel.CamelContextManager.context. * @@ -32,8 +35,8 @@ trait CamelService extends Bootable with Logging { * Starts the CamelService. Any started actor that is a consumer actor will be (asynchronously) * published as Camel endpoint. Consumer actors that are started after this method returned will * be published as well. Actor publishing is done asynchronously. 
A started (loaded) CamelService - * also publishes @consume annotated methods of active objects that have been created - * with ActiveObject.newInstance(..) (and ActiveObject.newInstance(..) + * also publishes @consume annotated methods of typed actors that have been created + * with TypedActor.newInstance(..) (and TypedActor.newInstance(..) * on a remote node). */ abstract override def onLoad = { @@ -43,7 +46,7 @@ trait CamelService extends Bootable with Logging { if (!initialized) init if (!started) start - // start actor that exposes consumer actors and active objects via Camel endpoints + // start actor that exposes consumer actors and typed actors via Camel endpoints consumerPublisher.start // init publishRequestor so that buffered and future events are delivered to consumerPublisher @@ -77,27 +80,53 @@ trait CamelService extends Bootable with Logging { * @see onUnload */ def unload = onUnload + + /** + * Sets an expectation of the number of upcoming endpoint activations and returns + * a {@link CountDownLatch} that can be used to wait for the activations to occur. + * Endpoint activations that occurred in the past are not considered. + */ + def expectEndpointActivationCount(count: Int): CountDownLatch = + (consumerPublisher !! SetExpectedRegistrationCount(count)).as[CountDownLatch].get + + /** + * Sets an expectation of the number of upcoming endpoint de-activations and returns + * a {@link CountDownLatch} that can be used to wait for the de-activations to occur. + * Endpoint de-activations that occurred in the past are not considered. + */ + def expectEndpointDeactivationCount(count: Int): CountDownLatch = + (consumerPublisher !! SetExpectedUnregistrationCount(count)).as[CountDownLatch].get } /** - * CamelService companion object used by standalone applications to create their own - * CamelService instance. + * Single CamelService instance. 
* * @author Martin Krasser */ -object CamelService { +object CamelService extends CamelService { /** - * Creates a new CamelService instance. + * Starts the CamelService singleton. */ - def newInstance: CamelService = new DefaultCamelService + def start = load + + /** + * Stops the CamelService singleton. + */ + def stop = unload } -/** - * Default CamelService implementation to be created in Java applications with - *
- * CamelService service = new DefaultCamelService()
- * 
- */ -class DefaultCamelService extends CamelService { +object CamelServiceFactory { + /** + * Creates a new CamelService instance + */ + def createCamelService: CamelService = new CamelService { } + + /** + * Creates a new CamelService instance + */ + def createCamelService(camelContext: CamelContext): CamelService = { + CamelContextManager.init(camelContext) + createCamelService + } } diff --git a/akka-camel/src/main/scala/Consumer.scala b/akka-camel/src/main/scala/Consumer.scala index caafca4628..1f7dffc91b 100644 --- a/akka-camel/src/main/scala/Consumer.scala +++ b/akka-camel/src/main/scala/Consumer.scala @@ -4,7 +4,7 @@ package se.scalablesolutions.akka.camel -import se.scalablesolutions.akka.actor.Actor +import se.scalablesolutions.akka.actor.{ActorRef, Actor} /** * Mixed in by Actor implementations that consume message from Camel endpoints. @@ -12,9 +12,34 @@ import se.scalablesolutions.akka.actor.Actor * @author Martin Krasser */ trait Consumer { self: Actor => - /** * Returns the Camel endpoint URI to consume messages from. */ def endpointUri: String + + /** + * Determines whether two-way communications with this consumer actor should + * be done in blocking or non-blocking mode (default is non-blocking). One-way + * communications never block. + */ + def blocking = false +} + +/** + * @author Martin Krasser + */ +private[camel] object Consumer { + /** + * Applies a function f to actorRef if actorRef + * references a consumer actor. A valid reference to a consumer actor is a local actor + * reference with a target actor that implements the Consumer trait. The + * target Consumer object is passed as argument to f. This + * method returns None if actorRef is not a valid reference + * to a consumer actor, Some result otherwise. 
+ */ + def forConsumer[T](actorRef: ActorRef)(f: Consumer => T): Option[T] = { + if (!actorRef.actor.isInstanceOf[Consumer]) None + else if (actorRef.remoteAddress.isDefined) None + else Some(f(actorRef.actor.asInstanceOf[Consumer])) + } } diff --git a/akka-camel/src/main/scala/ConsumerPublisher.scala b/akka-camel/src/main/scala/ConsumerPublisher.scala index 8d29739f02..8c01b2079d 100644 --- a/akka-camel/src/main/scala/ConsumerPublisher.scala +++ b/akka-camel/src/main/scala/ConsumerPublisher.scala @@ -13,7 +13,7 @@ import org.apache.camel.builder.RouteBuilder import se.scalablesolutions.akka.actor._ import se.scalablesolutions.akka.actor.annotation.consume -import se.scalablesolutions.akka.camel.component.ActiveObjectComponent +import se.scalablesolutions.akka.camel.component.TypedActorComponent import se.scalablesolutions.akka.util.Logging /** @@ -24,7 +24,7 @@ private[camel] object ConsumerPublisher extends Logging { * Creates a route to the registered consumer actor. */ def handleConsumerRegistered(event: ConsumerRegistered) { - CamelContextManager.context.addRoutes(new ConsumerActorRoute(event.uri, event.id, event.uuid)) + CamelContextManager.context.addRoutes(new ConsumerActorRoute(event.uri, event.uuid, event.blocking)) log.info("published actor %s at endpoint %s" format (event.actorRef, event.uri)) } @@ -32,20 +32,20 @@ private[camel] object ConsumerPublisher extends Logging { * Stops route to the already un-registered consumer actor. */ def handleConsumerUnregistered(event: ConsumerUnregistered) { - CamelContextManager.context.stopRoute(event.id) + CamelContextManager.context.stopRoute(event.uuid) log.info("unpublished actor %s from endpoint %s" format (event.actorRef, event.uri)) } /** - * Creates a route to an active object method. + * Creates a route to an typed actor method. 
*/ def handleConsumerMethodRegistered(event: ConsumerMethodRegistered) { val targetMethod = event.method.getName val objectId = "%s_%s" format (event.init.actorRef.uuid, targetMethod) - CamelContextManager.activeObjectRegistry.put(objectId, event.activeObject) + CamelContextManager.typedActorRegistry.put(objectId, event.typedActor) CamelContextManager.context.addRoutes(new ConsumerMethodRoute(event.uri, objectId, targetMethod)) - log.info("published method %s of %s at endpoint %s" format (targetMethod, event.activeObject, event.uri)) + log.info("published method %s of %s at endpoint %s" format (targetMethod, event.typedActor, event.uri)) } /** @@ -55,66 +55,66 @@ private[camel] object ConsumerPublisher extends Logging { val targetMethod = event.method.getName val objectId = "%s_%s" format (event.init.actorRef.uuid, targetMethod) - CamelContextManager.activeObjectRegistry.remove(objectId) + CamelContextManager.typedActorRegistry.remove(objectId) CamelContextManager.context.stopRoute(objectId) - log.info("unpublished method %s of %s from endpoint %s" format (targetMethod, event.activeObject, event.uri)) + log.info("unpublished method %s of %s from endpoint %s" format (targetMethod, event.typedActor, event.uri)) } } /** - * Actor that publishes consumer actors and active object methods at Camel endpoints. + * Actor that publishes consumer actors and typed actor methods at Camel endpoints. * The Camel context used for publishing is CamelContextManager.context. This actor * accepts messages of type - * se.scalablesolutions.akka.camel.service.ConsumerRegistered, - * se.scalablesolutions.akka.camel.service.ConsumerMethodRegistered and - * se.scalablesolutions.akka.camel.service.ConsumerUnregistered. + * se.scalablesolutions.akka.camel.ConsumerRegistered, + * se.scalablesolutions.akka.camel.ConsumerUnregistered. + * se.scalablesolutions.akka.camel.ConsumerMethodRegistered and + * se.scalablesolutions.akka.camel.ConsumerMethodUnregistered. 
* * @author Martin Krasser */ private[camel] class ConsumerPublisher extends Actor { import ConsumerPublisher._ - @volatile private var latch = new CountDownLatch(0) + @volatile private var registrationLatch = new CountDownLatch(0) + @volatile private var unregistrationLatch = new CountDownLatch(0) - /** - * Adds a route to the actor identified by a Publish message to the global CamelContext. - */ protected def receive = { case r: ConsumerRegistered => { handleConsumerRegistered(r) - latch.countDown // needed for testing only. + registrationLatch.countDown } case u: ConsumerUnregistered => { handleConsumerUnregistered(u) - latch.countDown // needed for testing only. + unregistrationLatch.countDown } case mr: ConsumerMethodRegistered => { handleConsumerMethodRegistered(mr) - latch.countDown // needed for testing only. + registrationLatch.countDown } case mu: ConsumerMethodUnregistered => { handleConsumerMethodUnregistered(mu) - latch.countDown // needed for testing only. + unregistrationLatch.countDown } - case SetExpectedMessageCount(num) => { - // needed for testing only. - latch = new CountDownLatch(num) - self.reply(latch) + case SetExpectedRegistrationCount(num) => { + registrationLatch = new CountDownLatch(num) + self.reply(registrationLatch) + } + case SetExpectedUnregistrationCount(num) => { + unregistrationLatch = new CountDownLatch(num) + self.reply(unregistrationLatch) } case _ => { /* ignore */} } } -/** - * Command message used For testing-purposes only. - */ -private[camel] case class SetExpectedMessageCount(num: Int) +private[camel] case class SetExpectedRegistrationCount(num: Int) +private[camel] case class SetExpectedUnregistrationCount(num: Int) /** - * Defines an abstract route to a target which is either an actor or an active object method.. + * Defines an abstract route to a target which is either an actor or an typed actor method.. * - * @param endpointUri endpoint URI of the consumer actor or active object method. 
- * @param id actor identifier or active object identifier (registry key). + * @param endpointUri endpoint URI of the consumer actor or typed actor method. + * @param id actor identifier or typed actor identifier (registry key). * * @author Martin Krasser */ @@ -139,31 +139,30 @@ private[camel] abstract class ConsumerRoute(endpointUri: String, id: String) ext * Defines the route to a consumer actor. * * @param endpointUri endpoint URI of the consumer actor - * @param id actor identifier - * @param uuid true if id refers to Actor.uuid, false if - * id refers to Actor.getId. + * @param uuid actor uuid + * @param blocking true for blocking in-out exchanges, false otherwise * * @author Martin Krasser */ -private[camel] class ConsumerActorRoute(endpointUri: String, id: String, uuid: Boolean) extends ConsumerRoute(endpointUri, id) { - protected override def targetUri = (if (uuid) "actor:uuid:%s" else "actor:id:%s") format id +private[camel] class ConsumerActorRoute(endpointUri: String, uuid: String, blocking: Boolean) extends ConsumerRoute(endpointUri, uuid) { + protected override def targetUri = "actor:uuid:%s?blocking=%s" format (uuid, blocking) } /** - * Defines the route to an active object method.. + * Defines the route to an typed actor method.. * * @param endpointUri endpoint URI of the consumer actor method - * @param id active object identifier + * @param id typed actor identifier * @param method name of the method to invoke. 
* * @author Martin Krasser */ private[camel] class ConsumerMethodRoute(val endpointUri: String, id: String, method: String) extends ConsumerRoute(endpointUri, id) { - protected override def targetUri = "%s:%s?method=%s" format (ActiveObjectComponent.InternalSchema, id, method) + protected override def targetUri = "%s:%s?method=%s" format (TypedActorComponent.InternalSchema, id, method) } /** - * A registration listener that triggers publication of consumer actors and active object + * A registration listener that triggers publication of consumer actors and typed actor * methods as well as un-publication of consumer actors. This actor needs to be initialized * with a PublishRequestorInit command message for obtaining a reference to * a publisher actor. Before initialization it buffers all outbound messages @@ -210,7 +209,7 @@ private[camel] class PublishRequestor extends Actor { /** * Command message to initialize a PublishRequestor to use consumerPublisher - * for publishing actors or active object methods. + * for publishing actors or typed actor methods. */ private[camel] case class PublishRequestorInit(consumerPublisher: ActorRef) @@ -226,54 +225,51 @@ private[camel] sealed trait ConsumerEvent * * @param actorRef actor reference * @param uri endpoint URI of the consumer actor - * @param id actor identifier - * @param uuid true if id is the actor's uuid, false if - * id is the actor's id. + * @param uuid actor uuid + * @param blocking true for blocking in-out exchanges, false otherwise * * @author Martin Krasser */ -private[camel] case class ConsumerRegistered(actorRef: ActorRef, uri: String, id: String, uuid: Boolean) extends ConsumerEvent +private[camel] case class ConsumerRegistered(actorRef: ActorRef, uri: String, uuid: String, blocking: Boolean) extends ConsumerEvent /** * Event indicating that a consumer actor has been unregistered from the actor registry. 
* * @param actorRef actor reference * @param uri endpoint URI of the consumer actor - * @param id actor identifier - * @param uuid true if id is the actor's uuid, false if - * id is the actor's id. + * @param uuid actor uuid * * @author Martin Krasser */ -private[camel] case class ConsumerUnregistered(actorRef: ActorRef, uri: String, id: String, uuid: Boolean) extends ConsumerEvent +private[camel] case class ConsumerUnregistered(actorRef: ActorRef, uri: String, uuid: String) extends ConsumerEvent /** - * Event indicating that an active object proxy has been created for a POJO. For each + * Event indicating that an typed actor proxy has been created for a POJO. For each * @consume annotated POJO method a separate instance of this class is * created. * - * @param activeObject active object (proxy). + * @param typedActor typed actor (proxy). * @param init - * @param uri endpoint URI of the active object method + * @param uri endpoint URI of the typed actor method * @param method method to be published. * * @author Martin Krasser */ -private[camel] case class ConsumerMethodRegistered(activeObject: AnyRef, init: AspectInit, uri: String, method: Method) extends ConsumerEvent +private[camel] case class ConsumerMethodRegistered(typedActor: AnyRef, init: AspectInit, uri: String, method: Method) extends ConsumerEvent /** - * Event indicating that an active object has been stopped. For each + * Event indicating that an typed actor has been stopped. For each * @consume annotated POJO method a separate instance of this class is * created. * - * @param activeObject active object (proxy). + * @param typedActor typed actor (proxy). * @param init - * @param uri endpoint URI of the active object method + * @param uri endpoint URI of the typed actor method * @param method method to be un-published. 
* * @author Martin Krasser */ -private[camel] case class ConsumerMethodUnregistered(activeObject: AnyRef, init: AspectInit, uri: String, method: Method) extends ConsumerEvent +private[camel] case class ConsumerMethodUnregistered(typedActor: AnyRef, init: AspectInit, uri: String, method: Method) extends ConsumerEvent /** * @author Martin Krasser @@ -283,9 +279,10 @@ private[camel] object ConsumerRegistered { * Optionally creates an ConsumerRegistered event message for a consumer actor or None if * actorRef is not a consumer actor. */ - def forConsumer(actorRef: ActorRef): Option[ConsumerRegistered] = actorRef match { - case ConsumerDescriptor(ref, uri, id, uuid) => Some(ConsumerRegistered(ref, uri, id, uuid)) - case _ => None + def forConsumer(actorRef: ActorRef): Option[ConsumerRegistered] = { + Consumer.forConsumer[ConsumerRegistered](actorRef) { + target => ConsumerRegistered(actorRef, target.endpointUri, actorRef.uuid, target.blocking) + } } } @@ -297,9 +294,10 @@ private[camel] object ConsumerUnregistered { * Optionally creates an ConsumerUnregistered event message for a consumer actor or None if * actorRef is not a consumer actor. */ - def forConsumer(actorRef: ActorRef): Option[ConsumerUnregistered] = actorRef match { - case ConsumerDescriptor(ref, uri, id, uuid) => Some(ConsumerUnregistered(ref, uri, id, uuid)) - case _ => None + def forConsumer(actorRef: ActorRef): Option[ConsumerUnregistered] = { + Consumer.forConsumer[ConsumerUnregistered](actorRef) { + target => ConsumerUnregistered(actorRef, target.endpointUri, actorRef.uuid) + } } } @@ -308,17 +306,17 @@ private[camel] object ConsumerUnregistered { */ private[camel] object ConsumerMethod { /** - * Applies a function f to each consumer method of activeObject and + * Applies a function f to each consumer method of typedActor and * returns the function results as a list. A consumer method is one that is annotated with - * @consume. If activeObject is a proxy for a remote active object + * @consume. 
If typedActor is a proxy for a remote typed actor * f is never called and Nil is returned. */ - def forConsumer[T](activeObject: AnyRef, init: AspectInit)(f: Method => T): List[T] = { + def forConsumer[T](typedActor: AnyRef, init: AspectInit)(f: Method => T): List[T] = { // TODO: support consumer annotation inheritance // - visit overridden methods in superclasses // - visit implemented method declarations in interfaces - if (init.remoteAddress.isDefined) Nil // let remote node publish active object methods on endpoints - else for (m <- activeObject.getClass.getMethods.toList; if (m.isAnnotationPresent(classOf[consume]))) + if (init.remoteAddress.isDefined) Nil // let remote node publish typed actor methods on endpoints + else for (m <- typedActor.getClass.getMethods.toList; if (m.isAnnotationPresent(classOf[consume]))) yield f(m) } } @@ -328,56 +326,29 @@ private[camel] object ConsumerMethod { */ private[camel] object ConsumerMethodRegistered { /** - * Creates a list of ConsumerMethodRegistered event messages for an active object or an empty - * list if the active object is a proxy for an remote active object or the active object doesn't + * Creates a list of ConsumerMethodRegistered event messages for an typed actor or an empty + * list if the typed actor is a proxy for an remote typed actor or the typed actor doesn't * have any @consume annotated methods. */ - def forConsumer(activeObject: AnyRef, init: AspectInit): List[ConsumerMethodRegistered] = { - ConsumerMethod.forConsumer[ConsumerMethodRegistered](activeObject, init) { - m => ConsumerMethodRegistered(activeObject, init, m.getAnnotation(classOf[consume]).value, m) - } - } -} - -private[camel] object ConsumerMethodUnregistered { - /** - * Creates a list of ConsumerMethodUnregistered event messages for an active object or an empty - * list if the active object is a proxy for an remote active object or the active object doesn't - * have any @consume annotated methods. 
- */ - def forConsumer(activeObject: AnyRef, init: AspectInit): List[ConsumerMethodUnregistered] = { - ConsumerMethod.forConsumer[ConsumerMethodUnregistered](activeObject, init) { - m => ConsumerMethodUnregistered(activeObject, init, m.getAnnotation(classOf[consume]).value, m) + def forConsumer(typedActor: AnyRef, init: AspectInit): List[ConsumerMethodRegistered] = { + ConsumerMethod.forConsumer(typedActor, init) { + m => ConsumerMethodRegistered(typedActor, init, m.getAnnotation(classOf[consume]).value, m) } } } /** - * Describes a consumer actor with elements that are relevant for publishing an actor at a - * Camel endpoint (or unpublishing an actor from an endpoint). - * * @author Martin Krasser */ -private[camel] object ConsumerDescriptor { - +private[camel] object ConsumerMethodUnregistered { /** - * An extractor that optionally creates a 4-tuple from a consumer actor reference containing - * the actor reference itself, endpoint URI, identifier and a hint whether the identifier - * is the actor uuid or actor id. If actorRef doesn't reference a consumer actor, - * None is returned. + * Creates a list of ConsumerMethodUnregistered event messages for an typed actor or an empty + * list if the typed actor is a proxy for an remote typed actor or the typed actor doesn't + * have any @consume annotated methods. 
*/ - def unapply(actorRef: ActorRef): Option[(ActorRef, String, String, Boolean)] = - unapplyConsumerInstance(actorRef) orElse unapplyConsumeAnnotated(actorRef) - - private def unapplyConsumeAnnotated(actorRef: ActorRef): Option[(ActorRef, String, String, Boolean)] = { - val annotation = actorRef.actorClass.getAnnotation(classOf[consume]) - if (annotation eq null) None - else if (actorRef.remoteAddress.isDefined) None - else Some((actorRef, annotation.value, actorRef.id, false)) + def forConsumer(typedActor: AnyRef, init: AspectInit): List[ConsumerMethodUnregistered] = { + ConsumerMethod.forConsumer(typedActor, init) { + m => ConsumerMethodUnregistered(typedActor, init, m.getAnnotation(classOf[consume]).value, m) + } } - - private def unapplyConsumerInstance(actorRef: ActorRef): Option[(ActorRef, String, String, Boolean)] = - if (!actorRef.actor.isInstanceOf[Consumer]) None - else if (actorRef.remoteAddress.isDefined) None - else Some((actorRef, actorRef.actor.asInstanceOf[Consumer].endpointUri, actorRef.uuid, true)) } diff --git a/akka-camel/src/main/scala/Producer.scala b/akka-camel/src/main/scala/Producer.scala index 9a68b8d57f..c49591ec7f 100644 --- a/akka-camel/src/main/scala/Producer.scala +++ b/akka-camel/src/main/scala/Producer.scala @@ -6,13 +6,10 @@ package se.scalablesolutions.akka.camel import CamelMessageConversion.toExchangeAdapter -import org.apache.camel.{Processor, ExchangePattern, Exchange, ProducerTemplate} -import org.apache.camel.impl.DefaultExchange -import org.apache.camel.spi.Synchronization +import org.apache.camel._ +import org.apache.camel.processor.SendProcessor import se.scalablesolutions.akka.actor.{Actor, ActorRef} -import se.scalablesolutions.akka.dispatch.CompletableFuture -import se.scalablesolutions.akka.util.Logging /** * Mixed in by Actor implementations that produce messages to Camel endpoints. 
@@ -21,15 +18,21 @@ import se.scalablesolutions.akka.util.Logging */ trait Producer { this: Actor => + /** + * Message headers to copy by default from request message to response-message. + */ private val headersToCopyDefault = Set(Message.MessageExchangeId) /** - * If set to true (default), communication with the Camel endpoint is done via the Camel - * Async API. Camel then processes the - * message in a separate thread. If set to false, the actor thread is blocked until Camel - * has finished processing the produced message. + * Endpoint object resolved from current CamelContext with + * endpointUri. */ - def async: Boolean = true + private lazy val endpoint = CamelContextManager.context.getEndpoint(endpointUri) + + /** + * SendProcessor for producing messages to endpoint. + */ + private lazy val processor = createSendProcessor /** * If set to false (default), this producer expects a response message from the Camel endpoint. @@ -51,146 +54,123 @@ trait Producer { this: Actor => def headersToCopy: Set[String] = headersToCopyDefault /** - * Returns the producer template from the CamelContextManager. Applications either have to ensure - * proper initialization of CamelContextManager or override this method. - * - * @see CamelContextManager. + * Default implementation of Actor.shutdown for freeing resources needed + * to actually send messages to endpointUri. */ - protected def template: ProducerTemplate = CamelContextManager.template - - /** - * Initiates a one-way (in-only) message exchange to the Camel endpoint given by - * endpointUri. This method blocks until Camel finishes processing - * the message exchange. - * - * @param msg: the message to produce. The message is converted to its canonical - * representation via Message.canonicalize. 
- */ - protected def produceOnewaySync(msg: Any): Unit = - template.send(endpointUri, createInOnlyExchange.fromRequestMessage(Message.canonicalize(msg))) - - /** - * Initiates a one-way (in-only) message exchange to the Camel endpoint given by - * endpointUri. This method triggers asynchronous processing of the - * message exchange by Camel. - * - * @param msg: the message to produce. The message is converted to its canonical - * representation via Message.canonicalize. - */ - protected def produceOnewayAsync(msg: Any): Unit = - template.asyncSend( - endpointUri, createInOnlyExchange.fromRequestMessage(Message.canonicalize(msg))) - - /** - * Initiates a two-way (in-out) message exchange to the Camel endpoint given by - * endpointUri. This method blocks until Camel finishes processing - * the message exchange. - * - * @param msg: the message to produce. The message is converted to its canonical - * representation via Message.canonicalize. - * @return either a response Message or a Failure object. - */ - protected def produceSync(msg: Any): Any = { - val cmsg = Message.canonicalize(msg) - val requestProcessor = new Processor() { - def process(exchange: Exchange) = exchange.fromRequestMessage(cmsg) - } - val result = template.request(endpointUri, requestProcessor) - if (result.isFailed) result.toFailureMessage(cmsg.headers(headersToCopy)) - else result.toResponseMessage(cmsg.headers(headersToCopy)) + override def shutdown { + processor.stop } /** - * Initiates a two-way (in-out) message exchange to the Camel endpoint given by - * endpointUri. This method triggers asynchronous processing of the - * message exchange by Camel. The response message is returned asynchronously to - * the original sender (or sender future). + * Produces msg as exchange of given pattern to the endpoint specified by + * endpointUri. After producing to the endpoint the processing result is passed as argument + * to receiveAfterProduce. 
If the result was returned synchronously by the endpoint then + * receiveAfterProduce is called synchronously as well. If the result was returned asynchronously, + * the receiveAfterProduce is called asynchronously as well. This is done by wrapping the result, + * adding it to this producers mailbox, unwrapping it once it is received and calling + * receiveAfterProduce. The original sender and senderFuture are thereby preserved. * - * @param msg: the message to produce. The message is converted to its canonical - * representation via Message.canonicalize. - * @return either a response Message or a Failure object. - * @see ProducerResponseSender + * @param msg message to produce + * @param pattern exchange pattern */ - protected def produceAsync(msg: Any): Unit = { + protected def produce(msg: Any, pattern: ExchangePattern): Unit = { val cmsg = Message.canonicalize(msg) - val sync = new ProducerResponseSender( - cmsg.headers(headersToCopy), self.sender, self.senderFuture, this) - template.asyncCallback(endpointUri, createInOutExchange.fromRequestMessage(cmsg), sync) + val exchange = createExchange(pattern).fromRequestMessage(cmsg) + processor.process(exchange, new AsyncCallback { + val producer = self + // Need copies of sender and senderFuture references here + // since the callback could be done later by another thread. 
+ val sender = self.sender + val senderFuture = self.senderFuture + + def done(doneSync: Boolean): Unit = { + (doneSync, exchange.isFailed) match { + case (true, true) => dispatchSync(exchange.toFailureMessage(cmsg.headers(headersToCopy))) + case (true, false) => dispatchSync(exchange.toResponseMessage(cmsg.headers(headersToCopy))) + case (false, true) => dispatchAsync(FailureResult(exchange.toFailureMessage(cmsg.headers(headersToCopy)))) + case (false, false) => dispatchAsync(MessageResult(exchange.toResponseMessage(cmsg.headers(headersToCopy)))) + } + } + + private def dispatchSync(result: Any) = + receiveAfterProduce(result) + + private def dispatchAsync(result: Any) = { + if (senderFuture.isDefined) + producer.postMessageToMailboxAndCreateFutureResultWithTimeout(result, producer.timeout, sender, senderFuture) + else + producer.postMessageToMailbox(result, sender) + } + }) } /** - * Default implementation for Actor.receive. Implementors may choose to - * def receive = produce. This partial function calls one of - * the protected produce methods depending on the return values of - * oneway and async. + * Produces msg to the endpoint specified by endpointUri. Before the message is + * actually produced it is pre-processed by calling receiveBeforeProduce. If oneway + * is true an in-only message exchange is initiated, otherwise an in-out message exchange. 
+ * + * @see Producer#produce(Any, ExchangePattern) */ protected def produce: Receive = { + case res: MessageResult => receiveAfterProduce(res.message) + case res: FailureResult => receiveAfterProduce(res.failure) case msg => { - if ( oneway && !async) produceOnewaySync(msg) - else if ( oneway && async) produceOnewayAsync(msg) - else if (!oneway && !async) self.reply(produceSync(msg)) - else /*(!oneway && async)*/ produceAsync(msg) + if (oneway) + produce(receiveBeforeProduce(msg), ExchangePattern.InOnly) + else + produce(receiveBeforeProduce(msg), ExchangePattern.InOut) } } + /** + * Called before the message is sent to the endpoint specified by endpointUri. The original + * message is passed as argument. By default, this method simply returns the argument but may be overridden + * by subtraits or subclasses. + */ + protected def receiveBeforeProduce: PartialFunction[Any, Any] = { + case msg => msg + } + + /** + * Called after the a result was received from the endpoint specified by endpointUri. The + * result is passed as argument. By default, this method replies the result back to the original sender + * if oneway is false. If oneway is true then nothing is done. This method may + * be overridden by subtraits or subclasses. + */ + protected def receiveAfterProduce: Receive = { + case msg => if (!oneway) self.reply(msg) + } + /** * Default implementation of Actor.receive */ protected def receive = produce /** - * Creates a new in-only Exchange. + * Creates a new Exchange with given pattern from the endpoint specified by + * endpointUri. */ - protected def createInOnlyExchange: Exchange = createExchange(ExchangePattern.InOnly) + private def createExchange(pattern: ExchangePattern): Exchange = endpoint.createExchange(pattern) /** - * Creates a new in-out Exchange. + * Creates a new SendProcessor for endpoint. 
*/ - protected def createInOutExchange: Exchange = createExchange(ExchangePattern.InOut) - - /** - * Creates a new Exchange with given pattern from the CamelContext managed by - * CamelContextManager. Applications either have to ensure proper initialization - * of CamelContextManager or override this method. - * - * @see CamelContextManager. - */ - protected def createExchange(pattern: ExchangePattern): Exchange = - new DefaultExchange(CamelContextManager.context, pattern) + private def createSendProcessor = { + val sendProcessor = new SendProcessor(endpoint) + sendProcessor.start + sendProcessor + } } /** - * Synchronization object that sends responses asynchronously to initial senders. This - * class is used by Producer for asynchronous two-way messaging with a Camel endpoint. - * * @author Martin Krasser */ -private[camel] class ProducerResponseSender( - headers: Map[String, Any], - sender: Option[ActorRef], - senderFuture: Option[CompletableFuture[Any]], - producer: Actor) extends Synchronization with Logging { +private[camel] case class MessageResult(message: Message) - implicit val producerActor = Some(producer) // the response sender - - /** - * Replies a Failure message, created from the given exchange, to sender (or - * senderFuture if applicable). - */ - def onFailure(exchange: Exchange) = reply(exchange.toFailureMessage(headers)) - - /** - * Replies a response Message, created from the given exchange, to sender (or - * senderFuture if applicable). - */ - def onComplete(exchange: Exchange) = reply(exchange.toResponseMessage(headers)) - - private def reply(message: Any) = { - if (senderFuture.isDefined) senderFuture.get completeWithResult message - else if (sender.isDefined) sender.get ! message - else log.warning("No destination for sending response") - } -} +/** + * @author Martin Krasser + */ +private[camel] case class FailureResult(failure: Failure) /** * A one-way producer. 
@@ -201,12 +181,3 @@ trait Oneway extends Producer { this: Actor => override def oneway = true } -/** - * A synchronous producer. - * - * @author Martin Krasser - */ -trait Sync extends Producer { this: Actor => - override def async = false -} - diff --git a/akka-camel/src/main/scala/component/ActorComponent.scala b/akka-camel/src/main/scala/component/ActorComponent.scala index e80595515e..1cd29ced00 100644 --- a/akka-camel/src/main/scala/component/ActorComponent.scala +++ b/akka-camel/src/main/scala/component/ActorComponent.scala @@ -4,15 +4,25 @@ package se.scalablesolutions.akka.camel.component -import java.lang.{RuntimeException, String} +import java.net.InetSocketAddress import java.util.{Map => JavaMap} import java.util.concurrent.TimeoutException +import java.util.concurrent.atomic.AtomicReference -import org.apache.camel.{Exchange, Consumer, Processor} +import jsr166x.Deque + +import org.apache.camel._ import org.apache.camel.impl.{DefaultProducer, DefaultEndpoint, DefaultComponent} import se.scalablesolutions.akka.actor.{ActorRegistry, Actor, ActorRef} import se.scalablesolutions.akka.camel.{Failure, CamelMessageConversion, Message} +import se.scalablesolutions.akka.dispatch.{CompletableFuture, MessageInvocation, MessageDispatcher} +import se.scalablesolutions.akka.stm.TransactionConfig + +import scala.reflect.BeanProperty + +import CamelMessageConversion.toExchangeAdapter +import java.lang.Throwable /** * Camel component for sending messages to and receiving replies from actors. @@ -41,7 +51,7 @@ class ActorComponent extends DefaultComponent { /** * Camel endpoint for referencing an actor. The actor reference is given by the endpoint URI. - * An actor can be referenced by its Actor.getId or its Actor.uuid. + * An actor can be referenced by its ActorRef.id or its ActorRef.uuid. 
* Supported endpoint URI formats are * actor:<actorid>, * actor:id:<actorid> and @@ -57,6 +67,12 @@ class ActorEndpoint(uri: String, val id: Option[String], val uuid: Option[String]) extends DefaultEndpoint(uri, comp) { + /** + * Blocking of client thread during two-way message exchanges with consumer actors. This is set + * via the blocking=true|false endpoint URI parameter. If omitted blocking is false. + */ + @BeanProperty var blocking: Boolean = false + /** * @throws UnsupportedOperationException */ @@ -75,60 +91,59 @@ class ActorEndpoint(uri: String, } /** - * Sends the in-message of an exchange to an actor. If the exchange pattern is out-capable, - * the producer waits for a reply (using the !! operator), otherwise the ! operator is used - * for sending the message. + * Sends the in-message of an exchange to an actor. If the exchange pattern is out-capable and + * blocking is enabled then the producer waits for a reply (using the !! operator), + * otherwise the ! operator is used for sending the message. * * @see se.scalablesolutions.akka.camel.component.ActorComponent * @see se.scalablesolutions.akka.camel.component.ActorEndpoint * * @author Martin Krasser */ -class ActorProducer(val ep: ActorEndpoint) extends DefaultProducer(ep) { - import CamelMessageConversion.toExchangeAdapter +class ActorProducer(val ep: ActorEndpoint) extends DefaultProducer(ep) with AsyncProcessor { + import ActorProducer._ - implicit val sender = None + def process(exchange: Exchange) = + if (exchange.getPattern.isOutCapable) sendSync(exchange) else sendAsync(exchange) - /** - * Depending on the exchange pattern, this method either calls processInOut or - * processInOnly for interacting with an actor. This methods looks up the actor - * from the ActorRegistry according to this producer's endpoint URI. - * - * @param exchange represents the message exchange with the actor. 
- */ - def process(exchange: Exchange) { - val actor = target getOrElse (throw new ActorNotRegisteredException(ep.getEndpointUri)) - if (exchange.getPattern.isOutCapable) processInOut(exchange, actor) - else processInOnly(exchange, actor) - } - - /** - * Send the exchange in-message to the given actor using the ! operator. The message - * send to the actor is of type se.scalablesolutions.akka.camel.Message. - */ - protected def processInOnly(exchange: Exchange, actor: ActorRef): Unit = - actor ! exchange.toRequestMessage(Map(Message.MessageExchangeId -> exchange.getExchangeId)) - - /** - * Send the exchange in-message to the given actor using the !! operator. The exchange - * out-message is populated from the actor's reply message. The message sent to the - * actor is of type se.scalablesolutions.akka.camel.Message. - */ - protected def processInOut(exchange: Exchange, actor: ActorRef) { - val header = Map(Message.MessageExchangeId -> exchange.getExchangeId) - val result: Any = actor !! exchange.toRequestMessage(header) - - result match { - case Some(msg: Failure) => exchange.fromFailureMessage(msg) - case Some(msg) => exchange.fromResponseMessage(Message.canonicalize(msg)) - case None => { - throw new TimeoutException("timeout (%d ms) while waiting response from %s" - format (actor.timeout, ep.getEndpointUri)) + def process(exchange: Exchange, callback: AsyncCallback): Boolean = { + (exchange.getPattern.isOutCapable, ep.blocking) match { + case (true, true) => { + sendSync(exchange) + callback.done(true) + true + } + case (true, false) => { + sendAsync(exchange, Some(AsyncCallbackAdapter(exchange, callback))) + false + } + case (false, _) => { + sendAsync(exchange) + callback.done(true) + true } } } - private def target: Option[ActorRef] = + private def sendSync(exchange: Exchange) = { + val actor = target + val result: Any = actor !! 
requestFor(exchange) + + result match { + case Some(msg: Failure) => exchange.fromFailureMessage(msg) + case Some(msg) => exchange.fromResponseMessage(Message.canonicalize(msg)) + case None => throw new TimeoutException("timeout (%d ms) while waiting response from %s" + format (actor.timeout, ep.getEndpointUri)) + } + } + + private def sendAsync(exchange: Exchange, sender: Option[ActorRef] = None) = + target.!(requestFor(exchange))(sender) + + private def target = + targetOption getOrElse (throw new ActorNotRegisteredException(ep.getEndpointUri)) + + private def targetOption: Option[ActorRef] = if (ep.id.isDefined) targetById(ep.id.get) else targetByUuid(ep.uuid.get) @@ -141,6 +156,14 @@ class ActorProducer(val ep: ActorEndpoint) extends DefaultProducer(ep) { private def targetByUuid(uuid: String) = ActorRegistry.actorFor(uuid) } +/** + * @author Martin Krasser + */ +private[camel] object ActorProducer { + def requestFor(exchange: Exchange) = + exchange.toRequestMessage(Map(Message.MessageExchangeId -> exchange.getExchangeId)) +} + /** * Thrown to indicate that an actor referenced by an endpoint URI cannot be * found in the ActorRegistry. @@ -150,3 +173,92 @@ class ActorProducer(val ep: ActorEndpoint) extends DefaultProducer(ep) { class ActorNotRegisteredException(uri: String) extends RuntimeException { override def getMessage = "%s not registered" format uri } + +/** + * @author Martin Krasser + */ +private[akka] object AsyncCallbackAdapter { + /** + * Creates and starts an AsyncCallbackAdapter. + * + * @param exchange message exchange to write results to. + * @param callback callback object to generate completion notifications. + */ + def apply(exchange: Exchange, callback: AsyncCallback) = + new AsyncCallbackAdapter(exchange, callback).start +} + +/** + * Adapts an AsyncCallback to ActorRef.!. Used by other actors to reply + * asynchronously to Camel with ActorRef.reply. + *

+ * Please note that this adapter can only be used locally at the moment which should not + * be a problem is most situations as Camel endpoints are only activated for local actor references, + * never for remote references. + * + * @author Martin Krasser + */ +private[akka] class AsyncCallbackAdapter(exchange: Exchange, callback: AsyncCallback) extends ActorRef { + + def start = { + _isRunning = true + this + } + + def stop() = { + _isRunning = false + _isShutDown = true + } + + /** + * Writes the reply message to exchange and uses callback to + * generate completion notifications. + * + * @param message reply message + * @param sender ignored + */ + protected[akka] def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]) = { + message match { + case msg: Failure => exchange.fromFailureMessage(msg) + case msg => exchange.fromResponseMessage(Message.canonicalize(msg)) + } + callback.done(false) + } + + def actorClass: Class[_ <: Actor] = unsupported + def actorClassName = unsupported + def dispatcher_=(md: MessageDispatcher): Unit = unsupported + def dispatcher: MessageDispatcher = unsupported + def transactionConfig_=(config: TransactionConfig): Unit = unsupported + def transactionConfig: TransactionConfig = unsupported + def makeTransactionRequired: Unit = unsupported + def makeRemote(hostname: String, port: Int): Unit = unsupported + def makeRemote(address: InetSocketAddress): Unit = unsupported + def homeAddress_=(address: InetSocketAddress): Unit = unsupported + def remoteAddress: Option[InetSocketAddress] = unsupported + def link(actorRef: ActorRef): Unit = unsupported + def unlink(actorRef: ActorRef): Unit = unsupported + def startLink(actorRef: ActorRef): Unit = unsupported + def startLinkRemote(actorRef: ActorRef, hostname: String, port: Int): Unit = unsupported + def spawn[T <: Actor : Manifest]: ActorRef = unsupported + def spawnRemote[T <: Actor: Manifest](hostname: String, port: Int): ActorRef = unsupported + def spawnLink[T <: Actor: 
Manifest]: ActorRef = unsupported + def spawnLinkRemote[T <: Actor : Manifest](hostname: String, port: Int): ActorRef = unsupported + def shutdownLinkedActors: Unit = unsupported + def supervisor: Option[ActorRef] = unsupported + protected[akka] def postMessageToMailboxAndCreateFutureResultWithTimeout[T](message: Any, timeout: Long, senderOption: Option[ActorRef], senderFuture: Option[CompletableFuture[T]]) = unsupported + protected[akka] def mailbox: AnyRef = unsupported + protected[akka] def mailbox_=(msg: AnyRef):AnyRef = unsupported + protected[akka] def restart(reason: Throwable, maxNrOfRetries: Int, withinTimeRange: Int): Unit = unsupported + protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Int, withinTimeRange: Int): Unit = unsupported + protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable): Unit = unsupported + protected[akka] def linkedActors: JavaMap[String, ActorRef] = unsupported + protected[akka] def linkedActorsAsList: List[ActorRef] = unsupported + protected[akka] def invoke(messageHandle: MessageInvocation): Unit = unsupported + protected[akka] def remoteAddress_=(addr: Option[InetSocketAddress]): Unit = unsupported + protected[akka] def registerSupervisorAsRemoteActor = unsupported + protected[akka] def supervisor_=(sup: Option[ActorRef]): Unit = unsupported + protected[this] def actorInstance: AtomicReference[Actor] = unsupported + + private def unsupported = throw new UnsupportedOperationException("Not supported for %s" format classOf[AsyncCallbackAdapter].getName) +} diff --git a/akka-camel/src/main/scala/component/ActiveObjectComponent.scala b/akka-camel/src/main/scala/component/TypedActorComponent.scala similarity index 64% rename from akka-camel/src/main/scala/component/ActiveObjectComponent.scala rename to akka-camel/src/main/scala/component/TypedActorComponent.scala index 05fa026e04..2a48cf9fc4 100644 --- a/akka-camel/src/main/scala/component/ActiveObjectComponent.scala +++ 
b/akka-camel/src/main/scala/component/TypedActorComponent.scala @@ -12,31 +12,31 @@ import org.apache.camel.component.bean._ /** * @author Martin Krasser */ -object ActiveObjectComponent { +object TypedActorComponent { /** - * Default schema name for active object endpoint URIs. + * Default schema name for typed actor endpoint URIs. */ - val InternalSchema = "active-object-internal" + val InternalSchema = "typed-actor-internal" } /** - * Camel component for exchanging messages with active objects. This component - * tries to obtain the active object from the activeObjectRegistry + * Camel component for exchanging messages with typed actors. This component + * tries to obtain the typed actor from the typedActorRegistry * first. If it's not there it tries to obtain it from the CamelContext's registry. * * @see org.apache.camel.component.bean.BeanComponent * * @author Martin Krasser */ -class ActiveObjectComponent extends BeanComponent { - val activeObjectRegistry = new ConcurrentHashMap[String, AnyRef] +class TypedActorComponent extends BeanComponent { + val typedActorRegistry = new ConcurrentHashMap[String, AnyRef] /** * Creates a {@link org.apache.camel.component.bean.BeanEndpoint} with a custom - * bean holder that uses activeObjectRegistry for getting access to - * active objects (beans). + * bean holder that uses typedActorRegistry for getting access to + * typed actors (beans). 
* - * @see se.scalablesolutions.akka.camel.component.ActiveObjectHolder + * @see se.scalablesolutions.akka.camel.component.TypedActorHolder */ override def createEndpoint(uri: String, remaining: String, parameters: Map[String, AnyRef]) = { val endpoint = new BeanEndpoint(uri, this) @@ -47,39 +47,39 @@ class ActiveObjectComponent extends BeanComponent { } private def createBeanHolder(beanName: String) = - new ActiveObjectHolder(activeObjectRegistry, getCamelContext, beanName).createCacheHolder + new TypedActorHolder(typedActorRegistry, getCamelContext, beanName).createCacheHolder } /** * {@link org.apache.camel.component.bean.BeanHolder} implementation that uses a custom - * registry for getting access to active objects. + * registry for getting access to typed actors. * * @author Martin Krasser */ -class ActiveObjectHolder(activeObjectRegistry: Map[String, AnyRef], context: CamelContext, name: String) +class TypedActorHolder(typedActorRegistry: Map[String, AnyRef], context: CamelContext, name: String) extends RegistryBean(context, name) { /** - * Returns an {@link se.scalablesolutions.akka.camel.component.ActiveObjectInfo} instance. + * Returns an {@link se.scalablesolutions.akka.camel.component.TypedActorInfo} instance. */ override def getBeanInfo: BeanInfo = - new ActiveObjectInfo(getContext, getBean.getClass, getParameterMappingStrategy) + new TypedActorInfo(getContext, getBean.getClass, getParameterMappingStrategy) /** - * Obtains an active object from activeObjectRegistry. + * Obtains an typed actor from typedActorRegistry. */ override def getBean: AnyRef = { - val bean = activeObjectRegistry.get(getName) + val bean = typedActorRegistry.get(getName) if (bean eq null) super.getBean else bean } } /** - * Provides active object meta information. + * Provides typed actor meta information. 
* * @author Martin Krasser */ -class ActiveObjectInfo(context: CamelContext, clazz: Class[_], strategy: ParameterMappingStrategy) +class TypedActorInfo(context: CamelContext, clazz: Class[_], strategy: ParameterMappingStrategy) extends BeanInfo(context, clazz, strategy) { /** diff --git a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoBase.java b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoBase.java index 05bf1625bb..f0f0b8ee4d 100644 --- a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoBase.java +++ b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoBase.java @@ -1,34 +1,28 @@ package se.scalablesolutions.akka.camel; -import org.apache.camel.Body; -import org.apache.camel.Header; - -import se.scalablesolutions.akka.actor.annotation.consume; +import se.scalablesolutions.akka.actor.TypedActor; /** * @author Martin Krasser */ -public class PojoBase { +public class PojoBase extends TypedActor implements PojoBaseIntf { public String m1(String b, String h) { return "m1base: " + b + " " + h; } - @consume("direct:m2base") - public String m2(@Body String b, @Header("test") String h) { + public String m2(String b, String h) { return "m2base: " + b + " " + h; } - @consume("direct:m3base") - public String m3(@Body String b, @Header("test") String h) { + public String m3(String b, String h) { return "m3base: " + b + " " + h; } - @consume("direct:m4base") - public String m4(@Body String b, @Header("test") String h) { + public String m4(String b, String h) { return "m4base: " + b + " " + h; } - public void m5(@Body String b, @Header("test") String h) { + public void m5(String b, String h) { } } diff --git a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoBaseIntf.java b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoBaseIntf.java new file mode 100644 index 0000000000..2ca8ef4360 --- /dev/null +++ b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoBaseIntf.java @@ -0,0 +1,21 @@ 
+package se.scalablesolutions.akka.camel; + +import org.apache.camel.Body; +import org.apache.camel.Header; + +import se.scalablesolutions.akka.actor.annotation.consume; + +/** + * @author Martin Krasser + */ +public interface PojoBaseIntf { + + public String m1(String b, String h); + @consume("direct:m2base") + public String m2(@Body String b, @Header("test") String h); + @consume("direct:m3base") + public String m3(@Body String b, @Header("test") String h); + @consume("direct:m4base") + public String m4(@Body String b, @Header("test") String h); + public void m5(@Body String b, @Header("test") String h); +} diff --git a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoImpl.java b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoImpl.java index b48202d4dc..f8ade8ac97 100644 --- a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoImpl.java +++ b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoImpl.java @@ -1,23 +1,17 @@ package se.scalablesolutions.akka.camel; -import org.apache.camel.Body; -import org.apache.camel.Header; - -import se.scalablesolutions.akka.actor.annotation.consume; +import se.scalablesolutions.akka.actor.TypedActor; /** * @author Martin Krasser */ -public class PojoImpl implements PojoIntf { +public class PojoImpl extends TypedActor implements PojoIntf { public String m1(String b, String h) { return "m1impl: " + b + " " + h; } - @consume("direct:m2impl") - public String m2(@Body String b, @Header("test") String h) { + public String m2(String b, String h) { return "m2impl: " + b + " " + h; } - - } diff --git a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/Pojo.java b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoNonConsumer.java similarity index 57% rename from akka-camel/src/test/java/se/scalablesolutions/akka/camel/Pojo.java rename to akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoNonConsumer.java index d1848c49ee..fc6ea834fd 100644 --- 
a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/Pojo.java +++ b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoNonConsumer.java @@ -1,11 +1,11 @@ package se.scalablesolutions.akka.camel; -import se.scalablesolutions.akka.actor.annotation.consume; +import se.scalablesolutions.akka.actor.*; /** * @author Martin Krasser */ -public class Pojo { +public class PojoNonConsumer extends TypedActor implements PojoNonConsumerIntf { public String foo(String s) { return String.format("foo: %s", s); diff --git a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoNonConsumerIntf.java b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoNonConsumerIntf.java new file mode 100644 index 0000000000..aec8caaf19 --- /dev/null +++ b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoNonConsumerIntf.java @@ -0,0 +1,9 @@ +package se.scalablesolutions.akka.camel; + +/** + * @author Martin Krasser + */ +public interface PojoNonConsumerIntf { + + public String foo(String s); +} \ No newline at end of file diff --git a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoRemote.java b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoRemote.java index 57b0999b8f..3ea8b05180 100644 --- a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoRemote.java +++ b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoRemote.java @@ -1,15 +1,14 @@ package se.scalablesolutions.akka.camel; -import se.scalablesolutions.akka.actor.annotation.consume; +import se.scalablesolutions.akka.actor.TypedActor; /** * @author Martin Krasser */ -public class PojoRemote { +public class PojoRemote extends TypedActor implements PojoRemoteIntf { - @consume("direct:remote-active-object") public String foo(String s) { - return String.format("remote active object: %s", s); + return String.format("remote typed actor: %s", s); } } diff --git a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoRemoteIntf.java 
b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoRemoteIntf.java new file mode 100644 index 0000000000..45f09e1757 --- /dev/null +++ b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoRemoteIntf.java @@ -0,0 +1,12 @@ +package se.scalablesolutions.akka.camel; + +import se.scalablesolutions.akka.actor.annotation.consume; + +/** + * @author Martin Krasser + */ +public interface PojoRemoteIntf { + + @consume("direct:remote-typed-actor") + public String foo(String s); +} diff --git a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoSingle.java b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoSingle.java index 7d577535b2..7d4f6be5da 100644 --- a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoSingle.java +++ b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoSingle.java @@ -1,13 +1,12 @@ package se.scalablesolutions.akka.camel; -import se.scalablesolutions.akka.actor.annotation.consume; +import se.scalablesolutions.akka.actor.TypedActor; /** * @author Martin Krasser */ -public class PojoSingle { +public class PojoSingle extends TypedActor implements PojoSingleIntf { - @consume("direct:foo") public void foo(String b) { } diff --git a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoSingleIntf.java b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoSingleIntf.java new file mode 100644 index 0000000000..22a25325a3 --- /dev/null +++ b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoSingleIntf.java @@ -0,0 +1,12 @@ +package se.scalablesolutions.akka.camel; + +import se.scalablesolutions.akka.actor.annotation.consume; + +/** + * @author Martin Krasser + */ +public interface PojoSingleIntf { + + @consume("direct:foo") + public void foo(String b); +} diff --git a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoSub.java b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoSub.java index be5b453698..a7e592ef01 100644 --- 
a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoSub.java +++ b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoSub.java @@ -1,15 +1,11 @@ package se.scalablesolutions.akka.camel; -import org.apache.camel.Body; -import org.apache.camel.Header; +import se.scalablesolutions.akka.actor.TypedActor; -import se.scalablesolutions.akka.actor.annotation.consume; - -public class PojoSub extends PojoBase { +public class PojoSub extends PojoBase implements PojoSubIntf { @Override - @consume("direct:m1sub") - public String m1(@Body String b, @Header("test") String h) { + public String m1(String b, String h) { return "m1sub: " + b + " " + h; } @@ -19,8 +15,7 @@ public class PojoSub extends PojoBase { } @Override - @consume("direct:m3sub") - public String m3(@Body String b, @Header("test") String h) { + public String m3(String b, String h) { return "m3sub: " + b + " " + h; } diff --git a/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoSubIntf.java b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoSubIntf.java new file mode 100644 index 0000000000..08a153b124 --- /dev/null +++ b/akka-camel/src/test/java/se/scalablesolutions/akka/camel/PojoSubIntf.java @@ -0,0 +1,18 @@ +package se.scalablesolutions.akka.camel; + +import org.apache.camel.Body; +import org.apache.camel.Header; + +import se.scalablesolutions.akka.actor.annotation.consume; + +public interface PojoSubIntf extends PojoBaseIntf { + @consume("direct:m1sub") + public String m1(@Body String b, @Header("test") String h); + + @Override + public String m2(String b, String h); + + @Override + @consume("direct:m3sub") + public String m3(@Body String b, @Header("test") String h); +} diff --git a/akka-camel/src/test/scala/CamelServiceFeatureTest.scala b/akka-camel/src/test/scala/CamelServiceFeatureTest.scala index 1e88b62bf2..71ab86599d 100644 --- a/akka-camel/src/test/scala/CamelServiceFeatureTest.scala +++ b/akka-camel/src/test/scala/CamelServiceFeatureTest.scala @@ -1,12 +1,13 
@@ package se.scalablesolutions.akka.camel -import java.util.concurrent.{CountDownLatch, TimeUnit} +import java.util.concurrent.{TimeoutException, CountDownLatch, TimeUnit} +import org.apache.camel.CamelExecutionException import org.apache.camel.builder.RouteBuilder import org.scalatest.{GivenWhenThen, BeforeAndAfterAll, FeatureSpec} import se.scalablesolutions.akka.actor.Actor._ -import se.scalablesolutions.akka.actor.{ActiveObject, Actor, ActorRegistry} +import se.scalablesolutions.akka.actor.{TypedActor, Actor, ActorRegistry} class CamelServiceFeatureTest extends FeatureSpec with BeforeAndAfterAll with GivenWhenThen { import CamelServiceFeatureTest._ @@ -16,7 +17,7 @@ class CamelServiceFeatureTest extends FeatureSpec with BeforeAndAfterAll with Gi override protected def beforeAll = { ActorRegistry.shutdownAll // create new CamelService instance - service = CamelService.newInstance + service = CamelServiceFactory.createCamelService // register test consumer before starting the CamelService actorOf(new TestConsumer("direct:publish-test-1")).start // Configure a custom camel route @@ -26,7 +27,7 @@ class CamelServiceFeatureTest extends FeatureSpec with BeforeAndAfterAll with Gi // count expectations in the next step (needed for testing only). service.consumerPublisher.start // set expectations on publish count - val latch = (service.consumerPublisher !! 
SetExpectedMessageCount(1)).as[CountDownLatch].get + val latch = service.expectEndpointActivationCount(1) // start the CamelService service.load // await publication of first test consumer @@ -40,10 +41,10 @@ class CamelServiceFeatureTest extends FeatureSpec with BeforeAndAfterAll with Gi feature("Publish registered consumer actors in the global CamelContext") { - scenario("access registered consumer actors via Camel direct-endpoints") { + scenario("access non-blocking consumer actors via Camel direct-endpoints") { given("two consumer actors registered before and after CamelService startup") - val latch = (service.consumerPublisher !! SetExpectedMessageCount(1)).as[CountDownLatch].get + val latch = service.expectEndpointActivationCount(1) actorOf(new TestConsumer("direct:publish-test-2")).start assert(latch.await(5000, TimeUnit.MILLISECONDS)) @@ -55,6 +56,25 @@ class CamelServiceFeatureTest extends FeatureSpec with BeforeAndAfterAll with Gi assert(response1 === "received msg1") assert(response2 === "received msg2") } + + scenario("access blocking, non-responding consumer actor via a Camel direct-endpoint") { + + given("a consumer actor registered after CamelService startup") + val latch = service.expectEndpointActivationCount(1) + actorOf(new TestBlocker("direct:publish-test-3")).start + assert(latch.await(5000, TimeUnit.MILLISECONDS)) + + try { + when("a request is sent to this actor") + CamelContextManager.template.requestBody("direct:publish-test-3", "msg3") + fail("expected TimoutException not thrown") + } catch { + case e => { + then("a TimoutException should be thrown") + assert(e.getCause.isInstanceOf[TimeoutException]) + } + } + } } feature("Unpublish registered consumer actor from the global CamelContext") { @@ -62,24 +82,22 @@ class CamelServiceFeatureTest extends FeatureSpec with BeforeAndAfterAll with Gi scenario("access to unregistered consumer actor via Camel direct-endpoint fails") { val endpointUri = "direct:unpublish-test-1" - given("a consumer 
actor that has been stopped") + given("a consumer actor registered after CamelService startup") assert(CamelContextManager.context.hasEndpoint(endpointUri) eq null) - var latch = (service.consumerPublisher !! SetExpectedMessageCount(1)).as[CountDownLatch].get + var latch = service.expectEndpointActivationCount(1) val consumer = actorOf(new TestConsumer(endpointUri)).start assert(latch.await(5000, TimeUnit.MILLISECONDS)) assert(CamelContextManager.context.hasEndpoint(endpointUri) ne null) - latch = (service.consumerPublisher !! SetExpectedMessageCount(1)).as[CountDownLatch].get + when("the actor is stopped") + latch = service.expectEndpointDeactivationCount(1) consumer.stop assert(latch.await(5000, TimeUnit.MILLISECONDS)) - // endpoint is still there but the route has been stopped - assert(CamelContextManager.context.hasEndpoint(endpointUri) ne null) - when("a request is sent to this actor") - val response1 = CamelContextManager.template.requestBody(endpointUri, "msg1") - - then("the direct-endpoint falls back to its default behaviour and returns the original message") - assert(response1 === "msg1") + then("the associated endpoint isn't accessible any more") + intercept[CamelExecutionException] { + CamelContextManager.template.requestBody(endpointUri, "msg1") + } } } @@ -98,13 +116,13 @@ class CamelServiceFeatureTest extends FeatureSpec with BeforeAndAfterAll with Gi } } - feature("Publish active object methods in the global CamelContext") { + feature("Publish typed actor methods in the global CamelContext") { - scenario("access active object methods via Camel direct-endpoints") { + scenario("access typed actor methods via Camel direct-endpoints") { - given("an active object registered after CamelService startup") - var latch = (service.consumerPublisher !! 
SetExpectedMessageCount(3)).as[CountDownLatch].get - val obj = ActiveObject.newInstance(classOf[PojoBase]) + given("an typed actor registered after CamelService startup") + var latch = service.expectEndpointActivationCount(3) + val obj = TypedActor.newInstance(classOf[PojoBaseIntf], classOf[PojoBase]) assert(latch.await(5000, TimeUnit.MILLISECONDS)) when("requests are sent to published methods") @@ -117,35 +135,37 @@ class CamelServiceFeatureTest extends FeatureSpec with BeforeAndAfterAll with Gi assert(response2 === "m3base: x y") assert(response3 === "m4base: x y") - // cleanup to avoid conflicts with next test (i.e. avoid multiple consumers on direct-endpoints) - latch = (service.consumerPublisher !! SetExpectedMessageCount(3)).as[CountDownLatch].get - ActiveObject.stop(obj) + // cleanup to avoid conflicts with next test (i.e. avoid multiple consumers on direct-endpoints) + latch = service.expectEndpointDeactivationCount(3) + TypedActor.stop(obj) assert(latch.await(5000, TimeUnit.MILLISECONDS)) } } - feature("Unpublish active object method from the global CamelContext") { + feature("Unpublish typed actor method from the global CamelContext") { - scenario("access to unregistered active object methof via Camel direct-endpoint fails") { + scenario("access to unregistered typed actor method via Camel direct-endpoint fails") { - given("an active object that has been stopped") - var latch = (service.consumerPublisher !! SetExpectedMessageCount(3)).as[CountDownLatch].get - val obj = ActiveObject.newInstance(classOf[PojoBase]) + given("an typed actor registered after CamelService startup") + var latch = service.expectEndpointActivationCount(3) + val obj = TypedActor.newInstance(classOf[PojoBaseIntf], classOf[PojoBase]) assert(latch.await(5000, TimeUnit.MILLISECONDS)) - latch = (service.consumerPublisher !! 
SetExpectedMessageCount(3)).as[CountDownLatch].get - ActiveObject.stop(obj) + when("the typed actor is stopped") + latch = service.expectEndpointDeactivationCount(3) + TypedActor.stop(obj) assert(latch.await(5000, TimeUnit.MILLISECONDS)) - when("requests are sent to published methods") - val response1 = CamelContextManager.template.requestBodyAndHeader("direct:m2base", "x", "test", "y") - val response2 = CamelContextManager.template.requestBodyAndHeader("direct:m3base", "x", "test", "y") - val response3 = CamelContextManager.template.requestBodyAndHeader("direct:m4base", "x", "test", "y") - - then("the direct-endpoints fall back to their default behaviour and return the original message") - assert(response1 === "x") - assert(response2 === "x") - assert(response3 === "x") + then("the associated endpoints aren't accessible any more") + intercept[CamelExecutionException] { + CamelContextManager.template.requestBodyAndHeader("direct:m2base", "x", "test", "y") + } + intercept[CamelExecutionException] { + CamelContextManager.template.requestBodyAndHeader("direct:m3base", "x", "test", "y") + } + intercept[CamelExecutionException] { + CamelContextManager.template.requestBodyAndHeader("direct:m4base", "x", "test", "y") + } } } } @@ -159,6 +179,15 @@ object CamelServiceFeatureTest { } } + class TestBlocker(uri: String) extends Actor with Consumer { + self.timeout = 1000 + def endpointUri = uri + override def blocking = true + protected def receive = { + case msg: Message => { /* do not reply */ } + } + } + class TestActor extends Actor { self.id = "custom-actor-id" protected def receive = { diff --git a/akka-camel/src/test/scala/ConsumerMethodRegisteredTest.scala b/akka-camel/src/test/scala/ConsumerMethodRegisteredTest.scala index 7c28c7d8ee..964fe8e7bf 100644 --- a/akka-camel/src/test/scala/ConsumerMethodRegisteredTest.scala +++ b/akka-camel/src/test/scala/ConsumerMethodRegisteredTest.scala @@ -4,7 +4,7 @@ import java.net.InetSocketAddress import 
org.scalatest.junit.JUnitSuite -import se.scalablesolutions.akka.actor.{AspectInit, ActiveObject} +import se.scalablesolutions.akka.actor.{AspectInit, TypedActor} import se.scalablesolutions.akka.camel.ConsumerMethodRegistered._ import org.junit.{AfterClass, Test} @@ -12,8 +12,8 @@ class ConsumerMethodRegisteredTest extends JUnitSuite { import ConsumerMethodRegisteredTest._ val remoteAddress = new InetSocketAddress("localhost", 8888); - val remoteAspectInit = AspectInit(classOf[String], null, Some(remoteAddress), 1000) - val localAspectInit = AspectInit(classOf[String], null, None, 1000) + val remoteAspectInit = AspectInit(classOf[String], null, null, Some(remoteAddress), 1000) + val localAspectInit = AspectInit(classOf[String], null, null, None, 1000) val ascendingMethodName = (r1: ConsumerMethodRegistered, r2: ConsumerMethodRegistered) => r1.method.getName < r2.method.getName @@ -44,14 +44,14 @@ class ConsumerMethodRegisteredTest extends JUnitSuite { } object ConsumerMethodRegisteredTest { - val activePojoBase = ActiveObject.newInstance(classOf[PojoBase]) - val activePojoSub = ActiveObject.newInstance(classOf[PojoSub]) - val activePojoIntf = ActiveObject.newInstance(classOf[PojoIntf], new PojoImpl) + val activePojoBase = TypedActor.newInstance(classOf[PojoBaseIntf], classOf[PojoBase]) + val activePojoSub = TypedActor.newInstance(classOf[PojoSubIntf], classOf[PojoSub]) + val activePojoIntf = TypedActor.newInstance(classOf[PojoIntf], classOf[PojoImpl]) @AfterClass def afterClass = { - ActiveObject.stop(activePojoBase) - ActiveObject.stop(activePojoSub) - ActiveObject.stop(activePojoIntf) + TypedActor.stop(activePojoBase) + TypedActor.stop(activePojoSub) + TypedActor.stop(activePojoIntf) } } diff --git a/akka-camel/src/test/scala/ConsumerRegisteredTest.scala b/akka-camel/src/test/scala/ConsumerRegisteredTest.scala index caaa03591b..3339caacf2 100644 --- a/akka-camel/src/test/scala/ConsumerRegisteredTest.scala +++ 
b/akka-camel/src/test/scala/ConsumerRegisteredTest.scala @@ -5,17 +5,16 @@ import org.scalatest.junit.JUnitSuite import se.scalablesolutions.akka.actor.Actor import se.scalablesolutions.akka.actor.Actor._ -import se.scalablesolutions.akka.actor.annotation.consume object ConsumerRegisteredTest { - @consume("mock:test1") - class ConsumeAnnotatedActor extends Actor { - self.id = "test" + class ConsumerActor1 extends Actor with Consumer { + def endpointUri = "mock:test1" protected def receive = null } - class ConsumerActor extends Actor with Consumer { + class ConsumerActor2 extends Actor with Consumer { def endpointUri = "mock:test2" + override def blocking = true protected def receive = null } @@ -27,21 +26,14 @@ object ConsumerRegisteredTest { class ConsumerRegisteredTest extends JUnitSuite { import ConsumerRegisteredTest._ - @Test def shouldCreatePublishRequestList = { - val a = actorOf[ConsumeAnnotatedActor] - val as = List(a) - val events = for (a <- as; e <- ConsumerRegistered.forConsumer(a)) yield e - assert(events === List(ConsumerRegistered(a, "mock:test1", "test", false))) + @Test def shouldCreateSomeNonBlockingPublishRequest = { + val ca = actorOf[ConsumerActor1] + val event = ConsumerRegistered.forConsumer(ca) + assert(event === Some(ConsumerRegistered(ca, "mock:test1", ca.uuid, false))) } - @Test def shouldCreateSomePublishRequestWithActorId = { - val a = actorOf[ConsumeAnnotatedActor] - val event = ConsumerRegistered.forConsumer(a) - assert(event === Some(ConsumerRegistered(a, "mock:test1", "test", false))) - } - - @Test def shouldCreateSomePublishRequestWithActorUuid = { - val ca = actorOf[ConsumerActor] + @Test def shouldCreateSomeBlockingPublishRequest = { + val ca = actorOf[ConsumerActor2] val event = ConsumerRegistered.forConsumer(ca) assert(event === Some(ConsumerRegistered(ca, "mock:test2", ca.uuid, true))) } diff --git a/akka-camel/src/test/scala/ProducerFeatureTest.scala b/akka-camel/src/test/scala/ProducerFeatureTest.scala index 
96d1b9eeef..d238286f9c 100644 --- a/akka-camel/src/test/scala/ProducerFeatureTest.scala +++ b/akka-camel/src/test/scala/ProducerFeatureTest.scala @@ -5,14 +5,8 @@ import org.apache.camel.builder.RouteBuilder import org.apache.camel.component.mock.MockEndpoint import org.scalatest.{GivenWhenThen, BeforeAndAfterEach, BeforeAndAfterAll, FeatureSpec} -import se.scalablesolutions.akka.actor.{Actor, ActorRegistry} import se.scalablesolutions.akka.actor.Actor._ - -object ProducerFeatureTest { - class TestProducer(uri: String) extends Actor with Producer { - def endpointUri = uri - } -} +import se.scalablesolutions.akka.actor.{ActorRef, Actor, ActorRegistry} class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with BeforeAndAfterEach with GivenWhenThen { import ProducerFeatureTest._ @@ -24,109 +18,276 @@ class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with Before CamelContextManager.start } - override protected def afterAll = CamelContextManager.stop + override protected def afterAll = { + CamelContextManager.stop + ActorRegistry.shutdownAll + } override protected def afterEach = { mockEndpoint.reset - ActorRegistry.shutdownAll } feature("Produce a message to a Camel endpoint") { - scenario("produce message sync and receive response") { - given("a registered synchronous two-way producer for endpoint direct:producer-test-2") - val producer = actorOf(new TestProducer("direct:producer-test-2") with Sync) + scenario("produce message and receive normal response") { + given("a registered two-way producer") + val producer = actorOf(new TestProducer("direct:producer-test-2", true)) producer.start - when("a test message is sent to the producer") + when("a test message is sent to the producer with !!") val message = Message("test", Map(Message.MessageExchangeId -> "123")) val result = producer !! 
message - then("the expected result message should be returned including a correlation identifier") - val expected = Message("received test", Map(Message.MessageExchangeId -> "123")) + then("a normal response should have been returned by the producer") + val expected = Message("received TEST", Map(Message.MessageExchangeId -> "123")) assert(result === Some(expected)) } - scenario("produce message async and receive response") { - given("a registered asynchronous two-way producer for endpoint direct:producer-test-2") + scenario("produce message and receive failure response") { + given("a registered two-way producer") val producer = actorOf(new TestProducer("direct:producer-test-2")) producer.start - when("a test message is sent to the producer") - val message = Message("test", Map(Message.MessageExchangeId -> "123")) - val result = producer !! message - - then("the expected result message should be returned including a correlation identifier") - val expected = Message("received test", Map(Message.MessageExchangeId -> "123")) - assert(result === Some(expected)) - } - - scenario("produce message sync and receive failure") { - given("a registered synchronous two-way producer for endpoint direct:producer-test-2") - val producer = actorOf(new TestProducer("direct:producer-test-2") with Sync) - producer.start - - when("a fail message is sent to the producer") + when("a test message causing an exception is sent to the producer with !!") val message = Message("fail", Map(Message.MessageExchangeId -> "123")) val result = (producer !! 
message).as[Failure] - then("the expected failure message should be returned including a correlation identifier") + then("a failure response should have been returned by the producer") val expectedFailureText = result.get.cause.getMessage val expectedHeaders = result.get.headers assert(expectedFailureText === "failure") assert(expectedHeaders === Map(Message.MessageExchangeId -> "123")) } - scenario("produce message async and receive failure") { - given("a registered asynchronous two-way producer for endpoint direct:producer-test-2") - val producer = actorOf(new TestProducer("direct:producer-test-2")) + scenario("produce message oneway") { + given("a registered one-way producer") + val producer = actorOf(new TestProducer("direct:producer-test-1", true) with Oneway) producer.start - when("a fail message is sent to the producer") - val message = Message("fail", Map(Message.MessageExchangeId -> "123")) - val result = (producer !! message).as[Failure] - - then("the expected failure message should be returned including a correlation identifier") - val expectedFailureText = result.get.cause.getMessage - val expectedHeaders = result.get.headers - assert(expectedFailureText === "failure") - assert(expectedHeaders === Map(Message.MessageExchangeId -> "123")) - } - - scenario("produce message sync oneway") { - given("a registered synchronous one-way producer for endpoint direct:producer-test-1") - val producer = actorOf(new TestProducer("direct:producer-test-1") with Sync with Oneway) - producer.start - - when("a test message is sent to the producer") - mockEndpoint.expectedBodiesReceived("test") + when("a test message is sent to the producer with !") + mockEndpoint.expectedBodiesReceived("TEST") producer ! 
Message("test") - then("the expected message should have been sent to mock:mock") + then("the test message should have been sent to mock:mock") mockEndpoint.assertIsSatisfied } - scenario("produce message async oneway") { - given("a registered asynchronous one-way producer for endpoint direct:producer-test-1") - val producer = actorOf(new TestProducer("direct:producer-test-1") with Oneway) + scenario("produce message twoway without sender reference") { + given("a registered two-way producer") + val producer = actorOf(new TestProducer("direct:producer-test-1")) producer.start - when("a test message is sent to the producer") + when("a test message is sent to the producer with !") mockEndpoint.expectedBodiesReceived("test") producer ! Message("test") - then("the expected message should have been sent to mock:mock") + then("there should be only a warning that there's no sender reference") + mockEndpoint.assertIsSatisfied + } + } + + feature("Produce a message to an async Camel endpoint") { + + scenario("produce message and async receive normal response") { + given("a registered two-way producer") + val producer = actorOf(new TestProducer("direct:producer-test-3")) + producer.start + + when("a test message is sent to the producer with !!") + val message = Message("test", Map(Message.MessageExchangeId -> "123")) + val result = producer !! message + + then("a normal response should have been returned by the producer") + val expected = Message("received test", Map(Message.MessageExchangeId -> "123")) + assert(result === Some(expected)) + } + + scenario("produce message and async receive failure response") { + given("a registered two-way producer") + val producer = actorOf(new TestProducer("direct:producer-test-3")) + producer.start + + when("a test message causing an exception is sent to the producer with !!") + val message = Message("fail", Map(Message.MessageExchangeId -> "123")) + val result = (producer !! 
message).as[Failure] + + then("a failure response should have been returned by the producer") + val expectedFailureText = result.get.cause.getMessage + val expectedHeaders = result.get.headers + assert(expectedFailureText === "failure") + assert(expectedHeaders === Map(Message.MessageExchangeId -> "123")) + } + } + + feature("Produce a message to a Camel endpoint and then forward the result") { + + scenario("produce message, forward and receive normal response") { + given("a registered two-way producer configured with a forward target") + val target = actorOf[ReplyingForwardTarget].start + val producer = actorOf(new TestForwarder("direct:producer-test-2", target)).start + + when("a test message is sent to the producer with !!") + val message = Message("test", Map(Message.MessageExchangeId -> "123")) + val result = producer !! message + + then("a normal response should have been returned by the forward target") + val expected = Message("received test", Map(Message.MessageExchangeId -> "123", "test" -> "result")) + assert(result === Some(expected)) + } + + scenario("produce message, forward and receive failure response") { + given("a registered two-way producer configured with a forward target") + val target = actorOf[ReplyingForwardTarget].start + val producer = actorOf(new TestForwarder("direct:producer-test-2", target)).start + + when("a test message causing an exception is sent to the producer with !!") + val message = Message("fail", Map(Message.MessageExchangeId -> "123")) + val result = (producer !! 
message).as[Failure] + + then("a failure response should have been returned by the forward target") + val expectedFailureText = result.get.cause.getMessage + val expectedHeaders = result.get.headers + assert(expectedFailureText === "failure") + assert(expectedHeaders === Map(Message.MessageExchangeId -> "123", "test" -> "failure")) + } + + scenario("produce message, forward and produce normal response") { + given("a registered one-way producer configured with a forward target") + val target = actorOf[ProducingForwardTarget].start + val producer = actorOf(new TestForwarder("direct:producer-test-2", target)).start + + when("a test message is sent to the producer with !") + mockEndpoint.expectedBodiesReceived("received test") + val result = producer.!(Message("test"))(Some(producer)) + + then("a normal response should have been produced by the forward target") + mockEndpoint.assertIsSatisfied + } + + scenario("produce message, forward and produce failure response") { + given("a registered one-way producer configured with a forward target") + val target = actorOf[ProducingForwardTarget].start + val producer = actorOf(new TestForwarder("direct:producer-test-2", target)).start + + when("a test message causing an exception is sent to the producer with !") + mockEndpoint.expectedMessageCount(1) + mockEndpoint.message(0).body().isInstanceOf(classOf[Failure]) + val result = producer.!(Message("fail"))(Some(producer)) + + then("a failure response should have been produced by the forward target") + mockEndpoint.assertIsSatisfied + } + } + + feature("Produce a message to an async Camel endpoint and then forward the result") { + + scenario("produce message, forward and async receive normal response") { + given("a registered two-way producer configured with a forward target") + val target = actorOf[ReplyingForwardTarget].start + val producer = actorOf(new TestForwarder("direct:producer-test-3", target)).start + + when("a test message is sent to the producer with !!") + val 
message = Message("test", Map(Message.MessageExchangeId -> "123")) + val result = producer !! message + + then("a normal response should have been returned by the forward target") + val expected = Message("received test", Map(Message.MessageExchangeId -> "123", "test" -> "result")) + assert(result === Some(expected)) + } + + scenario("produce message, forward and async receive failure response") { + given("a registered two-way producer configured with a forward target") + val target = actorOf[ReplyingForwardTarget].start + val producer = actorOf(new TestForwarder("direct:producer-test-3", target)).start + + when("a test message causing an exception is sent to the producer with !!") + val message = Message("fail", Map(Message.MessageExchangeId -> "123")) + val result = (producer !! message).as[Failure] + + then("a failure response should have been returned by the forward target") + val expectedFailureText = result.get.cause.getMessage + val expectedHeaders = result.get.headers + assert(expectedFailureText === "failure") + assert(expectedHeaders === Map(Message.MessageExchangeId -> "123", "test" -> "failure")) + } + + scenario("produce message, forward and async produce normal response") { + given("a registered one-way producer configured with a forward target") + val target = actorOf[ProducingForwardTarget].start + val producer = actorOf(new TestForwarder("direct:producer-test-3", target)).start + + when("a test message is sent to the producer with !") + mockEndpoint.expectedBodiesReceived("received test") + val result = producer.!(Message("test"))(Some(producer)) + + then("a normal response should have been produced by the forward target") + mockEndpoint.assertIsSatisfied + } + + scenario("produce message, forward and async produce failure response") { + given("a registered one-way producer configured with a forward target") + val target = actorOf[ProducingForwardTarget].start + val producer = actorOf(new TestForwarder("direct:producer-test-3", target)).start + + 
when("a test message causing an exception is sent to the producer with !") + mockEndpoint.expectedMessageCount(1) + mockEndpoint.message(0).body().isInstanceOf(classOf[Failure]) + val result = producer.!(Message("fail"))(Some(producer)) + + then("a failure response should have been produced by the forward target") mockEndpoint.assertIsSatisfied } } private def mockEndpoint = CamelContextManager.context.getEndpoint("mock:mock", classOf[MockEndpoint]) +} + +object ProducerFeatureTest { + class TestProducer(uri: String, upper: Boolean = false) extends Actor with Producer { + def endpointUri = uri + override protected def receiveBeforeProduce = { + case msg: Message => if (upper) msg.transformBody[String] { _.toUpperCase } else msg + } + } + + class TestForwarder(uri: String, target: ActorRef) extends Actor with Producer { + def endpointUri = uri + override protected def receiveAfterProduce = { + case msg => target forward msg + } + } + + class TestResponder extends Actor { + protected def receive = { + case msg: Message => msg.body match { + case "fail" => self.reply(Failure(new Exception("failure"), msg.headers)) + case _ => self.reply(msg.transformBody[String] { "received %s" format _ }) + } + } + } + + class ReplyingForwardTarget extends Actor { + protected def receive = { + case msg: Message => + self.reply(msg.addHeader("test" -> "result")) + case msg: Failure => + self.reply(Failure(msg.cause, msg.headers + ("test" -> "failure"))) + } + } + + class ProducingForwardTarget extends Actor with Producer with Oneway { + def endpointUri = "direct:forward-test-1" + } class TestRoute extends RouteBuilder { + val responder = actorOf[TestResponder].start def configure { + from("direct:forward-test-1").to("mock:mock") // for one-way messaging tests from("direct:producer-test-1").to("mock:mock") - // for two-way messaging tests + // for two-way messaging tests (async) + from("direct:producer-test-3").to("actor:uuid:%s" format responder.uuid) + // for two-way messaging tests 
(sync) from("direct:producer-test-2").process(new Processor() { def process(exchange: Exchange) = { exchange.getIn.getBody match { diff --git a/akka-camel/src/test/scala/PublishRequestorTest.scala b/akka-camel/src/test/scala/PublishRequestorTest.scala index 44c6c30684..2913a5981c 100644 --- a/akka-camel/src/test/scala/PublishRequestorTest.scala +++ b/akka-camel/src/test/scala/PublishRequestorTest.scala @@ -32,28 +32,28 @@ class PublishRequestorTest extends JUnitSuite { } @Test def shouldReceiveConsumerMethodRegisteredEvent = { - val obj = ActiveObject.newInstance(classOf[PojoSingle]) - val init = AspectInit(classOf[PojoSingle], null, None, 1000) + val obj = TypedActor.newInstance(classOf[PojoSingleIntf], classOf[PojoSingle]) + val init = AspectInit(classOf[PojoSingleIntf], null, null, None, 1000) val latch = (publisher !! SetExpectedTestMessageCount(1)).as[CountDownLatch].get requestor ! AspectInitRegistered(obj, init) assert(latch.await(5000, TimeUnit.MILLISECONDS)) val event = (publisher !! GetRetainedMessage).get.asInstanceOf[ConsumerMethodRegistered] assert(event.init === init) assert(event.uri === "direct:foo") - assert(event.activeObject === obj) + assert(event.typedActor === obj) assert(event.method.getName === "foo") } @Test def shouldReceiveConsumerMethodUnregisteredEvent = { - val obj = ActiveObject.newInstance(classOf[PojoSingle]) - val init = AspectInit(classOf[PojoSingle], null, None, 1000) + val obj = TypedActor.newInstance(classOf[PojoSingleIntf], classOf[PojoSingle]) + val init = AspectInit(classOf[PojoSingleIntf], null, null, None, 1000) val latch = (publisher !! SetExpectedTestMessageCount(1)).as[CountDownLatch].get requestor ! AspectInitUnregistered(obj, init) assert(latch.await(5000, TimeUnit.MILLISECONDS)) val event = (publisher !! 
GetRetainedMessage).get.asInstanceOf[ConsumerMethodUnregistered] assert(event.init === init) assert(event.uri === "direct:foo") - assert(event.activeObject === obj) + assert(event.typedActor === obj) assert(event.method.getName === "foo") } @@ -62,7 +62,7 @@ class PublishRequestorTest extends JUnitSuite { requestor ! ActorRegistered(consumer) assert(latch.await(5000, TimeUnit.MILLISECONDS)) assert((publisher !! GetRetainedMessage) === - Some(ConsumerRegistered(consumer, "mock:test", consumer.uuid, true))) + Some(ConsumerRegistered(consumer, "mock:test", consumer.uuid, false))) } @Test def shouldReceiveConsumerUnregisteredEvent = { @@ -70,7 +70,7 @@ class PublishRequestorTest extends JUnitSuite { requestor ! ActorUnregistered(consumer) assert(latch.await(5000, TimeUnit.MILLISECONDS)) assert((publisher !! GetRetainedMessage) === - Some(ConsumerUnregistered(consumer, "mock:test", consumer.uuid, true))) + Some(ConsumerUnregistered(consumer, "mock:test", consumer.uuid))) } } diff --git a/akka-camel/src/test/scala/RemoteConsumerTest.scala b/akka-camel/src/test/scala/RemoteConsumerTest.scala index 7e3b666590..fd5dc52a8b 100644 --- a/akka-camel/src/test/scala/RemoteConsumerTest.scala +++ b/akka-camel/src/test/scala/RemoteConsumerTest.scala @@ -5,7 +5,7 @@ import java.util.concurrent.{CountDownLatch, TimeUnit} import org.scalatest.{GivenWhenThen, BeforeAndAfterAll, FeatureSpec} import se.scalablesolutions.akka.actor.Actor._ -import se.scalablesolutions.akka.actor.{ActiveObject, ActorRegistry, RemoteActor} +import se.scalablesolutions.akka.actor.{TypedActor, ActorRegistry, RemoteActor} import se.scalablesolutions.akka.remote.{RemoteClient, RemoteServer} /** @@ -20,7 +20,7 @@ class RemoteConsumerTest extends FeatureSpec with BeforeAndAfterAll with GivenWh override protected def beforeAll = { ActorRegistry.shutdownAll - service = CamelService.newInstance + service = CamelServiceFactory.createCamelService service.load server = new RemoteServer() @@ -45,7 +45,7 @@ class 
RemoteConsumerTest extends FeatureSpec with BeforeAndAfterAll with GivenWh val consumer = actorOf[RemoteConsumer].start when("remote consumer publication is triggered") - val latch = (service.consumerPublisher !! SetExpectedMessageCount(1)).as[CountDownLatch].get + var latch = service.expectEndpointActivationCount(1) consumer !! "init" assert(latch.await(5000, TimeUnit.MILLISECONDS)) @@ -55,19 +55,19 @@ class RemoteConsumerTest extends FeatureSpec with BeforeAndAfterAll with GivenWh } } - feature("Client-initiated remote consumer active object") { + feature("Client-initiated remote consumer typed actor") { scenario("access published remote consumer method") { - given("a client-initiated remote consumer active object") - val consumer = ActiveObject.newRemoteInstance(classOf[PojoRemote], host, port) + given("a client-initiated remote consumer typed actor") + val consumer = TypedActor.newRemoteInstance(classOf[PojoRemoteIntf], classOf[PojoRemote], host, port) when("remote consumer publication is triggered") - val latch = (service.consumerPublisher !! 
SetExpectedMessageCount(1)).as[CountDownLatch].get + var latch = service.expectEndpointActivationCount(1) consumer.foo("init") assert(latch.await(5000, TimeUnit.MILLISECONDS)) then("the published method is accessible via its endpoint URI") - val response = CamelContextManager.template.requestBody("direct:remote-active-object", "test") - assert(response === "remote active object: test") + val response = CamelContextManager.template.requestBody("direct:remote-typed-actor", "test") + assert(response === "remote typed actor: test") } } } diff --git a/akka-camel/src/test/scala/component/ActorComponentFeatureTest.scala b/akka-camel/src/test/scala/component/ActorComponentFeatureTest.scala index b7fd607f28..3817bc239e 100644 --- a/akka-camel/src/test/scala/component/ActorComponentFeatureTest.scala +++ b/akka-camel/src/test/scala/component/ActorComponentFeatureTest.scala @@ -3,23 +3,31 @@ package se.scalablesolutions.akka.camel.component import java.util.concurrent.{TimeUnit, CountDownLatch} import org.apache.camel.RuntimeCamelException +import org.apache.camel.builder.RouteBuilder +import org.apache.camel.component.mock.MockEndpoint import org.scalatest.{BeforeAndAfterEach, BeforeAndAfterAll, FeatureSpec} import se.scalablesolutions.akka.actor.Actor._ import se.scalablesolutions.akka.actor.{ActorRegistry, Actor} -import se.scalablesolutions.akka.camel.{Message, CamelContextManager} +import se.scalablesolutions.akka.camel.{Failure, Message, CamelContextManager} import se.scalablesolutions.akka.camel.support._ class ActorComponentFeatureTest extends FeatureSpec with BeforeAndAfterAll with BeforeAndAfterEach { + import ActorComponentFeatureTest._ + override protected def beforeAll = { ActorRegistry.shutdownAll CamelContextManager.init + CamelContextManager.context.addRoutes(new TestRoute) CamelContextManager.start } override protected def afterAll = CamelContextManager.stop - override protected def afterEach = ActorRegistry.shutdownAll + override protected def afterEach = { + 
ActorRegistry.shutdownAll + mockEndpoint.reset + } feature("Communicate with an actor from a Camel application using actor endpoint URIs") { import CamelContextManager.template @@ -55,8 +63,49 @@ class ActorComponentFeatureTest extends FeatureSpec with BeforeAndAfterAll with scenario("two-way communication with timeout") { val actor = actorOf[Tester3].start intercept[RuntimeCamelException] { - template.requestBody("actor:uuid:%s" format actor.uuid, "Martin") + template.requestBody("actor:uuid:%s?blocking=true" format actor.uuid, "Martin") } } + + scenario("two-way async communication with failure response") { + mockEndpoint.expectedBodiesReceived("whatever") + template.requestBody("direct:failure-test-1", "whatever") + mockEndpoint.assertIsSatisfied + } + + scenario("two-way sync communication with exception") { + mockEndpoint.expectedBodiesReceived("whatever") + template.requestBody("direct:failure-test-2", "whatever") + mockEndpoint.assertIsSatisfied + } + } + + private def mockEndpoint = CamelContextManager.context.getEndpoint("mock:mock", classOf[MockEndpoint]) +} + +object ActorComponentFeatureTest { + class FailWithMessage extends Actor { + protected def receive = { + case msg: Message => self.reply(Failure(new Exception("test"))) + } + } + + class FailWithException extends Actor { + protected def receive = { + case msg: Message => throw new Exception("test") + } + } + + class TestRoute extends RouteBuilder { + val failWithMessage = actorOf[FailWithMessage].start + val failWithException = actorOf[FailWithException].start + def configure { + from("direct:failure-test-1") + .onException(classOf[Exception]).to("mock:mock").handled(true).end + .to("actor:uuid:%s" format failWithMessage.uuid) + from("direct:failure-test-2") + .onException(classOf[Exception]).to("mock:mock").handled(true).end + .to("actor:uuid:%s?blocking=true" format failWithException.uuid) + } } } diff --git a/akka-camel/src/test/scala/component/ActorComponentTest.scala 
b/akka-camel/src/test/scala/component/ActorComponentTest.scala index 6bf472916b..e27e8c5875 100644 --- a/akka-camel/src/test/scala/component/ActorComponentTest.scala +++ b/akka-camel/src/test/scala/component/ActorComponentTest.scala @@ -1,11 +1,12 @@ package se.scalablesolutions.akka.camel.component +import org.apache.camel.{Endpoint, AsyncProcessor} import org.apache.camel.impl.DefaultCamelContext import org.junit._ import org.scalatest.junit.JUnitSuite class ActorComponentTest extends JUnitSuite { - val component: ActorComponent = ActorComponentTest.mockComponent + val component: ActorComponent = ActorComponentTest.actorComponent @Test def shouldCreateEndpointWithIdDefined = { val ep1: ActorEndpoint = component.createEndpoint("actor:abc").asInstanceOf[ActorEndpoint] @@ -14,21 +15,33 @@ class ActorComponentTest extends JUnitSuite { assert(ep2.id === Some("abc")) assert(ep1.uuid === None) assert(ep2.uuid === None) + assert(!ep1.blocking) + assert(!ep2.blocking) } @Test def shouldCreateEndpointWithUuidDefined = { val ep: ActorEndpoint = component.createEndpoint("actor:uuid:abc").asInstanceOf[ActorEndpoint] assert(ep.uuid === Some("abc")) assert(ep.id === None) + assert(!ep.blocking) + } + + @Test def shouldCreateEndpointWithBlockingSet = { + val ep: ActorEndpoint = component.createEndpoint("actor:uuid:abc?blocking=true").asInstanceOf[ActorEndpoint] + assert(ep.uuid === Some("abc")) + assert(ep.id === None) + assert(ep.blocking) } } object ActorComponentTest { - def mockComponent = { + def actorComponent = { val component = new ActorComponent component.setCamelContext(new DefaultCamelContext) component } - def mockEndpoint(uri:String) = mockComponent.createEndpoint(uri) + def actorEndpoint(uri:String) = actorComponent.createEndpoint(uri) + def actorProducer(endpoint: Endpoint) = endpoint.createProducer + def actorAsyncProducer(endpoint: Endpoint) = endpoint.createProducer.asInstanceOf[AsyncProcessor] } diff --git 
a/akka-camel/src/test/scala/component/ActorProducerTest.scala b/akka-camel/src/test/scala/component/ActorProducerTest.scala index f6d5f88d36..5e8a674e55 100644 --- a/akka-camel/src/test/scala/component/ActorProducerTest.scala +++ b/akka-camel/src/test/scala/component/ActorProducerTest.scala @@ -4,7 +4,8 @@ import ActorComponentTest._ import java.util.concurrent.{CountDownLatch, TimeoutException, TimeUnit} -import org.apache.camel.ExchangePattern +import org.apache.camel.{AsyncCallback, ExchangePattern} + import org.junit.{After, Test} import org.scalatest.junit.JUnitSuite import org.scalatest.BeforeAndAfterAll @@ -15,44 +16,77 @@ import se.scalablesolutions.akka.camel.{Failure, Message} import se.scalablesolutions.akka.camel.support._ class ActorProducerTest extends JUnitSuite with BeforeAndAfterAll { + import ActorProducerTest._ + @After def tearDown = ActorRegistry.shutdownAll - @Test def shouldSendMessageToActor = { + @Test def shouldSendMessageToActorWithProcessor = { val actor = actorOf[Tester1].start val latch = (actor !! SetExpectedMessageCount(1)).as[CountDownLatch].get - val endpoint = mockEndpoint("actor:uuid:%s" format actor.uuid) + val endpoint = actorEndpoint("actor:uuid:%s" format actor.uuid) val exchange = endpoint.createExchange(ExchangePattern.InOnly) exchange.getIn.setBody("Martin") exchange.getIn.setHeader("k1", "v1") - endpoint.createProducer.process(exchange) + actorProducer(endpoint).process(exchange) assert(latch.await(5000, TimeUnit.MILLISECONDS)) val reply = (actor !! GetRetainedMessage).get.asInstanceOf[Message] assert(reply.body === "Martin") assert(reply.headers === Map(Message.MessageExchangeId -> exchange.getExchangeId, "k1" -> "v1")) } - @Test def shouldSendMessageToActorAndReceiveResponse = { + @Test def shouldSendMessageToActorWithAsyncProcessor = { + val actor = actorOf[Tester1].start + val latch = (actor !! 
SetExpectedMessageCount(1)).as[CountDownLatch].get + val endpoint = actorEndpoint("actor:uuid:%s" format actor.uuid) + val exchange = endpoint.createExchange(ExchangePattern.InOnly) + exchange.getIn.setBody("Martin") + exchange.getIn.setHeader("k1", "v1") + actorAsyncProducer(endpoint).process(exchange, expectSyncCompletion) + assert(latch.await(5000, TimeUnit.MILLISECONDS)) + val reply = (actor !! GetRetainedMessage).get.asInstanceOf[Message] + assert(reply.body === "Martin") + assert(reply.headers === Map(Message.MessageExchangeId -> exchange.getExchangeId, "k1" -> "v1")) + } + + @Test def shouldSendMessageToActorAndReceiveResponseWithProcessor = { val actor = actorOf(new Tester2 { override def response(msg: Message) = Message(super.response(msg), Map("k2" -> "v2")) }).start - val endpoint = mockEndpoint("actor:uuid:%s" format actor.uuid) + val endpoint = actorEndpoint("actor:uuid:%s" format actor.uuid) val exchange = endpoint.createExchange(ExchangePattern.InOut) exchange.getIn.setBody("Martin") exchange.getIn.setHeader("k1", "v1") - endpoint.createProducer.process(exchange) + actorProducer(endpoint).process(exchange) assert(exchange.getOut.getBody === "Hello Martin") assert(exchange.getOut.getHeader("k2") === "v2") } - @Test def shouldSendMessageToActorAndReceiveFailure = { + @Test def shouldSendMessageToActorAndReceiveResponseWithAsyncProcessor = { val actor = actorOf(new Tester2 { - override def response(msg: Message) = Failure(new Exception("testmsg"), Map("k3" -> "v3")) + override def response(msg: Message) = Message(super.response(msg), Map("k2" -> "v2")) }).start - val endpoint = mockEndpoint("actor:uuid:%s" format actor.uuid) + val completion = expectAsyncCompletion + val endpoint = actorEndpoint("actor:uuid:%s" format actor.uuid) val exchange = endpoint.createExchange(ExchangePattern.InOut) exchange.getIn.setBody("Martin") exchange.getIn.setHeader("k1", "v1") - endpoint.createProducer.process(exchange) + actorAsyncProducer(endpoint).process(exchange, 
completion) + assert(completion.latch.await(5000, TimeUnit.MILLISECONDS)) + assert(exchange.getOut.getBody === "Hello Martin") + assert(exchange.getOut.getHeader("k2") === "v2") + } + + @Test def shouldSendMessageToActorAndReceiveFailureWithAsyncProcessor = { + val actor = actorOf(new Tester2 { + override def response(msg: Message) = Failure(new Exception("testmsg"), Map("k3" -> "v3")) + }).start + val completion = expectAsyncCompletion + val endpoint = actorEndpoint("actor:uuid:%s" format actor.uuid) + val exchange = endpoint.createExchange(ExchangePattern.InOut) + exchange.getIn.setBody("Martin") + exchange.getIn.setHeader("k1", "v1") + actorAsyncProducer(endpoint).process(exchange, completion) + assert(completion.latch.await(5000, TimeUnit.MILLISECONDS)) assert(exchange.getException.getMessage === "testmsg") assert(exchange.getOut.getBody === null) assert(exchange.getOut.getHeader("k3") === null) // headers from failure message are currently ignored @@ -60,7 +94,7 @@ class ActorProducerTest extends JUnitSuite with BeforeAndAfterAll { @Test def shouldSendMessageToActorAndTimeout(): Unit = { val actor = actorOf[Tester3].start - val endpoint = mockEndpoint("actor:uuid:%s" format actor.uuid) + val endpoint = actorEndpoint("actor:uuid:%s" format actor.uuid) val exchange = endpoint.createExchange(ExchangePattern.InOut) exchange.getIn.setBody("Martin") intercept[TimeoutException] { @@ -68,3 +102,18 @@ class ActorProducerTest extends JUnitSuite with BeforeAndAfterAll { } } } + +object ActorProducerTest { + def expectSyncCompletion = new AsyncCallback { + def done(doneSync: Boolean) = assert(doneSync) + } + + def expectAsyncCompletion = new AsyncCallback { + val latch = new CountDownLatch(1); + def done(doneSync: Boolean) = { + assert(!doneSync) + latch.countDown + } + } + +} diff --git a/akka-camel/src/test/scala/component/ActiveObjectComponentFeatureTest.scala b/akka-camel/src/test/scala/component/TypedActorComponentFeatureTest.scala similarity index 69% rename from 
akka-camel/src/test/scala/component/ActiveObjectComponentFeatureTest.scala rename to akka-camel/src/test/scala/component/TypedActorComponentFeatureTest.scala index d80eedfd7a..12ec9f32ab 100644 --- a/akka-camel/src/test/scala/component/ActiveObjectComponentFeatureTest.scala +++ b/akka-camel/src/test/scala/component/TypedActorComponentFeatureTest.scala @@ -4,7 +4,7 @@ import org.scalatest.{BeforeAndAfterEach, BeforeAndAfterAll, FeatureSpec} import org.apache.camel.builder.RouteBuilder import se.scalablesolutions.akka.actor.Actor._ -import se.scalablesolutions.akka.actor.{ActorRegistry, ActiveObject} +import se.scalablesolutions.akka.actor.{ActorRegistry, TypedActor} import se.scalablesolutions.akka.camel._ import org.apache.camel.impl.{DefaultCamelContext, SimpleRegistry} import org.apache.camel.{ResolveEndpointFailedException, ExchangePattern, Exchange, Processor} @@ -12,14 +12,14 @@ import org.apache.camel.{ResolveEndpointFailedException, ExchangePattern, Exchan /** * @author Martin Krasser */ -class ActiveObjectComponentFeatureTest extends FeatureSpec with BeforeAndAfterAll with BeforeAndAfterEach { - import ActiveObjectComponentFeatureTest._ +class TypedActorComponentFeatureTest extends FeatureSpec with BeforeAndAfterAll with BeforeAndAfterEach { + import TypedActorComponentFeatureTest._ import CamelContextManager.template override protected def beforeAll = { - val activePojo = ActiveObject.newInstance(classOf[Pojo]) // not a consumer - val activePojoBase = ActiveObject.newInstance(classOf[PojoBase]) - val activePojoIntf = ActiveObject.newInstance(classOf[PojoIntf], new PojoImpl) + val activePojo = TypedActor.newInstance(classOf[PojoNonConsumerIntf], classOf[PojoNonConsumer]) // not a consumer + val activePojoBase = TypedActor.newInstance(classOf[PojoBaseIntf], classOf[PojoBase]) + val activePojoIntf = TypedActor.newInstance(classOf[PojoIntf], classOf[PojoImpl]) val registry = new SimpleRegistry registry.put("pojo", activePojo) @@ -28,8 +28,8 @@ class 
ActiveObjectComponentFeatureTest extends FeatureSpec with BeforeAndAfterAl CamelContextManager.context.addRoutes(new CustomRouteBuilder) CamelContextManager.start - CamelContextManager.activeObjectRegistry.put("base", activePojoBase) - CamelContextManager.activeObjectRegistry.put("intf", activePojoIntf) + CamelContextManager.typedActorRegistry.put("base", activePojoBase) + CamelContextManager.typedActorRegistry.put("intf", activePojoIntf) } override protected def afterAll = { @@ -37,8 +37,8 @@ class ActiveObjectComponentFeatureTest extends FeatureSpec with BeforeAndAfterAl ActorRegistry.shutdownAll } - feature("Communicate with an active object from a Camel application using active object endpoint URIs") { - import ActiveObjectComponent.InternalSchema + feature("Communicate with an typed actor from a Camel application using typed actor endpoint URIs") { + import TypedActorComponent.InternalSchema import ExchangePattern._ scenario("in-out exchange with proxy created from interface and method returning String") { @@ -81,25 +81,25 @@ class ActiveObjectComponentFeatureTest extends FeatureSpec with BeforeAndAfterAl } } - feature("Communicate with an active object from a Camel application from a custom Camel route") { + feature("Communicate with an typed actor from a Camel application from a custom Camel route") { - scenario("in-out exchange with externally registered active object") { + scenario("in-out exchange with externally registered typed actor") { val result = template.requestBody("direct:test", "test") assert(result === "foo: test") } - scenario("in-out exchange with internally registered active object not possible") { + scenario("in-out exchange with internally registered typed actor not possible") { intercept[ResolveEndpointFailedException] { - template.requestBodyAndHeader("active-object:intf?method=m2", "x", "test", "y") + template.requestBodyAndHeader("typed-actor:intf?method=m2", "x", "test", "y") } } } } -object ActiveObjectComponentFeatureTest { +object 
TypedActorComponentFeatureTest { class CustomRouteBuilder extends RouteBuilder { def configure = { - from("direct:test").to("active-object:pojo?method=foo") + from("direct:test").to("typed-actor:pojo?method=foo") } } } diff --git a/akka-core/src/main/java/se/scalablesolutions/akka/annotation/configuration.java b/akka-core/src/main/java/se/scalablesolutions/akka/annotation/configuration.java deleted file mode 100644 index 9c5375398b..0000000000 --- a/akka-core/src/main/java/se/scalablesolutions/akka/annotation/configuration.java +++ /dev/null @@ -1,11 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor.annotation; - -import java.lang.annotation.*; - -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.TYPE) -public @interface configuration {} diff --git a/akka-core/src/main/java/se/scalablesolutions/akka/annotation/inittransactionalstate.java b/akka-core/src/main/java/se/scalablesolutions/akka/annotation/inittransactionalstate.java deleted file mode 100644 index 35c5f05afe..0000000000 --- a/akka-core/src/main/java/se/scalablesolutions/akka/annotation/inittransactionalstate.java +++ /dev/null @@ -1,11 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor.annotation; - -import java.lang.annotation.*; - -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.METHOD) -public @interface inittransactionalstate {} diff --git a/akka-core/src/main/java/se/scalablesolutions/akka/annotation/postrestart.java b/akka-core/src/main/java/se/scalablesolutions/akka/annotation/postrestart.java deleted file mode 100644 index 5eed474832..0000000000 --- a/akka-core/src/main/java/se/scalablesolutions/akka/annotation/postrestart.java +++ /dev/null @@ -1,11 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor.annotation; - -import java.lang.annotation.*; - -@Retention(RetentionPolicy.RUNTIME) 
-@Target(ElementType.METHOD) -public @interface postrestart {} diff --git a/akka-core/src/main/java/se/scalablesolutions/akka/annotation/prerestart.java b/akka-core/src/main/java/se/scalablesolutions/akka/annotation/prerestart.java deleted file mode 100644 index 94f9a01405..0000000000 --- a/akka-core/src/main/java/se/scalablesolutions/akka/annotation/prerestart.java +++ /dev/null @@ -1,11 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor.annotation; - -import java.lang.annotation.*; - -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.METHOD) -public @interface prerestart {} diff --git a/akka-core/src/main/java/se/scalablesolutions/akka/annotation/shutdown.java b/akka-core/src/main/java/se/scalablesolutions/akka/annotation/shutdown.java deleted file mode 100644 index f806e7bca6..0000000000 --- a/akka-core/src/main/java/se/scalablesolutions/akka/annotation/shutdown.java +++ /dev/null @@ -1,14 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor.annotation; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.METHOD) -public @interface shutdown {} \ No newline at end of file diff --git a/akka-core/src/main/java/se/scalablesolutions/akka/annotation/state.java b/akka-core/src/main/java/se/scalablesolutions/akka/annotation/state.java deleted file mode 100644 index 509d129c1b..0000000000 --- a/akka-core/src/main/java/se/scalablesolutions/akka/annotation/state.java +++ /dev/null @@ -1,11 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor.annotation; - -import java.lang.annotation.*; - -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.FIELD) -public @interface state {} diff --git 
a/akka-core/src/main/java/se/scalablesolutions/akka/annotation/transactionrequired.java b/akka-core/src/main/java/se/scalablesolutions/akka/annotation/transactionrequired.java deleted file mode 100644 index c41a09ee46..0000000000 --- a/akka-core/src/main/java/se/scalablesolutions/akka/annotation/transactionrequired.java +++ /dev/null @@ -1,11 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor.annotation; - -import java.lang.annotation.*; - -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.TYPE) -public @interface transactionrequired {} diff --git a/akka-core/src/main/java/se/scalablesolutions/akka/config/ActiveObjectGuiceModule.java b/akka-core/src/main/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java similarity index 85% rename from akka-core/src/main/java/se/scalablesolutions/akka/config/ActiveObjectGuiceModule.java rename to akka-core/src/main/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java index 60dfd4cadd..0c2ed11402 100644 --- a/akka-core/src/main/java/se/scalablesolutions/akka/config/ActiveObjectGuiceModule.java +++ b/akka-core/src/main/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java @@ -13,10 +13,10 @@ import com.google.inject.Singleton; /** * @author Jonas Bonér */ -public class ActiveObjectGuiceModule extends AbstractModule { +public class TypedActorGuiceModule extends AbstractModule { private final List bindings; - public ActiveObjectGuiceModule(final List bindings) { + public TypedActorGuiceModule(final List bindings) { this.bindings = bindings; } diff --git a/akka-core/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java b/akka-core/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java index 648f53842b..8babc16770 100644 --- a/akka-core/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java +++ 
b/akka-core/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java @@ -8,6 +8,75 @@ public final class RemoteProtocol { public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } + public enum ActorType + implements com.google.protobuf.ProtocolMessageEnum { + SCALA_ACTOR(0, 1), + JAVA_ACTOR(1, 2), + TYPED_ACTOR(2, 3), + ; + + + public final int getNumber() { return value; } + + public static ActorType valueOf(int value) { + switch (value) { + case 1: return SCALA_ACTOR; + case 2: return JAVA_ACTOR; + case 3: return TYPED_ACTOR; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public ActorType findValueByNumber(int number) { + return ActorType.valueOf(number) + ; } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.getDescriptor().getEnumTypes().get(0); + } + + private static final ActorType[] VALUES = { + SCALA_ACTOR, JAVA_ACTOR, TYPED_ACTOR, + }; + public static ActorType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + private final int index; + private final int value; + private ActorType(int index, int value) { + this.index = index; + this.value = value; + } + + static { + 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.getDescriptor(); + } + + // @@protoc_insertion_point(enum_scope:ActorType) + } + public enum SerializationSchemeType implements com.google.protobuf.ProtocolMessageEnum { JAVA(0, 1), @@ -53,7 +122,7 @@ public final class RemoteProtocol { } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { - return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.getDescriptor().getEnumTypes().get(0); + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.getDescriptor().getEnumTypes().get(1); } private static final SerializationSchemeType[] VALUES = { @@ -120,7 +189,7 @@ public final class RemoteProtocol { } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { - return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.getDescriptor().getEnumTypes().get(1); + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.getDescriptor().getEnumTypes().get(2); } private static final LifeCycleType[] VALUES = { @@ -667,29 +736,36 @@ public final class RemoteProtocol { public boolean hasTimeout() { return hasTimeout; } public long getTimeout() { return timeout_; } - // optional .LifeCycleProtocol lifeCycle = 9; - public static final int LIFECYCLE_FIELD_NUMBER = 9; + // optional uint64 receiveTimeout = 9; + public static final int RECEIVETIMEOUT_FIELD_NUMBER = 9; + private boolean hasReceiveTimeout; + private long receiveTimeout_ = 0L; + public boolean hasReceiveTimeout() { return hasReceiveTimeout; } + public long getReceiveTimeout() { return receiveTimeout_; } + + // optional .LifeCycleProtocol lifeCycle = 10; + public static final int LIFECYCLE_FIELD_NUMBER = 10; private boolean hasLifeCycle; private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol lifeCycle_; public boolean hasLifeCycle() { return hasLifeCycle; } public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol getLifeCycle() { 
return lifeCycle_; } - // optional .RemoteActorRefProtocol supervisor = 10; - public static final int SUPERVISOR_FIELD_NUMBER = 10; + // optional .RemoteActorRefProtocol supervisor = 11; + public static final int SUPERVISOR_FIELD_NUMBER = 11; private boolean hasSupervisor; private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol supervisor_; public boolean hasSupervisor() { return hasSupervisor; } public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol getSupervisor() { return supervisor_; } - // optional bytes hotswapStack = 11; - public static final int HOTSWAPSTACK_FIELD_NUMBER = 11; + // optional bytes hotswapStack = 12; + public static final int HOTSWAPSTACK_FIELD_NUMBER = 12; private boolean hasHotswapStack; private com.google.protobuf.ByteString hotswapStack_ = com.google.protobuf.ByteString.EMPTY; public boolean hasHotswapStack() { return hasHotswapStack; } public com.google.protobuf.ByteString getHotswapStack() { return hotswapStack_; } - // repeated .RemoteRequestProtocol messages = 12; - public static final int MESSAGES_FIELD_NUMBER = 12; + // repeated .RemoteRequestProtocol messages = 13; + public static final int MESSAGES_FIELD_NUMBER = 13; private java.util.List messages_ = java.util.Collections.emptyList(); public java.util.List getMessagesList() { @@ -750,17 +826,20 @@ public final class RemoteProtocol { if (hasTimeout()) { output.writeUInt64(8, getTimeout()); } + if (hasReceiveTimeout()) { + output.writeUInt64(9, getReceiveTimeout()); + } if (hasLifeCycle()) { - output.writeMessage(9, getLifeCycle()); + output.writeMessage(10, getLifeCycle()); } if (hasSupervisor()) { - output.writeMessage(10, getSupervisor()); + output.writeMessage(11, getSupervisor()); } if (hasHotswapStack()) { - output.writeBytes(11, getHotswapStack()); + output.writeBytes(12, getHotswapStack()); } for (se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol element : getMessagesList()) { - 
output.writeMessage(12, element); + output.writeMessage(13, element); } getUnknownFields().writeTo(output); } @@ -803,21 +882,25 @@ public final class RemoteProtocol { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(8, getTimeout()); } + if (hasReceiveTimeout()) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(9, getReceiveTimeout()); + } if (hasLifeCycle()) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(9, getLifeCycle()); + .computeMessageSize(10, getLifeCycle()); } if (hasSupervisor()) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(10, getSupervisor()); + .computeMessageSize(11, getSupervisor()); } if (hasHotswapStack()) { size += com.google.protobuf.CodedOutputStream - .computeBytesSize(11, getHotswapStack()); + .computeBytesSize(12, getHotswapStack()); } for (se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol element : getMessagesList()) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(12, element); + .computeMessageSize(13, element); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -1005,6 +1088,9 @@ public final class RemoteProtocol { if (other.hasTimeout()) { setTimeout(other.getTimeout()); } + if (other.hasReceiveTimeout()) { + setReceiveTimeout(other.getReceiveTimeout()); + } if (other.hasLifeCycle()) { mergeLifeCycle(other.getLifeCycle()); } @@ -1082,7 +1168,11 @@ public final class RemoteProtocol { setTimeout(input.readUInt64()); break; } - case 74: { + case 72: { + setReceiveTimeout(input.readUInt64()); + break; + } + case 82: { se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.newBuilder(); if (hasLifeCycle()) { subBuilder.mergeFrom(getLifeCycle()); @@ -1091,7 +1181,7 @@ public final class RemoteProtocol { setLifeCycle(subBuilder.buildPartial()); break; } - case 82: 
{ + case 90: { se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.newBuilder(); if (hasSupervisor()) { subBuilder.mergeFrom(getSupervisor()); @@ -1100,11 +1190,11 @@ public final class RemoteProtocol { setSupervisor(subBuilder.buildPartial()); break; } - case 90: { + case 98: { setHotswapStack(input.readBytes()); break; } - case 98: { + case 106: { se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.newBuilder(); input.readMessage(subBuilder, extensionRegistry); addMessages(subBuilder.buildPartial()); @@ -1293,7 +1383,25 @@ public final class RemoteProtocol { return this; } - // optional .LifeCycleProtocol lifeCycle = 9; + // optional uint64 receiveTimeout = 9; + public boolean hasReceiveTimeout() { + return result.hasReceiveTimeout(); + } + public long getReceiveTimeout() { + return result.getReceiveTimeout(); + } + public Builder setReceiveTimeout(long value) { + result.hasReceiveTimeout = true; + result.receiveTimeout_ = value; + return this; + } + public Builder clearReceiveTimeout() { + result.hasReceiveTimeout = false; + result.receiveTimeout_ = 0L; + return this; + } + + // optional .LifeCycleProtocol lifeCycle = 10; public boolean hasLifeCycle() { return result.hasLifeCycle(); } @@ -1330,7 +1438,7 @@ public final class RemoteProtocol { return this; } - // optional .RemoteActorRefProtocol supervisor = 10; + // optional .RemoteActorRefProtocol supervisor = 11; public boolean hasSupervisor() { return result.hasSupervisor(); } @@ -1367,7 +1475,7 @@ public final class RemoteProtocol { return this; } - // optional bytes hotswapStack = 11; + // optional bytes hotswapStack = 12; public boolean hasHotswapStack() { return result.hasHotswapStack(); } @@ -1388,7 +1496,7 @@ public final class RemoteProtocol { return 
this; } - // repeated .RemoteRequestProtocol messages = 12; + // repeated .RemoteRequestProtocol messages = 13; public java.util.List getMessagesList() { return java.util.Collections.unmodifiableList(result.messages_); } @@ -1831,6 +1939,825 @@ public final class RemoteProtocol { // @@protoc_insertion_point(class_scope:MessageProtocol) } + public static final class ActorInfoProtocol extends + com.google.protobuf.GeneratedMessage { + // Use ActorInfoProtocol.newBuilder() to construct. + private ActorInfoProtocol() { + initFields(); + } + private ActorInfoProtocol(boolean noInit) {} + + private static final ActorInfoProtocol defaultInstance; + public static ActorInfoProtocol getDefaultInstance() { + return defaultInstance; + } + + public ActorInfoProtocol getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_ActorInfoProtocol_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_ActorInfoProtocol_fieldAccessorTable; + } + + // required string uuid = 1; + public static final int UUID_FIELD_NUMBER = 1; + private boolean hasUuid; + private java.lang.String uuid_ = ""; + public boolean hasUuid() { return hasUuid; } + public java.lang.String getUuid() { return uuid_; } + + // required string target = 2; + public static final int TARGET_FIELD_NUMBER = 2; + private boolean hasTarget; + private java.lang.String target_ = ""; + public boolean hasTarget() { return hasTarget; } + public java.lang.String getTarget() { return target_; } + + // required uint64 timeout = 3; + public static final int TIMEOUT_FIELD_NUMBER = 3; + private boolean hasTimeout; + private long timeout_ = 0L; + public boolean hasTimeout() { return hasTimeout; } + public long getTimeout() 
{ return timeout_; } + + // required .ActorType actorType = 4; + public static final int ACTORTYPE_FIELD_NUMBER = 4; + private boolean hasActorType; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType actorType_; + public boolean hasActorType() { return hasActorType; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType getActorType() { return actorType_; } + + // optional .TypedActorInfoProtocol typedActorInfo = 5; + public static final int TYPEDACTORINFO_FIELD_NUMBER = 5; + private boolean hasTypedActorInfo; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol typedActorInfo_; + public boolean hasTypedActorInfo() { return hasTypedActorInfo; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol getTypedActorInfo() { return typedActorInfo_; } + + private void initFields() { + actorType_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType.SCALA_ACTOR; + typedActorInfo_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.getDefaultInstance(); + } + public final boolean isInitialized() { + if (!hasUuid) return false; + if (!hasTarget) return false; + if (!hasTimeout) return false; + if (!hasActorType) return false; + if (hasTypedActorInfo()) { + if (!getTypedActorInfo().isInitialized()) return false; + } + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (hasUuid()) { + output.writeString(1, getUuid()); + } + if (hasTarget()) { + output.writeString(2, getTarget()); + } + if (hasTimeout()) { + output.writeUInt64(3, getTimeout()); + } + if (hasActorType()) { + output.writeEnum(4, getActorType().getNumber()); + } + if (hasTypedActorInfo()) { + output.writeMessage(5, getTypedActorInfo()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() 
{ + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (hasUuid()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(1, getUuid()); + } + if (hasTarget()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(2, getTarget()); + } + if (hasTimeout()) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, getTimeout()); + } + if (hasActorType()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(4, getActorType().getNumber()); + } + if (hasTypedActorInfo()) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, getTypedActorInfo()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder { + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol result; + + // Construct using se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.newBuilder() + private Builder() {} + + private static Builder create() { + Builder builder = new Builder(); + builder.result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol(); + return builder; + } + + protected se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol internalGetResult() { + return result; + } + + public Builder clear() { + if (result == null) { + throw new IllegalStateException( + "Cannot call clear() after build()."); + } + result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol(); + return this; + } + + public Builder clone() { + return create().mergeFrom(result); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.getDescriptor(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol getDefaultInstanceForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.getDefaultInstance(); + } + + public boolean isInitialized() { + return result.isInitialized(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol build() { + if (result != null && !isInitialized()) { + throw newUninitializedMessageException(result); + } + return buildPartial(); + } + + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol buildParsed() + throws 
com.google.protobuf.InvalidProtocolBufferException { + if (!isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return buildPartial(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol buildPartial() { + if (result == null) { + throw new IllegalStateException( + "build() has already been called on this Builder."); + } + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol returnMe = result; + result = null; + return returnMe; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol) { + return mergeFrom((se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol other) { + if (other == se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.getDefaultInstance()) return this; + if (other.hasUuid()) { + setUuid(other.getUuid()); + } + if (other.hasTarget()) { + setTarget(other.getTarget()); + } + if (other.hasTimeout()) { + setTimeout(other.getTimeout()); + } + if (other.hasActorType()) { + setActorType(other.getActorType()); + } + if (other.hasTypedActorInfo()) { + mergeTypedActorInfo(other.getTypedActorInfo()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + return this; + 
default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + return this; + } + break; + } + case 10: { + setUuid(input.readString()); + break; + } + case 18: { + setTarget(input.readString()); + break; + } + case 24: { + setTimeout(input.readUInt64()); + break; + } + case 32: { + int rawValue = input.readEnum(); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType value = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(4, rawValue); + } else { + setActorType(value); + } + break; + } + case 42: { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.newBuilder(); + if (hasTypedActorInfo()) { + subBuilder.mergeFrom(getTypedActorInfo()); + } + input.readMessage(subBuilder, extensionRegistry); + setTypedActorInfo(subBuilder.buildPartial()); + break; + } + } + } + } + + + // required string uuid = 1; + public boolean hasUuid() { + return result.hasUuid(); + } + public java.lang.String getUuid() { + return result.getUuid(); + } + public Builder setUuid(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasUuid = true; + result.uuid_ = value; + return this; + } + public Builder clearUuid() { + result.hasUuid = false; + result.uuid_ = getDefaultInstance().getUuid(); + return this; + } + + // required string target = 2; + public boolean hasTarget() { + return result.hasTarget(); + } + public java.lang.String getTarget() { + return result.getTarget(); + } + public Builder setTarget(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasTarget = true; + result.target_ = value; + return this; + } + public Builder clearTarget() { + result.hasTarget = false; + result.target_ = 
getDefaultInstance().getTarget(); + return this; + } + + // required uint64 timeout = 3; + public boolean hasTimeout() { + return result.hasTimeout(); + } + public long getTimeout() { + return result.getTimeout(); + } + public Builder setTimeout(long value) { + result.hasTimeout = true; + result.timeout_ = value; + return this; + } + public Builder clearTimeout() { + result.hasTimeout = false; + result.timeout_ = 0L; + return this; + } + + // required .ActorType actorType = 4; + public boolean hasActorType() { + return result.hasActorType(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType getActorType() { + return result.getActorType(); + } + public Builder setActorType(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasActorType = true; + result.actorType_ = value; + return this; + } + public Builder clearActorType() { + result.hasActorType = false; + result.actorType_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType.SCALA_ACTOR; + return this; + } + + // optional .TypedActorInfoProtocol typedActorInfo = 5; + public boolean hasTypedActorInfo() { + return result.hasTypedActorInfo(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol getTypedActorInfo() { + return result.getTypedActorInfo(); + } + public Builder setTypedActorInfo(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasTypedActorInfo = true; + result.typedActorInfo_ = value; + return this; + } + public Builder setTypedActorInfo(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.Builder builderForValue) { + result.hasTypedActorInfo = true; + result.typedActorInfo_ = builderForValue.build(); + return this; + } + public Builder 
mergeTypedActorInfo(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol value) { + if (result.hasTypedActorInfo() && + result.typedActorInfo_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.getDefaultInstance()) { + result.typedActorInfo_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.newBuilder(result.typedActorInfo_).mergeFrom(value).buildPartial(); + } else { + result.typedActorInfo_ = value; + } + result.hasTypedActorInfo = true; + return this; + } + public Builder clearTypedActorInfo() { + result.hasTypedActorInfo = false; + result.typedActorInfo_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.getDefaultInstance(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ActorInfoProtocol) + } + + static { + defaultInstance = new ActorInfoProtocol(true); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internalForceInit(); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ActorInfoProtocol) + } + + public static final class TypedActorInfoProtocol extends + com.google.protobuf.GeneratedMessage { + // Use TypedActorInfoProtocol.newBuilder() to construct. 
+ private TypedActorInfoProtocol() { + initFields(); + } + private TypedActorInfoProtocol(boolean noInit) {} + + private static final TypedActorInfoProtocol defaultInstance; + public static TypedActorInfoProtocol getDefaultInstance() { + return defaultInstance; + } + + public TypedActorInfoProtocol getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_TypedActorInfoProtocol_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_TypedActorInfoProtocol_fieldAccessorTable; + } + + // required string interface = 1; + public static final int INTERFACE_FIELD_NUMBER = 1; + private boolean hasInterface; + private java.lang.String interface_ = ""; + public boolean hasInterface() { return hasInterface; } + public java.lang.String getInterface() { return interface_; } + + // required string method = 2; + public static final int METHOD_FIELD_NUMBER = 2; + private boolean hasMethod; + private java.lang.String method_ = ""; + public boolean hasMethod() { return hasMethod; } + public java.lang.String getMethod() { return method_; } + + private void initFields() { + } + public final boolean isInitialized() { + if (!hasInterface) return false; + if (!hasMethod) return false; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (hasInterface()) { + output.writeString(1, getInterface()); + } + if (hasMethod()) { + output.writeString(2, getMethod()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if 
(hasInterface()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(1, getInterface()); + } + if (hasMethod()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(2, getMethod()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, 
extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder { + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol result; + + // Construct using se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.newBuilder() 
+ private Builder() {} + + private static Builder create() { + Builder builder = new Builder(); + builder.result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol(); + return builder; + } + + protected se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol internalGetResult() { + return result; + } + + public Builder clear() { + if (result == null) { + throw new IllegalStateException( + "Cannot call clear() after build()."); + } + result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol(); + return this; + } + + public Builder clone() { + return create().mergeFrom(result); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.getDescriptor(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol getDefaultInstanceForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.getDefaultInstance(); + } + + public boolean isInitialized() { + return result.isInitialized(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol build() { + if (result != null && !isInitialized()) { + throw newUninitializedMessageException(result); + } + return buildPartial(); + } + + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + if (!isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return buildPartial(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol buildPartial() { + if (result == null) { + throw new IllegalStateException( + "build() has already been called on this Builder."); + } + 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol returnMe = result; + result = null; + return returnMe; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol) { + return mergeFrom((se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol other) { + if (other == se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.getDefaultInstance()) return this; + if (other.hasInterface()) { + setInterface(other.getInterface()); + } + if (other.hasMethod()) { + setMethod(other.getMethod()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + return this; + } + break; + } + case 10: { + setInterface(input.readString()); + break; + } + case 18: { + setMethod(input.readString()); + break; + } + } + } + } + + + // required string interface = 1; + public boolean hasInterface() { + return result.hasInterface(); + } + public java.lang.String getInterface() { + return result.getInterface(); + } + public Builder setInterface(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + 
result.hasInterface = true; + result.interface_ = value; + return this; + } + public Builder clearInterface() { + result.hasInterface = false; + result.interface_ = getDefaultInstance().getInterface(); + return this; + } + + // required string method = 2; + public boolean hasMethod() { + return result.hasMethod(); + } + public java.lang.String getMethod() { + return result.getMethod(); + } + public Builder setMethod(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasMethod = true; + result.method_ = value; + return this; + } + public Builder clearMethod() { + result.hasMethod = false; + result.method_ = getDefaultInstance().getMethod(); + return this; + } + + // @@protoc_insertion_point(builder_scope:TypedActorInfoProtocol) + } + + static { + defaultInstance = new TypedActorInfoProtocol(true); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internalForceInit(); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:TypedActorInfoProtocol) + } + public static final class RemoteRequestProtocol extends com.google.protobuf.GeneratedMessage { // Use RemoteRequestProtocol.newBuilder() to construct. 
@@ -1872,64 +2799,29 @@ public final class RemoteProtocol { public boolean hasMessage() { return hasMessage; } public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol getMessage() { return message_; } - // optional string method = 3; - public static final int METHOD_FIELD_NUMBER = 3; - private boolean hasMethod; - private java.lang.String method_ = ""; - public boolean hasMethod() { return hasMethod; } - public java.lang.String getMethod() { return method_; } + // required .ActorInfoProtocol actorInfo = 3; + public static final int ACTORINFO_FIELD_NUMBER = 3; + private boolean hasActorInfo; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol actorInfo_; + public boolean hasActorInfo() { return hasActorInfo; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol getActorInfo() { return actorInfo_; } - // required string target = 4; - public static final int TARGET_FIELD_NUMBER = 4; - private boolean hasTarget; - private java.lang.String target_ = ""; - public boolean hasTarget() { return hasTarget; } - public java.lang.String getTarget() { return target_; } - - // required string uuid = 5; - public static final int UUID_FIELD_NUMBER = 5; - private boolean hasUuid; - private java.lang.String uuid_ = ""; - public boolean hasUuid() { return hasUuid; } - public java.lang.String getUuid() { return uuid_; } - - // required uint64 timeout = 6; - public static final int TIMEOUT_FIELD_NUMBER = 6; - private boolean hasTimeout; - private long timeout_ = 0L; - public boolean hasTimeout() { return hasTimeout; } - public long getTimeout() { return timeout_; } - - // optional string supervisorUuid = 7; - public static final int SUPERVISORUUID_FIELD_NUMBER = 7; - private boolean hasSupervisorUuid; - private java.lang.String supervisorUuid_ = ""; - public boolean hasSupervisorUuid() { return hasSupervisorUuid; } - public java.lang.String getSupervisorUuid() { return supervisorUuid_; } - - // 
required bool isActor = 8; - public static final int ISACTOR_FIELD_NUMBER = 8; - private boolean hasIsActor; - private boolean isActor_ = false; - public boolean hasIsActor() { return hasIsActor; } - public boolean getIsActor() { return isActor_; } - - // required bool isOneWay = 9; - public static final int ISONEWAY_FIELD_NUMBER = 9; + // required bool isOneWay = 4; + public static final int ISONEWAY_FIELD_NUMBER = 4; private boolean hasIsOneWay; private boolean isOneWay_ = false; public boolean hasIsOneWay() { return hasIsOneWay; } public boolean getIsOneWay() { return isOneWay_; } - // required bool isEscaped = 10; - public static final int ISESCAPED_FIELD_NUMBER = 10; - private boolean hasIsEscaped; - private boolean isEscaped_ = false; - public boolean hasIsEscaped() { return hasIsEscaped; } - public boolean getIsEscaped() { return isEscaped_; } + // optional string supervisorUuid = 5; + public static final int SUPERVISORUUID_FIELD_NUMBER = 5; + private boolean hasSupervisorUuid; + private java.lang.String supervisorUuid_ = ""; + public boolean hasSupervisorUuid() { return hasSupervisorUuid; } + public java.lang.String getSupervisorUuid() { return supervisorUuid_; } - // optional .RemoteActorRefProtocol sender = 11; - public static final int SENDER_FIELD_NUMBER = 11; + // optional .RemoteActorRefProtocol sender = 6; + public static final int SENDER_FIELD_NUMBER = 6; private boolean hasSender; private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol sender_; public boolean hasSender() { return hasSender; } @@ -1937,18 +2829,16 @@ public final class RemoteProtocol { private void initFields() { message_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.getDefaultInstance(); + actorInfo_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.getDefaultInstance(); sender_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.getDefaultInstance(); } public final 
boolean isInitialized() { if (!hasId) return false; if (!hasMessage) return false; - if (!hasTarget) return false; - if (!hasUuid) return false; - if (!hasTimeout) return false; - if (!hasIsActor) return false; + if (!hasActorInfo) return false; if (!hasIsOneWay) return false; - if (!hasIsEscaped) return false; if (!getMessage().isInitialized()) return false; + if (!getActorInfo().isInitialized()) return false; if (hasSender()) { if (!getSender().isInitialized()) return false; } @@ -1964,32 +2854,17 @@ public final class RemoteProtocol { if (hasMessage()) { output.writeMessage(2, getMessage()); } - if (hasMethod()) { - output.writeString(3, getMethod()); - } - if (hasTarget()) { - output.writeString(4, getTarget()); - } - if (hasUuid()) { - output.writeString(5, getUuid()); - } - if (hasTimeout()) { - output.writeUInt64(6, getTimeout()); - } - if (hasSupervisorUuid()) { - output.writeString(7, getSupervisorUuid()); - } - if (hasIsActor()) { - output.writeBool(8, getIsActor()); + if (hasActorInfo()) { + output.writeMessage(3, getActorInfo()); } if (hasIsOneWay()) { - output.writeBool(9, getIsOneWay()); + output.writeBool(4, getIsOneWay()); } - if (hasIsEscaped()) { - output.writeBool(10, getIsEscaped()); + if (hasSupervisorUuid()) { + output.writeString(5, getSupervisorUuid()); } if (hasSender()) { - output.writeMessage(11, getSender()); + output.writeMessage(6, getSender()); } getUnknownFields().writeTo(output); } @@ -2008,41 +2883,21 @@ public final class RemoteProtocol { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getMessage()); } - if (hasMethod()) { + if (hasActorInfo()) { size += com.google.protobuf.CodedOutputStream - .computeStringSize(3, getMethod()); - } - if (hasTarget()) { - size += com.google.protobuf.CodedOutputStream - .computeStringSize(4, getTarget()); - } - if (hasUuid()) { - size += com.google.protobuf.CodedOutputStream - .computeStringSize(5, getUuid()); - } - if (hasTimeout()) { - size += 
com.google.protobuf.CodedOutputStream - .computeUInt64Size(6, getTimeout()); - } - if (hasSupervisorUuid()) { - size += com.google.protobuf.CodedOutputStream - .computeStringSize(7, getSupervisorUuid()); - } - if (hasIsActor()) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(8, getIsActor()); + .computeMessageSize(3, getActorInfo()); } if (hasIsOneWay()) { size += com.google.protobuf.CodedOutputStream - .computeBoolSize(9, getIsOneWay()); + .computeBoolSize(4, getIsOneWay()); } - if (hasIsEscaped()) { + if (hasSupervisorUuid()) { size += com.google.protobuf.CodedOutputStream - .computeBoolSize(10, getIsEscaped()); + .computeStringSize(5, getSupervisorUuid()); } if (hasSender()) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(11, getSender()); + .computeMessageSize(6, getSender()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -2208,29 +3063,14 @@ public final class RemoteProtocol { if (other.hasMessage()) { mergeMessage(other.getMessage()); } - if (other.hasMethod()) { - setMethod(other.getMethod()); - } - if (other.hasTarget()) { - setTarget(other.getTarget()); - } - if (other.hasUuid()) { - setUuid(other.getUuid()); - } - if (other.hasTimeout()) { - setTimeout(other.getTimeout()); - } - if (other.hasSupervisorUuid()) { - setSupervisorUuid(other.getSupervisorUuid()); - } - if (other.hasIsActor()) { - setIsActor(other.getIsActor()); + if (other.hasActorInfo()) { + mergeActorInfo(other.getActorInfo()); } if (other.hasIsOneWay()) { setIsOneWay(other.getIsOneWay()); } - if (other.hasIsEscaped()) { - setIsEscaped(other.getIsEscaped()); + if (other.hasSupervisorUuid()) { + setSupervisorUuid(other.getSupervisorUuid()); } if (other.hasSender()) { mergeSender(other.getSender()); @@ -2274,38 +3114,23 @@ public final class RemoteProtocol { break; } case 26: { - setMethod(input.readString()); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.Builder subBuilder = 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.newBuilder(); + if (hasActorInfo()) { + subBuilder.mergeFrom(getActorInfo()); + } + input.readMessage(subBuilder, extensionRegistry); + setActorInfo(subBuilder.buildPartial()); break; } - case 34: { - setTarget(input.readString()); - break; - } - case 42: { - setUuid(input.readString()); - break; - } - case 48: { - setTimeout(input.readUInt64()); - break; - } - case 58: { - setSupervisorUuid(input.readString()); - break; - } - case 64: { - setIsActor(input.readBool()); - break; - } - case 72: { + case 32: { setIsOneWay(input.readBool()); break; } - case 80: { - setIsEscaped(input.readBool()); + case 42: { + setSupervisorUuid(input.readString()); break; } - case 90: { + case 50: { se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.newBuilder(); if (hasSender()) { subBuilder.mergeFrom(getSender()); @@ -2374,88 +3199,62 @@ public final class RemoteProtocol { return this; } - // optional string method = 3; - public boolean hasMethod() { - return result.hasMethod(); + // required .ActorInfoProtocol actorInfo = 3; + public boolean hasActorInfo() { + return result.hasActorInfo(); } - public java.lang.String getMethod() { - return result.getMethod(); + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol getActorInfo() { + return result.getActorInfo(); } - public Builder setMethod(java.lang.String value) { + public Builder setActorInfo(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol value) { if (value == null) { - throw new NullPointerException(); - } - result.hasMethod = true; - result.method_ = value; + throw new NullPointerException(); + } + result.hasActorInfo = true; + result.actorInfo_ = value; return this; } - public Builder clearMethod() { - result.hasMethod = false; - result.method_ = 
getDefaultInstance().getMethod(); + public Builder setActorInfo(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.Builder builderForValue) { + result.hasActorInfo = true; + result.actorInfo_ = builderForValue.build(); + return this; + } + public Builder mergeActorInfo(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol value) { + if (result.hasActorInfo() && + result.actorInfo_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.getDefaultInstance()) { + result.actorInfo_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.newBuilder(result.actorInfo_).mergeFrom(value).buildPartial(); + } else { + result.actorInfo_ = value; + } + result.hasActorInfo = true; + return this; + } + public Builder clearActorInfo() { + result.hasActorInfo = false; + result.actorInfo_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.getDefaultInstance(); return this; } - // required string target = 4; - public boolean hasTarget() { - return result.hasTarget(); + // required bool isOneWay = 4; + public boolean hasIsOneWay() { + return result.hasIsOneWay(); } - public java.lang.String getTarget() { - return result.getTarget(); + public boolean getIsOneWay() { + return result.getIsOneWay(); } - public Builder setTarget(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - result.hasTarget = true; - result.target_ = value; + public Builder setIsOneWay(boolean value) { + result.hasIsOneWay = true; + result.isOneWay_ = value; return this; } - public Builder clearTarget() { - result.hasTarget = false; - result.target_ = getDefaultInstance().getTarget(); + public Builder clearIsOneWay() { + result.hasIsOneWay = false; + result.isOneWay_ = false; return this; } - // required string uuid = 5; - public boolean hasUuid() { - return result.hasUuid(); - } - public java.lang.String getUuid() { - return result.getUuid(); - } - public Builder 
setUuid(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - result.hasUuid = true; - result.uuid_ = value; - return this; - } - public Builder clearUuid() { - result.hasUuid = false; - result.uuid_ = getDefaultInstance().getUuid(); - return this; - } - - // required uint64 timeout = 6; - public boolean hasTimeout() { - return result.hasTimeout(); - } - public long getTimeout() { - return result.getTimeout(); - } - public Builder setTimeout(long value) { - result.hasTimeout = true; - result.timeout_ = value; - return this; - } - public Builder clearTimeout() { - result.hasTimeout = false; - result.timeout_ = 0L; - return this; - } - - // optional string supervisorUuid = 7; + // optional string supervisorUuid = 5; public boolean hasSupervisorUuid() { return result.hasSupervisorUuid(); } @@ -2476,61 +3275,7 @@ public final class RemoteProtocol { return this; } - // required bool isActor = 8; - public boolean hasIsActor() { - return result.hasIsActor(); - } - public boolean getIsActor() { - return result.getIsActor(); - } - public Builder setIsActor(boolean value) { - result.hasIsActor = true; - result.isActor_ = value; - return this; - } - public Builder clearIsActor() { - result.hasIsActor = false; - result.isActor_ = false; - return this; - } - - // required bool isOneWay = 9; - public boolean hasIsOneWay() { - return result.hasIsOneWay(); - } - public boolean getIsOneWay() { - return result.getIsOneWay(); - } - public Builder setIsOneWay(boolean value) { - result.hasIsOneWay = true; - result.isOneWay_ = value; - return this; - } - public Builder clearIsOneWay() { - result.hasIsOneWay = false; - result.isOneWay_ = false; - return this; - } - - // required bool isEscaped = 10; - public boolean hasIsEscaped() { - return result.hasIsEscaped(); - } - public boolean getIsEscaped() { - return result.getIsEscaped(); - } - public Builder setIsEscaped(boolean value) { - result.hasIsEscaped = true; - result.isEscaped_ = value; - return 
this; - } - public Builder clearIsEscaped() { - result.hasIsEscaped = false; - result.isEscaped_ = false; - return this; - } - - // optional .RemoteActorRefProtocol sender = 11; + // optional .RemoteActorRefProtocol sender = 6; public boolean hasSender() { return result.hasSender(); } @@ -3168,6 +3913,20 @@ public final class RemoteProtocol { public boolean hasPostRestart() { return hasPostRestart; } public java.lang.String getPostRestart() { return postRestart_; } + // optional string init = 4; + public static final int INIT_FIELD_NUMBER = 4; + private boolean hasInit; + private java.lang.String init_ = ""; + public boolean hasInit() { return hasInit; } + public java.lang.String getInit() { return init_; } + + // optional string shutdown = 5; + public static final int SHUTDOWN_FIELD_NUMBER = 5; + private boolean hasShutdown; + private java.lang.String shutdown_ = ""; + public boolean hasShutdown() { return hasShutdown; } + public java.lang.String getShutdown() { return shutdown_; } + private void initFields() { lifeCycle_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleType.PERMANENT; } @@ -3188,6 +3947,12 @@ public final class RemoteProtocol { if (hasPostRestart()) { output.writeString(3, getPostRestart()); } + if (hasInit()) { + output.writeString(4, getInit()); + } + if (hasShutdown()) { + output.writeString(5, getShutdown()); + } getUnknownFields().writeTo(output); } @@ -3209,6 +3974,14 @@ public final class RemoteProtocol { size += com.google.protobuf.CodedOutputStream .computeStringSize(3, getPostRestart()); } + if (hasInit()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(4, getInit()); + } + if (hasShutdown()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(5, getShutdown()); + } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; @@ -3376,6 +4149,12 @@ public final class RemoteProtocol { if (other.hasPostRestart()) { 
setPostRestart(other.getPostRestart()); } + if (other.hasInit()) { + setInit(other.getInit()); + } + if (other.hasShutdown()) { + setShutdown(other.getShutdown()); + } this.mergeUnknownFields(other.getUnknownFields()); return this; } @@ -3419,6 +4198,14 @@ public final class RemoteProtocol { setPostRestart(input.readString()); break; } + case 34: { + setInit(input.readString()); + break; + } + case 42: { + setShutdown(input.readString()); + break; + } } } } @@ -3487,6 +4274,48 @@ public final class RemoteProtocol { return this; } + // optional string init = 4; + public boolean hasInit() { + return result.hasInit(); + } + public java.lang.String getInit() { + return result.getInit(); + } + public Builder setInit(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasInit = true; + result.init_ = value; + return this; + } + public Builder clearInit() { + result.hasInit = false; + result.init_ = getDefaultInstance().getInit(); + return this; + } + + // optional string shutdown = 5; + public boolean hasShutdown() { + return result.hasShutdown(); + } + public java.lang.String getShutdown() { + return result.getShutdown(); + } + public Builder setShutdown(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasShutdown = true; + result.shutdown_ = value; + return this; + } + public Builder clearShutdown() { + result.hasShutdown = false; + result.shutdown_ = getDefaultInstance().getShutdown(); + return this; + } + // @@protoc_insertion_point(builder_scope:LifeCycleProtocol) } @@ -4173,6 +5002,16 @@ public final class RemoteProtocol { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_MessageProtocol_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ActorInfoProtocol_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
internal_static_ActorInfoProtocol_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_TypedActorInfoProtocol_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_TypedActorInfoProtocol_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_RemoteRequestProtocol_descriptor; private static @@ -4210,40 +5049,46 @@ public final class RemoteProtocol { "\n\024RemoteProtocol.proto\"v\n\026RemoteActorRef" + "Protocol\022\014\n\004uuid\030\001 \002(\t\022\026\n\016actorClassname" + "\030\002 \002(\t\022%\n\013homeAddress\030\003 \002(\0132\020.AddressPro" + - "tocol\022\017\n\007timeout\030\004 \001(\004\"\350\002\n\032SerializedAct" + + "tocol\022\017\n\007timeout\030\004 \001(\004\"\200\003\n\032SerializedAct" + "orRefProtocol\022\014\n\004uuid\030\001 \002(\t\022\n\n\002id\030\002 \002(\t\022" + "\026\n\016actorClassname\030\003 \002(\t\022)\n\017originalAddre" + "ss\030\004 \002(\0132\020.AddressProtocol\022\025\n\ractorInsta" + "nce\030\005 \001(\014\022\033\n\023serializerClassname\030\006 \001(\t\022\024" + - "\n\014isTransactor\030\007 \001(\010\022\017\n\007timeout\030\010 \001(\004\022%\n" + - "\tlifeCycle\030\t \001(\0132\022.LifeCycleProtocol\022+\n\n", - "supervisor\030\n \001(\0132\027.RemoteActorRefProtoco" + - "l\022\024\n\014hotswapStack\030\013 \001(\014\022(\n\010messages\030\014 \003(" + - "\0132\026.RemoteRequestProtocol\"r\n\017MessageProt" + - "ocol\0225\n\023serializationScheme\030\001 \002(\0162\030.Seri" + - "alizationSchemeType\022\017\n\007message\030\002 \002(\014\022\027\n\017" + - "messageManifest\030\003 \001(\014\"\374\001\n\025RemoteRequestP" + - "rotocol\022\n\n\002id\030\001 \002(\004\022!\n\007message\030\002 \002(\0132\020.M" + - "essageProtocol\022\016\n\006method\030\003 \001(\t\022\016\n\006target" + - "\030\004 \002(\t\022\014\n\004uuid\030\005 \002(\t\022\017\n\007timeout\030\006 
\002(\004\022\026\n" + - "\016supervisorUuid\030\007 \001(\t\022\017\n\007isActor\030\010 \002(\010\022\020", - "\n\010isOneWay\030\t \002(\010\022\021\n\tisEscaped\030\n \002(\010\022\'\n\006s" + - "ender\030\013 \001(\0132\027.RemoteActorRefProtocol\"\252\001\n" + - "\023RemoteReplyProtocol\022\n\n\002id\030\001 \002(\004\022!\n\007mess" + - "age\030\002 \001(\0132\020.MessageProtocol\022%\n\texception" + - "\030\003 \001(\0132\022.ExceptionProtocol\022\026\n\016supervisor" + - "Uuid\030\004 \001(\t\022\017\n\007isActor\030\005 \002(\010\022\024\n\014isSuccess" + - "ful\030\006 \002(\010\"_\n\021LifeCycleProtocol\022!\n\tlifeCy" + - "cle\030\001 \002(\0162\016.LifeCycleType\022\022\n\npreRestart\030" + - "\002 \001(\t\022\023\n\013postRestart\030\003 \001(\t\"1\n\017AddressPro" + - "tocol\022\020\n\010hostname\030\001 \002(\t\022\014\n\004port\030\002 \002(\r\"7\n", - "\021ExceptionProtocol\022\021\n\tclassname\030\001 \002(\t\022\017\n" + - "\007message\030\002 \002(\t*]\n\027SerializationSchemeTyp" + - "e\022\010\n\004JAVA\020\001\022\013\n\007SBINARY\020\002\022\016\n\nSCALA_JSON\020\003" + - "\022\r\n\tJAVA_JSON\020\004\022\014\n\010PROTOBUF\020\005*-\n\rLifeCyc" + - "leType\022\r\n\tPERMANENT\020\001\022\r\n\tTEMPORARY\020\002B-\n)" + - "se.scalablesolutions.akka.remote.protoco" + - "lH\001" + "\n\014isTransactor\030\007 \001(\010\022\017\n\007timeout\030\010 \001(\004\022\026\n" + + "\016receiveTimeout\030\t \001(\004\022%\n\tlifeCycle\030\n \001(\013", + "2\022.LifeCycleProtocol\022+\n\nsupervisor\030\013 \001(\013" + + "2\027.RemoteActorRefProtocol\022\024\n\014hotswapStac" + + "k\030\014 \001(\014\022(\n\010messages\030\r \003(\0132\026.RemoteReques" + + "tProtocol\"r\n\017MessageProtocol\0225\n\023serializ" + + "ationScheme\030\001 \002(\0162\030.SerializationSchemeT" + + "ype\022\017\n\007message\030\002 \002(\014\022\027\n\017messageManifest\030" + + "\003 \001(\014\"\222\001\n\021ActorInfoProtocol\022\014\n\004uuid\030\001 \002(" + + 
"\t\022\016\n\006target\030\002 \002(\t\022\017\n\007timeout\030\003 \002(\004\022\035\n\tac" + + "torType\030\004 \002(\0162\n.ActorType\022/\n\016typedActorI" + + "nfo\030\005 \001(\0132\027.TypedActorInfoProtocol\";\n\026Ty", + "pedActorInfoProtocol\022\021\n\tinterface\030\001 \002(\t\022" + + "\016\n\006method\030\002 \002(\t\"\300\001\n\025RemoteRequestProtoco" + + "l\022\n\n\002id\030\001 \002(\004\022!\n\007message\030\002 \002(\0132\020.Message" + + "Protocol\022%\n\tactorInfo\030\003 \002(\0132\022.ActorInfoP" + + "rotocol\022\020\n\010isOneWay\030\004 \002(\010\022\026\n\016supervisorU" + + "uid\030\005 \001(\t\022\'\n\006sender\030\006 \001(\0132\027.RemoteActorR" + + "efProtocol\"\252\001\n\023RemoteReplyProtocol\022\n\n\002id" + + "\030\001 \002(\004\022!\n\007message\030\002 \001(\0132\020.MessageProtoco" + + "l\022%\n\texception\030\003 \001(\0132\022.ExceptionProtocol" + + "\022\026\n\016supervisorUuid\030\004 \001(\t\022\017\n\007isActor\030\005 \002(", + "\010\022\024\n\014isSuccessful\030\006 \002(\010\"\177\n\021LifeCycleProt" + + "ocol\022!\n\tlifeCycle\030\001 \002(\0162\016.LifeCycleType\022" + + "\022\n\npreRestart\030\002 \001(\t\022\023\n\013postRestart\030\003 \001(\t" + + "\022\014\n\004init\030\004 \001(\t\022\020\n\010shutdown\030\005 \001(\t\"1\n\017Addr" + + "essProtocol\022\020\n\010hostname\030\001 \002(\t\022\014\n\004port\030\002 " + + "\002(\r\"7\n\021ExceptionProtocol\022\021\n\tclassname\030\001 " + + "\002(\t\022\017\n\007message\030\002 \002(\t*=\n\tActorType\022\017\n\013SCA" + + "LA_ACTOR\020\001\022\016\n\nJAVA_ACTOR\020\002\022\017\n\013TYPED_ACTO" + + "R\020\003*]\n\027SerializationSchemeType\022\010\n\004JAVA\020\001" + + "\022\013\n\007SBINARY\020\002\022\016\n\nSCALA_JSON\020\003\022\r\n\tJAVA_JS", + "ON\020\004\022\014\n\010PROTOBUF\020\005*-\n\rLifeCycleType\022\r\n\tP" + + "ERMANENT\020\001\022\r\n\tTEMPORARY\020\002B-\n)se.scalable" + + "solutions.akka.remote.protocolH\001" }; 
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -4263,7 +5108,7 @@ public final class RemoteProtocol { internal_static_SerializedActorRefProtocol_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_SerializedActorRefProtocol_descriptor, - new java.lang.String[] { "Uuid", "Id", "ActorClassname", "OriginalAddress", "ActorInstance", "SerializerClassname", "IsTransactor", "Timeout", "LifeCycle", "Supervisor", "HotswapStack", "Messages", }, + new java.lang.String[] { "Uuid", "Id", "ActorClassname", "OriginalAddress", "ActorInstance", "SerializerClassname", "IsTransactor", "Timeout", "ReceiveTimeout", "LifeCycle", "Supervisor", "HotswapStack", "Messages", }, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol.class, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol.Builder.class); internal_static_MessageProtocol_descriptor = @@ -4274,16 +5119,32 @@ public final class RemoteProtocol { new java.lang.String[] { "SerializationScheme", "Message", "MessageManifest", }, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.class, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.Builder.class); - internal_static_RemoteRequestProtocol_descriptor = + internal_static_ActorInfoProtocol_descriptor = getDescriptor().getMessageTypes().get(3); + internal_static_ActorInfoProtocol_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ActorInfoProtocol_descriptor, + new java.lang.String[] { "Uuid", "Target", "Timeout", "ActorType", "TypedActorInfo", }, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.class, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.Builder.class); + 
internal_static_TypedActorInfoProtocol_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_TypedActorInfoProtocol_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_TypedActorInfoProtocol_descriptor, + new java.lang.String[] { "Interface", "Method", }, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.class, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.Builder.class); + internal_static_RemoteRequestProtocol_descriptor = + getDescriptor().getMessageTypes().get(5); internal_static_RemoteRequestProtocol_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RemoteRequestProtocol_descriptor, - new java.lang.String[] { "Id", "Message", "Method", "Target", "Uuid", "Timeout", "SupervisorUuid", "IsActor", "IsOneWay", "IsEscaped", "Sender", }, + new java.lang.String[] { "Id", "Message", "ActorInfo", "IsOneWay", "SupervisorUuid", "Sender", }, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.class, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.Builder.class); internal_static_RemoteReplyProtocol_descriptor = - getDescriptor().getMessageTypes().get(4); + getDescriptor().getMessageTypes().get(6); internal_static_RemoteReplyProtocol_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RemoteReplyProtocol_descriptor, @@ -4291,15 +5152,15 @@ public final class RemoteProtocol { se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol.class, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol.Builder.class); internal_static_LifeCycleProtocol_descriptor = - getDescriptor().getMessageTypes().get(5); + getDescriptor().getMessageTypes().get(7); internal_static_LifeCycleProtocol_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_LifeCycleProtocol_descriptor, - new java.lang.String[] { "LifeCycle", "PreRestart", "PostRestart", }, + new java.lang.String[] { "LifeCycle", "PreRestart", "PostRestart", "Init", "Shutdown", }, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.class, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.Builder.class); internal_static_AddressProtocol_descriptor = - getDescriptor().getMessageTypes().get(6); + getDescriptor().getMessageTypes().get(8); internal_static_AddressProtocol_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_AddressProtocol_descriptor, @@ -4307,7 +5168,7 @@ public final class RemoteProtocol { se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.class, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.Builder.class); internal_static_ExceptionProtocol_descriptor = - getDescriptor().getMessageTypes().get(7); + getDescriptor().getMessageTypes().get(9); internal_static_ExceptionProtocol_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ExceptionProtocol_descriptor, diff --git a/akka-core/src/main/protocol/RemoteProtocol.proto b/akka-core/src/main/protocol/RemoteProtocol.proto index 6d8b8995f4..6cf9bfd534 100644 --- a/akka-core/src/main/protocol/RemoteProtocol.proto +++ b/akka-core/src/main/protocol/RemoteProtocol.proto @@ -36,10 +36,11 @@ message SerializedActorRefProtocol { optional string serializerClassname = 6; optional bool isTransactor = 7; optional uint64 timeout = 8; - optional LifeCycleProtocol lifeCycle = 9; - optional RemoteActorRefProtocol supervisor = 10; - optional bytes hotswapStack = 11; - repeated RemoteRequestProtocol messages = 12; + optional uint64 receiveTimeout = 9; + optional LifeCycleProtocol lifeCycle = 10; + optional RemoteActorRefProtocol supervisor = 11; + optional 
bytes hotswapStack = 12; + repeated RemoteRequestProtocol messages = 13; } /** @@ -51,21 +52,35 @@ message MessageProtocol { optional bytes messageManifest = 3; } +/** + * Defines the actor info. + */ +message ActorInfoProtocol { + required string uuid = 1; + required string target = 2; + required uint64 timeout = 3; + required ActorType actorType = 4; + optional TypedActorInfoProtocol typedActorInfo = 5; +} + +/** + * Defines the typed actor extra info. + */ +message TypedActorInfoProtocol { + required string interface = 1; + required string method = 2; +} + /** * Defines a remote message request. */ message RemoteRequestProtocol { required uint64 id = 1; required MessageProtocol message = 2; - optional string method = 3; - required string target = 4; - required string uuid = 5; - required uint64 timeout = 6; - optional string supervisorUuid = 7; - required bool isActor = 8; - required bool isOneWay = 9; - required bool isEscaped = 10; - optional RemoteActorRefProtocol sender = 11; + required ActorInfoProtocol actorInfo = 3; + required bool isOneWay = 4; + optional string supervisorUuid = 5; + optional RemoteActorRefProtocol sender = 6; } /** @@ -80,6 +95,15 @@ message RemoteReplyProtocol { required bool isSuccessful = 6; } +/** + * Defines the actor type. + */ +enum ActorType { + SCALA_ACTOR = 1; + JAVA_ACTOR = 2; + TYPED_ACTOR = 3; +} + /** * Defines the serialization scheme used to serialize the message and/or Actor instance. 
*/ @@ -114,8 +138,6 @@ enum DispatcherType { */ message LifeCycleProtocol { required LifeCycleType lifeCycle = 1; - optional string preRestart = 2; - optional string postRestart = 3; } /** diff --git a/akka-core/src/main/resources/META-INF/aop.xml b/akka-core/src/main/resources/META-INF/aop.xml index 2f8d5159a8..bdc167ca54 100644 --- a/akka-core/src/main/resources/META-INF/aop.xml +++ b/akka-core/src/main/resources/META-INF/aop.xml @@ -1,7 +1,7 @@ - + diff --git a/akka-core/src/main/scala/actor/ActiveObject.scala b/akka-core/src/main/scala/actor/ActiveObject.scala deleted file mode 100644 index a545f9f633..0000000000 --- a/akka-core/src/main/scala/actor/ActiveObject.scala +++ /dev/null @@ -1,860 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor - -import Actor._ -import se.scalablesolutions.akka.config.FaultHandlingStrategy -import se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol -import se.scalablesolutions.akka.remote.{MessageSerializer, RemoteClient, RemoteRequestProtocolIdFactory} -import se.scalablesolutions.akka.dispatch.{MessageDispatcher, Future, CompletableFuture} -import se.scalablesolutions.akka.config.ScalaConfig._ -import se.scalablesolutions.akka.serialization.Serializer -import se.scalablesolutions.akka.util._ - -import org.codehaus.aspectwerkz.joinpoint.{MethodRtti, JoinPoint} -import org.codehaus.aspectwerkz.proxy.Proxy -import org.codehaus.aspectwerkz.annotation.{Aspect, Around} - -import java.net.InetSocketAddress -import java.lang.reflect.{InvocationTargetException, Method} - -object Annotations { - import se.scalablesolutions.akka.actor.annotation._ - val transactionrequired = classOf[transactionrequired] - val prerestart = classOf[prerestart] - val postrestart = classOf[postrestart] - val shutdown = classOf[shutdown] - val inittransactionalstate = classOf[inittransactionalstate] -} - -/** - * Configuration factory for Active Objects. 
- * - * FIXDOC: document ActiveObjectConfiguration - */ -final class ActiveObjectConfiguration { - private[akka] var _timeout: Long = Actor.TIMEOUT - private[akka] var _restartCallbacks: Option[RestartCallbacks] = None - private[akka] var _shutdownCallback: Option[ShutdownCallback] = None - private[akka] var _transactionRequired = false - private[akka] var _host: Option[InetSocketAddress] = None - private[akka] var _messageDispatcher: Option[MessageDispatcher] = None - - def timeout(timeout: Long) : ActiveObjectConfiguration = { - _timeout = timeout - this - } - - def restartCallbacks(pre: String, post: String) : ActiveObjectConfiguration = { - _restartCallbacks = Some(new RestartCallbacks(pre, post)) - this - } - - def shutdownCallback(down: String) : ActiveObjectConfiguration = { - _shutdownCallback = Some(new ShutdownCallback(down)) - this - } - - def makeTransactionRequired() : ActiveObjectConfiguration = { - _transactionRequired = true; - this - } - - def makeRemote(hostname: String, port: Int) : ActiveObjectConfiguration = { - _host = Some(new InetSocketAddress(hostname, port)) - this - } - - def dispatcher(messageDispatcher: MessageDispatcher) : ActiveObjectConfiguration = { - _messageDispatcher = Some(messageDispatcher) - this - } -} - -/** - * Holds RTTI (runtime type information) for the Active Object, f.e. current 'sender' - * reference, the 'senderFuture' reference etc. - *

- * In order to make use of this context you have to create a member field in your - * Active Object that has the type 'ActiveObjectContext', then an instance will - * be injected for you to use. - *

- * This class does not contain static information but is updated by the runtime system - * at runtime. - *

- * Here is an example of usage: - *

- * class Ping {
- *   // This context will be injected, holds RTTI (runtime type information)
- *   // for the current message send
- *   private ActiveObjectContext context = null;
- *
- *   public void hit(int count) {
- *     Pong pong = (Pong) context.getSender();
- *     pong.hit(count++)
- *   }
- * }
- * 
- * - * @author Jonas Bonér - */ -final class ActiveObjectContext { - private[akka] var _sender: AnyRef = _ - private[akka] var _senderFuture: CompletableFuture[Any] = _ - - /** - * Returns the current sender Active Object reference. - * Scala style getter. - */ - def sender: AnyRef = { - if (_sender eq null) throw new IllegalActorStateException("Sender reference should not be null.") - else _sender - } - - /** - * Returns the current sender Active Object reference. - * Java style getter. - */ - def getSender: AnyRef = { - if (_sender eq null) throw new IllegalActorStateException("Sender reference should not be null.") - else _sender - } - - /** - * Returns the current sender future Active Object reference. - * Scala style getter. - */ - def senderFuture: Option[CompletableFuture[Any]] = if (_senderFuture eq null) None else Some(_senderFuture) - - /** - * Returns the current sender future Active Object reference. - * Java style getter. - * This method returns 'null' if the sender future is not available. - */ - def getSenderFuture = _senderFuture -} - -/** - * Internal helper class to help pass the contextual information between threads. - * - * @author Jonas Bonér - */ -private[akka] object ActiveObjectContext { - import scala.util.DynamicVariable - private[actor] val sender = new DynamicVariable[AnyRef](null) - private[actor] val senderFuture = new DynamicVariable[CompletableFuture[Any]](null) -} - -/** - * Factory class for creating Active Objects out of plain POJOs and/or POJOs with interfaces. 
- * - * @author Jonas Bonér - */ -object ActiveObject extends Logging { - import Actor.actorOf - - val AKKA_CAMEL_ROUTING_SCHEME = "akka" - private[actor] val AW_PROXY_PREFIX = "$$ProxiedByAW".intern - - def newInstance[T](target: Class[T], timeout: Long): T = - newInstance(target, actorOf(new Dispatcher(false)), None, timeout) - - def newInstance[T](target: Class[T]): T = - newInstance(target, actorOf(new Dispatcher(false)), None, Actor.TIMEOUT) - - def newInstance[T](intf: Class[T], target: AnyRef, timeout: Long): T = - newInstance(intf, target, actorOf(new Dispatcher(false)), None, timeout) - - def newInstance[T](intf: Class[T], target: AnyRef): T = - newInstance(intf, target, actorOf(new Dispatcher(false)), None, Actor.TIMEOUT) - - def newRemoteInstance[T](target: Class[T], timeout: Long, hostname: String, port: Int): T = - newInstance(target, actorOf(new Dispatcher(false)), Some(new InetSocketAddress(hostname, port)), timeout) - - def newRemoteInstance[T](target: Class[T], hostname: String, port: Int): T = - newInstance(target, actorOf(new Dispatcher(false)), Some(new InetSocketAddress(hostname, port)), Actor.TIMEOUT) - - def newInstance[T](target: Class[T], config: ActiveObjectConfiguration): T = { - val actor = actorOf(new Dispatcher(config._transactionRequired, config._restartCallbacks, config._shutdownCallback)) - if (config._messageDispatcher.isDefined) { - actor.dispatcher = config._messageDispatcher.get - } - newInstance(target, actor, config._host, config._timeout) - } - - def newInstance[T](intf: Class[T], target: AnyRef, config: ActiveObjectConfiguration): T = { - val actor = actorOf(new Dispatcher(config._transactionRequired, config._restartCallbacks, config._shutdownCallback)) - if (config._messageDispatcher.isDefined) { - actor.dispatcher = config._messageDispatcher.get - } - newInstance(intf, target, actor, config._host, config._timeout) - } - - @deprecated("use newInstance(target: Class[T], config: ActiveObjectConfiguration) instead") - def 
newInstance[T](target: Class[T], timeout: Long, restartCallbacks: Option[RestartCallbacks]): T = - newInstance(target, actorOf(new Dispatcher(false, restartCallbacks)), None, timeout) - - @deprecated("use newInstance(intf: Class[T], target: AnyRef, config: ActiveObjectConfiguration) instead") - def newInstance[T](intf: Class[T], target: AnyRef, timeout: Long, restartCallbacks: Option[RestartCallbacks]): T = - newInstance(intf, target, actorOf(new Dispatcher(false, restartCallbacks)), None, timeout) - - @deprecated("use newInstance(target: Class[T], config: ActiveObjectConfiguration) instead") - def newInstance[T](target: Class[T], timeout: Long, transactionRequired: Boolean): T = - newInstance(target, actorOf(new Dispatcher(transactionRequired, None)), None, timeout) - - @deprecated("use newInstance(target: Class[T], config: ActiveObjectConfiguration) instead") - def newInstance[T](target: Class[T], timeout: Long, transactionRequired: Boolean, restartCallbacks: Option[RestartCallbacks]): T = - newInstance(target, actorOf(new Dispatcher(transactionRequired, restartCallbacks)), None, timeout) - - @deprecated("use newInstance(intf: Class[T], target: AnyRef, config: ActiveObjectConfiguration) instead") - def newInstance[T](intf: Class[T], target: AnyRef, timeout: Long, transactionRequired: Boolean): T = - newInstance(intf, target, actorOf(new Dispatcher(transactionRequired, None)), None, timeout) - - @deprecated("use newInstance(intf: Class[T], target: AnyRef, config: ActiveObjectConfiguration) instead") - def newInstance[T](intf: Class[T], target: AnyRef, timeout: Long, transactionRequired: Boolean, restartCallbacks: Option[RestartCallbacks]): T = - newInstance(intf, target, actorOf(new Dispatcher(transactionRequired, restartCallbacks)), None, timeout) - - @deprecated("use newInstance(intf: Class[T], target: AnyRef, config: ActiveObjectConfiguration) instead") - def newRemoteInstance[T](intf: Class[T], target: AnyRef, timeout: Long, hostname: String, port: Int): T = - 
newInstance(intf, target, actorOf(new Dispatcher(false, None)), Some(new InetSocketAddress(hostname, port)), timeout) - - @deprecated("use newInstance(intf: Class[T], target: AnyRef, config: ActiveObjectConfiguration) instead") - def newRemoteInstance[T](intf: Class[T], target: AnyRef, timeout: Long, hostname: String, port: Int, restartCallbacks: Option[RestartCallbacks]): T = - newInstance(intf, target, actorOf(new Dispatcher(false, restartCallbacks)), Some(new InetSocketAddress(hostname, port)), timeout) - - @deprecated("use newInstance(target: Class[T], config: ActiveObjectConfiguration) instead") - def newRemoteInstance[T](target: Class[T], timeout: Long, transactionRequired: Boolean, hostname: String, port: Int): T = - newInstance(target, actorOf(new Dispatcher(transactionRequired, None)), Some(new InetSocketAddress(hostname, port)), timeout) - - @deprecated("use newInstance(target: Class[T], config: ActiveObjectConfiguration) instead") - def newRemoteInstance[T](target: Class[T], timeout: Long, transactionRequired: Boolean, hostname: String, port: Int, restartCallbacks: Option[RestartCallbacks]): T = - newInstance(target, actorOf(new Dispatcher(transactionRequired, restartCallbacks)), Some(new InetSocketAddress(hostname, port)), timeout) - - @deprecated("use newInstance(intf: Class[T], target: AnyRef, config: ActiveObjectConfiguration) instead") - def newRemoteInstance[T](intf: Class[T], target: AnyRef, timeout: Long, transactionRequired: Boolean, hostname: String, port: Int): T = - newInstance(intf, target, actorOf(new Dispatcher(transactionRequired, None)), Some(new InetSocketAddress(hostname, port)), timeout) - - @deprecated("use newInstance(intf: Class[T], target: AnyRef, config: ActiveObjectConfiguration) instead") - def newRemoteInstance[T](intf: Class[T], target: AnyRef, timeout: Long, transactionRequired: Boolean, hostname: String, port: Int, restartCallbacks: Option[RestartCallbacks]): T = - newInstance(intf, target, actorOf(new 
Dispatcher(transactionRequired, restartCallbacks)), Some(new InetSocketAddress(hostname, port)), timeout) - - @deprecated("use newInstance(target: Class[T], config: ActiveObjectConfiguration) instead") - def newInstance[T](target: Class[T], timeout: Long, dispatcher: MessageDispatcher): T = { - val actor = actorOf(new Dispatcher(false, None)) - actor.dispatcher = dispatcher - newInstance(target, actor, None, timeout) - } - - @deprecated("use newInstance(target: Class[T], config: ActiveObjectConfiguration) instead") - def newInstance[T](target: Class[T], timeout: Long, dispatcher: MessageDispatcher, restartCallbacks: Option[RestartCallbacks]): T = { - val actor = actorOf(new Dispatcher(false, restartCallbacks)) - actor.dispatcher = dispatcher - newInstance(target, actor, None, timeout) - } - - @deprecated("use newInstance(intf: Class[T], target: AnyRef, config: ActiveObjectConfiguration) instead") - def newInstance[T](intf: Class[T], target: AnyRef, timeout: Long, dispatcher: MessageDispatcher): T = { - val actor = actorOf(new Dispatcher(false, None)) - actor.dispatcher = dispatcher - newInstance(intf, target, actor, None, timeout) - } - - @deprecated("use newInstance(intf: Class[T], target: AnyRef, config: ActiveObjectConfiguration) instead") - def newInstance[T](intf: Class[T], target: AnyRef, timeout: Long, - dispatcher: MessageDispatcher, restartCallbacks: Option[RestartCallbacks]): T = { - val actor = actorOf(new Dispatcher(false, restartCallbacks)) - actor.dispatcher = dispatcher - newInstance(intf, target, actor, None, timeout) - } - - @deprecated("use newInstance(target: Class[T], config: ActiveObjectConfiguration) instead") - def newInstance[T](target: Class[T], timeout: Long, transactionRequired: Boolean, dispatcher: MessageDispatcher): T = { - val actor = actorOf(new Dispatcher(transactionRequired, None)) - actor.dispatcher = dispatcher - newInstance(target, actor, None, timeout) - } - - @deprecated("use newInstance(target: Class[T], config: 
ActiveObjectConfiguration) instead") - def newInstance[T](target: Class[T], timeout: Long, transactionRequired: Boolean, - dispatcher: MessageDispatcher, restartCallbacks: Option[RestartCallbacks]): T = { - val actor = actorOf(new Dispatcher(transactionRequired, restartCallbacks)) - actor.dispatcher = dispatcher - newInstance(target, actor, None, timeout) - } - - @deprecated("use newInstance(intf: Class[T], target: AnyRef, config: ActiveObjectConfiguration) instead") - def newInstance[T](intf: Class[T], target: AnyRef, timeout: Long, transactionRequired: Boolean, dispatcher: MessageDispatcher): T = { - val actor = actorOf(new Dispatcher(transactionRequired, None)) - actor.dispatcher = dispatcher - newInstance(intf, target, actor, None, timeout) - } - - @deprecated("use newInstance(intf: Class[T], target: AnyRef, config: ActiveObjectConfiguration) instead") - def newInstance[T](intf: Class[T], target: AnyRef, timeout: Long, transactionRequired: Boolean, - dispatcher: MessageDispatcher, restartCallbacks: Option[RestartCallbacks]): T = { - val actor = actorOf(new Dispatcher(transactionRequired, restartCallbacks)) - actor.dispatcher = dispatcher - newInstance(intf, target, actor, None, timeout) - } - - @deprecated("use newInstance(target: Class[T], config: ActiveObjectConfiguration) instead") - def newRemoteInstance[T](target: Class[T], timeout: Long, dispatcher: MessageDispatcher, hostname: String, port: Int): T = { - val actor = actorOf(new Dispatcher(false, None)) - actor.dispatcher = dispatcher - newInstance(target, actor, Some(new InetSocketAddress(hostname, port)), timeout) - } - - @deprecated("use newInstance(target: Class[T], config: ActiveObjectConfiguration) instead") - def newRemoteInstance[T](target: Class[T], timeout: Long, dispatcher: MessageDispatcher, - hostname: String, port: Int, restartCallbacks: Option[RestartCallbacks]): T = { - val actor = actorOf(new Dispatcher(false, restartCallbacks)) - actor.dispatcher = dispatcher - newInstance(target, actor, 
Some(new InetSocketAddress(hostname, port)), timeout) - } - - @deprecated("use newInstance(intf: Class[T], target: AnyRef, config: ActiveObjectConfiguration) instead") - def newRemoteInstance[T](intf: Class[T], target: AnyRef, timeout: Long, dispatcher: MessageDispatcher, hostname: String, port: Int): T = { - val actor = actorOf(new Dispatcher(false, None)) - actor.dispatcher = dispatcher - newInstance(intf, target, actor, Some(new InetSocketAddress(hostname, port)), timeout) - } - - @deprecated("use newInstance(intf: Class[T], target: AnyRef, config: ActiveObjectConfiguration) instead") - def newRemoteInstance[T](intf: Class[T], target: AnyRef, timeout: Long, dispatcher: MessageDispatcher, - hostname: String, port: Int, restartCallbacks: Option[RestartCallbacks]): T = { - val actor = actorOf(new Dispatcher(false, restartCallbacks)) - actor.dispatcher = dispatcher - newInstance(intf, target, actor, Some(new InetSocketAddress(hostname, port)), timeout) - } - - @deprecated("use newInstance(target: Class[T], config: ActiveObjectConfiguration) instead") - def newRemoteInstance[T](target: Class[T], timeout: Long, transactionRequired: Boolean, - dispatcher: MessageDispatcher, hostname: String, port: Int): T = { - val actor = actorOf(new Dispatcher(transactionRequired, None)) - actor.dispatcher = dispatcher - newInstance(target, actor, Some(new InetSocketAddress(hostname, port)), timeout) - } - - @deprecated("use newInstance(target: Class[T], config: ActiveObjectConfiguration) instead") - def newRemoteInstance[T](target: Class[T], timeout: Long, transactionRequired: Boolean, dispatcher: MessageDispatcher, - hostname: String, port: Int, restartCallbacks: Option[RestartCallbacks]): T = { - val actor = actorOf(new Dispatcher(transactionRequired, restartCallbacks)) - actor.dispatcher = dispatcher - newInstance(target, actor, Some(new InetSocketAddress(hostname, port)), timeout) - } - - @deprecated("use newInstance(intf: Class[T], target: AnyRef, config: 
ActiveObjectConfiguration) instead") - def newRemoteInstance[T](intf: Class[T], target: AnyRef, timeout: Long, transactionRequired: Boolean, - dispatcher: MessageDispatcher, hostname: String, port: Int): T = { - val actor = actorOf(new Dispatcher(transactionRequired, None)) - actor.dispatcher = dispatcher - newInstance(intf, target, actor, Some(new InetSocketAddress(hostname, port)), timeout) - } - - @deprecated("use newInstance(intf: Class[T], target: AnyRef, config: ActiveObjectConfiguration) instead") - def newRemoteInstance[T](intf: Class[T], target: AnyRef, timeout: Long, transactionRequired: Boolean, - dispatcher: MessageDispatcher, hostname: String, port: Int, restartCallbacks: Option[RestartCallbacks]): T = { - val actor = actorOf(new Dispatcher(transactionRequired, restartCallbacks)) - actor.dispatcher = dispatcher - newInstance(intf, target, actor, Some(new InetSocketAddress(hostname, port)), timeout) - } - - private[akka] def newInstance[T](target: Class[T], actorRef: ActorRef, remoteAddress: Option[InetSocketAddress], timeout: Long): T = { - val proxy = Proxy.newInstance(target, true, false) - val context = injectActiveObjectContext(proxy) - actorRef.actor.asInstanceOf[Dispatcher].initialize(target, proxy, context) - actorRef.timeout = timeout - if (remoteAddress.isDefined) actorRef.makeRemote(remoteAddress.get) - AspectInitRegistry.register(proxy, AspectInit(target, actorRef, remoteAddress, timeout)) - actorRef.start - proxy.asInstanceOf[T] - } - - private[akka] def newInstance[T](intf: Class[T], target: AnyRef, actorRef: ActorRef, - remoteAddress: Option[InetSocketAddress], timeout: Long): T = { - val context = injectActiveObjectContext(target) - val proxy = Proxy.newInstance(Array(intf), Array(target), true, false) - actorRef.actor.asInstanceOf[Dispatcher].initialize(target.getClass, target, context) - actorRef.timeout = timeout - if (remoteAddress.isDefined) actorRef.makeRemote(remoteAddress.get) - AspectInitRegistry.register(proxy, AspectInit(intf, 
actorRef, remoteAddress, timeout)) - actorRef.start - proxy.asInstanceOf[T] - } - - def stop(obj: AnyRef): Unit = { - val init = AspectInitRegistry.initFor(obj) - init.actorRef.stop - } - - /** - * Get the underlying dispatcher actor for the given active object. - */ - def actorFor(obj: AnyRef): Option[ActorRef] = - ActorRegistry.actorsFor(classOf[Dispatcher]).find(a => a.actor.asInstanceOf[Dispatcher].target == Some(obj)) - - /** - * Links an other active object to this active object. - * @param supervisor the supervisor active object - * @param supervised the active object to link - */ - def link(supervisor: AnyRef, supervised: AnyRef) = { - val supervisorActor = actorFor(supervisor).getOrElse( - throw new IllegalActorStateException("Can't link when the supervisor is not an active object")) - val supervisedActor = actorFor(supervised).getOrElse( - throw new IllegalActorStateException("Can't link when the supervised is not an active object")) - supervisorActor.link(supervisedActor) - } - - /** - * Links an other active object to this active object and sets the fault handling for the supervisor. - * @param supervisor the supervisor active object - * @param supervised the active object to link - * @param handler fault handling strategy - * @param trapExceptions array of exceptions that should be handled by the supervisor - */ - def link(supervisor: AnyRef, supervised: AnyRef, handler: FaultHandlingStrategy, trapExceptions: Array[Class[_ <: Throwable]]) = { - val supervisorActor = actorFor(supervisor).getOrElse( - throw new IllegalActorStateException("Can't link when the supervisor is not an active object")) - val supervisedActor = actorFor(supervised).getOrElse( - throw new IllegalActorStateException("Can't link when the supervised is not an active object")) - supervisorActor.trapExit = trapExceptions.toList - supervisorActor.faultHandler = Some(handler) - supervisorActor.link(supervisedActor) - } - - /** - * Unlink the supervised active object from the supervisor. 
- * @param supervisor the supervisor active object - * @param supervised the active object to unlink - */ - def unlink(supervisor: AnyRef, supervised: AnyRef) = { - val supervisorActor = actorFor(supervisor).getOrElse( - throw new IllegalActorStateException("Can't unlink when the supervisor is not an active object")) - val supervisedActor = actorFor(supervised).getOrElse( - throw new IllegalActorStateException("Can't unlink when the supervised is not an active object")) - supervisorActor.unlink(supervisedActor) - } - - /** - * Sets the trap exit for the given supervisor active object. - * @param supervisor the supervisor active object - * @param trapExceptions array of exceptions that should be handled by the supervisor - */ - def trapExit(supervisor: AnyRef, trapExceptions: Array[Class[_ <: Throwable]]) = { - val supervisorActor = actorFor(supervisor).getOrElse( - throw new IllegalActorStateException("Can't set trap exceptions when the supervisor is not an active object")) - supervisorActor.trapExit = trapExceptions.toList - this - } - - /** - * Sets the fault handling strategy for the given supervisor active object. 
- * @param supervisor the supervisor active object - * @param handler fault handling strategy - */ - def faultHandler(supervisor: AnyRef, handler: FaultHandlingStrategy) = { - val supervisorActor = actorFor(supervisor).getOrElse( - throw new IllegalActorStateException("Can't set fault handler when the supervisor is not an active object")) - supervisorActor.faultHandler = Some(handler) - this - } - - private def injectActiveObjectContext(activeObject: AnyRef): Option[ActiveObjectContext] = { - def injectActiveObjectContext0(activeObject: AnyRef, clazz: Class[_]): Option[ActiveObjectContext] = { - val contextField = clazz.getDeclaredFields.toList.find(_.getType == classOf[ActiveObjectContext]) - if (contextField.isDefined) { - contextField.get.setAccessible(true) - val context = new ActiveObjectContext - contextField.get.set(activeObject, context) - Some(context) - } else { - val parent = clazz.getSuperclass - if (parent != null) injectActiveObjectContext0(activeObject, parent) - else { - log.trace( - "Can't set 'ActiveObjectContext' for ActiveObject [%s] since no field of this type could be found.", - activeObject.getClass.getName) - None - } - } - } - injectActiveObjectContext0(activeObject, activeObject.getClass) - } - - private[akka] def supervise(restartStrategy: RestartStrategy, components: List[Supervise]): Supervisor = - Supervisor(SupervisorConfig(restartStrategy, components)) - -} - -private[akka] object AspectInitRegistry extends ListenerManagement { - private val initializations = new java.util.concurrent.ConcurrentHashMap[AnyRef, AspectInit] - - def initFor(target: AnyRef) = { - initializations.get(target) - } - - def register(target: AnyRef, init: AspectInit) = { - val res = initializations.put(target, init) - foreachListener(_ ! AspectInitRegistered(target, init)) - res - } - - def unregister(target: AnyRef) = { - val res = initializations.remove(target) - foreachListener(_ ! 
AspectInitUnregistered(target, res)) - res - } -} - -private[akka] sealed trait AspectInitRegistryEvent -private[akka] case class AspectInitRegistered(proxy: AnyRef, init: AspectInit) extends AspectInitRegistryEvent -private[akka] case class AspectInitUnregistered(proxy: AnyRef, init: AspectInit) extends AspectInitRegistryEvent - -private[akka] sealed case class AspectInit( - val target: Class[_], - val actorRef: ActorRef, - val remoteAddress: Option[InetSocketAddress], - val timeout: Long) { - def this(target: Class[_], actorRef: ActorRef, timeout: Long) = this(target, actorRef, None, timeout) -} - -/** - * AspectWerkz Aspect that is turning POJOs into Active Object. - * Is deployed on a 'per-instance' basis. - * - * @author Jonas Bonér - */ -@Aspect("perInstance") -private[akka] sealed class ActiveObjectAspect { - @volatile private var isInitialized = false - @volatile private var isStopped = false - private var target: Class[_] = _ - private var actorRef: ActorRef = _ - private var remoteAddress: Option[InetSocketAddress] = _ - private var timeout: Long = _ - @volatile private var instance: AnyRef = _ - - @Around("execution(* *.*(..))") - def invoke(joinPoint: JoinPoint): AnyRef = { - if (!isInitialized) { - val init = AspectInitRegistry.initFor(joinPoint.getThis) - target = init.target - actorRef = init.actorRef - remoteAddress = init.remoteAddress - timeout = init.timeout - isInitialized = true - - } - dispatch(joinPoint) - } - - private def dispatch(joinPoint: JoinPoint) = { - if (remoteAddress.isDefined) remoteDispatch(joinPoint) - else localDispatch(joinPoint) - } - - private def localDispatch(joinPoint: JoinPoint): AnyRef = { - val rtti = joinPoint.getRtti.asInstanceOf[MethodRtti] - val isOneWay = isVoid(rtti) - val sender = ActiveObjectContext.sender.value - val senderFuture = ActiveObjectContext.senderFuture.value - - if (!actorRef.isRunning && !isStopped) { - isStopped = true - joinPoint.proceed - } else if (isOneWay) { - actorRef ! 
Invocation(joinPoint, true, true, sender, senderFuture) - null.asInstanceOf[AnyRef] - } else { - val result = (actorRef !! (Invocation(joinPoint, false, isOneWay, sender, senderFuture), timeout)).as[AnyRef] - if (result.isDefined) result.get - else throw new IllegalActorStateException("No result defined for invocation [" + joinPoint + "]") - } - } - - private def remoteDispatch(joinPoint: JoinPoint): AnyRef = { - val rtti = joinPoint.getRtti.asInstanceOf[MethodRtti] - val isOneWay = isVoid(rtti) - val (message: Array[AnyRef], isEscaped) = escapeArguments(rtti.getParameterValues) - val requestBuilder = RemoteRequestProtocol.newBuilder - .setId(RemoteRequestProtocolIdFactory.nextId) - .setMessage(MessageSerializer.serialize(message)) - .setMethod(rtti.getMethod.getName) - .setTarget(target.getName) - .setUuid(actorRef.uuid) - .setTimeout(timeout) - .setIsActor(false) - .setIsOneWay(isOneWay) - .setIsEscaped(false) - val id = actorRef.registerSupervisorAsRemoteActor - if (id.isDefined) requestBuilder.setSupervisorUuid(id.get) - val remoteMessage = requestBuilder.build - val future = RemoteClient.clientFor(remoteAddress.get).send(remoteMessage, None) - if (isOneWay) null // for void methods - else { - if (future.isDefined) { - future.get.await - val result = getResultOrThrowException(future.get) - if (result.isDefined) result.get - else throw new IllegalActorStateException("No result returned from call to [" + joinPoint + "]") - } else throw new IllegalActorStateException("No future returned from call to [" + joinPoint + "]") - } - } - - private def getResultOrThrowException[T](future: Future[T]): Option[T] = - if (future.exception.isDefined) { - val (_, cause) = future.exception.get - throw cause - } else future.result - - private def isVoid(rtti: MethodRtti) = rtti.getMethod.getReturnType == java.lang.Void.TYPE - - private def escapeArguments(args: Array[AnyRef]): Tuple2[Array[AnyRef], Boolean] = { - var isEscaped = false - val escapedArgs = for (arg <- args) yield { 
- val clazz = arg.getClass - if (clazz.getName.contains(ActiveObject.AW_PROXY_PREFIX)) { - isEscaped = true - ActiveObject.AW_PROXY_PREFIX + clazz.getSuperclass.getName - } else arg - } - (escapedArgs, isEscaped) - } -} - -/** - * Represents a snapshot of the current invocation. - * - * @author Jonas Bonér - */ -@serializable private[akka] case class Invocation( - joinPoint: JoinPoint, isOneWay: Boolean, isVoid: Boolean, sender: AnyRef, senderFuture: CompletableFuture[Any]) { - - override def toString: String = synchronized { - "Invocation [joinPoint: " + joinPoint.toString + - ", isOneWay: " + isOneWay + - ", isVoid: " + isVoid + - ", sender: " + sender + - ", senderFuture: " + senderFuture + - "]" - } - - override def hashCode: Int = synchronized { - var result = HashCode.SEED - result = HashCode.hash(result, joinPoint) - result = HashCode.hash(result, isOneWay) - result = HashCode.hash(result, isVoid) - result = HashCode.hash(result, sender) - result = HashCode.hash(result, senderFuture) - result - } - - override def equals(that: Any): Boolean = synchronized { - that != null && - that.isInstanceOf[Invocation] && - that.asInstanceOf[Invocation].joinPoint == joinPoint && - that.asInstanceOf[Invocation].isOneWay == isOneWay && - that.asInstanceOf[Invocation].isVoid == isVoid && - that.asInstanceOf[Invocation].sender == sender && - that.asInstanceOf[Invocation].senderFuture == senderFuture - } -} - -object Dispatcher { - val ZERO_ITEM_CLASS_ARRAY = Array[Class[_]]() - val ZERO_ITEM_OBJECT_ARRAY = Array[Object]() - var crashedActorTl:ThreadLocal[Dispatcher] = new ThreadLocal(); -} - -/** - * Generic Actor managing Invocation dispatch, transaction and error management. 
- * - * @author Jonas Bonér - */ -private[akka] class Dispatcher(transactionalRequired: Boolean, - var restartCallbacks: Option[RestartCallbacks] = None, - var shutdownCallback: Option[ShutdownCallback] = None) extends Actor { - import Dispatcher._ - - private[actor] var target: Option[AnyRef] = None - private var zhutdown: Option[Method] = None - private var preRestart: Option[Method] = None - private var postRestart: Option[Method] = None - private var initTxState: Option[Method] = None - private var context: Option[ActiveObjectContext] = None - private var targetClass:Class[_] = _ - - - - def this(transactionalRequired: Boolean) = this(transactionalRequired,None) - - private[actor] def initialize(targetClass: Class[_], targetInstance: AnyRef, ctx: Option[ActiveObjectContext]) = { - - if (transactionalRequired || targetClass.isAnnotationPresent(Annotations.transactionrequired)) - self.makeTransactionRequired - self.id = targetClass.getName - this.targetClass = targetClass - target = Some(targetInstance) - context = ctx - val methods = targetInstance.getClass.getDeclaredMethods.toList - - // See if we have any config define restart callbacks - restartCallbacks match { - case None => {} - case Some(RestartCallbacks(pre, post)) => - preRestart = Some(try { - targetInstance.getClass.getDeclaredMethod(pre, ZERO_ITEM_CLASS_ARRAY: _*) - } catch { case e => throw new IllegalActorStateException( - "Could not find pre restart method [" + pre + "] \nin [" + - targetClass.getName + "]. \nIt must have a zero argument definition.") }) - postRestart = Some(try { - targetInstance.getClass.getDeclaredMethod(post, ZERO_ITEM_CLASS_ARRAY: _*) - } catch { case e => throw new IllegalActorStateException( - "Could not find post restart method [" + post + "] \nin [" + - targetClass.getName + "]. 
\nIt must have a zero argument definition.") }) - } - // See if we have any config define a shutdown callback - shutdownCallback match { - case None => {} - case Some(ShutdownCallback(down)) => - zhutdown = Some(try { - targetInstance.getClass.getDeclaredMethod(down, ZERO_ITEM_CLASS_ARRAY: _*) - } catch { case e => throw new IllegalStateException( - "Could not find shutdown method [" + down + "] \nin [" + - targetClass.getName + "]. \nIt must have a zero argument definition.") }) - } - - // See if we have any annotation defined restart callbacks - if (!preRestart.isDefined) preRestart = methods.find(m => m.isAnnotationPresent(Annotations.prerestart)) - if (!postRestart.isDefined) postRestart = methods.find(m => m.isAnnotationPresent(Annotations.postrestart)) - // See if we have an annotation defined shutdown callback - if (!zhutdown.isDefined) zhutdown = methods.find(m => m.isAnnotationPresent(Annotations.shutdown)) - - if (preRestart.isDefined && preRestart.get.getParameterTypes.length != 0) - throw new IllegalActorStateException( - "Method annotated with @prerestart or defined as a restart callback in \n[" + - targetClass.getName + "] must have a zero argument definition") - if (postRestart.isDefined && postRestart.get.getParameterTypes.length != 0) - throw new IllegalActorStateException( - "Method annotated with @postrestart or defined as a restart callback in \n[" + - targetClass.getName + "] must have a zero argument definition") - if (zhutdown.isDefined && zhutdown.get.getParameterTypes.length != 0) - throw new IllegalStateException( - "Method annotated with @shutdown or defined as a shutdown callback in \n[" + - targetClass.getName + "] must have a zero argument definition") - - if (preRestart.isDefined) preRestart.get.setAccessible(true) - if (postRestart.isDefined) postRestart.get.setAccessible(true) - if (zhutdown.isDefined) zhutdown.get.setAccessible(true) - - // see if we have a method annotated with @inittransactionalstate, if so invoke it - 
initTxState = methods.find(m => m.isAnnotationPresent(Annotations.inittransactionalstate)) - if (initTxState.isDefined && initTxState.get.getParameterTypes.length != 0) - throw new IllegalActorStateException("Method annotated with @inittransactionalstate must have a zero argument definition") - if (initTxState.isDefined) initTxState.get.setAccessible(true) - } - - def receive = { - case Invocation(joinPoint, isOneWay, _, sender, senderFuture) => - context.foreach { ctx => - if (sender ne null) ctx._sender = sender - if (senderFuture ne null) ctx._senderFuture = senderFuture - } - ActiveObjectContext.sender.value = joinPoint.getThis // set next sender - self.senderFuture.foreach(ActiveObjectContext.senderFuture.value = _) - - if (Actor.SERIALIZE_MESSAGES) serializeArguments(joinPoint) - if (isOneWay) joinPoint.proceed - else self.reply(joinPoint.proceed) - - // Jan Kronquist: started work on issue 121 - case Link(target) => self.link(target) - case Unlink(target) => self.unlink(target) - case unexpected => - throw new IllegalActorStateException("Unexpected message [" + unexpected + "] sent to [" + this + "]") - } - - override def preRestart(reason: Throwable) { - try { - // Since preRestart is called we know that this dispatcher - // is about to be restarted. Put the instance in a thread - // local so the new dispatcher can be initialized with the contents of the - // old. - //FIXME - This should be considered as a workaround. 
- crashedActorTl.set(this) - if (preRestart.isDefined) preRestart.get.invoke(target.get, ZERO_ITEM_OBJECT_ARRAY: _*) - } catch { case e: InvocationTargetException => throw e.getCause } - } - - override def postRestart(reason: Throwable) { - try { - - if (postRestart.isDefined) { - postRestart.get.invoke(target.get, ZERO_ITEM_OBJECT_ARRAY: _*) - } - } catch { case e: InvocationTargetException => throw e.getCause } - } - - override def init = { - // Get the crashed dispatcher from thread local and intitialize this actor with the - // contents of the old dispatcher - val oldActor = crashedActorTl.get(); - if(oldActor != null) { - initialize(oldActor.targetClass,oldActor.target.get,oldActor.context) - crashedActorTl.set(null) - } - } - - override def shutdown = { - try { - if (zhutdown.isDefined) { - zhutdown.get.invoke(target.get, ZERO_ITEM_OBJECT_ARRAY: _*) - } - } catch { - case e: InvocationTargetException => throw e.getCause - } finally { - AspectInitRegistry.unregister(target.get); - } - } - - override def initTransactionalState = { - try { - if (initTxState.isDefined && target.isDefined) initTxState.get.invoke(target.get, ZERO_ITEM_OBJECT_ARRAY: _*) - } catch { case e: InvocationTargetException => throw e.getCause } - } - - - - private def serializeArguments(joinPoint: JoinPoint) = { - val args = joinPoint.getRtti.asInstanceOf[MethodRtti].getParameterValues - var unserializable = false - var hasMutableArgument = false - for (arg <- args.toList) { - if (!arg.isInstanceOf[String] && - !arg.isInstanceOf[Byte] && - !arg.isInstanceOf[Int] && - !arg.isInstanceOf[Long] && - !arg.isInstanceOf[Float] && - !arg.isInstanceOf[Double] && - !arg.isInstanceOf[Boolean] && - !arg.isInstanceOf[Char] && - !arg.isInstanceOf[java.lang.Byte] && - !arg.isInstanceOf[java.lang.Integer] && - !arg.isInstanceOf[java.lang.Long] && - !arg.isInstanceOf[java.lang.Float] && - !arg.isInstanceOf[java.lang.Double] && - !arg.isInstanceOf[java.lang.Boolean] && - 
!arg.isInstanceOf[java.lang.Character]) { - hasMutableArgument = true - } - if (arg.getClass.getName.contains(ActiveObject.AW_PROXY_PREFIX)) unserializable = true - } - if (!unserializable && hasMutableArgument) { - val copyOfArgs = Serializer.Java.deepClone(args) - joinPoint.getRtti.asInstanceOf[MethodRtti].setParameterValues(copyOfArgs.asInstanceOf[Array[AnyRef]]) - } - } -} diff --git a/akka-core/src/main/scala/actor/Actor.scala b/akka-core/src/main/scala/actor/Actor.scala index e1227168b2..5b736abb70 100644 --- a/akka-core/src/main/scala/actor/Actor.scala +++ b/akka-core/src/main/scala/actor/Actor.scala @@ -9,10 +9,12 @@ import se.scalablesolutions.akka.config.Config._ import se.scalablesolutions.akka.config.ScalaConfig._ import se.scalablesolutions.akka.serialization.Serializer import se.scalablesolutions.akka.util.Helpers.{narrow, narrowSilently} -import se.scalablesolutions.akka.util.Logging +import se.scalablesolutions.akka.util.{Logging, Duration} import com.google.protobuf.Message + import java.util.concurrent.TimeUnit +import java.net.InetSocketAddress /** * Implements the Transactor abstraction. E.g. a transactional actor. 
@@ -32,8 +34,9 @@ trait Transactor extends Actor { * * @author Jonas Bonér */ -abstract class RemoteActor(hostname: String, port: Int) extends Actor { - self.makeRemote(hostname, port) +abstract class RemoteActor(address: InetSocketAddress) extends Actor { + def this(hostname: String, port: Int) = this(new InetSocketAddress(hostname, port)) + self.makeRemote(address) } /** @@ -46,14 +49,16 @@ case class Exit(dead: ActorRef, killer: Throwable) extends LifeCycleMessage case class Link(child: ActorRef) extends LifeCycleMessage case class Unlink(child: ActorRef) extends LifeCycleMessage case class UnlinkAndStop(child: ActorRef) extends LifeCycleMessage -case object Kill extends LifeCycleMessage case object ReceiveTimeout extends LifeCycleMessage +case class MaximumNumberOfRestartsWithinTimeRangeReached( + victim: ActorRef, maxNrOfRetries: Int, withinTimeRange: Int, lastExceptionCausingRestart: Throwable) extends LifeCycleMessage // Exceptions for Actors class ActorStartException private[akka](message: String) extends RuntimeException(message) class IllegalActorStateException private[akka](message: String) extends RuntimeException(message) class ActorKilledException private[akka](message: String) extends RuntimeException(message) class ActorInitializationException private[akka](message: String) extends RuntimeException(message) +class ActorTimeoutException private[akka](message: String) extends RuntimeException(message) /** * Actor factory module with factory methods for creating various kinds of Actors. 
@@ -61,7 +66,7 @@ class ActorInitializationException private[akka](message: String) extends Runtim * @author Jonas Bonér */ object Actor extends Logging { - val TIMEOUT = config.getInt("akka.actor.timeout", 5000) + val TIMEOUT = Duration(config.getInt("akka.actor.timeout", 5), TIME_UNIT).toMillis val SERIALIZE_MESSAGES = config.getBool("akka.actor.serialize-messages", false) /** @@ -71,9 +76,9 @@ object Actor extends Logging { type Receive = PartialFunction[Any, Unit] private[actor] val actorRefInCreation = new scala.util.DynamicVariable[Option[ActorRef]](None) - + /** - * Creates a Actor.actorOf out of the Actor with type T. + * Creates an ActorRef out of the Actor with type T. *
    *   import Actor._
    *   val actor = actorOf[MyActor]
@@ -89,7 +94,7 @@ object Actor extends Logging {
   def actorOf[T <: Actor : Manifest]: ActorRef = new LocalActorRef(manifest[T].erasure.asInstanceOf[Class[_ <: Actor]])
 
   /**
-   * Creates a Actor.actorOf out of the Actor. Allows you to pass in a factory function
+   * Creates an ActorRef out of the Actor. Allows you to pass in a factory function
    * that creates the Actor. Please note that this function can be invoked multiple
    * times if for example the Actor is supervised and needs to be restarted.
    * 

@@ -284,6 +289,7 @@ object Actor extends Logging { * @author Jonas Bonér */ trait Actor extends Logging { + /** * Type alias because traits cannot have companion objects. */ @@ -300,12 +306,12 @@ trait Actor extends Logging { Actor.actorRefInCreation.value = None if (ref.isEmpty) throw new ActorInitializationException( "ActorRef for instance of actor [" + getClass.getName + "] is not in scope." + - "\n\tYou can not create an instance of an actor explicitly using 'new MyActor'." + - "\n\tYou have to use one of the factory methods in the 'Actor' object to create a new actor." + - "\n\tEither use:" + - "\n\t\t'val actor = Actor.actorOf[MyActor]', or" + - "\n\t\t'val actor = Actor.actorOf(new MyActor(..))', or" + - "\n\t\t'val actor = Actor.actor { case msg => .. } }'") + "\n\tYou can not create an instance of an actor explicitly using 'new MyActor'." + + "\n\tYou have to use one of the factory methods in the 'Actor' object to create a new actor." + + "\n\tEither use:" + + "\n\t\t'val actor = Actor.actorOf[MyActor]', or" + + "\n\t\t'val actor = Actor.actorOf(new MyActor(..))', or" + + "\n\t\t'val actor = Actor.actor { case msg => .. } }'") else ref } @@ -413,6 +419,14 @@ trait Actor extends Logging { */ def isDefinedAt(message: Any): Boolean = base.isDefinedAt(message) + /** One of the fundamental methods of the ActorsModel + * Actor assumes a new behavior + */ + def become(behavior: Option[Receive]) { + self.hotswap = behavior + self.checkReceiveTimeout // FIXME : how to reschedule receivetimeout on hotswap? + } + // ========================================= // ==== INTERNAL IMPLEMENTATION DETAILS ==== // ========================================= @@ -425,13 +439,12 @@ trait Actor extends Logging { } private val lifeCycles: Receive = { - case HotSwap(code) => self.hotswap = code; self.checkReceiveTimeout // FIXME : how to reschedule receivetimeout on hotswap? 
- case Restart(reason) => self.restart(reason) - case Exit(dead, reason) => self.handleTrapExit(dead, reason) - case Link(child) => self.link(child) - case Unlink(child) => self.unlink(child) + case HotSwap(code) => become(code) + case Exit(dead, reason) => self.handleTrapExit(dead, reason) + case Link(child) => self.link(child) + case Unlink(child) => self.unlink(child) case UnlinkAndStop(child) => self.unlink(child); child.stop - case Kill => throw new ActorKilledException("Actor [" + toString + "] was killed by a Kill message") + case Restart(reason) => throw reason } } diff --git a/akka-core/src/main/scala/actor/ActorRef.scala b/akka-core/src/main/scala/actor/ActorRef.scala index 2472ea924d..2fd205edeb 100644 --- a/akka-core/src/main/scala/actor/ActorRef.scala +++ b/akka-core/src/main/scala/actor/ActorRef.scala @@ -10,26 +10,27 @@ import se.scalablesolutions.akka.config.{AllForOneStrategy, OneForOneStrategy, F import se.scalablesolutions.akka.config.ScalaConfig._ import se.scalablesolutions.akka.stm.global._ import se.scalablesolutions.akka.stm.TransactionManagement._ -import se.scalablesolutions.akka.stm.TransactionManagement +import se.scalablesolutions.akka.stm.{TransactionManagement, TransactionSetAbortedException} import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ import se.scalablesolutions.akka.remote.{RemoteNode, RemoteServer, RemoteClient, MessageSerializer, RemoteRequestProtocolIdFactory} import se.scalablesolutions.akka.serialization.Serializer import se.scalablesolutions.akka.util.{HashCode, Logging, UUID, ReentrantGuard} +import RemoteActorSerialization._ import org.multiverse.api.ThreadLocalTransaction._ import org.multiverse.commitbarriers.CountDownCommitBarrier - -import jsr166x.{Deque, ConcurrentLinkedDeque} +import org.multiverse.api.exceptions.DeadTransactionException import java.net.InetSocketAddress import java.util.concurrent.locks.ReentrantLock import java.util.concurrent.atomic.AtomicReference +import 
java.util.concurrent.{ConcurrentHashMap, TimeUnit} import java.util.{Map => JMap} import java.lang.reflect.Field -import RemoteActorSerialization._ + +import jsr166x.{Deque, ConcurrentLinkedDeque} import com.google.protobuf.ByteString -import java.util.concurrent.{ConcurrentHashMap, TimeUnit} /** * ActorRef is an immutable and serializable handle to an Actor. @@ -63,7 +64,7 @@ import java.util.concurrent.{ConcurrentHashMap, TimeUnit} * * @author Jonas Bonér */ -trait ActorRef extends TransactionManagement { +trait ActorRef extends TransactionManagement with java.lang.Comparable[ActorRef] { // Only mutable for RemoteServer in order to maintain identity across nodes @volatile protected[akka] var _uuid = UUID.newUuid.toString @@ -71,9 +72,7 @@ trait ActorRef extends TransactionManagement { @volatile protected[this] var _isShutDown = false @volatile protected[akka] var _isBeingRestarted = false @volatile protected[akka] var _homeAddress = new InetSocketAddress(RemoteServer.HOSTNAME, RemoteServer.PORT) - @volatile protected[akka] var _timeoutActor: Option[ActorRef] = None - @volatile protected[akka] var startOnCreation = false @volatile protected[akka] var registeredInRemoteNodeDuringSerialization = false protected[this] val guard = new ReentrantGuard @@ -99,12 +98,12 @@ trait ActorRef extends TransactionManagement { @volatile var timeout: Long = Actor.TIMEOUT /** - * User overridable callback/setting. - *

- * Defines the default timeout for an initial receive invocation. - * When specified, the receive function should be able to handle a 'ReceiveTimeout' message. - */ - @volatile var receiveTimeout: Option[Long] = None + * User overridable callback/setting. + *

+ * Defines the default timeout for an initial receive invocation. + * When specified, the receive function should be able to handle a 'ReceiveTimeout' message. + */ + @volatile var receiveTimeout: Option[Long] = None /** * User overridable callback/setting. @@ -166,12 +165,12 @@ trait ActorRef extends TransactionManagement { * The default is also that all actors that are created and spawned from within this actor * is sharing the same dispatcher as its creator. */ - private[akka] var _dispatcher: MessageDispatcher = Dispatchers.globalExecutorBasedEventDrivenDispatcher + @volatile private[akka] var _dispatcher: MessageDispatcher = Dispatchers.globalExecutorBasedEventDrivenDispatcher /** * Holds the hot swapped partial function. */ - protected[akka] var hotswap: Option[PartialFunction[Any, Unit]] = None // FIXME: _hotswap should be a stack + @volatile protected[akka] var hotswap: Option[PartialFunction[Any, Unit]] = None // FIXME: _hotswap should be a stack /** * User overridable callback/setting. @@ -184,12 +183,12 @@ trait ActorRef extends TransactionManagement { /** * Configuration for TransactionFactory. User overridable. */ - protected[akka] var _transactionConfig: TransactionConfig = DefaultGlobalTransactionConfig + @volatile protected[akka] var _transactionConfig: TransactionConfig = DefaultGlobalTransactionConfig /** * TransactionFactory to be used for atomic when isTransactor. Configuration is overridable. 
*/ - private[akka] var _transactionFactory: Option[TransactionFactory] = None + @volatile private[akka] var _transactionFactory: Option[TransactionFactory] = None /** * This lock ensures thread safety in the dispatching: only one message can @@ -204,6 +203,10 @@ trait ActorRef extends TransactionManagement { protected[akka] def currentMessage_=(msg: Option[MessageInvocation]) = guard.withGuard { _currentMessage = msg } protected[akka] def currentMessage = guard.withGuard { _currentMessage } + + /** comparison only takes uuid into account + */ + def compareTo(other: ActorRef) = this.uuid.compareTo(other.uuid) /** * Returns the uuid for the actor. @@ -215,12 +218,10 @@ trait ActorRef extends TransactionManagement { * Is defined if the message was sent from another Actor, else None. */ def sender: Option[ActorRef] = { - //Five lines of map-performance-avoidance, could be just: currentMessage map { _.sender } + // Five lines of map-performance-avoidance, could be just: currentMessage map { _.sender } val msg = currentMessage - if(msg.isEmpty) - None - else - msg.get.sender + if(msg.isEmpty) None + else msg.get.sender } /** @@ -228,12 +229,10 @@ trait ActorRef extends TransactionManagement { * Is defined if the message was sent with sent with '!!' or '!!!', else None. 
*/ def senderFuture: Option[CompletableFuture[Any]] = { - //Five lines of map-performance-avoidance, could be just: currentMessage map { _.senderFuture } + // Five lines of map-performance-avoidance, could be just: currentMessage map { _.senderFuture } val msg = currentMessage - if(msg.isEmpty) - None - else - msg.get.senderFuture + if(msg.isEmpty) None + else msg.get.senderFuture } /** @@ -296,15 +295,15 @@ trait ActorRef extends TransactionManagement { def !!(message: Any, timeout: Long = this.timeout)(implicit sender: Option[ActorRef] = None): Option[Any] = { if (isRunning) { val future = postMessageToMailboxAndCreateFutureResultWithTimeout[Any](message, timeout, sender, None) - val isActiveObject = message.isInstanceOf[Invocation] - if (isActiveObject && message.asInstanceOf[Invocation].isVoid) { + val isTypedActor = message.isInstanceOf[Invocation] + if (isTypedActor && message.asInstanceOf[Invocation].isVoid) { future.asInstanceOf[CompletableFuture[Option[_]]].completeWithResult(None) } try { future.await } catch { case e: FutureTimeoutException => - if (isActiveObject) throw e + if (isTypedActor) throw e else None } if (future.exception.isDefined) throw future.exception.get._2 @@ -352,7 +351,7 @@ trait ActorRef extends TransactionManagement { "\n\tNo sender in scope, can't reply. " + "\n\tYou have probably: " + "\n\t\t1. Sent a message to an Actor from an instance that is NOT an Actor." + - "\n\t\t2. Invoked a method on an Active Object from an instance NOT an Active Object." + + "\n\t\t2. Invoked a method on an TypedActor from an instance NOT an TypedActor." + "\n\tElse you might want to use 'reply_?' which returns Boolean(true) if succes and Boolean(false) if no sender in scope") /** @@ -421,13 +420,13 @@ trait ActorRef extends TransactionManagement { * Returns the home address and port for this actor. */ def homeAddress: InetSocketAddress = _homeAddress - + /** * Set the home address and port for this actor. 
*/ def homeAddress_=(hostnameAndPort: Tuple2[String, Int]): Unit = homeAddress_=(new InetSocketAddress(hostnameAndPort._1, hostnameAndPort._2)) - + /** * Set the home address and port for this actor. */ @@ -442,7 +441,7 @@ trait ActorRef extends TransactionManagement { /** * Starts up the actor and its message queue. */ - def start: ActorRef + def start(): ActorRef /** * Shuts down the actor its dispatcher and message queue. @@ -462,64 +461,48 @@ trait ActorRef extends TransactionManagement { * If the 'trapExit' member field has been set to at contain at least one exception class then it will * 'trap' these exceptions and automatically restart the linked actors according to the restart strategy * defined by the 'faultHandler'. - *

- * To be invoked from within the actor itself. */ def link(actorRef: ActorRef): Unit /** * Unlink the actor. - *

- * To be invoked from within the actor itself. */ def unlink(actorRef: ActorRef): Unit /** * Atomically start and link an actor. - *

- * To be invoked from within the actor itself. */ def startLink(actorRef: ActorRef): Unit /** * Atomically start, link and make an actor remote. - *

- * To be invoked from within the actor itself. */ def startLinkRemote(actorRef: ActorRef, hostname: String, port: Int): Unit /** * Atomically create (from actor class) and start an actor. - *

- * To be invoked from within the actor itself. */ def spawn[T <: Actor : Manifest]: ActorRef /** * Atomically create (from actor class), start and make an actor remote. - *

- * To be invoked from within the actor itself. */ def spawnRemote[T <: Actor: Manifest](hostname: String, port: Int): ActorRef /** * Atomically create (from actor class), start and link an actor. - *

- * To be invoked from within the actor itself. */ def spawnLink[T <: Actor: Manifest]: ActorRef /** * Atomically create (from actor class), start, link and make an actor remote. - *

- * To be invoked from within the actor itself. */ def spawnLinkRemote[T <: Actor : Manifest](hostname: String, port: Int): ActorRef /** * Returns the mailbox size. */ - def mailboxSize: Int + def mailboxSize = dispatcher.mailboxSize(this) /** * Returns the supervisor, if there is one. @@ -547,13 +530,14 @@ trait ActorRef extends TransactionManagement { protected[akka] def supervisor_=(sup: Option[ActorRef]): Unit - protected[akka] def mailbox: Deque[MessageInvocation] - - protected[akka] def restart(reason: Throwable): Unit + protected[akka] def mailbox: AnyRef + protected[akka] def mailbox_=(value: AnyRef): AnyRef protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable): Unit - protected[akka] def restartLinkedActors(reason: Throwable): Unit + protected[akka] def restart(reason: Throwable, maxNrOfRetries: Int, withinTimeRange: Int): Unit + + protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Int, withinTimeRange: Int): Unit protected[akka] def registerSupervisorAsRemoteActor: Option[String] @@ -571,23 +555,19 @@ trait ActorRef extends TransactionManagement { override def toString = "Actor[" + id + ":" + uuid + "]" - protected[akka] def cancelReceiveTimeout = { - _timeoutActor.foreach { - x => - if (x.isRunning) Scheduler.unschedule(x) - _timeoutActor = None - log.debug("Timeout canceled for %s", this) - } - } - - protected [akka] def checkReceiveTimeout = { + protected[akka] def checkReceiveTimeout = { cancelReceiveTimeout - receiveTimeout.foreach { timeout => + receiveTimeout.foreach { time => log.debug("Scheduling timeout for %s", this) - _timeoutActor = Some(Scheduler.scheduleOnce(this, ReceiveTimeout, timeout, TimeUnit.MILLISECONDS)) + _timeoutActor = Some(Scheduler.scheduleOnce(this, ReceiveTimeout, time, TimeUnit.MILLISECONDS)) } } + protected[akka] def cancelReceiveTimeout = _timeoutActor.foreach { timeoutActor => + if (timeoutActor.isRunning) Scheduler.unschedule(timeoutActor) + _timeoutActor = None + 
log.debug("Timeout canceled for %s", this) + } } /** @@ -595,12 +575,28 @@ trait ActorRef extends TransactionManagement { * * @author Jonas Bonér */ -sealed class LocalActorRef private[akka]( +class LocalActorRef private[akka]( private[this] var actorFactory: Either[Option[Class[_ <: Actor]], Option[() => Actor]] = Left(None)) extends ActorRef { - private var isDeserialized = false - private var loader: Option[ClassLoader] = None + @volatile private[akka] var _remoteAddress: Option[InetSocketAddress] = None // only mutable to maintain identity across nodes + @volatile private[akka] var _linkedActors: Option[ConcurrentHashMap[String, ActorRef]] = None + @volatile private[akka] var _supervisor: Option[ActorRef] = None + @volatile private var isInInitialization = false + @volatile private var runActorInitialization = false + @volatile private var isDeserialized = false + @volatile private var loader: Option[ClassLoader] = None + @volatile private var maxNrOfRetriesCount: Int = 0 + @volatile private var restartsWithinTimeRangeTimestamp: Long = 0L + @volatile private var _mailbox: AnyRef = _ + + protected[this] val actorInstance = guard.withGuard { new AtomicReference[Actor](newActor) } + + // Needed to be able to null out the 'val self: ActorRef' member variables to make the Actor + // instance elegible for garbage collection + private val actorSelfFields = findActorSelfField(actor.getClass) + + if (runActorInitialization && !isDeserialized) initializeActorInstance private[akka] def this(clazz: Class[_ <: Actor]) = this(Left(Some(clazz))) private[akka] def this(factory: () => Actor) = this(Right(Some(factory))) @@ -614,6 +610,7 @@ sealed class LocalActorRef private[akka]( __port: Int, __isTransactor: Boolean, __timeout: Long, + __receiveTimeout: Option[Long], __lifeCycle: Option[LifeCycle], __supervisor: Option[ActorRef], __hotswap: Option[PartialFunction[Any, Unit]], @@ -635,6 +632,7 @@ sealed class LocalActorRef private[akka]( homeAddress = (__hostname, __port) 
isTransactor = __isTransactor timeout = __timeout + receiveTimeout = __receiveTimeout lifeCycle = __lifeCycle _supervisor = __supervisor hotswap = __hotswap @@ -643,30 +641,11 @@ sealed class LocalActorRef private[akka]( actorSelfFields._3.set(actor, Some(this)) start __messages.foreach(message => this ! MessageSerializer.deserialize(message.getMessage)) + checkReceiveTimeout ActorRegistry.register(this) } - // Only mutable for RemoteServer in order to maintain identity across nodes - @volatile private[akka] var _remoteAddress: Option[InetSocketAddress] = None - @volatile private[akka] var _linkedActors: Option[ConcurrentHashMap[String, ActorRef]] = None - @volatile private[akka] var _supervisor: Option[ActorRef] = None - - protected[akka] val _mailbox: Deque[MessageInvocation] = new ConcurrentLinkedDeque[MessageInvocation] - protected[this] val actorInstance = guard.withGuard { new AtomicReference[Actor](newActor) } - - @volatile private var isInInitialization = false - @volatile private var runActorInitialization = false - - // Needed to be able to null out the 'val self: ActorRef' member variables to make the Actor - // instance elegible for garbage collection - private val actorSelfFields = findActorSelfField(actor.getClass) - - if (runActorInitialization && !isDeserialized) initializeActorInstance - - /** - * Returns the mailbox. - */ - def mailbox: Deque[MessageInvocation] = _mailbox + // ========= PUBLIC FUNCTIONS ========= /** * Returns the class for the Actor instance that is managed by the ActorRef. @@ -681,7 +660,7 @@ sealed class LocalActorRef private[akka]( /** * Sets the dispatcher for this actor. Needs to be invoked before the actor is started. 
*/ - def dispatcher_=(md: MessageDispatcher): Unit = guard.withGuard { + def dispatcher_=(md: MessageDispatcher): Unit = { if (!isRunning || isBeingRestarted) _dispatcher = md else throw new ActorInitializationException( "Can not swap dispatcher for " + toString + " after it has been started") @@ -690,7 +669,7 @@ sealed class LocalActorRef private[akka]( /** * Get the dispatcher for this actor. */ - def dispatcher: MessageDispatcher = guard.withGuard { _dispatcher } + def dispatcher: MessageDispatcher = _dispatcher /** * Invoking 'makeRemote' means that an actor will be moved to and invoked on a remote host. @@ -734,19 +713,19 @@ sealed class LocalActorRef private[akka]( /** * Get the transaction configuration for this actor. */ - def transactionConfig: TransactionConfig = guard.withGuard { _transactionConfig } + def transactionConfig: TransactionConfig = _transactionConfig /** * Set the contact address for this actor. This is used for replying to messages * sent asynchronously when no reply channel exists. */ - def homeAddress_=(address: InetSocketAddress): Unit = guard.withGuard { _homeAddress = address } + def homeAddress_=(address: InetSocketAddress): Unit = _homeAddress = address /** * Returns the remote address for the actor, if any, else None. */ - def remoteAddress: Option[InetSocketAddress] = guard.withGuard { _remoteAddress } - protected[akka] def remoteAddress_=(addr: Option[InetSocketAddress]): Unit = guard.withGuard { _remoteAddress = addr } + def remoteAddress: Option[InetSocketAddress] = _remoteAddress + protected[akka] def remoteAddress_=(addr: Option[InetSocketAddress]): Unit = _remoteAddress = addr /** * Starts up the actor and its message queue. 
@@ -783,7 +762,7 @@ sealed class LocalActorRef private[akka]( address.getHostName, address.getPort, uuid)) RemoteNode.unregister(this) nullOutActorRefReferencesFor(actorInstance.get) - } else if (isBeingRestarted) throw new ActorKilledException("Actor [" + toString + "] is being restarted.") + } //else if (isBeingRestarted) throw new ActorKilledException("Actor [" + toString + "] is being restarted.") } /** @@ -898,19 +877,16 @@ sealed class LocalActorRef private[akka]( } /** - * Returns the mailbox size. + * Returns the mailbox. */ - def mailboxSize: Int = _mailbox.size + def mailbox: AnyRef = _mailbox - /** - * Returns a copy of all the messages, put into a List[MessageInvocation]. - */ - def messagesInMailbox: List[MessageInvocation] = _mailbox.toArray.toList.asInstanceOf[List[MessageInvocation]] + protected[akka] def mailbox_=(value: AnyRef):AnyRef = { _mailbox = value; value } /** * Shuts down and removes all linked actors. */ - def shutdownLinkedActors(): Unit = guard.withGuard { + def shutdownLinkedActors(): Unit = { linkedActorsAsList.foreach(_.stop) linkedActors.clear } @@ -918,9 +894,173 @@ sealed class LocalActorRef private[akka]( /** * Returns the supervisor, if there is one. 
*/ - def supervisor: Option[ActorRef] = guard.withGuard { _supervisor } + def supervisor: Option[ActorRef] = _supervisor - protected[akka] def supervisor_=(sup: Option[ActorRef]): Unit = guard.withGuard { _supervisor = sup } + // ========= AKKA PROTECTED FUNCTIONS ========= + + protected[akka] def supervisor_=(sup: Option[ActorRef]): Unit = _supervisor = sup + + protected[akka] def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit = { + joinTransaction(message) + + if (remoteAddress.isDefined) { + RemoteClient.clientFor(remoteAddress.get).send[Any]( + createRemoteRequestProtocolBuilder(this, message, true, senderOption).build, None) + } else { + val invocation = new MessageInvocation(this, message, senderOption, None, transactionSet.get) + invocation.send + } + } + + protected[akka] def postMessageToMailboxAndCreateFutureResultWithTimeout[T]( + message: Any, + timeout: Long, + senderOption: Option[ActorRef], + senderFuture: Option[CompletableFuture[T]]): CompletableFuture[T] = { + joinTransaction(message) + + if (remoteAddress.isDefined) { + val future = RemoteClient.clientFor(remoteAddress.get).send( + createRemoteRequestProtocolBuilder(this, message, false, senderOption).build, senderFuture) + if (future.isDefined) future.get + else throw new IllegalActorStateException("Expected a future from remote call to actor " + toString) + } else { + val future = if (senderFuture.isDefined) senderFuture.get + else new DefaultCompletableFuture[T](timeout) + val invocation = new MessageInvocation( + this, message, senderOption, Some(future.asInstanceOf[CompletableFuture[Any]]), transactionSet.get) + invocation.send + future + } + } + + /** + * Callback for the dispatcher. This is the ingle entry point to the user Actor implementation. 
+ */ + protected[akka] def invoke(messageHandle: MessageInvocation): Unit = guard.withGuard { + if (isShutdown) + Actor.log.warning("Actor [%s] is shut down,\n\tignoring message [%s]", toString, messageHandle) + else { + currentMessage = Option(messageHandle) + try { + dispatch(messageHandle) + } catch { + case e => + Actor.log.error(e, "Could not invoke actor [%s]", this) + throw e + } finally { + currentMessage = None //TODO: Don't reset this, we might want to resend the message + } + } + } + + protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable): Unit = { + if (trapExit.exists(_.isAssignableFrom(reason.getClass))) { + faultHandler match { + case Some(AllForOneStrategy(maxNrOfRetries, withinTimeRange)) => + restartLinkedActors(reason, maxNrOfRetries, withinTimeRange) + + case Some(OneForOneStrategy(maxNrOfRetries, withinTimeRange)) => + dead.restart(reason, maxNrOfRetries, withinTimeRange) + + case None => throw new IllegalActorStateException( + "No 'faultHandler' defined for an actor with the 'trapExit' member field defined " + + "\n\tto non-empty list of exception classes - can't proceed " + toString) + } + } else { + notifySupervisorWithMessage(Exit(this, reason)) // if 'trapExit' is not defined then pass the Exit on + } + } + + protected[akka] def restart(reason: Throwable, maxNrOfRetries: Int, withinTimeRange: Int): Unit = { + if (maxNrOfRetriesCount == 0) restartsWithinTimeRangeTimestamp = System.currentTimeMillis // first time around + maxNrOfRetriesCount += 1 + + val tooManyRestarts = maxNrOfRetriesCount > maxNrOfRetries + val restartingHasExpired = (System.currentTimeMillis - restartsWithinTimeRangeTimestamp) > withinTimeRange + if (tooManyRestarts || restartingHasExpired) { + val notification = MaximumNumberOfRestartsWithinTimeRangeReached(this, maxNrOfRetries, withinTimeRange, reason) + Actor.log.warning( + "Maximum number of restarts [%s] within time range [%s] reached." + + "\n\tWill *not* restart actor [%s] anymore." 
+ + "\n\tLast exception causing restart was" + + "\n\t[%s].", + maxNrOfRetries, withinTimeRange, this, reason) + _supervisor.foreach { sup => + // can supervisor handle the notification? + if (sup.isDefinedAt(notification)) notifySupervisorWithMessage(notification) + else Actor.log.warning( + "No message handler defined for system message [MaximumNumberOfRestartsWithinTimeRangeReached]" + + "\n\tCan't send the message to the supervisor [%s].", sup) + } + stop + } else { + _isBeingRestarted = true + val failedActor = actorInstance.get + guard.withGuard { + lifeCycle match { + case Some(LifeCycle(Temporary)) => shutDownTemporaryActor(this) + case _ => + // either permanent or none where default is permanent + Actor.log.info("Restarting actor [%s] configured as PERMANENT.", id) + Actor.log.debug("Restarting linked actors for actor [%s].", id) + restartLinkedActors(reason, maxNrOfRetries, withinTimeRange) + Actor.log.debug("Invoking 'preRestart' for failed actor instance [%s].", id) + if (isTypedActorDispatcher(failedActor)) restartTypedActorDispatcher(failedActor, reason) + else restartActor(failedActor, reason) + _isBeingRestarted = false + } + } + } + } + + protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Int, withinTimeRange: Int) = { + linkedActorsAsList.foreach { actorRef => + actorRef.lifeCycle match { + // either permanent or none where default is permanent + case Some(LifeCycle(Temporary)) => shutDownTemporaryActor(actorRef) + case _ => actorRef.restart(reason, maxNrOfRetries, withinTimeRange) + } + } + } + + protected[akka] def registerSupervisorAsRemoteActor: Option[String] = guard.withGuard { + if (_supervisor.isDefined) { + RemoteClient.clientFor(remoteAddress.get).registerSupervisorForActor(this) + Some(_supervisor.get.uuid) + } else None + } + + protected[akka] def linkedActors: JMap[String, ActorRef] = guard.withGuard { + if (_linkedActors.isEmpty) { + val actors = new ConcurrentHashMap[String, ActorRef] + _linkedActors = 
Some(actors) + actors + } else _linkedActors.get + } + + protected[akka] def linkedActorsAsList: List[ActorRef] = + linkedActors.values.toArray.toList.asInstanceOf[List[ActorRef]] + + // ========= PRIVATE FUNCTIONS ========= + + private def isTypedActorDispatcher(a: Actor): Boolean = a.isInstanceOf[Dispatcher] + + private def restartTypedActorDispatcher(failedActor: Actor, reason: Throwable) = { + failedActor.preRestart(reason) + failedActor.postRestart(reason) + } + + private def restartActor(failedActor: Actor, reason: Throwable) = { + failedActor.preRestart(reason) + nullOutActorRefReferencesFor(failedActor) + val freshActor = newActor + freshActor.init + freshActor.initTransactionalState + actorInstance.set(freshActor) + Actor.log.debug("Invoking 'postRestart' for new actor instance [%s].", id) + freshActor.postRestart(reason) + } private def spawnButDoNotStart[T <: Actor: Manifest]: ActorRef = guard.withGuard { val actorRef = Actor.actorOf(manifest[T].erasure.asInstanceOf[Class[T]].newInstance) @@ -954,74 +1094,23 @@ sealed class LocalActorRef private[akka]( actor } - protected[akka] def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit = { - joinTransaction(message) - - if (remoteAddress.isDefined) { - RemoteClient.clientFor(remoteAddress.get).send[Any]( - createRemoteRequestProtocolBuilder(this, message, true, senderOption).build, None) - } else { - val invocation = new MessageInvocation(this, message, senderOption, None, transactionSet.get) - if (dispatcher.usesActorMailbox) { - _mailbox.add(invocation) - invocation.send - } else invocation.send - } - } - - protected[akka] def postMessageToMailboxAndCreateFutureResultWithTimeout[T]( - message: Any, - timeout: Long, - senderOption: Option[ActorRef], - senderFuture: Option[CompletableFuture[T]]): CompletableFuture[T] = { - joinTransaction(message) - - if (remoteAddress.isDefined) { - val future = RemoteClient.clientFor(remoteAddress.get).send( - createRemoteRequestProtocolBuilder(this, 
message, false, senderOption).build, senderFuture) - if (future.isDefined) future.get - else throw new IllegalActorStateException("Expected a future from remote call to actor " + toString) - } else { - val future = if (senderFuture.isDefined) senderFuture.get - else new DefaultCompletableFuture[T](timeout) - val invocation = new MessageInvocation( - this, message, senderOption, Some(future.asInstanceOf[CompletableFuture[Any]]), transactionSet.get) - if (dispatcher.usesActorMailbox) _mailbox.add(invocation) - invocation.send - future - } - } - private def joinTransaction(message: Any) = if (isTransactionSetInScope) { import org.multiverse.api.ThreadLocalTransaction - val txSet = getTransactionSetInScope - Actor.log.trace("Joining transaction set [%s];\n\tactor %s\n\twith message [%s]", txSet, toString, message) // FIXME test to run bench without this trace call + val oldTxSet = getTransactionSetInScope + val currentTxSet = if (oldTxSet.isAborted || oldTxSet.isCommitted) { + clearTransactionSet + createNewTransactionSet + } else oldTxSet + Actor.log.ifTrace("Joining transaction set [" + currentTxSet + + "];\n\tactor " + toString + + "\n\twith message [" + message + "]") val mtx = ThreadLocalTransaction.getThreadLocalTransaction - if ((mtx eq null) || mtx.getStatus.isDead) txSet.incParties - else txSet.incParties(mtx, 1) - } - - /** - * Callback for the dispatcher. This is the ingle entry point to the user Actor implementation. 
- */ - protected[akka] def invoke(messageHandle: MessageInvocation): Unit = actor.synchronized { - if (isShutdown) { - Actor.log.warning("Actor [%s] is shut down, ignoring message [%s]", toString, messageHandle) - return - } - currentMessage = Option(messageHandle) - try { - dispatch(messageHandle) - } catch { - case e => - Actor.log.error(e, "Could not invoke actor [%s]", this) - throw e - } finally { - currentMessage = None //TODO: Don't reset this, we might want to resend the message - } + if ((mtx eq null) || mtx.getStatus.isDead) currentTxSet.incParties + else currentTxSet.incParties(mtx, 1) } private def dispatch[T](messageHandle: MessageInvocation) = { + Actor.log.ifTrace("Invoking actor with message:\n" + messageHandle) val message = messageHandle.message //serializeMessage(messageHandle.message) var topLevelTransaction = false val txSet: Option[CountDownCommitBarrier] = @@ -1029,9 +1118,8 @@ sealed class LocalActorRef private[akka]( else { topLevelTransaction = true // FIXME create a new internal atomic block that can wait for X seconds if top level tx if (isTransactor) { - Actor.log.trace( - "Creating a new transaction set (top-level transaction)\n\tfor actor %s\n\twith message %s", - toString, messageHandle) + Actor.log.ifTrace("Creating a new transaction set (top-level transaction)\n\tfor actor " + toString + + "\n\twith message " + messageHandle) Some(createNewTransactionSet) } else None } @@ -1050,93 +1138,18 @@ sealed class LocalActorRef private[akka]( setTransactionSet(txSet) // restore transaction set to allow atomic block to do commit } } catch { - case e => - _isBeingRestarted = true - // abort transaction set - if (isTransactionSetInScope) { - val txSet = getTransactionSetInScope - Actor.log.debug("Aborting transaction set [%s]", txSet) - txSet.abort - } - Actor.log.error(e, "Exception when invoking \n\tactor [%s] \n\twith message [%s]", this, message) - - senderFuture.foreach(_.completeWithException(this, e)) - - clearTransaction - if 
(topLevelTransaction) clearTransactionSet - - // FIXME to fix supervisor restart of remote actor for oneway calls, inject a supervisor proxy that can send notification back to client - if (_supervisor.isDefined) _supervisor.get ! Exit(this, e) + case e: DeadTransactionException => + handleExceptionInDispatch( + new TransactionSetAbortedException("Transaction set has been aborted by another participant"), + message, topLevelTransaction) + case e: InterruptedException => {} // received message while actor is shutting down, ignore + case e => handleExceptionInDispatch(e, message, topLevelTransaction) } finally { clearTransaction if (topLevelTransaction) clearTransactionSet } } - protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable): Unit = { - if (trapExit.exists(_.isAssignableFrom(reason.getClass))) { - faultHandler match { - // FIXME: implement support for maxNrOfRetries and withinTimeRange in RestartStrategy - case Some(AllForOneStrategy(maxNrOfRetries, withinTimeRange)) => - restartLinkedActors(reason) - - case Some(OneForOneStrategy(maxNrOfRetries, withinTimeRange)) => - dead.restart(reason) - - case None => - throw new IllegalActorStateException( - "No 'faultHandler' defined for an actor with the 'trapExit' member field defined " + - "\n\tto non-empty list of exception classes - can't proceed " + toString) - } - } else { - if (lifeCycle.isEmpty) lifeCycle = Some(LifeCycle(Permanent)) // when passing on make sure we have a lifecycle - _supervisor.foreach(_ ! 
Exit(this, reason)) // if 'trapExit' is not defined then pass the Exit on - } - } - - protected[akka] def restart(reason: Throwable): Unit = { - val failedActor = actorInstance.get - failedActor.synchronized { - lifeCycle.get match { - case LifeCycle(scope, _, _) => { - scope match { - case Permanent => - Actor.log.info("Restarting actor [%s] configured as PERMANENT.", id) - restartLinkedActors(reason) - Actor.log.debug("Restarting linked actors for actor [%s].", id) - Actor.log.debug("Invoking 'preRestart' for failed actor instance [%s].", id) - failedActor.preRestart(reason) - nullOutActorRefReferencesFor(failedActor) - val freshActor = newActor - freshActor.synchronized { - freshActor.init - freshActor.initTransactionalState - actorInstance.set(freshActor) - Actor.log.debug("Invoking 'postRestart' for new actor instance [%s].", id) - freshActor.postRestart(reason) - } - _isBeingRestarted = false - case Temporary => shutDownTemporaryActor(this) - } - } - } - } - } - - protected[akka] def restartLinkedActors(reason: Throwable) = guard.withGuard { - linkedActorsAsList.foreach { actorRef => - if (actorRef.lifeCycle.isEmpty) actorRef.lifeCycle = Some(LifeCycle(Permanent)) - actorRef.lifeCycle.get match { - case LifeCycle(scope, _, _) => { - scope match { - case Permanent => actorRef.restart(reason) - case Temporary => shutDownTemporaryActor(actorRef) - } - } - } - } - } - private def shutDownTemporaryActor(temporaryActor: ActorRef) = { Actor.log.info("Actor [%s] configured as TEMPORARY and will not be restarted.", temporaryActor.id) temporaryActor.stop @@ -1147,28 +1160,41 @@ sealed class LocalActorRef private[akka]( "All linked actors have died permanently (they were all configured as TEMPORARY)" + "\n\tshutting down and unlinking supervisor actor as well [%s].", temporaryActor.id) - _supervisor.foreach(_ ! 
UnlinkAndStop(this)) + notifySupervisorWithMessage(UnlinkAndStop(this)) } } - protected[akka] def registerSupervisorAsRemoteActor: Option[String] = guard.withGuard { - if (_supervisor.isDefined) { - RemoteClient.clientFor(remoteAddress.get).registerSupervisorForActor(this) - Some(_supervisor.get.uuid) - } else None + private def handleExceptionInDispatch(reason: Throwable, message: Any, topLevelTransaction: Boolean) = { + Actor.log.error(reason, "Exception when invoking \n\tactor [%s] \n\twith message [%s]", this, message) + + _isBeingRestarted = true + // abort transaction set + if (isTransactionSetInScope) { + val txSet = getTransactionSetInScope + if (!txSet.isCommitted) { + Actor.log.debug("Aborting transaction set [%s]", txSet) + txSet.abort + } + } + + senderFuture.foreach(_.completeWithException(this, reason)) + + clearTransaction + if (topLevelTransaction) clearTransactionSet + + notifySupervisorWithMessage(Exit(this, reason)) } - protected[akka] def linkedActors: JMap[String, ActorRef] = guard.withGuard { - if (_linkedActors.isEmpty) { - val actors = new ConcurrentHashMap[String, ActorRef] - _linkedActors = Some(actors) - actors - } else _linkedActors.get + private def notifySupervisorWithMessage(notification: LifeCycleMessage) = { + // FIXME to fix supervisor restart of remote actor for oneway calls, inject a supervisor proxy that can send notification back to client + _supervisor.foreach { sup => + if (sup.isShutdown) { // if supervisor is shut down, game over for all linked actors +// shutdownLinkedActors +// stop + } else sup ! 
notification // else notify supervisor + } } - protected[akka] def linkedActorsAsList: List[ActorRef] = - linkedActors.values.toArray.toList.asInstanceOf[List[ActorRef]] - private def nullOutActorRefReferencesFor(actor: Actor) = { actorSelfFields._1.set(actor, null) actorSelfFields._2.set(actor, null) @@ -1188,7 +1214,8 @@ sealed class LocalActorRef private[akka]( case e: NoSuchFieldException => val parent = clazz.getSuperclass if (parent != null) findActorSelfField(parent) - else throw new IllegalActorStateException(toString + " is not an Actor since it have not mixed in the 'Actor' trait") + else throw new IllegalActorStateException( + toString + " is not an Actor since it have not mixed in the 'Actor' trait") } } @@ -1293,13 +1320,13 @@ private[akka] case class RemoteActorRef private[akka] ( def spawnRemote[T <: Actor: Manifest](hostname: String, port: Int): ActorRef = unsupported def spawnLink[T <: Actor: Manifest]: ActorRef = unsupported def spawnLinkRemote[T <: Actor : Manifest](hostname: String, port: Int): ActorRef = unsupported - def mailboxSize: Int = unsupported def supervisor: Option[ActorRef] = unsupported def shutdownLinkedActors: Unit = unsupported - protected[akka] def mailbox: Deque[MessageInvocation] = unsupported - protected[akka] def restart(reason: Throwable): Unit = unsupported + protected[akka] def mailbox: AnyRef = unsupported + protected[akka] def mailbox_=(value: AnyRef):AnyRef = unsupported protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable): Unit = unsupported - protected[akka] def restartLinkedActors(reason: Throwable): Unit = unsupported + protected[akka] def restart(reason: Throwable, maxNrOfRetries: Int, withinTimeRange: Int): Unit = unsupported + protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Int, withinTimeRange: Int): Unit = unsupported protected[akka] def linkedActors: JMap[String, ActorRef] = unsupported protected[akka] def linkedActorsAsList: List[ActorRef] = unsupported 
protected[akka] def invoke(messageHandle: MessageInvocation): Unit = unsupported diff --git a/akka-core/src/main/scala/actor/ActorRegistry.scala b/akka-core/src/main/scala/actor/ActorRegistry.scala index c568c8de03..aea37432b7 100644 --- a/akka-core/src/main/scala/actor/ActorRegistry.scala +++ b/akka-core/src/main/scala/actor/ActorRegistry.scala @@ -29,11 +29,6 @@ case class ActorUnregistered(actor: ActorRef) extends ActorRegistryEvent * @author Jonas Bonér */ object ActorRegistry extends ListenerManagement { - - private val refComparator = new java.util.Comparator[ActorRef]{ - def compare(a: ActorRef,b: ActorRef) = a.uuid.compareTo(b.uuid) - } - private val actorsByUUID = new ConcurrentHashMap[String, ActorRef] private val actorsById = new ConcurrentHashMap[String, JSet[ActorRef]] private val actorsByClassName = new ConcurrentHashMap[String, JSet[ActorRef]] @@ -122,16 +117,16 @@ object ActorRegistry extends ListenerManagement { if (id eq null) throw new IllegalActorStateException("Actor.id is null " + actor) if (actorsById.containsKey(id)) actorsById.get(id).add(actor) else { - val set = new ConcurrentSkipListSet[ActorRef](refComparator) + val set = new ConcurrentSkipListSet[ActorRef] set.add(actor) actorsById.put(id, set) } // Class name - val className = actor.actor.getClass.getName + val className = actor.actorClassName if (actorsByClassName.containsKey(className)) actorsByClassName.get(className).add(actor) else { - val set = new ConcurrentSkipListSet[ActorRef](refComparator) + val set = new ConcurrentSkipListSet[ActorRef] set.add(actor) actorsByClassName.put(className, set) } @@ -149,7 +144,7 @@ object ActorRegistry extends ListenerManagement { val id = actor.id if (actorsById.containsKey(id)) actorsById.get(id).remove(actor) - val className = actor.getClass.getName + val className = actor.actorClassName if (actorsByClassName.containsKey(className)) actorsByClassName.get(className).remove(actor) // notify listeners @@ -159,7 +154,7 @@ object ActorRegistry 
extends ListenerManagement { /** * Shuts down and unregisters all actors in the system. */ - def shutdownAll = { + def shutdownAll() { log.info("Shutting down all actors in the system...") foreach(_.stop) actorsByUUID.clear diff --git a/akka-core/src/main/scala/actor/Scheduler.scala b/akka-core/src/main/scala/actor/Scheduler.scala index 6a7187afdc..6f4f099bb2 100644 --- a/akka-core/src/main/scala/actor/Scheduler.scala +++ b/akka-core/src/main/scala/actor/Scheduler.scala @@ -20,7 +20,7 @@ import java.util.concurrent._ import se.scalablesolutions.akka.util.Logging -object Scheduler { +object Scheduler extends Logging { import Actor._ case object UnSchedule @@ -28,8 +28,12 @@ object Scheduler { private var service = Executors.newSingleThreadScheduledExecutor(SchedulerThreadFactory) private val schedulers = new ConcurrentHashMap[ActorRef, ActorRef] + log.info("Starting up Scheduler") def schedule(receiver: ActorRef, message: AnyRef, initialDelay: Long, delay: Long, timeUnit: TimeUnit): ActorRef = { + log.trace( + "Schedule scheduled event\n\tevent = [%s]\n\treceiver = [%s]\n\tinitialDelay = [%s]\n\tdelay = [%s]\n\ttimeUnit = [%s]", + message, receiver, initialDelay, delay, timeUnit) try { val future = service.scheduleAtFixedRate( new Runnable { def run = receiver ! message }, @@ -44,6 +48,9 @@ object Scheduler { } def scheduleOnce(receiver: ActorRef, message: AnyRef, delay: Long, timeUnit: TimeUnit): ActorRef = { + log.trace( + "Schedule one-time event\n\tevent = [%s]\n\treceiver = [%s]\n\tdelay = [%s]\n\ttimeUnit = [%s]", + message, receiver, delay, timeUnit) try { val future = service.schedule( new Runnable { def run = receiver ! message }, delay, timeUnit).asInstanceOf[ScheduledFuture[AnyRef]] @@ -65,6 +72,7 @@ object Scheduler { } def shutdown = { + log.info("Shutting down Scheduler") import scala.collection.JavaConversions._ schedulers.values.foreach(_ ! 
UnSchedule) schedulers.clear @@ -72,14 +80,16 @@ object Scheduler { } def restart = { + log.info("Restarting Scheduler") shutdown service = Executors.newSingleThreadScheduledExecutor(SchedulerThreadFactory) } } -private class ScheduleActor(future: ScheduledFuture[AnyRef]) extends Actor with Logging { +private class ScheduleActor(future: ScheduledFuture[AnyRef]) extends Actor { def receive = { case Scheduler.UnSchedule => + Scheduler.log.trace("Unschedule event handled by scheduleActor\n\tactorRef = [%s]", self.toString) future.cancel(true) self.stop } @@ -91,7 +101,7 @@ private object SchedulerThreadFactory extends ThreadFactory { def newThread(r: Runnable): Thread = { val thread = threadFactory.newThread(r) - thread.setName("Scheduler-" + count) + thread.setName("akka:scheduler-" + count) thread.setDaemon(true) thread } diff --git a/akka-core/src/main/scala/actor/SerializationProtocol.scala b/akka-core/src/main/scala/actor/SerializationProtocol.scala index d549bb8c80..20e9842cba 100644 --- a/akka-core/src/main/scala/actor/SerializationProtocol.scala +++ b/akka-core/src/main/scala/actor/SerializationProtocol.scala @@ -77,42 +77,38 @@ object ActorSerialization { toSerializedActorRefProtocol(a, format).toByteArray } - private def toSerializedActorRefProtocol[T <: Actor](a: ActorRef, format: Format[T]): SerializedActorRefProtocol = { + private def toSerializedActorRefProtocol[T <: Actor](actorRef: ActorRef, format: Format[T]): SerializedActorRefProtocol = { val lifeCycleProtocol: Option[LifeCycleProtocol] = { def setScope(builder: LifeCycleProtocol.Builder, scope: Scope) = scope match { case Permanent => builder.setLifeCycle(LifeCycleType.PERMANENT) case Temporary => builder.setLifeCycle(LifeCycleType.TEMPORARY) } val builder = LifeCycleProtocol.newBuilder - a.lifeCycle match { - case Some(LifeCycle(scope, None, _)) => + actorRef.lifeCycle match { + case Some(LifeCycle(scope)) => setScope(builder, scope) Some(builder.build) - case Some(LifeCycle(scope, 
Some(callbacks), _)) => - setScope(builder, scope) - builder.setPreRestart(callbacks.preRestart) - builder.setPostRestart(callbacks.postRestart) - Some(builder.build) case None => None } } val originalAddress = AddressProtocol.newBuilder - .setHostname(a.homeAddress.getHostName) - .setPort(a.homeAddress.getPort) + .setHostname(actorRef.homeAddress.getHostName) + .setPort(actorRef.homeAddress.getPort) .build val builder = SerializedActorRefProtocol.newBuilder - .setUuid(a.uuid) - .setId(a.id) - .setActorClassname(a.actorClass.getName) + .setUuid(actorRef.uuid) + .setId(actorRef.id) + .setActorClassname(actorRef.actorClass.getName) .setOriginalAddress(originalAddress) - .setIsTransactor(a.isTransactor) - .setTimeout(a.timeout) + .setIsTransactor(actorRef.isTransactor) + .setTimeout(actorRef.timeout) - builder.setActorInstance(ByteString.copyFrom(format.toBinary(a.actor.asInstanceOf[T]))) + actorRef.receiveTimeout.foreach(builder.setReceiveTimeout(_)) + builder.setActorInstance(ByteString.copyFrom(format.toBinary(actorRef.actor.asInstanceOf[T]))) lifeCycleProtocol.foreach(builder.setLifeCycle(_)) - a.supervisor.foreach(s => builder.setSupervisor(RemoteActorSerialization.toRemoteActorRefProtocol(s))) + actorRef.supervisor.foreach(s => builder.setSupervisor(RemoteActorSerialization.toRemoteActorRefProtocol(s))) // FIXME: how to serialize the hotswap PartialFunction ?? 
//hotswap.foreach(builder.setHotswapStack(_)) builder.build @@ -121,7 +117,8 @@ object ActorSerialization { private def fromBinaryToLocalActorRef[T <: Actor](bytes: Array[Byte], format: Format[T]): ActorRef = fromProtobufToLocalActorRef(SerializedActorRefProtocol.newBuilder.mergeFrom(bytes).build, format, None) - private def fromProtobufToLocalActorRef[T <: Actor](protocol: SerializedActorRefProtocol, format: Format[T], loader: Option[ClassLoader]): ActorRef = { + private def fromProtobufToLocalActorRef[T <: Actor]( + protocol: SerializedActorRefProtocol, format: Format[T], loader: Option[ClassLoader]): ActorRef = { Actor.log.debug("Deserializing SerializedActorRefProtocol to LocalActorRef:\n" + protocol) val serializer = @@ -132,12 +129,8 @@ object ActorSerialization { val lifeCycle = if (protocol.hasLifeCycle) { val lifeCycleProtocol = protocol.getLifeCycle - val restartCallbacks = - if (lifeCycleProtocol.hasPreRestart || lifeCycleProtocol.hasPostRestart) - Some(RestartCallbacks(lifeCycleProtocol.getPreRestart, lifeCycleProtocol.getPostRestart)) - else None - Some(if (lifeCycleProtocol.getLifeCycle == LifeCycleType.PERMANENT) LifeCycle(Permanent, restartCallbacks) - else if (lifeCycleProtocol.getLifeCycle == LifeCycleType.TEMPORARY) LifeCycle(Temporary, restartCallbacks) + Some(if (lifeCycleProtocol.getLifeCycle == LifeCycleType.PERMANENT) LifeCycle(Permanent) + else if (lifeCycleProtocol.getLifeCycle == LifeCycleType.TEMPORARY) LifeCycle(Temporary) else throw new IllegalActorStateException("LifeCycle type is not valid: " + lifeCycleProtocol.getLifeCycle)) } else None @@ -161,6 +154,7 @@ object ActorSerialization { protocol.getOriginalAddress.getPort, if (protocol.hasIsTransactor) protocol.getIsTransactor else false, if (protocol.hasTimeout) protocol.getTimeout else Actor.TIMEOUT, + if (protocol.hasReceiveTimeout) Some(protocol.getReceiveTimeout) else None, lifeCycle, supervisor, hotswap, @@ -223,26 +217,30 @@ object RemoteActorSerialization { .build } - def 
createRemoteRequestProtocolBuilder(ar: ActorRef, - message: Any, isOneWay: Boolean, senderOption: Option[ActorRef]): RemoteRequestProtocol.Builder = { - import ar._ - val protocol = RemoteRequestProtocol.newBuilder - .setId(RemoteRequestProtocolIdFactory.nextId) - .setMessage(MessageSerializer.serialize(message)) + def createRemoteRequestProtocolBuilder(actorRef: ActorRef, message: Any, isOneWay: Boolean, senderOption: Option[ActorRef]): + RemoteRequestProtocol.Builder = { + import actorRef._ + + val actorInfo = ActorInfoProtocol.newBuilder + .setUuid(uuid) .setTarget(actorClassName) .setTimeout(timeout) - .setUuid(uuid) - .setIsActor(true) + .setActorType(ActorType.SCALA_ACTOR) + .build + + val request = RemoteRequestProtocol.newBuilder + .setId(RemoteRequestProtocolIdFactory.nextId) + .setMessage(MessageSerializer.serialize(message)) + .setActorInfo(actorInfo) .setIsOneWay(isOneWay) - .setIsEscaped(false) val id = registerSupervisorAsRemoteActor - if (id.isDefined) protocol.setSupervisorUuid(id.get) + if (id.isDefined) request.setSupervisorUuid(id.get) senderOption.foreach { sender => RemoteServer.getOrCreateServer(sender.homeAddress).register(sender.uuid, sender) - protocol.setSender(toRemoteActorRefProtocol(sender)) + request.setSender(toRemoteActorRefProtocol(sender)) } - protocol + request } } diff --git a/akka-core/src/main/scala/actor/Supervisor.scala b/akka-core/src/main/scala/actor/Supervisor.scala index ecbbf9bb9d..9b56bddf38 100644 --- a/akka-core/src/main/scala/actor/Supervisor.scala +++ b/akka-core/src/main/scala/actor/Supervisor.scala @@ -161,8 +161,8 @@ sealed class Supervisor private[akka] ( _childActors.put(className, actorRef :: currentActors) actorRef.lifeCycle = Some(lifeCycle) supervisor.link(actorRef) - remoteAddress.foreach(address => - RemoteServer.registerActor(new InetSocketAddress(address.hostname, address.port), actorRef.uuid, actorRef)) + remoteAddress.foreach(address => RemoteServer.registerActor( + new 
InetSocketAddress(address.hostname, address.port), actorRef.uuid, actorRef)) case supervisorConfig @ SupervisorConfig(_, _) => // recursive supervisor configuration val childSupervisor = Supervisor(supervisorConfig) supervisor.link(childSupervisor.supervisor) @@ -180,14 +180,23 @@ final class SupervisorActor private[akka] ( handler: FaultHandlingStrategy, trapExceptions: List[Class[_ <: Throwable]]) extends Actor { import self._ + trapExit = trapExceptions faultHandler = Some(handler) override def shutdown(): Unit = shutdownLinkedActors def receive = { + // FIXME add a way to respond to MaximumNumberOfRestartsWithinTimeRangeReached in declaratively configured Supervisor + case MaximumNumberOfRestartsWithinTimeRangeReached( + victim, maxNrOfRetries, withinTimeRange, lastExceptionCausingRestart) => + Actor.log.warning( + "Declaratively configured supervisor received a [MaximumNumberOfRestartsWithinTimeRangeReached] notification," + + "\n\tbut there is currently no way of handling it in a declaratively configured supervisor." + + "\n\tIf you want to be able to handle this error condition then you need to create the supervision tree programatically." + + "\n\tThis will be supported in the future.") case unknown => throw new SupervisorException( - "SupervisorActor can not respond to messages. 
Unknown message [" + unknown + "]") + "SupervisorActor can not respond to messages.\n\tUnknown message [" + unknown + "]") } } diff --git a/akka-core/src/main/scala/actor/TypedActor.scala b/akka-core/src/main/scala/actor/TypedActor.scala new file mode 100644 index 0000000000..ae72da4d70 --- /dev/null +++ b/akka-core/src/main/scala/actor/TypedActor.scala @@ -0,0 +1,804 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import Actor._ +import se.scalablesolutions.akka.config.FaultHandlingStrategy +import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ +import se.scalablesolutions.akka.remote.{MessageSerializer, RemoteClient, RemoteRequestProtocolIdFactory} +import se.scalablesolutions.akka.dispatch.{MessageDispatcher, Future, CompletableFuture} +import se.scalablesolutions.akka.config.ScalaConfig._ +import se.scalablesolutions.akka.serialization.Serializer +import se.scalablesolutions.akka.util._ + +import org.codehaus.aspectwerkz.joinpoint.{MethodRtti, JoinPoint} +import org.codehaus.aspectwerkz.proxy.Proxy +import org.codehaus.aspectwerkz.annotation.{Aspect, Around} + +import java.net.InetSocketAddress +import java.lang.reflect.{InvocationTargetException, Method, Field} + +import scala.reflect.BeanProperty + +/** + * FIXME: document TypedActor + * + * Here is an example of usage (in Java): + *

+ * class PingImpl extends TypedActor implements Ping {
+ *   public void hit(int count) {
+ *     Pong pong = (Pong) getContext().getSender();
+ *     pong.hit(count++);
+ *   }
+ *
+ *   @Override
+ *   public void init() {
+ *     ... // optional initialization on start
+ *   }
+ *
+ *   @Override
+ *   public void shutdown() {
+ *     ... // optional cleanup on stop
+ *   }
+ *
+ *   ... // more life-cycle callbacks if needed
+ * }
+ *
+ * // create the ping actor
+ * Ping ping = TypedActor.newInstance(Ping.class, PingImpl.class);
+ *
+ * ping.hit(1); // use the actor
+ * ping.hit(1);
+ *
+ * // stop the actor
+ * TypedActor.stop(ping);
+ * 
+ * + * Here is an example of usage (in Scala): + *
+ * class PingImpl extends TypedActor with Ping {
+ *   def hit(count: Int) = {
+ *     val pong = context.sender.asInstanceOf[Pong]
+ *     pong.hit(count += 1)
+ *   }
+ *
+ *   override def init = {
+ *     ... // optional initialization on start
+ *   }
+ *
+ *   override def shutdown = {
+ *     ... // optional cleanup on stop
+ *   }
+ *
+ *   ... // more life-cycle callbacks if needed
+ * }
+ *
+ * // create the ping actor
+ * val ping = TypedActor.newInstance(classOf[Ping], classOf[PingImpl])
+ *
+ * ping.hit(1) // use the actor
+ * ping.hit(1)
+ *
+ * // stop the actor
+ * TypedActor.stop(ping)
+ * 
+ * + * @author Jonas Bonér + */ +abstract class TypedActor extends Logging { + + /** + * Holds RTTI (runtime type information) for the TypedActor, f.e. current 'sender' + * reference, the 'senderFuture' reference etc. + *

+ * This class does not contain static information but is updated by the runtime system + * at runtime. + *

+ * You can get a hold of the context using either the 'getContext()' or 'context' + * methods from the 'TypedActor' base class. + *

+ * + * Here is an example of usage (in Java): + *

+   * class PingImpl extends TypedActor implements Ping {
+   *   public void hit(int count) {
+   *     Pong pong = (Pong) getContext().getSender();
+   *     pong.hit(count++);
+   *   }
+   * }
+   * 
+ * + * Here is an example of usage (in Scala): + *
+   * class PingImpl extends TypedActor with Ping {
+   *   def hit(count: Int) = {
+   *     val pong = context.sender.asInstanceOf[Pong]
+   *     pong.hit(count += 1)
+   *   }
+   * }
+   * 
+ */ + @BeanProperty protected var context: TypedActorContext = _ + + /** + * The uuid for the Typed Actor. + */ + @BeanProperty @volatile var uuid = UUID.newUuid.toString + + /** + * Identifier for actor, does not have to be a unique one. Default is the 'uuid'. + *

+ * This field is used for logging, AspectRegistry.actorsFor(id), identifier for remote + * actor in RemoteServer etc.But also as the identifier for persistence, which means + * that you can use a custom name to be able to retrieve the "correct" persisted state + * upon restart, remote restart etc. + *

+ * This property can be set to a custom ID. + */ + @BeanProperty @volatile protected var id: String = uuid + + /** + * Defines the default timeout for '!!' and '!!!' invocations, + * e.g. the timeout for the future returned by the call to '!!' and '!!!'. + *

+ * This property can be set to a custom timeout. + */ + @BeanProperty @volatile protected var timeout: Long = Actor.TIMEOUT + + /** + * User overridable callback. + *

+ * Is called when an Actor is started by invoking 'actor.start'. + */ + def init {} + + /** + * User overridable callback. + *

+ * Is called when 'actor.stop' is invoked. + */ + def shutdown {} + + /** + * User overridable callback. + *

+ * Is called on a crashed Actor right BEFORE it is restarted to allow clean up of resources before Actor is terminated. + */ + def preRestart(reason: Throwable) {} + + /** + * User overridable callback. + *

+ * Is called right AFTER restart on the newly created Actor to allow reinitialization after an Actor crash. + */ + def postRestart(reason: Throwable) {} + + /** + * User overridable callback. + *

+ * Is called during initialization. Can be used to initialize transactional state. Will be invoked within a transaction. + */ + def initTransactionalState {} +} + +/** + * FIXME: document TypedTransactor + * + * @author Jonas Bonér + */ +abstract class TypedTransactor extends TypedActor + +/** + * Configuration factory for TypedActors. + * + * FIXDOC: document TypedActorConfiguration + * + * @author Jonas Bonér + */ +final class TypedActorConfiguration { + private[akka] var _timeout: Long = Actor.TIMEOUT + private[akka] var _transactionRequired = false + private[akka] var _host: Option[InetSocketAddress] = None + private[akka] var _messageDispatcher: Option[MessageDispatcher] = None + + def timeout = _timeout + def timeout(timeout: Duration) : TypedActorConfiguration = { + _timeout = timeout.toMillis + this + } + + def makeTransactionRequired() : TypedActorConfiguration = { + _transactionRequired = true; + this + } + + def makeRemote(hostname: String, port: Int) : TypedActorConfiguration = { + _host = Some(new InetSocketAddress(hostname, port)) + this + } + + def dispatcher(messageDispatcher: MessageDispatcher) : TypedActorConfiguration = { + _messageDispatcher = Some(messageDispatcher) + this + } +} + +/** + * Holds RTTI (runtime type information) for the TypedActor, f.e. current 'sender' + * reference, the 'senderFuture' reference etc. + *

+ * This class does not contain static information but is updated by the runtime system + * at runtime. + *

+ * You can get a hold of the context using either the 'getContext()' or 'context' + * methods from the 'TypedActor' base class. + *

+ * Here is an example of usage (from Java): + *

+ * class PingImpl extends TypedActor implements Ping {
+ *   public void hit(int count) {
+ *     Pong pong = (Pong) getContext().getSender();
+ *     pong.hit(count++);
+ *   }
+ * }
+ * 
+ * + * Here is an example of usage (in Scala): + *
+ * class PingImpl extends TypedActor with Ping {
+ *   def hit(count: Int) = {
+ *     val pong = context.sender.asInstanceOf[Pong]
+ *     pong.hit(count += 1)
+ *   }
+ * }
+ * 
+ * + * @author Jonas Bonér + */ +final class TypedActorContext { + private[akka] var _self: AnyRef = _ + private[akka] var _sender: AnyRef = _ + private[akka] var _senderFuture: CompletableFuture[Any] = _ + + /** + * Returns the current sender reference. + * Scala style getter. + */ + def sender: AnyRef = { + if (_sender eq null) throw new IllegalActorStateException("Sender reference should not be null.") + else _sender + } + + /** + * Returns the current sender reference. + * Java style getter. + */ + def getSender: AnyRef = { + if (_sender eq null) throw new IllegalActorStateException("Sender reference should not be null.") + else _sender + } + + /** + * Returns the current sender future TypedActor reference. + * Scala style getter. + */ + def senderFuture: Option[CompletableFuture[Any]] = if (_senderFuture eq null) None else Some(_senderFuture) + + /** + * Returns the current sender future TypedActor reference. + * Java style getter. + * This method returns 'null' if the sender future is not available. + */ + def getSenderFuture = _senderFuture +} + +/** + * Factory class for creating TypedActors out of plain POJOs and/or POJOs with interfaces. 
+ * + * @author Jonas Bonér + */ +object TypedActor extends Logging { + import Actor.actorOf + + val AKKA_CAMEL_ROUTING_SCHEME = "akka".intern + private[actor] val AW_PROXY_PREFIX = "$$ProxiedByAW".intern + + def newInstance[T](intfClass: Class[T], targetClass: Class[_], timeout: Long): T = { + newInstance(intfClass, newTypedActor(targetClass), actorOf(new Dispatcher(false)), None, timeout) + } + + def newInstance[T](intfClass: Class[T], targetClass: Class[_]): T = { + newInstance(intfClass, newTypedActor(targetClass), actorOf(new Dispatcher(false)), None, Actor.TIMEOUT) + } + + def newRemoteInstance[T](intfClass: Class[T], targetClass: Class[_], timeout: Long, hostname: String, port: Int): T = { + newInstance(intfClass, newTypedActor(targetClass), actorOf(new Dispatcher(false)), Some(new InetSocketAddress(hostname, port)), timeout) + } + + def newRemoteInstance[T](intfClass: Class[T], targetClass: Class[_], hostname: String, port: Int): T = { + newInstance(intfClass, newTypedActor(targetClass), actorOf(new Dispatcher(false)), Some(new InetSocketAddress(hostname, port)), Actor.TIMEOUT) + } + + def newInstance[T](intfClass: Class[T], targetClass: Class[_], config: TypedActorConfiguration): T = { + val actor = actorOf(new Dispatcher(config._transactionRequired)) + if (config._messageDispatcher.isDefined) actor.dispatcher = config._messageDispatcher.get + newInstance(intfClass, newTypedActor(targetClass), actor, config._host, config.timeout) + } + + private[akka] def newInstance[T](intfClass: Class[T], targetInstance: TypedActor, actorRef: ActorRef, + remoteAddress: Option[InetSocketAddress], timeout: Long): T = { + val context = injectTypedActorContext(targetInstance) + val proxy = Proxy.newInstance(Array(intfClass), Array(targetInstance), true, false) + actorRef.actor.asInstanceOf[Dispatcher].initialize(targetInstance.getClass, targetInstance, proxy, context) + actorRef.timeout = timeout + if (remoteAddress.isDefined) actorRef.makeRemote(remoteAddress.get) + 
AspectInitRegistry.register(proxy, AspectInit(intfClass, targetInstance, actorRef, remoteAddress, timeout)) + actorRef.start + proxy.asInstanceOf[T] + } + + // NOTE: currently not used - but keep it around + private[akka] def newInstance[T <: TypedActor]( + targetClass: Class[T], actorRef: ActorRef, remoteAddress: Option[InetSocketAddress], timeout: Long): T = { + val proxy = { + val instance = Proxy.newInstance(targetClass, true, false) + if (instance.isInstanceOf[TypedActor]) instance.asInstanceOf[TypedActor] + else throw new IllegalActorStateException("Actor [" + targetClass.getName + "] is not a sub class of 'TypedActor'") + } + val context = injectTypedActorContext(proxy) + actorRef.actor.asInstanceOf[Dispatcher].initialize(targetClass, proxy, proxy, context) + actorRef.timeout = timeout + if (remoteAddress.isDefined) actorRef.makeRemote(remoteAddress.get) + AspectInitRegistry.register(proxy, AspectInit(targetClass, proxy, actorRef, remoteAddress, timeout)) + actorRef.start + proxy.asInstanceOf[T] + } + + /** + * Stops the current Typed Actor. + */ + def stop(proxy: AnyRef): Unit = AspectInitRegistry.initFor(proxy).actorRef.stop + + /** + * Get the underlying dispatcher actor for the given Typed Actor. + */ + def actorFor(proxy: AnyRef): Option[ActorRef] = + ActorRegistry + .actorsFor(classOf[Dispatcher]) + .find(a => a.actor.asInstanceOf[Dispatcher].proxy == proxy) + + /** + * Links an other Typed Actor to this Typed Actor. 
+ * @param supervisor the supervisor Typed Actor + * @param supervised the Typed Actor to link + */ + def link(supervisor: AnyRef, supervised: AnyRef) = { + val supervisorActor = actorFor(supervisor).getOrElse( + throw new IllegalActorStateException("Can't link when the supervisor is not an Typed Actor")) + val supervisedActor = actorFor(supervised).getOrElse( + throw new IllegalActorStateException("Can't link when the supervised is not an Typed Actor")) + supervisorActor.link(supervisedActor) + } + + /** + * Links an other Typed Actor to this Typed Actor and sets the fault handling for the supervisor. + * @param supervisor the supervisor Typed Actor + * @param supervised the Typed Actor to link + * @param handler fault handling strategy + * @param trapExceptions array of exceptions that should be handled by the supervisor + */ + def link(supervisor: AnyRef, supervised: AnyRef, + handler: FaultHandlingStrategy, trapExceptions: Array[Class[_ <: Throwable]]) = { + val supervisorActor = actorFor(supervisor).getOrElse( + throw new IllegalActorStateException("Can't link when the supervisor is not an Typed Actor")) + val supervisedActor = actorFor(supervised).getOrElse( + throw new IllegalActorStateException("Can't link when the supervised is not an Typed Actor")) + supervisorActor.trapExit = trapExceptions.toList + supervisorActor.faultHandler = Some(handler) + supervisorActor.link(supervisedActor) + } + + /** + * Unlink the supervised Typed Actor from the supervisor. 
+ * @param supervisor the supervisor Typed Actor + * @param supervised the Typed Actor to unlink + */ + def unlink(supervisor: AnyRef, supervised: AnyRef) = { + val supervisorActor = actorFor(supervisor).getOrElse( + throw new IllegalActorStateException("Can't unlink when the supervisor is not an Typed Actor")) + val supervisedActor = actorFor(supervised).getOrElse( + throw new IllegalActorStateException("Can't unlink when the supervised is not an Typed Actor")) + supervisorActor.unlink(supervisedActor) + } + + /** + * Sets the trap exit for the given supervisor Typed Actor. + * @param supervisor the supervisor Typed Actor + * @param trapExceptions array of exceptions that should be handled by the supervisor + */ + def trapExit(supervisor: AnyRef, trapExceptions: Array[Class[_ <: Throwable]]) = { + val supervisorActor = actorFor(supervisor).getOrElse( + throw new IllegalActorStateException("Can't set trap exceptions when the supervisor is not an Typed Actor")) + supervisorActor.trapExit = trapExceptions.toList + this + } + + /** + * Sets the fault handling strategy for the given supervisor Typed Actor. 
+ * @param supervisor the supervisor Typed Actor + * @param handler fault handling strategy + */ + def faultHandler(supervisor: AnyRef, handler: FaultHandlingStrategy) = { + val supervisorActor = actorFor(supervisor).getOrElse( + throw new IllegalActorStateException("Can't set fault handler when the supervisor is not an Typed Actor")) + supervisorActor.faultHandler = Some(handler) + this + } + + private def injectTypedActorContext(typedActor: AnyRef): Option[TypedActorContext] = { + def injectTypedActorContext0(typedActor: AnyRef, clazz: Class[_]): Option[TypedActorContext] = { + val contextField = clazz.getDeclaredFields.toList.find(_.getType == classOf[TypedActorContext]) + if (contextField.isDefined) { + contextField.get.setAccessible(true) + val context = new TypedActorContext + contextField.get.set(typedActor, context) + Some(context) + } else { + val parent = clazz.getSuperclass + if (parent != null) injectTypedActorContext0(typedActor, parent) + else { + log.ifTrace("Can't set 'TypedActorContext' for TypedActor [" + + typedActor.getClass.getName + + "] since no field of this type could be found.") + None + } + } + } + injectTypedActorContext0(typedActor, typedActor.getClass) + } + + private[akka] def newTypedActor(targetClass: Class[_]): TypedActor = { + val instance = targetClass.newInstance + val typedActor = + if (instance.isInstanceOf[TypedActor]) instance.asInstanceOf[TypedActor] + else throw new IllegalArgumentException("Actor [" + targetClass.getName + "] is not a sub class of 'TypedActor'") + typedActor.init + import se.scalablesolutions.akka.stm.local.atomic + atomic { + typedActor.initTransactionalState + } + typedActor + } + + private[akka] def supervise(restartStrategy: RestartStrategy, components: List[Supervise]): Supervisor = + Supervisor(SupervisorConfig(restartStrategy, components)) +} + +/** + * Internal helper class to help pass the contextual information between threads. 
+ * + * @author Jonas Bonér + */ +private[akka] object TypedActorContext { + import scala.util.DynamicVariable + private[actor] val sender = new DynamicVariable[AnyRef](null) + private[actor] val senderFuture = new DynamicVariable[CompletableFuture[Any]](null) +} + +/** + * @author Jonas Bonér + */ +private[akka] object AspectInitRegistry extends ListenerManagement { + private val initializations = new java.util.concurrent.ConcurrentHashMap[AnyRef, AspectInit] + + def initFor(proxy: AnyRef) = initializations.get(proxy) + + def register(proxy: AnyRef, init: AspectInit) = { + val res = initializations.put(proxy, init) + foreachListener(_ ! AspectInitRegistered(proxy, init)) + res + } + + def unregister(proxy: AnyRef) = { + val res = initializations.remove(proxy) + foreachListener(_ ! AspectInitUnregistered(proxy, res)) + res + } +} + +private[akka] sealed trait AspectInitRegistryEvent +private[akka] case class AspectInitRegistered(proxy: AnyRef, init: AspectInit) extends AspectInitRegistryEvent +private[akka] case class AspectInitUnregistered(proxy: AnyRef, init: AspectInit) extends AspectInitRegistryEvent + +/** + * @author Jonas Bonér + */ +private[akka] sealed case class AspectInit( + val interfaceClass: Class[_], + val targetInstance: TypedActor, + val actorRef: ActorRef, + val remoteAddress: Option[InetSocketAddress], + val timeout: Long) { + def this(interfaceClass: Class[_], targetInstance: TypedActor, actorRef: ActorRef, timeout: Long) = + this(interfaceClass, targetInstance, actorRef, None, timeout) +} + +/** + * AspectWerkz Aspect that is turning POJO into TypedActor. + *

+ * Is deployed on a 'perInstance' basis with the pointcut 'execution(* *.*(..))', + * e.g. all methods on the instance. + * + * @author Jonas Bonér + */ +@Aspect("perInstance") +private[akka] sealed class TypedActorAspect { + @volatile private var isInitialized = false + @volatile private var isStopped = false + private var interfaceClass: Class[_] = _ + private var targetInstance: TypedActor = _ + private var actorRef: ActorRef = _ + private var remoteAddress: Option[InetSocketAddress] = _ + private var timeout: Long = _ + private var uuid: String = _ + @volatile private var instance: TypedActor = _ + + @Around("execution(* *.*(..))") + def invoke(joinPoint: JoinPoint): AnyRef = { + if (!isInitialized) { + val init = AspectInitRegistry.initFor(joinPoint.getThis) + interfaceClass = init.interfaceClass + targetInstance = init.targetInstance + uuid = targetInstance.uuid + actorRef = init.actorRef + remoteAddress = init.remoteAddress + timeout = init.timeout + isInitialized = true + } + dispatch(joinPoint) + } + + private def dispatch(joinPoint: JoinPoint) = { + if (remoteAddress.isDefined) remoteDispatch(joinPoint) + else localDispatch(joinPoint) + } + + private def localDispatch(joinPoint: JoinPoint): AnyRef = { + val rtti = joinPoint.getRtti.asInstanceOf[MethodRtti] + val isOneWay = isVoid(rtti) + val sender = TypedActorContext.sender.value + val senderFuture = TypedActorContext.senderFuture.value + + if (!actorRef.isRunning && !isStopped) { + isStopped = true + joinPoint.proceed + + } else if (isOneWay) { + actorRef ! Invocation(joinPoint, true, true, sender, senderFuture) + null.asInstanceOf[AnyRef] + + } else { + val result = (actorRef !! 
(Invocation(joinPoint, false, isOneWay, sender, senderFuture), timeout)).as[AnyRef] + if (result.isDefined) result.get + else throw new IllegalActorStateException("No result defined for invocation [" + joinPoint + "]") + } + } + + private def remoteDispatch(joinPoint: JoinPoint): AnyRef = { + val rtti = joinPoint.getRtti.asInstanceOf[MethodRtti] + val isOneWay = isVoid(rtti) + val (message: Array[AnyRef], isEscaped) = escapeArguments(rtti.getParameterValues) + + val typedActorInfo = TypedActorInfoProtocol.newBuilder + .setInterface(interfaceClass.getName) + .setMethod(rtti.getMethod.getName) + .build + + val actorInfo = ActorInfoProtocol.newBuilder + .setUuid(uuid) + .setTarget(targetInstance.getClass.getName) + .setTimeout(timeout) + .setActorType(ActorType.TYPED_ACTOR) + .setTypedActorInfo(typedActorInfo) + .build + + val requestBuilder = RemoteRequestProtocol.newBuilder + .setId(RemoteRequestProtocolIdFactory.nextId) + .setMessage(MessageSerializer.serialize(message)) + .setActorInfo(actorInfo) + .setIsOneWay(isOneWay) + + val id = actorRef.registerSupervisorAsRemoteActor + if (id.isDefined) requestBuilder.setSupervisorUuid(id.get) + + val remoteMessage = requestBuilder.build + + val future = RemoteClient.clientFor(remoteAddress.get).send(remoteMessage, None) + + if (isOneWay) null // for void methods + else { + if (future.isDefined) { + future.get.await + val result = getResultOrThrowException(future.get) + if (result.isDefined) result.get + else throw new IllegalActorStateException("No result returned from call to [" + joinPoint + "]") + } else throw new IllegalActorStateException("No future returned from call to [" + joinPoint + "]") + } + } + + private def getResultOrThrowException[T](future: Future[T]): Option[T] = + if (future.exception.isDefined) { + val (_, cause) = future.exception.get + throw cause + } else future.result + + private def isVoid(rtti: MethodRtti) = rtti.getMethod.getReturnType == java.lang.Void.TYPE + + private def escapeArguments(args: 
Array[AnyRef]): Tuple2[Array[AnyRef], Boolean] = { + var isEscaped = false + val escapedArgs = for (arg <- args) yield { + val clazz = arg.getClass + if (clazz.getName.contains(TypedActor.AW_PROXY_PREFIX)) { + isEscaped = true + TypedActor.AW_PROXY_PREFIX + clazz.getSuperclass.getName + } else arg + } + (escapedArgs, isEscaped) + } +} + +/** + * Represents a snapshot of the current invocation. + * + * @author Jonas Bonér + */ +@serializable private[akka] case class Invocation( + joinPoint: JoinPoint, isOneWay: Boolean, isVoid: Boolean, sender: AnyRef, senderFuture: CompletableFuture[Any]) { + + override def toString: String = synchronized { + "Invocation [" + + "\n\t\tmethod = " + joinPoint.getRtti.asInstanceOf[MethodRtti].getMethod.getName + " @ " + joinPoint.getTarget.getClass.getName + + "\n\t\tisOneWay = " + isOneWay + + "\n\t\tisVoid = " + isVoid + + "\n\t\tsender = " + sender + + "\n\t\tsenderFuture = " + senderFuture + + "]" + } + + override def hashCode: Int = synchronized { + var result = HashCode.SEED + result = HashCode.hash(result, joinPoint) + result = HashCode.hash(result, isOneWay) + result = HashCode.hash(result, isVoid) + result = HashCode.hash(result, sender) + result = HashCode.hash(result, senderFuture) + result + } + + override def equals(that: Any): Boolean = synchronized { + that != null && + that.isInstanceOf[Invocation] && + that.asInstanceOf[Invocation].joinPoint == joinPoint && + that.asInstanceOf[Invocation].isOneWay == isOneWay && + that.asInstanceOf[Invocation].isVoid == isVoid && + that.asInstanceOf[Invocation].sender == sender && + that.asInstanceOf[Invocation].senderFuture == senderFuture + } +} + +object Dispatcher { + val ZERO_ITEM_CLASS_ARRAY = Array[Class[_]]() + val ZERO_ITEM_OBJECT_ARRAY = Array[Object]() +} + +/** + * Generic Actor managing Invocation dispatch, transaction and error management. 
+ * + * @author Jonas Bonér + */ +private[akka] class Dispatcher(transactionalRequired: Boolean) extends Actor { + import Dispatcher._ + + private[actor] var proxy: AnyRef = _ + private var context: Option[TypedActorContext] = None + private var targetClass: Class[_] = _ + @volatile private[akka] var targetInstance: TypedActor = _ + private var proxyDelegate: Field = _ + + private[actor] def initialize( + targetClass: Class[_], targetInstance: TypedActor, proxy: AnyRef, ctx: Option[TypedActorContext]) = { + if (transactionalRequired || isTransactional(targetClass)) self.makeTransactionRequired + + self.id = targetClass.getName + this.targetClass = targetClass + this.proxy = proxy + this.targetInstance = targetInstance + this.context = ctx + + proxyDelegate = { + val field = proxy.getClass.getDeclaredField("DELEGATE_0") + field.setAccessible(true) + field + } + + if (self.lifeCycle.isEmpty) self.lifeCycle = Some(LifeCycle(Permanent)) + } + + def receive = { + case invocation @ Invocation(joinPoint, isOneWay, _, sender, senderFuture) => + TypedActor.log.ifTrace("Invoking Typed Actor with message:\n" + invocation) + context.foreach { ctx => + if (sender ne null) ctx._sender = sender + if (senderFuture ne null) ctx._senderFuture = senderFuture + } + TypedActorContext.sender.value = joinPoint.getThis // set next sender + self.senderFuture.foreach(TypedActorContext.senderFuture.value = _) + if (Actor.SERIALIZE_MESSAGES) serializeArguments(joinPoint) + if (isOneWay) joinPoint.proceed + else self.reply(joinPoint.proceed) + + // Jan Kronquist: started work on issue 121 + case Link(proxy) => self.link(proxy) + case Unlink(proxy) => self.unlink(proxy) + case unexpected => throw new IllegalActorStateException( + "Unexpected message [" + unexpected + "] sent to [" + this + "]") + } + + override def preRestart(reason: Throwable) { + targetInstance.preRestart(reason) + + // rewrite target instance in Dispatcher and AspectWerkz Proxy + targetInstance = 
TypedActor.newTypedActor(targetClass) + proxyDelegate.set(proxy, targetInstance) + } + + override def postRestart(reason: Throwable) { + targetInstance.postRestart(reason) + } + + override def shutdown { + targetInstance.shutdown + AspectInitRegistry.unregister(proxy); + } + + override def initTransactionalState { + targetInstance.initTransactionalState + } + + def isTransactional(clazz: Class[_]): Boolean = + if (clazz == null) false + else if (clazz.isAssignableFrom(classOf[TypedTransactor])) true + else isTransactional(clazz.getSuperclass) + + private def serializeArguments(joinPoint: JoinPoint) = { + val args = joinPoint.getRtti.asInstanceOf[MethodRtti].getParameterValues + var unserializable = false + var hasMutableArgument = false + for (arg <- args.toList) { + if (!arg.isInstanceOf[String] && + !arg.isInstanceOf[Byte] && + !arg.isInstanceOf[Int] && + !arg.isInstanceOf[Long] && + !arg.isInstanceOf[Float] && + !arg.isInstanceOf[Double] && + !arg.isInstanceOf[Boolean] && + !arg.isInstanceOf[Char] && + !arg.isInstanceOf[java.lang.Byte] && + !arg.isInstanceOf[java.lang.Integer] && + !arg.isInstanceOf[java.lang.Long] && + !arg.isInstanceOf[java.lang.Float] && + !arg.isInstanceOf[java.lang.Double] && + !arg.isInstanceOf[java.lang.Boolean] && + !arg.isInstanceOf[java.lang.Character]) { + hasMutableArgument = true + } + if (arg.getClass.getName.contains(TypedActor.AW_PROXY_PREFIX)) unserializable = true + } + if (!unserializable && hasMutableArgument) { + val copyOfArgs = Serializer.Java.deepClone(args) + joinPoint.getRtti.asInstanceOf[MethodRtti].setParameterValues(copyOfArgs.asInstanceOf[Array[AnyRef]]) + } + } +} diff --git a/akka-core/src/main/scala/actor/UntypedActor.scala b/akka-core/src/main/scala/actor/UntypedActor.scala new file mode 100644 index 0000000000..8ea36531e8 --- /dev/null +++ b/akka-core/src/main/scala/actor/UntypedActor.scala @@ -0,0 +1,580 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + 
+import se.scalablesolutions.akka.dispatch._ +import se.scalablesolutions.akka.stm.global._ +import se.scalablesolutions.akka.config.{AllForOneStrategy, OneForOneStrategy, FaultHandlingStrategy} +import se.scalablesolutions.akka.config.ScalaConfig._ + +import java.net.InetSocketAddress + +/** + * Subclass this abstract class to create a MDB-style untyped actor. + *

+ * This class is meant to be used from Java. + *

+ * Here is an example on how to create and use an UntypedActor: + *

+ *  public class SampleUntypedActor extends UntypedActor {
+ *    public void onReceive(Object message, UntypedActorRef self) throws Exception {
+ *      if (message instanceof String) {
+ *        String msg = (String)message;
+ *
+ *            if (msg.equals("UseReply")) {
+ *                  // Reply to original sender of message using the 'replyUnsafe' method
+ *                  self.replyUnsafe(msg + ":" + self.getUuid());
+ *
+ *            } else if (msg.equals("UseSender") && self.getSender().isDefined()) {     
+ *                  // Reply to original sender of message using the sender reference
+ *                  // also passing along my own reference (the self)
+ *                  self.getSender().get().sendOneWay(msg, self); 
+ *
+ *        } else if (msg.equals("UseSenderFuture") && self.getSenderFuture().isDefined()) {     
+ *                  // Reply to original sender of message using the sender future reference
+ *                  self.getSenderFuture().get().completeWithResult(msg);
+ *
+ *        } else if (msg.equals("SendToSelf")) {
+ *                  // Send message to the actor itself recursively
+ *                  self.sendOneWay(msg);
+ *
+ *        } else if (msg.equals("ForwardMessage")) {
+ *          // Retrieve an actor from the ActorRegistry by ID and get an ActorRef back
+ *          ActorRef actorRef = ActorRegistry.actorsFor("some-actor-id").head();
+ *          // Wrap the ActorRef in an UntypedActorRef and forward the message to this actor
+ *          UntypedActorRef.wrap(actorRef).forward(msg, self);
+ *
+ *        } else throw new IllegalArgumentException("Unknown message: " + message);
+ *      } else throw new IllegalArgumentException("Unknown message: " + message);
+ *    }
+ *   
+ *    public static void main(String[] args) {
+ *      UntypedActorRef actor = UntypedActor.actorOf(SampleUntypedActor.class);
+ *      actor.start();
+ *      actor.sendOneWay("SendToSelf");
+ *      actor.stop();
+ *    }
+ *  }
+ * 
+ * + * @author Jonas Bonér + */ +abstract class UntypedActor extends Actor { + protected[akka] var context: Option[UntypedActorRef] = None + + final protected def receive = { + case msg => + if (context.isEmpty) { + val ctx = new UntypedActorRef(self) + context = Some(ctx) + onReceive(msg, ctx) + } else onReceive(msg, context.get) + } + + @throws(classOf[Exception]) + def onReceive(message: Any, context: UntypedActorRef): Unit +} + +/** + * Implements the Transactor abstraction. E.g. a transactional UntypedActor. + * + * @author Jonas Bonér + */ +abstract class UntypedTransactor extends UntypedActor { + self.makeTransactionRequired +} + +/** + * Extend this abstract class to create a remote UntypedActor. + * + * @author Jonas Bonér + */ +abstract class RemoteUntypedActor(address: InetSocketAddress) extends UntypedActor { + def this(hostname: String, port: Int) = this(new InetSocketAddress(hostname, port)) + self.makeRemote(address) +} + +/** + * Factory object for creating and managing 'UntypedActor's. Meant to be used from Java. + *

+ * Example on how to create an actor: + *

+ *   ActorRef actor = UntypedActor.actorOf(MyUntypedActor.class);
+ *   actor.start();
+ *   actor.sendOneWay(message, context)
+ *   actor.stop();
+ * 
+ * You can create and start the actor in one statement like this: + *
+ *   ActorRef actor = UntypedActor.actorOf(MyUntypedActor.class).start();
+ * 
+ * + * @author Jonas Bonér + */ +object UntypedActor { + + /** + * Creates an ActorRef out of the Actor. Allows you to pass in the class for the Actor. + *

+ * Example in Java: + *

+   *   ActorRef actor = UntypedActor.actorOf(MyUntypedActor.class);
+   *   actor.start();
+   *   actor.sendOneWay(message, context)
+   *   actor.stop();
+   * 
+ * You can create and start the actor in one statement like this: + *
+   *   ActorRef actor = UntypedActor.actorOf(MyUntypedActor.class).start();
+   * 
+ */ + def actorOf(clazz: Class[_]): UntypedActorRef = { + if (!classOf[UntypedActor].isAssignableFrom(clazz)) throw new IllegalArgumentException( + "Class [" + clazz.getName + "] passed into the 'actorOf' factory method needs to be assignable from 'UntypedActor'") + UntypedActorRef.wrap(new LocalActorRef(() => clazz.newInstance.asInstanceOf[Actor])) + } + + /** + * NOTE: Use this convenience method with care, do NOT make it possible to get a reference to the + * UntypedActor instance directly, but only through its 'UntypedActorRef' wrapper reference. + *

+ * Creates an ActorRef out of the Actor. Allows you to pass in the instance for the Actor. Only + * use this method when you need to pass in constructor arguments into the 'UntypedActor'. + *

+ * Example in Java: + *

+   *   ActorRef actor = UntypedActor.actorOf(new MyUntypedActor("service:name", 5));
+   *   actor.start();
+   *   actor.sendOneWay(message, context)
+   *   actor.stop();
+   * 
+ * You can create and start the actor in one statement like this: + *
+   *   ActorRef actor = UntypedActor.actorOf(MyUntypedActor.class).start();
+   * 
+ */ + def actorOf(actorInstance: UntypedActor): UntypedActorRef = UntypedActorRef.wrap(new LocalActorRef(() => actorInstance)) +} + +/** + * Use this class if you need to wrap an 'ActorRef' in the more Java-friendly 'UntypedActorRef'. + * + * @author Jonas Bonér + */ +object UntypedActorRef { + def wrap(actorRef: ActorRef) = new UntypedActorRef(actorRef) +} + +/** + * A Java-friendly wrapper class around the 'ActorRef'. + * + * @author Jonas Bonér + */ +class UntypedActorRef(val actorRef: ActorRef) { + + /** + * Returns the uuid for the actor. + */ + def getUuid(): String = actorRef.uuid + + /** + * Identifier for actor, does not have to be a unique one. Default is the 'uuid'. + *

+ * This field is used for logging, AspectRegistry.actorsFor(id), identifier for remote + * actor in RemoteServer etc.But also as the identifier for persistence, which means + * that you can use a custom name to be able to retrieve the "correct" persisted state + * upon restart, remote restart etc. + */ + def setId(id: String) = actorRef.id = id + def getId(): String = actorRef.id + + /** + * Defines the default timeout for '!!' and '!!!' invocations, + * e.g. the timeout for the future returned by the call to '!!' and '!!!'. + */ + def setTimeout(timeout: Long) = actorRef.timeout = timeout + def getTimeout(): Long = actorRef.timeout + + /** + * Defines the default timeout for an initial receive invocation. + * When specified, the receive function should be able to handle a 'ReceiveTimeout' message. + */ + def setReceiveTimeout(timeout: Long) = actorRef.receiveTimeout = Some(timeout) + def getReceiveTimeout(): Option[Long] = actorRef.receiveTimeout + + /** + * Set 'trapExit' to the list of exception classes that the actor should be able to trap + * from the actor it is supervising. When the supervising actor throws these exceptions + * then they will trigger a restart. + *

+ * + * Trap all exceptions: + *

+   * context.setTrapExit(new Class[]{Throwable.class});
+   * 
+ * + * Trap specific exceptions only: + *
+   * context.setTrapExit(new Class[]{MyApplicationException.class, MyApplicationError.class});
+   * 
+ */ + def setTrapExit(exceptions: Array[Class[_ <: Throwable]]) = actorRef.trapExit = exceptions.toList + def getTrapExit(): Array[Class[_ <: Throwable]] = actorRef.trapExit.toArray + + /** + * If 'trapExit' is set for the actor to act as supervisor, then a 'faultHandler' must be defined. + *

+ * Can be one of: + *

+   *  context.setFaultHandler(new AllForOneStrategy(maxNrOfRetries, withinTimeRange));
+   * 
+ * Or: + *
+   *  context.setFaultHandler(new OneForOneStrategy(maxNrOfRetries, withinTimeRange));
+   * 
+ */ + def setFaultHandler(handler: FaultHandlingStrategy) = actorRef.faultHandler = Some(handler) + def getFaultHandler(): Option[FaultHandlingStrategy] = actorRef.faultHandler + + /** + * Defines the life-cycle for a supervised actor. + */ + def setLifeCycle(lifeCycle: LifeCycle) = actorRef.lifeCycle = Some(lifeCycle) + def getLifeCycle(): Option[LifeCycle] = actorRef.lifeCycle + + /** + * The default dispatcher is the Dispatchers.globalExecutorBasedEventDrivenDispatcher();. + * This means that all actors will share the same event-driven executor based dispatcher. + *

+ * You can override it so it fits the specific use-case that the actor is used for. + * See the se.scalablesolutions.akka.dispatch.Dispatchers class for the different + * dispatchers available. + *

+ * The default is also that all actors that are created and spawned from within this actor + * is sharing the same dispatcher as its creator. + */ + def setDispatcher(dispatcher: MessageDispatcher) = actorRef.dispatcher = dispatcher + def getDispatcher(): MessageDispatcher = actorRef.dispatcher + + /** + * The reference sender Actor of the last received message. + * Is defined if the message was sent from another Actor, else None. + */ + def getSender(): Option[UntypedActorRef] = actorRef.sender match { + case Some(s) => Some(UntypedActorRef.wrap(s)) + case None => None + } + + /** + * The reference sender future of the last received message. + * Is defined if the message was sent with sent with 'sendRequestReply' or 'sendRequestReplyFuture', else None. + */ + def getSenderFuture(): Option[CompletableFuture[Any]] = actorRef.senderFuture + + /** + * Starts up the actor and its message queue. + */ + def start(): UntypedActorRef = UntypedActorRef.wrap(actorRef.start) + + /** + * Shuts down the actor its dispatcher and message queue. + * Alias for 'stop'. + */ + def exit() = stop() + + /** + * Shuts down the actor its dispatcher and message queue. + */ + def stop(): Unit = actorRef.stop() + + /** + * Sends a one-way asynchronous message. E.g. fire-and-forget semantics. + *

+ *

+   *   actor.sendOneWay(message);
+   * 
+ *

+ */ + def sendOneWay(message: AnyRef) = actorRef.!(message)(None) + + /** + * Sends a one-way asynchronous message. E.g. fire-and-forget semantics. + *

+ * Allows you to pass along the sender of the message. + *

+ *

+   *   actor.sendOneWay(message, context);
+   * 
+ *

+ */ + def sendOneWay(message: AnyRef, sender: UntypedActorRef) = + if (sender eq null) actorRef.!(message)(None) + else actorRef.!(message)(Some(sender.actorRef)) + + /** + * Sends a message asynchronously and waits on a future for a reply message under the hood. The timeout is taken from + * the default timeout in the Actor. + *

+ * It waits on the reply either until it receives it or until the timeout expires + * (which will throw an ActorTimeoutException). E.g. send-and-receive-eventually semantics. + *

+ * NOTE: + * Use this method with care. In most cases it is better to use 'sendOneWay' together with 'context.getSender()' to + * implement request/response message exchanges. + *

+ * If you are sending messages using sendRequestReply then you have to use context.reply(..) + * to send a reply message to the original sender. If not then the sender will block until the timeout expires. + */ + def sendRequestReply(message: AnyRef): AnyRef = + actorRef.!!(message)(None).getOrElse(throw new ActorTimeoutException( + "Message [" + message + + "]\n\tsent to [" + actorRef.actorClassName + + "]\n\twith timeout [" + actorRef.timeout + + "]\n\ttimed out.")) + .asInstanceOf[AnyRef] + + /** + * Sends a message asynchronously and waits on a future for a reply message under the hood. The timeout is taken from + * the default timeout in the Actor. + *

+ * It waits on the reply either until it receives it or until the timeout expires + * (which will throw an ActorTimeoutException). E.g. send-and-receive-eventually semantics. + *

+ * NOTE: + * Use this method with care. In most cases it is better to use 'sendOneWay' together with 'context.getSender()' to + * implement request/response message exchanges. + *

+ * If you are sending messages using sendRequestReply then you have to use context.reply(..) + * to send a reply message to the original sender. If not then the sender will block until the timeout expires. + */ + def sendRequestReply(message: AnyRef, sender: UntypedActorRef): AnyRef = { + val result = if (sender eq null) actorRef.!!(message)(None) + else actorRef.!!(message)(Some(sender.actorRef)) + result.getOrElse(throw new ActorTimeoutException( + "Message [" + message + + "]\n\tsent to [" + actorRef.actorClassName + + "]\n\tfrom [" + sender.actorRef.actorClassName + + "]\n\twith timeout [" + actorRef.timeout + + "]\n\ttimed out.")) + .asInstanceOf[AnyRef] + } + + /** + * Sends a message asynchronously and waits on a future for a reply message under the hood. + *

+ * It waits on the reply either until it receives it or until the timeout expires + * (which will throw an ActorTimeoutException). E.g. send-and-receive-eventually semantics. + *

+ * NOTE: + * Use this method with care. In most cases it is better to use 'sendOneWay' together with 'context.getSender()' to + * implement request/response message exchanges. + *

+ * If you are sending messages using sendRequestReply then you have to use context.reply(..) + * to send a reply message to the original sender. If not then the sender will block until the timeout expires. + */ + def sendRequestReply(message: AnyRef, timeout: Long): AnyRef = + actorRef.!!(message, timeout)(None).getOrElse(throw new ActorTimeoutException( + "Message [" + message + + "]\n\tsent to [" + actorRef.actorClassName + + "]\n\twith timeout [" + timeout + + "]\n\ttimed out.")) + .asInstanceOf[AnyRef] + + /** + * Sends a message asynchronously and waits on a future for a reply message under the hood. + *

+ * It waits on the reply either until it receives it or until the timeout expires + * (which will throw an ActorTimeoutException). E.g. send-and-receive-eventually semantics. + *

+ * NOTE: + * Use this method with care. In most cases it is better to use 'sendOneWay' together with 'context.getSender()' to + * implement request/response message exchanges. + *

+ * If you are sending messages using sendRequestReply then you have to use context.reply(..) + * to send a reply message to the original sender. If not then the sender will block until the timeout expires. + */ + def sendRequestReply(message: AnyRef, timeout: Long, sender: UntypedActorRef): AnyRef = { + val result = if (sender eq null) actorRef.!!(message, timeout)(None) + else actorRef.!!(message, timeout)(Some(sender.actorRef)) + result.getOrElse(throw new ActorTimeoutException( + "Message [" + message + + "]\n\tsent to [" + actorRef.actorClassName + + "]\n\tfrom [" + sender.actorRef.actorClassName + + "]\n\twith timeout [" + timeout + + "]\n\ttimed out.")) + .asInstanceOf[AnyRef] + } + + /** + * Sends a message asynchronously returns a future holding the eventual reply message. The timeout is taken from + * the default timeout in the Actor. + *

+ * NOTE: + * Use this method with care. In most cases it is better to use 'sendOneWay' together with the 'context.getSender()' to + * implement request/response message exchanges. + *

+ * If you are sending messages using sendRequestReplyFuture then you have to use context.reply(..) + * to send a reply message to the original sender. If not then the sender will block until the timeout expires. + */ + def sendRequestReplyFuture(message: AnyRef): Future[_] = actorRef.!!!(message)(None) + + /** + * Sends a message asynchronously returns a future holding the eventual reply message. The timeout is taken from + * the default timeout in the Actor. + *

+ * NOTE: + * Use this method with care. In most cases it is better to use 'sendOneWay' together with the 'context.getSender()' to + * implement request/response message exchanges. + *

+ * If you are sending messages using sendRequestReplyFuture then you have to use context.reply(..) + * to send a reply message to the original sender. If not then the sender will block until the timeout expires. + */ + def sendRequestReplyFuture(message: AnyRef, sender: UntypedActorRef): Future[_] = + if (sender eq null) actorRef.!!!(message)(None) + else actorRef.!!!(message)(Some(sender.actorRef)) + + /** + * Sends a message asynchronously returns a future holding the eventual reply message. + *

+ * NOTE: + * Use this method with care. In most cases it is better to use 'sendOneWay' together with the 'context.getSender()' to + * implement request/response message exchanges. + *

+ * If you are sending messages using sendRequestReplyFuture then you have to use context.reply(..) + * to send a reply message to the original sender. If not then the sender will block until the timeout expires. + */ + def sendRequestReplyFuture(message: AnyRef, timeout: Long): Future[_] = actorRef.!!!(message, timeout)(None) + + /** + * Sends a message asynchronously returns a future holding the eventual reply message. + *

+ * NOTE: + * Use this method with care. In most cases it is better to use 'sendOneWay' together with the 'context.getSender()' to + * implement request/response message exchanges. + *

+ * If you are sending messages using sendRequestReplyFuture then you have to use context.reply(..) + * to send a reply message to the original sender. If not then the sender will block until the timeout expires. + */ + def sendRequestReplyFuture(message: AnyRef, timeout: Long, sender: UntypedActorRef): Future[_] = + if (sender eq null) actorRef.!!!(message, timeout)(None) + else actorRef.!!!(message, timeout)(Some(sender.actorRef)) + + /** + * Forwards the message and passes the original sender actor as the sender. + *

+ * Works with 'sendOneWay', 'sendRequestReply' and 'sendRequestReplyFuture'. + */ + def forward(message: AnyRef, sender: UntypedActorRef): Unit = + if (sender eq null) throw new IllegalArgumentException("The 'sender' argument to 'forward' can't be null") + else actorRef.forward(message)(Some(sender.actorRef)) + + /** + * Use context.replyUnsafe(..) to reply with a message to the original sender of the message currently + * being processed. + *

+ * Throws an IllegalStateException if unable to determine what to reply to. + */ + def replyUnsafe(message: AnyRef): Unit = actorRef.reply(message) + + /** + * Use context.replySafe(..) to reply with a message to the original sender of the message currently + * being processed. + *

+ * Returns true if reply was sent, and false if unable to determine what to reply to. + */ + def replySafe(message: AnyRef): Boolean = actorRef.reply_?(message) + + /** + * Returns the class for the Actor instance that is managed by the ActorRef. + */ + def getActorClass(): Class[_ <: Actor] = actorRef.actorClass + + /** + * Returns the class name for the Actor instance that is managed by the ActorRef. + */ + def getActorClassName(): String = actorRef.actorClassName + + /** + * Invoking 'makeRemote' means that an actor will be moved to and invoked on a remote host. + */ + def makeRemote(hostname: String, port: Int): Unit = actorRef.makeRemote(hostname, port) + + /** + * Invoking 'makeRemote' means that an actor will be moved to and invoked on a remote host. + */ + def makeRemote(address: InetSocketAddress): Unit = actorRef.makeRemote(address) + + /** + * Invoking 'makeTransactionRequired' means that the actor will **start** a new transaction if non exists. + * However, it will always participate in an existing transaction. + */ + def makeTransactionRequired(): Unit = actorRef.makeTransactionRequired + + /** + * Sets the transaction configuration for this actor. Needs to be invoked before the actor is started. + */ + def setTransactionConfig(config: TransactionConfig): Unit = actorRef.transactionConfig = config + + /** + * Get the transaction configuration for this actor. + */ + def getTransactionConfig(): TransactionConfig = actorRef.transactionConfig + + /** + * Gets the remote address for the actor, if any, else None. + */ + def getRemoteAddress(): Option[InetSocketAddress] = actorRef.remoteAddress + + /** + * Returns the home address and port for this actor. + */ + def getHomeAddress(): InetSocketAddress = actorRef.homeAddress + + /** + * Set the home address and port for this actor. + */ + def setHomeAddress(hostnameAndPort: Tuple2[String, Int]): Unit = actorRef.homeAddress = hostnameAndPort + + /** + * Set the home address and port for this actor. 
+ */ + def setHomeAddress(address: InetSocketAddress): Unit = actorRef.homeAddress = address + + /** + * Links an other actor to this actor. Links are unidirectional and means that a the linking actor will + * receive a notification if the linked actor has crashed. + *

+ * If the 'trapExit' member field has been set to at contain at least one exception class then it will + * 'trap' these exceptions and automatically restart the linked actors according to the restart strategy + * defined by the 'faultHandler'. + */ + def link(actor: UntypedActorRef): Unit = actorRef.link(actor.actorRef) + + /** + * Unlink the actor. + */ + def unlink(actor: UntypedActorRef): Unit = actorRef.unlink(actor.actorRef) + + /** + * Atomically start and link an actor. + */ + def startLink(actor: UntypedActorRef): Unit = actorRef.startLink(actor.actorRef) + + /** + * Atomically start, link and make an actor remote. + */ + def startLinkRemote(actor: UntypedActorRef, hostname: String, port: Int): Unit = + actorRef.startLinkRemote(actor.actorRef, hostname, port) + + /** + * Returns the mailbox size. + */ + def getMailboxSize(): Int = actorRef.mailboxSize + + /** + * Returns the current supervisor if there is one, null if not. + */ + def getSupervisor(): UntypedActorRef = UntypedActorRef.wrap(actorRef.supervisor.getOrElse(null)) +} diff --git a/akka-core/src/main/scala/config/Config.scala b/akka-core/src/main/scala/config/Config.scala index 68842ad1e3..ec44453e59 100644 --- a/akka-core/src/main/scala/config/Config.scala +++ b/akka-core/src/main/scala/config/Config.scala @@ -82,6 +82,8 @@ object Config extends Logging { if (VERSION != CONFIG_VERSION) throw new ConfigurationException( "Akka JAR version [" + VERSION + "] is different than the provided config ('akka.conf') version [" + CONFIG_VERSION + "]") + val TIME_UNIT = config.getString("akka.time-unit", "seconds") + val startTime = System.currentTimeMillis def uptime = (System.currentTimeMillis - startTime) / 1000 } diff --git a/akka-core/src/main/scala/config/Configuration.scala b/akka-core/src/main/scala/config/Configuration.scala index faa5c912f1..e257c739a9 100644 --- a/akka-core/src/main/scala/config/Configuration.scala +++ b/akka-core/src/main/scala/config/Configuration.scala @@ -5,7 +5,7 @@ package 
se.scalablesolutions.akka.config /* -import se.scalablesolutions.akka.kernel.{ActiveObject, ActiveObjectProxy} +import se.scalablesolutions.akka.kernel.{TypedActor, TypedActorProxy} import com.google.inject.{AbstractModule} import java.util.{List => JList, ArrayList} import scala.reflect.BeanProperty @@ -55,6 +55,6 @@ class Component(@BeanProperty val intf: Class[_], @BeanProperty val target: Class[_], @BeanProperty val lifeCycle: LifeCycle, @BeanProperty val timeout: Int) extends Server { - def newWorker(proxy: ActiveObjectProxy) = se.scalablesolutions.akka.kernel.Supervise(proxy.server, lifeCycle.transform) + def newWorker(proxy: TypedActorProxy) = se.scalablesolutions.akka.kernel.Supervise(proxy.server, lifeCycle.transform) } */ diff --git a/akka-core/src/main/scala/config/Configurator.scala b/akka-core/src/main/scala/config/Configurator.scala index db92c5f35b..ba7e1f35f2 100644 --- a/akka-core/src/main/scala/config/Configurator.scala +++ b/akka-core/src/main/scala/config/Configurator.scala @@ -6,14 +6,14 @@ package se.scalablesolutions.akka.config import ScalaConfig.{RestartStrategy, Component} -private[akka] trait ActiveObjectConfiguratorBase { +private[akka] trait TypedActorConfiguratorBase { def getExternalDependency[T](clazz: Class[T]): T - def configure(restartStrategy: RestartStrategy, components: List[Component]): ActiveObjectConfiguratorBase + def configure(restartStrategy: RestartStrategy, components: List[Component]): TypedActorConfiguratorBase - def inject: ActiveObjectConfiguratorBase + def inject: TypedActorConfiguratorBase - def supervise: ActiveObjectConfiguratorBase + def supervise: TypedActorConfiguratorBase def reset diff --git a/akka-core/src/main/scala/config/SupervisionConfig.scala b/akka-core/src/main/scala/config/SupervisionConfig.scala index 1f5fd15a9b..cb0829704d 100644 --- a/akka-core/src/main/scala/config/SupervisionConfig.scala +++ b/akka-core/src/main/scala/config/SupervisionConfig.scala @@ -42,16 +42,7 @@ object ScalaConfig { case 
object AllForOne extends FailOverScheme case object OneForOne extends FailOverScheme - case class LifeCycle(scope: Scope, - restartCallbacks: Option[RestartCallbacks] = None, - shutdownCallback: Option[ShutdownCallback] = None) extends ConfigElement - case class RestartCallbacks(preRestart: String, postRestart: String) { - if ((preRestart eq null) || (postRestart eq null)) throw new IllegalArgumentException("Restart callback methods can't be null") - } - case class ShutdownCallback(shutdown: String) { - if (shutdown eq null) throw new IllegalArgumentException("Shutdown callback method can't be null") - } - + case class LifeCycle(scope: Scope) extends ConfigElement case object Permanent extends Scope case object Temporary extends Scope @@ -137,26 +128,12 @@ object JavaConfig { scheme.transform, maxNrOfRetries, withinTimeRange, trapExceptions.toList) } - class LifeCycle(@BeanProperty val scope: Scope, - @BeanProperty val restartCallbacks: RestartCallbacks, - @BeanProperty val shutdownCallback: ShutdownCallback) extends ConfigElement { - def this(scope: Scope) = this(scope, null, null) - def this(scope: Scope, restartCallbacks: RestartCallbacks) = this(scope, restartCallbacks, null) - def this(scope: Scope, shutdownCallback: ShutdownCallback) = this(scope, null, shutdownCallback) + class LifeCycle(@BeanProperty val scope: Scope) extends ConfigElement { def transform = { - val restartCallbacksOption = if (restartCallbacks eq null) None else Some(restartCallbacks.transform) - val shutdownCallbackOption = if (shutdownCallback eq null) None else Some(shutdownCallback.transform) - se.scalablesolutions.akka.config.ScalaConfig.LifeCycle(scope.transform, restartCallbacksOption, shutdownCallbackOption) + se.scalablesolutions.akka.config.ScalaConfig.LifeCycle(scope.transform) } } - class RestartCallbacks(@BeanProperty val preRestart: String, @BeanProperty val postRestart: String) { - def transform = se.scalablesolutions.akka.config.ScalaConfig.RestartCallbacks(preRestart, 
postRestart) - } - class ShutdownCallback(@BeanProperty val shutdown: String) { - def transform = se.scalablesolutions.akka.config.ScalaConfig.ShutdownCallback(shutdown) - } - abstract class Scope extends ConfigElement { def transform: se.scalablesolutions.akka.config.ScalaConfig.Scope } diff --git a/akka-core/src/main/scala/config/ActiveObjectConfigurator.scala b/akka-core/src/main/scala/config/TypedActorConfigurator.scala similarity index 56% rename from akka-core/src/main/scala/config/ActiveObjectConfigurator.scala rename to akka-core/src/main/scala/config/TypedActorConfigurator.scala index 88e495bbd0..d639d21f5f 100644 --- a/akka-core/src/main/scala/config/ActiveObjectConfigurator.scala +++ b/akka-core/src/main/scala/config/TypedActorConfigurator.scala @@ -12,54 +12,55 @@ import java.util.{ArrayList} import com.google.inject._ /** - * Configurator for the Active Objects. Used to do declarative configuration of supervision. - * It also does dependency injection with and into Active Objects using dependency injection + * Configurator for the TypedActors. Used to do declarative configuration of supervision. + * It also does dependency injection with and into TypedActors using dependency injection * frameworks such as Google Guice or Spring. *

- * If you don't want declarative configuration then you should use the ActiveObject + * If you don't want declarative configuration then you should use the TypedActor * factory methods. * * @author Jonas Bonér */ -class ActiveObjectConfigurator { +class TypedActorConfigurator { import scala.collection.JavaConversions._ // TODO: make pluggable once we have f.e a SpringConfigurator - private val INSTANCE = new ActiveObjectGuiceConfigurator + private val INSTANCE = new TypedActorGuiceConfigurator /** - * Returns the a list with all active objects that has been put under supervision for the class specified. + * Returns the a list with all typed actors that has been put under supervision for the class specified. * - * @param clazz the class for the active object - * @return a list with all the active objects for the class + * @param clazz the class for the typed actor + * @return a list with all the typed actors for the class */ - def getInstances[T](clazz: Class[T]): JList[T] = INSTANCE.getInstance(clazz).foldLeft(new ArrayList[T]){ (l, i) => l add i ; l } + def getInstances[T](clazz: Class[T]): JList[T] = + INSTANCE.getInstance(clazz).foldLeft(new ArrayList[T]){ (l, i) => l add i ; l } /** - * Returns the first item in a list of all active objects that has been put under supervision for the class specified. + * Returns the first item in a list of all typed actors that has been put under supervision for the class specified. 
* - * @param clazz the class for the active object - * @return the active object for the class + * @param clazz the class for the typed actor + * @return the typed actor for the class */ def getInstance[T](clazz: Class[T]): T = INSTANCE.getInstance(clazz).head - def configure(restartStrategy: RestartStrategy, components: Array[Component]): ActiveObjectConfigurator = { + def configure(restartStrategy: RestartStrategy, components: Array[Component]): TypedActorConfigurator = { INSTANCE.configure( restartStrategy.transform, components.toList.asInstanceOf[scala.List[Component]].map(_.transform)) this } - def inject: ActiveObjectConfigurator = { + def inject: TypedActorConfigurator = { INSTANCE.inject this } - def supervise: ActiveObjectConfigurator = { + def supervise: TypedActorConfigurator = { INSTANCE.supervise this } - def addExternalGuiceModule(module: Module): ActiveObjectConfigurator = { + def addExternalGuiceModule(module: Module): TypedActorConfigurator = { INSTANCE.addExternalGuiceModule(module) this } diff --git a/akka-core/src/main/scala/config/ActiveObjectGuiceConfigurator.scala b/akka-core/src/main/scala/config/TypedActorGuiceConfigurator.scala similarity index 66% rename from akka-core/src/main/scala/config/ActiveObjectGuiceConfigurator.scala rename to akka-core/src/main/scala/config/TypedActorGuiceConfigurator.scala index 54174b6030..8b23921792 100644 --- a/akka-core/src/main/scala/config/ActiveObjectGuiceConfigurator.scala +++ b/akka-core/src/main/scala/config/TypedActorGuiceConfigurator.scala @@ -7,7 +7,7 @@ package se.scalablesolutions.akka.config import com.google.inject._ import se.scalablesolutions.akka.config.ScalaConfig._ -import se.scalablesolutions.akka.actor.{Supervisor, ActiveObject, Dispatcher, ActorRef, Actor, IllegalActorStateException} +import se.scalablesolutions.akka.actor.{Supervisor, TypedActor, Dispatcher, ActorRef, Actor, IllegalActorStateException} import se.scalablesolutions.akka.remote.RemoteServer import 
se.scalablesolutions.akka.util.Logging @@ -17,12 +17,12 @@ import java.net.InetSocketAddress import java.lang.reflect.Method /** - * This is an class for internal usage. Instead use the se.scalablesolutions.akka.config.ActiveObjectConfigurator - * class for creating ActiveObjects. + * This is an class for internal usage. Instead use the se.scalablesolutions.akka.config.TypedActorConfigurator + * class for creating TypedActors. * * @author Jonas Bonér */ -private[akka] class ActiveObjectGuiceConfigurator extends ActiveObjectConfiguratorBase with Logging { +private[akka] class TypedActorGuiceConfigurator extends TypedActorConfiguratorBase with Logging { private var injector: Injector = _ private var supervisor: Option[Supervisor] = None private var restartStrategy: RestartStrategy = _ @@ -30,22 +30,22 @@ private[akka] class ActiveObjectGuiceConfigurator extends ActiveObjectConfigurat private var supervised: List[Supervise] = Nil private var bindings: List[DependencyBinding] = Nil private var configRegistry = new HashMap[Class[_], Component] // TODO is configRegistry needed? - private var activeObjectRegistry = new HashMap[Class[_], Tuple3[AnyRef, AnyRef, Component]] + private var typedActorRegistry = new HashMap[Class[_], Tuple3[AnyRef, AnyRef, Component]] private var modules = new java.util.ArrayList[Module] private var methodToUriRegistry = new HashMap[Method, String] /** * Returns the active abject that has been put under supervision for the class specified. 
* - * @param clazz the class for the active object - * @return the active objects for the class + * @param clazz the class for the typed actor + * @return the typed actors for the class */ def getInstance[T](clazz: Class[T]): List[T] = synchronized { - log.debug("Retrieving active object [%s]", clazz.getName) + log.debug("Retrieving typed actor [%s]", clazz.getName) if (injector eq null) throw new IllegalActorStateException( "inject() and/or supervise() must be called before invoking getInstance(clazz)") val (proxy, targetInstance, component) = - activeObjectRegistry.getOrElse(clazz, throw new IllegalActorStateException( + typedActorRegistry.getOrElse(clazz, throw new IllegalActorStateException( "Class [" + clazz.getName + "] has not been put under supervision" + "\n(by passing in the config to the 'configure' and then invoking 'supervise') method")) injector.injectMembers(targetInstance) @@ -53,7 +53,7 @@ private[akka] class ActiveObjectGuiceConfigurator extends ActiveObjectConfigurat } def isDefined(clazz: Class[_]): Boolean = synchronized { - activeObjectRegistry.get(clazz).isDefined + typedActorRegistry.get(clazz).isDefined } override def getExternalDependency[T](clazz: Class[T]): T = synchronized { @@ -67,7 +67,7 @@ private[akka] class ActiveObjectGuiceConfigurator extends ActiveObjectConfigurat } override def configure(restartStrategy: RestartStrategy, components: List[Component]): - ActiveObjectConfiguratorBase = synchronized { + TypedActorConfiguratorBase = synchronized { this.restartStrategy = restartStrategy this.components = components.toArray.toList.asInstanceOf[List[Component]] bindings = for (component <- this.components) yield { @@ -76,63 +76,72 @@ private[akka] class ActiveObjectGuiceConfigurator extends ActiveObjectConfigurat } val deps = new java.util.ArrayList[DependencyBinding](bindings.size) for (b <- bindings) deps.add(b) - modules.add(new ActiveObjectGuiceModule(deps)) + modules.add(new TypedActorGuiceModule(deps)) this } private def 
newSubclassingProxy(component: Component): DependencyBinding = { - val targetClass = component.target - val actorRef = Actor.actorOf(new Dispatcher(component.transactionRequired, - component.lifeCycle.restartCallbacks, - component.lifeCycle.shutdownCallback)) + val targetClass = + if (component.target.isInstanceOf[Class[_ <: TypedActor]]) component.target.asInstanceOf[Class[_ <: TypedActor]] + else throw new IllegalArgumentException("TypedActor [" + component.target.getName + "] must be a subclass of TypedActor") + val actorRef = Actor.actorOf(new Dispatcher(component.transactionRequired)) if (component.dispatcher.isDefined) actorRef.dispatcher = component.dispatcher.get val remoteAddress = if (component.remoteAddress.isDefined) Some(new InetSocketAddress(component.remoteAddress.get.hostname, component.remoteAddress.get.port)) else None - val proxy = ActiveObject.newInstance(targetClass, actorRef, remoteAddress, component.timeout).asInstanceOf[AnyRef] - remoteAddress.foreach(address => RemoteServer.registerActiveObject(address, targetClass.getName, proxy)) + val proxy = TypedActor.newInstance(targetClass, actorRef, remoteAddress, component.timeout).asInstanceOf[AnyRef] + remoteAddress.foreach(address => RemoteServer.registerTypedActor(address, targetClass.getName, proxy)) supervised ::= Supervise(actorRef, component.lifeCycle) - activeObjectRegistry.put(targetClass, (proxy, proxy, component)) + typedActorRegistry.put(targetClass, (proxy, proxy, component)) new DependencyBinding(targetClass, proxy) } private def newDelegatingProxy(component: Component): DependencyBinding = { - val targetClass = component.intf.get - val targetInstance = component.target.newInstance.asInstanceOf[AnyRef] // TODO: perhaps need to put in registry component.target.getConstructor(Array[Class[_]](): _*).setAccessible(true) - val actorRef = Actor.actorOf(new Dispatcher(component.transactionRequired, - component.lifeCycle.restartCallbacks, - component.lifeCycle.shutdownCallback)) + + val 
targetClass = component.intf.get + val instance = component.target.newInstance.asInstanceOf[AnyRef] // TODO: perhaps need to put in registry + + val targetInstance = + if (instance.isInstanceOf[TypedActor]) instance.asInstanceOf[TypedActor] + else throw new IllegalArgumentException("TypedActor [" + component.target.getName + "] must be a subclass of TypedActor") + + val actorRef = Actor.actorOf(new Dispatcher(component.transactionRequired)) + if (component.dispatcher.isDefined) actorRef.dispatcher = component.dispatcher.get + val remoteAddress = if (component.remoteAddress.isDefined) Some(new InetSocketAddress(component.remoteAddress.get.hostname, component.remoteAddress.get.port)) else None - val proxy = ActiveObject.newInstance( + + val proxy = TypedActor.newInstance( targetClass, targetInstance, actorRef, remoteAddress, component.timeout).asInstanceOf[AnyRef] - remoteAddress.foreach(address => RemoteServer.registerActiveObject(address, targetClass.getName, proxy)) + + remoteAddress.foreach(address => RemoteServer.registerTypedActor(address, targetClass.getName, proxy)) supervised ::= Supervise(actorRef, component.lifeCycle) - activeObjectRegistry.put(targetClass, (proxy, targetInstance, component)) + + typedActorRegistry.put(targetClass, (proxy, targetInstance, component)) new DependencyBinding(targetClass, proxy) } - override def inject: ActiveObjectConfiguratorBase = synchronized { + override def inject: TypedActorConfiguratorBase = synchronized { if (injector ne null) throw new IllegalActorStateException("inject() has already been called on this configurator") injector = Guice.createInjector(modules) this } - override def supervise: ActiveObjectConfiguratorBase = synchronized { + override def supervise: TypedActorConfiguratorBase = synchronized { if (injector eq null) inject - supervisor = Some(ActiveObject.supervise(restartStrategy, supervised)) + supervisor = Some(TypedActor.supervise(restartStrategy, supervised)) this } /** * Add additional services to be 
wired in. *

-   * activeObjectConfigurator.addExternalGuiceModule(new AbstractModule {
+   * typedActorConfigurator.addExternalGuiceModule(new AbstractModule {
    *   protected void configure() {
    *     bind(Foo.class).to(FooImpl.class).in(Scopes.SINGLETON);
    *     bind(BarImpl.class);
@@ -141,7 +150,7 @@ private[akka] class ActiveObjectGuiceConfigurator extends ActiveObjectConfigurat
    *   }})
    * 
*/ - def addExternalGuiceModule(module: Module): ActiveObjectConfiguratorBase = synchronized { + def addExternalGuiceModule(module: Module): TypedActorConfiguratorBase = synchronized { modules.add(module) this } @@ -151,7 +160,7 @@ private[akka] class ActiveObjectGuiceConfigurator extends ActiveObjectConfigurat def reset = synchronized { modules = new java.util.ArrayList[Module] configRegistry = new HashMap[Class[_], Component] - activeObjectRegistry = new HashMap[Class[_], Tuple3[AnyRef, AnyRef, Component]] + typedActorRegistry = new HashMap[Class[_], Tuple3[AnyRef, AnyRef, Component]] methodToUriRegistry = new HashMap[Method, String] injector = null restartStrategy = null diff --git a/akka-core/src/main/scala/dispatch/AbstractReactorBasedEventDrivenDispatcher.scala b/akka-core/src/main/scala/dispatch/AbstractReactorBasedEventDrivenDispatcher.scala index 832ae9203a..6bacec73be 100644 --- a/akka-core/src/main/scala/dispatch/AbstractReactorBasedEventDrivenDispatcher.scala +++ b/akka-core/src/main/scala/dispatch/AbstractReactorBasedEventDrivenDispatcher.scala @@ -29,6 +29,7 @@ abstract class AbstractReactorBasedEventDrivenDispatcher(val name: String) exten } def shutdown = if (active) { + log.debug("Shutting down %s", toString) active = false selectorThread.interrupt doShutdown diff --git a/akka-core/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala b/akka-core/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala index 48b42847a0..836dc0ea86 100644 --- a/akka-core/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala +++ b/akka-core/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala @@ -5,6 +5,7 @@ package se.scalablesolutions.akka.dispatch import se.scalablesolutions.akka.actor.{ActorRef, IllegalActorStateException} +import jsr166x.ConcurrentLinkedDeque /** * Default settings are: @@ -64,18 +65,37 @@ class ExecutorBasedEventDrivenDispatcher(_name: String, throughput: Int = Dispat @volatile private var active: 
Boolean = false - val name: String = "event-driven:executor:dispatcher:" + _name + val name = "akka:event-driven:dispatcher:" + _name init - def dispatch(invocation: MessageInvocation) = dispatch(invocation.receiver) + def dispatch(invocation: MessageInvocation) = { + getMailbox(invocation.receiver).add(invocation) + dispatch(invocation.receiver) + } + + /** + * @return the mailbox associated with the actor + */ + private def getMailbox(receiver: ActorRef) = receiver.mailbox.asInstanceOf[ConcurrentLinkedDeque[MessageInvocation]] + + override def mailboxSize(actorRef: ActorRef) = getMailbox(actorRef).size + + override def register(actorRef: ActorRef) = { + // The actor will need a ConcurrentLinkedDeque based mailbox + if( actorRef.mailbox == null ) { + actorRef.mailbox = new ConcurrentLinkedDeque[MessageInvocation]() + } + super.register(actorRef) + } def dispatch(receiver: ActorRef): Unit = if (active) { + executor.execute(new Runnable() { def run = { var lockAcquiredOnce = false var finishedBeforeMailboxEmpty = false val lock = receiver.dispatcherLock - val mailbox = receiver.mailbox + val mailbox = getMailbox(receiver) // this do-while loop is required to prevent missing new messages between the end of the inner while // loop and releasing the lock do { @@ -92,7 +112,9 @@ class ExecutorBasedEventDrivenDispatcher(_name: String, throughput: Int = Dispat } while ((lockAcquiredOnce && !finishedBeforeMailboxEmpty && !mailbox.isEmpty)) } }) - } else throw new IllegalActorStateException("Can't submit invocations to dispatcher since it's not started") + } else { + log.warning("%s is shut down,\n\tignoring the rest of the messages in the mailbox of\n\t%s", toString, receiver) + } /** @@ -102,39 +124,38 @@ class ExecutorBasedEventDrivenDispatcher(_name: String, throughput: Int = Dispat */ def processMailbox(receiver: ActorRef): Boolean = { var processedMessages = 0 - var messageInvocation = receiver.mailbox.poll + val mailbox = getMailbox(receiver) + var messageInvocation 
= mailbox.poll while (messageInvocation != null) { messageInvocation.invoke processedMessages += 1 // check if we simply continue with other messages, or reached the throughput limit - if (throughput <= 0 || processedMessages < throughput) - messageInvocation = receiver.mailbox.poll + if (throughput <= 0 || processedMessages < throughput) messageInvocation = mailbox.poll else { - return !receiver.mailbox.isEmpty messageInvocation = null + return !mailbox.isEmpty } } - - return false + false } def start = if (!active) { - log.debug("Starting ExecutorBasedEventDrivenDispatcher [%s]", name) - log.debug("Throughput for %s = %d", name, throughput) + log.debug("Starting up %s\n\twith throughput [%d]", toString, throughput) active = true } def shutdown = if (active) { - log.debug("Shutting down ExecutorBasedEventDrivenDispatcher [%s]", name) + log.debug("Shutting down %s", toString) executor.shutdownNow active = false references.clear } - def usesActorMailbox = true - def ensureNotActive(): Unit = if (active) throw new IllegalActorStateException( "Can't build a new thread pool for a dispatcher that is already up and running") + override def toString = "ExecutorBasedEventDrivenDispatcher[" + name + "]" + + // FIXME: should we have an unbounded queue and not bounded as default ???? 
private[akka] def init = withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity.buildThreadPool } diff --git a/akka-core/src/main/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala b/akka-core/src/main/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala index 94ef0a2e67..b9ff5d92f4 100644 --- a/akka-core/src/main/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala +++ b/akka-core/src/main/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala @@ -7,6 +7,7 @@ package se.scalablesolutions.akka.dispatch import java.util.concurrent.CopyOnWriteArrayList import se.scalablesolutions.akka.actor.{Actor, ActorRef, IllegalActorStateException} +import jsr166x.ConcurrentLinkedDeque /** * An executor based event driven dispatcher which will try to redistribute work from busy actors to idle actors. It is assumed @@ -41,11 +42,19 @@ class ExecutorBasedEventDrivenWorkStealingDispatcher(_name: String) extends Mess /** The index in the pooled actors list which was last used to steal work */ @volatile private var lastThiefIndex = 0 - // TODO: is there a naming convention for this name? 
- val name: String = "event-driven-work-stealing:executor:dispatcher:" + _name + val name = "akka:event-driven-work-stealing:dispatcher:" + _name init + + /** + * @return the mailbox associated with the actor + */ + private def getMailbox(receiver: ActorRef) = receiver.mailbox.asInstanceOf[ConcurrentLinkedDeque[MessageInvocation]] + + override def mailboxSize(actorRef: ActorRef) = getMailbox(actorRef).size + def dispatch(invocation: MessageInvocation) = if (active) { + getMailbox(invocation.receiver).add(invocation) executor.execute(new Runnable() { def run = { if (!tryProcessMailbox(invocation.receiver)) { @@ -77,7 +86,7 @@ class ExecutorBasedEventDrivenWorkStealingDispatcher(_name: String) extends Mess lock.unlock } } - } while ((lockAcquiredOnce && !receiver.mailbox.isEmpty)) + } while ((lockAcquiredOnce && !getMailbox(receiver).isEmpty)) return lockAcquiredOnce } @@ -86,10 +95,11 @@ class ExecutorBasedEventDrivenWorkStealingDispatcher(_name: String) extends Mess * Process the messages in the mailbox of the given actor. 
*/ private def processMailbox(receiver: ActorRef) = { - var messageInvocation = receiver.mailbox.poll + val mailbox = getMailbox(receiver) + var messageInvocation = mailbox.poll while (messageInvocation != null) { messageInvocation.invoke - messageInvocation = receiver.mailbox.poll + messageInvocation = mailbox.poll } } @@ -117,7 +127,7 @@ class ExecutorBasedEventDrivenWorkStealingDispatcher(_name: String) extends Mess for (i <- 0 to actors.length) { val index = (i + startIndex) % actors.length val actor = actors(index) - if (actor != receiver && actor.mailbox.isEmpty) return (Some(actor), index) + if (actor != receiver && getMailbox(actor).isEmpty) return (Some(actor), index) } (None, startIndex) // nothing found, reuse same start index next time } @@ -129,8 +139,7 @@ class ExecutorBasedEventDrivenWorkStealingDispatcher(_name: String) extends Mess private def tryDonateAndProcessMessages(receiver: ActorRef, thief: ActorRef) = { if (thief.dispatcherLock.tryLock) { try { - while(donateMessage(receiver, thief)) - processMailbox(thief) + while(donateMessage(receiver, thief)) processMailbox(thief) } finally { thief.dispatcherLock.unlock } @@ -141,7 +150,7 @@ class ExecutorBasedEventDrivenWorkStealingDispatcher(_name: String) extends Mess * Steal a message from the receiver and give it to the thief. 
*/ private def donateMessage(receiver: ActorRef, thief: ActorRef): Boolean = { - val donated = receiver.mailbox.pollLast + val donated = getMailbox(receiver).pollLast if (donated ne null) { if (donated.senderFuture.isDefined) thief.self.postMessageToMailboxAndCreateFutureResultWithTimeout[Any]( donated.message, receiver.timeout, donated.sender, donated.senderFuture) @@ -156,7 +165,7 @@ class ExecutorBasedEventDrivenWorkStealingDispatcher(_name: String) extends Mess } def shutdown = if (active) { - log.debug("Shutting down ExecutorBasedEventDrivenWorkStealingDispatcher [%s]", name) + log.debug("Shutting down %s", toString) executor.shutdownNow active = false references.clear @@ -165,10 +174,16 @@ class ExecutorBasedEventDrivenWorkStealingDispatcher(_name: String) extends Mess def ensureNotActive(): Unit = if (active) throw new IllegalActorStateException( "Can't build a new thread pool for a dispatcher that is already up and running") + override def toString = "ExecutorBasedEventDrivenWorkStealingDispatcher[" + name + "]" + private[akka] def init = withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity.buildThreadPool override def register(actorRef: ActorRef) = { verifyActorsAreOfSameType(actorRef) + // The actor will need a ConcurrentLinkedDeque based mailbox + if( actorRef.mailbox == null ) { + actorRef.mailbox = new ConcurrentLinkedDeque[MessageInvocation]() + } pooledActors.add(actorRef) super.register(actorRef) } @@ -178,19 +193,14 @@ class ExecutorBasedEventDrivenWorkStealingDispatcher(_name: String) extends Mess super.unregister(actorRef) } - def usesActorMailbox = true - private def verifyActorsAreOfSameType(actorOfId: ActorRef) = { actorType match { - case None => { - actorType = Some(actorOfId.actor.getClass) - } - case Some(aType) => { + case None => actorType = Some(actorOfId.actor.getClass) + case Some(aType) => if (aType != actorOfId.actor.getClass) - throw new IllegalActorStateException( - String.format("Can't register actor %s in a work 
stealing dispatcher which already knows actors of type %s", - actorOfId.actor, aType)) - } + throw new IllegalActorStateException(String.format( + "Can't register actor %s in a work stealing dispatcher which already knows actors of type %s", + actorOfId.actor, aType)) } } } diff --git a/akka-core/src/main/scala/dispatch/HawtDispatcher.scala b/akka-core/src/main/scala/dispatch/HawtDispatcher.scala new file mode 100644 index 0000000000..45e4468b3d --- /dev/null +++ b/akka-core/src/main/scala/dispatch/HawtDispatcher.scala @@ -0,0 +1,249 @@ +/** + * Copyright (C) 2010, Progress Software Corporation and/or its + * subsidiaries or affiliates. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package se.scalablesolutions.akka.dispatch + +import se.scalablesolutions.akka.actor.ActorRef +import org.fusesource.hawtdispatch.DispatchQueue +import org.fusesource.hawtdispatch.ScalaDispatch._ +import java.util.concurrent.atomic.AtomicInteger +import java.util.concurrent.atomic.AtomicBoolean +import java.util.concurrent.CountDownLatch +import org.fusesource.hawtdispatch.DispatchQueue.QueueType +import org.fusesource.hawtdispatch.ListEventAggregator + +/** + * Holds helper methods for working with actors that are using + * a HawtDispatcher as it's dispatcher. 
+ */ +object HawtDispatcher { + + private val retained = new AtomicInteger() + @volatile private var shutdownLatch: CountDownLatch = _ + + private def retainNonDaemon = { + if( retained.getAndIncrement == 0 ) { + shutdownLatch = new CountDownLatch(1) + new Thread("HawtDispatch Non-Daemon") { + override def run = { + try { + shutdownLatch.await + } catch { + case _ => + } + println("done"); + } + }.start() + } + } + + private def releaseNonDaemon = { + if( retained.decrementAndGet == 0 ) { + shutdownLatch.countDown + shutdownLatch = null + } + } + + /** + * @return the mailbox associated with the actor + */ + private def mailbox(actorRef: ActorRef) = { + actorRef.mailbox.asInstanceOf[HawtDispatcherMailbox] + } + + /** + * @return the dispatch queue associated with the actor + */ + def queue(actorRef: ActorRef) = { + mailbox(actorRef).queue + } + + + /** + *

+ * Pins an actor to a random thread queue. Once pinned the actor will always execute + * on the same thread. + *

+ * + *

+ * This method can only succeed if the actor it's dispatcher is set to a HawtDispatcher and it has been started + *

+ * + * @return true if the actor was pinned + */ + def pin(actorRef: ActorRef) = { + actorRef.mailbox match { + case x:HawtDispatcherMailbox=> + x.queue.setTargetQueue( getRandomThreadQueue ) + true + case _ => false + } + } + + /** + *

+ * Unpins the actor so that all threads in the hawt dispatch thread pool + * compete to execute him. + *

+ * + *

+ * This method can only succeed if the actor it's dispatcher is set to a HawtDispatcher and it has been started + *

+ * @return true if the actor was unpinned + */ + def unpin(actorRef: ActorRef) = { + target(actorRef, globalQueue) + } + + + /** + * @return true if the actor was pinned to a thread. + */ + def pinned(actorRef: ActorRef):Boolean = { + actorRef.mailbox match { + case x:HawtDispatcherMailbox=> + x.queue.getTargetQueue.getQueueType == QueueType.THREAD_QUEUE + case _ => false + } + } + + /** + *

+ * Updates the actor's target dispatch queue to the value specified. This allows + * you to do odd things like targeting another serial queue. + *

+ * + *

+ * This method can only succeed if the actor it's dispatcher is set to a HawtDispatcher and it has been started + *

+ * @return true if the actor was unpinned + */ + def target(actorRef: ActorRef, parent:DispatchQueue) = { + actorRef.mailbox match { + case x:HawtDispatcherMailbox=> + x.queue.setTargetQueue( parent ) + true + case _ => false + } + } + +} + +/** + *

+ * A HawtDispatch based MessageDispatcher. Actors with this dispatcher are executed + * on the HawtDispatch fixed sized thread pool. The number of of threads will match + * the number of cores available on your system. + * + *

+ *

+ * Actors using this dispatcher are restricted to only executing non blocking + * operations. The actor cannot synchronously call another actor or call 3rd party + * libraries that can block for a long time. You should use non blocking IO APIs + * instead of blocking IO apis to avoid blocking that actor for an extended amount + * of time. + *

+ * + *

+ * This dispatcher delivers messages to the actors in the order that they + * were producer at the sender. + *

+ * + *

+ * HawtDispatch supports processing Non blocking Socket IO in both the reactor + * and proactor styles. For more details, see the HawtDispacherEchoServer.scala + * example. + *

+ * + * @author Hiram Chirino + */ +class HawtDispatcher(val aggregate:Boolean=true, val parent:DispatchQueue=globalQueue) extends MessageDispatcher { + import HawtDispatcher._ + private val active = new AtomicBoolean(false) + + def start = { + if( active.compareAndSet(false, true) ) { + retainNonDaemon + } + } + + def shutdown = { + if( active.compareAndSet(true, false) ) { + releaseNonDaemon + } + } + + def isShutdown = !active.get + + def dispatch(invocation: MessageInvocation) = if(active.get()) { + mailbox(invocation.receiver).dispatch(invocation) + } else { + log.warning("%s is shut down,\n\tignoring the the messages sent to\n\t%s", toString, invocation.receiver) + } + + // hawtdispatch does not have a way to get queue sizes, getting an accurate + // size can cause extra contention.. is this really needed? + // TODO: figure out if this can be optional in akka + override def mailboxSize(actorRef: ActorRef) = 0 + + override def register(actorRef: ActorRef) = { + if( actorRef.mailbox == null ) { + val queue = parent.createSerialQueue(actorRef.toString) + if( aggregate ) { + actorRef.mailbox = new AggregatingHawtDispatcherMailbox(queue) + } else { + actorRef.mailbox = new HawtDispatcherMailbox(queue) + } + } + super.register(actorRef) + } + + override def toString = "HawtDispatchEventDrivenDispatcher" + +} + +class HawtDispatcherMailbox(val queue:DispatchQueue) { + def dispatch(invocation: MessageInvocation):Unit = { + queue { + invocation.invoke + } + } +} + +class AggregatingHawtDispatcherMailbox(queue:DispatchQueue) extends HawtDispatcherMailbox(queue) { + private val source = createSource(new ListEventAggregator[MessageInvocation](), queue) + source.setEventHandler (^{drain_source} ) + source.resume + + private def drain_source = { + source.getData.foreach { invocation => + invocation.invoke + } + } + + override def dispatch(invocation: MessageInvocation):Unit = { + if ( getCurrentQueue == null ) { + // we are being call from a non hawtdispatch thread, can't 
aggregate + // it's events + super.dispatch(invocation) + } else { + // we are being call from a hawtdispatch thread, use the dispatch source + // so that multiple invocations issues on this thread will aggregate and then once + // the thread runs out of work, they get transferred as a batch to the other thread. + source.merge(invocation) + } + } +} diff --git a/akka-core/src/main/scala/dispatch/MessageHandling.scala b/akka-core/src/main/scala/dispatch/MessageHandling.scala index c2e74ceb1d..92926bb253 100644 --- a/akka-core/src/main/scala/dispatch/MessageHandling.scala +++ b/akka-core/src/main/scala/dispatch/MessageHandling.scala @@ -53,7 +53,7 @@ final class MessageInvocation(val receiver: ActorRef, "\n\tsender = " + sender + "\n\tsenderFuture = " + senderFuture + "\n\ttransactionSet = " + transactionSet + - "\n]" + "]" } } @@ -79,7 +79,7 @@ trait MessageDispatcher extends Logging { } def canBeShutDown: Boolean = references.isEmpty def isShutdown: Boolean - def usesActorMailbox : Boolean + def mailboxSize(actorRef: ActorRef):Int = 0 } /** diff --git a/akka-core/src/main/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcher.scala b/akka-core/src/main/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcher.scala index e8fbe9a221..d0850aa830 100644 --- a/akka-core/src/main/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcher.scala +++ b/akka-core/src/main/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcher.scala @@ -12,11 +12,14 @@ package se.scalablesolutions.akka.dispatch import java.util.{LinkedList, List} -class ReactorBasedSingleThreadEventDrivenDispatcher(name: String) extends AbstractReactorBasedEventDrivenDispatcher(name) { +class ReactorBasedSingleThreadEventDrivenDispatcher(_name: String) + extends AbstractReactorBasedEventDrivenDispatcher("akka:event-driven:reactor:single-thread:dispatcher:" + _name) { + def start = if (!active) { + log.debug("Starting up %s", toString) active = true val messageDemultiplexer = new 
Demultiplexer(queue) - selectorThread = new Thread("event-driven:reactor:single-thread:dispatcher:" + name) { + selectorThread = new Thread(name) { override def run = { while (active) { try { @@ -38,7 +41,7 @@ class ReactorBasedSingleThreadEventDrivenDispatcher(name: String) extends Abstra def isShutdown = !active - def usesActorMailbox = false + override def toString = "ReactorBasedSingleThreadEventDrivenDispatcher[" + name + "]" class Demultiplexer(private val messageQueue: ReactiveMessageQueue) extends MessageDemultiplexer { diff --git a/akka-core/src/main/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcher.scala b/akka-core/src/main/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcher.scala index 3e44cf9f6a..530184d4b2 100644 --- a/akka-core/src/main/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcher.scala +++ b/akka-core/src/main/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcher.scala @@ -64,7 +64,7 @@ import se.scalablesolutions.akka.actor.IllegalActorStateException * @author Jonas Bonér */ class ReactorBasedThreadPoolEventDrivenDispatcher(_name: String) - extends AbstractReactorBasedEventDrivenDispatcher("event-driven:reactor:thread-pool:dispatcher:" + _name) + extends AbstractReactorBasedEventDrivenDispatcher("akka:event-driven:reactor:dispatcher:" + _name) with ThreadPoolBuilder { private var fair = true @@ -75,17 +75,18 @@ class ReactorBasedThreadPoolEventDrivenDispatcher(_name: String) withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity.buildThreadPool def start = if (!active) { + log.debug("Starting up %s", toString) active = true /** - * This dispatcher code is based on code from the actorom actor framework by Sergio Bossa [http://code.google.com/p/actorom/]. + * This dispatcher code is based on code from the actorom actor framework by Sergio Bossa + * [http://code.google.com/p/actorom/]. 
*/ selectorThread = new Thread(name) { override def run = { while (active) { try { try { - // guard.synchronized { /* empty */ } // prevents risk for deadlock as described in [http://developers.sun.com/learning/javaoneonline/2006/coreplatform/TS-1315.pdf] messageDemultiplexer.select } catch { case e: InterruptedException => active = false } process(messageDemultiplexer.acquireSelectedInvocations) @@ -110,7 +111,8 @@ class ReactorBasedThreadPoolEventDrivenDispatcher(_name: String) if (invocation eq null) throw new IllegalActorStateException("Message invocation is null [" + invocation + "]") if (!busyActors.contains(invocation.receiver)) { val invoker = messageInvokers.get(invocation.receiver) - if (invoker eq null) throw new IllegalActorStateException("Message invoker for invocation [" + invocation + "] is null") + if (invoker eq null) throw new IllegalActorStateException( + "Message invoker for invocation [" + invocation + "] is null") resume(invocation.receiver) invocations.remove executor.execute(new Runnable() { @@ -137,11 +139,11 @@ class ReactorBasedThreadPoolEventDrivenDispatcher(_name: String) else nrOfBusyMessages < 100 } - def usesActorMailbox = false - def ensureNotActive(): Unit = if (active) throw new IllegalActorStateException( "Can't build a new thread pool for a dispatcher that is already up and running") + override def toString = "ReactorBasedThreadPoolEventDrivenDispatcher[" + name + "]" + class Demultiplexer(private val messageQueue: ReactiveMessageQueue) extends MessageDemultiplexer { private val selectedInvocations: List[MessageInvocation] = new LinkedList[MessageInvocation] private val selectedInvocationsLock = new ReentrantLock diff --git a/akka-core/src/main/scala/dispatch/ThreadBasedDispatcher.scala b/akka-core/src/main/scala/dispatch/ThreadBasedDispatcher.scala index 5c1cb78a52..012c4899d8 100644 --- a/akka-core/src/main/scala/dispatch/ThreadBasedDispatcher.scala +++ b/akka-core/src/main/scala/dispatch/ThreadBasedDispatcher.scala @@ -16,7 
+16,7 @@ import se.scalablesolutions.akka.actor.{Actor, ActorRef} */ class ThreadBasedDispatcher(private val actor: ActorRef) extends MessageDispatcher { private val name = actor.getClass.getName + ":" + actor.uuid - private val threadName = "thread-based:dispatcher:" + name + private val threadName = "akka:thread-based:dispatcher:" + name private val queue = new BlockingMessageQueue(name) private var selectorThread: Thread = _ @volatile private var active: Boolean = false @@ -24,6 +24,7 @@ class ThreadBasedDispatcher(private val actor: ActorRef) extends MessageDispatch def dispatch(invocation: MessageInvocation) = queue.append(invocation) def start = if (!active) { + log.debug("Starting up %s", toString) active = true selectorThread = new Thread(threadName) { override def run = { @@ -39,14 +40,14 @@ class ThreadBasedDispatcher(private val actor: ActorRef) extends MessageDispatch def isShutdown = !active - def usesActorMailbox = false - def shutdown = if (active) { - log.debug("Shutting down ThreadBasedDispatcher [%s]", name) + log.debug("Shutting down %s", toString) active = false selectorThread.interrupt references.clear } + + override def toString = "ThreadBasedDispatcher[" + threadName + "]" } class BlockingMessageQueue(name: String) extends MessageQueue { diff --git a/akka-core/src/main/scala/dispatch/ThreadPoolBuilder.scala b/akka-core/src/main/scala/dispatch/ThreadPoolBuilder.scala index a111ae87a5..5abf431ef8 100644 --- a/akka-core/src/main/scala/dispatch/ThreadPoolBuilder.scala +++ b/akka-core/src/main/scala/dispatch/ThreadPoolBuilder.scala @@ -234,18 +234,19 @@ trait ThreadPoolBuilder { extends Thread(runnable, name + "-" + MonitorableThread.created.incrementAndGet) with Logging { setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() { - def uncaughtException(thread: Thread, cause: Throwable) = log.error(cause, "UNCAUGHT in thread [%s]", thread.getName) + def uncaughtException(thread: Thread, cause: Throwable) = + log.error(cause, "UNCAUGHT in 
thread [%s]", thread.getName) }) override def run = { val debug = MonitorableThread.debugLifecycle - log.debug("Created %s", getName) + log.debug("Created thread %s", getName) try { MonitorableThread.alive.incrementAndGet super.run } finally { MonitorableThread.alive.decrementAndGet - log.debug("Exiting %s", getName) + log.debug("Exiting thread %s", getName) } } } diff --git a/akka-core/src/main/scala/remote/Cluster.scala b/akka-core/src/main/scala/remote/Cluster.scala index f1c3633944..8a5864a51b 100644 --- a/akka-core/src/main/scala/remote/Cluster.scala +++ b/akka-core/src/main/scala/remote/Cluster.scala @@ -151,11 +151,17 @@ abstract class BasicClusterActor extends ClusterActor with Logging { case InitClusterActor(s) => { serializer = s + boot } } /** - * Implement this in a subclass to add node-to-node messaging + * Implement this in a subclass to boot up the cluster implementation + */ + protected def boot: Unit + + /** + * Implement this in a subclass to add node-to-node messaging */ protected def toOneNode(dest: ADDR_T, msg: Array[Byte]): Unit diff --git a/akka-core/src/main/scala/remote/JGroupsClusterActor.scala b/akka-core/src/main/scala/remote/JGroupsClusterActor.scala index 847985e3d3..54ef3807d4 100644 --- a/akka-core/src/main/scala/remote/JGroupsClusterActor.scala +++ b/akka-core/src/main/scala/remote/JGroupsClusterActor.scala @@ -17,9 +17,8 @@ class JGroupsClusterActor extends BasicClusterActor { @volatile private var isActive = false @volatile private var channel: Option[JChannel] = None - override def init = { - super.init - log info "Initiating JGroups-based cluster actor" + protected def boot = { + log info "Booting JGroups-based cluster" isActive = true // Set up the JGroups local endpoint diff --git a/akka-core/src/main/scala/remote/RemoteClient.scala b/akka-core/src/main/scala/remote/RemoteClient.scala index c90d472c09..e0212572b2 100644 --- a/akka-core/src/main/scala/remote/RemoteClient.scala +++ 
b/akka-core/src/main/scala/remote/RemoteClient.scala @@ -7,8 +7,8 @@ package se.scalablesolutions.akka.remote import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ import se.scalablesolutions.akka.actor.{Exit, Actor, ActorRef, RemoteActorRef, IllegalActorStateException} import se.scalablesolutions.akka.dispatch.{DefaultCompletableFuture, CompletableFuture} -import se.scalablesolutions.akka.util.{UUID, Logging} -import se.scalablesolutions.akka.config.Config.config +import se.scalablesolutions.akka.util.{UUID, Logging, Duration} +import se.scalablesolutions.akka.config.Config._ import org.jboss.netty.channel._ import group.DefaultChannelGroup @@ -51,8 +51,8 @@ case class RemoteClientConnected(host: String, port: Int) extends RemoteClientLi * @author Jonas Bonér */ object RemoteClient extends Logging { - val READ_TIMEOUT = config.getInt("akka.remote.client.read-timeout", 10000) - val RECONNECT_DELAY = config.getInt("akka.remote.client.reconnect-delay", 5000) + val READ_TIMEOUT = Duration(config.getInt("akka.remote.client.read-timeout", 1), TIME_UNIT) + val RECONNECT_DELAY = Duration(config.getInt("akka.remote.client.reconnect-delay", 5), TIME_UNIT) private val remoteClients = new HashMap[String, RemoteClient] private val remoteActors = new HashMap[RemoteServer.Address, HashSet[String]] @@ -138,7 +138,7 @@ object RemoteClient extends Logging { actorsFor(RemoteServer.Address(hostname, port)) += uuid } - // TODO: add RemoteClient.unregister for ActiveObject, but first need a @shutdown callback + // TODO: add RemoteClient.unregister for TypedActor, but first need a @shutdown callback private[akka] def unregister(hostname: String, port: Int, uuid: String) = synchronized { val set = actorsFor(RemoteServer.Address(hostname, port)) set -= uuid @@ -218,7 +218,7 @@ class RemoteClient private[akka] (val hostname: String, val port: Int, loader: O } else { futures.synchronized { val futureResult = if (senderFuture.isDefined) senderFuture.get - else new 
DefaultCompletableFuture[T](request.getTimeout) + else new DefaultCompletableFuture[T](request.getActorInfo.getTimeout) futures.put(request.getId, futureResult) connection.getChannel.write(request) Some(futureResult) @@ -231,11 +231,13 @@ class RemoteClient private[akka] (val hostname: String, val port: Int, loader: O } private[akka] def registerSupervisorForActor(actorRef: ActorRef) = - if (!actorRef.supervisor.isDefined) throw new IllegalActorStateException("Can't register supervisor for " + actorRef + " since it is not under supervision") + if (!actorRef.supervisor.isDefined) throw new IllegalActorStateException( + "Can't register supervisor for " + actorRef + " since it is not under supervision") else supervisors.putIfAbsent(actorRef.supervisor.get.uuid, actorRef) private[akka] def deregisterSupervisorForActor(actorRef: ActorRef) = - if (!actorRef.supervisor.isDefined) throw new IllegalActorStateException("Can't unregister supervisor for " + actorRef + " since it is not under supervision") + if (!actorRef.supervisor.isDefined) throw new IllegalActorStateException( + "Can't unregister supervisor for " + actorRef + " since it is not under supervision") else supervisors.remove(actorRef.supervisor.get.uuid) } @@ -250,6 +252,7 @@ class RemoteClientPipelineFactory(name: String, timer: HashedWheelTimer, client: RemoteClient) extends ChannelPipelineFactory { def getPipeline: ChannelPipeline = { + def join(ch: ChannelHandler*) = Array[ChannelHandler](ch:_*) val engine = RemoteServerSslContext.client.createSSLEngine() @@ -257,7 +260,7 @@ class RemoteClientPipelineFactory(name: String, engine.setUseClientMode(true) val ssl = if(RemoteServer.SECURE) join(new SslHandler(engine)) else join() - val timeout = new ReadTimeoutHandler(timer, RemoteClient.READ_TIMEOUT) + val timeout = new ReadTimeoutHandler(timer, RemoteClient.READ_TIMEOUT.toMillis.toInt) val lenDec = new LengthFieldBasedFrameDecoder(1048576, 0, 4, 0, 4) val lenPrep = new LengthFieldPrepender(4) val protobufDec = 
new ProtobufDecoder(RemoteReplyProtocol.getDefaultInstance) @@ -345,7 +348,7 @@ class RemoteClientHandler(val name: String, log.error(client.connection.getCause, "Reconnection to [%s] has failed", remoteAddress) } } - }, RemoteClient.RECONNECT_DELAY, TimeUnit.MILLISECONDS) + }, RemoteClient.RECONNECT_DELAY.toMillis, TimeUnit.MILLISECONDS) } override def channelConnected(ctx: ChannelHandlerContext, event: ChannelStateEvent) = { diff --git a/akka-core/src/main/scala/remote/RemoteServer.scala b/akka-core/src/main/scala/remote/RemoteServer.scala index 93386311f2..5d25d81b75 100644 --- a/akka-core/src/main/scala/remote/RemoteServer.scala +++ b/akka-core/src/main/scala/remote/RemoteServer.scala @@ -13,7 +13,7 @@ import se.scalablesolutions.akka.actor._ import se.scalablesolutions.akka.actor.Actor._ import se.scalablesolutions.akka.util._ import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ -import se.scalablesolutions.akka.config.Config.config +import se.scalablesolutions.akka.config.Config._ import org.jboss.netty.bootstrap.ServerBootstrap import org.jboss.netty.channel._ @@ -57,7 +57,7 @@ object RemoteNode extends RemoteServer /** * For internal use only. - * Holds configuration variables, remote actors, remote active objects and remote servers. + * Holds configuration variables, remote actors, remote typed actors and remote servers. 
* * @author Jonas Bonér */ @@ -65,7 +65,7 @@ object RemoteServer { val HOSTNAME = config.getString("akka.remote.server.hostname", "localhost") val PORT = config.getInt("akka.remote.server.port", 9999) - val CONNECTION_TIMEOUT_MILLIS = config.getInt("akka.remote.server.connection-timeout", 1000) + val CONNECTION_TIMEOUT_MILLIS = Duration(config.getInt("akka.remote.server.connection-timeout", 1), TIME_UNIT) val COMPRESSION_SCHEME = config.getString("akka.remote.compression-scheme", "zlib") val ZLIB_COMPRESSION_LEVEL = { @@ -76,7 +76,8 @@ object RemoteServer { } val SECURE = { - if(config.getBool("akka.remote.ssl.service",false)){ + //TODO: Remove this when SSL is in working condition + /*if(config.getBool("akka.remote.ssl.service",false)){ val properties = List( ("key-store-type" ,"keyStoreType"), @@ -97,7 +98,7 @@ object RemoteServer { true } - else + else */ false } @@ -121,7 +122,7 @@ object RemoteServer { private class RemoteActorSet { private[RemoteServer] val actors = new ConcurrentHashMap[String, ActorRef] - private[RemoteServer] val activeObjects = new ConcurrentHashMap[String, AnyRef] + private[RemoteServer] val typedActors = new ConcurrentHashMap[String, AnyRef] } private val guard = new ReadWriteGuard @@ -132,8 +133,8 @@ object RemoteServer { actorsFor(RemoteServer.Address(address.getHostName, address.getPort)).actors.put(uuid, actor) } - private[akka] def registerActiveObject(address: InetSocketAddress, name: String, activeObject: AnyRef) = guard.withWriteGuard { - actorsFor(RemoteServer.Address(address.getHostName, address.getPort)).activeObjects.put(name, activeObject) + private[akka] def registerTypedActor(address: InetSocketAddress, name: String, typedActor: AnyRef) = guard.withWriteGuard { + actorsFor(RemoteServer.Address(address.getHostName, address.getPort)).typedActors.put(name, typedActor) } private[akka] def getOrCreateServer(address: InetSocketAddress): RemoteServer = guard.withWriteGuard { @@ -225,12 +226,12 @@ class RemoteServer extends 
Logging { RemoteServer.register(hostname, port, this) val remoteActorSet = RemoteServer.actorsFor(RemoteServer.Address(hostname, port)) val pipelineFactory = new RemoteServerPipelineFactory( - name, openChannels, loader, remoteActorSet.actors, remoteActorSet.activeObjects) + name, openChannels, loader, remoteActorSet.actors, remoteActorSet.typedActors) bootstrap.setPipelineFactory(pipelineFactory) bootstrap.setOption("child.tcpNoDelay", true) bootstrap.setOption("child.keepAlive", true) bootstrap.setOption("child.reuseAddress", true) - bootstrap.setOption("child.connectTimeoutMillis", RemoteServer.CONNECTION_TIMEOUT_MILLIS) + bootstrap.setOption("child.connectTimeoutMillis", RemoteServer.CONNECTION_TIMEOUT_MILLIS.toMillis) openChannels.add(bootstrap.bind(new InetSocketAddress(hostname, port))) _isRunning = true Cluster.registerLocalNode(hostname, port) @@ -243,15 +244,20 @@ class RemoteServer extends Logging { def shutdown = synchronized { if (_isRunning) { - RemoteServer.unregister(hostname, port) - openChannels.disconnect - openChannels.close.awaitUninterruptibly - bootstrap.releaseExternalResources - Cluster.deregisterLocalNode(hostname, port) + try { + RemoteServer.unregister(hostname, port) + openChannels.disconnect + openChannels.close.awaitUninterruptibly + bootstrap.releaseExternalResources + Cluster.deregisterLocalNode(hostname, port) + } catch { + case e: java.nio.channels.ClosedChannelException => {} + case e => log.warning("Could not close remote server channel in a graceful way") + } } } - // TODO: register active object in RemoteServer as well + // TODO: register typed actor in RemoteServer as well /** * Register Remote Actor by the Actor's 'id' field. It starts the Actor if it is not started already. 
@@ -331,7 +337,7 @@ class RemoteServerPipelineFactory( val openChannels: ChannelGroup, val loader: Option[ClassLoader], val actors: JMap[String, ActorRef], - val activeObjects: JMap[String, AnyRef]) extends ChannelPipelineFactory { + val typedActors: JMap[String, AnyRef]) extends ChannelPipelineFactory { import RemoteServer._ def getPipeline: ChannelPipeline = { @@ -351,7 +357,7 @@ class RemoteServerPipelineFactory( case _ => (join(),join()) } - val remoteServer = new RemoteServerHandler(name, openChannels, loader, actors, activeObjects) + val remoteServer = new RemoteServerHandler(name, openChannels, loader, actors, typedActors) val stages = ssl ++ dec ++ join(lenDec, protobufDec) ++ enc ++ join(lenPrep, protobufEnc, remoteServer) @@ -368,7 +374,7 @@ class RemoteServerHandler( val openChannels: ChannelGroup, val applicationLoader: Option[ClassLoader], val actors: JMap[String, ActorRef], - val activeObjects: JMap[String, AnyRef]) extends SimpleChannelUpstreamHandler with Logging { + val typedActors: JMap[String, AnyRef]) extends SimpleChannelUpstreamHandler with Logging { val AW_PROXY_PREFIX = "$$ProxiedByAW".intern applicationLoader.foreach(MessageSerializer.setClassLoader(_)) @@ -422,139 +428,88 @@ class RemoteServerHandler( private def handleRemoteRequestProtocol(request: RemoteRequestProtocol, channel: Channel) = { log.debug("Received RemoteRequestProtocol[\n%s]", request.toString) - if (request.getIsActor) dispatchToActor(request, channel) - else dispatchToActiveObject(request, channel) + val actorType = request.getActorInfo.getActorType + if (actorType == ActorType.SCALA_ACTOR) dispatchToActor(request, channel) + else if (actorType == ActorType.JAVA_ACTOR) throw new IllegalActorStateException("ActorType JAVA_ACTOR is currently not supported") + else if (actorType == ActorType.TYPED_ACTOR) dispatchToTypedActor(request, channel) + else throw new IllegalActorStateException("Unknown ActorType [" + actorType + "]") } private def dispatchToActor(request: 
RemoteRequestProtocol, channel: Channel) = { - log.debug("Dispatching to remote actor [%s:%s]", request.getTarget, request.getUuid) - val actorRef = createActor(request.getTarget, request.getUuid, request.getTimeout) + val actorInfo = request.getActorInfo + log.debug("Dispatching to remote actor [%s:%s]", actorInfo.getTarget, actorInfo.getUuid) + + val actorRef = createActor(actorInfo) actorRef.start + val message = MessageSerializer.deserialize(request.getMessage) val sender = if (request.hasSender) Some(RemoteActorSerialization.fromProtobufToRemoteActorRef(request.getSender, applicationLoader)) else None + if (request.getIsOneWay) actorRef.!(message)(sender) else { try { val resultOrNone = (actorRef.!!(message)(sender)).as[AnyRef] val result = if (resultOrNone.isDefined) resultOrNone.get else null + log.debug("Returning result from actor invocation [%s]", result) val replyBuilder = RemoteReplyProtocol.newBuilder .setId(request.getId) .setMessage(MessageSerializer.serialize(result)) .setIsSuccessful(true) .setIsActor(true) + if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid) - val replyMessage = replyBuilder.build - channel.write(replyMessage) + channel.write(replyBuilder.build) + } catch { - case e: Throwable => - log.error(e, "Could not invoke remote actor [%s]", request.getTarget) - val replyBuilder = RemoteReplyProtocol.newBuilder - .setId(request.getId) - .setException(ExceptionProtocol.newBuilder.setClassname(e.getClass.getName).setMessage(e.getMessage).build) - .setIsSuccessful(false) - .setIsActor(true) - if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid) - val replyMessage = replyBuilder.build - channel.write(replyMessage) + case e: Throwable => channel.write(createErrorReplyMessage(e, request, true)) } } } - private def dispatchToActiveObject(request: RemoteRequestProtocol, channel: Channel) = { - log.debug("Dispatching to remote active object [%s :: %s]", request.getMethod, 
request.getTarget) - val activeObject = createActiveObject(request.getTarget, request.getTimeout) + private def dispatchToTypedActor(request: RemoteRequestProtocol, channel: Channel) = { + val actorInfo = request.getActorInfo + val typedActorInfo = actorInfo.getTypedActorInfo + log.debug("Dispatching to remote typed actor [%s :: %s]", typedActorInfo.getMethod, typedActorInfo.getInterface) + val typedActor = createTypedActor(actorInfo) val args = MessageSerializer.deserialize(request.getMessage).asInstanceOf[Array[AnyRef]].toList val argClasses = args.map(_.getClass) - val (unescapedArgs, unescapedArgClasses) = unescapeArgs(args, argClasses, request.getTimeout) try { - val messageReceiver = activeObject.getClass.getDeclaredMethod( - request.getMethod, unescapedArgClasses: _*) - if (request.getIsOneWay) messageReceiver.invoke(activeObject, unescapedArgs: _*) + val messageReceiver = typedActor.getClass.getDeclaredMethod(typedActorInfo.getMethod, argClasses: _*) + if (request.getIsOneWay) messageReceiver.invoke(typedActor, args: _*) else { - val result = messageReceiver.invoke(activeObject, unescapedArgs: _*) - log.debug("Returning result from remote active object invocation [%s]", result) + val result = messageReceiver.invoke(typedActor, args: _*) + log.debug("Returning result from remote typed actor invocation [%s]", result) val replyBuilder = RemoteReplyProtocol.newBuilder .setId(request.getId) .setMessage(MessageSerializer.serialize(result)) .setIsSuccessful(true) .setIsActor(false) if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid) - val replyMessage = replyBuilder.build - channel.write(replyMessage) + channel.write(replyBuilder.build) } } catch { - case e: InvocationTargetException => - log.error(e.getCause, "Could not invoke remote active object [%s :: %s]", request.getMethod, request.getTarget) - val replyBuilder = RemoteReplyProtocol.newBuilder - .setId(request.getId) - 
.setException(ExceptionProtocol.newBuilder.setClassname(e.getCause.getClass.getName).setMessage(e.getCause.getMessage).build) - .setIsSuccessful(false) - .setIsActor(false) - if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid) - val replyMessage = replyBuilder.build - channel.write(replyMessage) - case e: Throwable => - log.error(e, "Could not invoke remote active object [%s :: %s]", request.getMethod, request.getTarget) - val replyBuilder = RemoteReplyProtocol.newBuilder - .setId(request.getId) - .setException(ExceptionProtocol.newBuilder.setClassname(e.getClass.getName).setMessage(e.getMessage).build) - .setIsSuccessful(false) - .setIsActor(false) - if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid) - val replyMessage = replyBuilder.build - channel.write(replyMessage) + case e: InvocationTargetException => channel.write(createErrorReplyMessage(e.getCause, request, false)) + case e: Throwable => channel.write(createErrorReplyMessage(e, request, false)) } } - private def unescapeArgs(args: scala.List[AnyRef], argClasses: scala.List[Class[_]], timeout: Long) = { - val unescapedArgs = new Array[AnyRef](args.size) - val unescapedArgClasses = new Array[Class[_]](args.size) - - val escapedArgs = for (i <- 0 until args.size) { - val arg = args(i) - if (arg.isInstanceOf[String] && arg.asInstanceOf[String].startsWith(AW_PROXY_PREFIX)) { - val argString = arg.asInstanceOf[String] - val proxyName = argString.replace(AW_PROXY_PREFIX, "") - val activeObject = createActiveObject(proxyName, timeout) - unescapedArgs(i) = activeObject - unescapedArgClasses(i) = Class.forName(proxyName) - } else { - unescapedArgs(i) = args(i) - unescapedArgClasses(i) = argClasses(i) - } - } - (unescapedArgs, unescapedArgClasses) - } - - private def createActiveObject(name: String, timeout: Long): AnyRef = { - val activeObjectOrNull = activeObjects.get(name) - if (activeObjectOrNull eq null) { - try { - log.info("Creating a 
new remote active object [%s]", name) - val clazz = if (applicationLoader.isDefined) applicationLoader.get.loadClass(name) - else Class.forName(name) - val newInstance = ActiveObject.newInstance(clazz, timeout).asInstanceOf[AnyRef] - activeObjects.put(name, newInstance) - newInstance - } catch { - case e => - log.error(e, "Could not create remote active object instance") - throw e - } - } else activeObjectOrNull - } - /** * Creates a new instance of the actor with name, uuid and timeout specified as arguments. + * * If actor already created then just return it from the registry. + * * Does not start the actor. */ - private def createActor(name: String, uuid: String, timeout: Long): ActorRef = { + private def createActor(actorInfo: ActorInfoProtocol): ActorRef = { + val name = actorInfo.getTarget + val uuid = actorInfo.getUuid + val timeout = actorInfo.getTimeout + val actorRefOrNull = actors.get(uuid) if (actorRefOrNull eq null) { try { @@ -574,4 +529,43 @@ class RemoteServerHandler( } } else actorRefOrNull } + + private def createTypedActor(actorInfo: ActorInfoProtocol): AnyRef = { + val uuid = actorInfo.getUuid + val typedActorOrNull = typedActors.get(uuid) + + if (typedActorOrNull eq null) { + val typedActorInfo = actorInfo.getTypedActorInfo + val interfaceClassname = typedActorInfo.getInterface + val targetClassname = actorInfo.getTarget + + try { + log.info("Creating a new remote typed actor:\n\t[%s :: %s]", interfaceClassname, targetClassname) + + val (interfaceClass, targetClass) = + if (applicationLoader.isDefined) (applicationLoader.get.loadClass(interfaceClassname), + applicationLoader.get.loadClass(targetClassname)) + else (Class.forName(interfaceClassname), Class.forName(targetClassname)) + + val newInstance = TypedActor.newInstance( + interfaceClass, targetClass.asInstanceOf[Class[_ <: TypedActor]], actorInfo.getTimeout).asInstanceOf[AnyRef] + typedActors.put(uuid, newInstance) + newInstance + } catch { + case e => log.error(e, "Could not create remote 
typed actor instance"); throw e + } + } else typedActorOrNull + } + + private def createErrorReplyMessage(e: Throwable, request: RemoteRequestProtocol, isActor: Boolean): RemoteReplyProtocol = { + val actorInfo = request.getActorInfo + log.error(e, "Could not invoke remote typed actor [%s :: %s]", actorInfo.getTypedActorInfo.getMethod, actorInfo.getTarget) + val replyBuilder = RemoteReplyProtocol.newBuilder + .setId(request.getId) + .setException(ExceptionProtocol.newBuilder.setClassname(e.getClass.getName).setMessage(e.getMessage).build) + .setIsSuccessful(false) + .setIsActor(isActor) + if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid) + replyBuilder.build + } } diff --git a/akka-core/src/main/scala/stm/JTA.scala b/akka-core/src/main/scala/stm/JTA.scala index bb61973c91..1c0af88d73 100644 --- a/akka-core/src/main/scala/stm/JTA.scala +++ b/akka-core/src/main/scala/stm/JTA.scala @@ -4,7 +4,9 @@ package se.scalablesolutions.akka.stm -import javax.transaction.{TransactionManager, UserTransaction, Transaction => JtaTransaction, SystemException, Status, Synchronization, TransactionSynchronizationRegistry} +import javax.transaction.{TransactionManager, UserTransaction, + Transaction => JtaTransaction, SystemException, + Status, Synchronization, TransactionSynchronizationRegistry} import javax.naming.{InitialContext, Context, NamingException} import se.scalablesolutions.akka.config.Config._ @@ -16,7 +18,7 @@ import se.scalablesolutions.akka.util.Logging * @author Jonas Bonér */ object TransactionContainer extends Logging { - val AKKA_JTA_TRANSACTION_SERVICE_CLASS = "se.scalablesolutions.akka.jta.AtomikosTransactionService" + val AKKA_JTA_TRANSACTION_SERVICE_CLASS = "se.scalablesolutions.akka.jta.AtomikosTransactionService" val DEFAULT_USER_TRANSACTION_NAME = "java:comp/UserTransaction" val FALLBACK_TRANSACTION_MANAGER_NAMES = "java:comp/TransactionManager" :: "java:appserver/TransactionManager" :: @@ -119,22 +121,31 @@ class 
TransactionContainer private (val tm: Either[Option[UserTransaction], Opti } } - def begin = tm match { - case Left(Some(userTx)) => userTx.begin - case Right(Some(txMan)) => txMan.begin - case _ => throw new StmConfigurationException("Does not have a UserTransaction or TransactionManager in scope") + def begin = { + TransactionContainer.log.ifTrace("Starting JTA transaction") + tm match { + case Left(Some(userTx)) => userTx.begin + case Right(Some(txMan)) => txMan.begin + case _ => throw new StmConfigurationException("Does not have a UserTransaction or TransactionManager in scope") + } } - def commit = tm match { - case Left(Some(userTx)) => userTx.commit - case Right(Some(txMan)) => txMan.commit - case _ => throw new StmConfigurationException("Does not have a UserTransaction or TransactionManager in scope") + def commit = { + TransactionContainer.log.ifTrace("Committing JTA transaction") + tm match { + case Left(Some(userTx)) => userTx.commit + case Right(Some(txMan)) => txMan.commit + case _ => throw new StmConfigurationException("Does not have a UserTransaction or TransactionManager in scope") + } } - def rollback = tm match { - case Left(Some(userTx)) => userTx.rollback - case Right(Some(txMan)) => txMan.rollback - case _ => throw new StmConfigurationException("Does not have a UserTransaction or TransactionManager in scope") + def rollback = { + TransactionContainer.log.ifTrace("Aborting JTA transaction") + tm match { + case Left(Some(userTx)) => userTx.rollback + case Right(Some(txMan)) => txMan.rollback + case _ => throw new StmConfigurationException("Does not have a UserTransaction or TransactionManager in scope") + } } def getStatus = tm match { diff --git a/akka-core/src/main/scala/stm/Ref.scala b/akka-core/src/main/scala/stm/Ref.scala index 3b13d32971..2ca6802b15 100644 --- a/akka-core/src/main/scala/stm/Ref.scala +++ b/akka-core/src/main/scala/stm/Ref.scala @@ -6,23 +6,15 @@ package se.scalablesolutions.akka.stm import 
se.scalablesolutions.akka.util.UUID -import org.multiverse.api.GlobalStmInstance.getGlobalStmInstance - -object RefFactory { - private val factory = getGlobalStmInstance.getProgrammaticRefFactoryBuilder.build - - def createRef[T] = factory.atomicCreateRef[T]() - - def createRef[T](value: T) = factory.atomicCreateRef(value) -} +import org.multiverse.transactional.refs.BasicRef /** - * Ref. + * Ref * * @author Jonas Bonér */ object Ref { - def apply[T]() = new Ref[T] + def apply[T]() = new Ref[T]() def apply[T](initialValue: T) = new Ref[T](Some(initialValue)) @@ -33,77 +25,47 @@ object Ref { } /** - * Implements a transactional managed reference. + * Transactional managed reference. * * @author Jonas Bonér */ -class Ref[T](initialOpt: Option[T] = None) extends Transactional { +class Ref[T](initialOpt: Option[T] = None) + extends BasicRef[T](initialOpt.getOrElse(null.asInstanceOf[T])) + with Transactional { + self => def this() = this(None) // Java compatibility - import org.multiverse.api.ThreadLocalTransaction._ - val uuid = UUID.newUuid.toString - private[this] val ref = { - if (initialOpt.isDefined) RefFactory.createRef(initialOpt.get) - else RefFactory.createRef[T] - } - - def swap(elem: T) = { - ensureIsInTransaction - ref.set(elem) - } + def swap(elem: T) = set(elem) def alter(f: T => T): T = { - ensureIsInTransaction ensureNotNull - ref.set(f(ref.get)) - ref.get + set(f(this.get)) + this.get } - def get: Option[T] = { - ensureIsInTransaction - if (ref.isNull) None - else Some(ref.get) - } + def getOption: Option[T] = Option(this.get) - def getOrWait: T = { - ensureIsInTransaction - ref.getOrAwait - } + def getOrWait: T = getOrAwait - def getOrElse(default: => T): T = { - ensureIsInTransaction - if (ref.isNull) default - else ref.get - } + def getOrElse(default: => T): T = + if (isNull) default else this.get - def isDefined: Boolean = { - ensureIsInTransaction - !ref.isNull - } + def isDefined: Boolean = !isNull - def isEmpty: Boolean = { - 
ensureIsInTransaction - ref.isNull - } + def isEmpty: Boolean = isNull - def map[B](f: T => B): Ref[B] = { - ensureIsInTransaction - if (isEmpty) Ref[B] else Ref(f(ref.get)) - } + def map[B](f: T => B): Ref[B] = + if (isEmpty) Ref[B] else Ref(f(this.get)) - def flatMap[B](f: T => Ref[B]): Ref[B] = { - ensureIsInTransaction - if (isEmpty) Ref[B] else f(ref.get) - } + def flatMap[B](f: T => Ref[B]): Ref[B] = + if (isEmpty) Ref[B] else f(this.get) - def filter(p: T => Boolean): Ref[T] = { - ensureIsInTransaction - if (isDefined && p(ref.get)) Ref(ref.get) else Ref[T] - } + def filter(p: T => Boolean): Ref[T] = + if (isDefined && p(this.get)) Ref(this.get) else Ref[T] /** * Necessary to keep from being implicitly converted to Iterable in for comprehensions. @@ -117,34 +79,21 @@ class Ref[T](initialOpt: Option[T] = None) extends Transactional { def withFilter(q: T => Boolean): WithFilter = new WithFilter(x => p(x) && q(x)) } - def foreach[U](f: T => U): Unit = { - ensureIsInTransaction - if (isDefined) f(ref.get) - } + def foreach[U](f: T => U): Unit = + if (isDefined) f(this.get) - def elements: Iterator[T] = { - ensureIsInTransaction - if (isEmpty) Iterator.empty else Iterator(ref.get) - } + def elements: Iterator[T] = + if (isEmpty) Iterator.empty else Iterator(this.get) - def toList: List[T] = { - ensureIsInTransaction - if (isEmpty) List() else List(ref.get) - } + def toList: List[T] = + if (isEmpty) List() else List(this.get) - def toRight[X](left: => X) = { - ensureIsInTransaction - if (isEmpty) Left(left) else Right(ref.get) - } + def toRight[X](left: => X) = + if (isEmpty) Left(left) else Right(this.get) - def toLeft[X](right: => X) = { - ensureIsInTransaction - if (isEmpty) Right(right) else Left(ref.get) - } - - private def ensureIsInTransaction = - if (getThreadLocalTransaction eq null) throw new NoTransactionInScopeException + def toLeft[X](right: => X) = + if (isEmpty) Right(right) else Left(this.get) private def ensureNotNull = - if (ref.isNull) throw new 
RuntimeException("Cannot alter Ref's value when it is null") + if (isNull) throw new RuntimeException("Cannot alter Ref's value when it is null") } diff --git a/akka-core/src/main/scala/stm/Transaction.scala b/akka-core/src/main/scala/stm/Transaction.scala index 54f20a3504..0951cbc5c5 100644 --- a/akka-core/src/main/scala/stm/Transaction.scala +++ b/akka-core/src/main/scala/stm/Transaction.scala @@ -83,11 +83,12 @@ object Transaction { if (JTA_AWARE) Some(TransactionContainer()) else None - log.trace("Creating %s", toString) + log.ifTrace("Creating transaction " + toString) // --- public methods --------- def begin = synchronized { + log.ifTrace("Starting transaction " + toString) jta.foreach { txContainer => txContainer.begin txContainer.registerSynchronization(new StmSynchronization(txContainer, this)) @@ -95,14 +96,14 @@ object Transaction { } def commit = synchronized { - log.trace("Committing transaction %s", toString) + log.ifTrace("Committing transaction " + toString) persistentStateMap.valuesIterator.foreach(_.commit) status = TransactionStatus.Completed jta.foreach(_.commit) } def abort = synchronized { - log.trace("Aborting transaction %s", toString) + log.ifTrace("Aborting transaction " + toString) jta.foreach(_.rollback) persistentStateMap.valuesIterator.foreach(_.abort) persistentStateMap.clear diff --git a/akka-core/src/main/scala/stm/TransactionFactory.scala b/akka-core/src/main/scala/stm/TransactionFactory.scala index 56982bb759..691fec675b 100644 --- a/akka-core/src/main/scala/stm/TransactionFactory.scala +++ b/akka-core/src/main/scala/stm/TransactionFactory.scala @@ -22,8 +22,7 @@ object TransactionConfig { val FAMILY_NAME = "DefaultTransaction" val READONLY = null.asInstanceOf[JBoolean] val MAX_RETRIES = config.getInt("akka.stm.max-retries", 1000) - val TIMEOUT = config.getLong("akka.stm.timeout", Long.MaxValue) - val TIME_UNIT = config.getString("akka.stm.time-unit", "seconds") + val TIMEOUT = config.getLong("akka.stm.timeout", 10) val 
TRACK_READS = null.asInstanceOf[JBoolean] val WRITE_SKEW = config.getBool("akka.stm.write-skew", true) val EXPLICIT_RETRIES = config.getBool("akka.stm.explicit-retries", false) @@ -37,8 +36,8 @@ object TransactionConfig { def traceLevel(level: String) = level.toLowerCase match { case "coarse" | "course" => Transaction.TraceLevel.Coarse - case "fine" => Transaction.TraceLevel.Fine - case _ => Transaction.TraceLevel.None + case "fine" => Transaction.TraceLevel.Fine + case _ => Transaction.TraceLevel.None } /** @@ -125,8 +124,9 @@ object TransactionFactory { quickRelease: Boolean = TransactionConfig.QUICK_RELEASE, traceLevel: TraceLevel = TransactionConfig.TRACE_LEVEL, hooks: Boolean = TransactionConfig.HOOKS) = { - val config = new TransactionConfig(familyName, readonly, maxRetries, timeout, trackReads, writeSkew, - explicitRetries, interruptible, speculative, quickRelease, traceLevel, hooks) + val config = new TransactionConfig( + familyName, readonly, maxRetries, timeout, trackReads, writeSkew, + explicitRetries, interruptible, speculative, quickRelease, traceLevel, hooks) new TransactionFactory(config) } } @@ -152,8 +152,9 @@ object TransactionFactory { * * @see TransactionConfig for configuration options. 
*/ -class TransactionFactory(val config: TransactionConfig = DefaultTransactionConfig, defaultName: String = TransactionConfig.FAMILY_NAME) { - self => +class TransactionFactory( + val config: TransactionConfig = DefaultTransactionConfig, + defaultName: String = TransactionConfig.FAMILY_NAME) { self => // use the config family name if it's been set, otherwise defaultName - used by actors to set class name as default val familyName = if (config.familyName != TransactionConfig.FAMILY_NAME) config.familyName else defaultName diff --git a/akka-core/src/main/scala/stm/TransactionManagement.scala b/akka-core/src/main/scala/stm/TransactionManagement.scala index 0c6a244f42..65f8d7624c 100644 --- a/akka-core/src/main/scala/stm/TransactionManagement.scala +++ b/akka-core/src/main/scala/stm/TransactionManagement.scala @@ -7,6 +7,7 @@ package se.scalablesolutions.akka.stm import se.scalablesolutions.akka.util.Logging import java.util.concurrent.atomic.AtomicBoolean +import java.util.concurrent.TimeUnit import org.multiverse.api.{StmUtils => MultiverseStmUtils} import org.multiverse.api.ThreadLocalTransaction._ @@ -14,16 +15,20 @@ import org.multiverse.api.{Transaction => MultiverseTransaction} import org.multiverse.commitbarriers.CountDownCommitBarrier import org.multiverse.templates.{TransactionalCallable, OrElseTemplate} -class StmException(msg: String) extends RuntimeException(msg) +class TransactionSetAbortedException(msg: String) extends RuntimeException(msg) +// TODO Should we remove TransactionAwareWrapperException? Not used anywhere yet. class TransactionAwareWrapperException(val cause: Throwable, val tx: Option[Transaction]) extends RuntimeException(cause) { override def toString = "TransactionAwareWrapperException[" + cause + ", " + tx + "]" } +/** + * Internal helper methods and properties for transaction management. + */ object TransactionManagement extends TransactionManagement { import se.scalablesolutions.akka.config.Config._ - // move to stm.global.fair? 
+ // FIXME move to stm.global.fair? val FAIR_TRANSACTIONS = config.getBool("akka.stm.fair", true) private[akka] val transactionSet = new ThreadLocal[Option[CountDownCommitBarrier]]() { @@ -47,6 +52,9 @@ object TransactionManagement extends TransactionManagement { } } +/** + * Internal helper methods for transaction management. + */ trait TransactionManagement { private[akka] def createNewTransactionSet: CountDownCommitBarrier = { @@ -111,7 +119,9 @@ class LocalStm extends TransactionManagement with Logging { factory.boilerplate.execute(new TransactionalCallable[T]() { def call(mtx: MultiverseTransaction): T = { factory.addHooks - body + val result = body + log.ifTrace("Committing local transaction [" + mtx + "]") + result } }) } @@ -145,10 +155,14 @@ class GlobalStm extends TransactionManagement with Logging { factory.addHooks val result = body val txSet = getTransactionSetInScope - log.trace("Committing transaction [%s]\n\tby joining transaction set [%s]", mtx, txSet) - // FIXME ? txSet.tryJoinCommit(mtx, TransactionManagement.TRANSACTION_TIMEOUT, TimeUnit.MILLISECONDS) - try { txSet.joinCommit(mtx) } catch { case e: IllegalStateException => {} } - clearTransaction + log.ifTrace("Committing global transaction [" + mtx + "]\n\tand joining transaction set [" + txSet + "]") + try { + txSet.tryJoinCommit( + mtx, + TransactionConfig.DefaultTimeout.length, + TransactionConfig.DefaultTimeout.unit) + // Need to catch IllegalStateException until we have fix in Multiverse, since it throws it by mistake + } catch { case e: IllegalStateException => {} } result } }) @@ -156,18 +170,19 @@ class GlobalStm extends TransactionManagement with Logging { } trait StmUtil { + /** * Schedule a deferred task on the thread local transaction (use within an atomic). * This is executed when the transaction commits. 
*/ - def deferred[T](body: => T): Unit = + def deferred[T](body: => T): Unit = MultiverseStmUtils.scheduleDeferredTask(new Runnable { def run = body }) /** * Schedule a compensating task on the thread local transaction (use within an atomic). * This is executed when the transaction aborts. */ - def compensating[T](body: => T): Unit = + def compensating[T](body: => T): Unit = MultiverseStmUtils.scheduleCompensatingTask(new Runnable { def run = body }) /** @@ -178,6 +193,14 @@ trait StmUtil { /** * Use either-orElse to combine two blocking transactions. + * Usage: + *
+   * either {
+   *   ...
+   * } orElse {
+   *   ...
+   * }
+   * 
*/ def either[T](firstBody: => T) = new { def orElse(secondBody: => T) = new OrElseTemplate[T] { diff --git a/akka-core/src/main/scala/stm/TransactionalMap.scala b/akka-core/src/main/scala/stm/TransactionalMap.scala index be7b9c5189..54d3c59db6 100644 --- a/akka-core/src/main/scala/stm/TransactionalMap.scala +++ b/akka-core/src/main/scala/stm/TransactionalMap.scala @@ -41,36 +41,36 @@ class TransactionalMap[K, V](initialOpt: Option[HashMap[K, V]] = None) extends T } override def remove(key: K) = { - val map = ref.get.get + val map = ref.get val oldValue = map.get(key) - ref.swap(ref.get.get - key) + ref.swap(ref.get - key) oldValue } - def get(key: K): Option[V] = ref.get.get.get(key) + def get(key: K): Option[V] = ref.get.get(key) override def put(key: K, value: V): Option[V] = { - val map = ref.get.get + val map = ref.get val oldValue = map.get(key) ref.swap(map.updated(key, value)) oldValue } override def update(key: K, value: V) = { - val map = ref.get.get + val map = ref.get val oldValue = map.get(key) ref.swap(map.updated(key, value)) } - def iterator = ref.get.get.iterator + def iterator = ref.get.iterator - override def elements: Iterator[(K, V)] = ref.get.get.iterator + override def elements: Iterator[(K, V)] = ref.get.iterator - override def contains(key: K): Boolean = ref.get.get.contains(key) + override def contains(key: K): Boolean = ref.get.contains(key) override def clear = ref.swap(HashMap[K, V]()) - override def size: Int = ref.get.get.size + override def size: Int = ref.get.size override def hashCode: Int = System.identityHashCode(this); diff --git a/akka-core/src/main/scala/stm/TransactionalVector.scala b/akka-core/src/main/scala/stm/TransactionalVector.scala index e2ad6a2aeb..585782d326 100644 --- a/akka-core/src/main/scala/stm/TransactionalVector.scala +++ b/akka-core/src/main/scala/stm/TransactionalVector.scala @@ -32,20 +32,20 @@ class TransactionalVector[T](initialOpt: Option[Vector[T]] = None) extends Trans def +(elem: T) = add(elem) - def 
add(elem: T) = ref.swap(ref.get.get :+ elem) + def add(elem: T) = ref.swap(ref.get :+ elem) - def get(index: Int): T = ref.get.get.apply(index) + def get(index: Int): T = ref.get.apply(index) /** * Removes the tail element of this vector. */ - def pop = ref.swap(ref.get.get.dropRight(1)) + def pop = ref.swap(ref.get.dropRight(1)) - def update(index: Int, elem: T) = ref.swap(ref.get.get.updated(index, elem)) + def update(index: Int, elem: T) = ref.swap(ref.get.updated(index, elem)) - def length: Int = ref.get.get.length + def length: Int = ref.get.length - def apply(index: Int): T = ref.get.get.apply(index) + def apply(index: Int): T = ref.get.apply(index) override def hashCode: Int = System.identityHashCode(this); diff --git a/akka-core/src/main/scala/util/Duration.scala b/akka-core/src/main/scala/util/Duration.scala index f49e1ae04b..0dee2fc139 100644 --- a/akka-core/src/main/scala/util/Duration.scala +++ b/akka-core/src/main/scala/util/Duration.scala @@ -20,8 +20,21 @@ object Duration { /** * Utility for working with java.util.concurrent.TimeUnit durations. + * *

- * Example: + * Examples of usage from Java: + *

+ * import se.scalablesolutions.akka.util.Duration;
+ * import java.util.concurrent.TimeUnit;
+ *
+ * Duration duration = new Duration(100, TimeUnit.MILLISECONDS);
+ * Duration duration = new Duration(5, "seconds");
+ *
+ * duration.toNanos();
+ * 
+ * + *

+ * Examples of usage from Scala: *

  * import se.scalablesolutions.akka.util.Duration
  * import java.util.concurrent.TimeUnit
@@ -31,6 +44,7 @@ object Duration {
  *
  * duration.toNanos
  * 
+ * *

* Implicits are also provided for Int and Long. Example usage: *

@@ -40,6 +54,7 @@ object Duration {
  * 
*/ class Duration(val length: Long, val unit: TimeUnit) { + def this(length: Long, unit: String) = this(length, Duration.timeUnit(unit)) def toNanos = unit.toNanos(length) def toMicros = unit.toMicros(length) def toMillis = unit.toMillis(length) diff --git a/akka-core/src/main/scala/util/ListenerManagement.scala b/akka-core/src/main/scala/util/ListenerManagement.scala index 7316beba64..8b53cc7d0a 100644 --- a/akka-core/src/main/scala/util/ListenerManagement.scala +++ b/akka-core/src/main/scala/util/ListenerManagement.scala @@ -31,8 +31,8 @@ trait ListenerManagement extends Logging { * The listener is stopped by this method. */ def removeListener(listener: ActorRef) = { - listener.stop listeners.remove(listener) + listener.stop } /** diff --git a/akka-core/src/main/scala/util/LockUtil.scala b/akka-core/src/main/scala/util/LockUtil.scala index 09a4b2d650..885e11def7 100644 --- a/akka-core/src/main/scala/util/LockUtil.scala +++ b/akka-core/src/main/scala/util/LockUtil.scala @@ -10,7 +10,7 @@ import java.util.concurrent.locks.{ReentrantReadWriteLock, ReentrantLock} * @author Jonas Bonér */ class ReentrantGuard { - private val lock = new ReentrantLock + val lock = new ReentrantLock def withGuard[T](body: => T): T = { lock.lock @@ -20,6 +20,15 @@ class ReentrantGuard { lock.unlock } } + + def tryWithGuard[T](body: => T): T = { + while(!lock.tryLock) { Thread.sleep(10) } // wait on the monitor to be unlocked + try { + body + } finally { + lock.unlock + } + } } /** diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/ActiveObjectFailer.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/ActiveObjectFailer.java deleted file mode 100644 index 6e30a1a971..0000000000 --- a/akka-core/src/test/java/se/scalablesolutions/akka/actor/ActiveObjectFailer.java +++ /dev/null @@ -1,7 +0,0 @@ -package se.scalablesolutions.akka.actor; - -public class ActiveObjectFailer implements java.io.Serializable { - public int fail() { - throw new 
RuntimeException("expected"); - } -} diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/BarImpl.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/BarImpl.java index 09b50a7347..9cb41a85cf 100644 --- a/akka-core/src/test/java/se/scalablesolutions/akka/actor/BarImpl.java +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/BarImpl.java @@ -1,13 +1,16 @@ package se.scalablesolutions.akka.actor; import com.google.inject.Inject; +import se.scalablesolutions.akka.actor.*; -public class BarImpl implements Bar { +public class BarImpl extends TypedActor implements Bar { @Inject private Ext ext; + public Ext getExt() { return ext; } + public void bar(String msg) { } } diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/Foo.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/Foo.java index 87eb809aba..4cc5b977dc 100644 --- a/akka-core/src/test/java/se/scalablesolutions/akka/actor/Foo.java +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/Foo.java @@ -1,34 +1,14 @@ package se.scalablesolutions.akka.actor; -import com.google.inject.Inject; - -public class Foo extends se.scalablesolutions.akka.serialization.Serializable.JavaJSON { - @Inject - private Bar bar; - public Foo body() { return this; } - public Bar getBar() { - return bar; - } - public String foo(String msg) { - return msg + "return_foo "; - } - public void bar(String msg) { - bar.bar(msg); - } - public String longRunning() { - try { - Thread.sleep(1200); - } catch (InterruptedException e) { - } - return "test"; - } - public String throwsException() { - if (true) throw new RuntimeException("Expected exception; to test fault-tolerance"); - return "test"; - } +public interface Foo { + public Foo body(); + public Bar getBar(); - public int $tag() throws java.rmi.RemoteException - { - return 0; - } + public String foo(String msg); + public void bar(String msg); + + public String longRunning(); + public String throwsException(); + + public int 
$tag() throws java.rmi.RemoteException; } diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/FooImpl.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/FooImpl.java new file mode 100644 index 0000000000..dc6aba481c --- /dev/null +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/FooImpl.java @@ -0,0 +1,40 @@ +package se.scalablesolutions.akka.actor; + +import com.google.inject.Inject; +import se.scalablesolutions.akka.actor.*; + +public class FooImpl extends TypedActor implements Foo { + @Inject + private Bar bar; + + public Foo body() { return this; } + + public Bar getBar() { + return bar; + } + + public String foo(String msg) { + return msg + "return_foo "; + } + + public void bar(String msg) { + bar.bar(msg); + } + + public String longRunning() { + try { + Thread.sleep(1200); + } catch (InterruptedException e) { + } + return "test"; + } + + public String throwsException() { + if (true) throw new RuntimeException("Expected exception; to test fault-tolerance"); + return "test"; + } + + public int $tag() throws java.rmi.RemoteException { + return 0; + } +} diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActor.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActor.java new file mode 100644 index 0000000000..ee7998f69a --- /dev/null +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActor.java @@ -0,0 +1,12 @@ +package se.scalablesolutions.akka.actor; + +public interface NestedTransactionalTypedActor { + public String getMapState(String key); + public String getVectorState(); + public String getRefState(); + public void setMapState(String key, String msg); + public void setVectorState(String msg); + public void setRefState(String msg); + public void success(String key, String msg); + public String failure(String key, String msg, TypedActorFailer failer); +} \ No newline at end of file diff --git 
a/akka-core/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalActiveObject.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActorImpl.java similarity index 59% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalActiveObject.java rename to akka-core/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActorImpl.java index af6bb8245c..5b7eab615e 100644 --- a/akka-core/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalActiveObject.java +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActorImpl.java @@ -1,17 +1,15 @@ package se.scalablesolutions.akka.actor; -import se.scalablesolutions.akka.actor.annotation.transactionrequired; -import se.scalablesolutions.akka.actor.annotation.inittransactionalstate; +import se.scalablesolutions.akka.actor.*; import se.scalablesolutions.akka.stm.*; -@transactionrequired -public class NestedTransactionalActiveObject { +public class NestedTransactionalTypedActorImpl extends TypedTransactor implements NestedTransactionalTypedActor { private TransactionalMap mapState; private TransactionalVector vectorState; private Ref refState; private boolean isInitialized = false; - @inittransactionalstate + @Override public void init() { if (!isInitialized) { mapState = new TransactionalMap(); @@ -25,62 +23,37 @@ public class NestedTransactionalActiveObject { return (String) mapState.get(key).get(); } - public String getVectorState() { return (String) vectorState.last(); } - public String getRefState() { - return (String) refState.get().get(); + return (String) refState.get(); } - public void setMapState(String key, String msg) { mapState.put(key, msg); } - public void setVectorState(String msg) { vectorState.add(msg); } - public void setRefState(String msg) { refState.swap(msg); } - public void success(String key, String msg) { mapState.put(key, msg); vectorState.add(msg); refState.swap(msg); } 
- - public String failure(String key, String msg, ActiveObjectFailer failer) { + public String failure(String key, String msg, TypedActorFailer failer) { mapState.put(key, msg); vectorState.add(msg); refState.swap(msg); failer.fail(); return msg; } - - - public void thisMethodHangs(String key, String msg, ActiveObjectFailer failer) { - setMapState(key, msg); - } - - /* - public void clashOk(String key, String msg, InMemClasher clasher) { - mapState.put(key, msg); - clasher.clash(); - } - - public void clashNotOk(String key, String msg, InMemClasher clasher) { - mapState.put(key, msg); - clasher.clash(); - this.success("clash", "clash"); - } - */ } \ No newline at end of file diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/ReplyUntypedActor.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/ReplyUntypedActor.java new file mode 100644 index 0000000000..8510c3889b --- /dev/null +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/ReplyUntypedActor.java @@ -0,0 +1,28 @@ +package se.scalablesolutions.akka.actor; + +import se.scalablesolutions.akka.actor.*; + +public class ReplyUntypedActor extends UntypedActor { + public void onReceive(Object message, UntypedActorRef context) throws Exception { + if (message instanceof String) { + String str = (String)message; + + if (str.equals("ReplyToSendOneWayUsingReply")) { + context.replyUnsafe("Reply"); + } else if (str.equals("ReplyToSendOneWayUsingSender")) { + context.getSender().get().sendOneWay("Reply"); + + } else if (str.equals("ReplyToSendRequestReplyUsingReply")) { + context.replyUnsafe("Reply"); + } else if (str.equals("ReplyToSendRequestReplyUsingFuture")) { + context.getSenderFuture().get().completeWithResult("Reply"); + + } else if (str.equals("ReplyToSendRequestReplyFutureUsingReply")) { + context.replyUnsafe("Reply"); + } else if (str.equals("ReplyToSendRequestReplyFutureUsingFuture")) { + context.getSenderFuture().get().completeWithResult("Reply"); + + } else throw new 
IllegalArgumentException("Unknown message: " + str); + } else throw new IllegalArgumentException("Unknown message: " + message); + } +} diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java index 50f3e43221..ae47276ba6 100644 --- a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java @@ -2,36 +2,7 @@ package se.scalablesolutions.akka.actor; import java.util.concurrent.CountDownLatch; -public class SamplePojo { - - private CountDownLatch latch; - - public boolean _pre = false; - public boolean _post = false; - public boolean _down = false; - - public CountDownLatch newCountdownLatch(int count) { - latch = new CountDownLatch(count); - return latch; - } - - public String fail() { - throw new RuntimeException("expected"); - } - - public void pre() { - _pre = true; - latch.countDown(); - } - - public void post() { - _post = true; - latch.countDown(); - } - - public void down() { - _down = true; - latch.countDown(); - } - -} +public interface SamplePojo { + public String greet(String s); + public String fail(); +} \ No newline at end of file diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SamplePojoAnnotated.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SamplePojoAnnotated.java deleted file mode 100644 index 8bf4ba36d3..0000000000 --- a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SamplePojoAnnotated.java +++ /dev/null @@ -1,52 +0,0 @@ -package se.scalablesolutions.akka.actor; - -import se.scalablesolutions.akka.actor.annotation.postrestart; -import se.scalablesolutions.akka.actor.annotation.prerestart; -import se.scalablesolutions.akka.actor.annotation.shutdown; - -import java.util.concurrent.CountDownLatch; - -public class SamplePojoAnnotated { - - private CountDownLatch latch; - - public boolean _pre = false; - public 
boolean _post = false; - public boolean _down = false; - - public SamplePojoAnnotated() { - latch = new CountDownLatch(1); - } - - public CountDownLatch newCountdownLatch(int count) { - latch = new CountDownLatch(count); - return latch; - } - - public String greet(String s) { - return "hello " + s; - } - - public String fail() { - throw new RuntimeException("expected"); - } - - @prerestart - public void pre() { - _pre = true; - latch.countDown(); - } - - @postrestart - public void post() { - _post = true; - latch.countDown(); - } - - @shutdown - public void down() { - _down = true; - latch.countDown(); - } - -} \ No newline at end of file diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SamplePojoImpl.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SamplePojoImpl.java new file mode 100644 index 0000000000..d57232b629 --- /dev/null +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SamplePojoImpl.java @@ -0,0 +1,45 @@ +package se.scalablesolutions.akka.actor; + +import se.scalablesolutions.akka.actor.*; + +import java.util.concurrent.CountDownLatch; + +public class SamplePojoImpl extends TypedActor implements SamplePojo { + + public static CountDownLatch latch = new CountDownLatch(1); + + public static boolean _pre = false; + public static boolean _post = false; + public static boolean _down = false; + public static void reset() { + _pre = false; + _post = false; + _down = false; + } + + public String greet(String s) { + return "hello " + s; + } + + public String fail() { + throw new RuntimeException("expected"); + } + + @Override + public void preRestart(Throwable e) { + _pre = true; + latch.countDown(); + } + + @Override + public void postRestart(Throwable e) { + _post = true; + latch.countDown(); + } + + @Override + public void shutdown() { + _down = true; + latch.countDown(); + } +} \ No newline at end of file diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SampleUntypedActor.java 
b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SampleUntypedActor.java new file mode 100644 index 0000000000..8040e1394f --- /dev/null +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SampleUntypedActor.java @@ -0,0 +1,53 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ +package se.scalablesolutions.akka.actor; + +import se.scalablesolutions.akka.actor.*; + +/** + * Here is an example on how to create and use an UntypedActor. + * + * @author Jonas Bonér + */ +public class SampleUntypedActor extends UntypedActor { + + public void onReceive(Object message, UntypedActorRef self) throws Exception { + if (message instanceof String) { + String msg = (String)message; + System.out.println("Received message: " + msg); + + if (msg.equals("UseReply")) { + // Reply to original sender of message using the 'replyUnsafe' method + self.replyUnsafe(msg + ":" + self.getUuid()); + + } else if (msg.equals("UseSender") && self.getSender().isDefined()) { + // Reply to original sender of message using the sender reference + // also passing along my own refererence (the self) + self.getSender().get().sendOneWay(msg, self); + + } else if (msg.equals("UseSenderFuture") && self.getSenderFuture().isDefined()) { + // Reply to original sender of message using the sender future reference + self.getSenderFuture().get().completeWithResult(msg); + + } else if (msg.equals("SendToSelf")) { + // Send fire-forget message to the actor itself recursively + self.sendOneWay(msg); + + } else if (msg.equals("ForwardMessage")) { + // Retreive an actor from the ActorRegistry by ID and get an ActorRef back + ActorRef actorRef = ActorRegistry.actorsFor("some-actor-id").head(); + // Wrap the ActorRef in an UntypedActorRef and forward the message to this actor + UntypedActorRef.wrap(actorRef).forward(msg, self); + + } else throw new IllegalArgumentException("Unknown message: " + message); + } else throw new IllegalArgumentException("Unknown message: " + message); + } + + 
public static void main(String[] args) { + UntypedActorRef actor = UntypedActor.actorOf(SampleUntypedActor.class); + actor.start(); + actor.sendOneWay("SendToSelf"); + actor.stop(); + } +} diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SenderUntypedActor.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SenderUntypedActor.java new file mode 100644 index 0000000000..7234ff27a4 --- /dev/null +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SenderUntypedActor.java @@ -0,0 +1,44 @@ +package se.scalablesolutions.akka.actor; + +import se.scalablesolutions.akka.actor.*; +import se.scalablesolutions.akka.dispatch.CompletableFuture; + +public class SenderUntypedActor extends UntypedActor { + private UntypedActorRef replyActor = null; + + public void onReceive(Object message, UntypedActorRef context) throws Exception { + if (message instanceof UntypedActorRef) replyActor = (UntypedActorRef)message; + else if (message instanceof String) { + if (replyActor == null) throw new IllegalStateException("Need to receive a ReplyUntypedActor before any other message."); + String str = (String)message; + + if (str.equals("ReplyToSendOneWayUsingReply")) { + replyActor.sendOneWay("ReplyToSendOneWayUsingReply", context); + } else if (str.equals("ReplyToSendOneWayUsingSender")) { + replyActor.sendOneWay("ReplyToSendOneWayUsingSender", context); + + } else if (str.equals("ReplyToSendRequestReplyUsingReply")) { + UntypedActorTestState.log = (String)replyActor.sendRequestReply("ReplyToSendRequestReplyUsingReply", context); + UntypedActorTestState.finished.await(); + } else if (str.equals("ReplyToSendRequestReplyUsingFuture")) { + UntypedActorTestState.log = (String)replyActor.sendRequestReply("ReplyToSendRequestReplyUsingFuture", context); + UntypedActorTestState.finished.await(); + + } else if (str.equals("ReplyToSendRequestReplyFutureUsingReply")) { + CompletableFuture future = 
(CompletableFuture)replyActor.sendRequestReplyFuture("ReplyToSendRequestReplyFutureUsingReply", context); + future.await(); + UntypedActorTestState.log = (String)future.result().get(); + UntypedActorTestState.finished.await(); + } else if (str.equals("ReplyToSendRequestReplyFutureUsingFuture")) { + CompletableFuture future = (CompletableFuture)replyActor.sendRequestReplyFuture("ReplyToSendRequestReplyFutureUsingFuture", context); + future.await(); + UntypedActorTestState.log = (String)future.result().get(); + UntypedActorTestState.finished.await(); + + } else if (str.equals("Reply")) { + UntypedActorTestState.log = "Reply"; + UntypedActorTestState.finished.await(); + } + } + } +} diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java index ee549cef23..d7ab60b676 100644 --- a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java @@ -1,36 +1,11 @@ package se.scalablesolutions.akka.actor; -import se.scalablesolutions.akka.actor.annotation.prerestart; -import se.scalablesolutions.akka.actor.annotation.postrestart; +import se.scalablesolutions.akka.dispatch.CompletableFuture; -public class SimpleJavaPojo { - - public boolean pre = false; - public boolean post = false; - - private String name; - - public void setName(String name) { - this.name = name; - } - - public String getName() { - return name; - } - - @prerestart - public void pre() { - System.out.println("** pre()"); - pre = true; - } - - @postrestart - public void post() { - System.out.println("** post()"); - post = true; - } - - public void throwException() { - throw new RuntimeException(); - } +public interface SimpleJavaPojo { + public Object getSender(); + public CompletableFuture getSenderFuture(); + public void setName(String name); + public String getName(); + public void throwException(); } 
diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCaller.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCaller.java new file mode 100644 index 0000000000..a1bdab5337 --- /dev/null +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCaller.java @@ -0,0 +1,9 @@ +package se.scalablesolutions.akka.actor; + +import se.scalablesolutions.akka.dispatch.CompletableFuture; + +public interface SimpleJavaPojoCaller { + public void setPojo(SimpleJavaPojo pojo); + public Object getSenderFromSimpleJavaPojo(); + public CompletableFuture getSenderFutureFromSimpleJavaPojo(); +} diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCallerImpl.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCallerImpl.java new file mode 100644 index 0000000000..15a6aec8e0 --- /dev/null +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCallerImpl.java @@ -0,0 +1,21 @@ +package se.scalablesolutions.akka.actor; + +import se.scalablesolutions.akka.actor.*; +import se.scalablesolutions.akka.dispatch.CompletableFuture; + +public class SimpleJavaPojoCallerImpl extends TypedActor implements SimpleJavaPojoCaller { + + SimpleJavaPojo pojo; + + public void setPojo(SimpleJavaPojo pojo) { + this.pojo = pojo; + } + + public Object getSenderFromSimpleJavaPojo() { + return pojo.getSender(); + } + + public CompletableFuture getSenderFutureFromSimpleJavaPojo() { + return pojo.getSenderFuture(); + } +} diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoImpl.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoImpl.java new file mode 100644 index 0000000000..3b3508c0ab --- /dev/null +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoImpl.java @@ -0,0 +1,48 @@ +package se.scalablesolutions.akka.actor; + +import se.scalablesolutions.akka.actor.*; +import 
se.scalablesolutions.akka.dispatch.CompletableFuture; + +public class SimpleJavaPojoImpl extends TypedActor implements SimpleJavaPojo { + + public static boolean _pre = false; + public static boolean _post = false; + public static boolean _down = false; + public static void reset() { + _pre = false; + _post = false; + _down = false; + } + + private String name; + + public Object getSender() { + return getContext().getSender(); + } + + public CompletableFuture getSenderFuture() { + return getContext().getSenderFuture(); + } + + public void setName(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + @Override + public void preRestart(Throwable e) { + _pre = true; + } + + @Override + public void postRestart(Throwable e) { + _post = true; + } + + public void throwException() { + throw new RuntimeException(); + } +} diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActor.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActor.java new file mode 100644 index 0000000000..6e7c43745b --- /dev/null +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActor.java @@ -0,0 +1,14 @@ +package se.scalablesolutions.akka.actor; + +public interface TransactionalTypedActor { + public String getMapState(String key); + public String getVectorState(); + public String getRefState(); + public void setMapState(String key, String msg); + public void setVectorState(String msg); + public void setRefState(String msg); + public void success(String key, String msg); + public void success(String key, String msg, NestedTransactionalTypedActor nested); + public String failure(String key, String msg, TypedActorFailer failer); + public String failure(String key, String msg, NestedTransactionalTypedActor nested, TypedActorFailer failer); +} diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/TransactionalActiveObject.java 
b/akka-core/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActorImpl.java similarity index 67% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/TransactionalActiveObject.java rename to akka-core/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActorImpl.java index 515f4fafee..f992028caf 100644 --- a/akka-core/src/test/java/se/scalablesolutions/akka/actor/TransactionalActiveObject.java +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActorImpl.java @@ -1,20 +1,16 @@ package se.scalablesolutions.akka.actor; -import se.scalablesolutions.akka.actor.annotation.transactionrequired; -import se.scalablesolutions.akka.actor.annotation.prerestart; -import se.scalablesolutions.akka.actor.annotation.postrestart; -import se.scalablesolutions.akka.actor.annotation.inittransactionalstate; +import se.scalablesolutions.akka.actor.*; import se.scalablesolutions.akka.stm.*; -@transactionrequired -public class TransactionalActiveObject { +public class TransactionalTypedActorImpl extends TypedTransactor implements TransactionalTypedActor { private TransactionalMap mapState; private TransactionalVector vectorState; private Ref refState; private boolean isInitialized = false; - - @inittransactionalstate - public void init() { + + @Override + public void initTransactionalState() { if (!isInitialized) { mapState = new TransactionalMap(); vectorState = new TransactionalVector(); @@ -32,7 +28,7 @@ public class TransactionalActiveObject { } public String getRefState() { - return (String)refState.get().get(); + return (String)refState.get(); } public void setMapState(String key, String msg) { @@ -53,14 +49,14 @@ public class TransactionalActiveObject { refState.swap(msg); } - public void success(String key, String msg, NestedTransactionalActiveObject nested) { + public void success(String key, String msg, NestedTransactionalTypedActor nested) { mapState.put(key, msg); vectorState.add(msg); refState.swap(msg); 
nested.success(key, msg); } - public String failure(String key, String msg, ActiveObjectFailer failer) { + public String failure(String key, String msg, TypedActorFailer failer) { mapState.put(key, msg); vectorState.add(msg); refState.swap(msg); @@ -68,7 +64,7 @@ public class TransactionalActiveObject { return msg; } - public String failure(String key, String msg, NestedTransactionalActiveObject nested, ActiveObjectFailer failer) { + public String failure(String key, String msg, NestedTransactionalTypedActor nested, TypedActorFailer failer) { mapState.put(key, msg); vectorState.add(msg); refState.swap(msg); @@ -76,17 +72,13 @@ public class TransactionalActiveObject { return msg; } - public void thisMethodHangs(String key, String msg, ActiveObjectFailer failer) { - setMapState(key, msg); - } - - @prerestart - public void preRestart() { + @Override + public void preRestart(Throwable e) { System.out.println("################ PRE RESTART"); } - @postrestart - public void postRestart() { + @Override + public void postRestart(Throwable e) { System.out.println("################ POST RESTART"); } } diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailer.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailer.java new file mode 100644 index 0000000000..b4a69e1cd1 --- /dev/null +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailer.java @@ -0,0 +1,5 @@ +package se.scalablesolutions.akka.actor; + +public interface TypedActorFailer extends java.io.Serializable { + public int fail(); +} diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailerImpl.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailerImpl.java new file mode 100644 index 0000000000..0d01fd801c --- /dev/null +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailerImpl.java @@ -0,0 +1,9 @@ +package se.scalablesolutions.akka.actor; + +import 
se.scalablesolutions.akka.actor.*; + +public class TypedActorFailerImpl extends TypedActor implements TypedActorFailer { + public int fail() { + throw new RuntimeException("expected"); + } +} diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/UntypedActorTestState.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/UntypedActorTestState.java new file mode 100644 index 0000000000..b94c5870fd --- /dev/null +++ b/akka-core/src/test/java/se/scalablesolutions/akka/actor/UntypedActorTestState.java @@ -0,0 +1,10 @@ +package se.scalablesolutions.akka.actor; + +import se.scalablesolutions.akka.actor.*; + +import java.util.concurrent.CyclicBarrier; + +public class UntypedActorTestState { + public static String log = "NIL"; + public static CyclicBarrier finished = null; +} diff --git a/akka-core/src/test/resources/META-INF/aop.xml b/akka-core/src/test/resources/META-INF/aop.xml index 2f8d5159a8..bdc167ca54 100644 --- a/akka-core/src/test/resources/META-INF/aop.xml +++ b/akka-core/src/test/resources/META-INF/aop.xml @@ -1,7 +1,7 @@ - + diff --git a/akka-core/src/test/scala/ActiveObjectLifecycleSpec.scala b/akka-core/src/test/scala/ActiveObjectLifecycleSpec.scala deleted file mode 100644 index 97b01c12ce..0000000000 --- a/akka-core/src/test/scala/ActiveObjectLifecycleSpec.scala +++ /dev/null @@ -1,155 +0,0 @@ -package se.scalablesolutions.akka.actor - -import org.junit.runner.RunWith -import org.scalatest.{BeforeAndAfterAll, Spec} -import org.scalatest.junit.JUnitRunner -import org.scalatest.matchers.ShouldMatchers - -import se.scalablesolutions.akka.config.ActiveObjectConfigurator -import se.scalablesolutions.akka.config.JavaConfig._ - -/** - * @author Martin Krasser - */ -@RunWith(classOf[JUnitRunner]) -class ActiveObjectLifecycleSpec extends Spec with ShouldMatchers with BeforeAndAfterAll { - var conf1: ActiveObjectConfigurator = _ - var conf2: ActiveObjectConfigurator = _ - var conf3: ActiveObjectConfigurator = _ - var conf4: 
ActiveObjectConfigurator = _ - - override protected def beforeAll() = { - val strategy = new RestartStrategy(new AllForOne(), 3, 1000, Array(classOf[Exception])) - val comp1 = new Component(classOf[SamplePojoAnnotated], new LifeCycle(new Permanent()), 1000) - val comp2 = new Component(classOf[SamplePojoAnnotated], new LifeCycle(new Temporary()), 1000) - val comp3 = new Component(classOf[SamplePojo], new LifeCycle(new Permanent(), new RestartCallbacks("pre", "post")), 1000) - val comp4 = new Component(classOf[SamplePojo], new LifeCycle(new Temporary(), new ShutdownCallback("down")), 1000) - conf1 = new ActiveObjectConfigurator().configure(strategy, Array(comp1)).supervise - conf2 = new ActiveObjectConfigurator().configure(strategy, Array(comp2)).supervise - conf3 = new ActiveObjectConfigurator().configure(strategy, Array(comp3)).supervise - conf4 = new ActiveObjectConfigurator().configure(strategy, Array(comp4)).supervise - } - - override protected def afterAll() = { - conf1.stop - conf2.stop - conf3.stop - conf4.stop - } - - describe("ActiveObject lifecycle management") { - it("should restart supervised, annotated active object on failure") { - val obj = conf1.getInstance[SamplePojoAnnotated](classOf[SamplePojoAnnotated]) - val cdl = obj.newCountdownLatch(2) - assert(AspectInitRegistry.initFor(obj) ne null) - try { - obj.fail - fail("expected exception not thrown") - } catch { - case e: RuntimeException => { - cdl.await - assert(obj._pre) - assert(obj._post) - assert(!obj._down) - assert(AspectInitRegistry.initFor(obj) ne null) - } - } - } - - it("should shutdown supervised, annotated active object on failure") { - val obj = conf2.getInstance[SamplePojoAnnotated](classOf[SamplePojoAnnotated]) - val cdl = obj.newCountdownLatch(1) - assert(AspectInitRegistry.initFor(obj) ne null) - try { - obj.fail - fail("expected exception not thrown") - } catch { - case e: RuntimeException => { - cdl.await - assert(!obj._pre) - assert(!obj._post) - assert(obj._down) - 
assert(AspectInitRegistry.initFor(obj) eq null) - } - } - } - - it("should restart supervised, non-annotated active object on failure") { - val obj = conf3.getInstance[SamplePojo](classOf[SamplePojo]) - val cdl = obj.newCountdownLatch(2) - assert(AspectInitRegistry.initFor(obj) ne null) - try { - obj.fail - fail("expected exception not thrown") - } catch { - case e: RuntimeException => { - cdl.await - assert(obj._pre) - assert(obj._post) - assert(!obj._down) - assert(AspectInitRegistry.initFor(obj) ne null) - } - } - } - - it("should shutdown supervised, non-annotated active object on failure") { - val obj = conf4.getInstance[SamplePojo](classOf[SamplePojo]) - val cdl = obj.newCountdownLatch(1) - assert(AspectInitRegistry.initFor(obj) ne null) - try { - obj.fail - fail("expected exception not thrown") - } catch { - case e: RuntimeException => { - cdl.await - assert(!obj._pre) - assert(!obj._post) - assert(obj._down) - assert(AspectInitRegistry.initFor(obj) eq null) - } - } - } - - it("should shutdown non-supervised, annotated active object on ActiveObject.stop") { - val obj = ActiveObject.newInstance(classOf[SamplePojoAnnotated]) - assert(AspectInitRegistry.initFor(obj) ne null) - assert("hello akka" === obj.greet("akka")) - ActiveObject.stop(obj) - assert(AspectInitRegistry.initFor(obj) eq null) - assert(!obj._pre) - assert(!obj._post) - assert(obj._down) - try { - obj.greet("akka") - fail("access to stopped active object") - } catch { - case e: Exception => { /* test passed */ } - } - } - - it("should shutdown non-supervised, annotated active object on ActorRegistry.shutdownAll") { - val obj = ActiveObject.newInstance(classOf[SamplePojoAnnotated]) - assert(AspectInitRegistry.initFor(obj) ne null) - assert("hello akka" === obj.greet("akka")) - ActorRegistry.shutdownAll - assert(AspectInitRegistry.initFor(obj) eq null) - assert(!obj._pre) - assert(!obj._post) - assert(obj._down) - try { - obj.greet("akka") - fail("access to stopped active object") - } catch { - 
case e: Exception => { /* test passed */ } - } - } - - it("should shutdown non-supervised, non-initialized active object on ActiveObject.stop") { - val obj = ActiveObject.newInstance(classOf[SamplePojoAnnotated]) - ActiveObject.stop(obj) - assert(!obj._pre) - assert(!obj._post) - assert(obj._down) - } - } -} \ No newline at end of file diff --git a/akka-core/src/test/scala/Messages.scala b/akka-core/src/test/scala/Messages.scala index 436257e3b5..ad1fcf8885 100644 --- a/akka-core/src/test/scala/Messages.scala +++ b/akka-core/src/test/scala/Messages.scala @@ -7,9 +7,9 @@ package se.scalablesolutions.akka import se.scalablesolutions.akka.serialization.Serializable import sbinary._ import sbinary.Operations._ -import sbinary.DefaultProtocol._ sealed abstract class TestMessage + case object Ping extends TestMessage case object Pong extends TestMessage case object OneWay extends TestMessage diff --git a/akka-core/src/test/scala/RefSpec.scala b/akka-core/src/test/scala/RefSpec.scala deleted file mode 100644 index 805e0834ea..0000000000 --- a/akka-core/src/test/scala/RefSpec.scala +++ /dev/null @@ -1,156 +0,0 @@ -package se.scalablesolutions.akka.stm - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith - -import se.scalablesolutions.akka.actor.Actor._ - -@RunWith(classOf[JUnitRunner]) -class RefSpec extends Spec with ShouldMatchers { - - describe("A Ref") { - import local._ - - it("should optionally accept an initial value") { - val emptyRef = Ref[Int] - val empty = atomic { emptyRef.get } - - empty should be(None) - - val ref = Ref(3) - val value = atomic { ref.get.get } - - value should be(3) - } - - it("should keep the initial value, even if the first transaction is rolled back") { - val ref = Ref(3) - - try { - atomic(DefaultLocalTransactionFactory) { - ref.swap(5) - throw new Exception - } - } catch { - case e => {} - } - - val value = atomic { ref.get.get } - - value 
should be(3) - } - - it("should be settable using swap") { - val ref = Ref[Int] - - atomic { ref.swap(3) } - - val value = atomic { ref.get.get } - - value should be(3) - } - - it("should be changeable using alter") { - val ref = Ref(0) - - def increment = atomic { - ref alter (_ + 1) - } - - increment - increment - increment - - val value = atomic { ref.get.get } - - value should be(3) - } - - it("should not be changeable using alter if no value has been set") { - val ref = Ref[Int] - - def increment = atomic { - ref alter (_ + 1) - } - - evaluating { increment } should produce [RuntimeException] - } - - it("should be able to be mapped") { - val ref1 = Ref(1) - - val ref2 = atomic { - ref1 map (_ + 1) - } - - val value1 = atomic { ref1.get.get } - val value2 = atomic { ref2.get.get } - - value1 should be(1) - value2 should be(2) - } - - it("should be able to be used in a 'foreach' for comprehension") { - val ref = Ref(3) - - var result = 0 - - atomic { - for (value <- ref) { - result += value - } - } - - result should be(3) - } - - it("should be able to be used in a 'map' for comprehension") { - val ref1 = Ref(1) - - val ref2 = atomic { - for (value <- ref1) yield value + 2 - } - - val value2 = atomic { ref2.get.get } - - value2 should be(3) - } - - it("should be able to be used in a 'flatMap' for comprehension") { - val ref1 = Ref(1) - val ref2 = Ref(2) - - val ref3 = atomic { - for { - value1 <- ref1 - value2 <- ref2 - } yield value1 + value2 - } - - val value3 = atomic { ref3.get.get } - - value3 should be(3) - } - - it("should be able to be used in a 'filter' for comprehension") { - val ref1 = Ref(1) - - val refLess2 = atomic { - for (value <- ref1 if value < 2) yield value - } - - val optLess2 = atomic { refLess2.get } - - val refGreater2 = atomic { - for (value <- ref1 if value > 2) yield value - } - - val optGreater2 = atomic { refGreater2.get } - - optLess2 should be(Some(1)) - optGreater2 should be(None) - } - } -} diff --git 
a/akka-core/src/test/scala/SchedulerSpec.scala b/akka-core/src/test/scala/SchedulerSpec.scala deleted file mode 100644 index 0fe7c45ea5..0000000000 --- a/akka-core/src/test/scala/SchedulerSpec.scala +++ /dev/null @@ -1,38 +0,0 @@ -package se.scalablesolutions.akka.actor - -import org.scalatest.junit.JUnitSuite -import Actor._ -import java.util.concurrent.{CountDownLatch, TimeUnit} -import org.junit.{After, Test} - -class SchedulerSpec extends JUnitSuite { - - @Test def schedulerShouldScheduleMoreThanOnce = { - - case object Tick - val countDownLatch = new CountDownLatch(3) - val tickActor = actor { - case Tick => countDownLatch.countDown - } - // run every 50 millisec - Scheduler.schedule(tickActor, Tick, 0, 50, TimeUnit.MILLISECONDS) - - // after max 1 second it should be executed at least the 3 times already - assert(countDownLatch.await(1, TimeUnit.SECONDS)) - } - - @Test def schedulerShouldScheduleOnce = { - case object Tick - val countDownLatch = new CountDownLatch(2) - val tickActor = actor { - case Tick => countDownLatch.countDown - } - // run every 50 millisec - Scheduler.scheduleOnce(tickActor, Tick, 50, TimeUnit.MILLISECONDS) - - // after 1 second the wait should fail - assert(countDownLatch.await(1, TimeUnit.SECONDS) == false) - // should still be 1 left - assert(countDownLatch.getCount == 1) - } -} diff --git a/akka-core/src/test/scala/ActorFireForgetRequestReplySpec.scala b/akka-core/src/test/scala/actor/actor/ActorFireForgetRequestReplySpec.scala similarity index 100% rename from akka-core/src/test/scala/ActorFireForgetRequestReplySpec.scala rename to akka-core/src/test/scala/actor/actor/ActorFireForgetRequestReplySpec.scala diff --git a/akka-core/src/test/scala/AgentSpec.scala b/akka-core/src/test/scala/actor/actor/AgentSpec.scala similarity index 95% rename from akka-core/src/test/scala/AgentSpec.scala rename to akka-core/src/test/scala/actor/actor/AgentSpec.scala index de4326c646..71911c3ad8 100644 --- a/akka-core/src/test/scala/AgentSpec.scala +++ 
b/akka-core/src/test/scala/actor/actor/AgentSpec.scala @@ -1,10 +1,6 @@ package se.scalablesolutions.akka.actor import se.scalablesolutions.akka.actor.Actor.transactor -import se.scalablesolutions.akka.stm.Transaction.Global.atomic -import se.scalablesolutions.akka.util.Logging -import Actor._ - import org.scalatest.Suite import org.scalatest.junit.JUnitRunner import org.scalatest.matchers.MustMatchers diff --git a/akka-core/src/test/scala/Bench.scala b/akka-core/src/test/scala/actor/actor/Bench.scala similarity index 100% rename from akka-core/src/test/scala/Bench.scala rename to akka-core/src/test/scala/actor/actor/Bench.scala diff --git a/akka-core/src/test/scala/ForwardActorSpec.scala b/akka-core/src/test/scala/actor/actor/ForwardActorSpec.scala similarity index 100% rename from akka-core/src/test/scala/ForwardActorSpec.scala rename to akka-core/src/test/scala/actor/actor/ForwardActorSpec.scala diff --git a/akka-core/src/test/scala/ReceiveTimeoutSpec.scala b/akka-core/src/test/scala/actor/actor/ReceiveTimeoutSpec.scala similarity index 100% rename from akka-core/src/test/scala/ReceiveTimeoutSpec.scala rename to akka-core/src/test/scala/actor/actor/ReceiveTimeoutSpec.scala index 5c50337894..ff43467efc 100644 --- a/akka-core/src/test/scala/ReceiveTimeoutSpec.scala +++ b/akka-core/src/test/scala/actor/actor/ReceiveTimeoutSpec.scala @@ -3,9 +3,9 @@ package se.scalablesolutions.akka.actor import org.scalatest.junit.JUnitSuite import org.junit.Test -import Actor._ import java.util.concurrent.TimeUnit import org.multiverse.api.latches.StandardLatch +import Actor._ class ReceiveTimeoutSpec extends JUnitSuite { diff --git a/akka-core/src/test/scala/TransactorSpec.scala b/akka-core/src/test/scala/actor/actor/TransactorSpec.scala similarity index 97% rename from akka-core/src/test/scala/TransactorSpec.scala rename to akka-core/src/test/scala/actor/actor/TransactorSpec.scala index 872b160fb1..dd23a76a88 100644 --- a/akka-core/src/test/scala/TransactorSpec.scala +++ 
b/akka-core/src/test/scala/actor/actor/TransactorSpec.scala @@ -35,9 +35,9 @@ class StatefulTransactor(expectedInvocationCount: Int) extends Transactor { val notifier = new CountDownLatch(expectedInvocationCount) - private lazy val mapState = TransactionalMap[String, String]() - private lazy val vectorState = TransactionalVector[String]() - private lazy val refState = Ref[String]() + private val mapState = TransactionalMap[String, String]() + private val vectorState = TransactionalVector[String]() + private val refState = Ref[String]() def receive = { case GetNotifier => @@ -49,7 +49,7 @@ class StatefulTransactor(expectedInvocationCount: Int) extends Transactor { self.reply(vectorState.length.asInstanceOf[AnyRef]) notifier.countDown case GetRefState => - self.reply(refState.get.get) + self.reply(refState.get) notifier.countDown case SetMapState(key, msg) => mapState.put(key, msg) @@ -76,7 +76,6 @@ class StatefulTransactor(expectedInvocationCount: Int) extends Transactor { failer !! "Failure" self.reply(msg) notifier.countDown - case SetMapStateOneWay(key, msg) => mapState.put(key, msg) notifier.countDown @@ -95,8 +94,8 @@ class StatefulTransactor(expectedInvocationCount: Int) extends Transactor { mapState.put(key, msg) vectorState.add(msg) refState.swap(msg) - failer ! "Failure" notifier.countDown + failer ! "Failure" } } @@ -110,6 +109,7 @@ class FailerTransactor extends Transactor { } class TransactorSpec extends JUnitSuite { + @Test def shouldOneWayMapShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { val stateful = actorOf(new StatefulTransactor(2)) @@ -139,7 +139,7 @@ class TransactorSpec extends JUnitSuite { stateful ! SetMapStateOneWay("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state stateful ! FailureOneWay("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) // call failing transactionrequired method val notifier = (stateful !! 
GetNotifier).as[CountDownLatch] - assert(notifier.get.await(1, TimeUnit.SECONDS)) + assert(notifier.get.await(5, TimeUnit.SECONDS)) assert("init" === (stateful !! GetMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure")).get) // check that state is == init state } diff --git a/akka-core/src/test/scala/actor/supervisor/RestartStrategySpec.scala b/akka-core/src/test/scala/actor/supervisor/RestartStrategySpec.scala new file mode 100644 index 0000000000..5023c756e1 --- /dev/null +++ b/akka-core/src/test/scala/actor/supervisor/RestartStrategySpec.scala @@ -0,0 +1,74 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import org.scalatest.junit.JUnitSuite +import org.junit.Test + +import Actor._ +import se.scalablesolutions.akka.config.OneForOneStrategy +import java.util.concurrent.{TimeUnit, CountDownLatch} +import se.scalablesolutions.akka.config.ScalaConfig.{Permanent, LifeCycle} +import org.multiverse.api.latches.StandardLatch + +class RestartStrategySpec extends JUnitSuite { + + object Ping + object Crash + + @Test + def slaveShouldStayDeadAfterMaxRestarts = { + + val boss = actorOf(new Actor{ + self.trapExit = List(classOf[Throwable]) + self.faultHandler = Some(OneForOneStrategy(1, 1000)) + protected def receive = { case _ => () } + }).start + + val restartLatch = new StandardLatch + val secondRestartLatch = new StandardLatch + val countDownLatch = new CountDownLatch(2) + + + val slave = actorOf(new Actor{ + + protected def receive = { + case Ping => countDownLatch.countDown + case Crash => throw new Exception("Crashing...") + } + override def postRestart(reason: Throwable) = { + restartLatch.open + } + + override def shutdown = { + if (restartLatch.isOpen) { + secondRestartLatch.open + } + } + }) + boss.startLink(slave) + + slave ! Ping + slave ! Crash + slave ! 
Ping + + // test restart and post restart ping + assert(restartLatch.tryAwait(1, TimeUnit.SECONDS)) + assert(countDownLatch.await(1, TimeUnit.SECONDS)) + + // now crash again... should not restart + slave ! Crash + + assert(secondRestartLatch.tryAwait(1, TimeUnit.SECONDS)) + val exceptionLatch = new StandardLatch + try { + slave ! Ping // this should fail + } catch { + case e => exceptionLatch.open // expected here + } + assert(exceptionLatch.tryAwait(1, TimeUnit.SECONDS)) + } +} + diff --git a/akka-core/src/test/scala/SupervisorHierarchySpec.scala b/akka-core/src/test/scala/actor/supervisor/SupervisorHierarchySpec.scala similarity index 53% rename from akka-core/src/test/scala/SupervisorHierarchySpec.scala rename to akka-core/src/test/scala/actor/supervisor/SupervisorHierarchySpec.scala index 75751e3d58..ffc9dbd860 100644 --- a/akka-core/src/test/scala/SupervisorHierarchySpec.scala +++ b/akka-core/src/test/scala/actor/supervisor/SupervisorHierarchySpec.scala @@ -6,12 +6,27 @@ package se.scalablesolutions.akka.actor import org.scalatest.junit.JUnitSuite import org.junit.Test -import java.lang.Throwable + import Actor._ import se.scalablesolutions.akka.config.OneForOneStrategy + import java.util.concurrent.{TimeUnit, CountDownLatch} +object SupervisorHierarchySpec { + class FireWorkerException(msg: String) extends Exception(msg) + + class CountDownActor(countDown: CountDownLatch) extends Actor { + protected def receive = { case _ => () } + override def postRestart(reason: Throwable) = countDown.countDown + } + + class CrasherActor extends Actor { + protected def receive = { case _ => () } + } +} + class SupervisorHierarchySpec extends JUnitSuite { + import SupervisorHierarchySpec._ @Test def killWorkerShouldRestartMangerAndOtherWorkers = { @@ -19,7 +34,7 @@ class SupervisorHierarchySpec extends JUnitSuite { val workerOne = actorOf(new CountDownActor(countDown)) val workerTwo = actorOf(new CountDownActor(countDown)) - val workerThree = actorOf(new CountDownActor( 
countDown)) + val workerThree = actorOf(new CountDownActor(countDown)) val boss = actorOf(new Actor{ self.trapExit = List(classOf[Throwable]) @@ -35,19 +50,32 @@ class SupervisorHierarchySpec extends JUnitSuite { manager.startLink(workerTwo) manager.startLink(workerThree) - workerOne ! Exit(workerOne, new RuntimeException("Fire the worker!")) + workerOne ! Exit(workerOne, new FireWorkerException("Fire the worker!")) // manager + all workers should be restarted by only killing a worker // manager doesn't trap exits, so boss will restart manager - assert(countDown.await(4, TimeUnit.SECONDS)) + assert(countDown.await(2, TimeUnit.SECONDS)) } - class CountDownActor(countDown: CountDownLatch) extends Actor { + @Test + def supervisorShouldReceiveNotificationMessageWhenMaximumNumberOfRestartsWithinTimeRangeIsReached = { + val countDown = new CountDownLatch(2) + val crasher = actorOf(new CountDownActor(countDown)) + val boss = actorOf(new Actor{ + self.trapExit = List(classOf[Throwable]) + self.faultHandler = Some(OneForOneStrategy(1, 5000)) + protected def receive = { + case MaximumNumberOfRestartsWithinTimeRangeReached(_, _, _, _) => + countDown.countDown + } + }).start + boss.startLink(crasher) - protected def receive = { case _ => () } + crasher ! Exit(crasher, new FireWorkerException("Fire the worker!")) + crasher ! 
Exit(crasher, new FireWorkerException("Fire the worker!")) - override def postRestart(reason: Throwable) = countDown.countDown + assert(countDown.await(2, TimeUnit.SECONDS)) } } diff --git a/akka-core/src/test/scala/SupervisorSpec.scala b/akka-core/src/test/scala/actor/supervisor/SupervisorSpec.scala similarity index 99% rename from akka-core/src/test/scala/SupervisorSpec.scala rename to akka-core/src/test/scala/actor/supervisor/SupervisorSpec.scala index fcbedd476b..01eb9cb006 100644 --- a/akka-core/src/test/scala/SupervisorSpec.scala +++ b/akka-core/src/test/scala/actor/supervisor/SupervisorSpec.scala @@ -6,7 +6,6 @@ package se.scalablesolutions.akka.actor import se.scalablesolutions.akka.config.ScalaConfig._ import se.scalablesolutions.akka.config.OneForOneStrategy -import se.scalablesolutions.akka.dispatch.Dispatchers import se.scalablesolutions.akka.{OneWay, Die, Ping} import Actor._ diff --git a/akka-core/src/test/scala/NestedTransactionalActiveObjectSpec.scala b/akka-core/src/test/scala/actor/typed-actor/NestedTransactionalTypedActorSpec.scala similarity index 54% rename from akka-core/src/test/scala/NestedTransactionalActiveObjectSpec.scala rename to akka-core/src/test/scala/actor/typed-actor/NestedTransactionalTypedActorSpec.scala index 0a47d1c9d6..7338e8df41 100644 --- a/akka-core/src/test/scala/NestedTransactionalActiveObjectSpec.scala +++ b/akka-core/src/test/scala/actor/typed-actor/NestedTransactionalTypedActorSpec.scala @@ -1,4 +1,4 @@ -/** + /** * Copyright (C) 2009-2010 Scalable Solutions AB */ @@ -11,158 +11,92 @@ import org.scalatest.BeforeAndAfterAll import org.scalatest.junit.JUnitRunner import org.junit.runner.RunWith -import se.scalablesolutions.akka.config.Config -import se.scalablesolutions.akka.config._ -import se.scalablesolutions.akka.config.ActiveObjectConfigurator -import se.scalablesolutions.akka.config.JavaConfig._ import se.scalablesolutions.akka.actor._ -/* @RunWith(classOf[JUnitRunner]) -class NestedTransactionalActiveObjectSpec 
extends -<<<<<<< HEAD:akka-core/src/test/scala/NestedTransactionalActiveObjectSpec.scala +class NestedTransactionalTypedActorSpec extends Spec with ShouldMatchers with BeforeAndAfterAll { -======= - Spec with - ShouldMatchers with - BeforeAndAfterAll { ->>>>>>> 38e8bea3fe6a7e9fcc9c5f353124144739bdc234:akka-core/src/test/scala/NestedTransactionalActiveObjectSpec.scala - private val conf = new ActiveObjectConfigurator private var messageLog = "" - override def beforeAll { - Config.config - conf.configure( - new RestartStrategy(new AllForOne, 3, 5000, List(classOf[Exception]).toArray), - List( - new Component(classOf[TransactionalActiveObject], - new LifeCycle(new Permanent), - 10000), - new Component(classOf[NestedTransactionalActiveObject], - new LifeCycle(new Permanent), - 10000), - new Component(classOf[ActiveObjectFailer], - new LifeCycle(new Permanent), - 10000) - ).toArray).supervise - } - override def afterAll { - conf.stop + // ActorRegistry.shutdownAll } - describe("Transactional nested in-memory Active Object") { + describe("Declaratively nested supervised transactional in-memory TypedActor") { it("map should not rollback state for stateful server in case of success") { - val stateful = conf.getInstance(classOf[TransactionalActiveObject]) - stateful.init + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) stateful.setMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init") // set init state - Thread.sleep(100) - val nested = conf.getInstance(classOf[NestedTransactionalActiveObject]) - nested.init + val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) nested.setMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init") // set init state - Thread.sleep(100) stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state", nested) // transactionrequired - 
Thread.sleep(100) stateful.getMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess") should equal("new state") nested.getMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess") should equal("new state") } it("map should rollback state for stateful server in case of failure") { - val stateful = conf.getInstance(classOf[TransactionalActiveObject]) - stateful.init + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state - Thread.sleep(100) - val nested = conf.getInstance(classOf[NestedTransactionalActiveObject]) - nested.init + val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) nested.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state - Thread.sleep(100) - val failer = conf.getInstance(classOf[ActiveObjectFailer]) + val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl]) try { stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer) - Thread.sleep(100) fail("should have thrown an exception") } catch { case e => {} } stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init") - Thread.sleep(100) nested.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init") } it("vector should not rollback state for stateful server in case of success") { - val stateful = conf.getInstance(classOf[TransactionalActiveObject]) - stateful.init + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) stateful.setVectorState("init") // set init state - Thread.sleep(100) - val nested = conf.getInstance(classOf[NestedTransactionalActiveObject]) - nested.init - Thread.sleep(100) + 
val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) nested.setVectorState("init") // set init state - Thread.sleep(100) stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state", nested) // transactionrequired - Thread.sleep(100) stateful.getVectorState should equal("new state") - Thread.sleep(100) nested.getVectorState should equal("new state") } it("vector should rollback state for stateful server in case of failure") { - val stateful = conf.getInstance(classOf[TransactionalActiveObject]) - stateful.init + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) stateful.setVectorState("init") // set init state - Thread.sleep(100) - val nested = conf.getInstance(classOf[NestedTransactionalActiveObject]) - nested.init + val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) nested.setVectorState("init") // set init state - Thread.sleep(100) - val failer = conf.getInstance(classOf[ActiveObjectFailer]) + val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl]) try { stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer) - Thread.sleep(100) fail("should have thrown an exception") } catch { case e => {} } stateful.getVectorState should equal("init") - Thread.sleep(100) nested.getVectorState should equal("init") } it("ref should not rollback state for stateful server in case of success") { - val stateful = conf.getInstance(classOf[TransactionalActiveObject]) - stateful.init - val nested = conf.getInstance(classOf[NestedTransactionalActiveObject]) - nested.init + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) + val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], 
classOf[NestedTransactionalTypedActorImpl]) stateful.setRefState("init") // set init state - Thread.sleep(100) nested.setRefState("init") // set init state - Thread.sleep(100) stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state", nested) - Thread.sleep(100) stateful.getRefState should equal("new state") - Thread.sleep(100) nested.getRefState should equal("new state") } it("ref should rollback state for stateful server in case of failure") { - val stateful = conf.getInstance(classOf[TransactionalActiveObject]) - stateful.init - val nested = conf.getInstance(classOf[NestedTransactionalActiveObject]) - nested.init + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) + val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) stateful.setRefState("init") // set init state - Thread.sleep(100) nested.setRefState("init") // set init state - Thread.sleep(100) - val failer = conf.getInstance(classOf[ActiveObjectFailer]) + val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl]) try { stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer) - Thread.sleep(100) fail("should have thrown an exception") } catch { case e => {} } stateful.getRefState should equal("init") - Thread.sleep(100) nested.getRefState should equal("init") } } } -*/ diff --git a/akka-core/src/test/scala/actor/typed-actor/RestartNestedTransactionalTypedActorSpec.scala b/akka-core/src/test/scala/actor/typed-actor/RestartNestedTransactionalTypedActorSpec.scala new file mode 100644 index 0000000000..1769a5c47b --- /dev/null +++ b/akka-core/src/test/scala/actor/typed-actor/RestartNestedTransactionalTypedActorSpec.scala @@ -0,0 +1,118 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import org.scalatest.Spec 
+import org.scalatest.Assertions +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.BeforeAndAfterAll +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith + +import se.scalablesolutions.akka.config.Config +import se.scalablesolutions.akka.config._ +import se.scalablesolutions.akka.config.TypedActorConfigurator +import se.scalablesolutions.akka.config.JavaConfig._ +import se.scalablesolutions.akka.actor._ + +@RunWith(classOf[JUnitRunner]) +class RestartNestedTransactionalTypedActorSpec extends + Spec with + ShouldMatchers with + BeforeAndAfterAll { + + private val conf = new TypedActorConfigurator + private var messageLog = "" + + override def beforeAll { + /* + Config.config + conf.configure( + new RestartStrategy(new AllForOne, 3, 5000, List(classOf[Exception]).toArray), + List( + new Component(classOf[TransactionalTypedActor], + new LifeCycle(new Permanent), + 10000), + new Component(classOf[NestedTransactionalTypedActor], + new LifeCycle(new Permanent), + 10000), + new Component(classOf[TypedActorFailer], + new LifeCycle(new Permanent), + 10000) + ).toArray).supervise + */ + } + + override def afterAll { + /* + conf.stop + ActorRegistry.shutdownAll + */ + } + + describe("Restart nested supervised transactional Typed Actor") { +/* + it("map should rollback state for stateful server in case of failure") { + val stateful = conf.getInstance(classOf[TransactionalTypedActor]) + stateful.init + stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state + + val nested = conf.getInstance(classOf[NestedTransactionalTypedActor]) + nested.init + nested.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state + + val failer = conf.getInstance(classOf[TypedActorFailer]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer) + + fail("should have thrown an exception") + } catch { case e => {} } + 
stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init") + + nested.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init") + } + + it("vector should rollback state for stateful server in case of failure") { + val stateful = conf.getInstance(classOf[TransactionalTypedActor]) + stateful.init + stateful.setVectorState("init") // set init state + + val nested = conf.getInstance(classOf[NestedTransactionalTypedActor]) + nested.init + nested.setVectorState("init") // set init state + + val failer = conf.getInstance(classOf[TypedActorFailer]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer) + + fail("should have thrown an exception") + } catch { case e => {} } + stateful.getVectorState should equal("init") + + nested.getVectorState should equal("init") + } + + it("ref should rollback state for stateful server in case of failure") { + val stateful = conf.getInstance(classOf[TransactionalTypedActor]) + stateful.init + val nested = conf.getInstance(classOf[NestedTransactionalTypedActor]) + nested.init + stateful.setRefState("init") // set init state + + nested.setRefState("init") // set init state + + val failer = conf.getInstance(classOf[TypedActorFailer]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer) + + fail("should have thrown an exception") + } catch { case e => {} } + stateful.getRefState should equal("init") + + nested.getRefState should equal("init") + } + */ + } +} diff --git a/akka-core/src/test/scala/actor/typed-actor/RestartTransactionalTypedActorSpec.scala b/akka-core/src/test/scala/actor/typed-actor/RestartTransactionalTypedActorSpec.scala new file mode 100644 index 0000000000..56b1e6ec5b --- /dev/null +++ b/akka-core/src/test/scala/actor/typed-actor/RestartTransactionalTypedActorSpec.scala @@ -0,0 +1,92 @@ +/** + * Copyright (C) 2009-2010 Scalable 
Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import org.scalatest.Spec +import org.scalatest.Assertions +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.BeforeAndAfterAll +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith + +import se.scalablesolutions.akka.config.Config +import se.scalablesolutions.akka.config._ +import se.scalablesolutions.akka.config.TypedActorConfigurator +import se.scalablesolutions.akka.config.JavaConfig._ +import se.scalablesolutions.akka.actor._ + +@RunWith(classOf[JUnitRunner]) +class RestartTransactionalTypedActorSpec extends + Spec with + ShouldMatchers with + BeforeAndAfterAll { + + private val conf = new TypedActorConfigurator + private var messageLog = "" + + def before { + Config.config + conf.configure( + new RestartStrategy(new AllForOne, 3, 5000, List(classOf[Exception]).toArray), + List( + new Component( + classOf[TransactionalTypedActor], + new LifeCycle(new Temporary), + 10000), + new Component( + classOf[TypedActorFailer], + new LifeCycle(new Temporary), + 10000) + ).toArray).supervise + } + + def after { + conf.stop + ActorRegistry.shutdownAll + } + + describe("Restart supervised transactional Typed Actor ") { +/* + it("map should rollback state for stateful server in case of failure") { + before + val stateful = conf.getInstance(classOf[TransactionalTypedActor]) + stateful.init + stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") + val failer = conf.getInstance(classOf[TypedActorFailer]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) + fail("should have thrown an exception") + } catch { case e => {} } + stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init") + after + } + + it("vector should rollback state for stateful server in case of failure") { + before + val stateful = conf.getInstance(classOf[TransactionalTypedActor]) + 
stateful.init + stateful.setVectorState("init") // set init state + val failer = conf.getInstance(classOf[TypedActorFailer]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) + fail("should have thrown an exception") + } catch { case e => {} } + stateful.getVectorState should equal("init") + after + } + + it("ref should rollback state for stateful server in case of failure") { + val stateful = conf.getInstance(classOf[TransactionalTypedActor]) + stateful.init + stateful.setRefState("init") // set init state + val failer = conf.getInstance(classOf[TypedActorFailer]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) + fail("should have thrown an exception") + } catch { case e => {} } + stateful.getRefState should equal("init") + } +*/ } +} diff --git a/akka-core/src/test/scala/TransactionalActiveObjectSpec.scala b/akka-core/src/test/scala/actor/typed-actor/TransactionalTypedActorSpec.scala similarity index 59% rename from akka-core/src/test/scala/TransactionalActiveObjectSpec.scala rename to akka-core/src/test/scala/actor/typed-actor/TransactionalTypedActorSpec.scala index b42c137e33..b55f52c875 100644 --- a/akka-core/src/test/scala/TransactionalActiveObjectSpec.scala +++ b/akka-core/src/test/scala/actor/typed-actor/TransactionalTypedActorSpec.scala @@ -11,61 +11,32 @@ import org.scalatest.BeforeAndAfterAll import org.scalatest.junit.JUnitRunner import org.junit.runner.RunWith -import se.scalablesolutions.akka.config.Config -import se.scalablesolutions.akka.config._ -import se.scalablesolutions.akka.config.ActiveObjectConfigurator -import se.scalablesolutions.akka.config.JavaConfig._ import se.scalablesolutions.akka.actor._ -/* -@RunWith(classOf[JUnitRunner]) -class TransactionalActiveObjectSpec extends -<<<<<<< HEAD:akka-core/src/test/scala/TransactionalActiveObjectSpec.scala - Spec with - ShouldMatchers with - BeforeAndAfterAll { -======= - Spec with - 
ShouldMatchers with - BeforeAndAfterAll { ->>>>>>> 38e8bea3fe6a7e9fcc9c5f353124144739bdc234:akka-core/src/test/scala/TransactionalActiveObjectSpec.scala - private val conf = new ActiveObjectConfigurator +@RunWith(classOf[JUnitRunner]) +class TransactionalTypedActorSpec extends + Spec with + ShouldMatchers with + BeforeAndAfterAll { + private var messageLog = "" - override def beforeAll { - Config.config - conf.configure( - new RestartStrategy(new AllForOne, 3, 5000, List(classOf[Exception]).toArray), - List( - new Component(classOf[TransactionalActiveObject], - new LifeCycle(new Permanent), - //new RestartCallbacks("preRestart", "postRestart")), - 10000), - new Component(classOf[ActiveObjectFailer], - new LifeCycle(new Permanent), - 10000)).toArray - ).supervise - } - override def afterAll { - conf.stop +// ActorRegistry.shutdownAll } - describe("Transactional in-memory Active Object ") { - + describe("Declaratively supervised transactional in-memory Typed Actor ") { it("map should not rollback state for stateful server in case of success") { - val stateful = conf.getInstance(classOf[TransactionalActiveObject]) - stateful.init + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) stateful.setMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init") stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") stateful.getMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess") should equal("new state") } it("map should rollback state for stateful server in case of failure") { - val stateful = conf.getInstance(classOf[TransactionalActiveObject]) - stateful.init + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") - val failer = conf.getInstance(classOf[ActiveObjectFailer]) + val failer = 
TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl]) try { stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) fail("should have thrown an exception") @@ -73,11 +44,17 @@ class TransactionalActiveObjectSpec extends stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init") } - it("vector should rollback state for stateful server in case of failure") { - val stateful = conf.getInstance(classOf[TransactionalActiveObject]) - stateful.init + it("vector should not rollback state for stateful server in case of success") { + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) stateful.setVectorState("init") // set init state - val failer = conf.getInstance(classOf[ActiveObjectFailer]) + stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") + stateful.getVectorState should equal("new state") + } + + it("vector should rollback state for stateful server in case of failure") { + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) + stateful.setVectorState("init") // set init state + val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl]) try { stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) fail("should have thrown an exception") @@ -85,33 +62,22 @@ class TransactionalActiveObjectSpec extends stateful.getVectorState should equal("init") } - it("vector should not rollback state for stateful server in case of success") { - val stateful = conf.getInstance(classOf[TransactionalActiveObject]) - stateful.init - stateful.setVectorState("init") // set init state + it("ref should not rollback state for stateful server in case of success") { + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], 
classOf[TransactionalTypedActorImpl]) + stateful.setRefState("init") // set init state stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") - stateful.getVectorState should equal("new state") + stateful.getRefState should equal("new state") } it("ref should rollback state for stateful server in case of failure") { - val stateful = conf.getInstance(classOf[TransactionalActiveObject]) - stateful.init + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) stateful.setRefState("init") // set init state - val failer = conf.getInstance(classOf[ActiveObjectFailer]) + val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl]) try { stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) fail("should have thrown an exception") } catch { case e => {} } stateful.getRefState should equal("init") } - - it("ref should not rollback state for stateful server in case of success") { - val stateful = conf.getInstance(classOf[TransactionalActiveObject]) - stateful.init - stateful.setRefState("init") // set init state - stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") - stateful.getRefState should equal("new state") - } } } -*/ diff --git a/akka-core/src/test/scala/actor/typed-actor/TypedActorContextSpec.scala b/akka-core/src/test/scala/actor/typed-actor/TypedActorContextSpec.scala new file mode 100644 index 0000000000..11719650d6 --- /dev/null +++ b/akka-core/src/test/scala/actor/typed-actor/TypedActorContextSpec.scala @@ -0,0 +1,45 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import org.scalatest.Spec +import org.scalatest.Assertions +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.BeforeAndAfterAll +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith + +import 
se.scalablesolutions.akka.dispatch.DefaultCompletableFuture; + +@RunWith(classOf[JUnitRunner]) +class TypedActorContextSpec extends + Spec with + ShouldMatchers with + BeforeAndAfterAll { + + describe("TypedActorContext") { + it("context.sender should return the sender TypedActor reference") { + val pojo = TypedActor.newInstance(classOf[SimpleJavaPojo], classOf[SimpleJavaPojoImpl]) + val pojoCaller = TypedActor.newInstance(classOf[SimpleJavaPojoCaller], classOf[SimpleJavaPojoCallerImpl]) + pojoCaller.setPojo(pojo) + try { + pojoCaller.getSenderFromSimpleJavaPojo should equal (pojoCaller) + } catch { + case e => fail("no sender available") + } + } + + it("context.senderFuture should return the senderFuture TypedActor reference") { + val pojo = TypedActor.newInstance(classOf[SimpleJavaPojo], classOf[SimpleJavaPojoImpl]) + val pojoCaller = TypedActor.newInstance(classOf[SimpleJavaPojoCaller], classOf[SimpleJavaPojoCallerImpl]) + pojoCaller.setPojo(pojo) + try { + pojoCaller.getSenderFutureFromSimpleJavaPojo.getClass.getName should equal (classOf[DefaultCompletableFuture[_]].getName) + } catch { + case e => fail("no sender future available", e) + } + } + } +} diff --git a/akka-core/src/test/scala/ActiveObjectGuiceConfiguratorSpec.scala b/akka-core/src/test/scala/actor/typed-actor/TypedActorGuiceConfiguratorSpec.scala similarity index 89% rename from akka-core/src/test/scala/ActiveObjectGuiceConfiguratorSpec.scala rename to akka-core/src/test/scala/actor/typed-actor/TypedActorGuiceConfiguratorSpec.scala index 3cb871d0d8..d076ec52cf 100644 --- a/akka-core/src/test/scala/ActiveObjectGuiceConfiguratorSpec.scala +++ b/akka-core/src/test/scala/actor/typed-actor/TypedActorGuiceConfiguratorSpec.scala @@ -15,18 +15,18 @@ import org.scalatest.junit.JUnitRunner import org.junit.runner.RunWith import se.scalablesolutions.akka.config.Config -import se.scalablesolutions.akka.config.ActiveObjectConfigurator +import se.scalablesolutions.akka.config.TypedActorConfigurator import 
se.scalablesolutions.akka.config.JavaConfig._ import se.scalablesolutions.akka.dispatch._ import se.scalablesolutions.akka.dispatch.FutureTimeoutException @RunWith(classOf[JUnitRunner]) -class ActiveObjectGuiceConfiguratorSpec extends +class TypedActorGuiceConfiguratorSpec extends Spec with ShouldMatchers with BeforeAndAfterAll { - private val conf = new ActiveObjectConfigurator + private val conf = new TypedActorConfigurator private var messageLog = "" override def beforeAll { @@ -40,6 +40,7 @@ class ActiveObjectGuiceConfiguratorSpec extends List( new Component( classOf[Foo], + classOf[FooImpl], new LifeCycle(new Permanent), 1000, dispatcher), @@ -55,9 +56,9 @@ class ActiveObjectGuiceConfiguratorSpec extends override def afterAll = conf.stop - describe("ActiveObjectGuiceConfigurator") { + describe("TypedActorGuiceConfigurator") { /* - it("should inject active object using guice") { + it("should inject typed actor using guice") { messageLog = "" val foo = conf.getInstance(classOf[Foo]) val bar = conf.getInstance(classOf[Bar]) @@ -81,7 +82,7 @@ class ActiveObjectGuiceConfiguratorSpec extends } } - it("should be able to invoke active object") { + it("should be able to invoke typed actor") { messageLog = "" val foo = conf.getInstance(classOf[Foo]) messageLog += foo.foo("foo ") @@ -91,7 +92,7 @@ class ActiveObjectGuiceConfiguratorSpec extends messageLog should equal("foo return_foo before_bar ") } - it("should be able to invoke active object's invocation") { + it("should be able to invoke typed actor's invocation") { messageLog = "" val foo = conf.getInstance(classOf[Foo]) val bar = conf.getInstance(classOf[Bar]) diff --git a/akka-core/src/test/scala/actor/typed-actor/TypedActorLifecycleSpec.scala b/akka-core/src/test/scala/actor/typed-actor/TypedActorLifecycleSpec.scala new file mode 100644 index 0000000000..2db8820f9c --- /dev/null +++ b/akka-core/src/test/scala/actor/typed-actor/TypedActorLifecycleSpec.scala @@ -0,0 +1,169 @@ +package se.scalablesolutions.akka.actor 
+ +import org.junit.runner.RunWith +import org.scalatest.{BeforeAndAfterAll, Spec} +import org.scalatest.junit.JUnitRunner +import org.scalatest.matchers.ShouldMatchers + +import se.scalablesolutions.akka.actor.TypedActor._ + +import se.scalablesolutions.akka.config.{OneForOneStrategy, TypedActorConfigurator} +import se.scalablesolutions.akka.config.JavaConfig._ + +import java.util.concurrent.CountDownLatch + +/** + * @author Martin Krasser + */ +@RunWith(classOf[JUnitRunner]) +class TypedActorLifecycleSpec extends Spec with ShouldMatchers with BeforeAndAfterAll { + var conf1: TypedActorConfigurator = _ + var conf2: TypedActorConfigurator = _ + + override protected def beforeAll() = { + val strategy = new RestartStrategy(new AllForOne(), 3, 1000, Array(classOf[Exception])) + val comp3 = new Component(classOf[SamplePojo], classOf[SamplePojoImpl], new LifeCycle(new Permanent()), 1000) + val comp4 = new Component(classOf[SamplePojo], classOf[SamplePojoImpl], new LifeCycle(new Temporary()), 1000) + conf1 = new TypedActorConfigurator().configure(strategy, Array(comp3)).supervise + conf2 = new TypedActorConfigurator().configure(strategy, Array(comp4)).supervise + } + + override protected def afterAll() = { + conf1.stop + conf2.stop + } + + describe("TypedActor lifecycle management") { + it("should restart supervised, non-annotated typed actor on failure") { + SamplePojoImpl.reset + val obj = conf1.getInstance[SamplePojo](classOf[SamplePojo]) + val cdl = new CountDownLatch(2) + SamplePojoImpl.latch = cdl + assert(AspectInitRegistry.initFor(obj) ne null) + try { + obj.fail + fail("expected exception not thrown") + } catch { + case e: RuntimeException => { + cdl.await + assert(SamplePojoImpl._pre) + assert(SamplePojoImpl._post) + assert(!SamplePojoImpl._down) + assert(AspectInitRegistry.initFor(obj) ne null) + } + } + } + + it("should shutdown supervised, non-annotated typed actor on failure") { + SamplePojoImpl.reset + val obj = 
conf2.getInstance[SamplePojo](classOf[SamplePojo]) + val cdl = new CountDownLatch(1) + SamplePojoImpl.latch = cdl + assert(AspectInitRegistry.initFor(obj) ne null) + try { + obj.fail + fail("expected exception not thrown") + } catch { + case e: RuntimeException => { + cdl.await + assert(!SamplePojoImpl._pre) + assert(!SamplePojoImpl._post) + assert(SamplePojoImpl._down) + assert(AspectInitRegistry.initFor(obj) eq null) + } + } + } + + it("should shutdown non-supervised, non-initialized typed actor on TypedActor.stop") { + SamplePojoImpl.reset + val obj = TypedActor.newInstance(classOf[SamplePojo], classOf[SamplePojoImpl]) + TypedActor.stop(obj) + assert(!SamplePojoImpl._pre) + assert(!SamplePojoImpl._post) + assert(SamplePojoImpl._down) + } + + it("both preRestart and postRestart methods should be invoked when an actor is restarted") { + SamplePojoImpl.reset + val pojo = TypedActor.newInstance(classOf[SimpleJavaPojo], classOf[SimpleJavaPojoImpl]) + val supervisor = TypedActor.newInstance(classOf[SimpleJavaPojo], classOf[SimpleJavaPojoImpl]) + link(supervisor, pojo, new OneForOneStrategy(3, 2000), Array(classOf[Throwable])) + pojo.throwException + Thread.sleep(500) + SimpleJavaPojoImpl._pre should be(true) + SimpleJavaPojoImpl._post should be(true) + } + + /* + it("should shutdown non-supervised, annotated typed actor on TypedActor.stop") { + val obj = TypedActor.newInstance(classOf[SamplePojoAnnotated]) + assert(AspectInitRegistry.initFor(obj) ne null) + assert("hello akka" === obj.greet("akka")) + TypedActor.stop(obj) + assert(AspectInitRegistry.initFor(obj) eq null) + assert(!obj.pre) + assert(!obj.post) + assert(obj.down) + try { + obj.greet("akka") + fail("access to stopped typed actor") + } catch { + case e: Exception => {} + } + } + + it("should shutdown non-supervised, annotated typed actor on ActorRegistry.shutdownAll") { + val obj = TypedActor.newInstance(classOf[SamplePojoAnnotated]) + assert(AspectInitRegistry.initFor(obj) ne null) + assert("hello akka" 
=== obj.greet("akka")) + ActorRegistry.shutdownAll + assert(AspectInitRegistry.initFor(obj) eq null) + assert(!obj.pre) + assert(!obj.post) + assert(obj.down) + try { + obj.greet("akka") + fail("access to stopped typed actor") + } catch { + case e: Exception => { } + } + } + + it("should restart supervised, annotated typed actor on failure") { + val obj = conf1.getInstance[SamplePojoAnnotated](classOf[SamplePojoAnnotated]) + val cdl = obj.newCountdownLatch(2) + assert(AspectInitRegistry.initFor(obj) ne null) + try { + obj.fail + fail("expected exception not thrown") + } catch { + case e: RuntimeException => { + cdl.await + assert(obj.pre) + assert(obj.post) + assert(!obj.down) + assert(AspectInitRegistry.initFor(obj) ne null) + } + } + } + + it("should shutdown supervised, annotated typed actor on failure") { + val obj = conf2.getInstance[SamplePojoAnnotated](classOf[SamplePojoAnnotated]) + val cdl = obj.newCountdownLatch(1) + assert(AspectInitRegistry.initFor(obj) ne null) + try { + obj.fail + fail("expected exception not thrown") + } catch { + case e: RuntimeException => { + cdl.await + assert(!obj.pre) + assert(!obj.post) + assert(obj.down) + assert(AspectInitRegistry.initFor(obj) eq null) + } + } + } + */ + } +} diff --git a/akka-core/src/test/scala/ActorObjectUtilFunctionsSpec.scala b/akka-core/src/test/scala/actor/typed-actor/TypedActorUtilFunctionsSpec.scala similarity index 100% rename from akka-core/src/test/scala/ActorObjectUtilFunctionsSpec.scala rename to akka-core/src/test/scala/actor/typed-actor/TypedActorUtilFunctionsSpec.scala diff --git a/akka-core/src/test/scala/actor/untyped-actor/UntypedActorFireForgetRequestReplySpec.scala b/akka-core/src/test/scala/actor/untyped-actor/UntypedActorFireForgetRequestReplySpec.scala new file mode 100644 index 0000000000..89a05eca9c --- /dev/null +++ b/akka-core/src/test/scala/actor/untyped-actor/UntypedActorFireForgetRequestReplySpec.scala @@ -0,0 +1,86 @@ +package se.scalablesolutions.akka.actor + +import 
java.util.concurrent.{TimeUnit, CyclicBarrier, TimeoutException} + +import org.scalatest.WordSpec +import org.scalatest.matchers.MustMatchers + +import se.scalablesolutions.akka.dispatch.Dispatchers +import Actor._ + +class UntypedActorFireForgetRequestReplySpec extends WordSpec with MustMatchers { + + "An UntypedActor" should { + "reply to message sent with 'sendOneWay' using 'reply'" in { + UntypedActorTestState.finished = new CyclicBarrier(2); + UntypedActorTestState.log = "NIL"; + val replyActor = UntypedActor.actorOf(classOf[ReplyUntypedActor]).start + val senderActor = UntypedActor.actorOf(classOf[SenderUntypedActor]).start + senderActor.sendOneWay(replyActor) + senderActor.sendOneWay("ReplyToSendOneWayUsingReply") + try { UntypedActorTestState.finished.await(1L, TimeUnit.SECONDS) } + catch { case e: TimeoutException => fail("Never got the message") } + UntypedActorTestState.log must be ("Reply") + } + + "reply to message sent with 'sendOneWay' using 'sender' reference" in { + UntypedActorTestState.finished = new CyclicBarrier(2); + UntypedActorTestState.log = "NIL"; + val replyActor = UntypedActor.actorOf(classOf[ReplyUntypedActor]).start + val senderActor = UntypedActor.actorOf(classOf[SenderUntypedActor]).start + senderActor.sendOneWay(replyActor) + senderActor.sendOneWay("ReplyToSendOneWayUsingSender") + try { UntypedActorTestState.finished.await(1L, TimeUnit.SECONDS) } + catch { case e: TimeoutException => fail("Never got the message") } + UntypedActorTestState.log must be ("Reply") + } + + "reply to message sent with 'sendRequestReply' using 'reply'" in { + UntypedActorTestState.finished = new CyclicBarrier(2); + UntypedActorTestState.log = "NIL"; + val replyActor = UntypedActor.actorOf(classOf[ReplyUntypedActor]).start + val senderActor = UntypedActor.actorOf(classOf[SenderUntypedActor]).start + senderActor.sendOneWay(replyActor) + senderActor.sendOneWay("ReplyToSendRequestReplyUsingReply") + try { UntypedActorTestState.finished.await(1L, 
TimeUnit.SECONDS) } + catch { case e: TimeoutException => fail("Never got the message") } + UntypedActorTestState.log must be ("Reply") + } + + "reply to message sent with 'sendRequestReply' using 'sender future' reference" in { + UntypedActorTestState.finished = new CyclicBarrier(2); + UntypedActorTestState.log = "NIL"; + val replyActor = UntypedActor.actorOf(classOf[ReplyUntypedActor]).start + val senderActor = UntypedActor.actorOf(classOf[SenderUntypedActor]).start + senderActor.sendOneWay(replyActor) + senderActor.sendOneWay("ReplyToSendRequestReplyUsingFuture") + try { UntypedActorTestState.finished.await(1L, TimeUnit.SECONDS) } + catch { case e: TimeoutException => fail("Never got the message") } + UntypedActorTestState.log must be ("Reply") + } + + "reply to message sent with 'sendRequestReplyFuture' using 'reply'" in { + UntypedActorTestState.finished = new CyclicBarrier(2); + UntypedActorTestState.log = "NIL"; + val replyActor = UntypedActor.actorOf(classOf[ReplyUntypedActor]).start + val senderActor = UntypedActor.actorOf(classOf[SenderUntypedActor]).start + senderActor.sendOneWay(replyActor) + senderActor.sendOneWay("ReplyToSendRequestReplyFutureUsingReply") + try { UntypedActorTestState.finished.await(1L, TimeUnit.SECONDS) } + catch { case e: TimeoutException => fail("Never got the message") } + UntypedActorTestState.log must be ("Reply") + } + + "reply to message sent with 'sendRequestReplyFuture' using 'sender future' reference" in { + UntypedActorTestState.finished = new CyclicBarrier(2); + UntypedActorTestState.log = "NIL"; + val replyActor = UntypedActor.actorOf(classOf[ReplyUntypedActor]).start + val senderActor = UntypedActor.actorOf(classOf[SenderUntypedActor]).start + senderActor.sendOneWay(replyActor) + senderActor.sendOneWay("ReplyToSendRequestReplyFutureUsingFuture") + try { UntypedActorTestState.finished.await(1L, TimeUnit.SECONDS) } + catch { case e: TimeoutException => fail("Never got the message") } + UntypedActorTestState.log must be 
("Reply") + } + } +} diff --git a/akka-core/src/test/scala/ExecutorBasedEventDrivenDispatcherActorSpec.scala b/akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala similarity index 95% rename from akka-core/src/test/scala/ExecutorBasedEventDrivenDispatcherActorSpec.scala rename to akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala index b25a02db5e..9cdf43682e 100644 --- a/akka-core/src/test/scala/ExecutorBasedEventDrivenDispatcherActorSpec.scala +++ b/akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala @@ -1,9 +1,10 @@ -package se.scalablesolutions.akka.actor +package se.scalablesolutions.akka.actor.dispatch import java.util.concurrent.{CountDownLatch, TimeUnit} import org.scalatest.junit.JUnitSuite import org.junit.Test import se.scalablesolutions.akka.dispatch.Dispatchers +import se.scalablesolutions.akka.actor.Actor import Actor._ object ExecutorBasedEventDrivenDispatcherActorSpec { diff --git a/akka-core/src/test/scala/ExecutorBasedEventDrivenDispatcherActorsSpec.scala b/akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorsSpec.scala similarity index 93% rename from akka-core/src/test/scala/ExecutorBasedEventDrivenDispatcherActorsSpec.scala rename to akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorsSpec.scala index b2c9cf4422..fc8f1aa37f 100644 --- a/akka-core/src/test/scala/ExecutorBasedEventDrivenDispatcherActorsSpec.scala +++ b/akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorsSpec.scala @@ -1,10 +1,10 @@ -package se.scalablesolutions.akka.actor +package se.scalablesolutions.akka.actor.dispatch import org.scalatest.junit.JUnitSuite import org.junit.Test -import se.scalablesolutions.akka.dispatch.Dispatchers import org.scalatest.matchers.MustMatchers import java.util.concurrent.CountDownLatch +import se.scalablesolutions.akka.actor.Actor import Actor._ /** diff --git 
a/akka-core/src/test/scala/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala b/akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala similarity index 95% rename from akka-core/src/test/scala/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala rename to akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala index 8ad3e3f212..cde57a0544 100644 --- a/akka-core/src/test/scala/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala +++ b/akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala @@ -1,4 +1,4 @@ -package se.scalablesolutions.akka.actor +package se.scalablesolutions.akka.actor.dispatch import org.scalatest.matchers.MustMatchers import org.scalatest.junit.JUnitSuite @@ -6,9 +6,10 @@ import org.scalatest.junit.JUnitSuite import org.junit.Test import se.scalablesolutions.akka.dispatch.Dispatchers -import Actor._ import java.util.concurrent.{TimeUnit, CountDownLatch} +import se.scalablesolutions.akka.actor.{IllegalActorStateException, Actor} +import Actor._ object ExecutorBasedEventDrivenWorkStealingDispatcherSpec { val delayableActorDispatcher = Dispatchers.newExecutorBasedEventDrivenWorkStealingDispatcher("pooled-dispatcher") diff --git a/akka-core/src/test/scala/FutureSpec.scala b/akka-core/src/test/scala/dispatch/FutureSpec.scala similarity index 100% rename from akka-core/src/test/scala/FutureSpec.scala rename to akka-core/src/test/scala/dispatch/FutureSpec.scala diff --git a/akka-core/src/test/scala/dispatch/HawtDispatcherActorSpec.scala b/akka-core/src/test/scala/dispatch/HawtDispatcherActorSpec.scala new file mode 100644 index 0000000000..dcc8f7eafb --- /dev/null +++ b/akka-core/src/test/scala/dispatch/HawtDispatcherActorSpec.scala @@ -0,0 +1,69 @@ +package se.scalablesolutions.akka.actor.dispatch + +import java.util.concurrent.{CountDownLatch, TimeUnit} +import org.scalatest.junit.JUnitSuite +import org.junit.Test +import 
se.scalablesolutions.akka.actor.Actor +import Actor._ +import se.scalablesolutions.akka.dispatch.{HawtDispatcher, Dispatchers} + +object HawtDispatcherActorSpec { + class TestActor extends Actor { + self.dispatcher = new HawtDispatcher() + def receive = { + case "Hello" => + self.reply("World") + case "Failure" => + throw new RuntimeException("Expected exception; to test fault-tolerance") + } + } + + object OneWayTestActor { + val oneWay = new CountDownLatch(1) + } + class OneWayTestActor extends Actor { + self.dispatcher = new HawtDispatcher() + def receive = { + case "OneWay" => OneWayTestActor.oneWay.countDown + } + } +} + +class HawtDispatcherActorSpec extends JUnitSuite { + import HawtDispatcherActorSpec._ + + private val unit = TimeUnit.MILLISECONDS + + @Test def shouldSendOneWay = { + val actor = actorOf[OneWayTestActor].start + val result = actor ! "OneWay" + assert(OneWayTestActor.oneWay.await(1, TimeUnit.SECONDS)) + actor.stop + } + + @Test def shouldSendReplySync = { + val actor = actorOf[TestActor].start + val result = (actor !! ("Hello", 10000)).as[String] + assert("World" === result.get) + actor.stop + } + + @Test def shouldSendReplyAsync = { + val actor = actorOf[TestActor].start + val result = actor !! "Hello" + assert("World" === result.get.asInstanceOf[String]) + actor.stop + } + + @Test def shouldSendReceiveException = { + val actor = actorOf[TestActor].start + try { + actor !! "Failure" + fail("Should have thrown an exception") + } catch { + case e => + assert("Expected exception; to test fault-tolerance" === e.getMessage()) + } + actor.stop + } +} diff --git a/akka-core/src/test/scala/dispatch/HawtDispatcherEchoServer.scala b/akka-core/src/test/scala/dispatch/HawtDispatcherEchoServer.scala new file mode 100644 index 0000000000..208433bd4b --- /dev/null +++ b/akka-core/src/test/scala/dispatch/HawtDispatcherEchoServer.scala @@ -0,0 +1,221 @@ +/** + * Copyright (C) 2010, Progress Software Corporation and/or its + * subsidiaries or affiliates. 
All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package se.scalablesolutions.akka.actor.dispatch + +import collection.mutable.ListBuffer +import java.util.concurrent.TimeUnit +import java.net.InetSocketAddress +import java.io.IOException +import java.nio.ByteBuffer +import java.nio.channels.{SocketChannel, SelectionKey, ServerSocketChannel} + +import se.scalablesolutions.akka.actor._ +import se.scalablesolutions.akka.actor.Actor._ +import se.scalablesolutions.akka.dispatch.HawtDispatcher +import org.fusesource.hawtdispatch.DispatchSource +import org.fusesource.hawtdispatch.ScalaDispatch._ + +/** + * This is an example of how to crate an Akka actor based TCP echo server using + * the HawtDispatch dispatcher and NIO event sources. + * + */ +object HawtDispatcherEchoServer { + + private val hawt = new HawtDispatcher + var port=4444; + var useReactorPattern=true + + def main(args:Array[String]):Unit = run + + def run() = { + val server = actorOf(new Server(port)) + server.start + Scheduler.schedule(server, DisplayStats, 1, 5, TimeUnit.SECONDS) + + println("Press enter to shutdown."); + System.in.read + server ! 
Shutdown + } + + case object Shutdown + case object DisplayStats + case class SessionClosed(session:ActorRef) + + class Server(val port: Int) extends Actor { + + self.dispatcher = hawt + + var channel:ServerSocketChannel = _ + var accept_source:DispatchSource = _ + var sessions = ListBuffer[ActorRef]() + + override def init = { + channel = ServerSocketChannel.open(); + channel.socket().bind(new InetSocketAddress(port)); + channel.configureBlocking(false); + + // Setup the accept source, it will callback to the handler methods + // via the actor's mailbox so you don't need to worry about + // synchronizing with the local variables + accept_source = createSource(channel, SelectionKey.OP_ACCEPT, HawtDispatcher.queue(self)); + accept_source.setEventHandler(^{ accept }); + accept_source.setDisposer(^{ + channel.close(); + println("Closed port: "+port); + }); + + accept_source.resume + + println("Listening on port: "+port); + } + + + private def accept() = { + var socket = channel.accept(); + while( socket!=null ) { + try { + socket.configureBlocking(false); + val session = actorOf(new Session(self, socket)) + session.start() + sessions += session + } catch { + case e: Exception => + socket.close + } + socket = channel.accept(); + } + } + + def receive = { + case SessionClosed(session) => + sessions = sessions.filterNot( _ == session ) + session.stop + case DisplayStats => + sessions.foreach { session=> + session ! 
DisplayStats + } + case Shutdown => + sessions.foreach { session=> + session.stop + } + sessions.clear + accept_source.release + self.stop + } + } + + class Session(val server:ActorRef, val channel: SocketChannel) extends Actor { + + self.dispatcher = hawt + + val buffer = ByteBuffer.allocate(1024); + val remote_address = channel.socket.getRemoteSocketAddress.toString + + var read_source:DispatchSource = _ + var write_source:DispatchSource = _ + + var readCounter = 0L + var writeCounter = 0L + var closed = false + + override def init = { + + if(useReactorPattern) { + // Then we will be using the reactor pattern for handling IO: + // Pin this actor to a single thread. The read/write event sources will poll + // a Selector on the pinned thread. Since the IO events are generated on the same + // thread as where the Actor is pinned to, it can avoid a substantial amount + // thread synchronization. Plus your GC will perform better since all the IO + // processing is done on a single thread. + HawtDispatcher.pin(self) + } else { + // Then we will be using sing the proactor pattern for handling IO: + // Then the actor will not be pinned to a specific thread. The read/write + // event sources will poll a Selector and then asynchronously dispatch the + // event's to the actor via the thread pool. 
+ } + + // Setup the sources, they will callback to the handler methods + // via the actor's mailbox so you don't need to worry about + // synchronizing with the local variables + read_source = createSource(channel, SelectionKey.OP_READ, HawtDispatcher.queue(self)); + read_source.setEventHandler(^{ read }) + read_source.setCancelHandler(^{ close }) + + write_source = createSource(channel, SelectionKey.OP_READ, HawtDispatcher.queue(self)); + write_source.setEventHandler(^{ write }) + write_source.setCancelHandler(^{ close }) + + read_source.resume + println("Accepted connection from: "+remote_address); + } + + override def shutdown = { + closed = true + read_source.release + write_source.release + channel.close + } + + private def catchio(func: =>Unit):Unit = { + try { + func + } catch { + case e:IOException => close + } + } + + def read():Unit = catchio { + channel.read(buffer) match { + case -1 => + close // peer disconnected. + case 0 => + case count:Int => + readCounter += count + buffer.flip; + read_source.suspend + write_source.resume + write() + } + } + + def write() = catchio { + writeCounter += channel.write(buffer) + if (buffer.remaining == 0) { + buffer.clear + write_source.suspend + read_source.resume + } + } + + + def close() = { + if( !closed ) { + closed = true + server ! 
SessionClosed(self) + } + } + + def receive = { + case DisplayStats => + println("connection to %s reads: %,d bytes, writes: %,d".format(remote_address, readCounter, writeCounter)) + } + + } + +} diff --git a/akka-core/src/test/scala/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala b/akka-core/src/test/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala similarity index 95% rename from akka-core/src/test/scala/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala rename to akka-core/src/test/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala index a84cdfc8e5..de9b912bf5 100644 --- a/akka-core/src/test/scala/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala +++ b/akka-core/src/test/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala @@ -1,11 +1,12 @@ -package se.scalablesolutions.akka.actor +package se.scalablesolutions.akka.actor.dispatch import java.util.concurrent.{CountDownLatch, TimeUnit} import org.scalatest.junit.JUnitSuite import org.junit.Test -import Actor._ import se.scalablesolutions.akka.dispatch.Dispatchers +import se.scalablesolutions.akka.actor.Actor +import Actor._ object ReactorBasedSingleThreadEventDrivenDispatcherActorSpec { class TestActor extends Actor { diff --git a/akka-core/src/test/scala/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala b/akka-core/src/test/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala similarity index 95% rename from akka-core/src/test/scala/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala rename to akka-core/src/test/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala index f917b462b5..4001df8f56 100644 --- a/akka-core/src/test/scala/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala +++ b/akka-core/src/test/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala @@ -1,10 +1,11 @@ -package se.scalablesolutions.akka.actor 
+package se.scalablesolutions.akka.actor.dispatch import java.util.concurrent.{CountDownLatch, TimeUnit} import org.scalatest.junit.JUnitSuite import org.junit.Test import se.scalablesolutions.akka.dispatch.Dispatchers +import se.scalablesolutions.akka.actor.Actor import Actor._ object ReactorBasedThreadPoolEventDrivenDispatcherActorSpec { diff --git a/akka-core/src/test/scala/ThreadBasedActorSpec.scala b/akka-core/src/test/scala/dispatch/ThreadBasedActorSpec.scala similarity index 94% rename from akka-core/src/test/scala/ThreadBasedActorSpec.scala rename to akka-core/src/test/scala/dispatch/ThreadBasedActorSpec.scala index fc726ecf49..d69ee984d8 100644 --- a/akka-core/src/test/scala/ThreadBasedActorSpec.scala +++ b/akka-core/src/test/scala/dispatch/ThreadBasedActorSpec.scala @@ -1,10 +1,11 @@ -package se.scalablesolutions.akka.actor +package se.scalablesolutions.akka.actor.dispatch import java.util.concurrent.{CountDownLatch, TimeUnit} import org.scalatest.junit.JUnitSuite import org.junit.Test import se.scalablesolutions.akka.dispatch.Dispatchers +import se.scalablesolutions.akka.actor.Actor import Actor._ object ThreadBasedActorSpec { diff --git a/akka-core/src/test/scala/ThreadBasedDispatcherSpec.scala b/akka-core/src/test/scala/dispatch/ThreadBasedDispatcherSpec.scala similarity index 100% rename from akka-core/src/test/scala/ThreadBasedDispatcherSpec.scala rename to akka-core/src/test/scala/dispatch/ThreadBasedDispatcherSpec.scala diff --git a/akka-core/src/test/scala/ActorRegistrySpec.scala b/akka-core/src/test/scala/misc/ActorRegistrySpec.scala similarity index 100% rename from akka-core/src/test/scala/ActorRegistrySpec.scala rename to akka-core/src/test/scala/misc/ActorRegistrySpec.scala diff --git a/akka-core/src/test/scala/misc/SchedulerSpec.scala b/akka-core/src/test/scala/misc/SchedulerSpec.scala new file mode 100644 index 0000000000..1cedb27354 --- /dev/null +++ b/akka-core/src/test/scala/misc/SchedulerSpec.scala @@ -0,0 +1,86 @@ +package 
se.scalablesolutions.akka.actor + +import org.scalatest.junit.JUnitSuite +import Actor._ +import java.util.concurrent.{CountDownLatch, TimeUnit} +import se.scalablesolutions.akka.config.ScalaConfig._ +import org.multiverse.api.latches.StandardLatch +import org.junit.Test + +class SchedulerSpec extends JUnitSuite { + + def withCleanEndState(action: => Unit) { + action + Scheduler.restart + ActorRegistry.shutdownAll + } + + + @Test def schedulerShouldScheduleMoreThanOnce = withCleanEndState { + + case object Tick + val countDownLatch = new CountDownLatch(3) + val tickActor = actor { + case Tick => countDownLatch.countDown + } + // run every 50 millisec + Scheduler.schedule(tickActor, Tick, 0, 50, TimeUnit.MILLISECONDS) + + // after max 1 second it should be executed at least the 3 times already + assert(countDownLatch.await(1, TimeUnit.SECONDS)) + } + + @Test def schedulerShouldScheduleOnce = withCleanEndState { + case object Tick + val countDownLatch = new CountDownLatch(2) + val tickActor = actor { + case Tick => countDownLatch.countDown + } + // run every 50 millisec + Scheduler.scheduleOnce(tickActor, Tick, 50, TimeUnit.MILLISECONDS) + + // after 1 second the wait should fail + assert(countDownLatch.await(1, TimeUnit.SECONDS) == false) + // should still be 1 left + assert(countDownLatch.getCount == 1) + } + + /** + * ticket #307 + */ + @Test def actorRestartShouldPickUpScheduleAgain = withCleanEndState { + + object Ping + object Crash + + val restartLatch = new StandardLatch + val pingLatch = new CountDownLatch(6) + + val actor = actorOf(new Actor { + self.lifeCycle = Some(LifeCycle(Permanent)) + + def receive = { + case Ping => pingLatch.countDown + case Crash => throw new Exception("CRASH") + } + + override def postRestart(reason: Throwable) = restartLatch.open + }) + Supervisor( + SupervisorConfig( + RestartStrategy(AllForOne, 3, 1000, + List(classOf[Exception])), + Supervise( + actor, + LifeCycle(Permanent)) + :: Nil)).start + + Scheduler.schedule(actor, 
Ping, 500, 500, TimeUnit.MILLISECONDS) + // appx 2 pings before crash + Scheduler.scheduleOnce(actor, Crash, 1000, TimeUnit.MILLISECONDS) + + assert(restartLatch.tryAwait(2, TimeUnit.SECONDS)) + // should be enough time for the ping countdown to recover and reach 6 pings + assert(pingLatch.await(4, TimeUnit.SECONDS)) + } +} diff --git a/akka-core/src/test/scala/ClientInitiatedRemoteActorSpec.scala b/akka-core/src/test/scala/remote/ClientInitiatedRemoteActorSpec.scala similarity index 97% rename from akka-core/src/test/scala/ClientInitiatedRemoteActorSpec.scala rename to akka-core/src/test/scala/remote/ClientInitiatedRemoteActorSpec.scala index ea48d45252..e9ed8d4fa5 100644 --- a/akka-core/src/test/scala/ClientInitiatedRemoteActorSpec.scala +++ b/akka-core/src/test/scala/remote/ClientInitiatedRemoteActorSpec.scala @@ -1,13 +1,12 @@ -package se.scalablesolutions.akka.actor +package se.scalablesolutions.akka.actor.remote import java.util.concurrent.{CountDownLatch, TimeUnit} -import junit.framework.TestCase - import org.scalatest.junit.JUnitSuite import org.junit.{Test, Before, After} import se.scalablesolutions.akka.remote.{RemoteServer, RemoteClient} import se.scalablesolutions.akka.dispatch.Dispatchers +import se.scalablesolutions.akka.actor.{ActorRef, Actor} import Actor._ case class Send(actor: Actor) diff --git a/akka-core/src/test/scala/RemoteSupervisorSpec.scala b/akka-core/src/test/scala/remote/RemoteSupervisorSpec.scala similarity index 98% rename from akka-core/src/test/scala/RemoteSupervisorSpec.scala rename to akka-core/src/test/scala/remote/RemoteSupervisorSpec.scala index 10f8c3bcf9..936d1cf5c4 100644 --- a/akka-core/src/test/scala/RemoteSupervisorSpec.scala +++ b/akka-core/src/test/scala/remote/RemoteSupervisorSpec.scala @@ -2,20 +2,17 @@ * Copyright (C) 2009-2010 Scalable Solutions AB */ -package se.scalablesolutions.akka.actor +package se.scalablesolutions.akka.actor.remote import java.util.concurrent.{LinkedBlockingQueue, TimeUnit, BlockingQueue} 
import se.scalablesolutions.akka.serialization.BinaryString import se.scalablesolutions.akka.config.ScalaConfig._ -import se.scalablesolutions.akka.config.Config -import se.scalablesolutions.akka.remote.{RemoteNode, RemoteServer, RemoteClient} +import se.scalablesolutions.akka.remote.{RemoteServer, RemoteClient} import se.scalablesolutions.akka.OneWay -import se.scalablesolutions.akka.dispatch.Dispatchers -import Actor._ - import org.scalatest.junit.JUnitSuite -import org.junit.Test import org.junit.{Test, Before, After} +import se.scalablesolutions.akka.actor.{SupervisorFactory, Supervisor, ActorRef, Actor} +import Actor._ object Log { val messageLog: BlockingQueue[String] = new LinkedBlockingQueue[String] diff --git a/akka-core/src/test/scala/RemoteTransactionalActiveObjectSpec.scala b/akka-core/src/test/scala/remote/RemoteTransactionalTypedActorSpec.scala similarity index 71% rename from akka-core/src/test/scala/RemoteTransactionalActiveObjectSpec.scala rename to akka-core/src/test/scala/remote/RemoteTransactionalTypedActorSpec.scala index 897318ce7d..71d44fd4bd 100644 --- a/akka-core/src/test/scala/RemoteTransactionalActiveObjectSpec.scala +++ b/akka-core/src/test/scala/remote/RemoteTransactionalTypedActorSpec.scala @@ -13,25 +13,25 @@ import org.junit.runner.RunWith import org.junit.{Test, Before, After} import se.scalablesolutions.akka.config.Config -import se.scalablesolutions.akka.config.ActiveObjectConfigurator +import se.scalablesolutions.akka.config.TypedActorConfigurator import se.scalablesolutions.akka.remote.{RemoteNode, RemoteServer, RemoteClient} -object RemoteTransactionalActiveObjectSpec { +object RemoteTransactionalTypedActorSpec { val HOSTNAME = "localhost" val PORT = 9988 var server: RemoteServer = null } @RunWith(classOf[JUnitRunner]) -class RemoteTransactionalActiveObjectSpec extends +class RemoteTransactionalTypedActorSpec extends Spec with ShouldMatchers with BeforeAndAfterAll { - import RemoteTransactionalActiveObjectSpec._ + import 
RemoteTransactionalTypedActorSpec._ Config.config - private val conf = new ActiveObjectConfigurator + private val conf = new TypedActorConfigurator private var messageLog = "" override def beforeAll = { @@ -51,19 +51,19 @@ class RemoteTransactionalActiveObjectSpec extends } } - describe("Remote transactional in-memory Active Object ") { + describe("Remote transactional in-memory TypedActor ") { /* it("map should not rollback state for stateful server in case of success") { - val stateful = ActiveObject.newRemoteInstance(classOf[TransactionalActiveObject], 1000, HOSTNAME, PORT) + val stateful = TypedActor.newRemoteInstance(classOf[TransactionalTypedActor], 1000, HOSTNAME, PORT) stateful.setMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init") // set init state stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired stateful.getMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess") should equal("new state") } it("map should rollback state for stateful server in case of failure") { - val stateful = ActiveObject.newRemoteInstance(classOf[TransactionalActiveObject], 1000, HOSTNAME, PORT) + val stateful = TypedActor.newRemoteInstance(classOf[TransactionalTypedActor], 1000, HOSTNAME, PORT) stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state - val failer =ActiveObject.newRemoteInstance(classOf[ActiveObjectFailer], 1000, HOSTNAME, PORT) //conf.getInstance(classOf[ActiveObjectFailer]) + val failer =TypedActor.newRemoteInstance(classOf[TypedActorFailer], 1000, HOSTNAME, PORT) //conf.getInstance(classOf[TypedActorFailer]) try { stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) // call failing transactionrequired method fail("should have thrown an exception") @@ -72,16 +72,16 @@ class RemoteTransactionalActiveObjectSpec extends } it("vector should not rollback state for stateful 
server in case of success") { - val stateful = ActiveObject.newRemoteInstance(classOf[TransactionalActiveObject], 1000, HOSTNAME, PORT) + val stateful = TypedActor.newRemoteInstance(classOf[TransactionalTypedActor], 1000, HOSTNAME, PORT) stateful.setVectorState("init") // set init state stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired stateful.getVectorState should equal("new state") } it("vector should rollback state for stateful server in case of failure") { - val stateful = ActiveObject.newRemoteInstance(classOf[TransactionalActiveObject], 1000, HOSTNAME, PORT) + val stateful = TypedActor.newRemoteInstance(classOf[TransactionalTypedActor], 1000, HOSTNAME, PORT) stateful.setVectorState("init") // set init state - val failer =ActiveObject.newRemoteInstance(classOf[ActiveObjectFailer], 1000, HOSTNAME, PORT) //conf.getInstance(classOf[ActiveObjectFailer]) + val failer =TypedActor.newRemoteInstance(classOf[TypedActorFailer], 1000, HOSTNAME, PORT) //conf.getInstance(classOf[TypedActorFailer]) try { stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) // call failing transactionrequired method fail("should have thrown an exception") @@ -90,16 +90,16 @@ class RemoteTransactionalActiveObjectSpec extends } it("ref should not rollback state for stateful server in case of success") { - val stateful = ActiveObject.newRemoteInstance(classOf[TransactionalActiveObject], 1000, HOSTNAME, PORT) + val stateful = TypedActor.newRemoteInstance(classOf[TransactionalTypedActor], 1000, HOSTNAME, PORT) stateful.setRefState("init") // set init state stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired stateful.getRefState should equal("new state") } it("ref should rollback state for stateful server in case of failure") { - val stateful = ActiveObject.newRemoteInstance(classOf[TransactionalActiveObject], 1000, HOSTNAME, 
PORT) + val stateful = TypedActor.newRemoteInstance(classOf[TransactionalTypedActor], 1000, HOSTNAME, PORT) stateful.setRefState("init") // set init state - val failer =ActiveObject.newRemoteInstance(classOf[ActiveObjectFailer], 1000, HOSTNAME, PORT) //conf.getInstance(classOf[ActiveObjectFailer]) + val failer =TypedActor.newRemoteInstance(classOf[TypedActorFailer], 1000, HOSTNAME, PORT) //conf.getInstance(classOf[TypedActorFailer]) try { stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) // call failing transactionrequired method fail("should have thrown an exception") diff --git a/akka-core/src/test/scala/ServerInitiatedRemoteActorSample.scala b/akka-core/src/test/scala/remote/ServerInitiatedRemoteActorSample.scala similarity index 94% rename from akka-core/src/test/scala/ServerInitiatedRemoteActorSample.scala rename to akka-core/src/test/scala/remote/ServerInitiatedRemoteActorSample.scala index dad54e6d3a..b9b8946bee 100644 --- a/akka-core/src/test/scala/ServerInitiatedRemoteActorSample.scala +++ b/akka-core/src/test/scala/remote/ServerInitiatedRemoteActorSample.scala @@ -1,4 +1,4 @@ -package sample +package se.scalablesolutions.akka.actor.remote import se.scalablesolutions.akka.actor.Actor import se.scalablesolutions.akka.remote.{RemoteClient, RemoteNode} diff --git a/akka-core/src/test/scala/ServerInitiatedRemoteActorSpec.scala b/akka-core/src/test/scala/remote/ServerInitiatedRemoteActorSpec.scala similarity index 84% rename from akka-core/src/test/scala/ServerInitiatedRemoteActorSpec.scala rename to akka-core/src/test/scala/remote/ServerInitiatedRemoteActorSpec.scala index 51685a0073..59cfe3778d 100644 --- a/akka-core/src/test/scala/ServerInitiatedRemoteActorSpec.scala +++ b/akka-core/src/test/scala/remote/ServerInitiatedRemoteActorSpec.scala @@ -1,12 +1,12 @@ -package se.scalablesolutions.akka.actor +package se.scalablesolutions.akka.actor.remote import java.util.concurrent.{CountDownLatch, TimeUnit} import 
org.scalatest.junit.JUnitSuite import org.junit.{Test, Before, After} -import Actor._ import se.scalablesolutions.akka.remote.{RemoteServer, RemoteClient} -import se.scalablesolutions.akka.dispatch.Dispatchers +import se.scalablesolutions.akka.actor.{ActorRef, Actor} +import Actor._ object ServerInitiatedRemoteActorSpec { val HOSTNAME = "localhost" @@ -52,8 +52,6 @@ object ServerInitiatedRemoteActorSpec { class ServerInitiatedRemoteActorSpec extends JUnitSuite { import ServerInitiatedRemoteActorSpec._ - import se.scalablesolutions.akka.config.Config.config - private val unit = TimeUnit.MILLISECONDS @Before @@ -84,7 +82,7 @@ class ServerInitiatedRemoteActorSpec extends JUnitSuite { @Test def shouldSendWithBang { val actor = RemoteClient.actorFor( - "se.scalablesolutions.akka.actor.ServerInitiatedRemoteActorSpec$RemoteActorSpecActorUnidirectional", + "se.scalablesolutions.akka.actor.remote.ServerInitiatedRemoteActorSpec$RemoteActorSpecActorUnidirectional", 5000L, HOSTNAME, PORT) val result = actor ! "OneWay" @@ -95,7 +93,7 @@ class ServerInitiatedRemoteActorSpec extends JUnitSuite { @Test def shouldSendWithBangBangAndGetReply { val actor = RemoteClient.actorFor( - "se.scalablesolutions.akka.actor.ServerInitiatedRemoteActorSpec$RemoteActorSpecActorBidirectional", + "se.scalablesolutions.akka.actor.remote.ServerInitiatedRemoteActorSpec$RemoteActorSpecActorBidirectional", 5000L, HOSTNAME, PORT) val result = actor !! 
"Hello" @@ -107,7 +105,7 @@ class ServerInitiatedRemoteActorSpec extends JUnitSuite { def shouldSendWithBangAndGetReplyThroughSenderRef { implicit val timeout = 500000000L val actor = RemoteClient.actorFor( - "se.scalablesolutions.akka.actor.ServerInitiatedRemoteActorSpec$RemoteActorSpecActorBidirectional", + "se.scalablesolutions.akka.actor.remote.ServerInitiatedRemoteActorSpec$RemoteActorSpecActorBidirectional", timeout, HOSTNAME, PORT) val sender = actorOf[RemoteActorSpecActorAsyncSender] @@ -122,7 +120,7 @@ class ServerInitiatedRemoteActorSpec extends JUnitSuite { def shouldSendWithBangBangAndReplyWithException { implicit val timeout = 500000000L val actor = RemoteClient.actorFor( - "se.scalablesolutions.akka.actor.ServerInitiatedRemoteActorSpec$RemoteActorSpecActorBidirectional", + "se.scalablesolutions.akka.actor.remote.ServerInitiatedRemoteActorSpec$RemoteActorSpecActorBidirectional", timeout, HOSTNAME, PORT) try { diff --git a/akka-core/src/test/scala/ShutdownSpec.scala b/akka-core/src/test/scala/remote/ShutdownSpec.scala similarity index 100% rename from akka-core/src/test/scala/ShutdownSpec.scala rename to akka-core/src/test/scala/remote/ShutdownSpec.scala diff --git a/akka-core/src/test/scala/RoutingSpec.scala b/akka-core/src/test/scala/routing/RoutingSpec.scala similarity index 96% rename from akka-core/src/test/scala/RoutingSpec.scala rename to akka-core/src/test/scala/routing/RoutingSpec.scala index 06b8117c24..747363efe6 100644 --- a/akka-core/src/test/scala/RoutingSpec.scala +++ b/akka-core/src/test/scala/routing/RoutingSpec.scala @@ -1,6 +1,5 @@ -package se.scalablesolutions.akka.routing +package se.scalablesolutions.akka.actor.routing -import se.scalablesolutions.akka.config.ScalaConfig._ import se.scalablesolutions.akka.actor.Actor import se.scalablesolutions.akka.actor.Actor._ import se.scalablesolutions.akka.util.Logging @@ -9,12 +8,11 @@ import org.scalatest.Suite import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner import 
org.scalatest.matchers.MustMatchers -import org.junit.{Before, After, Test} - -import scala.collection.mutable.HashSet +import org.junit.Test import java.util.concurrent.atomic.AtomicInteger import java.util.concurrent.{CountDownLatch, TimeUnit} +import se.scalablesolutions.akka.routing._ @RunWith(classOf[JUnitRunner]) class RoutingSpec extends junit.framework.TestCase with Suite with MustMatchers with Logging { diff --git a/akka-core/src/test/scala/ProtobufActorMessageSerializationSpec.scala b/akka-core/src/test/scala/serialization/ProtobufActorMessageSerializationSpec.scala similarity index 91% rename from akka-core/src/test/scala/ProtobufActorMessageSerializationSpec.scala rename to akka-core/src/test/scala/serialization/ProtobufActorMessageSerializationSpec.scala index fbf5411e02..011c656f8d 100644 --- a/akka-core/src/test/scala/ProtobufActorMessageSerializationSpec.scala +++ b/akka-core/src/test/scala/serialization/ProtobufActorMessageSerializationSpec.scala @@ -1,12 +1,11 @@ -package se.scalablesolutions.akka.actor +package se.scalablesolutions.akka.actor.serialization -import java.util.concurrent.{CountDownLatch, TimeUnit} +import java.util.concurrent.TimeUnit import org.scalatest.junit.JUnitSuite import org.junit.{Test, Before, After} import se.scalablesolutions.akka.remote.{RemoteServer, RemoteClient} -import se.scalablesolutions.akka.dispatch.Dispatchers - +import se.scalablesolutions.akka.actor.{ProtobufProtocol, Actor} import ProtobufProtocol.ProtobufPOJO import Actor._ diff --git a/akka-core/src/test/scala/SerializableTypeClassActorSpec.scala b/akka-core/src/test/scala/serialization/SerializableTypeClassActorSpec.scala similarity index 95% rename from akka-core/src/test/scala/SerializableTypeClassActorSpec.scala rename to akka-core/src/test/scala/serialization/SerializableTypeClassActorSpec.scala index 99a2b99dc9..a9bcc35790 100644 --- a/akka-core/src/test/scala/SerializableTypeClassActorSpec.scala +++ 
b/akka-core/src/test/scala/serialization/SerializableTypeClassActorSpec.scala @@ -1,16 +1,15 @@ -package se.scalablesolutions.akka.actor +package se.scalablesolutions.akka.actor.serialization -import Actor._ import org.scalatest.Spec -import org.scalatest.Assertions import org.scalatest.matchers.ShouldMatchers import org.scalatest.BeforeAndAfterAll import org.scalatest.junit.JUnitRunner import org.junit.runner.RunWith -import com.google.protobuf.Message +import se.scalablesolutions.akka.actor._ import ActorSerialization._ +import Actor._ @RunWith(classOf[JUnitRunner]) class SerializableTypeClassActorSpec extends @@ -110,6 +109,8 @@ class SerializableTypeClassActorSpec extends val actor2 = fromBinary(bytes) actor2.start (actor2 !! "hello").getOrElse("_") should equal("world 3") + + actor2.receiveTimeout should equal (Some(1000)) } it("should be able to serialize and deserialize a MyStatelessActorWithMessagesInMailbox") { @@ -164,7 +165,7 @@ class MyStatelessActor extends Actor { class MyStatelessActorWithMessagesInMailbox extends Actor { def receive = { case "hello" => - println("# messages in mailbox " + self.mailbox.size) + println("# messages in mailbox " + self.mailboxSize) Thread.sleep(500) case "hello-reply" => self.reply("world") } @@ -172,6 +173,7 @@ class MyStatelessActorWithMessagesInMailbox extends Actor { @serializable class MyJavaSerializableActor extends Actor { var count = 0 + self.receiveTimeout = Some(1000) def receive = { case "hello" => diff --git a/akka-core/src/test/scala/SerializerSpec.scala b/akka-core/src/test/scala/serialization/SerializerSpec.scala similarity index 93% rename from akka-core/src/test/scala/SerializerSpec.scala rename to akka-core/src/test/scala/serialization/SerializerSpec.scala index 95b291ed9d..bff387ec99 100644 --- a/akka-core/src/test/scala/SerializerSpec.scala +++ b/akka-core/src/test/scala/serialization/SerializerSpec.scala @@ -1,9 +1,7 @@ package se.scalablesolutions.akka.serialization -import junit.framework.TestCase 
- import org.scalatest.junit.JUnitSuite -import org.junit.{Test, Before, After} +import org.junit.Test import scala.reflect.BeanInfo diff --git a/akka-core/src/test/scala/stm/RefSpec.scala b/akka-core/src/test/scala/stm/RefSpec.scala new file mode 100644 index 0000000000..f04c1a7c44 --- /dev/null +++ b/akka-core/src/test/scala/stm/RefSpec.scala @@ -0,0 +1,162 @@ +package se.scalablesolutions.akka.stm + +import org.scalatest.WordSpec +import org.scalatest.matchers.MustMatchers + +class RefSpec extends WordSpec with MustMatchers { + + import se.scalablesolutions.akka.stm.local._ + + "A Ref" should { + + "optionally accept an initial value" in { + val emptyRef = Ref[Int] + val empty = atomic { emptyRef.getOption } + + empty must be(None) + + val ref = Ref(3) + val value = atomic { ref.get } + + value must be (3) + } + + "keep the initial value, even if the first transaction is rolled back" in { + val ref = Ref(3) + + try { + atomic(DefaultLocalTransactionFactory) { + ref.swap(5) + throw new Exception + } + } catch { + case e => {} + } + + val value = atomic { ref.get } + + value must be (3) + } + + "be settable using set" in { + val ref = Ref[Int] + + atomic { ref.set(3) } + + val value = atomic { ref.get } + + value must be (3) + } + + "be settable using swap" in { + val ref = Ref[Int] + + atomic { ref.swap(3) } + + val value = atomic { ref.get } + + value must be (3) + } + + "be changeable using alter" in { + val ref = Ref(0) + + def increment = atomic { + ref alter (_ + 1) + } + + increment + increment + increment + + val value = atomic { ref.get } + + value must be (3) + } + + "not be changeable using alter if no value has been set" in { + val ref = Ref[Int] + + def increment = atomic { + ref alter (_ + 1) + } + + evaluating { increment } must produce [RuntimeException] + } + + "be able to be mapped" in { + val ref1 = Ref(1) + + val ref2 = atomic { + ref1 map (_ + 1) + } + + val value1 = atomic { ref1.get } + val value2 = atomic { ref2.get } + + value1 must be (1) 
+ value2 must be (2) + } + + "be able to be used in a 'foreach' for comprehension" in { + val ref = Ref(3) + + var result = 0 + + atomic { + for (value <- ref) { + result += value + } + } + + result must be (3) + } + + "be able to be used in a 'map' for comprehension" in { + val ref1 = Ref(1) + + val ref2 = atomic { + for (value <- ref1) yield value + 2 + } + + val value2 = atomic { ref2.get } + + value2 must be (3) + } + + "be able to be used in a 'flatMap' for comprehension" in { + val ref1 = Ref(1) + val ref2 = Ref(2) + + val ref3 = atomic { + for { + value1 <- ref1 + value2 <- ref2 + } yield value1 + value2 + } + + val value3 = atomic { ref3.get } + + value3 must be (3) + } + + "be able to be used in a 'filter' for comprehension" in { + val ref1 = Ref(1) + + val refLess2 = atomic { + for (value <- ref1 if value < 2) yield value + } + + val optLess2 = atomic { refLess2.getOption } + + val refGreater2 = atomic { + for (value <- ref1 if value > 2) yield value + } + + val optGreater2 = atomic { refGreater2.getOption } + + optLess2 must be (Some(1)) + optGreater2 must be (None) + } + } +} diff --git a/akka-core/src/test/scala/StmSpec.scala b/akka-core/src/test/scala/stm/StmSpec.scala similarity index 68% rename from akka-core/src/test/scala/StmSpec.scala rename to akka-core/src/test/scala/stm/StmSpec.scala index 16912726a9..133725bb98 100644 --- a/akka-core/src/test/scala/StmSpec.scala +++ b/akka-core/src/test/scala/stm/StmSpec.scala @@ -3,72 +3,66 @@ package se.scalablesolutions.akka.stm import se.scalablesolutions.akka.actor.{Actor, Transactor} import Actor._ -import org.scalatest.Spec -import org.scalatest.Assertions -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.BeforeAndAfterAll -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith +import org.scalatest.WordSpec +import org.scalatest.matchers.MustMatchers -@RunWith(classOf[JUnitRunner]) -class StmSpec extends - Spec with - ShouldMatchers with - BeforeAndAfterAll { +class 
StmSpec extends WordSpec with MustMatchers { - describe("Local STM") { - it("should be able to do multiple consecutive atomic {..} statements") { - import local._ + "Local STM" should { - lazy val ref = Ref[Int]() + import se.scalablesolutions.akka.stm.local._ + + "be able to do multiple consecutive atomic {..} statements" in { + val ref = Ref(0) def increment = atomic { - ref.swap(ref.get.getOrElse(0) + 1) + ref alter (_ + 1) } def total: Int = atomic { - ref.get.getOrElse(0) + ref.getOrElse(0) } increment increment increment - total should equal(3) + + total must be (3) } - it("should be able to do nested atomic {..} statements") { - import local._ - - lazy val ref = Ref[Int]() + "be able to do nested atomic {..} statements" in { + val ref = Ref(0) def increment = atomic { - ref.swap(ref.get.getOrElse(0) + 1) + ref alter (_ + 1) } + def total: Int = atomic { - ref.get.getOrElse(0) + ref.getOrElse(0) } atomic { increment increment } + atomic { increment - total should equal(3) + total must be (3) } } - it("should roll back failing nested atomic {..} statements") { - import local._ - - lazy val ref = Ref[Int]() + "roll back failing nested atomic {..} statements" in { + val ref = Ref(0) def increment = atomic { - ref.swap(ref.get.getOrElse(0) + 1) + ref alter (_ + 1) } + def total: Int = atomic { - ref.get.getOrElse(0) + ref.getOrElse(0) } + try { atomic(DefaultLocalTransactionFactory) { increment @@ -78,21 +72,22 @@ class StmSpec extends } catch { case e => {} } - total should equal(0) + + total must be (0) } } - describe("Global STM") { - it("should be able to initialize with atomic {..} block inside actor constructor") { + "Global STM" should { + "be able to initialize with atomic {..} block inside actor constructor" in { import GlobalTransactionVectorTestActor._ try { val actor = actorOf[GlobalTransactionVectorTestActor].start actor !! Add(5) val size1 = (actor !! 
Size).as[Int].getOrElse(fail("Could not get Vector::size")) - size1 should equal(2) + size1 must be (2) actor !! Add(2) val size2 = (actor !! Size).as[Int].getOrElse(fail("Could not get Vector::size")) - size2 should equal(3) + size2 must be (3) } catch { case e => e.printStackTrace @@ -100,25 +95,26 @@ class StmSpec extends } } } + /* - describe("Transactor") { - it("should be able receive message sent with !! and pass it along to nested transactor with !! and receive reply; multiple times in a row") { + "Transactor" should { + "be able receive message sent with !! and pass it along to nested transactor with !! and receive reply; multiple times in a row" in { import GlobalTransactionVectorTestActor._ val actor = actorOf[NestedTransactorLevelOneActor].start actor !! (Add(2), 10000) val size1 = (actor !! (Size, 10000)).as[Int].getOrElse(fail("Could not get size")) - size1 should equal(2) + size1 must be (2) actor !! (Add(7), 10000) actor ! "HiLevelOne" val size2 = (actor !! (Size, 10000)).as[Int].getOrElse(fail("Could not get size")) - size2 should equal(7) + size2 must be (7) actor !! (Add(0), 10000) actor ! "HiLevelTwo" val size3 = (actor !! (Size, 10000)).as[Int].getOrElse(fail("Could not get size")) - size3 should equal(0) + size3 must be (0) actor !! (Add(3), 10000) val size4 = (actor !! 
(Size, 10000)).as[Int].getOrElse(fail("Could not get size")) - size4 should equal(3) + size4 must be (3) } } */ @@ -129,6 +125,7 @@ object GlobalTransactionVectorTestActor { case object Size case object Success } + class GlobalTransactionVectorTestActor extends Actor { import GlobalTransactionVectorTestActor._ import se.scalablesolutions.akka.stm.global._ @@ -148,6 +145,7 @@ class GlobalTransactionVectorTestActor extends Actor { class NestedTransactorLevelOneActor extends Actor { import GlobalTransactionVectorTestActor._ + private val nested = actorOf[NestedTransactorLevelTwoActor].start self.timeout = 10000 @@ -165,6 +163,7 @@ class NestedTransactorLevelOneActor extends Actor { class NestedTransactorLevelTwoActor extends Transactor { import GlobalTransactionVectorTestActor._ + private val ref = Ref(0) self.timeout = 10000 diff --git a/akka-core/src/test/scala/ticket/Ticket001Spec.scala b/akka-core/src/test/scala/ticket/Ticket001Spec.scala new file mode 100644 index 0000000000..b94796d9a3 --- /dev/null +++ b/akka-core/src/test/scala/ticket/Ticket001Spec.scala @@ -0,0 +1,13 @@ +package se.scalablesolutions.akka.actor.ticket + +import org.scalatest.WordSpec +import org.scalatest.matchers.MustMatchers + +class Ticket001Spec extends WordSpec with MustMatchers { + + "An XXX" should { + "do YYY" in { + 1 must be (1) + } + } +} diff --git a/akka-http/src/main/scala/AkkaCometServlet.scala b/akka-http/src/main/scala/AkkaCometServlet.scala index 92db0835cf..0313dfe6f7 100644 --- a/akka-http/src/main/scala/AkkaCometServlet.scala +++ b/akka-http/src/main/scala/AkkaCometServlet.scala @@ -50,6 +50,11 @@ class AkkaServlet extends AtmosphereServlet with Logging { addInitParameter(AtmosphereServlet.PROPERTY_USE_STREAM,"true") addInitParameter("com.sun.jersey.config.property.packages",c.getList("akka.rest.resource_packages").mkString(";")) addInitParameter("com.sun.jersey.spi.container.ResourceFilters",c.getList("akka.rest.filters").mkString(",")) + 
c.getInt("akka.rest.maxInactiveActivity").foreach { value => + log.info("MAX_INACTIVE:%s",value.toString) + addInitParameter(CometSupport.MAX_INACTIVE,value.toString) + } + val servlet = new AtmosphereRestServlet { override def getInitParameter(key : String) = AkkaServlet.this.getInitParameter(key) diff --git a/akka-jta/src/main/scala/AtomikosTransactionService.scala b/akka-jta/src/main/scala/AtomikosTransactionService.scala index 937f31a54e..f85ff56e6a 100644 --- a/akka-jta/src/main/scala/AtomikosTransactionService.scala +++ b/akka-jta/src/main/scala/AtomikosTransactionService.scala @@ -10,6 +10,7 @@ import com.atomikos.icatch.jta.{J2eeTransactionManager, J2eeUserTransaction} import com.atomikos.icatch.config.{TSInitInfo, UserTransactionService, UserTransactionServiceImp} import se.scalablesolutions.akka.config.Config._ +import se.scalablesolutions.akka.util.Duration import se.scalablesolutions.akka.stm.{TransactionService, TransactionContainer} object AtomikosTransactionService extends AtomikosTransactionService @@ -20,8 +21,8 @@ object AtomikosTransactionService extends AtomikosTransactionService * @author Jonas Bonér */ class AtomikosTransactionService extends TransactionService with TransactionProtocol { + val JTA_TRANSACTION_TIMEOUT = Duration(config.getInt("akka.jta.timeout", 60), TIME_UNIT) - val JTA_TRANSACTION_TIMEOUT: Int = config.getInt("akka.jta.timeout", 60000) / 1000 private val txService: UserTransactionService = new UserTransactionServiceImp private val info: TSInitInfo = txService.createTSInitInfo @@ -29,10 +30,11 @@ class AtomikosTransactionService extends TransactionService with TransactionProt try { txService.init(info) val tm: TransactionManager = new J2eeTransactionManager - tm.setTransactionTimeout(JTA_TRANSACTION_TIMEOUT) + tm.setTransactionTimeout(JTA_TRANSACTION_TIMEOUT.toSeconds.toInt) tm } catch { - case e => throw new SystemException("Could not create a new Atomikos J2EE Transaction Manager, due to: " + e.toString) + case e => throw 
new SystemException( + "Could not create a new Atomikos J2EE Transaction Manager, due to: " + e.toString) } ))) // TODO: gracefully shutdown of the TM diff --git a/akka-karaf/akka-features/src/main/resources/features.xml b/akka-karaf/akka-features/src/main/resources/features.xml new file mode 100644 index 0000000000..7506820024 --- /dev/null +++ b/akka-karaf/akka-features/src/main/resources/features.xml @@ -0,0 +1,22 @@ + + + + + mvn:com.weiglewilczek.scala-lang-osgi/scala-library/2.8.0.RC2 + mvn:org.eclipse.scalamodules/scalamodules-core/2.0-M2 + + + + + mvn:se.scalablesolutions.akka.akka-wrap/dispatch-json_2.8.0.RC3_osgi/0.7.4 + mvn:org.objenesis/objenesis/1.2 + mvn:sjson.json/sjson/0.6-SNAPSHOT + + + + sjson + mvn:se.scalablesolutions.akka.akka-wrap/jgroups-wrapper_2.8.0.RC3_osgi/2.9.0.GA + mvn:org.jboss.netty/netty/3.2.0.CR1 + mvn:se.scalablesolutions.akka/akka-core_2.8.0.RC3_osgi/0.9 + + diff --git a/akka-persistence/akka-persistence-common/src/main/scala/Storage.scala b/akka-persistence/akka-persistence-common/src/main/scala/Storage.scala index 135ee584b9..a734d4d815 100644 --- a/akka-persistence/akka-persistence-common/src/main/scala/Storage.scala +++ b/akka-persistence/akka-persistence-common/src/main/scala/Storage.scala @@ -90,7 +90,7 @@ trait PersistentMap[K, V] extends scala.collection.mutable.Map[K, V] val storage: MapStorageBackend[K, V] def commit = { - if (shouldClearOnCommit.isDefined && shouldClearOnCommit.get.get) storage.removeMapStorageFor(uuid) + if (shouldClearOnCommit.isDefined && shouldClearOnCommit.get) storage.removeMapStorageFor(uuid) removedEntries.toList.foreach(key => storage.removeMapStorageFor(uuid, key)) storage.insertMapStorageEntriesFor(uuid, newAndUpdatedEntries.toList) newAndUpdatedEntries.clear @@ -281,7 +281,7 @@ trait PersistentRef[T] extends Transactional with Committable with Abortable { val storage: RefStorageBackend[T] def commit = if (ref.isDefined) { - storage.insertRefStorageFor(uuid, ref.get.get) + 
storage.insertRefStorageFor(uuid, ref.get) ref.swap(null.asInstanceOf[T]) } @@ -292,7 +292,7 @@ trait PersistentRef[T] extends Transactional with Committable with Abortable { ref.swap(elem) } - def get: Option[T] = if (ref.isDefined) ref.get else storage.getRefStorageFor(uuid) + def get: Option[T] = if (ref.isDefined) ref.getOption else storage.getRefStorageFor(uuid) def isDefined: Boolean = ref.isDefined || storage.getRefStorageFor(uuid).isDefined @@ -365,7 +365,7 @@ trait PersistentQueue[A] extends scala.collection.mutable.Queue[A] case DEQ => storage.dequeue(uuid) } } - if (shouldClearOnCommit.isDefined && shouldClearOnCommit.get.get) { + if (shouldClearOnCommit.isDefined && shouldClearOnCommit.get) { storage.remove(uuid) } enqueuedNDequeuedEntries.clear @@ -386,7 +386,7 @@ trait PersistentQueue[A] extends scala.collection.mutable.Queue[A] register elems.foreach(e => { enqueuedNDequeuedEntries.add((Some(e), ENQ)) - localQ.get.get.enqueue(e) + localQ.get.enqueue(e) }) } @@ -395,15 +395,15 @@ trait PersistentQueue[A] extends scala.collection.mutable.Queue[A] // record for later playback enqueuedNDequeuedEntries.add((None, DEQ)) - val i = pickMeForDQ.get.get + val i = pickMeForDQ.get if (i < storage.size(uuid)) { // still we can DQ from storage pickMeForDQ.swap(i + 1) storage.peek(uuid, i, 1)(0) } else { // check we have transient candidates in localQ for DQ - if (localQ.get.get.isEmpty == false) { - val (a, q) = localQ.get.get.dequeue + if (localQ.get.isEmpty == false) { + val (a, q) = localQ.get.dequeue localQ.swap(q) a } else throw new NoSuchElementException("trying to dequeue from empty queue") @@ -418,7 +418,7 @@ trait PersistentQueue[A] extends scala.collection.mutable.Queue[A] } override def size: Int = try { - storage.size(uuid) + localQ.get.get.length + storage.size(uuid) + localQ.get.length } catch { case e: Exception => 0 } override def isEmpty: Boolean = diff --git a/akka-persistence/akka-persistence-mongo/src/main/scala/MongoStorageBackend.scala 
b/akka-persistence/akka-persistence-mongo/src/main/scala/MongoStorageBackend.scala index d5581b373b..950165567d 100644 --- a/akka-persistence/akka-persistence-mongo/src/main/scala/MongoStorageBackend.scala +++ b/akka-persistence/akka-persistence-mongo/src/main/scala/MongoStorageBackend.scala @@ -78,10 +78,8 @@ private[akka] object MongoStorageBackend extends val o = dbo.get(VALUE).asInstanceOf[Map[AnyRef, AnyRef]] o.putAll(m) - // remove existing reference - removeMapStorageFor(name) - // and insert - coll.insert(new BasicDBObject().append(KEY, name).append(VALUE, o)) + val newdbo = new BasicDBObject().append(KEY, name).append(VALUE, o) + coll.update(new BasicDBObject().append(KEY, name), newdbo, true, false) } } } diff --git a/akka-persistence/akka-persistence-redis/src/main/scala/RedisStorageBackend.scala b/akka-persistence/akka-persistence-redis/src/main/scala/RedisStorageBackend.scala index 33b1c04a73..eef60784a0 100644 --- a/akka-persistence/akka-persistence-redis/src/main/scala/RedisStorageBackend.scala +++ b/akka-persistence/akka-persistence-redis/src/main/scala/RedisStorageBackend.scala @@ -72,7 +72,7 @@ private [akka] object RedisStorageBackend extends // need an explicit definition in akka-conf val nodes = config.getList("akka.storage.redis.cluster") - val db = + def connect() = nodes match { case Seq() => // no cluster defined @@ -89,6 +89,8 @@ private [akka] object RedisStorageBackend extends } } + var db = connect() + /** * Map storage in Redis. *

@@ -411,6 +413,10 @@ private [akka] object RedisStorageBackend extends try { body } catch { + case e: RedisConnectionException => { + db = connect() + body + } case e: java.lang.NullPointerException => throw new StorageException("Could not connect to Redis server") case e => diff --git a/akka-samples/akka-sample-ants/src/main/scala/Ants.scala b/akka-samples/akka-sample-ants/src/main/scala/Ants.scala index f1cc0ba628..b1e5cee0b8 100644 --- a/akka-samples/akka-sample-ants/src/main/scala/Ants.scala +++ b/akka-samples/akka-sample-ants/src/main/scala/Ants.scala @@ -68,9 +68,9 @@ object World { lazy val ants = setup lazy val evaporator = actorOf[Evaporator].start - private val snapshotFactory = TransactionFactory(readonly = true, familyName = "snapshot") + private val snapshotFactory = TransactionFactory(readonly = true, familyName = "snapshot", hooks = false) - def snapshot = atomic(snapshotFactory) { Array.tabulate(Dim, Dim)(place(_, _).get) } + def snapshot = atomic(snapshotFactory) { Array.tabulate(Dim, Dim)(place(_, _).getOption) } def place(loc: (Int, Int)) = places(loc._1)(loc._2) @@ -139,7 +139,7 @@ class AntActor(initLoc: (Int, Int)) extends WorldActor { val locRef = Ref(initLoc) val name = "ant-from-" + initLoc._1 + "-" + initLoc._2 - implicit val txFactory = TransactionFactory(familyName = name) + implicit val txFactory = TransactionFactory(familyName = name, hooks = false) val homing = (p: Place) => p.pher + (100 * (if (p.home) 0 else 1)) val foraging = (p: Place) => p.pher + p.food @@ -211,7 +211,7 @@ class Evaporator extends WorldActor { import Config._ import World._ - implicit val txFactory = TransactionFactory(familyName = "evaporator") + implicit val txFactory = TransactionFactory(familyName = "evaporator", hooks = false) val evaporate = (pher: Float) => pher * EvapRate def act = for (x <- 0 until Dim; y <- 0 until Dim) { diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/BeanImpl.java 
b/akka-samples/akka-sample-camel/src/main/java/sample/camel/BeanImpl.java index 10437e7624..5fa9e8c599 100644 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/BeanImpl.java +++ b/akka-samples/akka-sample-camel/src/main/java/sample/camel/BeanImpl.java @@ -1,9 +1,10 @@ package sample.camel; +import se.scalablesolutions.akka.actor.TypedActor; /** * @author Martin Krasser */ -public class BeanImpl implements BeanIntf { +public class BeanImpl extends TypedActor implements BeanIntf { public String foo(String s) { return "hello " + s; diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1.java new file mode 100644 index 0000000000..f05d96c953 --- /dev/null +++ b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1.java @@ -0,0 +1,17 @@ +package sample.camel; + +import org.apache.camel.Body; +import org.apache.camel.Header; + +import se.scalablesolutions.akka.actor.annotation.consume; + +/** + * @author Martin Krasser + */ +public interface TypedConsumer1 { + @consume("file:data/input/pojo") + public void foo(String body); + + @consume("jetty:http://0.0.0.0:8877/camel/pojo") + public String bar(@Body String body, @Header("name") String header); +} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/ConsumerPojo1.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1Impl.java similarity index 68% rename from akka-samples/akka-sample-camel/src/main/java/sample/camel/ConsumerPojo1.java rename to akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1Impl.java index ed29ac30e6..dc4014fb1f 100644 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/ConsumerPojo1.java +++ b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1Impl.java @@ -3,22 +3,19 @@ package sample.camel; import org.apache.camel.Body; import org.apache.camel.Header; -import 
se.scalablesolutions.akka.actor.annotation.consume; +import se.scalablesolutions.akka.actor.TypedActor; /** * @author Martin Krasser */ -public class ConsumerPojo1 { +public class TypedConsumer1Impl extends TypedActor implements TypedConsumer1 { - @consume("file:data/input/pojo") public void foo(String body) { System.out.println("Received message:"); System.out.println(body); } - @consume("jetty:http://0.0.0.0:8877/camel/pojo") public String bar(@Body String body, @Header("name") String header) { return String.format("body=%s header=%s", body, header); } - } diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/ConsumerPojo2.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2.java similarity index 63% rename from akka-samples/akka-sample-camel/src/main/java/sample/camel/ConsumerPojo2.java rename to akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2.java index 429e6043ad..1fed5ac4ce 100644 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/ConsumerPojo2.java +++ b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2.java @@ -7,11 +7,8 @@ import se.scalablesolutions.akka.actor.annotation.consume; /** * @author Martin Krasser */ -public class ConsumerPojo2 { +public interface TypedConsumer2 { @consume("direct:default") - public String foo(String body) { - return String.format("default: %s", body); - } - + public String foo(String body); } \ No newline at end of file diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2Impl.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2Impl.java new file mode 100644 index 0000000000..8a5fe1e4c9 --- /dev/null +++ b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2Impl.java @@ -0,0 +1,13 @@ +package sample.camel; + +import se.scalablesolutions.akka.actor.TypedActor; + +/** + * @author Martin Krasser + */ +public class TypedConsumer2Impl extends 
TypedActor implements TypedConsumer2 { + + public String foo(String body) { + return String.format("default: %s", body); + } +} \ No newline at end of file diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedRemoteConsumer1.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedRemoteConsumer1.java new file mode 100644 index 0000000000..7db5452d93 --- /dev/null +++ b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedRemoteConsumer1.java @@ -0,0 +1,15 @@ +package sample.camel; + +import org.apache.camel.Body; +import org.apache.camel.Header; + +import se.scalablesolutions.akka.actor.annotation.consume; + +/** + * @author Martin Krasser + */ +public interface TypedRemoteConsumer1 { + + @consume("jetty:http://localhost:6644/camel/remote-typed-actor-1") + public String foo(@Body String body, @Header("name") String header); +} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteConsumerPojo1.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedRemoteConsumer1Impl.java similarity index 62% rename from akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteConsumerPojo1.java rename to akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedRemoteConsumer1Impl.java index ab7e878b0d..1cc32b3e07 100644 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteConsumerPojo1.java +++ b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedRemoteConsumer1Impl.java @@ -3,16 +3,14 @@ package sample.camel; import org.apache.camel.Body; import org.apache.camel.Header; -import se.scalablesolutions.akka.actor.annotation.consume; +import se.scalablesolutions.akka.actor.TypedActor; /** * @author Martin Krasser */ -public class RemoteConsumerPojo1 { +public class TypedRemoteConsumer1Impl extends TypedActor implements TypedRemoteConsumer1 { - @consume("jetty:http://localhost:6644/camel/remote-active-object-1") public String foo(@Body String body, 
@Header("name") String header) { return String.format("remote1: body=%s header=%s", body, header); } - } diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteConsumerPojo2.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedRemoteConsumer2.java similarity index 75% rename from akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteConsumerPojo2.java rename to akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedRemoteConsumer2.java index e982fe5025..2b1c068b46 100644 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteConsumerPojo2.java +++ b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedRemoteConsumer2.java @@ -7,9 +7,9 @@ import se.scalablesolutions.akka.actor.annotation.consume; /** * @author Martin Krasser */ -public class RemoteConsumerPojo2 { +public class TypedRemoteConsumer2 { - @consume("jetty:http://localhost:6644/camel/remote-active-object-2") + @consume("jetty:http://localhost:6644/camel/remote-typed-actor-2") public String foo(@Body String body, @Header("name") String header) { return String.format("remote2: body=%s header=%s", body, header); } diff --git a/akka-samples/akka-sample-camel/src/main/resources/context-boot.xml b/akka-samples/akka-sample-camel/src/main/resources/context-jms.xml similarity index 100% rename from akka-samples/akka-sample-camel/src/main/resources/context-boot.xml rename to akka-samples/akka-sample-camel/src/main/resources/context-jms.xml diff --git a/akka-samples/akka-sample-camel/src/main/resources/context-standalone.xml b/akka-samples/akka-sample-camel/src/main/resources/context-standalone.xml index 36645a936d..9cf91056da 100644 --- a/akka-samples/akka-sample-camel/src/main/resources/context-standalone.xml +++ b/akka-samples/akka-sample-camel/src/main/resources/context-standalone.xml @@ -20,6 +20,6 @@ http://camel.apache.org/schema/spring/camel-spring.xsd"> - + diff --git 
a/akka-samples/akka-sample-camel/src/main/scala/Actors.scala b/akka-samples/akka-sample-camel/src/main/scala/Actors.scala index 7ab8b0dae5..64bdb19dfd 100644 --- a/akka-samples/akka-sample-camel/src/main/scala/Actors.scala +++ b/akka-samples/akka-sample-camel/src/main/scala/Actors.scala @@ -1,8 +1,9 @@ package sample.camel -import se.scalablesolutions.akka.actor.annotation.consume +import org.apache.camel.Exchange + import se.scalablesolutions.akka.actor.{Actor, ActorRef, RemoteActor} -import se.scalablesolutions.akka.camel.{Producer, Message, Consumer} +import se.scalablesolutions.akka.camel.{Failure, Producer, Message, Consumer} import se.scalablesolutions.akka.util.Logging /** @@ -29,9 +30,7 @@ class RemoteActor2 extends Actor with Consumer { class Producer1 extends Actor with Producer { def endpointUri = "direct:welcome" - override def oneway = false // default - override def async = true // default } class Consumer1 extends Actor with Consumer with Logging { @@ -42,8 +41,9 @@ class Consumer1 extends Actor with Consumer with Logging { } } -@consume("jetty:http://0.0.0.0:8877/camel/default") class Consumer2 extends Actor { + def endpointUri = "jetty:http://0.0.0.0:8877/camel/default" + def receive = { case msg: Message => self.reply("Hello %s" format msg.bodyAs[String]) } @@ -112,3 +112,32 @@ class PublisherBridge(uri: String, publisher: ActorRef) extends Actor with Consu } } } + +class HttpConsumer(producer: ActorRef) extends Actor with Consumer { + def endpointUri = "jetty:http://0.0.0.0:8875/" + + protected def receive = { + case msg => producer forward msg + } +} + +class HttpProducer(transformer: ActorRef) extends Actor with Producer { + def endpointUri = "jetty://http://akkasource.org/?bridgeEndpoint=true" + + override protected def receiveBeforeProduce = { + // only keep Exchange.HTTP_PATH message header (which needed by bridge endpoint) + case msg: Message => msg.setHeaders(msg.headers(Set(Exchange.HTTP_PATH))) + } + + override protected def 
receiveAfterProduce = { + // do not reply but forward result to transformer + case msg => transformer forward msg + } +} + +class HttpTransformer extends Actor { + protected def receive = { + case msg: Message => self.reply(msg.transformBody[String] {_ replaceAll ("Akka ", "AKKA ")}) + case msg: Failure => self.reply(msg) + } +} diff --git a/akka-samples/akka-sample-camel/src/main/scala/Boot.scala b/akka-samples/akka-sample-camel/src/main/scala/Boot.scala index 2cfb56e64f..98c7c34b7e 100644 --- a/akka-samples/akka-sample-camel/src/main/scala/Boot.scala +++ b/akka-samples/akka-sample-camel/src/main/scala/Boot.scala @@ -7,7 +7,7 @@ import org.apache.camel.spring.spi.ApplicationContextRegistry import org.springframework.context.support.ClassPathXmlApplicationContext import se.scalablesolutions.akka.actor.Actor._ -import se.scalablesolutions.akka.actor.{ActiveObject, Supervisor} +import se.scalablesolutions.akka.actor.{TypedActor, Supervisor} import se.scalablesolutions.akka.camel.CamelContextManager import se.scalablesolutions.akka.config.ScalaConfig._ @@ -16,16 +16,6 @@ import se.scalablesolutions.akka.config.ScalaConfig._ */ class Boot { - // ----------------------------------------------------------------------- - // Create CamelContext with Spring-based registry and custom route builder - // ----------------------------------------------------------------------- - - val context = new ClassPathXmlApplicationContext("/context-boot.xml", getClass) - val registry = new ApplicationContextRegistry(context) - - CamelContextManager.init(new DefaultCamelContext(registry)) - CamelContextManager.context.addRoutes(new CustomRouteBuilder) - // ----------------------------------------------------------------------- // Basic example // ----------------------------------------------------------------------- @@ -41,9 +31,17 @@ class Boot { // Supervise(actorOf[Consumer2], LifeCycle(Permanent)) :: Nil)) // ----------------------------------------------------------------------- - // 
Tranformer example + // Custom Camel route example // ----------------------------------------------------------------------- + // Create CamelContext and a Spring-based registry + val context = new ClassPathXmlApplicationContext("/context-jms.xml", getClass) + val registry = new ApplicationContextRegistry(context) + + // Use a custom Camel context and a custom touter builder + CamelContextManager.init(new DefaultCamelContext(registry)) + CamelContextManager.context.addRoutes(new CustomRouteBuilder) + val producer = actorOf[Producer1] val mediator = actorOf(new Transformer(producer)) val consumer = actorOf(new Consumer3(mediator)) @@ -52,12 +50,20 @@ class Boot { mediator.start consumer.start + // ----------------------------------------------------------------------- + // Asynchronous consumer-producer example (Akka homepage transformation) + // ----------------------------------------------------------------------- + + val httpTransformer = actorOf(new HttpTransformer).start + val httpProducer = actorOf(new HttpProducer(httpTransformer)).start + val httpConsumer = actorOf(new HttpConsumer(httpProducer)).start + // ----------------------------------------------------------------------- // Publish subscribe examples // ----------------------------------------------------------------------- // - // Cometd example commented out because camel-cometd is broken in Camel 2.3 + // Cometd example commented out because camel-cometd is broken since Camel 2.3 // //val cometdUri = "cometd://localhost:8111/test/abc?baseResource=file:target" @@ -83,7 +89,7 @@ class Boot { // Active object example // ----------------------------------------------------------------------- - ActiveObject.newInstance(classOf[ConsumerPojo1]) + TypedActor.newInstance(classOf[TypedConsumer1], classOf[TypedConsumer1Impl]) } /** diff --git a/akka-samples/akka-sample-camel/src/main/scala/ClientApplication.scala b/akka-samples/akka-sample-camel/src/main/scala/ClientApplication.scala index 
467d715360..95f9703f5b 100644 --- a/akka-samples/akka-sample-camel/src/main/scala/ClientApplication.scala +++ b/akka-samples/akka-sample-camel/src/main/scala/ClientApplication.scala @@ -1,7 +1,7 @@ package sample.camel import se.scalablesolutions.akka.actor.Actor._ -import se.scalablesolutions.akka.actor.{ActiveObject, Actor, ActorRef} +import se.scalablesolutions.akka.actor.{TypedActor, Actor, ActorRef} import se.scalablesolutions.akka.camel.Message import se.scalablesolutions.akka.remote.RemoteClient @@ -18,15 +18,15 @@ object ClientApplication { val actor1 = actorOf[RemoteActor1] val actor2 = RemoteClient.actorFor("remote2", "localhost", 7777) - val actobj1 = ActiveObject.newRemoteInstance(classOf[RemoteConsumerPojo1], "localhost", 7777) - //val actobj2 = TODO: create reference to server-managed active object (RemoteConsumerPojo2) + val actobj1 = TypedActor.newRemoteInstance(classOf[TypedRemoteConsumer1], classOf[TypedRemoteConsumer1Impl], "localhost", 7777) + //val actobj2 = TODO: create reference to server-managed typed actor (TypedRemoteConsumer2) actor1.start println(actor1 !! Message("actor1")) // activates and publishes actor remotely println(actor2 !! Message("actor2")) // actor already activated and published remotely - println(actobj1.foo("x", "y")) // activates and publishes active object methods remotely + println(actobj1.foo("x", "y")) // activates and publishes typed actor methods remotely // ... 
} diff --git a/akka-samples/akka-sample-camel/src/main/scala/ServerApplication.scala b/akka-samples/akka-sample-camel/src/main/scala/ServerApplication.scala index 7d90e89720..8aa9293815 100644 --- a/akka-samples/akka-sample-camel/src/main/scala/ServerApplication.scala +++ b/akka-samples/akka-sample-camel/src/main/scala/ServerApplication.scala @@ -14,7 +14,7 @@ object ServerApplication { // def main(args: Array[String]) { - val camelService = CamelService.newInstance.load + val camelService = CamelService.start RemoteNode.start("localhost", 7777) RemoteNode.register("remote2", actorOf[RemoteActor2].start) } diff --git a/akka-samples/akka-sample-camel/src/main/scala/StandaloneApplication.scala b/akka-samples/akka-sample-camel/src/main/scala/StandaloneApplication.scala index 0a7304ba0e..6fa207a6e0 100644 --- a/akka-samples/akka-sample-camel/src/main/scala/StandaloneApplication.scala +++ b/akka-samples/akka-sample-camel/src/main/scala/StandaloneApplication.scala @@ -5,8 +5,9 @@ import org.apache.camel.builder.RouteBuilder import org.apache.camel.spring.spi.ApplicationContextRegistry import org.springframework.context.support.ClassPathXmlApplicationContext -import se.scalablesolutions.akka.camel.{CamelService, CamelContextManager} -import se.scalablesolutions.akka.actor.{ActorRegistry, ActiveObject} +import se.scalablesolutions.akka.actor.{Actor, ActorRegistry, TypedActor} +import se.scalablesolutions.akka.camel._ +import se.scalablesolutions.akka.util.Logging /** * @author Martin Krasser @@ -15,34 +16,35 @@ object StandaloneApplication { def main(args: Array[String]) { import CamelContextManager.context - // 'externally' register active objects + // 'externally' register typed actors val registry = new SimpleRegistry - registry.put("pojo1", ActiveObject.newInstance(classOf[BeanIntf], new BeanImpl)) - registry.put("pojo2", ActiveObject.newInstance(classOf[BeanImpl])) + registry.put("sample", TypedActor.newInstance(classOf[BeanIntf], classOf[BeanImpl])) // customize 
CamelContext CamelContextManager.init(new DefaultCamelContext(registry)) CamelContextManager.context.addRoutes(new StandaloneApplicationRoute) // start CamelService - val camelService = CamelService.newInstance.load + CamelService.start - // access 'externally' registered active objects - assert("hello msg1" == context.createProducerTemplate.requestBody("direct:test1", "msg1")) - assert("hello msg2" == context.createProducerTemplate.requestBody("direct:test2", "msg2")) + // access 'externally' registered typed actors + assert("hello msg1" == context.createProducerTemplate.requestBody("direct:test", "msg1")) - // 'internally' register active object (requires CamelService) - ActiveObject.newInstance(classOf[ConsumerPojo2]) + // set expectations on upcoming endpoint activation + val activation = CamelService.expectEndpointActivationCount(1) + + // 'internally' register typed actor (requires CamelService) + TypedActor.newInstance(classOf[TypedConsumer2], classOf[TypedConsumer2Impl]) // internal registration is done in background. Wait a bit ... 
- Thread.sleep(1000) + activation.await - // access 'internally' (automatically) registered active-objects - // (see @consume annotation value at ConsumerPojo2.foo method) + // access 'internally' (automatically) registered typed-actors + // (see @consume annotation value at TypedConsumer2.foo method) assert("default: msg3" == context.createProducerTemplate.requestBody("direct:default", "msg3")) // shutdown CamelService - camelService.unload + CamelService.stop // shutdown all (internally) created actors ActorRegistry.shutdownAll @@ -51,9 +53,8 @@ object StandaloneApplication { class StandaloneApplicationRoute extends RouteBuilder { def configure = { - // routes to active objects (in SimpleRegistry) - from("direct:test1").to("active-object:pojo1?method=foo") - from("direct:test2").to("active-object:pojo2?method=foo") + // route to typed actors (in SimpleRegistry) + from("direct:test").to("typed-actor:sample?method=foo") } } @@ -64,7 +65,7 @@ object StandaloneSpringApplication { // load Spring application context val appctx = new ClassPathXmlApplicationContext("/context-standalone.xml") - // access 'externally' registered active objects with active-object component + // access 'externally' registered typed actors with typed-actor component assert("hello msg3" == template.requestBody("direct:test3", "msg3")) // destroy Spring application context @@ -77,7 +78,47 @@ object StandaloneSpringApplication { class StandaloneSpringApplicationRoute extends RouteBuilder { def configure = { - // routes to active object (in ApplicationContextRegistry) - from("direct:test3").to("active-object:pojo3?method=foo") + // routes to typed actor (in ApplicationContextRegistry) + from("direct:test3").to("typed-actor:pojo3?method=foo") + } +} + +object StandaloneJmsApplication { + def main(args: Array[String]) = { + val context = new ClassPathXmlApplicationContext("/context-jms.xml") + val registry = new ApplicationContextRegistry(context) + + // Init CamelContextManager with custom 
CamelContext + CamelContextManager.init(new DefaultCamelContext(registry)) + + // Start CamelService + CamelService.start + // Expect two consumer endpoints to be activated + val completion = CamelService.expectEndpointActivationCount(2) + + val jmsUri = "jms:topic:test" + // Wire publisher and consumer using a JMS topic + val jmsSubscriber1 = Actor.actorOf(new Subscriber("jms-subscriber-1", jmsUri)).start + val jmsSubscriber2 = Actor.actorOf(new Subscriber("jms-subscriber-2", jmsUri)).start + val jmsPublisher = Actor.actorOf(new Publisher("jms-publisher", jmsUri)).start + + // wait for the consumer (subscriber) endpoint being activated + completion.await + + // Send 10 messages to via publisher actor + for(i <- 1 to 10) { + jmsPublisher ! ("Akka rocks (%d)" format i) + } + + // Send 10 messages to JMS topic directly + for(i <- 1 to 10) { + CamelContextManager.template.sendBody(jmsUri, "Camel rocks (%d)" format i) + } + + // Graceful shutdown of all endpoints/routes + CamelService.stop + + // Shutdown example actors + ActorRegistry.shutdownAll } } diff --git a/akka-samples/akka-sample-camel/src/test/scala/HttpConcurrencyTest.scala b/akka-samples/akka-sample-camel/src/test/scala/HttpConcurrencyTest.scala new file mode 100644 index 0000000000..54fa631744 --- /dev/null +++ b/akka-samples/akka-sample-camel/src/test/scala/HttpConcurrencyTest.scala @@ -0,0 +1,99 @@ +package sample.camel + +import collection.mutable.Set + +import java.util.concurrent.CountDownLatch + +import org.junit._ + +import se.scalablesolutions.akka.actor.Actor._ +import se.scalablesolutions.akka.actor.{ActorRegistry, ActorRef, Actor} +import se.scalablesolutions.akka.camel.{CamelService, Message, Producer, Consumer} +import se.scalablesolutions.akka.routing.CyclicIterator +import se.scalablesolutions.akka.routing.Routing._ +import org.scalatest.junit.JUnitSuite + +/** + * @author Martin Krasser + */ +@Ignore // do not run concurrency test by default +class HttpConcurrencyTest extends JUnitSuite { + 
import HttpConcurrencyTest._ + + @Test def shouldProcessMessagesConcurrently = { + val num = 50 + val latch1 = new CountDownLatch(num) + val latch2 = new CountDownLatch(num) + val latch3 = new CountDownLatch(num) + val client1 = actorOf(new HttpClientActor("client1", latch1)).start + val client2 = actorOf(new HttpClientActor("client2", latch2)).start + val client3 = actorOf(new HttpClientActor("client3", latch3)).start + for (i <- 1 to num) { + client1 ! Message("client1", Map(Message.MessageExchangeId -> i)) + client2 ! Message("client2", Map(Message.MessageExchangeId -> i)) + client3 ! Message("client3", Map(Message.MessageExchangeId -> i)) + } + latch1.await + latch2.await + latch3.await + assert(num == (client1 !! "getCorrelationIdCount").as[Int].get) + assert(num == (client2 !! "getCorrelationIdCount").as[Int].get) + assert(num == (client3 !! "getCorrelationIdCount").as[Int].get) + } +} + +object HttpConcurrencyTest { + @BeforeClass + def beforeClass = { + CamelService.start + + val workers = for (i <- 1 to 8) yield actorOf[HttpServerWorker].start + val balancer = loadBalancerActor(new CyclicIterator(workers.toList)) + + val completion = CamelService.expectEndpointActivationCount(1) + val server = actorOf(new HttpServerActor(balancer)).start + completion.await + } + + @AfterClass + def afterClass = { + CamelService.stop + ActorRegistry.shutdownAll + } + + class HttpClientActor(label: String, latch: CountDownLatch) extends Actor with Producer { + def endpointUri = "jetty:http://0.0.0.0:8855/echo" + var correlationIds = Set[Any]() + + override protected def receive = { + case "getCorrelationIdCount" => self.reply(correlationIds.size) + case msg => super.receive(msg) + } + + override protected def receiveAfterProduce = { + case msg: Message => { + val corr = msg.headers(Message.MessageExchangeId) + val body = msg.bodyAs[String] + correlationIds += corr + assert(label == body) + latch.countDown + print(".") + } + } + } + + class HttpServerActor(balancer: ActorRef) 
extends Actor with Consumer { + def endpointUri = "jetty:http://0.0.0.0:8855/echo" + var counter = 0 + + def receive = { + case msg => balancer forward msg + } + } + + class HttpServerWorker extends Actor { + protected def receive = { + case msg => self.reply(msg) + } + } +} diff --git a/akka-samples/akka-sample-chat/README b/akka-samples/akka-sample-chat/README index 475cb60015..fec39724e1 100644 --- a/akka-samples/akka-sample-chat/README +++ b/akka-samples/akka-sample-chat/README @@ -18,8 +18,8 @@ Then to run the sample: - Run 'sbt console' to start up a REPL (interpreter). 4. In the first REPL you get execute: - scala> import sample.chat._ - - scala> import se.scalablesolutions.akka.actor.Actor - - scala> val chatService = Actor.actorOf[ChatService].start + - scala> import se.scalablesolutions.akka.actor.Actor._ + - scala> val chatService = actorOf[ChatService].start 5. In the second REPL you get execute: - scala> import sample.chat._ - scala> Runner.run diff --git a/akka-samples/akka-sample-lift/src/main/scala/akka/SimpleService.scala b/akka-samples/akka-sample-lift/src/main/scala/akka/SimpleService.scala index 7557404da9..b361fbb16b 100644 --- a/akka-samples/akka-sample-lift/src/main/scala/akka/SimpleService.scala +++ b/akka-samples/akka-sample-lift/src/main/scala/akka/SimpleService.scala @@ -1,35 +1,24 @@ package sample.lift -import se.scalablesolutions.akka.actor.{Transactor, Actor} +import se.scalablesolutions.akka.actor._ +import se.scalablesolutions.akka.actor.Actor._ import se.scalablesolutions.akka.config.ScalaConfig._ import se.scalablesolutions.akka.stm.TransactionalMap import se.scalablesolutions.akka.persistence.cassandra.CassandraStorage -import Actor._ - +import scala.xml.Node import java.lang.Integer import javax.ws.rs.{GET, Path, Produces} import java.nio.ByteBuffer +import net.liftweb.http._ +import net.liftweb.http.rest._ -/** - * Try service out by invoking (multiple times): - *

- * curl http://localhost:9998/liftcount
- * 
- * Or browse to the URL from a web browser. - */ -@Path("/liftcount") -class SimpleService extends Transactor { - case object Tick +class SimpleServiceActor extends Transactor { private val KEY = "COUNTER" private var hasStartedTicking = false private lazy val storage = TransactionalMap[String, Integer]() - @GET - @Produces(Array("text/html")) - def count = (self !! Tick).getOrElse(

Error in counter

) - def receive = { - case Tick => if (hasStartedTicking) { + case "Tick" => if (hasStartedTicking) { val counter = storage.get(KEY).get.asInstanceOf[Integer].intValue storage.put(KEY, new Integer(counter + 1)) self.reply(

Tick: {counter + 1}

) @@ -41,27 +30,14 @@ class SimpleService extends Transactor { } } -/** - * Try service out by invoking (multiple times): - *
- * curl http://localhost:9998/persistentliftcount
- * 
- * Or browse to the URL from a web browser. - */ -@Path("/persistentliftcount") -class PersistentSimpleService extends Transactor { +class PersistentServiceActor extends Transactor { - case object Tick private val KEY = "COUNTER" private var hasStartedTicking = false private lazy val storage = CassandraStorage.newMap - @GET - @Produces(Array("text/html")) - def count = (self !! Tick).getOrElse(

Error in counter

) - def receive = { - case Tick => if (hasStartedTicking) { + case "Tick" => if (hasStartedTicking) { val bytes = storage.get(KEY.getBytes).get val counter = ByteBuffer.wrap(bytes).getInt storage.put(KEY.getBytes, ByteBuffer.allocate(4).putInt(counter + 1).array) @@ -73,3 +49,46 @@ class PersistentSimpleService extends Transactor { } } } + + +/** + * Try service out by invoking (multiple times): + *
+ * curl http://localhost:8080/liftcount
+ * 
+ * Or browse to the URL from a web browser. + */ + +object SimpleRestService extends RestHelper { + serve { + case Get("liftcount" :: _, req) => + //Fetch the first actor of type SimpleServiceActor + //Send it the "Tick" message and expect a Node back + val result = for( a <- ActorRegistry.actorsFor(classOf[SimpleServiceActor]).headOption; + r <- (a !! "Tick").as[Node] ) yield r + + //Return either the resulting NodeSeq or a default one + (result getOrElse

Error in counter

).asInstanceOf[Node] + } +} + + +/** + * Try service out by invoking (multiple times): + *
+ * curl http://localhost:8080/persistentliftcount
+ * 
+ * Or browse to the URL from a web browser. + */ + object PersistentRestService extends RestHelper { + serve { + case Get("persistentliftcount" :: _, req) => + //Fetch the first actor of type SimpleServiceActor + //Send it the "Tick" message and expect a Node back + val result = for( a <- ActorRegistry.actorsFor(classOf[PersistentServiceActor]).headOption; + r <- (a !! "Tick").as[Node] ) yield r + + //Return either the resulting NodeSeq or a default one + (result getOrElse

Error in counter

).asInstanceOf[Node] + } + } diff --git a/akka-samples/akka-sample-lift/src/main/scala/bootstrap/liftweb/Boot.scala b/akka-samples/akka-sample-lift/src/main/scala/bootstrap/liftweb/Boot.scala index 0f4a0e9020..2e56a5857a 100644 --- a/akka-samples/akka-sample-lift/src/main/scala/bootstrap/liftweb/Boot.scala +++ b/akka-samples/akka-sample-lift/src/main/scala/bootstrap/liftweb/Boot.scala @@ -13,7 +13,7 @@ import se.scalablesolutions.akka.actor.Actor._ import se.scalablesolutions.akka.config.ScalaConfig._ import se.scalablesolutions.akka.util.Logging -import sample.lift.{PersistentSimpleService, SimpleService} +import sample.lift._ /** * A class that's instantiated early and run. It allows the application @@ -35,6 +35,8 @@ class Boot extends Logging { true } } + LiftRules.statelessDispatchTable.append(SimpleRestService) + LiftRules.statelessDispatchTable.append(PersistentRestService) LiftRules.passNotFoundToChain = true @@ -42,10 +44,10 @@ class Boot extends Logging { SupervisorConfig( RestartStrategy(OneForOne, 3, 100, List(classOf[Exception])), Supervise( - actorOf[SimpleService], + actorOf[SimpleServiceActor], LifeCycle(Permanent)) :: Supervise( - actorOf[PersistentSimpleService], + actorOf[PersistentServiceActor], LifeCycle(Permanent)) :: Nil)) factory.newInstance.start diff --git a/akka-samples/akka-sample-lift/src/main/webapp/WEB-INF/web.xml b/akka-samples/akka-sample-lift/src/main/webapp/WEB-INF/web.xml index 23348604bb..3a1b672cec 100644 --- a/akka-samples/akka-sample-lift/src/main/webapp/WEB-INF/web.xml +++ b/akka-samples/akka-sample-lift/src/main/webapp/WEB-INF/web.xml @@ -13,7 +13,7 @@ AkkaServlet - se.scalablesolutions.akka.rest.AkkaServlet + se.scalablesolutions.akka.comet.AkkaServlet AkkaServlet diff --git a/akka-samples/akka-sample-osgi/src/main/scala/osgiExample.scala b/akka-samples/akka-sample-osgi/src/main/scala/osgiExample.scala new file mode 100644 index 0000000000..18323d04d3 --- /dev/null +++ 
b/akka-samples/akka-sample-osgi/src/main/scala/osgiExample.scala @@ -0,0 +1,33 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka +package sample.osgi + +import actor.{ Actor, ActorRegistry } +import actor.Actor._ + +import org.osgi.framework.{ BundleActivator, BundleContext } + +class Activator extends BundleActivator { + + def start(context: BundleContext) { + println("Starting the OSGi example ...") + val echo = actorOf[EchoActor].start + val answer = (echo !! "OSGi example") + println(answer getOrElse "No answer!") + } + + def stop(context: BundleContext) { + ActorRegistry.shutdownAll() + println("Stopped the OSGi example.") + } +} + +class EchoActor extends Actor { + + override def receive = { + case x => self reply x + } +} diff --git a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/Boot.java b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/Boot.java index 60eb4f11af..cd382ae6ec 100644 --- a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/Boot.java +++ b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/Boot.java @@ -4,11 +4,11 @@ package sample.rest.java; -import se.scalablesolutions.akka.config.ActiveObjectConfigurator; +import se.scalablesolutions.akka.config.TypedActorConfigurator; import static se.scalablesolutions.akka.config.JavaConfig.*; public class Boot { - public final static ActiveObjectConfigurator configurator = new ActiveObjectConfigurator(); + public final static TypedActorConfigurator configurator = new TypedActorConfigurator(); static { configurator.configure( new RestartStrategy(new OneForOne(), 3, 5000, new Class[]{Exception.class}), diff --git a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/PersistentSimpleService.java b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/PersistentSimpleService.java index 1108fcdb63..67368ceedd 100644 --- 
a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/PersistentSimpleService.java +++ b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/PersistentSimpleService.java @@ -4,42 +4,6 @@ package sample.rest.java; -import se.scalablesolutions.akka.actor.annotation.transactionrequired; -import se.scalablesolutions.akka.actor.annotation.prerestart; -import se.scalablesolutions.akka.actor.annotation.postrestart; -import se.scalablesolutions.akka.persistence.common.PersistentMap; -import se.scalablesolutions.akka.persistence.cassandra.CassandraStorage; - -import java.nio.ByteBuffer; - -@transactionrequired -public class PersistentSimpleService { - private String KEY = "COUNTER"; - - private boolean hasStartedTicking = false; - private PersistentMap storage; - - public String count() { - if (storage == null) storage = CassandraStorage.newMap(); - if (!hasStartedTicking) { - storage.put(KEY.getBytes(), ByteBuffer.allocate(4).putInt(0).array()); - hasStartedTicking = true; - return "Tick: 0\n"; - } else { - byte[] bytes = (byte[])storage.get(KEY.getBytes()).get(); - int counter = ByteBuffer.wrap(bytes).getInt(); - storage.put(KEY.getBytes(), ByteBuffer.allocate(4).putInt(counter + 1).array()); - return "Tick: " + counter + "\n"; - } - } - - @prerestart - public void preRestart() { - System.out.println("Prepare for restart by supervisor"); - } - - @postrestart - public void postRestart() { - System.out.println("Reinitialize after restart by supervisor"); - } +public interface PersistentSimpleService { + public String count(); } \ No newline at end of file diff --git a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/PersistentSimpleServiceImpl.java b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/PersistentSimpleServiceImpl.java new file mode 100644 index 0000000000..2b83f4acbb --- /dev/null +++ b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/PersistentSimpleServiceImpl.java @@ -0,0 +1,42 @@ 
+/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package sample.rest.java; + +import se.scalablesolutions.akka.actor.TypedTransactor; +import se.scalablesolutions.akka.persistence.common.PersistentMap; +import se.scalablesolutions.akka.persistence.cassandra.CassandraStorage; + +import java.nio.ByteBuffer; + +public class PersistentSimpleServiceImpl extends TypedTransactor implements PersistentSimpleService { + private String KEY = "COUNTER"; + + private boolean hasStartedTicking = false; + private PersistentMap storage; + + public String count() { + if (storage == null) storage = CassandraStorage.newMap(); + if (!hasStartedTicking) { + storage.put(KEY.getBytes(), ByteBuffer.allocate(4).putInt(0).array()); + hasStartedTicking = true; + return "Tick: 0\n"; + } else { + byte[] bytes = (byte[])storage.get(KEY.getBytes()).get(); + int counter = ByteBuffer.wrap(bytes).getInt(); + storage.put(KEY.getBytes(), ByteBuffer.allocate(4).putInt(counter + 1).array()); + return "Tick: " + counter + "\n"; + } + } + + @Override + public void preRestart(Throwable cause) { + System.out.println("Prepare for restart by supervisor"); + } + + @Override + public void postRestart(Throwable cause) { + System.out.println("Reinitialize after restart by supervisor"); + } +} \ No newline at end of file diff --git a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/Receiver.java b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/Receiver.java index 06631659a2..4c60e44ae6 100644 --- a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/Receiver.java +++ b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/Receiver.java @@ -4,17 +4,6 @@ package sample.rest.java; -import javax.ws.rs.Path; -import javax.ws.rs.GET; -import javax.ws.rs.Produces; - -import se.scalablesolutions.akka.actor.ActiveObject; -import se.scalablesolutions.akka.actor.ActiveObjectContext; - -public class Receiver { - private ActiveObjectContext context = 
null; - public SimpleService receive() { - System.out.println("------ RECEIVE"); - return (SimpleService) context.getSender(); - } +public interface Receiver { + SimpleService receive(); } diff --git a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/ReceiverImpl.java b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/ReceiverImpl.java new file mode 100644 index 0000000000..c46042a848 --- /dev/null +++ b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/ReceiverImpl.java @@ -0,0 +1,14 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package sample.rest.java; + +import se.scalablesolutions.akka.actor.TypedActorContext; +import se.scalablesolutions.akka.actor.TypedActor; + +public class ReceiverImpl extends TypedActor implements Receiver { + public SimpleService receive() { + return (SimpleService) getContext().getSender(); + } +} diff --git a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/SimpleService.java b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/SimpleService.java index 097ba810b5..8055b3383a 100644 --- a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/SimpleService.java +++ b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/SimpleService.java @@ -4,43 +4,6 @@ package sample.rest.java; -import se.scalablesolutions.akka.actor.ActiveObject; -import se.scalablesolutions.akka.actor.ActiveObjectContext; -import se.scalablesolutions.akka.actor.annotation.transactionrequired; -import se.scalablesolutions.akka.actor.annotation.prerestart; -import se.scalablesolutions.akka.actor.annotation.postrestart; -import se.scalablesolutions.akka.stm.TransactionalMap; - -@transactionrequired -public class SimpleService { - private String KEY = "COUNTER"; - - private boolean hasStartedTicking = false; - private TransactionalMap storage; - private Receiver receiver = ActiveObject.newInstance(Receiver.class); - - public String count() { - if 
(storage == null) storage = new TransactionalMap(); - if (!hasStartedTicking) { - storage.put(KEY, 0); - hasStartedTicking = true; - return "Tick: 0\n"; - } else { - // Grabs the sender address and returns it - //SimpleService sender = receiver.receive(); - int counter = (Integer)storage.get(KEY).get() + 1; - storage.put(KEY, counter); - return "Tick: " + counter + "\n"; - } - } - - @prerestart - public void preRestart() { - System.out.println("Prepare for restart by supervisor"); - } - - @postrestart - public void postRestart() { - System.out.println("Reinitialize after restart by supervisor"); - } +public interface SimpleService { + public String count(); } \ No newline at end of file diff --git a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/SimpleServiceImpl.java b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/SimpleServiceImpl.java new file mode 100644 index 0000000000..96400a0404 --- /dev/null +++ b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/SimpleServiceImpl.java @@ -0,0 +1,43 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package sample.rest.java; + +import se.scalablesolutions.akka.actor.TypedActor; +import se.scalablesolutions.akka.actor.TypedTransactor; +import se.scalablesolutions.akka.actor.TypedActorContext; +import se.scalablesolutions.akka.stm.TransactionalMap; + +public class SimpleServiceImpl extends TypedTransactor implements SimpleService { + private String KEY = "COUNTER"; + + private boolean hasStartedTicking = false; + private TransactionalMap storage; + private Receiver receiver = TypedActor.newInstance(Receiver.class, ReceiverImpl.class); + + public String count() { + if (storage == null) storage = new TransactionalMap(); + if (!hasStartedTicking) { + storage.put(KEY, 0); + hasStartedTicking = true; + return "Tick: 0\n"; + } else { + // Grabs the sender address and returns it + //SimpleService sender = receiver.receive(); + int counter = 
(Integer)storage.get(KEY).get() + 1; + storage.put(KEY, counter); + return "Tick: " + counter + "\n"; + } + } + + @Override + public void preRestart(Throwable cause) { + System.out.println("Prepare for restart by supervisor"); + } + + @Override + public void postRestart(Throwable cause) { + System.out.println("Reinitialize after restart by supervisor"); + } +} \ No newline at end of file diff --git a/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/dispatcher-config.xml b/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/dispatcher-config.xml index 688d04f377..20879832d0 100644 --- a/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/dispatcher-config.xml +++ b/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/dispatcher-config.xml @@ -10,15 +10,15 @@ http://www.akkasource.org/schema/akka http://scalablesolutions.se/akka/akka-0.10.xsd"> - - + - - + @@ -69,14 +69,14 @@ http://scalablesolutions.se/akka/akka-0.10.xsd"> java.lang.NullPointerException - - - - + + + + - - + + diff --git a/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/supervisor-config.xml b/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/supervisor-config.xml index 9d35a40742..d96fdb1c93 100644 --- a/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/supervisor-config.xml +++ b/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/supervisor-config.xml @@ -16,14 +16,14 @@ http://scalablesolutions.se/akka/akka-0.10.xsd"> java.lang.NullPointerException - - - - + + + + - - + + @@ -32,10 +32,10 @@ http://scalablesolutions.se/akka/akka-0.10.xsd"> java.lang.Exception - - + - + @@ -53,15 +53,15 @@ http://scalablesolutions.se/akka/akka-0.10.xsd"> java.lang.Exception - - + + - - + + - - + + @@ 
-70,19 +70,19 @@ http://scalablesolutions.se/akka/akka-0.10.xsd"> java.lang.Exception - - - - + + + + java.lang.IOException - - - + + + diff --git a/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/test-config.xml b/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/test-config.xml index 665d03a05e..23d2476995 100644 --- a/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/test-config.xml +++ b/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/test-config.xml @@ -10,43 +10,43 @@ http://www.akkasource.org/schema/akka http://scalablesolutions.se/akka/akka-0.10.xsd"> - - - - - + - - + - + - + java.lang.NullPointerException - - - - + + + + - - + + \ No newline at end of file diff --git a/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/DispatcherConfigurationTest.java b/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/DispatcherConfigurationTest.java index 9f941e4142..862d781802 100644 --- a/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/DispatcherConfigurationTest.java +++ b/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/DispatcherConfigurationTest.java @@ -58,7 +58,7 @@ public class DispatcherConfigurationTest { */ @Test public void testDispatcherRef() { - MyPojo pojo = (MyPojo) context.getBean("active-object-with-dispatcher-ref"); + MyPojo pojo = (MyPojo) context.getBean("typed-actor-with-dispatcher-ref"); assertNotNull(pojo); } diff --git a/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/SupervisorConfigurationTest.java b/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/SupervisorConfigurationTest.java index 659433cb9f..c90fd56b72 100644 --- 
a/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/SupervisorConfigurationTest.java +++ b/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/SupervisorConfigurationTest.java @@ -13,8 +13,8 @@ import org.junit.Test; import org.springframework.context.ApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; -import se.scalablesolutions.akka.actor.ActiveObject; -import se.scalablesolutions.akka.config.ActiveObjectConfigurator; +import se.scalablesolutions.akka.actor.TypedActor; +import se.scalablesolutions.akka.config.TypedActorConfigurator; import se.scalablesolutions.akka.config.JavaConfig.AllForOne; import se.scalablesolutions.akka.config.JavaConfig.Component; import se.scalablesolutions.akka.config.JavaConfig.LifeCycle; @@ -45,10 +45,10 @@ public class SupervisorConfigurationTest { @Test public void testSupervision() { - // get ActiveObjectConfigurator bean from spring context - ActiveObjectConfigurator myConfigurator = (ActiveObjectConfigurator) context + // get TypedActorConfigurator bean from spring context + TypedActorConfigurator myConfigurator = (TypedActorConfigurator) context .getBean("supervision1"); - // get ActiveObjects + // get TypedActors Foo foo = myConfigurator.getInstance(Foo.class); assertNotNull(foo); IBar bar = myConfigurator.getInstance(IBar.class); @@ -59,7 +59,7 @@ public class SupervisorConfigurationTest { @Test public void testTransactionalState() { - ActiveObjectConfigurator conf = (ActiveObjectConfigurator) context + TypedActorConfigurator conf = (TypedActorConfigurator) context .getBean("supervision2"); StatefulPojo stateful = conf.getInstance(StatefulPojo.class); stateful.setMapState("testTransactionalState", "some map state"); @@ -73,23 +73,23 @@ public class SupervisorConfigurationTest { @Test public void testInitTransactionalState() { - StatefulPojo stateful = ActiveObject.newInstance(StatefulPojo.class, + StatefulPojo stateful = 
TypedActor.newInstance(StatefulPojo.class, 1000, true); assertTrue("should be inititalized", stateful.isInitialized()); } @Test public void testSupervisionWithDispatcher() { - ActiveObjectConfigurator myConfigurator = (ActiveObjectConfigurator) context + TypedActorConfigurator myConfigurator = (TypedActorConfigurator) context .getBean("supervision-with-dispatcher"); - // get ActiveObjects + // get TypedActors Foo foo = myConfigurator.getInstance(Foo.class); assertNotNull(foo); // TODO how to check dispatcher? } @Test - public void testRemoteActiveObject() { + public void testRemoteTypedActor() { new Thread(new Runnable() { public void run() { RemoteNode.start(); @@ -99,13 +99,13 @@ public class SupervisorConfigurationTest { Thread.currentThread().sleep(1000); } catch (Exception e) { } - Foo instance = ActiveObject.newRemoteInstance(Foo.class, 2000, "localhost", 9999); + Foo instance = TypedActor.newRemoteInstance(Foo.class, 2000, "localhost", 9999); System.out.println(instance.foo()); } @Test - public void testSupervisedRemoteActiveObject() { + public void testSupervisedRemoteTypedActor() { new Thread(new Runnable() { public void run() { RemoteNode.start(); @@ -116,7 +116,7 @@ public class SupervisorConfigurationTest { } catch (Exception e) { } - ActiveObjectConfigurator conf = new ActiveObjectConfigurator(); + TypedActorConfigurator conf = new TypedActorConfigurator(); conf.configure( new RestartStrategy(new AllForOne(), 3, 10000, new Class[] { Exception.class }), new Component[] { diff --git a/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/ActiveObjectConfigurationTest.java b/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/TypedActorConfigurationTest.java similarity index 70% rename from akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/ActiveObjectConfigurationTest.java rename to 
akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/TypedActorConfigurationTest.java index 7cc691f3e3..e8931fd1a2 100644 --- a/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/ActiveObjectConfigurationTest.java +++ b/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/TypedActorConfigurationTest.java @@ -21,10 +21,10 @@ import se.scalablesolutions.akka.remote.RemoteNode; import se.scalablesolutions.akka.spring.foo.MyPojo; /** - * Tests for spring configuration of active objects and supervisor configuration. + * Tests for spring configuration of typed actors and supervisor configuration. * @author michaelkober */ -public class ActiveObjectConfigurationTest { +public class TypedActorConfigurationTest { private ApplicationContext context = null; @@ -34,7 +34,7 @@ public class ActiveObjectConfigurationTest { } /** - * Tests that the <akka:active-object/> and <akka:supervision/> and <akka:dispatcher/> element + * Tests that the <akka:typed-actor/> and <akka:supervision/> and <akka:dispatcher/> element * can be used as a top level element. 
*/ @Test @@ -43,43 +43,43 @@ public class ActiveObjectConfigurationTest { DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory(); XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(beanFactory); reader.loadBeanDefinitions(CONTEXT); - assertTrue(beanFactory.containsBeanDefinition("simple-active-object")); - assertTrue(beanFactory.containsBeanDefinition("remote-active-object")); + assertTrue(beanFactory.containsBeanDefinition("simple-typed-actor")); + assertTrue(beanFactory.containsBeanDefinition("remote-typed-actor")); assertTrue(beanFactory.containsBeanDefinition("supervision1")); assertTrue(beanFactory.containsBeanDefinition("dispatcher1")); } @Test - public void testSimpleActiveObject() { - MyPojo myPojo = (MyPojo) context.getBean("simple-active-object"); + public void testSimpleTypedActor() { + MyPojo myPojo = (MyPojo) context.getBean("simple-typed-actor"); String msg = myPojo.getFoo(); msg += myPojo.getBar(); assertEquals("wrong invocation order", "foobar", msg); } @Test(expected = FutureTimeoutException.class) - public void testSimpleActiveObject_Timeout() { - MyPojo myPojo = (MyPojo) context.getBean("simple-active-object"); + public void testSimpleTypedActor_Timeout() { + MyPojo myPojo = (MyPojo) context.getBean("simple-typed-actor"); myPojo.longRunning(); } @Test - public void testSimpleActiveObject_NoTimeout() { - MyPojo myPojo = (MyPojo) context.getBean("simple-active-object-long-timeout"); + public void testSimpleTypedActor_NoTimeout() { + MyPojo myPojo = (MyPojo) context.getBean("simple-typed-actor-long-timeout"); String msg = myPojo.longRunning(); assertEquals("this took long", msg); } @Test - public void testTransactionalActiveObject() { - MyPojo myPojo = (MyPojo) context.getBean("transactional-active-object"); + public void testTransactionalTypedActor() { + MyPojo myPojo = (MyPojo) context.getBean("transactional-typed-actor"); String msg = myPojo.getFoo(); msg += myPojo.getBar(); assertEquals("wrong invocation order", 
"foobar", msg); } @Test - public void testRemoteActiveObject() { + public void testRemoteTypedActor() { new Thread(new Runnable() { public void run() { RemoteNode.start(); @@ -91,7 +91,7 @@ public class ActiveObjectConfigurationTest { } Config.config(); - MyPojo myPojo = (MyPojo) context.getBean("remote-active-object"); + MyPojo myPojo = (MyPojo) context.getBean("remote-typed-actor"); assertEquals("foo", myPojo.getFoo()); } diff --git a/akka-spring/src/main/resources/se/scalablesolutions/akka/spring/akka-0.10.xsd b/akka-spring/src/main/resources/se/scalablesolutions/akka/spring/akka-0.10.xsd index 6eb0ec48fa..6dd0ee7681 100644 --- a/akka-spring/src/main/resources/se/scalablesolutions/akka/spring/akka-0.10.xsd +++ b/akka-spring/src/main/resources/se/scalablesolutions/akka/spring/akka-0.10.xsd @@ -105,85 +105,54 @@ - - - - - - Pre restart callback method that is called during restart. - - - - - - - Post restart callback method that is called during restart. - - - - - - - - - - - Shutdown callback method that is called during shut down. - - - - - - - + + - - - + - Name of the target class. + Name of the interface implemented by implementation class. + + + + + + + Name of the implementation class. - default timeout for '!!' invocations + Theh default timeout for '!!' invocations. - Set to true if messages should have REQUIRES_NEW semantics - - - - - - - Interface implemented by target class. + Set this to true if messages should have REQUIRES_NEW semantics. - Lifecycle, permanent or temporary + Defines the lifecycle, can be either 'permanent' or 'temporary'. - + - Supported scopes are singleton and prototype + Supported scopes are 'singleton' and 'prototype'. @@ -196,10 +165,10 @@ - - + + - + @@ -211,21 +180,21 @@ - Failover scheme, AllForOne or OneForOne + Failover scheme, can be one of 'AllForOne' or 'OneForOne'. - Maximal number of retries. + Maximal number of restarts. - Timerange for restart. + Time range for maximal number of restart. 
@@ -235,7 +204,7 @@ - + @@ -252,8 +221,8 @@ - - + + diff --git a/akka-spring/src/main/scala/ActiveObjectFactoryBean.scala b/akka-spring/src/main/scala/ActiveObjectFactoryBean.scala deleted file mode 100644 index 6f62c5a8c4..0000000000 --- a/akka-spring/src/main/scala/ActiveObjectFactoryBean.scala +++ /dev/null @@ -1,198 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.spring - -import java.beans.PropertyDescriptor -import java.lang.reflect.Method -import javax.annotation.PreDestroy -import javax.annotation.PostConstruct -import reflect.BeanProperty - -import org.springframework.beans.BeanWrapperImpl -import org.springframework.beans.BeanWrapper -import org.springframework.beans.BeanUtils -import org.springframework.beans.factory.BeanFactory -import org.springframework.beans.factory.config.AbstractFactoryBean -import org.springframework.context.{ApplicationContext,ApplicationContextAware} -import org.springframework.util.ReflectionUtils -import org.springframework.util.StringUtils - -import se.scalablesolutions.akka.actor.{ActiveObjectConfiguration, ActiveObject} -import se.scalablesolutions.akka.config.ScalaConfig.{ShutdownCallback, RestartCallbacks} -import se.scalablesolutions.akka.dispatch.MessageDispatcher -import se.scalablesolutions.akka.util.Logging - -/** - * Factory bean for active objects. 
- * - * @author michaelkober - * @author Johan Rask - * @author Martin Krasser - */ -class ActiveObjectFactoryBean extends AbstractFactoryBean[AnyRef] with Logging with ApplicationContextAware { - import StringReflect._ - import AkkaSpringConfigurationTags._ - - @BeanProperty var target: String = "" - @BeanProperty var timeout: Long = _ - @BeanProperty var interface: String = "" - @BeanProperty var transactional: Boolean = false - @BeanProperty var pre: String = "" - @BeanProperty var post: String = "" - @BeanProperty var shutdown: String = "" - @BeanProperty var host: String = "" - @BeanProperty var port: Int = _ - @BeanProperty var lifecycle: String = "" - @BeanProperty var dispatcher: DispatcherProperties = _ - @BeanProperty var scope:String = VAL_SCOPE_SINGLETON - @BeanProperty var property:PropertyEntries = _ - @BeanProperty var applicationContext:ApplicationContext = _ - - // Holds info about if deps has been set or not. Depends on - // if interface is specified or not. We must set deps on - // target instance if interface is specified - var hasSetDependecies = false - - - override def isSingleton:Boolean = { - if(scope.equals(VAL_SCOPE_SINGLETON)) { - true - } else { - false - } - } - - /* - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - def getObjectType: Class[AnyRef] = try { - target.toClass - } catch { - // required by contract to return null - case e: ClassNotFoundException => null - } - - /* - * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() - */ - def createInstance: AnyRef = { - var argumentList = "" - if (isRemote) argumentList += "r" - if (hasInterface) argumentList += "i" - if (hasDispatcher) argumentList += "d" - - postConstruct( - setProperties( - create(argumentList))) - - } - - /** - * Stop the active object if it is a singleton. 
- */ - override def destroyInstance(instance:AnyRef) { - ActiveObject.stop(instance) - } - - /** - * Invokes any method annotated with @PostConstruct - * When interfaces are specified, this method is invoked both on the - * target instance and on the active object, so a developer is free do decide - * where the annotation should be. If no interface is specified it is only invoked - * on the active object - */ - private def postConstruct(ref:AnyRef) : AnyRef = { - // Invoke postConstruct method if any - for(method <- ref.getClass.getMethods) { - if(method.isAnnotationPresent(classOf[PostConstruct])) { - method.invoke(ref) - } - } - ref - } - - - private def setProperties(ref:AnyRef) : AnyRef = { - if(hasSetDependecies) { - return ref - } - - log.debug("Processing properties and dependencies for target class %s",target) - val beanWrapper = new BeanWrapperImpl(ref); - if(ref.isInstanceOf[ApplicationContextAware]) { - log.debug("Setting application context") - beanWrapper.setPropertyValue("applicationContext",applicationContext) - } - for(entry <- property.entryList) { - val propertyDescriptor = BeanUtils.getPropertyDescriptor(ref.getClass,entry.name) - val method = propertyDescriptor.getWriteMethod(); - - if(StringUtils.hasText(entry.ref)) { - log.debug("Setting property %s with bean ref %s using method %s", - entry.name,entry.ref,method.getName) - method.invoke(ref,getBeanFactory().getBean(entry.ref)) - } else if(StringUtils.hasText(entry.value)) { - log.debug("Setting property %s with value %s using method %s", - entry.name,entry.value,method.getName) - beanWrapper.setPropertyValue(entry.name,entry.value) - } else { - throw new AkkaBeansException("Either property@ref or property@value must be set on property element") - } - } - ref - } - - private[akka] def create(argList : String) : AnyRef = { - if (argList == "r") { - ActiveObject.newInstance(target.toClass, createConfig.makeRemote(host, port)) - } else if (argList == "ri" ) { - 
ActiveObject.newInstance(interface.toClass, aNewInstance(target.toClass), createConfig.makeRemote(host, port)) - } else if (argList == "rd") { - ActiveObject.newInstance(target.toClass, createConfig.makeRemote(host, port).dispatcher(dispatcherInstance)) - } else if (argList == "rid") { - ActiveObject.newInstance(interface.toClass, aNewInstance(target.toClass), createConfig.makeRemote(host, port).dispatcher(dispatcherInstance)) - } else if (argList == "i") { - ActiveObject.newInstance(interface.toClass, aNewInstance(target.toClass), createConfig) - } else if (argList == "id") { - ActiveObject.newInstance(interface.toClass, aNewInstance(target.toClass), createConfig.dispatcher(dispatcherInstance)) - } else if (argList == "d") { - ActiveObject.newInstance(target.toClass, createConfig.dispatcher(dispatcherInstance)) - } else { - ActiveObject.newInstance(target.toClass, createConfig) - } - } - - - - private[akka] def createConfig: ActiveObjectConfiguration = { - val config = new ActiveObjectConfiguration().timeout(timeout) - if (hasRestartCallbacks) config.restartCallbacks(pre, post) - if (hasShutdownCallback) config.shutdownCallback(shutdown) - if (transactional) config.makeTransactionRequired - config - } - def aNewInstance[T <: AnyRef](clazz: Class[T]) : T = { - var ref = clazz.newInstance().asInstanceOf[T] - postConstruct( - setProperties(ref)) - hasSetDependecies = true - ref - } - - private[akka] def isRemote = (host != null) && (!host.isEmpty) - - private[akka] def hasInterface = (interface != null) && (!interface.isEmpty) - - private[akka] def hasRestartCallbacks = ((pre != null) && !pre.isEmpty) || ((post != null) && !post.isEmpty) - - private[akka] def hasShutdownCallback = ((shutdown != null) && !shutdown.isEmpty) - - private[akka] def hasDispatcher = (dispatcher != null) && (dispatcher.dispatcherType != null) && (!dispatcher.dispatcherType.isEmpty) - - private[akka] def dispatcherInstance : MessageDispatcher = { - import DispatcherFactoryBean._ - 
createNewInstance(dispatcher) - } -} diff --git a/akka-spring/src/main/scala/AkkaBeansException.scala b/akka-spring/src/main/scala/AkkaBeansException.scala deleted file mode 100644 index 8cbffa86f7..0000000000 --- a/akka-spring/src/main/scala/AkkaBeansException.scala +++ /dev/null @@ -1,14 +0,0 @@ -package se.scalablesolutions.akka.spring - -import org.springframework.beans.BeansException - -/** -* Exception to use when something goes wrong during bean creation -@author Johan Rask -*/ -class AkkaBeansException(errorMsg:String,t:Throwable) extends BeansException(errorMsg,t) { - - def this(errorMsg:String) = { - this(errorMsg,null) - } -} diff --git a/akka-spring/src/main/scala/AkkaNamespaceHandler.scala b/akka-spring/src/main/scala/AkkaNamespaceHandler.scala index 466dbeca30..694daa90d4 100644 --- a/akka-spring/src/main/scala/AkkaNamespaceHandler.scala +++ b/akka-spring/src/main/scala/AkkaNamespaceHandler.scala @@ -12,7 +12,7 @@ import AkkaSpringConfigurationTags._ */ class AkkaNamespaceHandler extends NamespaceHandlerSupport { def init = { - registerBeanDefinitionParser(ACTIVE_OBJECT_TAG, new ActiveObjectBeanDefinitionParser()); + registerBeanDefinitionParser(TYPED_ACTOR_TAG, new TypedActorBeanDefinitionParser()); registerBeanDefinitionParser(SUPERVISION_TAG, new SupervisionBeanDefinitionParser()); registerBeanDefinitionParser(DISPATCHER_TAG, new DispatcherBeanDefinitionParser()); registerBeanDefinitionParser(CAMEL_SERVICE_TAG, new CamelServiceBeanDefinitionParser); diff --git a/akka-spring/src/main/scala/AkkaSpringConfigurationTags.scala b/akka-spring/src/main/scala/AkkaSpringConfigurationTags.scala index 80a9f2e8d0..857d20fa55 100644 --- a/akka-spring/src/main/scala/AkkaSpringConfigurationTags.scala +++ b/akka-spring/src/main/scala/AkkaSpringConfigurationTags.scala @@ -13,19 +13,17 @@ object AkkaSpringConfigurationTags { // --- TAGS // // top level tags - val ACTIVE_OBJECT_TAG = "active-object" + val TYPED_ACTOR_TAG = "typed-actor" val SUPERVISION_TAG = 
"supervision" val DISPATCHER_TAG = "dispatcher" val PROPERTYENTRY_TAG = "property" val CAMEL_SERVICE_TAG = "camel-service" - // active-object sub tags - val RESTART_CALLBACKS_TAG = "restart-callbacks" - val SHUTDOWN_CALLBACK_TAG = "shutdown-callback" + // typed-actor sub tags val REMOTE_TAG = "remote" // superivision sub tags - val ACTIVE_OBJECTS_TAG = "active-objects" + val TYPED_ACTORS_TAG = "typed-actors" val STRATEGY_TAG = "restart-strategy" val TRAP_EXISTS_TAG = "trap-exits" val TRAP_EXIT_TAG = "trap-exit" @@ -38,16 +36,13 @@ object AkkaSpringConfigurationTags { // --- ATTRIBUTES // - // active object attributes + // typed actor attributes val TIMEOUT = "timeout" - val TARGET = "target" + val IMPLEMENTATION = "implementation" val INTERFACE = "interface" val TRANSACTIONAL = "transactional" val HOST = "host" val PORT = "port" - val PRE_RESTART = "pre" - val POST_RESTART = "post" - val SHUTDOWN = "shutdown" val LIFECYCLE = "lifecycle" val SCOPE = "scope" diff --git a/akka-spring/src/main/scala/CamelServiceFactoryBean.scala b/akka-spring/src/main/scala/CamelServiceFactoryBean.scala index 50a3bd748c..2baa36ac03 100644 --- a/akka-spring/src/main/scala/CamelServiceFactoryBean.scala +++ b/akka-spring/src/main/scala/CamelServiceFactoryBean.scala @@ -5,7 +5,8 @@ package se.scalablesolutions.akka.spring import org.apache.camel.CamelContext import org.springframework.beans.factory.{DisposableBean, InitializingBean, FactoryBean} -import se.scalablesolutions.akka.camel.{CamelContextManager, CamelService} + +import se.scalablesolutions.akka.camel.{CamelContextManager, CamelService, CamelServiceFactory} /** * Factory bean for a {@link CamelService}. 
@@ -31,7 +32,7 @@ class CamelServiceFactoryBean extends FactoryBean[CamelService] with Initializin if (camelContext ne null) { CamelContextManager.init(camelContext) } - instance = CamelService.newInstance + instance = CamelServiceFactory.createCamelService instance.load } diff --git a/akka-spring/src/main/scala/DispatcherBeanDefinitionParser.scala b/akka-spring/src/main/scala/DispatcherBeanDefinitionParser.scala index 826125fcfc..9d4a16ff9b 100644 --- a/akka-spring/src/main/scala/DispatcherBeanDefinitionParser.scala +++ b/akka-spring/src/main/scala/DispatcherBeanDefinitionParser.scala @@ -12,7 +12,7 @@ import org.springframework.beans.factory.xml.{ParserContext, AbstractSingleBeanD * Parser for custom namespace configuration. * @author michaelkober */ -class DispatcherBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with ActiveObjectParser with DispatcherParser { +class DispatcherBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with TypedActorParser with DispatcherParser { /* * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) */ diff --git a/akka-spring/src/main/scala/DispatcherParser.scala b/akka-spring/src/main/scala/DispatcherParser.scala index dc156c3d58..fb9855102e 100644 --- a/akka-spring/src/main/scala/DispatcherParser.scala +++ b/akka-spring/src/main/scala/DispatcherParser.scala @@ -31,8 +31,8 @@ trait DispatcherParser extends BeanParser { properties.name = mandatory(dispatcherElement, NAME) properties.dispatcherType = mandatory(dispatcherElement, TYPE) if (properties.dispatcherType == THREAD_BASED) { - if (dispatcherElement.getParentNode.getNodeName != "active-object") { - throw new IllegalArgumentException("Thread based dispatcher must be nested in active-object element!") + if (dispatcherElement.getParentNode.getNodeName != "typed-actor") { + throw 
new IllegalArgumentException("Thread based dispatcher must be nested in typed-actor element!") } } val threadPoolElement = DomUtils.getChildElementByTagName(dispatcherElement, THREAD_POOL_TAG); diff --git a/akka-spring/src/main/scala/StringReflect.scala b/akka-spring/src/main/scala/StringReflect.scala index 7dda9dba08..9e8cab8172 100644 --- a/akka-spring/src/main/scala/StringReflect.scala +++ b/akka-spring/src/main/scala/StringReflect.scala @@ -1,9 +1,11 @@ /** * Copyright (C) 2009-2010 Scalable Solutions AB */ + package se.scalablesolutions.akka.spring object StringReflect { + /** * Implicit conversion from String to StringReflect. */ @@ -15,10 +17,9 @@ object StringReflect { * @author michaelkober */ class StringReflect(val self: String) { + if (self == null || self == "") throw new IllegalArgumentException("Class name can't be null or empty string [" + self + "]") def toClass[T <: AnyRef]: Class[T] = { val clazz = Class.forName(self) clazz.asInstanceOf[Class[T]] } } - - diff --git a/akka-spring/src/main/scala/SupervisionBeanDefinitionParser.scala b/akka-spring/src/main/scala/SupervisionBeanDefinitionParser.scala index dde14f3cb2..5d430c9450 100644 --- a/akka-spring/src/main/scala/SupervisionBeanDefinitionParser.scala +++ b/akka-spring/src/main/scala/SupervisionBeanDefinitionParser.scala @@ -18,7 +18,7 @@ import org.springframework.util.xml.DomUtils * Parser for custom namespace for Akka declarative supervisor configuration. 
* @author michaelkober */ -class SupervisionBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with ActiveObjectParser { +class SupervisionBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with TypedActorParser { /* (non-Javadoc) * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) */ @@ -31,9 +31,9 @@ class SupervisionBeanDefinitionParser extends AbstractSingleBeanDefinitionParser */ private[akka] def parseSupervisor(element: Element, builder: BeanDefinitionBuilder) { val strategyElement = mandatoryElement(element, STRATEGY_TAG); - val activeObjectsElement = mandatoryElement(element, ACTIVE_OBJECTS_TAG); + val typedActorsElement = mandatoryElement(element, TYPED_ACTORS_TAG); parseRestartStrategy(strategyElement, builder) - parseActiveObjectList(activeObjectsElement, builder) + parseTypedActorList(typedActorsElement, builder) } private[akka] def parseRestartStrategy(element: Element, builder: BeanDefinitionBuilder) { @@ -46,10 +46,10 @@ class SupervisionBeanDefinitionParser extends AbstractSingleBeanDefinitionParser builder.addPropertyValue("restartStrategy", restartStrategy) } - private[akka] def parseActiveObjectList(element: Element, builder: BeanDefinitionBuilder) { - val activeObjects = DomUtils.getChildElementsByTagName(element, ACTIVE_OBJECT_TAG).toArray.toList.asInstanceOf[List[Element]] - val activeObjectProperties = activeObjects.map(parseActiveObject(_)) - builder.addPropertyValue("supervised", activeObjectProperties) + private[akka] def parseTypedActorList(element: Element, builder: BeanDefinitionBuilder) { + val typedActors = DomUtils.getChildElementsByTagName(element, TYPED_ACTOR_TAG).toArray.toList.asInstanceOf[List[Element]] + val typedActorProperties = typedActors.map(parseTypedActor(_)) + builder.addPropertyValue("supervised", typedActorProperties) } 
private def parseTrapExits(element: Element): Array[Class[_ <: Throwable]] = { diff --git a/akka-spring/src/main/scala/SupervisionFactoryBean.scala b/akka-spring/src/main/scala/SupervisionFactoryBean.scala index d8c44c3502..80a1f8a5fa 100644 --- a/akka-spring/src/main/scala/SupervisionFactoryBean.scala +++ b/akka-spring/src/main/scala/SupervisionFactoryBean.scala @@ -4,7 +4,7 @@ package se.scalablesolutions.akka.spring import org.springframework.beans.factory.config.AbstractFactoryBean -import se.scalablesolutions.akka.config.ActiveObjectConfigurator +import se.scalablesolutions.akka.config.TypedActorConfigurator import se.scalablesolutions.akka.config.JavaConfig._ import AkkaSpringConfigurationTags._ import reflect.BeanProperty @@ -14,20 +14,20 @@ import reflect.BeanProperty * Factory bean for supervisor configuration. * @author michaelkober */ -class SupervisionFactoryBean extends AbstractFactoryBean[ActiveObjectConfigurator] { +class SupervisionFactoryBean extends AbstractFactoryBean[TypedActorConfigurator] { @BeanProperty var restartStrategy: RestartStrategy = _ - @BeanProperty var supervised: List[ActiveObjectProperties] = _ + @BeanProperty var supervised: List[TypedActorProperties] = _ /* * @see org.springframework.beans.factory.FactoryBean#getObjectType() */ - def getObjectType: Class[ActiveObjectConfigurator] = classOf[ActiveObjectConfigurator] + def getObjectType: Class[TypedActorConfigurator] = classOf[TypedActorConfigurator] /* * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() */ - def createInstance: ActiveObjectConfigurator = { - val configurator = new ActiveObjectConfigurator() + def createInstance: TypedActorConfigurator = { + val configurator = new TypedActorConfigurator() configurator.configure( restartStrategy, @@ -36,9 +36,9 @@ class SupervisionFactoryBean extends AbstractFactoryBean[ActiveObjectConfigurato } /** - * Create configuration for ActiveObject + * Create configuration for TypedActor */ - private[akka] 
def createComponent(props: ActiveObjectProperties): Component = { + private[akka] def createComponent(props: TypedActorProperties): Component = { import StringReflect._ val lifeCycle = if (!props.lifecycle.isEmpty && props.lifecycle.equalsIgnoreCase(VAL_LIFECYCYLE_TEMPORARY)) new LifeCycle(new Temporary()) else new LifeCycle(new Permanent()) val isRemote = (props.host != null) && (!props.host.isEmpty) diff --git a/akka-spring/src/main/scala/ActiveObjectBeanDefinitionParser.scala b/akka-spring/src/main/scala/TypedActorBeanDefinitionParser.scala similarity index 80% rename from akka-spring/src/main/scala/ActiveObjectBeanDefinitionParser.scala rename to akka-spring/src/main/scala/TypedActorBeanDefinitionParser.scala index 0189147994..ec987aacc0 100644 --- a/akka-spring/src/main/scala/ActiveObjectBeanDefinitionParser.scala +++ b/akka-spring/src/main/scala/TypedActorBeanDefinitionParser.scala @@ -13,17 +13,17 @@ import org.w3c.dom.Element * Parser for custom namespace configuration. * @author michaelkober */ -class ActiveObjectBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with ActiveObjectParser { +class TypedActorBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with TypedActorParser { /* * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) */ override def doParse(element: Element, parserContext: ParserContext, builder: BeanDefinitionBuilder) { - val activeObjectConf = parseActiveObject(element) - activeObjectConf.setAsProperties(builder) + val typedActorConf = parseTypedActor(element) + typedActorConf.setAsProperties(builder) } /* * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element) */ - override def getBeanClass(element: Element): Class[_] = classOf[ActiveObjectFactoryBean] + override def 
getBeanClass(element: Element): Class[_] = classOf[TypedActorFactoryBean] } diff --git a/akka-spring/src/main/scala/TypedActorFactoryBean.scala b/akka-spring/src/main/scala/TypedActorFactoryBean.scala new file mode 100644 index 0000000000..0cb70e5ae3 --- /dev/null +++ b/akka-spring/src/main/scala/TypedActorFactoryBean.scala @@ -0,0 +1,153 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.spring + +import java.beans.PropertyDescriptor +import java.lang.reflect.Method +import javax.annotation.PreDestroy +import javax.annotation.PostConstruct +import reflect.BeanProperty + +import org.springframework.beans.BeanWrapperImpl +import org.springframework.beans.BeanWrapper +import org.springframework.beans.BeanUtils +import org.springframework.beans.BeansException +import org.springframework.beans.factory.BeanFactory +import org.springframework.beans.factory.config.AbstractFactoryBean +import org.springframework.context.{ApplicationContext,ApplicationContextAware} +import org.springframework.util.ReflectionUtils +import org.springframework.util.StringUtils + +import se.scalablesolutions.akka.actor.{AspectInitRegistry, TypedActorConfiguration, TypedActor} +import se.scalablesolutions.akka.dispatch.MessageDispatcher +import se.scalablesolutions.akka.util.{Logging, Duration} + +/** + * Exception to use when something goes wrong during bean creation. + * + * @author Johan Rask + */ +class AkkaBeansException(message: String, cause:Throwable) extends BeansException(message, cause) { + def this(message: String) = this(message, null) +} + +/** + * Factory bean for typed actors. 
+ * + * @author michaelkober + * @author Johan Rask + * @author Martin Krasser + * @author Jonas Bonér + */ +class TypedActorFactoryBean extends AbstractFactoryBean[AnyRef] with Logging with ApplicationContextAware { + import StringReflect._ + import AkkaSpringConfigurationTags._ + + @BeanProperty var interface: String = "" + @BeanProperty var implementation: String = "" + @BeanProperty var timeout: Long = _ + @BeanProperty var transactional: Boolean = false + @BeanProperty var host: String = "" + @BeanProperty var port: Int = _ + @BeanProperty var lifecycle: String = "" + @BeanProperty var dispatcher: DispatcherProperties = _ + @BeanProperty var scope: String = VAL_SCOPE_SINGLETON + @BeanProperty var property: PropertyEntries = _ + @BeanProperty var applicationContext: ApplicationContext = _ + + // Holds info about if deps has been set or not. Depends on + // if interface is specified or not. We must set deps on + // target instance if interface is specified + var hasSetDependecies = false + + override def isSingleton = scope.equals(VAL_SCOPE_SINGLETON) + + /* + * @see org.springframework.beans.factory.FactoryBean#getObjectType() + */ + def getObjectType: Class[AnyRef] = try { + implementation.toClass + } catch { + // required by contract to return null + case e: IllegalArgumentException => null + } + + /* + * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() + */ + def createInstance: AnyRef = { + var argumentList = "" + if (isRemote) argumentList += "r" + if (hasInterface) argumentList += "i" + if (hasDispatcher) argumentList += "d" + val ref = create(argumentList) + setProperties(AspectInitRegistry.initFor(ref).targetInstance) + ref + } + + /** + * Stop the typed actor if it is a singleton. 
+ */ + override def destroyInstance(instance: AnyRef) = TypedActor.stop(instance) + + private def setProperties(ref: AnyRef): AnyRef = { + if (hasSetDependecies) return ref + log.debug("Processing properties and dependencies for implementation class\n\t[%s]", implementation) + val beanWrapper = new BeanWrapperImpl(ref); + if (ref.isInstanceOf[ApplicationContextAware]) { + log.debug("Setting application context") + beanWrapper.setPropertyValue("applicationContext", applicationContext) + } + for (entry <- property.entryList) { + val propertyDescriptor = BeanUtils.getPropertyDescriptor(ref.getClass, entry.name) + val method = propertyDescriptor.getWriteMethod + if (StringUtils.hasText(entry.ref)) { + log.debug("Setting property %s with bean ref %s using method %s", entry.name, entry.ref, method.getName) + method.invoke(ref,getBeanFactory().getBean(entry.ref)) + } else if(StringUtils.hasText(entry.value)) { + log.debug("Setting property %s with value %s using method %s", entry.name, entry.value, method.getName) + beanWrapper.setPropertyValue(entry.name,entry.value) + } else throw new AkkaBeansException("Either property@ref or property@value must be set on property element") + } + ref + } + + private[akka] def create(argList: String): AnyRef = { + if (interface == null || interface == "") throw new AkkaBeansException( + "The 'interface' part of the 'akka:actor' element in the Spring config file can't be null or empty string") + if (implementation == null || implementation == "") throw new AkkaBeansException( + "The 'implementation' part of the 'akka:typed-actor' element in the Spring config file can't be null or empty string") + argList match { + case "ri" => TypedActor.newInstance(interface.toClass, implementation.toClass, createConfig.makeRemote(host, port)) + case "i" => TypedActor.newInstance(interface.toClass, implementation.toClass, createConfig) + case "id" => TypedActor.newInstance(interface.toClass, implementation.toClass, 
createConfig.dispatcher(dispatcherInstance)) + case "rid" => TypedActor.newInstance(interface.toClass, implementation.toClass, createConfig.makeRemote(host, port).dispatcher(dispatcherInstance)) + case _ => TypedActor.newInstance(interface.toClass, implementation.toClass, createConfig) + // case "rd" => TypedActor.newInstance(implementation.toClass, createConfig.makeRemote(host, port).dispatcher(dispatcherInstance)) + // case "r" => TypedActor.newInstance(implementation.toClass, createConfig.makeRemote(host, port)) + // case "d" => TypedActor.newInstance(implementation.toClass, createConfig.dispatcher(dispatcherInstance)) + } + } + + private[akka] def createConfig: TypedActorConfiguration = { + val config = new TypedActorConfiguration().timeout(Duration(timeout, "millis")) + if (transactional) config.makeTransactionRequired + config + } + + private[akka] def isRemote = (host != null) && (!host.isEmpty) + + private[akka] def hasInterface = (interface != null) && (!interface.isEmpty) + + private[akka] def hasDispatcher = + (dispatcher != null) && + (dispatcher.dispatcherType != null) && + (!dispatcher.dispatcherType.isEmpty) + + private[akka] def dispatcherInstance: MessageDispatcher = { + import DispatcherFactoryBean._ + createNewInstance(dispatcher) + } +} diff --git a/akka-spring/src/main/scala/ActiveObjectParser.scala b/akka-spring/src/main/scala/TypedActorParser.scala similarity index 55% rename from akka-spring/src/main/scala/ActiveObjectParser.scala rename to akka-spring/src/main/scala/TypedActorParser.scala index 8838360a44..5f4d68f297 100644 --- a/akka-spring/src/main/scala/ActiveObjectParser.scala +++ b/akka-spring/src/main/scala/TypedActorParser.scala @@ -10,24 +10,22 @@ import scala.collection.JavaConversions._ import se.scalablesolutions.akka.actor.IllegalActorStateException /** - * Parser trait for custom namespace configuration for active-object. + * Parser trait for custom namespace configuration for typed-actor. 
* @author michaelkober * @author Johan Rask * @author Martin Krasser */ -trait ActiveObjectParser extends BeanParser with DispatcherParser { +trait TypedActorParser extends BeanParser with DispatcherParser { import AkkaSpringConfigurationTags._ /** - * Parses the given element and returns a ActiveObjectProperties. + * Parses the given element and returns a TypedActorProperties. * @param element dom element to parse - * @return configuration for the active object + * @return configuration for the typed actor */ - def parseActiveObject(element: Element): ActiveObjectProperties = { - val objectProperties = new ActiveObjectProperties() + def parseTypedActor(element: Element): TypedActorProperties = { + val objectProperties = new TypedActorProperties() val remoteElement = DomUtils.getChildElementByTagName(element, REMOTE_TAG); - val restartCallbacksElement = DomUtils.getChildElementByTagName(element, RESTART_CALLBACKS_TAG); - val shutdownCallbackElement = DomUtils.getChildElementByTagName(element, SHUTDOWN_CALLBACK_TAG); val dispatcherElement = DomUtils.getChildElementByTagName(element, DISPATCHER_TAG) val propertyEntries = DomUtils.getChildElementsByTagName(element,PROPERTYENTRY_TAG) @@ -36,29 +34,17 @@ trait ActiveObjectParser extends BeanParser with DispatcherParser { objectProperties.port = mandatory(remoteElement, PORT).toInt } - if (restartCallbacksElement != null) { - objectProperties.preRestart = restartCallbacksElement.getAttribute(PRE_RESTART) - objectProperties.postRestart = restartCallbacksElement.getAttribute(POST_RESTART) - if ((objectProperties.preRestart.isEmpty) && (objectProperties.preRestart.isEmpty)) { - throw new IllegalActorStateException("At least one of pre or post must be defined.") - } - } - - if (shutdownCallbackElement != null) { - objectProperties.shutdown = shutdownCallbackElement.getAttribute("method") - } - if (dispatcherElement != null) { val dispatcherProperties = parseDispatcher(dispatcherElement) objectProperties.dispatcher = 
dispatcherProperties } - for(element <- propertyEntries) { - val entry = new PropertyEntry() - entry.name = element.getAttribute("name"); - entry.value = element.getAttribute("value") - entry.ref = element.getAttribute("ref") - objectProperties.propertyEntries.add(entry) + for (element <- propertyEntries) { + val entry = new PropertyEntry + entry.name = element.getAttribute("name"); + entry.value = element.getAttribute("value") + entry.ref = element.getAttribute("ref") + objectProperties.propertyEntries.add(entry) } try { @@ -69,7 +55,7 @@ trait ActiveObjectParser extends BeanParser with DispatcherParser { throw nfe } - objectProperties.target = mandatory(element, TARGET) + objectProperties.target = mandatory(element, IMPLEMENTATION) objectProperties.transactional = if (element.getAttribute(TRANSACTIONAL).isEmpty) false else element.getAttribute(TRANSACTIONAL).toBoolean if (!element.getAttribute(INTERFACE).isEmpty) { diff --git a/akka-spring/src/main/scala/ActiveObjectProperties.scala b/akka-spring/src/main/scala/TypedActorProperties.scala similarity index 76% rename from akka-spring/src/main/scala/ActiveObjectProperties.scala rename to akka-spring/src/main/scala/TypedActorProperties.scala index 0f4b09d559..46c9cd35aa 100644 --- a/akka-spring/src/main/scala/ActiveObjectProperties.scala +++ b/akka-spring/src/main/scala/TypedActorProperties.scala @@ -8,18 +8,15 @@ import org.springframework.beans.factory.support.BeanDefinitionBuilder import AkkaSpringConfigurationTags._ /** - * Data container for active object configuration data. + * Data container for typed actor configuration data. 
* @author michaelkober * @author Martin Krasser */ -class ActiveObjectProperties { +class TypedActorProperties { var target: String = "" var timeout: Long = _ var interface: String = "" var transactional: Boolean = false - var preRestart: String = "" - var postRestart: String = "" - var shutdown: String = "" var host: String = "" var port: Int = _ var lifecycle: String = "" @@ -35,11 +32,8 @@ class ActiveObjectProperties { def setAsProperties(builder: BeanDefinitionBuilder) { builder.addPropertyValue(HOST, host) builder.addPropertyValue(PORT, port) - builder.addPropertyValue(PRE_RESTART, preRestart) - builder.addPropertyValue(POST_RESTART, postRestart) - builder.addPropertyValue(SHUTDOWN, shutdown) builder.addPropertyValue(TIMEOUT, timeout) - builder.addPropertyValue(TARGET, target) + builder.addPropertyValue(IMPLEMENTATION, target) builder.addPropertyValue(INTERFACE, interface) builder.addPropertyValue(TRANSACTIONAL, transactional) builder.addPropertyValue(LIFECYCLE, lifecycle) diff --git a/akka-spring/src/test/java/se/scalablesolutions/akka/spring/Pojo.java b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/Pojo.java index 04995b75c8..f588fc777a 100644 --- a/akka-spring/src/test/java/se/scalablesolutions/akka/spring/Pojo.java +++ b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/Pojo.java @@ -2,38 +2,50 @@ package se.scalablesolutions.akka.spring; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; + import javax.annotation.PreDestroy; import javax.annotation.PostConstruct; -public class Pojo implements PojoInf,ApplicationContextAware { +import se.scalablesolutions.akka.actor.*; - private String string; +public class Pojo extends TypedActor implements PojoInf, ApplicationContextAware { - private boolean gotApplicationContext = false; - private boolean postConstructInvoked = false; - - public boolean gotApplicationContext() { - return gotApplicationContext; - } - public void 
setApplicationContext(ApplicationContext context) { - gotApplicationContext = true; - } + private String stringFromVal; + private String stringFromRef; - public void setString(String s) { - string = s; - } + private boolean gotApplicationContext = false; + private boolean initInvoked = false; + + public boolean gotApplicationContext() { + return gotApplicationContext; + } + + public void setApplicationContext(ApplicationContext context) { + gotApplicationContext = true; + } - public String getString() { - return string; - } - - @PostConstruct - public void create() { - postConstructInvoked = true; - } + public String getStringFromVal() { + return stringFromVal; + } - public boolean isPostConstructInvoked() { - return postConstructInvoked; + public void setStringFromVal(String s) { + stringFromVal = s; + } + + public String getStringFromRef() { + return stringFromRef; + } + + public void setStringFromRef(String s) { + stringFromRef = s; + } + + @Override + public void init() { + initInvoked = true; + } + + public boolean isInitInvoked() { + return initInvoked; + } } - - } diff --git a/akka-spring/src/test/java/se/scalablesolutions/akka/spring/PojoInf.java b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/PojoInf.java index 70d64245db..9ebf80e89b 100644 --- a/akka-spring/src/test/java/se/scalablesolutions/akka/spring/PojoInf.java +++ b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/PojoInf.java @@ -5,10 +5,9 @@ import javax.annotation.PostConstruct; public interface PojoInf { - public String getString(); - public boolean gotApplicationContext(); - public boolean isPostConstructInvoked(); - - @PostConstruct - public void create(); - } + public String getStringFromVal(); + public String getStringFromRef(); + public boolean gotApplicationContext(); + public boolean isInitInvoked(); + +} diff --git a/akka-spring/src/test/java/se/scalablesolutions/akka/spring/SampleBean.java 
b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/SampleBean.java index e8adaa38e7..2828c42bcb 100644 --- a/akka-spring/src/test/java/se/scalablesolutions/akka/spring/SampleBean.java +++ b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/SampleBean.java @@ -1,22 +1,25 @@ package se.scalablesolutions.akka.spring; -import se.scalablesolutions.akka.actor.annotation.shutdown; +import se.scalablesolutions.akka.actor.*; -public class SampleBean { +public class SampleBean extends TypedActor implements SampleBeanIntf { - public boolean down; + private boolean down; public SampleBean() { down = false; } + public boolean down() { + return down; + } + public String foo(String s) { return "hello " + s; } - @shutdown + @Override public void shutdown() { down = true; } - } diff --git a/akka-spring/src/test/java/se/scalablesolutions/akka/spring/SampleBeanIntf.java b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/SampleBeanIntf.java new file mode 100644 index 0000000000..ec189ecd5f --- /dev/null +++ b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/SampleBeanIntf.java @@ -0,0 +1,6 @@ +package se.scalablesolutions.akka.spring; + +public interface SampleBeanIntf { + public boolean down(); + public String foo(String s); + } diff --git a/akka-spring/src/test/java/se/scalablesolutions/akka/spring/SampleRoute.java b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/SampleRoute.java index 3865ea1615..5e7e5ea126 100644 --- a/akka-spring/src/test/java/se/scalablesolutions/akka/spring/SampleRoute.java +++ b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/SampleRoute.java @@ -6,7 +6,6 @@ public class SampleRoute extends RouteBuilder { @Override public void configure() throws Exception { - from("direct:test").to("active-object:sample?method=foo"); + from("direct:test").to("typed-actor:sample?method=foo"); } - } diff --git a/akka-spring/src/test/resources/appContext.xml b/akka-spring/src/test/resources/appContext.xml index 
e9a651b735..29dc5dbd87 100644 --- a/akka-spring/src/test/resources/appContext.xml +++ b/akka-spring/src/test/resources/appContext.xml @@ -7,27 +7,30 @@ http://www.akkasource.org/schema/akka http://scalablesolutions.se/akka/akka-0.10.xsd"> - - - + + + - - - + + + + - + - - - - diff --git a/akka-spring/src/test/resources/appContextCamelServiceCustom.xml b/akka-spring/src/test/resources/appContextCamelServiceCustom.xml index ba18a325d0..84f442ebf6 100644 --- a/akka-spring/src/test/resources/appContextCamelServiceCustom.xml +++ b/akka-spring/src/test/resources/appContextCamelServiceCustom.xml @@ -20,6 +20,8 @@ http://camel.apache.org/schema/spring/camel-spring.xsd"> - - + diff --git a/akka-spring/src/test/resources/failing-appContext.xml b/akka-spring/src/test/resources/failing-appContext.xml index 1acab0e830..66382056bd 100644 --- a/akka-spring/src/test/resources/failing-appContext.xml +++ b/akka-spring/src/test/resources/failing-appContext.xml @@ -7,15 +7,15 @@ http://www.akkasource.org/schema/akka classpath:se/scalablesolutions/akka/spring/akka-0.10.xsd"> - - - - - - - + + + + + + + \ No newline at end of file diff --git a/akka-spring/src/test/scala/CamelServiceSpringFeatureTest.scala b/akka-spring/src/test/scala/CamelServiceSpringFeatureTest.scala index d260afa44f..e8b0d727c3 100644 --- a/akka-spring/src/test/scala/CamelServiceSpringFeatureTest.scala +++ b/akka-spring/src/test/scala/CamelServiceSpringFeatureTest.scala @@ -6,7 +6,7 @@ import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FeatureSpec} import org.springframework.context.support.ClassPathXmlApplicationContext import se.scalablesolutions.akka.camel.CamelContextManager -import se.scalablesolutions.akka.actor.{ActiveObject, ActorRegistry} +import se.scalablesolutions.akka.actor.{TypedActor, ActorRegistry} class CamelServiceSpringFeatureTest extends FeatureSpec with BeforeAndAfterEach with BeforeAndAfterAll { override protected def beforeAll = { @@ -19,24 +19,23 @@ class 
CamelServiceSpringFeatureTest extends FeatureSpec with BeforeAndAfterEach feature("start CamelService from Spring application context") { import CamelContextManager._ - - scenario("with a custom CamelContext and access a registered active object") { + scenario("with a custom CamelContext and access a registered typed actor") { val appctx = new ClassPathXmlApplicationContext("/appContextCamelServiceCustom.xml") assert(context.isInstanceOf[SpringCamelContext]) assert("hello sample" === template.requestBody("direct:test", "sample")) appctx.close } - scenario("with a default CamelContext and access a registered active object") { + scenario("with a default CamelContext and access a registered typed actor") { val appctx = new ClassPathXmlApplicationContext("/appContextCamelServiceDefault.xml") // create a custom registry val registry = new SimpleRegistry - registry.put("custom", ActiveObject.newInstance(classOf[SampleBean])) + registry.put("custom", TypedActor.newInstance(classOf[SampleBeanIntf], classOf[SampleBean])) // set custom registry in DefaultCamelContext assert(context.isInstanceOf[DefaultCamelContext]) context.asInstanceOf[DefaultCamelContext].setRegistry(registry) - // access registered active object - assert("hello sample" === template.requestBody("active-object:custom?method=foo", "sample")) + // access registered typed actor + assert("hello sample" === template.requestBody("typed-actor:custom?method=foo", "sample")) appctx.close } } diff --git a/akka-spring/src/test/scala/DispatcherBeanDefinitionParserTest.scala b/akka-spring/src/test/scala/DispatcherBeanDefinitionParserTest.scala index bd5490a1b0..64e9ea2425 100644 --- a/akka-spring/src/test/scala/DispatcherBeanDefinitionParserTest.scala +++ b/akka-spring/src/test/scala/DispatcherBeanDefinitionParserTest.scala @@ -89,7 +89,7 @@ class DispatcherBeanDefinitionParserTest extends Spec with ShouldMatchers { } - it("should throw IllegalArgumentException when configuring a thread based dispatcher without 
ActiveObject") { + it("should throw IllegalArgumentException when configuring a thread based dispatcher without TypedActor") { val xml = evaluating { parser.parseDispatcher(dom(xml).getDocumentElement) } should produce [IllegalArgumentException] } diff --git a/akka-spring/src/test/scala/SupervisionBeanDefinitionParserTest.scala b/akka-spring/src/test/scala/SupervisionBeanDefinitionParserTest.scala index ffc1f7a95d..a5fc44007f 100644 --- a/akka-spring/src/test/scala/SupervisionBeanDefinitionParserTest.scala +++ b/akka-spring/src/test/scala/SupervisionBeanDefinitionParserTest.scala @@ -26,8 +26,8 @@ class SupervisionBeanDefinitionParserTest extends Spec with ShouldMatchers { val parser = new Parser() val builder = BeanDefinitionBuilder.genericBeanDefinition("foo.bar.Foo") - it("should be able to parse active object configuration") { - val props = parser.parseActiveObject(createActiveObjectElement); + it("should be able to parse typed actor configuration") { + val props = parser.parseTypedActor(createTypedActorElement); assert(props != null) assert(props.timeout == 1000) assert(props.target == "foo.bar.MyPojo") @@ -45,9 +45,9 @@ class SupervisionBeanDefinitionParserTest extends Spec with ShouldMatchers { expect(1000) { strategy.withinTimeRange } } - it("should parse the supervised active objects") { + it("should parse the supervised typed actors") { parser.parseSupervisor(createSupervisorElement, builder); - val supervised = builder.getBeanDefinition.getPropertyValues.getPropertyValue("supervised").getValue.asInstanceOf[List[ActiveObjectProperties]] + val supervised = builder.getBeanDefinition.getPropertyValues.getPropertyValue("supervised").getValue.asInstanceOf[List[TypedActorProperties]] assert(supervised != null) expect(4) { supervised.length } val iterator = supervised.iterator @@ -59,9 +59,6 @@ class SupervisionBeanDefinitionParserTest extends Spec with ShouldMatchers { expect("foo.bar.Bar") { prop2.target } expect("foo.bar.MyPojo") { prop3.target } 
expect("foo.bar.MyPojo") { prop4.target } - expect("preRestart") { prop3.preRestart } - expect("postRestart") { prop3.postRestart } - expect("shutdown") { prop4.shutdown } expect("permanent") { prop1.lifecycle } expect("temporary") { prop4.lifecycle } } @@ -75,9 +72,9 @@ class SupervisionBeanDefinitionParserTest extends Spec with ShouldMatchers { } } - private def createActiveObjectElement : Element = { - val xml = dom(xml).getDocumentElement @@ -91,16 +88,16 @@ class SupervisionBeanDefinitionParserTest extends Spec with ShouldMatchers { java.lang.NullPointerException - - - - + + + + - - + + - - + + dom(xml).getDocumentElement } @@ -113,9 +110,9 @@ class SupervisionBeanDefinitionParserTest extends Spec with ShouldMatchers { java.io.IOException - - - + + + dom(xml).getDocumentElement } @@ -124,10 +121,10 @@ class SupervisionBeanDefinitionParserTest extends Spec with ShouldMatchers { val xml = - - - - + + + + dom(xml).getDocumentElement } diff --git a/akka-spring/src/test/scala/SupervisionFactoryBeanTest.scala b/akka-spring/src/test/scala/SupervisionFactoryBeanTest.scala index 7313725202..44d1cbd079 100644 --- a/akka-spring/src/test/scala/SupervisionFactoryBeanTest.scala +++ b/akka-spring/src/test/scala/SupervisionFactoryBeanTest.scala @@ -8,7 +8,7 @@ import org.scalatest.matchers.ShouldMatchers import org.scalatest.junit.JUnitRunner import org.junit.runner.RunWith import se.scalablesolutions.akka.config.JavaConfig._ -import se.scalablesolutions.akka.config.ActiveObjectConfigurator +import se.scalablesolutions.akka.config.TypedActorConfigurator private[akka] class Foo @@ -16,10 +16,10 @@ private[akka] class Foo class SupervisionFactoryBeanTest extends Spec with ShouldMatchers { val restartStrategy = new RestartStrategy(new AllForOne(), 3, 1000, Array(classOf[Throwable])) - val activeObjects = List(createActiveObjectProperties("se.scalablesolutions.akka.spring.Foo", 1000L)) + val typedActors = List(createTypedActorProperties("se.scalablesolutions.akka.spring.Foo", 
1000L)) - def createActiveObjectProperties(target: String, timeout: Long) : ActiveObjectProperties = { - val properties = new ActiveObjectProperties() + def createTypedActorProperties(target: String, timeout: Long) : TypedActorProperties = { + val properties = new TypedActorProperties() properties.target = target properties.timeout = timeout properties @@ -30,12 +30,12 @@ class SupervisionFactoryBeanTest extends Spec with ShouldMatchers { it("should have java getters and setters for all properties") { bean.setRestartStrategy(restartStrategy) assert(bean.getRestartStrategy == restartStrategy) - bean.setSupervised(activeObjects) - assert(bean.getSupervised == activeObjects) + bean.setSupervised(typedActors) + assert(bean.getSupervised == typedActors) } - it("should return the object type ActiveObjectConfigurator") { - assert(bean.getObjectType == classOf[ActiveObjectConfigurator]) + it("should return the object type TypedActorConfigurator") { + assert(bean.getObjectType == classOf[TypedActorConfigurator]) } } } diff --git a/akka-spring/src/test/scala/ActiveObjectBeanDefinitionParserTest.scala b/akka-spring/src/test/scala/TypedActorBeanDefinitionParserTest.scala similarity index 56% rename from akka-spring/src/test/scala/ActiveObjectBeanDefinitionParserTest.scala rename to akka-spring/src/test/scala/TypedActorBeanDefinitionParserTest.scala index dc48ecc4b1..7c0dd30f37 100644 --- a/akka-spring/src/test/scala/ActiveObjectBeanDefinitionParserTest.scala +++ b/akka-spring/src/test/scala/TypedActorBeanDefinitionParserTest.scala @@ -12,25 +12,25 @@ import ScalaDom._ import org.w3c.dom.Element /** - * Test for ActiveObjectParser + * Test for TypedActorParser * @author michaelkober */ @RunWith(classOf[JUnitRunner]) -class ActiveObjectBeanDefinitionParserTest extends Spec with ShouldMatchers { - private class Parser extends ActiveObjectParser +class TypedActorBeanDefinitionParserTest extends Spec with ShouldMatchers { + private class Parser extends TypedActorParser - 
describe("An ActiveObjectParser") { + describe("An TypedActorParser") { val parser = new Parser() - it("should parse the active object configuration") { - val xml = - + - val props = parser.parseActiveObject(dom(xml).getDocumentElement); + val props = parser.parseTypedActor(dom(xml).getDocumentElement); assert(props != null) assert(props.timeout === 1000) assert(props.target === "foo.bar.MyPojo") @@ -40,29 +40,29 @@ class ActiveObjectBeanDefinitionParserTest extends Spec with ShouldMatchers { } it("should throw IllegalArgumentException on missing mandatory attributes") { - val xml = - evaluating { parser.parseActiveObject(dom(xml).getDocumentElement) } should produce [IllegalArgumentException] + evaluating { parser.parseTypedActor(dom(xml).getDocumentElement) } should produce [IllegalArgumentException] } - it("should parse ActiveObjects configuration with dispatcher") { - val xml = - - val props = parser.parseActiveObject(dom(xml).getDocumentElement); + + val props = parser.parseTypedActor(dom(xml).getDocumentElement); assert(props != null) assert(props.dispatcher.dispatcherType === "thread-based") } - it("should parse remote ActiveObjects configuration") { - val xml = - - val props = parser.parseActiveObject(dom(xml).getDocumentElement); + + val props = parser.parseTypedActor(dom(xml).getDocumentElement); assert(props != null) assert(props.host === "com.some.host") assert(props.port === 9999) diff --git a/akka-spring/src/test/scala/ActiveObjectFactoryBeanTest.scala b/akka-spring/src/test/scala/TypedActorFactoryBeanTest.scala similarity index 51% rename from akka-spring/src/test/scala/ActiveObjectFactoryBeanTest.scala rename to akka-spring/src/test/scala/TypedActorFactoryBeanTest.scala index 68dac8e97c..4278cf14ac 100644 --- a/akka-spring/src/test/scala/ActiveObjectFactoryBeanTest.scala +++ b/akka-spring/src/test/scala/TypedActorFactoryBeanTest.scala @@ -3,30 +3,33 @@ */ package se.scalablesolutions.akka.spring -import org.scalatest.Spec -import 
org.scalatest.matchers.ShouldMatchers -import org.scalatest.junit.JUnitRunner +import se.scalablesolutions.akka.actor.ActorRegistry; + import org.junit.runner.RunWith -import org.springframework.core.io.ResourceEditor -import org.springframework.context.support.ClassPathXmlApplicationContext; +import org.springframework.context.support.ClassPathXmlApplicationContext +import org.scalatest.junit.JUnitRunner +import org.scalatest.{BeforeAndAfterAll, Spec} +import org.scalatest.matchers.ShouldMatchers /** - * Test for ActiveObjectFactoryBean + * Test for TypedActorFactoryBean * @author michaelkober */ @RunWith(classOf[JUnitRunner]) -class ActiveObjectFactoryBeanTest extends Spec with ShouldMatchers { +class TypedActorFactoryBeanTest extends Spec with ShouldMatchers with BeforeAndAfterAll { - describe("A ActiveObjectFactoryBean") { - val bean = new ActiveObjectFactoryBean + override protected def afterAll = ActorRegistry.shutdownAll + + describe("A TypedActorFactoryBean") { + val bean = new TypedActorFactoryBean it("should have java getters and setters for all properties") { - bean.setTarget("java.lang.String") - assert(bean.getTarget == "java.lang.String") + bean.setImplementation("java.lang.String") + assert(bean.getImplementation == "java.lang.String") bean.setTimeout(1000) assert(bean.getTimeout == 1000) } - it("should create a remote active object when a host is set") { + it("should create a remote typed actor when a host is set") { bean.setHost("some.host.com"); assert(bean.isRemote) } @@ -36,7 +39,7 @@ class ActiveObjectFactoryBeanTest extends Spec with ShouldMatchers { assert(bean.hasInterface) } - it("should create an active object with dispatcher if dispatcher is set") { + it("should create an typed actor with dispatcher if dispatcher is set") { val props = new DispatcherProperties() props.dispatcherType = "executor-based-event-driven" bean.setDispatcher(props); @@ -44,51 +47,49 @@ class ActiveObjectFactoryBeanTest extends Spec with ShouldMatchers { } 
it("should return the object type") { - bean.setTarget("java.lang.String") + bean.setImplementation("java.lang.String") assert(bean.getObjectType == classOf[String]) } - it("should create a proxy of type ResourceEditor") { - val bean = new ActiveObjectFactoryBean() - // we must have a java class here - bean.setTarget("org.springframework.core.io.ResourceEditor") + it("should create a proxy of type PojoInf") { + val bean = new TypedActorFactoryBean() + bean.setInterface("se.scalablesolutions.akka.spring.PojoInf") + bean.setImplementation("se.scalablesolutions.akka.spring.Pojo") + bean.timeout = 1000 val entries = new PropertyEntries() val entry = new PropertyEntry() - entry.name = "source" - entry.value = "sourceBeanIsAString" + entry.name = "stringFromVal" + entry.value = "tests rock" entries.add(entry) bean.setProperty(entries) - assert(bean.getObjectType == classOf[ResourceEditor]) + assert(classOf[PojoInf].isAssignableFrom(bean.getObjectType)) // Check that we have injected the depencency correctly - val target:ResourceEditor = bean.createInstance.asInstanceOf[ResourceEditor] - assert(target.getSource === entry.value) + val target = bean.createInstance.asInstanceOf[PojoInf] + assert(target.getStringFromVal === entry.value) } it("should create an application context and verify dependency injection") { var ctx = new ClassPathXmlApplicationContext("appContext.xml"); - val target:ResourceEditor = ctx.getBean("bean").asInstanceOf[ResourceEditor] - assert(target.getSource === "someString") - - val pojoInf = ctx.getBean("pojoInf").asInstanceOf[PojoInf]; - println("pojoInf = " + pojoInf.getString) - Thread.sleep(200) - assert(pojoInf.isPostConstructInvoked) - assert(pojoInf.getString == "akka rocks") - assert(pojoInf.gotApplicationContext) + val ta = ctx.getBean("typedActor").asInstanceOf[PojoInf]; + assert(ta.isInitInvoked) + assert(ta.getStringFromVal == "akka rocks") + assert(ta.getStringFromRef == "spring rocks") + assert(ta.gotApplicationContext) + ctx.close } - 
it("should stop the created active object when scope is singleton and the context is closed") { + it("should stop the created typed actor when scope is singleton and the context is closed") { var ctx = new ClassPathXmlApplicationContext("appContext.xml"); - val target = ctx.getBean("bean-singleton").asInstanceOf[SampleBean] + val target = ctx.getBean("bean-singleton").asInstanceOf[SampleBeanIntf] assert(!target.down) ctx.close assert(target.down) } - it("should not stop the created active object when scope is prototype and the context is closed") { + it("should not stop the created typed actor when scope is prototype and the context is closed") { var ctx = new ClassPathXmlApplicationContext("appContext.xml"); - val target = ctx.getBean("bean-prototype").asInstanceOf[SampleBean] + val target = ctx.getBean("bean-prototype").asInstanceOf[SampleBeanIntf] assert(!target.down) ctx.close assert(!target.down) diff --git a/config/akka-reference.conf b/config/akka-reference.conf index c96782406b..33806c1d3d 100644 --- a/config/akka-reference.conf +++ b/config/akka-reference.conf @@ -5,67 +5,78 @@ # This file has all the default settings, so all these could be removed with no visible effect. # Modify as needed. - +log { filename = "./logs/akka.log" roll = "daily" # Options: never, hourly, daily, sunday/monday/... 
level = "debug" # Options: fatal, critical, error, warning, info, debug, trace console = on # syslog_host = "" # syslog_server_name = "" - - + akka { # example of package level logging settings + node = "se.scalablesolutions.akka" + level = "debug" + } +} + +akka { version = "0.10" - # FQN (Fully Qualified Name) to the class doing initial active object/actor + time-unit = "seconds" # default timeout time unit for all timeout properties throughout the config + + # FQN (Fully Qualified Name) to the class doing initial typed actor/actor # supervisor bootstrap, should be defined in default constructor boot = ["sample.camel.Boot", "sample.rest.java.Boot", "sample.rest.scala.Boot", "sample.security.Boot"] - - timeout = 5000 # default timeout for future based invocations + actor { + timeout = 5 # default timeout for future based invocations serialize-messages = off # does a deep clone of (non-primitive) messages to ensure immutability throughput = 5 # default throughput for ExecutorBasedEventDrivenDispatcher - + } - - fair = on # should global transactions be fair or non-fair (non fair yield better performance) - jta-aware = off # 'on' means that if there JTA Transaction Manager available then the STM will - # begin (or join), commit or rollback the JTA transaction. Default is 'off'. - + stm { + fair = on # should global transactions be fair or non-fair (non fair yield better performance) + jta-aware = off # 'on' means that if there JTA Transaction Manager available then the STM will + # begin (or join), commit or rollback the JTA transaction. Default is 'off'. + timeout = 5 # default timeout for blocking transactions and transaction set (in unit defined by + # the time-unit property) + } - - provider = "from-jndi" # Options: "from-jndi" (means that Akka will try to detect a TransactionManager in the JNDI) - # "atomikos" (means that Akka will use the Atomikos based JTA impl in 'akka-jta', - # e.g. you need the akka-jta JARs on classpath). 
- timeout = 60000 - + jta { + provider = "from-jndi" # Options: "from-jndi" (means that Akka will try to detect a TransactionManager in the JNDI) + # "atomikos" (means that Akka will use the Atomikos based JTA impl in 'akka-jta', + # e.g. you need the akka-jta JARs on classpath). + timeout = 60 + } - + rest { service = on hostname = "localhost" port = 9998 - filters = ["se.scalablesolutions.akka.security.AkkaSecurityFilterFactory"] # List with all jersey filters to use - resource_packages = ["sample.rest.scala","sample.rest.java","sample.security"] # List with all resource packages for your Jersey services + filters = ["se.scalablesolutions.akka.security.AkkaSecurityFilterFactory"] # List with all jersey filters to use + resource_packages = ["sample.rest.scala", + "sample.rest.java", + "sample.security"] # List with all resource packages for your Jersey services authenticator = "sample.security.BasicAuthenticationService" # The authentication service to use. Need to be overridden (uses sample now) - + #maxInactiveActivity = 60000 #Atmosphere CometSupport maxInactiveActivity #IF you are using a KerberosAuthenticationActor - # + # kerberos { # servicePrincipal = "HTTP/localhost@EXAMPLE.COM" # keyTabLocation = "URL to keytab" # kerberosDebug = "true" # realm = "EXAMPLE.COM" - # - + # } + } - + remote { compression-scheme = "zlib" # Options: "zlib" (lzf to come), leave out for no compression zlib-compression-level = 6 # Options: 0-9 (1 being fastest and 9 being the most compressed), default is 6 - - service = off #on / off + ssl { + service = off #on / off (THIS FEATURE IS NOT ACTIVATED YET, STAY TUNED) #You can either use java command-line options or use the settings below @@ -79,43 +90,43 @@ #This can be useful for debugging debug = off #if on, very verbose debug, same as -Djavax.net.debug=ssl - + } - + cluster { service = on name = "default" # The name of the cluster serializer = "se.scalablesolutions.akka.serialization.Serializer$Java$" # FQN of the serializer 
class - + } - + server { service = on hostname = "localhost" port = 9999 - connection-timeout = 1000 # in millis (1 sec default) - + connection-timeout = 1 + } - - reconnect-delay = 5000 # in millis (5 sec default) - read-timeout = 10000 # in millis (10 sec default) - - + client { + reconnect-delay = 5 + read-timeout = 10 + } + } - - + storage { + cassandra { hostname = "127.0.0.1" # IP address or hostname of one of the Cassandra cluster's seeds port = 9160 consistency-level = "QUORUM" # Options: ZERO, ONE, QUORUM, DCQUORUM, DCQUORUMSYNC, ALL, ANY - + } - + mongodb { hostname = "127.0.0.1" # IP address or hostname of the MongoDB DB instance port = 27017 dbname = "mydb" - + } - + redis { hostname = "127.0.0.1" # IP address or hostname of the Redis instance port = 6379 - - - + } + } +} diff --git a/embedded-repo/com/redis/redisclient/2.8.0-1.4/redisclient-2.8.0-1.4.jar b/embedded-repo/com/redis/redisclient/2.8.0-1.4/redisclient-2.8.0-1.4.jar index a5c824b19e..b811e6ab92 100644 Binary files a/embedded-repo/com/redis/redisclient/2.8.0-1.4/redisclient-2.8.0-1.4.jar and b/embedded-repo/com/redis/redisclient/2.8.0-1.4/redisclient-2.8.0-1.4.jar differ diff --git a/embedded-repo/net/lag/configgy/2.8.0-1.5.5/configgy-2.8.0-1.5.5.jar b/embedded-repo/net/lag/configgy/2.8.0-1.5.5/configgy-2.8.0-1.5.5.jar new file mode 100644 index 0000000000..8568788a70 Binary files /dev/null and b/embedded-repo/net/lag/configgy/2.8.0-1.5.5/configgy-2.8.0-1.5.5.jar differ diff --git a/embedded-repo/net/lag/configgy/2.8.0-1.5.5/configgy-2.8.0-1.5.5.pom b/embedded-repo/net/lag/configgy/2.8.0-1.5.5/configgy-2.8.0-1.5.5.pom new file mode 100644 index 0000000000..1a3c2f8f83 --- /dev/null +++ b/embedded-repo/net/lag/configgy/2.8.0-1.5.5/configgy-2.8.0-1.5.5.pom @@ -0,0 +1,46 @@ + + + 4.0.0 + net.lag + configgy + jar + 2.8.0-1.5.5 + + + Apache 2 + http://www.apache.org/licenses/LICENSE-2.0.txt + repo + + + + + org.scala-tools + vscaladoc + 1.1-md-3 + compile + + + org.scala-lang + scala-library + 
2.8.0 + compile + + + + + PublicReleasesRepository + Public Releases Repository + http://maven/content/groups/public/ + + + PublicSnapshots + Public Snapshots + http://maven/content/groups/public-snapshots/ + + + ScalaToolsMaven2Repository + Scala-Tools Maven2 Repository + http://scala-tools.org/repo-releases/ + + + \ No newline at end of file diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar new file mode 100644 index 0000000000..7aa1393153 Binary files /dev/null and b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar differ diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar.md5 b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar.md5 new file mode 100644 index 0000000000..46500d76fc --- /dev/null +++ b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar.md5 @@ -0,0 +1 @@ +607f775c6b2ec1954fe60717875aefea \ No newline at end of file diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar.sha1 b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar.sha1 new file mode 100644 index 0000000000..3eb85256e2 --- /dev/null +++ b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar.sha1 @@ -0,0 +1 @@ +ee377a85bf07b2afb3a98157f926ebdb47a5e88c \ No newline at end of file diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar new file mode 100644 index 0000000000..7222c09136 Binary files /dev/null and b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar differ diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar.md5 
b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar.md5 new file mode 100644 index 0000000000..be8f3065c7 --- /dev/null +++ b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar.md5 @@ -0,0 +1 @@ +ba1be87b58c03e8ae6f890ca87c74b5b \ No newline at end of file diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar.sha1 b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar.sha1 new file mode 100644 index 0000000000..aa86a31839 --- /dev/null +++ b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar.sha1 @@ -0,0 +1 @@ +aaa96009d7e151f89703b4d932fc73ebcf9bc973 \ No newline at end of file diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom new file mode 100644 index 0000000000..e3cdbce60d --- /dev/null +++ b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom @@ -0,0 +1,144 @@ + + + + + 4.0.0 + + + org.apache.camel + camel-parent + 2.4.0 + + + camel-jetty + bundle + Camel :: Jetty + Camel Jetty support + 2.4.0.1 + + + org.apache.camel.component.jetty.* + + + + + + org.apache.camel + camel-core + 2.4.0 + + + org.apache.camel + camel-http + 2.4.0 + + + org.eclipse.jetty + jetty-server + + + org.eclipse.jetty + jetty-security + + + org.eclipse.jetty + jetty-servlet + + + org.eclipse.jetty + jetty-servlets + ${jetty-version} + + + org.eclipse.jetty + jetty-client + + + org.eclipse.jetty + jetty-jmx + ${jetty-version} + + + + org.apache.camel + camel-test + 2.4.0 + test + + + org.apache.camel + camel-spring + 2.4.0 + test + + + javax.mail + mail + ${javax-mail-version} + test + + + + org.springframework + spring-context + true + test + + + org.springframework + spring-aop + true + test + + + org.springframework + spring-test + true + test + + + + junit + junit + test + + + log4j + log4j + test + + + + + + + + 
maven-surefire-plugin + + pertest + + + + **/*XXXTest.* + + + + + + + diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom.md5 b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom.md5 new file mode 100644 index 0000000000..295aae7a23 --- /dev/null +++ b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom.md5 @@ -0,0 +1 @@ +fba57baa166195ac2b2a013c3cc6d3f1 \ No newline at end of file diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom.sha1 b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom.sha1 new file mode 100644 index 0000000000..7fb0b3347f --- /dev/null +++ b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom.sha1 @@ -0,0 +1 @@ +c41ff483ac35754c1d41b1823561935849b362ed \ No newline at end of file diff --git a/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml b/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml new file mode 100644 index 0000000000..0e25837b64 --- /dev/null +++ b/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml @@ -0,0 +1,11 @@ + + org.apache.camel + camel-jetty + 2.4.0.1 + + + 2.4.0.1 + + 20100723102939 + + \ No newline at end of file diff --git a/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml.md5 b/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml.md5 new file mode 100644 index 0000000000..fcff48b3ce --- /dev/null +++ b/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml.md5 @@ -0,0 +1 @@ +34f1efbcb11f7251390994d8f81598b2 \ No newline at end of file diff --git a/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml.sha1 b/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml.sha1 new file mode 100644 index 0000000000..64ef58e71f --- /dev/null +++ b/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml.sha1 @@ -0,0 +1 @@ +2e1bb47c5a8c19f98b70e5e6af450861933deacc \ No newline at end of 
file diff --git a/embedded-repo/org/multiverse/multiverse-alpha/0.6-2010-07-15/multiverse-alpha-0.6-2010-07-15.jar b/embedded-repo/org/multiverse/multiverse-alpha/0.6-2010-07-15/multiverse-alpha-0.6-2010-07-15.jar new file mode 100644 index 0000000000..0929a5bc75 Binary files /dev/null and b/embedded-repo/org/multiverse/multiverse-alpha/0.6-2010-07-15/multiverse-alpha-0.6-2010-07-15.jar differ diff --git a/embedded-repo/org/multiverse/multiverse-alpha/0.6-2010-07-15/multiverse-alpha-0.6-2010-07-15.pom b/embedded-repo/org/multiverse/multiverse-alpha/0.6-2010-07-15/multiverse-alpha-0.6-2010-07-15.pom new file mode 100644 index 0000000000..32e32fc857 --- /dev/null +++ b/embedded-repo/org/multiverse/multiverse-alpha/0.6-2010-07-15/multiverse-alpha-0.6-2010-07-15.pom @@ -0,0 +1,261 @@ + + + 4.0.0 + + multiverse-alpha + Alpha Multiverse STM engine + + Contains an all in one jar that that contains the AlphaStm including the Multiverse + Javaagent and the Multiverse Compiler. This is the JAR you want to include in your + projects, if you do, you don't need to worry about any Multiverse dependency + at all. 
+ + jar + 0.6-2010-07-15 + + + org.multiverse + multiverse + 0.6-2010-07-15 + + + + org.multiverse.javaagent.MultiverseJavaAgent + org.multiverse.stms.alpha.instrumentation.AlphaStmInstrumentor + + + + + + + maven-antrun-plugin + + + + create-main-jar + compile + + run + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Compiles the tests + test-compile + + run + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ant + ant + 1.7.0 + + + junit + junit + ${junit.version} + + + ant + optional + 1.5.4 + + + com.tonicsystems.jarjar + jarjar + 1.0-rc8 + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + ${multiverse.agentclass} + + ${project.build.outputDirectory}/META-INF/MANIFEST.MF + + + + + + + + + + ${project.groupId} + multiverse-alpha-unborn + ${project.version} + + + ${project.groupId} + multiverse-core + ${project.version} + + + ${project.groupId} + multiverse-instrumentation + ${project.version} + + + ${project.groupId} + multiverse-alpha-unborn + ${project.version} + test + + + + args4j + args4j + ${args4j.version} + provided + + + + com.tonicsystems.jarjar + jarjar + 1.0-rc8 + provided + + + + asm + asm-all + ${asm.version} + + + diff --git a/embedded-repo/org/multiverse/multiverse/0.6-2010-07-15/multiverse-0.6-2010-07-15.pom b/embedded-repo/org/multiverse/multiverse/0.6-2010-07-15/multiverse-0.6-2010-07-15.pom new file mode 100644 index 0000000000..5d89a92a8c --- /dev/null +++ b/embedded-repo/org/multiverse/multiverse/0.6-2010-07-15/multiverse-0.6-2010-07-15.pom @@ -0,0 +1,488 @@ + + + 4.0.0 + + Multiverse Software Transactional Memory + + Multiverse is a Software Transactional Memory implementation that can be used in Java + but also in other languages running on the JVM like Scala or Groovy. 
Essentially it is a framework that allows + different STM implementation (with different featuresets or performance characteristics) to be used + under the hood. The main STM implementation is multiverse-alpha.. + + org.multiverse + multiverse + 0.6-2010-07-15 + 2008 + pom + + + UTF-8 + 1.6 + 3.2 + 2.0.16 + 4.8.1 + 1.8.2 + + + + + The Apache License, ASL Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0 + + + + + Multiverse + http://multiverse.codehaus.org + + + + + pveentjer + Peter Veentjer + +1 + alarmnummer AT gmail DOTCOM + + Founder + + + + aphillips + Andrew Phillips + +1 + aphillips AT qrmedia DOTCOM + + Committer + + + + + + + maven.atlassian.com + Atlassian Maven Proxy + https://maven.atlassian.com/content/groups/public + + + repo1.maven + Maven Main Repository + http://repo1.maven.org/maven2 + + + maven2-repository.dev.java.net + Java.net Repository for Maven + http://download.java.net/maven/2 + + + java.net + Java.net Legacy Repository for Maven + http://download.java.net/maven/1 + legacy + + + google-maven-repository + Google Maven Repository + http://google-maven-repository.googlecode.com/svn/repository/ + + + repository.codehaus.org + Codehaus Maven Repository + http://repository.codehaus.org + + + ibiblio + http://www.ibiblio.org/maven + + + sourceforge + http://maven-plugins.sourceforge.net/repository + + + + mandubian-mvn + http://mandubian-mvn.googlecode.com/svn/trunk/mandubian-mvn/repository + + + + + + snapshots + http://snapshots.maven.codehaus.org/maven2 + + + + + multiverse-benchy + + multiverse-core + multiverse-core-tests + multiverse-instrumentation + multiverse-alpha-unborn + multiverse-alpha + + multiverse-site + + multiverse-performance-tool + + + + + + + maven-compiler-plugin + + ${sourceEncoding} + ${targetJdk} + ${targetJdk} + + + + maven-resources-plugin + + ${sourceEncoding} + + + + maven-surefire-plugin + + + **/*LongTest.java + **/*longTest.java + **/*StressTest.java + **/*stressTest.java + **/*PerformanceTest.java + 
**/*performanceTest.java + + + **/*Test.java + + once + + + + + + + maven-enforcer-plugin + + + enforce-java + + enforce + + + + + ${targetJdk} + + + + + + + + + maven-source-plugin + + + attach-sources + + jar + + + + + + + + + + org.apache.maven.wagon + wagon-webdav + 1.0-beta-2 + + + org.apache.maven.wagon + wagon-ftp + 1.0-beta-6 + + + + + + + junit + junit + ${junit.version} + test + + + org.mockito + mockito-all + ${mockito.version} + test + + + + + scm:git:git://git.codehaus.org/multiverse.git + scm:git:ssh://git@git.codehaus.org/multiverse.git + http://git.codehaus.org/gitweb.cgi?p=multiverse.git + + + + Jira + http://jira.codehaus.org/browse/MULTIVERSE + + + + + Development List + dev-subscribe@multiverse.codehaus.org + dev-unsubscribe@multiverse.codehaus.org + dev@multiverse.codehaus.org + http://archive.multiverse.codehaus.org/dev + + + + User List + user-subscribe@multiverse.codehaus.org + user-unsubscribe@multiverse.codehaus.org + user@multiverse.codehaus.org + http://archive.multiverse.codehaus.org/user + + + + Commits List + scm-subscribe@multiverse.codehaus.org + scm-unsubscribe@multiverse.codehaus.org + http://archive.multiverse.codehaus.org/scm + + + + + + + maven-javadoc-plugin + 2.6.1 + + ${sourceEncoding} + true + + + + default + + aggregate + + + + + + org.codehaus.mojo + jxr-maven-plugin + + + org.codehaus.mojo + taglist-maven-plugin + 2.3 + + ${sourceEncoding} + + FIXME + Fixme + fixme + TODO + todo + Todo + @todo + @deprecated + + + + + maven-project-info-reports-plugin + + + maven-changes-plugin + 2.0-beta-3 + + + + changes-report + + + + + ${basedir}/changes.xml + + + + maven-surefire-report-plugin + + false + + + + + report-only + + + + + + org.codehaus.mojo + findbugs-maven-plugin + 2.0.1 + + + maven-pmd-plugin + 2.3 + + ${sourceEncoding} + ${targetJdk} + + + + + + + + + multiverse-releases + Multiverse Central Repository + dav:https://dav.codehaus.org/repository/multiverse/ + + + multiverse-snapshots + Multiverse Central Development 
Repository + dav:https://dav.codehaus.org/snapshots.repository/multiverse/ + + + http://dist.codehaus.org/multiverse/ + + + + + release + + + + + maven-javadoc-plugin + 2.6.1 + false + + + generate-resources + + aggregate + + + + + ${sourceEncoding} + true + + + + + maven-assembly-plugin + 2.2-beta-2 + false + + + distribution + package + + single + + + + distribution.xml + + + + + + + + + + + stress + + + + org.apache.maven.plugins + maven-surefire-plugin + + + **/*LongTest.java + + -Xmx256m + once + + + + + + + diff --git a/embedded-repo/org/scala-tools/vscaladoc/1.1-md-3/vscaladoc-1.1-md-3.jar b/embedded-repo/org/scala-tools/vscaladoc/1.1-md-3/vscaladoc-1.1-md-3.jar new file mode 100644 index 0000000000..daa3365531 Binary files /dev/null and b/embedded-repo/org/scala-tools/vscaladoc/1.1-md-3/vscaladoc-1.1-md-3.jar differ diff --git a/embedded-repo/org/scala-tools/vscaladoc/1.1-md-3/vscaladoc-1.1-md-3.pom b/embedded-repo/org/scala-tools/vscaladoc/1.1-md-3/vscaladoc-1.1-md-3.pom new file mode 100644 index 0000000000..aa542db6ac --- /dev/null +++ b/embedded-repo/org/scala-tools/vscaladoc/1.1-md-3/vscaladoc-1.1-md-3.pom @@ -0,0 +1,152 @@ + + 4.0.0 + + org.scala-tools + scala-tools-parent + 1.3 + + vscaladoc + 1.1-md-3 + ${project.artifactId} + 2008 + + scm:svn:http://vscaladoc.googlecode.com/svn/tags/vscaladoc-1.1 + scm:svn:https://vscaladoc.googlecode.com/svn/tags/vscaladoc-1.1 + http://code.google.com/p/vscaladoc/source/browse/tags/vscaladoc-1.1 + + + code.google + http://code.google.com/p/vscaladoc/issues/list + + + hudson + http://scala-tools.org/hudson/job/vscaladoc + + + + 2.7.1 + + + + + scala-tools.org + Scala-Tools Maven2 Repository + http://scala-tools.org/repo-releases + + + + + + scala-tools.org + Scala-Tools Maven2 Repository + http://scala-tools.org/repo-releases + + + + + + org.scala-lang + scala-library + ${scala.version} + true + + + org.scala-lang + scala-compiler + ${scala.version} + true + + + junit + junit + 3.8.1 + test + + + + + src/main/scala 
+ src/test/scala + + + org.scala-tools + maven-scala-plugin + + + compile + + compile + testCompile + + + + + + -target:jvm-1.5 + + ${scala.version} + + + + org.apache.maven.plugins + maven-eclipse-plugin + + true + + org.scala-lang:scala-library + + + ch.epfl.lamp.sdt.launching.SCALA_CONTAINER + + + ch.epfl.lamp.sdt.core.scalanature + org.eclipse.jdt.core.javanature + + + ch.epfl.lamp.sdt.core.scalabuilder + + + + + net.sf.alchim + yuicompressor-maven-plugin + + + + compress + + + + + true + + + + true + + true + ${project.build.directory}/classes/org/scala_tools/vscaladoc/_highlighter/shAll.js + + + shCore*.js + shBrush*.js + + + + + + + + + + + org.scala-tools + maven-scala-plugin + + 1.0 + ${scala.version} + + + + + diff --git a/embedded-repo/sbinary/sbinary/2.8.0-0.3.1/sbinary-2.8.0-0.3.1.jar b/embedded-repo/sbinary/sbinary/2.8.0-0.3.1/sbinary-2.8.0-0.3.1.jar new file mode 100644 index 0000000000..5d76ec911a Binary files /dev/null and b/embedded-repo/sbinary/sbinary/2.8.0-0.3.1/sbinary-2.8.0-0.3.1.jar differ diff --git a/embedded-repo/sbinary/sbinary/2.8.0-0.3.1/sbinary-2.8.0-0.3.1.pom b/embedded-repo/sbinary/sbinary/2.8.0-0.3.1/sbinary-2.8.0-0.3.1.pom new file mode 100644 index 0000000000..7a720793cc --- /dev/null +++ b/embedded-repo/sbinary/sbinary/2.8.0-0.3.1/sbinary-2.8.0-0.3.1.pom @@ -0,0 +1,33 @@ + + + 4.0.0 + sbinary + sbinary + jar + 2.8.0-0.3.1 + + + org.scala-lang + scala-library + 2.8.0 + compile + + + + + PublicReleasesRepository + Public Releases Repository + http://maven/content/groups/public/ + + + PublicSnapshots + Public Snapshots + http://maven/content/groups/public-snapshots/ + + + ScalaToolsMaven2Repository + Scala-Tools Maven2 Repository + http://scala-tools.org/repo-releases/ + + + \ No newline at end of file diff --git a/embedded-repo/sjson/json/sjson/0.7-2.8.0/sjson-0.7-2.8.0.jar b/embedded-repo/sjson/json/sjson/0.7-2.8.0/sjson-0.7-2.8.0.jar new file mode 100644 index 0000000000..e8700f11d3 Binary files /dev/null and 
b/embedded-repo/sjson/json/sjson/0.7-2.8.0/sjson-0.7-2.8.0.jar differ diff --git a/embedded-repo/sjson/json/sjson/0.7-2.8.0/sjson-0.7-2.8.0.pom b/embedded-repo/sjson/json/sjson/0.7-2.8.0/sjson-0.7-2.8.0.pom new file mode 100644 index 0000000000..dfc9017dcf --- /dev/null +++ b/embedded-repo/sjson/json/sjson/0.7-2.8.0/sjson-0.7-2.8.0.pom @@ -0,0 +1,9 @@ + + + 4.0.0 + sjson.json + sjson + 0.7-2.8.0 + POM was created from install:install-file + diff --git a/project/build.properties b/project/build.properties index cc8e376f1b..ba020623b4 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1,7 +1,7 @@ project.organization=se.scalablesolutions.akka project.name=akka project.version=0.10 -scala.version=2.8.0.RC3 +scala.version=2.8.0 sbt.version=0.7.4 def.scala.version=2.7.7 -build.scala.versions=2.8.0.RC3 +build.scala.versions=2.8.0 diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index 77b2dde318..e63bfc573f 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -2,30 +2,20 @@ | Copyright (C) 2009-2010 Scalable Solutions AB | \---------------------------------------------------------------------------*/ -import sbt._ -import sbt.CompileOrder._ - -import spde._ +import com.weiglewilczek.bnd4sbt.BNDPlugin import de.tuxed.codefellow.plugin.CodeFellowPlugin - +import java.io.File import java.util.jar.Attributes import java.util.jar.Attributes.Name._ -import java.io.File +import sbt._ +import sbt.CompileOrder._ +import spde._ -class AkkaParent(info: ProjectInfo) extends DefaultProject(info) { +class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { - // ------------------------------------------------------------ - // project versions - val JERSEY_VERSION = "1.2" - val ATMO_VERSION = "0.6" - val CASSANDRA_VERSION = "0.6.1" - val LIFT_VERSION = "2.0-scala280-SNAPSHOT" - val SCALATEST_VERSION = "1.2-for-scala-2.8.0.RC3-SNAPSHOT" - val MULTIVERSE_VERSION = "0.6-SNAPSHOT" - - // 
------------------------------------------------------------ - lazy val deployPath = info.projectPath / "deploy" - lazy val distPath = info.projectPath / "dist" + // ------------------------------------------------------------------------------------------------------------------- + // Compile settings + // ------------------------------------------------------------------------------------------------------------------- override def compileOptions = super.compileOptions ++ Seq("-deprecation", @@ -35,61 +25,214 @@ class AkkaParent(info: ProjectInfo) extends DefaultProject(info) { "-Xwarninit", "-encoding", "utf8") .map(x => CompileOption(x)) - override def javaCompileOptions = JavaCompileOption("-Xlint:unchecked") :: super.javaCompileOptions.toList - def distName = "%s_%s-%s.zip".format(name, buildScalaVersion, version) + // ------------------------------------------------------------------------------------------------------------------- + // Deploy/dist settings + // ------------------------------------------------------------------------------------------------------------------- + lazy val deployPath = info.projectPath / "deploy" + lazy val distPath = info.projectPath / "dist" + def distName = "%s_%s-%s.zip".format(name, buildScalaVersion, version) lazy val dist = zipTask(allArtifacts, "dist", distName) dependsOn (`package`) describedAs("Zips up the distribution.") // ------------------------------------------------------------------------------------------------------------------- - // Repositories + // All repositories *must* go here! See ModuleConigurations below. 
+ // ------------------------------------------------------------------------------------------------------------------- + + object Repositories { + lazy val AkkaRepo = MavenRepository("Akka Repository", "http://scalablesolutions.se/akka/repository") + lazy val CodehausSnapshotRepo = MavenRepository("Codehaus Snapshots", "http://snapshots.repository.codehaus.org") + lazy val EmbeddedRepo = MavenRepository("Embedded Repo", (info.projectPath / "embedded-repo").asURL.toString) + lazy val FusesourceSnapshotRepo = MavenRepository("Fusesource Snapshots", "http://repo.fusesource.com/nexus/content/repositories/snapshots") + lazy val GuiceyFruitRepo = MavenRepository("GuiceyFruit Repo", "http://guiceyfruit.googlecode.com/svn/repo/releases/") + lazy val JBossRepo = MavenRepository("JBoss Repo", "https://repository.jboss.org/nexus/content/groups/public/") + lazy val JavaNetRepo = MavenRepository("java.net Repo", "http://download.java.net/maven/2") + lazy val SonatypeSnapshotRepo = MavenRepository("Sonatype OSS Repo", "http://oss.sonatype.org/content/repositories/releases") + lazy val SunJDMKRepo = MavenRepository("Sun JDMK Repo", "http://wp5.e-taxonomy.eu/cdmlib/mavenrepo") + } + + // ------------------------------------------------------------------------------------------------------------------- + // ModuleConfigurations // Every dependency that cannot be resolved from the built-in repositories (Maven Central and Scala Tools Releases) // must be resolved from a ModuleConfiguration. This will result in a significant acceleration of the update action. // Therefore, if repositories are defined, this must happen as def, not as val. 
// ------------------------------------------------------------------------------------------------------------------- - val embeddedRepo = "Embedded Repo" at (info.projectPath / "embedded-repo").asURL.toString - val scalaTestModuleConfig = ModuleConfiguration("org.scalatest", ScalaToolsSnapshots) - def guiceyFruitRepo = "GuiceyFruit Repo" at "http://guiceyfruit.googlecode.com/svn/repo/releases/" - val guiceyFruitModuleConfig = ModuleConfiguration("org.guiceyfruit", guiceyFruitRepo) - def jbossRepo = "JBoss Repo" at "https://repository.jboss.org/nexus/content/groups/public/" - val jbossModuleConfig = ModuleConfiguration("org.jboss", jbossRepo) - val nettyModuleConfig = ModuleConfiguration("org.jboss.netty", jbossRepo) - val jgroupsModuleConfig = ModuleConfiguration("jgroups", jbossRepo) - def sunjdmkRepo = "Sun JDMK Repo" at "http://wp5.e-taxonomy.eu/cdmlib/mavenrepo" - val jmsModuleConfig = ModuleConfiguration("javax.jms", sunjdmkRepo) - val jdmkModuleConfig = ModuleConfiguration("com.sun.jdmk", sunjdmkRepo) - val jmxModuleConfig = ModuleConfiguration("com.sun.jmx", sunjdmkRepo) - def javaNetRepo = "java.net Repo" at "http://download.java.net/maven/2" - def sonatypeSnapshotRepo = "Sonatype OSS Repo" at "http://oss.sonatype.org/content/repositories/releases" - val jerseyModuleConfig = ModuleConfiguration("com.sun.jersey", javaNetRepo) - val jerseyContrModuleConfig = ModuleConfiguration("com.sun.jersey.contribs", javaNetRepo) - val grizzlyModuleConfig = ModuleConfiguration("com.sun.grizzly", javaNetRepo) - val atmosphereModuleConfig = ModuleConfiguration("org.atmosphere", sonatypeSnapshotRepo) - val liftModuleConfig = ModuleConfiguration("net.liftweb", ScalaToolsSnapshots) - def codehausSnapshotRepo = "Codehaus Snapshots" at "http://snapshots.repository.codehaus.org" - val multiverseModuleConfig = ModuleConfiguration("org.multiverse", codehausSnapshotRepo) - // ------------------------------------------------------------ - // project defintions - lazy val akka_core 
= project("akka-core", "akka-core", new AkkaCoreProject(_)) - lazy val akka_amqp = project("akka-amqp", "akka-amqp", new AkkaAMQPProject(_), akka_core) - lazy val akka_http = project("akka-http", "akka-http", new AkkaHttpProject(_), akka_core, akka_camel) - lazy val akka_camel = project("akka-camel", "akka-camel", new AkkaCamelProject(_), akka_core) + import Repositories._ + lazy val atmosphereModuleConfig = ModuleConfiguration("org.atmosphere", SonatypeSnapshotRepo) + lazy val grizzlyModuleConfig = ModuleConfiguration("com.sun.grizzly", JavaNetRepo) + lazy val guiceyFruitModuleConfig = ModuleConfiguration("org.guiceyfruit", GuiceyFruitRepo) + // lazy val hawtdispatchModuleConfig = ModuleConfiguration("org.fusesource.hawtdispatch", FusesourceSnapshotRepo) + lazy val jbossModuleConfig = ModuleConfiguration("org.jboss", JBossRepo) + lazy val jdmkModuleConfig = ModuleConfiguration("com.sun.jdmk", SunJDMKRepo) + lazy val jerseyContrModuleConfig = ModuleConfiguration("com.sun.jersey.contribs", JavaNetRepo) + lazy val jerseyModuleConfig = ModuleConfiguration("com.sun.jersey", JavaNetRepo) + lazy val jgroupsModuleConfig = ModuleConfiguration("jgroups", JBossRepo) + lazy val jmsModuleConfig = ModuleConfiguration("javax.jms", SunJDMKRepo) + lazy val jmxModuleConfig = ModuleConfiguration("com.sun.jmx", SunJDMKRepo) + lazy val liftModuleConfig = ModuleConfiguration("net.liftweb", ScalaToolsSnapshots) + lazy val multiverseModuleConfig = ModuleConfiguration("org.multiverse", CodehausSnapshotRepo) + lazy val nettyModuleConfig = ModuleConfiguration("org.jboss.netty", JBossRepo) + lazy val scalaTestModuleConfig = ModuleConfiguration("org.scalatest", ScalaToolsSnapshots) + lazy val embeddedRepo = EmbeddedRepo // This is the only exception, because the embedded repo is fast! 
+ + // ------------------------------------------------------------------------------------------------------------------- + // Versions + // ------------------------------------------------------------------------------------------------------------------- + + lazy val ATMO_VERSION = "0.6" + lazy val CAMEL_VERSION = "2.4.0" + lazy val CASSANDRA_VERSION = "0.6.1" + lazy val DispatchVersion = "0.7.4" + lazy val HAWTDISPATCH_VERSION = "1.0" + lazy val JacksonVersion = "1.2.1" + lazy val JERSEY_VERSION = "1.2" + lazy val LIFT_VERSION = "2.0-scala280-SNAPSHOT" + lazy val MULTIVERSE_VERSION = "0.6-SNAPSHOT" + lazy val SCALATEST_VERSION = "1.2-for-scala-2.8.0.final-SNAPSHOT" + lazy val Slf4jVersion = "1.6.0" + lazy val SPRING_VERSION = "3.0.3.RELEASE" + lazy val WerkzVersion = "2.2.1" + + // ------------------------------------------------------------------------------------------------------------------- + // Dependencies + // ------------------------------------------------------------------------------------------------------------------- + + object Dependencies { + + // Compile + + lazy val annotation = "javax.annotation" % "jsr250-api" % "1.0" % "compile" + + lazy val aopalliance = "aopalliance" % "aopalliance" % "1.0" % "compile" + + lazy val atmo = "org.atmosphere" % "atmosphere-annotations" % ATMO_VERSION % "compile" + lazy val atmo_jbossweb = "org.atmosphere" % "atmosphere-compat-jbossweb" % ATMO_VERSION % "compile" + lazy val atmo_jersey = "org.atmosphere" % "atmosphere-jersey" % ATMO_VERSION % "compile" + lazy val atmo_runtime = "org.atmosphere" % "atmosphere-runtime" % ATMO_VERSION % "compile" + lazy val atmo_tomcat = "org.atmosphere" % "atmosphere-compat-tomcat" % ATMO_VERSION % "compile" + lazy val atmo_weblogic = "org.atmosphere" % "atmosphere-compat-weblogic" % ATMO_VERSION % "compile" + + lazy val atomikos_transactions = "com.atomikos" % "transactions" % "3.2.3" % "compile" + lazy val atomikos_transactions_api = "com.atomikos" % "transactions-api" % 
"3.2.3" % "compile" + lazy val atomikos_transactions_jta = "com.atomikos" % "transactions-jta" % "3.2.3" % "compile" + + lazy val camel_core = "org.apache.camel" % "camel-core" % CAMEL_VERSION % "compile" + + lazy val cassandra = "org.apache.cassandra" % "cassandra" % CASSANDRA_VERSION % "compile" + + lazy val commons_codec = "commons-codec" % "commons-codec" % "1.4" % "compile" + + lazy val commons_io = "commons-io" % "commons-io" % "1.4" % "compile" + + lazy val commons_logging = "commons-logging" % "commons-logging" % "1.1.1" % "compile" + + lazy val commons_pool = "commons-pool" % "commons-pool" % "1.5.4" % "compile" + + lazy val configgy = "net.lag" % "configgy" % "2.8.0-1.5.5" % "compile" + + lazy val dispatch_http = "net.databinder" % "dispatch-http_2.8.0" % DispatchVersion % "compile" + lazy val dispatch_json = "net.databinder" % "dispatch-json_2.8.0" % DispatchVersion % "compile" + + lazy val grizzly = "com.sun.grizzly" % "grizzly-comet-webserver" % "1.9.18-i" % "compile" + + lazy val guicey = "org.guiceyfruit" % "guice-all" % "2.0" % "compile" + + lazy val h2_lzf = "voldemort.store.compress" % "h2-lzf" % "1.0" % "compile" + + lazy val hawtdispatch = "org.fusesource.hawtdispatch" % "hawtdispatch-scala" % HAWTDISPATCH_VERSION % "compile" + + lazy val jackson = "org.codehaus.jackson" % "jackson-mapper-asl" % JacksonVersion % "compile" + lazy val jackson_core = "org.codehaus.jackson" % "jackson-core-asl" % JacksonVersion % "compile" + lazy val jackson_core_asl = "org.codehaus.jackson" % "jackson-core-asl" % JacksonVersion % "compile" + + lazy val jersey = "com.sun.jersey" % "jersey-core" % JERSEY_VERSION % "compile" + lazy val jersey_json = "com.sun.jersey" % "jersey-json" % JERSEY_VERSION % "compile" + lazy val jersey_server = "com.sun.jersey" % "jersey-server" % JERSEY_VERSION % "compile" + lazy val jersey_contrib = "com.sun.jersey.contribs" % "jersey-scala" % JERSEY_VERSION % "compile" + + lazy val jgroups = "jgroups" % "jgroups" % "2.9.0.GA" % "compile" + 
+ lazy val jsr166x = "jsr166x" % "jsr166x" % "1.0" % "compile" + + lazy val jsr250 = "javax.annotation" % "jsr250-api" % "1.0" % "compile" + + lazy val jsr311 = "javax.ws.rs" % "jsr311-api" % "1.1" % "compile" + + lazy val jta_1_1 = "org.apache.geronimo.specs" % "geronimo-jta_1.1_spec" % "1.1.1" % "compile" intransitive + + lazy val lift_util = "net.liftweb" % "lift-util" % LIFT_VERSION % "compile" + lazy val lift_webkit = "net.liftweb" % "lift-webkit" % LIFT_VERSION % "compile" + + lazy val log4j = "log4j" % "log4j" % "1.2.15" % "compile" + + lazy val mongo = "org.mongodb" % "mongo-java-driver" % "2.0" % "compile" + + lazy val multiverse = "org.multiverse" % "multiverse-alpha" % MULTIVERSE_VERSION % "compile" intransitive + + lazy val netty = "org.jboss.netty" % "netty" % "3.2.1.Final" % "compile" + + lazy val protobuf = "com.google.protobuf" % "protobuf-java" % "2.3.0" % "compile" + + lazy val osgi_core = "org.osgi" % "org.osgi.core" % "4.2.0" + + lazy val rabbit = "com.rabbitmq" % "amqp-client" % "1.8.1" % "compile" + + lazy val redis = "com.redis" % "redisclient" % "2.8.0-1.4" % "compile" + + lazy val sbinary = "sbinary" % "sbinary" % "2.8.0-0.3.1" % "compile" + + lazy val servlet = "javax.servlet" % "servlet-api" % "2.5" % "compile" + + lazy val sjson = "sjson.json" % "sjson" % "0.7-2.8.0" % "compile" + + lazy val slf4j = "org.slf4j" % "slf4j-api" % Slf4jVersion % "compile" + lazy val slf4j_log4j = "org.slf4j" % "slf4j-log4j12" % Slf4jVersion % "compile" + + lazy val spring_beans = "org.springframework" % "spring-beans" % SPRING_VERSION % "compile" + lazy val spring_context = "org.springframework" % "spring-context" % SPRING_VERSION % "compile" + + lazy val stax_api = "javax.xml.stream" % "stax-api" % "1.0-2" % "compile" + + lazy val thrift = "com.facebook" % "thrift" % "r917130" % "compile" + + lazy val werkz = "org.codehaus.aspectwerkz" % "aspectwerkz-nodeps-jdk5" % WerkzVersion % "compile" + lazy val werkz_core = "org.codehaus.aspectwerkz" % 
"aspectwerkz-jdk5" % WerkzVersion % "compile" + + // Test + + lazy val camel_spring = "org.apache.camel" % "camel-spring" % CAMEL_VERSION % "test" + lazy val cassandra_clhm = "org.apache.cassandra" % "clhm-production" % CASSANDRA_VERSION % "test" + lazy val commons_coll = "commons-collections" % "commons-collections" % "3.2.1" % "test" + lazy val google_coll = "com.google.collections" % "google-collections" % "1.0" % "test" + lazy val high_scale = "org.apache.cassandra" % "high-scale-lib" % CASSANDRA_VERSION % "test" + lazy val jettyServer = "org.mortbay.jetty" % "jetty" % "6.1.22" % "test" + lazy val junit = "junit" % "junit" % "4.5" % "test" + lazy val mockito = "org.mockito" % "mockito-all" % "1.8.1" % "test" + lazy val scalatest = "org.scalatest" % "scalatest" % SCALATEST_VERSION % "test" + } + + // ------------------------------------------------------------------------------------------------------------------- + // Subprojects + // ------------------------------------------------------------------------------------------------------------------- + + lazy val akka_core = project("akka-core", "akka-core", new AkkaCoreProject(_)) + lazy val akka_amqp = project("akka-amqp", "akka-amqp", new AkkaAMQPProject(_), akka_core) + lazy val akka_http = project("akka-http", "akka-http", new AkkaHttpProject(_), akka_core, akka_camel) + lazy val akka_camel = project("akka-camel", "akka-camel", new AkkaCamelProject(_), akka_core) lazy val akka_persistence = project("akka-persistence", "akka-persistence", new AkkaPersistenceParentProject(_)) - lazy val akka_spring = project("akka-spring", "akka-spring", new AkkaSpringProject(_), akka_core, akka_camel) - lazy val akka_jta = project("akka-jta", "akka-jta", new AkkaJTAProject(_), akka_core) - lazy val akka_kernel = project("akka-kernel", "akka-kernel", new AkkaKernelProject(_), - akka_core, akka_http, akka_spring, akka_camel, akka_persistence, akka_amqp) + lazy val akka_spring = project("akka-spring", "akka-spring", new 
AkkaSpringProject(_), akka_core, akka_camel) + lazy val akka_jta = project("akka-jta", "akka-jta", new AkkaJTAProject(_), akka_core) + lazy val akka_kernel = project("akka-kernel", "akka-kernel", new AkkaKernelProject(_), + akka_core, akka_http, akka_spring, akka_camel, akka_persistence, akka_amqp) + lazy val akka_osgi = project("akka-osgi", "akka-osgi", new AkkaOSGiParentProject(_)) + lazy val akka_samples = project("akka-samples", "akka-samples", new AkkaSamplesParentProject(_)) - // active object tests in java - lazy val akka_active_object_test = project("akka-active-object-test", "akka-active-object-test", new AkkaActiveObjectTestProject(_), akka_kernel) + // ------------------------------------------------------------------------------------------------------------------- + // Miscellaneous + // ------------------------------------------------------------------------------------------------------------------- - // examples - lazy val akka_samples = project("akka-samples", "akka-samples", new AkkaSamplesParentProject(_)) - - // ------------------------------------------------------------ - // Run Akka microkernel using 'sbt run' + use for packaging executable JAR override def mainClass = Some("se.scalablesolutions.akka.kernel.Main") override def packageOptions = @@ -171,105 +314,95 @@ class AkkaParent(info: ProjectInfo) extends DefaultProject(info) { " -Dpackaging=jar -DgeneratePom=true" command ! 
log } - None + None } dependsOn(dist) describedAs("Run mvn install for artifacts in dist.") - // ------------------------------------------------------------ - // subprojects + // ------------------------------------------------------------------------------------------------------------------- + // akka-core subproject + // ------------------------------------------------------------------------------------------------------------------- + class AkkaCoreProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with CodeFellowPlugin { - val netty = "org.jboss.netty" % "netty" % "3.2.1.Final" % "compile" - val commons_codec = "commons-codec" % "commons-codec" % "1.4" % "compile" - val commons_io = "commons-io" % "commons-io" % "1.4" % "compile" - val dispatch_json = "net.databinder" % "dispatch-json_2.8.0.RC3" % "0.7.4" % "compile" - val dispatch_http = "net.databinder" % "dispatch-http_2.8.0.RC3" % "0.7.4" % "compile" - val sjson = "sjson.json" % "sjson" % "0.6-SNAPSHOT-2.8.RC3" % "compile" - val sbinary = "sbinary" % "sbinary" % "2.8.0.RC3-0.3.1-SNAPSHOT" % "compile" - val jackson = "org.codehaus.jackson" % "jackson-mapper-asl" % "1.2.1" % "compile" - val jackson_core = "org.codehaus.jackson" % "jackson-core-asl" % "1.2.1" % "compile" - val h2_lzf = "voldemort.store.compress" % "h2-lzf" % "1.0" % "compile" - val jsr166x = "jsr166x" % "jsr166x" % "1.0" % "compile" - val jta_1_1 = "org.apache.geronimo.specs" % "geronimo-jta_1.1_spec" % "1.1.1" % "compile" intransitive() - val werkz = "org.codehaus.aspectwerkz" % "aspectwerkz-nodeps-jdk5" % "2.2.1" % "compile" - val werkz_core = "org.codehaus.aspectwerkz" % "aspectwerkz-jdk5" % "2.2.1" % "compile" - val configgy = "net.lag" % "configgy" % "2.8.0.RC3-1.5.2-SNAPSHOT" % "compile" - val guicey = "org.guiceyfruit" % "guice-all" % "2.0" % "compile" - val aopalliance = "aopalliance" % "aopalliance" % "1.0" % "compile" - val protobuf = "com.google.protobuf" % "protobuf-java" % "2.3.0" % "compile" - val multiverse 
= "org.multiverse" % "multiverse-alpha" % MULTIVERSE_VERSION % "compile" intransitive() - val jgroups = "jgroups" % "jgroups" % "2.9.0.GA" % "compile" + val aopalliance = Dependencies.aopalliance + val commons_codec = Dependencies.commons_codec + val commons_io = Dependencies.commons_io + val configgy = Dependencies.configgy + val dispatch_http = Dependencies.dispatch_http + val dispatch_json = Dependencies.dispatch_json + val guicey = Dependencies.guicey + val h2_lzf = Dependencies.h2_lzf + val hawtdispatch = Dependencies.hawtdispatch + val jackson = Dependencies.jackson + val jackson_core = Dependencies.jackson_core + val jgroups = Dependencies.jgroups + val jsr166x = Dependencies.jsr166x + val jta_1_1 = Dependencies.jta_1_1 + val multiverse = Dependencies.multiverse + val netty = Dependencies.netty + val protobuf = Dependencies.protobuf + val sbinary = Dependencies.sbinary + val sjson = Dependencies.sjson + val werkz = Dependencies.werkz + val werkz_core = Dependencies.werkz_core // testing - val scalatest = "org.scalatest" % "scalatest" % SCALATEST_VERSION % "test" - val junit = "junit" % "junit" % "4.5" % "test" + val junit = Dependencies.junit + val scalatest = Dependencies.scalatest } + // ------------------------------------------------------------------------------------------------------------------- + // akka-amqp subproject + // ------------------------------------------------------------------------------------------------------------------- + class AkkaAMQPProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with CodeFellowPlugin { - val commons_io = "commons-io" % "commons-io" % "1.4" % "compile" - val rabbit = "com.rabbitmq" % "amqp-client" % "1.8.1" % "compile" + val commons_io = Dependencies.commons_io + val rabbit = Dependencies.rabbit // testing - val multiverse = "org.multiverse" % "multiverse-alpha" % MULTIVERSE_VERSION % "test" intransitive() - val scalatest = "org.scalatest" % "scalatest" % SCALATEST_VERSION % "test" - val 
junit = "junit" % "junit" % "4.5" % "test" + val junit = Dependencies.junit + val multiverse = Dependencies.multiverse + val scalatest = Dependencies.scalatest } + // ------------------------------------------------------------------------------------------------------------------- + // akka-http subproject + // ------------------------------------------------------------------------------------------------------------------- + class AkkaHttpProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with CodeFellowPlugin { - val jackson_core_asl = "org.codehaus.jackson" % "jackson-core-asl" % "1.2.1" % "compile" - val stax_api = "javax.xml.stream" % "stax-api" % "1.0-2" % "compile" - val servlet = "javax.servlet" % "servlet-api" % "2.5" % "compile" - val jersey = "com.sun.jersey" % "jersey-core" % JERSEY_VERSION % "compile" - val jersey_server = "com.sun.jersey" % "jersey-server" % JERSEY_VERSION % "compile" - val jersey_json = "com.sun.jersey" % "jersey-json" % JERSEY_VERSION % "compile" - val jersey_contrib = "com.sun.jersey.contribs" % "jersey-scala" % JERSEY_VERSION % "compile" - val jsr311 = "javax.ws.rs" % "jsr311-api" % "1.1" % "compile" - val grizzly = "com.sun.grizzly" % "grizzly-comet-webserver" % "1.9.18-i" % "compile" - val atmo = "org.atmosphere" % "atmosphere-annotations" % ATMO_VERSION % "compile" - val atmo_jersey = "org.atmosphere" % "atmosphere-jersey" % ATMO_VERSION % "compile" - val atmo_runtime = "org.atmosphere" % "atmosphere-runtime" % ATMO_VERSION % "compile" - val atmo_tomcat = "org.atmosphere" % "atmosphere-compat-tomcat" % ATMO_VERSION % "compile" - val atmo_weblogic = "org.atmosphere" % "atmosphere-compat-weblogic" % ATMO_VERSION % "compile" - val atmo_jbossweb = "org.atmosphere" % "atmosphere-compat-jbossweb" % ATMO_VERSION % "compile" - val commons_logging = "commons-logging" % "commons-logging" % "1.1.1" % "compile" - val annotation = "javax.annotation" % "jsr250-api" % "1.0" % "compile" + val annotation = 
Dependencies.annotation + val atmo = Dependencies.atmo + val atmo_jbossweb = Dependencies.atmo_jbossweb + val atmo_jersey = Dependencies.atmo_jersey + val atmo_runtime = Dependencies.atmo_runtime + val atmo_tomcat = Dependencies.atmo_tomcat + val atmo_weblogic = Dependencies.atmo_weblogic + val commons_logging = Dependencies.commons_logging + val grizzly = Dependencies.grizzly + val jackson_core_asl = Dependencies.jackson_core_asl + val jersey = Dependencies.jersey + val jersey_contrib = Dependencies.jersey_contrib + val jersey_json = Dependencies.jersey_json + val jersey_server = Dependencies.jersey_server + val jsr311 = Dependencies.jsr311 + val servlet = Dependencies.servlet + val stax_api = Dependencies.stax_api // testing - val scalatest = "org.scalatest" % "scalatest" % SCALATEST_VERSION % "test" - val junit = "junit" % "junit" % "4.5" % "test" - val mockito = "org.mockito" % "mockito-all" % "1.8.1" % "test" + val junit = Dependencies.junit + val mockito = Dependencies.mockito + val scalatest = Dependencies.scalatest } + // ------------------------------------------------------------------------------------------------------------------- + // akka-camel subproject + // ------------------------------------------------------------------------------------------------------------------- + class AkkaCamelProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with CodeFellowPlugin { - val camel_core = "org.apache.camel" % "camel-core" % "2.3.0" % "compile" + val camel_core = Dependencies.camel_core } - class AkkaPersistenceCommonProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val thrift = "com.facebook" % "thrift" % "r917130" % "compile" - val commons_pool = "commons-pool" % "commons-pool" % "1.5.4" % "compile" - } - - class AkkaRedisProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val redis = "com.redis" % "redisclient" % "2.8.0.RC3-1.4" % "compile" - val commons_codec = "commons-codec" % 
"commons-codec" % "1.4" % "compile" - override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil - } - - class AkkaMongoProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val mongo = "org.mongodb" % "mongo-java-driver" % "1.4" % "compile" - override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil - } - - class AkkaCassandraProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val cassandra = "org.apache.cassandra" % "cassandra" % CASSANDRA_VERSION % "compile" - val slf4j = "org.slf4j" % "slf4j-api" % "1.6.0" % "compile" - val slf4j_log4j = "org.slf4j" % "slf4j-log4j12" % "1.6.0" % "compile" - val log4j = "log4j" % "log4j" % "1.2.15" % "compile" - // testing - val high_scale = "org.apache.cassandra" % "high-scale-lib" % CASSANDRA_VERSION % "test" - val cassandra_clhm = "org.apache.cassandra" % "clhm-production" % CASSANDRA_VERSION % "test" - val commons_coll = "commons-collections" % "commons-collections" % "3.2.1" % "test" - val google_coll = "com.google.collections" % "google-collections" % "1.0" % "test" - override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil - } + // ------------------------------------------------------------------------------------------------------------------- + // akka-persistence subproject + // ------------------------------------------------------------------------------------------------------------------- class AkkaPersistenceParentProject(info: ProjectInfo) extends ParentProject(info) { lazy val akka_persistence_common = project("akka-persistence-common", "akka-persistence-common", @@ -282,52 +415,212 @@ class AkkaParent(info: ProjectInfo) extends DefaultProject(info) { new AkkaCassandraProject(_), akka_persistence_common) } - class AkkaKernelProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) + // 
------------------------------------------------------------------------------------------------------------------- + // akka-persistence-common subproject + // ------------------------------------------------------------------------------------------------------------------- + + class AkkaPersistenceCommonProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { + val commons_pool = Dependencies.commons_pool + val thrift = Dependencies.thrift + } - class AkkaSpringProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with CodeFellowPlugin { - val spring_beans = "org.springframework" % "spring-beans" % "3.0.1.RELEASE" % "compile" - val spring_context = "org.springframework" % "spring-context" % "3.0.1.RELEASE" % "compile" + // ------------------------------------------------------------------------------------------------------------------- + // akka-persistence-redis subproject + // ------------------------------------------------------------------------------------------------------------------- + + class AkkaRedisProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { + val commons_codec = Dependencies.commons_codec + val redis = Dependencies.redis + + override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil + } + + // ------------------------------------------------------------------------------------------------------------------- + // akka-persistence-mongo subproject + // ------------------------------------------------------------------------------------------------------------------- + + class AkkaMongoProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { + val mongo = Dependencies.mongo + + override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil + } + + // ------------------------------------------------------------------------------------------------------------------- + // akka-persistence-cassandra subproject + // 
------------------------------------------------------------------------------------------------------------------- + + class AkkaCassandraProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { + val cassandra = Dependencies.cassandra + val log4j = Dependencies.log4j + val slf4j = Dependencies.slf4j + val slf4j_log4j = Dependencies.slf4j_log4j // testing - val camel_spring = "org.apache.camel" % "camel-spring" % "2.3.0" % "test" - // enforce version 3.0.1.RELEASE otherwise version 2.5.6 is pulled via camel-spring - val spring_tx = "org.springframework" % "spring-tx" % "3.0.1.RELEASE" % "test" - val scalatest = "org.scalatest" % "scalatest" % SCALATEST_VERSION % "test" - val junit = "junit" % "junit" % "4.5" % "test" + val cassandra_clhm = Dependencies.cassandra_clhm + val commons_coll = Dependencies.commons_coll + val google_coll = Dependencies.google_coll + val high_scale = Dependencies.high_scale + + override def testOptions = TestFilter((name: String) => name.endsWith("Test")) :: Nil } + // ------------------------------------------------------------------------------------------------------------------- + // akka-kernel subproject + // ------------------------------------------------------------------------------------------------------------------- + + class AkkaKernelProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) + + // ------------------------------------------------------------------------------------------------------------------- + // akka-spring subproject + // ------------------------------------------------------------------------------------------------------------------- + + class AkkaSpringProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with CodeFellowPlugin { + val spring_beans = Dependencies.spring_beans + val spring_context = Dependencies.spring_context + + // testing + val camel_spring = Dependencies.camel_spring + val junit = Dependencies.junit + val scalatest = 
Dependencies.scalatest + } + + // ------------------------------------------------------------------------------------------------------------------- + // akka-jta subproject + // ------------------------------------------------------------------------------------------------------------------- + class AkkaJTAProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with CodeFellowPlugin { - val atomikos_transactions = "com.atomikos" % "transactions" % "3.2.3" % "compile" - val atomikos_transactions_jta = "com.atomikos" % "transactions-jta" % "3.2.3" % "compile" - val atomikos_transactions_api = "com.atomikos" % "transactions-api" % "3.2.3" % "compile" + val atomikos_transactions = Dependencies.atomikos_transactions + val atomikos_transactions_api = Dependencies.atomikos_transactions_api + val atomikos_transactions_jta = Dependencies.atomikos_transactions_jta + val jta_1_1 = Dependencies.jta_1_1 //val atomikos_transactions_util = "com.atomikos" % "transactions-util" % "3.2.3" % "compile" - val jta_spec = "org.apache.geronimo.specs" % "geronimo-jta_1.1_spec" % "1.1.1" % "compile" intransitive() } - // ================= TEST ================== - class AkkaActiveObjectTestProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with CodeFellowPlugin { + // ------------------------------------------------------------------------------------------------------------------- + // OSGi stuff + // ------------------------------------------------------------------------------------------------------------------- + + class AkkaOSGiParentProject(info: ProjectInfo) extends ParentProject(info) { + lazy val akka_osgi_dependencies_bundle = project("akka-osgi-dependencies-bundle", "akka-osgi-dependencies-bundle", + new AkkaOSGiDependenciesBundleProject(_), akka_kernel, akka_jta) // akka_kernel does not depend on akka_jta (why?) 
therefore we list akka_jta here + lazy val akka_osgi_assembly = project("akka-osgi-assembly", "akka-osgi-assembly", + new AkkaOSGiAssemblyProject(_), akka_osgi_dependencies_bundle, akka_core, akka_amqp, akka_http, + akka_camel, akka_spring, akka_jta, akka_persistence.akka_persistence_common, + akka_persistence.akka_persistence_redis, akka_persistence.akka_persistence_mongo, + akka_persistence.akka_persistence_cassandra) + } + + class AkkaOSGiDependenciesBundleProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with BNDPlugin { + override def bndClasspath = compileClasspath + override def bndPrivatePackage = Seq("") + override def bndImportPackage = Seq("*;resolution:=optional") + override def bndExportPackage = Seq( + "org.aopalliance.*;version=1.0.0", + + // Provided by other bundles + "!se.scalablesolutions.akka.*", + "!net.liftweb.*", + "!com.google.inject.*", + "!javax.transaction.*", + "!javax.ws.rs.*", + "!javax.jms.*", + "!javax.transaction,*", + "!org.apache.commons.io.*", + "!org.apache.commons.pool.*", + "!org.codehaus.jackson.*", + "!org.jboss.netty.*", + "!org.springframework.*", + "!org.apache.camel.*", + "!org.fusesource.commons.management.*", + + "*;version=0.0.0") + } + + class AkkaOSGiAssemblyProject(info: ProjectInfo) extends DefaultProject(info) { + + // Scala bundle + val scala_bundle = "com.weiglewilczek.scala-lang-osgi" % "scala-library" % buildScalaVersion % "compile" intransitive + + // Lift bundles +// val lift_util = Dependencies.lift_util.intransitive +// val lift_actor = "net.liftweb" % "lift-actor" % LIFT_VERSION % "compile" intransitive +// val lift_common = "net.liftweb" % "lift-common" % LIFT_VERSION % "compile" intransitive +// val lift_json = "net.liftweb" % "lift-json" % LIFT_VERSION % "compile" intransitive + + // Camel bundles + val camel_core = Dependencies.camel_core.intransitive + val fusesource_commonman = "org.fusesource.commonman" % "commons-management" % "1.0" intransitive + + // Spring bundles + val 
spring_beans = Dependencies.spring_beans.intransitive + val spring_context = Dependencies.spring_context.intransitive + val spring_aop = "org.springframework" % "spring-aop" % SPRING_VERSION % "compile" intransitive + val spring_asm = "org.springframework" % "spring-asm" % SPRING_VERSION % "compile" intransitive + val spring_core = "org.springframework" % "spring-core" % SPRING_VERSION % "compile" intransitive + val spring_expression = "org.springframework" % "spring-expression" % SPRING_VERSION % "compile" intransitive + val spring_jms = "org.springframework" % "spring-jms" % SPRING_VERSION % "compile" intransitive + val spring_tx = "org.springframework" % "spring-tx" % SPRING_VERSION % "compile" intransitive + + val commons_codec = Dependencies.commons_codec.intransitive + val commons_io = Dependencies.commons_io.intransitive + val commons_pool = Dependencies.commons_pool.intransitive + val guicey = Dependencies.guicey.intransitive + val jackson = Dependencies.jackson.intransitive + val jackson_core = Dependencies.jackson_core.intransitive + val jsr311 = Dependencies.jsr311.intransitive + val jta_1_1 = Dependencies.jta_1_1.intransitive + val netty = Dependencies.netty.intransitive + val commons_fileupload = "commons-fileupload" % "commons-fileupload" % "1.2.1" % "compile" intransitive + val jms_1_1 = "org.apache.geronimo.specs" % "geronimo-jms_1.1_spec" % "1.1.1" % "compile" intransitive + val joda = "joda-time" % "joda-time" % "1.6" intransitive + + override def packageAction = + task { + val libs: Seq[Path] = managedClasspath(config("compile")).get.toSeq + val prjs: Seq[Path] = info.dependencies.toSeq.asInstanceOf[Seq[DefaultProject]] map { _.jarPath } + val all = libs ++ prjs + val destination = outputPath / "bundles" + FileUtilities.copyFlat(all, destination, log) + log info "Copied %s bundles to %s".format(all.size, destination) + None + } + + override def artifacts = Set.empty + } + + // 
------------------------------------------------------------------------------------------------------------------- + // Test + // ------------------------------------------------------------------------------------------------------------------- + + class AkkaTypedActorTestProject(info: ProjectInfo) extends DefaultProject(info) { // testing val junit = "junit" % "junit" % "4.5" % "test" val jmock = "org.jmock" % "jmock" % "2.4.0" % "test" } - // ================= EXAMPLES ================== + // ------------------------------------------------------------------------------------------------------------------- + // Examples + // ------------------------------------------------------------------------------------------------------------------- + class AkkaSampleAntsProject(info: ProjectInfo) extends DefaultSpdeProject(info) with CodeFellowPlugin { - val scalaToolsSnapshots = ScalaToolsSnapshots +// val scalaToolsSnapshots = ScalaToolsSnapshots override def spdeSourcePath = mainSourcePath / "spde" } class AkkaSampleChatProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) with CodeFellowPlugin class AkkaSamplePubSubProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) with CodeFellowPlugin - class AkkaSampleLiftProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) with CodeFellowPlugin { - val commons_logging = "commons-logging" % "commons-logging" % "1.1.1" % "compile" - val lift = "net.liftweb" % "lift-webkit" % LIFT_VERSION % "compile" - val lift_util = "net.liftweb" % "lift-util" % LIFT_VERSION % "compile" - val servlet = "javax.servlet" % "servlet-api" % "2.5" % "compile" + class AkkaSampleLiftProject(info: ProjectInfo) extends DefaultWebProject(info) with DeployProject with CodeFellowPlugin { + val commons_logging = Dependencies.commons_logging + val lift_util = Dependencies.lift_util + val lift_webkit = Dependencies.lift_webkit + val servlet = Dependencies.servlet + // testing - val jetty = 
"org.mortbay.jetty" % "jetty" % "6.1.22" % "test" - val junit = "junit" % "junit" % "4.5" % "test" + val jettyServer = Dependencies.jettyServer + val junit = Dependencies.junit + + def deployPath = AkkaParentProject.this.deployPath } class AkkaSampleRestJavaProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) with CodeFellowPlugin @@ -335,20 +628,36 @@ class AkkaParent(info: ProjectInfo) extends DefaultProject(info) { class AkkaSampleRemoteProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) with CodeFellowPlugin class AkkaSampleRestScalaProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) with CodeFellowPlugin { - val jsr311 = "javax.ws.rs" % "jsr311-api" % "1.1.1" % "compile" + val jsr311 = Dependencies.jsr311 } class AkkaSampleCamelProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) with CodeFellowPlugin { - val spring_jms = "org.springframework" % "spring-jms" % "3.0.1.RELEASE" % "compile" - val camel_jetty = "org.apache.camel" % "camel-jetty" % "2.3.0" % "compile" - val camel_jms = "org.apache.camel" % "camel-jms" % "2.3.0" % "compile" - val activemq_core = "org.apache.activemq" % "activemq-core" % "5.3.2" % "compile" + override def ivyXML = + + + + + + + + + + + + + } class AkkaSampleSecurityProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) with CodeFellowPlugin { - val jsr311 = "javax.ws.rs" % "jsr311-api" % "1.1.1" % "compile" - val jsr250 = "javax.annotation" % "jsr250-api" % "1.0" % "compile" - val commons_codec = "commons-codec" % "commons-codec" % "1.4" % "compile" + val commons_codec = Dependencies.commons_codec + val jsr250 = Dependencies.jsr250 + val jsr311 = Dependencies.jsr311 + } + + class AkkaSampleOSGiProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with BNDPlugin { + val osgi_core = Dependencies.osgi_core + override lazy val bndBundleActivator = Some("se.scalablesolutions.akka.sample.osgi.Activator") + override lazy val 
bndExportPackage = Nil // Necessary because of mixing-in AkkaDefaultProject which exports all ...akka.* packages! } class AkkaSamplesParentProject(info: ProjectInfo) extends ParentProject(info) { @@ -370,10 +679,14 @@ class AkkaParent(info: ProjectInfo) extends DefaultProject(info) { new AkkaSampleSecurityProject(_), akka_kernel) lazy val akka_sample_remote = project("akka-sample-remote", "akka-sample-remote", new AkkaSampleRemoteProject(_), akka_kernel) + lazy val akka_sample_osgi = project("akka-sample-osgi", "akka-sample-osgi", + new AkkaSampleOSGiProject(_), akka_core) } - // ------------------------------------------------------------ - // helper functions + // ------------------------------------------------------------------------------------------------------------------- + // Helpers + // ------------------------------------------------------------------------------------------------------------------- + def removeDupEntries(paths: PathFinder) = Path.lazyPathFinder { val mapped = paths.get map { p => (p.relativePath, p) } @@ -401,9 +714,9 @@ class AkkaParent(info: ProjectInfo) extends DefaultProject(info) { def akkaArtifacts = descendents(info.projectPath / "dist", "*" + buildScalaVersion + "-" + version + ".jar") // ------------------------------------------------------------ - class AkkaDefaultProject(info: ProjectInfo, val deployPath: Path) extends DefaultProject(info) with DeployProject + class AkkaDefaultProject(info: ProjectInfo, val deployPath: Path) extends DefaultProject(info) with DeployProject with OSGiProject - trait DeployProject extends DefaultProject { + trait DeployProject { self: Project => // defines where the deployTask copies jars to def deployPath: Path @@ -422,4 +735,8 @@ class AkkaParent(info: ProjectInfo) extends DefaultProject(info) { FileUtilities.copyFile(jar, toDir / jar.name, log) } else None } + + trait OSGiProject extends DefaultProject with BNDPlugin { + override def bndExportPackage = 
Seq("se.scalablesolutions.akka.*;version=%s".format(projectVersion.value)) + } } diff --git a/project/plugins/Plugins.scala b/project/plugins/Plugins.scala index 92b9943998..ce3319b885 100644 --- a/project/plugins/Plugins.scala +++ b/project/plugins/Plugins.scala @@ -1,10 +1,31 @@ import sbt._ class Plugins(info: ProjectInfo) extends PluginDefinition(info) { - val databinderRepo = "Databinder Repository" at "http://databinder.net/repo" - val embeddedRepo = "Embedded Repo" at (info.projectPath / "embedded-repo").asURL.toString - val spdeSbt = "us.technically.spde" % "spde-sbt-plugin" % "0.4.1" - val codeFellow = "de.tuxed" % "codefellow-plugin" % "0.3" // for code completion and more in VIM -// val repo = "GH-pages repo" at "http://mpeltonen.github.com/maven/" -// val idea = "com.github.mpeltonen" % "sbt-idea-plugin" % "0.1-SNAPSHOT" + + // ------------------------------------------------------------------------------------------------------------------- + // All repositories *must* go here! See ModuleConigurations below. + // ------------------------------------------------------------------------------------------------------------------- + object Repositories { + lazy val AquteRepo = "aQute Maven Repository" at "http://www.aqute.biz/repo" + lazy val DatabinderRepo = "Databinder Repository" at "http://databinder.net/repo" + lazy val EmbeddedRepo = "Embedded Repo" at (info.projectPath / "embedded-repo").asURL.toString + } + + // ------------------------------------------------------------------------------------------------------------------- + // ModuleConfigurations + // Every dependency that cannot be resolved from the built-in repositories (Maven Central and Scala Tools Releases) + // must be resolved from a ModuleConfiguration. This will result in a significant acceleration of the update action. + // Therefore, if repositories are defined, this must happen as def, not as val. 
+ // ------------------------------------------------------------------------------------------------------------------- + import Repositories._ + lazy val aquteModuleConfig = ModuleConfiguration("biz.aQute", AquteRepo) + lazy val codeFellowModuleConfig = ModuleConfiguration("de.tuxed", EmbeddedRepo) + lazy val spdeModuleConfig = ModuleConfiguration("us.technically.spde", DatabinderRepo) + + // ------------------------------------------------------------------------------------------------------------------- + // Dependencies + // ------------------------------------------------------------------------------------------------------------------- + lazy val bnd4sbt = "com.weiglewilczek.bnd4sbt" % "bnd4sbt" % "1.0.0.RC4" + lazy val codeFellow = "de.tuxed" % "codefellow-plugin" % "0.3" // for code completion and more in VIM + lazy val spdeSbt = "us.technically.spde" % "spde-sbt-plugin" % "0.4.1" }