From b7b79484baa2d6b97bd5d8e34ee18ad5a566b875 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jonas=20Bon=C3=A9r?=
Date: Mon, 23 Aug 2010 21:25:11 +0200
Subject: [PATCH 1/8] minor reformatting

---
 .../src/main/scala/dataflow/DataFlowVariable.scala | 11 ++++-------
 akka-core/src/test/scala/dataflow/DataFlowSpec.scala | 6 ++++--
 2 files changed, 8 insertions(+), 9 deletions(-)

diff --git a/akka-core/src/main/scala/dataflow/DataFlowVariable.scala b/akka-core/src/main/scala/dataflow/DataFlowVariable.scala
index fa4325e4c2..f35d0f898e 100644
--- a/akka-core/src/main/scala/dataflow/DataFlowVariable.scala
+++ b/akka-core/src/main/scala/dataflow/DataFlowVariable.scala
@@ -21,11 +21,7 @@ object DataFlow {
   object Start
   object Exit
 
-  import java.util.concurrent.atomic.AtomicReference
-  import java.util.concurrent.{ConcurrentLinkedQueue, LinkedBlockingQueue}
   import scala.collection.JavaConversions._
-  import se.scalablesolutions.akka.actor.Actor
-  import se.scalablesolutions.akka.dispatch.CompletableFuture
 
   def thread(body: => Unit): Unit = spawn(body)
 
@@ -81,9 +77,9 @@ object DataFlow {
 
     private[this] val in = actorOf(new In(this)).start
 
-    def <<(ref: DataFlowVariable[T]): Unit = if(this.value.get.isEmpty) in ! Set(ref())
+    def <<(ref: DataFlowVariable[T]): Unit = if (this.value.get.isEmpty) in ! Set(ref())
 
-    def <<(value: T): Unit = if(this.value.get.isEmpty) in ! Set(value)
+    def <<(value: T): Unit = if (this.value.get.isEmpty) in ! Set(value)
 
     def apply(): T = {
       value.get getOrElse {
@@ -91,7 +87,8 @@ object DataFlow {
       blockedReaders offer out
       val result = (out !! Get).as[T]
       out ! Exit
-      result.getOrElse(throw new DataFlowVariableException("Timed out (after " + TIME_OUT + " milliseconds) while waiting for result"))
+      result.getOrElse(throw new DataFlowVariableException(
+        "Timed out (after " + TIME_OUT + " milliseconds) while waiting for result"))
       }
     }
 
diff --git a/akka-core/src/test/scala/dataflow/DataFlowSpec.scala b/akka-core/src/test/scala/dataflow/DataFlowSpec.scala
index 0e917c904e..ce54699a6f 100644
--- a/akka-core/src/test/scala/dataflow/DataFlowSpec.scala
+++ b/akka-core/src/test/scala/dataflow/DataFlowSpec.scala
@@ -11,10 +11,12 @@ import org.scalatest.BeforeAndAfterAll
 import org.scalatest.junit.JUnitRunner
 import org.junit.runner.RunWith
 
-import se.scalablesolutions.akka.dispatch.DefaultCompletableFuture
 import java.util.concurrent.{TimeUnit, CountDownLatch}
-import annotation.tailrec
 import java.util.concurrent.atomic.{AtomicLong, AtomicReference, AtomicInteger}
+
+import scala.annotation.tailrec
+
+import se.scalablesolutions.akka.dispatch.DefaultCompletableFuture
 import se.scalablesolutions.akka.actor.ActorRegistry
 
 @RunWith(classOf[JUnitRunner])

From c67b17a91234850fb65dbd42bffdf5a00e986c06 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jonas=20Bon=C3=A9r?=
Date: Tue, 24 Aug 2010 23:21:28 +0200
Subject: [PATCH 2/8] split up akka-core into three modules: akka-actors, akka-typed-actors, akka-core

---
 akka-active-object-test/pom.xml | 113 -
 .../scalablesolutions/akka/api/AllTest.java | 20 -
 .../akka/api/InMemFailer.java | 7 -
 .../akka/api/InMemNestedStateTest.java | 134 -
 .../akka/api/InMemoryStateTest.java | 162 -
 .../akka/api/MiscActiveObjectTest.java | 35 -
 .../akka/api/RemoteInMemoryStateTest.java | 134 -
 .../akka/api/SimpleJavaPojo.java | 36 -
 .../api/TypedActorGuiceConfiguratorTest.java | 115 -
 .../src/main/resources/logback.xml | 0
 .../src/main/scala/actor/Actor.scala | 5 +-
 .../src/main/scala/actor/ActorRef.scala | 117 +-
 .../src/main/scala/actor/ActorRegistry.scala | 0
.../src/main/scala/actor/Agent.scala | 0 .../src/main/scala/actor/FSM.scala | 0 .../src/main/scala/actor/Implicits.scala | 0 .../src/main/scala/actor/Scheduler.scala | 0 .../src/main/scala/actor/Supervisor.scala | 8 +- .../src/main/scala/actor/UntypedActor.scala | 0 .../src/main/scala/config/Config.scala | 20 +- .../src/main/scala/config/Configuration.scala | 0 .../src/main/scala/config/Configurator.scala | 0 .../main/scala/config/SupervisionConfig.scala | 0 .../scala/dataflow/DataFlowVariable.scala | 0 ...actReactorBasedEventDrivenDispatcher.scala | 0 .../src/main/scala/dispatch/Dispatchers.scala | 0 .../ExecutorBasedEventDrivenDispatcher.scala | 0 ...sedEventDrivenWorkStealingDispatcher.scala | 0 .../src/main/scala/dispatch/Future.scala | 0 .../main/scala/dispatch/HawtDispatcher.scala | 0 .../main/scala/dispatch/MessageHandling.scala | 0 ...sedSingleThreadEventDrivenDispatcher.scala | 0 ...BasedThreadPoolEventDrivenDispatcher.scala | 0 .../dispatch/ThreadBasedDispatcher.scala | 0 .../scala/dispatch/ThreadPoolBuilder.scala | 0 .../src/main/scala/routing/Iterators.scala | 0 .../src/main/scala/routing/Listeners.scala | 0 .../src/main/scala/routing/Routers.scala | 0 .../src/main/scala/routing/Routing.scala | 0 .../src/main/scala/stm/JTA.scala | 3 +- .../src/main/scala/stm/Ref.scala | 0 .../src/main/scala/stm/Transaction.scala | 0 .../main/scala/stm/TransactionFactory.scala | 0 .../scala/stm/TransactionFactoryBuilder.scala | 0 .../scala/stm/TransactionManagement.scala | 0 .../src/main/scala/stm/TransactionalMap.scala | 0 .../main/scala/stm/TransactionalVector.scala | 0 .../src/main/scala/stm/global/Atomic.scala | 0 .../src/main/scala/stm/global/GlobalStm.scala | 13 +- .../src/main/scala/stm/global/package.scala | 0 .../src/main/scala/stm/local/Atomic.scala | 0 .../src/main/scala/stm/local/LocalStm.scala | 10 +- .../src/main/scala/stm/local/package.scala | 0 .../src/main/scala/stm/transactional.scala | 0 .../src/main/scala/util/AkkaException.scala | 0 .../src/main/scala/util/Bootable.scala | 0 .../src/main/scala/util/Duration.scala | 0 .../src/main/scala/util/HashCode.scala | 0 .../src/main/scala/util/Helpers.scala | 0 .../main/scala/util/ListenerManagement.scala | 0 .../src/main/scala/util/LockUtil.scala | 0 .../src/main/scala/util/Logging.scala | 0 .../src/main/scala/util/Uuid.scala | 0 .../src/test/resources/logback-test.xml | 21 + akka-actors/src/test/scala/Messages.scala | 44 + .../ActorFireForgetRequestReplySpec.scala | 92 + .../test/scala/actor/actor/AgentSpec.scala | 111 + .../src/test/scala/actor/actor/Bench.scala | 119 + .../test/scala/actor/actor/FSMActorSpec.scala | 82 + .../scala/actor/actor/ForwardActorSpec.scala | 81 + .../actor/actor/ReceiveTimeoutSpec.scala | 77 + .../scala/actor/actor/TransactorSpec.scala | 255 + .../supervisor/RestartStrategySpec.scala | 74 + .../supervisor/SupervisorHierarchySpec.scala | 81 + .../actor/supervisor/SupervisorSpec.scala | 605 ++ .../test/scala/dataflow/DataFlowSpec.scala | 173 + .../test/scala/dispatch/DispatchersSpec.scala | 74 + ...rBasedEventDrivenDispatcherActorSpec.scala | 68 + ...BasedEventDrivenDispatcherActorsSpec.scala | 61 + ...ventDrivenWorkStealingDispatcherSpec.scala | 107 + .../src/test/scala/dispatch/FutureSpec.scala | 106 + .../dispatch/HawtDispatcherActorSpec.scala | 71 + .../dispatch/HawtDispatcherEchoServer.scala | 207 + ...ThreadEventDrivenDispatcherActorSpec.scala | 71 + ...adPoolEventDrivenDispatcherActorSpec.scala | 66 + .../scala/dispatch/ThreadBasedActorSpec.scala | 67 + .../dispatch/ThreadBasedDispatcherSpec.scala | 
91 + .../test/scala/misc/ActorRegistrySpec.scala | 255 + .../src/test/scala/misc/SchedulerSpec.scala | 127 + .../src/test/scala/routing/RoutingSpec.scala | 179 + .../src/test/scala/stm/JavaStmSpec.scala | 0 .../src/test/scala/stm/RefSpec.scala | 0 .../src/test/scala/stm/StmSpec.scala | 0 .../src/test/scala/ticket/Ticket001Spec.scala | 13 + .../actor/BootableActorLoaderService.scala | 101 - .../scala/actor/SerializationProtocol.scala | 253 - .../main/scala/remote/MessageSerializer.scala | 2 +- .../src/main/scala/remote/RemoteServer.scala | 2 +- .../scala/serialization/Compression.scala | 4 +- akka-core/src/test/scala/TestClasses.bak | 102 - .../akka/config/DependencyBinding.java | 0 .../akka/config/TypedActorGuiceModule.java | 0 .../akka/remote/protocol/RemoteProtocol.java | 5190 +++++++++++++++++ .../src/main/scala/actor/TypedActor.scala | 10 +- .../scala/config/TypedActorConfigurator.scala | 0 .../config/TypedActorGuiceConfigurator.scala | 0 .../se/scalablesolutions/akka/actor}/Bar.java | 2 +- .../akka/actor}/BarImpl.java | 7 +- .../se/scalablesolutions/akka/actor}/Ext.java | 2 +- .../akka/actor}/ExtImpl.java | 2 +- .../se/scalablesolutions/akka/actor/Foo.java | 14 + .../scalablesolutions/akka/actor/FooImpl.java | 16 +- .../actor/NestedTransactionalTypedActor.java | 12 + .../NestedTransactionalTypedActorImpl.java | 39 +- .../akka/actor/ProtobufProtocol.java | 1060 ++++ .../akka/actor/RemoteTypedActorOne.java | 6 + .../akka/actor/RemoteTypedActorOneImpl.java | 29 + .../akka/actor/RemoteTypedActorTwo.java | 6 + .../akka/actor/RemoteTypedActorTwoImpl.java | 29 + .../akka/actor/SamplePojo.java | 8 + .../akka/actor/SamplePojoImpl.java | 45 + .../akka/actor/SimpleJavaPojo.java | 14 + .../akka/actor/SimpleJavaPojoCaller.java | 9 + .../akka/actor/SimpleJavaPojoCallerImpl.java | 26 + .../akka/actor/SimpleJavaPojoImpl.java | 53 + .../akka/actor/TransactionalTypedActor.java | 14 + .../actor/TransactionalTypedActorImpl.java | 34 +- .../akka/actor/TypedActorFailer.java | 5 + .../akka/actor/TypedActorFailerImpl.java | 9 + .../scalablesolutions/akka/stm/Address.java | 13 + .../akka/stm/CounterExample.java | 26 + .../akka/stm/JavaStmTests.java | 91 + .../akka/stm/RefExample.java | 36 + .../akka/stm/StmExamples.java | 18 + .../akka/stm/TransactionFactoryExample.java | 30 + .../akka/stm/TransactionalMapExample.java | 35 + .../akka/stm/TransactionalVectorExample.java | 34 + .../se/scalablesolutions/akka/stm/User.java | 13 + .../src/test}/resources/META-INF/aop.xml | 0 .../NestedTransactionalTypedActorSpec.scala | 102 + ...artNestedTransactionalTypedActorSpec.scala | 118 + .../RestartTransactionalTypedActorSpec.scala | 92 + .../TransactionalTypedActorSpec.scala | 83 + .../typed-actor/TypedActorContextSpec.scala | 38 + .../TypedActorGuiceConfiguratorSpec.scala | 131 + .../typed-actor/TypedActorLifecycleSpec.scala | 169 + .../actor/typed-actor/TypedActorSpec.scala | 31 + .../TypedActorUtilFunctionsSpec.scala | 23 + project/build/AkkaProject.scala | 96 +- 149 files changed, 11195 insertions(+), 1399 deletions(-) delete mode 100644 akka-active-object-test/pom.xml delete mode 100644 akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/AllTest.java delete mode 100644 akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemFailer.java delete mode 100644 akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemNestedStateTest.java delete mode 100644 akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemoryStateTest.java delete mode 100644 
akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/MiscActiveObjectTest.java delete mode 100644 akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/RemoteInMemoryStateTest.java delete mode 100644 akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/SimpleJavaPojo.java delete mode 100644 akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/TypedActorGuiceConfiguratorTest.java rename {akka-core => akka-actors}/src/main/resources/logback.xml (100%) rename {akka-core => akka-actors}/src/main/scala/actor/Actor.scala (99%) rename {akka-core => akka-actors}/src/main/scala/actor/ActorRef.scala (94%) rename {akka-core => akka-actors}/src/main/scala/actor/ActorRegistry.scala (100%) rename {akka-core => akka-actors}/src/main/scala/actor/Agent.scala (100%) rename akka-core/src/main/scala/actor/Fsm.scala => akka-actors/src/main/scala/actor/FSM.scala (100%) rename {akka-core => akka-actors}/src/main/scala/actor/Implicits.scala (100%) rename {akka-core => akka-actors}/src/main/scala/actor/Scheduler.scala (100%) rename {akka-core => akka-actors}/src/main/scala/actor/Supervisor.scala (96%) rename {akka-core => akka-actors}/src/main/scala/actor/UntypedActor.scala (100%) rename {akka-core => akka-actors}/src/main/scala/config/Config.scala (83%) rename {akka-core => akka-actors}/src/main/scala/config/Configuration.scala (100%) rename {akka-core => akka-actors}/src/main/scala/config/Configurator.scala (100%) rename {akka-core => akka-actors}/src/main/scala/config/SupervisionConfig.scala (100%) rename {akka-core => akka-actors}/src/main/scala/dataflow/DataFlowVariable.scala (100%) rename {akka-core => akka-actors}/src/main/scala/dispatch/AbstractReactorBasedEventDrivenDispatcher.scala (100%) rename {akka-core => akka-actors}/src/main/scala/dispatch/Dispatchers.scala (100%) rename {akka-core => akka-actors}/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala (100%) rename {akka-core => akka-actors}/src/main/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala (100%) rename {akka-core => akka-actors}/src/main/scala/dispatch/Future.scala (100%) rename {akka-core => akka-actors}/src/main/scala/dispatch/HawtDispatcher.scala (100%) rename {akka-core => akka-actors}/src/main/scala/dispatch/MessageHandling.scala (100%) rename {akka-core => akka-actors}/src/main/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcher.scala (100%) rename {akka-core => akka-actors}/src/main/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcher.scala (100%) rename {akka-core => akka-actors}/src/main/scala/dispatch/ThreadBasedDispatcher.scala (100%) rename {akka-core => akka-actors}/src/main/scala/dispatch/ThreadPoolBuilder.scala (100%) rename {akka-core => akka-actors}/src/main/scala/routing/Iterators.scala (100%) rename {akka-core => akka-actors}/src/main/scala/routing/Listeners.scala (100%) rename {akka-core => akka-actors}/src/main/scala/routing/Routers.scala (100%) rename {akka-core => akka-actors}/src/main/scala/routing/Routing.scala (100%) rename {akka-core => akka-actors}/src/main/scala/stm/JTA.scala (98%) rename {akka-core => akka-actors}/src/main/scala/stm/Ref.scala (100%) rename {akka-core => akka-actors}/src/main/scala/stm/Transaction.scala (100%) rename {akka-core => akka-actors}/src/main/scala/stm/TransactionFactory.scala (100%) rename {akka-core => akka-actors}/src/main/scala/stm/TransactionFactoryBuilder.scala (100%) rename {akka-core => akka-actors}/src/main/scala/stm/TransactionManagement.scala (100%) rename {akka-core => 
akka-actors}/src/main/scala/stm/TransactionalMap.scala (100%) rename {akka-core => akka-actors}/src/main/scala/stm/TransactionalVector.scala (100%) rename {akka-core => akka-actors}/src/main/scala/stm/global/Atomic.scala (100%) rename {akka-core => akka-actors}/src/main/scala/stm/global/GlobalStm.scala (78%) rename {akka-core => akka-actors}/src/main/scala/stm/global/package.scala (100%) rename {akka-core => akka-actors}/src/main/scala/stm/local/Atomic.scala (100%) rename {akka-core => akka-actors}/src/main/scala/stm/local/LocalStm.scala (79%) rename {akka-core => akka-actors}/src/main/scala/stm/local/package.scala (100%) rename {akka-core => akka-actors}/src/main/scala/stm/transactional.scala (100%) rename {akka-core => akka-actors}/src/main/scala/util/AkkaException.scala (100%) rename {akka-core => akka-actors}/src/main/scala/util/Bootable.scala (100%) rename {akka-core => akka-actors}/src/main/scala/util/Duration.scala (100%) rename {akka-core => akka-actors}/src/main/scala/util/HashCode.scala (100%) rename {akka-core => akka-actors}/src/main/scala/util/Helpers.scala (100%) rename {akka-core => akka-actors}/src/main/scala/util/ListenerManagement.scala (100%) rename {akka-core => akka-actors}/src/main/scala/util/LockUtil.scala (100%) rename {akka-core => akka-actors}/src/main/scala/util/Logging.scala (100%) rename {akka-core => akka-actors}/src/main/scala/util/Uuid.scala (100%) create mode 100644 akka-actors/src/test/resources/logback-test.xml create mode 100644 akka-actors/src/test/scala/Messages.scala create mode 100644 akka-actors/src/test/scala/actor/actor/ActorFireForgetRequestReplySpec.scala create mode 100644 akka-actors/src/test/scala/actor/actor/AgentSpec.scala create mode 100644 akka-actors/src/test/scala/actor/actor/Bench.scala create mode 100644 akka-actors/src/test/scala/actor/actor/FSMActorSpec.scala create mode 100644 akka-actors/src/test/scala/actor/actor/ForwardActorSpec.scala create mode 100644 akka-actors/src/test/scala/actor/actor/ReceiveTimeoutSpec.scala create mode 100644 akka-actors/src/test/scala/actor/actor/TransactorSpec.scala create mode 100644 akka-actors/src/test/scala/actor/supervisor/RestartStrategySpec.scala create mode 100644 akka-actors/src/test/scala/actor/supervisor/SupervisorHierarchySpec.scala create mode 100644 akka-actors/src/test/scala/actor/supervisor/SupervisorSpec.scala create mode 100644 akka-actors/src/test/scala/dataflow/DataFlowSpec.scala create mode 100644 akka-actors/src/test/scala/dispatch/DispatchersSpec.scala create mode 100644 akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala create mode 100644 akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorsSpec.scala create mode 100644 akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala create mode 100644 akka-actors/src/test/scala/dispatch/FutureSpec.scala create mode 100644 akka-actors/src/test/scala/dispatch/HawtDispatcherActorSpec.scala create mode 100644 akka-actors/src/test/scala/dispatch/HawtDispatcherEchoServer.scala create mode 100644 akka-actors/src/test/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala create mode 100644 akka-actors/src/test/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala create mode 100644 akka-actors/src/test/scala/dispatch/ThreadBasedActorSpec.scala create mode 100644 akka-actors/src/test/scala/dispatch/ThreadBasedDispatcherSpec.scala create mode 100644 akka-actors/src/test/scala/misc/ActorRegistrySpec.scala create 
mode 100644 akka-actors/src/test/scala/misc/SchedulerSpec.scala create mode 100644 akka-actors/src/test/scala/routing/RoutingSpec.scala rename {akka-core => akka-actors}/src/test/scala/stm/JavaStmSpec.scala (100%) rename {akka-core => akka-actors}/src/test/scala/stm/RefSpec.scala (100%) rename {akka-core => akka-actors}/src/test/scala/stm/StmSpec.scala (100%) create mode 100644 akka-actors/src/test/scala/ticket/Ticket001Spec.scala delete mode 100644 akka-core/src/main/scala/actor/BootableActorLoaderService.scala delete mode 100644 akka-core/src/main/scala/actor/SerializationProtocol.scala delete mode 100644 akka-core/src/test/scala/TestClasses.bak rename {akka-core => akka-typed-actors}/src/main/java/se/scalablesolutions/akka/config/DependencyBinding.java (100%) rename {akka-core => akka-typed-actors}/src/main/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java (100%) create mode 100644 akka-typed-actors/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java rename {akka-core => akka-typed-actors}/src/main/scala/actor/TypedActor.scala (98%) rename {akka-core => akka-typed-actors}/src/main/scala/config/TypedActorConfigurator.scala (100%) rename {akka-core => akka-typed-actors}/src/main/scala/config/TypedActorGuiceConfigurator.scala (100%) rename {akka-active-object-test/src/test/java/se/scalablesolutions/akka/api => akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor}/Bar.java (61%) rename {akka-active-object-test/src/test/java/se/scalablesolutions/akka/api => akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor}/BarImpl.java (50%) rename {akka-active-object-test/src/test/java/se/scalablesolutions/akka/api => akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor}/Ext.java (50%) rename {akka-active-object-test/src/test/java/se/scalablesolutions/akka/api => akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor}/ExtImpl.java (62%) create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Foo.java rename akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/Foo.java => akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/FooImpl.java (70%) create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActor.java rename akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemStatefulNested.java => akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActorImpl.java (58%) create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/ProtobufProtocol.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOne.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOneImpl.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwo.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwoImpl.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SamplePojoImpl.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCaller.java create mode 100644 
akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCallerImpl.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoImpl.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActor.java rename akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemStateful.java => akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActorImpl.java (62%) create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailer.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailerImpl.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/Address.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/CounterExample.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/JavaStmTests.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/RefExample.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/StmExamples.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionFactoryExample.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionalMapExample.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionalVectorExample.java create mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/User.java rename {akka-core/src/main => akka-typed-actors/src/test}/resources/META-INF/aop.xml (100%) create mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/NestedTransactionalTypedActorSpec.scala create mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/RestartNestedTransactionalTypedActorSpec.scala create mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/RestartTransactionalTypedActorSpec.scala create mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/TransactionalTypedActorSpec.scala create mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorContextSpec.scala create mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorGuiceConfiguratorSpec.scala create mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorLifecycleSpec.scala create mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorSpec.scala create mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorUtilFunctionsSpec.scala diff --git a/akka-active-object-test/pom.xml b/akka-active-object-test/pom.xml deleted file mode 100644 index e498c34d32..0000000000 --- a/akka-active-object-test/pom.xml +++ /dev/null @@ -1,113 +0,0 @@ - - 4.0.0 - - Akka TypedActor Tests in Java - akka-active-object-test - se.scalablesolutions.akka - 0.9 - jar - - - 2.8.0.RC3 - - - - - embedded-repo - Embedded Repository - file:///Users/jboner/src/scala/akka/embedded-repo - - - - jboss - JBoss Repository - https://repository.jboss.org/nexus/content/groups/public - - - - - - - scala-tools.org - Scala-Tools Maven2 Repository - http://scala-tools.org/repo-releases - - - - - - se.scalablesolutions.akka - akka-core_2.8.0.RC3 - 0.9.1 - - - org.multiverse - multiverse-alpha - - - - - junit - junit - 4.5 - test - - - org.jmock - jmock - 2.4.0 - test - - - org.multiverse - multiverse-alpha - 0.6-SNAPSHOT - compile - - - - 
- src/main/java - src/test/java - - - org.apache.maven.plugins - maven-compiler-plugin - - 1.5 - 1.5 - - **/* - - - - - org.apache.maven.plugins - maven-surefire-plugin - - - **/*Persistent* - - - - - - - false - src/test/resources - - - false - src/test/java - - ** - - - **/*.java - - - - - diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/AllTest.java b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/AllTest.java deleted file mode 100644 index 6e0ebecc33..0000000000 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/AllTest.java +++ /dev/null @@ -1,20 +0,0 @@ -package se.scalablesolutions.akka.api; - -import junit.framework.TestCase; -import junit.framework.Test; -import junit.framework.TestSuite; - -public class AllTest extends TestCase { - public static Test suite() { - TestSuite suite = new TestSuite("All Java tests"); - suite.addTestSuite(InMemoryStateTest.class); - suite.addTestSuite(InMemNestedStateTest.class); - suite.addTestSuite(RemoteInMemoryStateTest.class); - suite.addTestSuite(TypedActorGuiceConfiguratorTest.class); - return suite; - } - - public static void main(String[] args) { - junit.textui.TestRunner.run(suite()); - } -} diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemFailer.java b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemFailer.java deleted file mode 100644 index 3908e3cd59..0000000000 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemFailer.java +++ /dev/null @@ -1,7 +0,0 @@ -package se.scalablesolutions.akka.api; - -public class InMemFailer implements java.io.Serializable { - public int fail() { - throw new RuntimeException("Expected exception; to test fault-tolerance"); - } -} diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemNestedStateTest.java b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemNestedStateTest.java deleted file mode 100644 index db9d4d4146..0000000000 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemNestedStateTest.java +++ /dev/null @@ -1,134 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.api; - -import se.scalablesolutions.akka.config.*; -import se.scalablesolutions.akka.config.Config; -import se.scalablesolutions.akka.config.TypedActorConfigurator; -import static se.scalablesolutions.akka.config.JavaConfig.*; -import se.scalablesolutions.akka.actor.*; -import junit.framework.TestCase; - -public class InMemNestedStateTest extends TestCase { - static String messageLog = ""; - - final private TypedActorConfigurator conf = new TypedActorConfigurator(); - - public InMemNestedStateTest() { - conf.configure( - new RestartStrategy(new AllForOne(), 3, 5000, new Class[]{Exception.class}), - new Component[]{ - new Component(InMemStateful.class, new LifeCycle(new Permanent()), 10000000), - new Component(InMemStatefulNested.class, new LifeCycle(new Permanent()), 10000000), - new Component(InMemFailer.class, new LifeCycle(new Permanent()), 1000) - //new Component("inmem-clasher", InMemClasher.class, InMemClasherImpl.class, new LifeCycle(new Permanent()), 100000) - }).supervise(); - Config.config(); - InMemStateful stateful = conf.getInstance(InMemStateful.class); - stateful.init(); - InMemStatefulNested nested = conf.getInstance(InMemStatefulNested.class); - nested.init(); - } - - public void 
testMapShouldNotRollbackStateForStatefulServerInCaseOfSuccess() throws Exception { - InMemStateful stateful = conf.getInstance(InMemStateful.class); - stateful.setMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init"); // set init state - Thread.sleep(100); - InMemStatefulNested nested = conf.getInstance(InMemStatefulNested.class); - nested.setMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init"); // set init state - Thread.sleep(100); - stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state", nested); // transactionrequired - Thread.sleep(100); - assertEquals("new state", stateful.getMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess")); - Thread.sleep(100); - assertEquals("new state", nested.getMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess")); - } - - public void testMapShouldRollbackStateForStatefulServerInCaseOfFailure() throws InterruptedException { - InMemStateful stateful = conf.getInstance(InMemStateful.class); - stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init"); // set init state - Thread.sleep(100); - InMemStatefulNested nested = conf.getInstance(InMemStatefulNested.class); - nested.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init"); // set init state - Thread.sleep(100); - InMemFailer failer = conf.getInstance(InMemFailer.class); - try { - stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer); // call failing transactionrequired method - Thread.sleep(100); - fail("should have thrown an exception"); - } catch (RuntimeException e) { - } // expected - assertEquals("init", stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure")); // check that state is == init state - Thread.sleep(100); - assertEquals("init", nested.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure")); // check that state is == init state - } - - public void testVectorShouldNotRollbackStateForStatefulServerInCaseOfSuccess() throws Exception { - InMemStateful stateful = conf.getInstance(InMemStateful.class); - stateful.setVectorState("init"); // set init state - Thread.sleep(100); - InMemStatefulNested nested = conf.getInstance(InMemStatefulNested.class); - Thread.sleep(100); - nested.setVectorState("init"); // set init state - Thread.sleep(100); - stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state", nested); // transactionrequired - Thread.sleep(100); - assertEquals("new state", stateful.getVectorState()); - Thread.sleep(100); - assertEquals("new state", nested.getVectorState()); - } - - public void testVectorShouldRollbackStateForStatefulServerInCaseOfFailure() throws InterruptedException { - InMemStateful stateful = conf.getInstance(InMemStateful.class); - stateful.setVectorState("init"); // set init state - Thread.sleep(100); - InMemStatefulNested nested = conf.getInstance(InMemStatefulNested.class); - nested.setVectorState("init"); // set init state - Thread.sleep(100); - InMemFailer failer = conf.getInstance(InMemFailer.class); - try { - stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer); // call failing transactionrequired method - Thread.sleep(100); - fail("should have thrown an exception"); - } catch (RuntimeException e) { - } // expected - assertEquals("init", stateful.getVectorState()); // check that state is == init state - 
Thread.sleep(100); - assertEquals("init", nested.getVectorState()); // check that state is == init state - } - - public void testRefShouldNotRollbackStateForStatefulServerInCaseOfSuccess() throws Exception { - InMemStateful stateful = conf.getInstance(InMemStateful.class); - InMemStatefulNested nested = conf.getInstance(InMemStatefulNested.class); - stateful.setRefState("init"); // set init state - Thread.sleep(100); - nested.setRefState("init"); // set init state - Thread.sleep(100); - stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state", nested); // transactionrequired - Thread.sleep(100); - assertEquals("new state", stateful.getRefState()); - Thread.sleep(100); - assertEquals("new state", nested.getRefState()); - } - - public void testRefShouldRollbackStateForStatefulServerInCaseOfFailure() throws InterruptedException { - InMemStateful stateful = conf.getInstance(InMemStateful.class); - InMemStatefulNested nested = conf.getInstance(InMemStatefulNested.class); - stateful.setRefState("init"); // set init state - Thread.sleep(100); - nested.setRefState("init"); // set init state - Thread.sleep(100); - InMemFailer failer = conf.getInstance(InMemFailer.class); - try { - stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer); // call failing transactionrequired method - Thread.sleep(100); - fail("should have thrown an exception"); - } catch (RuntimeException e) { - } // expected - assertEquals("init", stateful.getRefState()); // check that state is == init state - Thread.sleep(100); - assertEquals("init", nested.getRefState()); // check that state is == init state - } -} diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemoryStateTest.java b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemoryStateTest.java deleted file mode 100644 index 6562d0d611..0000000000 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemoryStateTest.java +++ /dev/null @@ -1,162 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.api; - -import junit.framework.TestCase; - -import se.scalablesolutions.akka.config.Config; -import se.scalablesolutions.akka.config.*; -import se.scalablesolutions.akka.config.TypedActorConfigurator; - -import static se.scalablesolutions.akka.config.JavaConfig.*; - -import se.scalablesolutions.akka.actor.*; - -public class InMemoryStateTest extends TestCase { - static String messageLog = ""; - - final private TypedActorConfigurator conf = new TypedActorConfigurator(); - - public InMemoryStateTest() { - Config.config(); - conf.configure( - new RestartStrategy(new AllForOne(), 3, 5000, new Class[] {Exception.class}), - new Component[]{ - new Component(InMemStateful.class, - new LifeCycle(new Permanent()), - //new RestartCallbacks("preRestart", "postRestart")), - 10000), - new Component(InMemFailer.class, - new LifeCycle(new Permanent()), - 10000) - }).supervise(); - InMemStateful stateful = conf.getInstance(InMemStateful.class); - stateful.init(); - } - - public void testMapShouldNotRollbackStateForStatefulServerInCaseOfSuccess() { - InMemStateful stateful = conf.getInstance(InMemStateful.class); - stateful.setMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init"); // set init state - stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state"); // transactionrequired - assertEquals("new state", 
stateful.getMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess")); - } - - public void testMapShouldRollbackStateForStatefulServerInCaseOfFailure() { - InMemStateful stateful = conf.getInstance(InMemStateful.class); - stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init"); // set init state - InMemFailer failer = conf.getInstance(InMemFailer.class); - try { - stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer); // call failing transactionrequired method - fail("should have thrown an exception"); - } catch (RuntimeException e) { - } // expected - assertEquals("init", stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure")); // check that state is == init state - } - - public void testVectorShouldNotRollbackStateForStatefulServerInCaseOfSuccess() { - InMemStateful stateful = conf.getInstance(InMemStateful.class); - stateful.setVectorState("init"); // set init state - stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state"); // transactionrequired - assertEquals("new state", stateful.getVectorState()); - } - - public void testVectorShouldRollbackStateForStatefulServerInCaseOfFailure() { - InMemStateful stateful = conf.getInstance(InMemStateful.class); - stateful.setVectorState("init"); // set init state - InMemFailer failer = conf.getInstance(InMemFailer.class); - try { - stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer); // call failing transactionrequired method - fail("should have thrown an exception"); - } catch (RuntimeException e) { - } // expected - assertEquals("init", stateful.getVectorState()); // check that state is == init state - } - - public void testRefShouldNotRollbackStateForStatefulServerInCaseOfSuccess() { - InMemStateful stateful = conf.getInstance(InMemStateful.class); - stateful.setRefState("init"); // set init state - stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state"); // transactionrequired - assertEquals("new state", stateful.getRefState()); - } - - public void testRefShouldRollbackStateForStatefulServerInCaseOfFailure() { - InMemStateful stateful = conf.getInstance(InMemStateful.class); - stateful.setRefState("init"); // set init state - InMemFailer failer = conf.getInstance(InMemFailer.class); - try { - stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer); // call failing transactionrequired method - fail("should have thrown an exception"); - } catch (RuntimeException e) { - } // expected - assertEquals("init", stateful.getRefState()); // check that state is == init state - } -/* - public void testNestedNonTransactionalMethodHangs() { - InMemStateful stateful = conf.getInstance(InMemStateful.class); - stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init"); // set init state - InMemFailer failer = conf.getInstance(InMemFailer.class); - try { - stateful.thisMethodHangs("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer); // call failing transactionrequired method - fail("should have thrown an exception"); - } catch (RuntimeException e) { - } // expected - assertEquals("init", stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure")); // check that state is == init state - } - */ - // public void testShouldRollbackStateForStatefulServerInCaseOfMessageClash() - // { - // InMemStateful stateful = 
conf.getInstance(InMemStateful.class); - // stateful.setState("stateful", "init"); // set init state - // - // InMemClasher clasher = conf.getInstance(InMemClasher.class); - // clasher.setState("clasher", "init"); // set init state - // - // // try { - // // stateful.clashOk("stateful", "new state", clasher); - // // } catch (RuntimeException e) { } // expected - // // assertEquals("new state", stateful.getState("stateful")); // check that - // // state is == init state - // // assertEquals("was here", clasher.getState("clasher")); // check that - // // state is == init state - // - // try { - // stateful.clashNotOk("stateful", "new state", clasher); - // fail("should have thrown an exception"); - // } catch (RuntimeException e) { - // } // expected - // assertEquals("init", stateful.getState("stateful")); // check that state is - // // == init state - // // assertEquals("init", clasher.getState("clasher")); // check that state - // is - // // == init state - // } -} - -/* -interface InMemClasher { - public void clash(); - - public String getState(String key); - - public void setState(String key, String value); -} - -class InMemClasherImpl implements InMemClasher { - @state - private TransactionalMap state = new InMemoryTransactionalMap(); - - public String getState(String key) { - return (String) state.get(key).get(); - } - - public void setState(String key, String msg) { - state.put(key, msg); - } - - public void clash() { - state.put("clasher", "was here"); - } -} -*/ diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/MiscActiveObjectTest.java b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/MiscActiveObjectTest.java deleted file mode 100644 index 6ab6d2ff03..0000000000 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/MiscActiveObjectTest.java +++ /dev/null @@ -1,35 +0,0 @@ -package se.scalablesolutions.akka.api; - -import static se.scalablesolutions.akka.actor.TypedActor.link; -import static se.scalablesolutions.akka.actor.TypedActor.newInstance; - -import org.junit.Assert; -import org.junit.Test; - -import se.scalablesolutions.akka.config.OneForOneStrategy; -import junit.framework.TestCase; - -/** - *

Small misc tests that do not fit anywhere else and does not require a separate testcase

- * - * @author johanrask - * - */ -public class MiscTypedActorTest extends TestCase { - - - /** - * Verifies that both preRestart and postRestart methods are invoked when - * an actor is restarted - */ - public void testFailingPostRestartInvocation() throws InterruptedException { - SimpleJavaPojo pojo = newInstance(SimpleJavaPojo.class,500); - SimpleJavaPojo supervisor = newInstance(SimpleJavaPojo.class,500); - link(supervisor,pojo,new OneForOneStrategy(3, 2000),new Class[]{Throwable.class}); - pojo.throwException(); - Thread.sleep(500); - Assert.assertTrue(pojo.pre); - Assert.assertTrue(pojo.post); - } - -} diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/RemoteInMemoryStateTest.java b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/RemoteInMemoryStateTest.java deleted file mode 100644 index 89f7e92162..0000000000 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/RemoteInMemoryStateTest.java +++ /dev/null @@ -1,134 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.api; - -import se.scalablesolutions.akka.config.Config; -import se.scalablesolutions.akka.actor.TypedActor; -import se.scalablesolutions.akka.config.TypedActorConfigurator; -import se.scalablesolutions.akka.remote.RemoteNode; - -import junit.framework.TestCase; - -public class RemoteInMemoryStateTest extends TestCase { - static String messageLog = ""; - - static { - new Thread(new Runnable() { - public void run() { - RemoteNode.start(); - } - }).start(); - try { Thread.currentThread().sleep(1000); } catch (Exception e) {} - Config.config(); - } - final TypedActorConfigurator conf = new TypedActorConfigurator(); - - protected void tearDown() { - conf.stop(); - } - - public void testMapShouldNotRollbackStateForStatefulServerInCaseOfSuccess() { - InMemStateful stateful = TypedActor.newRemoteInstance(InMemStateful.class, 1000, "localhost", 9999); - stateful.init(); - stateful.setMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init"); // set init state - stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state"); // transactionrequired - assertEquals("new state", stateful.getMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess")); - } - - public void testMapShouldRollbackStateForStatefulServerInCaseOfFailure() { - InMemStateful stateful = TypedActor.newRemoteInstance(InMemStateful.class, 10000, "localhost", 9999); - stateful.init(); - stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init"); // set init state - InMemFailer failer = TypedActor.newRemoteInstance(InMemFailer.class, 1000, "localhost", 9999); //conf.getInstance(InMemFailer.class); - try { - stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer); // call failing transactionrequired method - fail("should have thrown an exception"); - } catch (RuntimeException e) { - } // expected - assertEquals("init", stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure")); // check that state is == init state - } - - public void testVectorShouldNotRollbackStateForStatefulServerInCaseOfSuccess() { - InMemStateful stateful = TypedActor.newRemoteInstance(InMemStateful.class, 10000, "localhost", 9999); - stateful.init(); - stateful.setVectorState("init"); // set init state - stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state"); // transactionrequired - 
assertEquals("new state", stateful.getVectorState()); - } - - public void testVectorShouldRollbackStateForStatefulServerInCaseOfFailure() { - InMemStateful stateful = TypedActor.newRemoteInstance(InMemStateful.class, 10000, "localhost", 9999); - stateful.init(); - stateful.setVectorState("init"); // set init state - InMemFailer failer = TypedActor.newRemoteInstance(InMemFailer.class, 10000, "localhost", 9999); //conf.getInstance(InMemFailer.class); - try { - stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer); // call failing transactionrequired method - fail("should have thrown an exception"); - } catch (RuntimeException e) { - } // expected - assertEquals("init", stateful.getVectorState()); // check that state is == init state - } - - public void testRefShouldNotRollbackStateForStatefulServerInCaseOfSuccess() { - InMemStateful stateful = TypedActor.newRemoteInstance(InMemStateful.class, 10000, "localhost", 9999); - stateful.init(); - stateful.setRefState("init"); // set init state - stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state"); // transactionrequired - assertEquals("new state", stateful.getRefState()); - } - - public void testRefShouldRollbackStateForStatefulServerInCaseOfFailure() { - InMemStateful stateful = TypedActor.newRemoteInstance(InMemStateful.class, 10000, "localhost", 9999); - stateful.init(); - stateful.setRefState("init"); // set init state - InMemFailer failer = TypedActor.newRemoteInstance(InMemFailer.class, 10000, "localhost", 9999); //conf.getInstance(InMemFailer.class); - try { - stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer); // call failing transactionrequired method - fail("should have thrown an exception"); - } catch (RuntimeException e) { - } // expected - assertEquals("init", stateful.getRefState()); // check that state is == init state - } - /* - public void testNestedNonTransactionalMethodHangs() { - InMemStateful stateful = conf.getInstance(InMemStateful.class); - stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init"); // set init state - InMemFailer failer = conf.getInstance(InMemFailer.class); - try { - stateful.thisMethodHangs("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer); // call failing transactionrequired method - fail("should have thrown an exception"); - } catch (RuntimeException e) { - } // expected - assertEquals("init", stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure")); // check that state is == init state - } - */ - // public void testShouldRollbackStateForStatefulServerInCaseOfMessageClash() - // { - // InMemStateful stateful = conf.getInstance(InMemStateful.class); - // stateful.setState("stateful", "init"); // set init state - // - // InMemClasher clasher = conf.getInstance(InMemClasher.class); - // clasher.setState("clasher", "init"); // set init state - // - // // try { - // // stateful.clashOk("stateful", "new state", clasher); - // // } catch (RuntimeException e) { } // expected - // // assertEquals("new state", stateful.getState("stateful")); // check that - // // state is == init state - // // assertEquals("was here", clasher.getState("clasher")); // check that - // // state is == init state - // - // try { - // stateful.clashNotOk("stateful", "new state", clasher); - // fail("should have thrown an exception"); - // } catch (RuntimeException e) { - // } // expected - // assertEquals("init", 
stateful.getState("stateful")); // check that state is - // // == init state - // // assertEquals("init", clasher.getState("clasher")); // check that state - // is - // // == init state - // } -} diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/SimpleJavaPojo.java b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/SimpleJavaPojo.java deleted file mode 100644 index f2f05842e9..0000000000 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/SimpleJavaPojo.java +++ /dev/null @@ -1,36 +0,0 @@ -package se.scalablesolutions.akka.api; - -import se.scalablesolutions.akka.actor.annotation.prerestart; -import se.scalablesolutions.akka.actor.annotation.postrestart; - -public class SimpleJavaPojo { - - public boolean pre = false; - public boolean post = false; - - private String name; - - public void setName(String name) { - this.name = name; - } - - public String getName() { - return name; - } - - @prerestart - public void pre() { - System.out.println("** pre()"); - pre = true; - } - - @postrestart - public void post() { - System.out.println("** post()"); - post = true; - } - - public void throwException() { - throw new RuntimeException(); - } -} diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/TypedActorGuiceConfiguratorTest.java b/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/TypedActorGuiceConfiguratorTest.java deleted file mode 100644 index e604b4da69..0000000000 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/TypedActorGuiceConfiguratorTest.java +++ /dev/null @@ -1,115 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.api; - -import com.google.inject.AbstractModule; -import com.google.inject.Scopes; - -import junit.framework.TestCase; - -import se.scalablesolutions.akka.config.Config; -import se.scalablesolutions.akka.config.TypedActorConfigurator; -import static se.scalablesolutions.akka.config.JavaConfig.*; -import se.scalablesolutions.akka.dispatch.*; - -public class TypedActorGuiceConfiguratorTest extends TestCase { - static String messageLog = ""; - - final private TypedActorConfigurator conf = new TypedActorConfigurator(); - - protected void setUp() { - Config.config(); - MessageDispatcher dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher("test"); - - conf.addExternalGuiceModule(new AbstractModule() { - protected void configure() { - bind(Ext.class).to(ExtImpl.class).in(Scopes.SINGLETON); - } - }).configure( - new RestartStrategy(new AllForOne(), 3, 5000, new Class[]{Exception.class}), - new Component[]{ - new Component( - Foo.class, - new LifeCycle(new Permanent()), - 1000, - dispatcher), - //new RemoteAddress("localhost", 9999)), - new Component( - Bar.class, - BarImpl.class, - new LifeCycle(new Permanent()), - 1000, - dispatcher) - }).inject().supervise(); - - } - - public void testGuiceTypedActorInjection() { - messageLog = ""; - Foo foo = conf.getInstance(Foo.class); - Bar bar = conf.getInstance(Bar.class); - assertEquals(foo.getBar(), bar); - } - - public void testGuiceExternalDependencyInjection() { - messageLog = ""; - Bar bar = conf.getInstance(Bar.class); - Ext ext = conf.getExternalDependency(Ext.class); - assertTrue(bar.getExt().toString().equals(ext.toString())); - } - - public void testLookupNonSupervisedInstance() { - try { - String str = conf.getInstance(String.class); - fail("exception should have been thrown"); - } catch (Exception e) { - 
assertEquals(IllegalStateException.class, e.getClass()); - } - } - - public void testTypedActorInvocation() throws InterruptedException { - messageLog = ""; - Foo foo = conf.getInstance(Foo.class); - messageLog += foo.foo("foo "); - foo.bar("bar "); - messageLog += "before_bar "; - Thread.sleep(500); - assertEquals("foo return_foo before_bar ", messageLog); - } - - public void testTypedActorInvocationsInvocation() throws InterruptedException { - messageLog = ""; - Foo foo = conf.getInstance(Foo.class); - Bar bar = conf.getInstance(Bar.class); - messageLog += foo.foo("foo "); - foo.bar("bar "); - messageLog += "before_bar "; - Thread.sleep(500); - assertEquals("foo return_foo before_bar ", messageLog); - } - - - public void testForcedTimeout() { - messageLog = ""; - Foo foo = conf.getInstance(Foo.class); - try { - foo.longRunning(); - fail("exception should have been thrown"); - } catch (se.scalablesolutions.akka.dispatch.FutureTimeoutException e) { - } - } - - public void testForcedException() { - messageLog = ""; - Foo foo = conf.getInstance(Foo.class); - try { - foo.throwsException(); - fail("exception should have been thrown"); - } catch (RuntimeException e) { - } - } -} - - diff --git a/akka-core/src/main/resources/logback.xml b/akka-actors/src/main/resources/logback.xml similarity index 100% rename from akka-core/src/main/resources/logback.xml rename to akka-actors/src/main/resources/logback.xml diff --git a/akka-core/src/main/scala/actor/Actor.scala b/akka-actors/src/main/scala/actor/Actor.scala similarity index 99% rename from akka-core/src/main/scala/actor/Actor.scala rename to akka-actors/src/main/scala/actor/Actor.scala index 2435598c8c..6db8304fb4 100644 --- a/akka-core/src/main/scala/actor/Actor.scala +++ b/akka-actors/src/main/scala/actor/Actor.scala @@ -7,7 +7,6 @@ package se.scalablesolutions.akka.actor import se.scalablesolutions.akka.dispatch._ import se.scalablesolutions.akka.config.Config._ import se.scalablesolutions.akka.config.ScalaConfig._ -import se.scalablesolutions.akka.serialization.Serializer import se.scalablesolutions.akka.util.Helpers.{narrow, narrowSilently} import se.scalablesolutions.akka.util.{Logging, Duration} import se.scalablesolutions.akka.AkkaException @@ -502,3 +501,7 @@ private[actor] class AnyOptionAsTypedOption(anyOption: Option[Any]) { */ def asSilently[T: Manifest]: Option[T] = narrowSilently[T](anyOption) } + +trait Proxyable { + def swapProxiedActor(newInstance: Actor) +} diff --git a/akka-core/src/main/scala/actor/ActorRef.scala b/akka-actors/src/main/scala/actor/ActorRef.scala similarity index 94% rename from akka-core/src/main/scala/actor/ActorRef.scala rename to akka-actors/src/main/scala/actor/ActorRef.scala index 87999b8580..2d687e3738 100644 --- a/akka-core/src/main/scala/actor/ActorRef.scala +++ b/akka-actors/src/main/scala/actor/ActorRef.scala @@ -5,18 +5,15 @@ package se.scalablesolutions.akka.actor import se.scalablesolutions.akka.dispatch._ -import se.scalablesolutions.akka.config.Config.config +import se.scalablesolutions.akka.config.Config._ import se.scalablesolutions.akka.config.{AllForOneStrategy, OneForOneStrategy, FaultHandlingStrategy} import se.scalablesolutions.akka.config.ScalaConfig._ import se.scalablesolutions.akka.stm.global._ import se.scalablesolutions.akka.stm.TransactionManagement._ import se.scalablesolutions.akka.stm.{TransactionManagement, TransactionSetAbortedException} -import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ -import se.scalablesolutions.akka.remote.{RemoteNode, RemoteServer, 
RemoteClient, MessageSerializer, RemoteRequestProtocolIdFactory} -import se.scalablesolutions.akka.serialization.{Serializer, BinaryString} import se.scalablesolutions.akka.util.{HashCode, Logging, UUID, ReentrantGuard} +import se.scalablesolutions.akka.remote.{RemoteClientModule, RemoteServerModule} import se.scalablesolutions.akka.AkkaException -import RemoteActorSerialization._ import org.multiverse.api.ThreadLocalTransaction._ import org.multiverse.commitbarriers.CountDownCommitBarrier @@ -33,8 +30,6 @@ import java.lang.reflect.Field import scala.reflect.BeanProperty -import com.google.protobuf.ByteString - /** * ActorRef is an immutable and serializable handle to an Actor. *

@@ -67,20 +62,23 @@ import com.google.protobuf.ByteString * * @author Jonas Bonér */ -trait ActorRef extends ActorRefShared with TransactionManagement with java.lang.Comparable[ActorRef] { - scalaRef: ScalaActorRef => +trait ActorRef extends + ActorRefShared with + TransactionManagement with + Logging with + java.lang.Comparable[ActorRef] { scalaRef: ScalaActorRef => // Only mutable for RemoteServer in order to maintain identity across nodes @volatile protected[akka] var _uuid = UUID.newUuid.toString @volatile protected[this] var _isRunning = false @volatile protected[this] var _isShutDown = false @volatile protected[akka] var _isBeingRestarted = false - @volatile protected[akka] var _homeAddress = new InetSocketAddress(RemoteServer.HOSTNAME, RemoteServer.PORT) + @volatile protected[akka] var _homeAddress = new InetSocketAddress(RemoteServerModule.HOSTNAME, RemoteServerModule.PORT) @volatile protected[akka] var _futureTimeout: Option[ScheduledFuture[AnyRef]] = None @volatile protected[akka] var startOnCreation = false @volatile protected[akka] var registeredInRemoteNodeDuringSerialization = false protected[akka] val guard = new ReentrantGuard - + /** * User overridable callback/setting. *

@@ -681,13 +679,11 @@ class LocalActorRef private[akka]( if (runActorInitialization && !isDeserialized) initializeActorInstance private[akka] def this(clazz: Class[_ <: Actor]) = this(Left(Some(clazz))) - private[akka] def this(factory: () => Actor) = this(Right(Some(factory))) + private[akka] def this(factory: () => Actor) = this(Right(Some(factory))) // used only for deserialization private[akka] def this(__uuid: String, __id: String, - __actorClassName: String, - __actorBytes: Array[Byte], __hostname: String, __port: Int, __isTransactor: Boolean, @@ -697,16 +693,8 @@ class LocalActorRef private[akka]( __supervisor: Option[ActorRef], __hotswap: Option[PartialFunction[Any, Unit]], __loader: ClassLoader, - __messages: List[RemoteRequestProtocol], - __format: Format[_ <: Actor]) = { - this(() => { - val actorClass = __loader.loadClass(__actorClassName) - if (__format.isInstanceOf[SerializerBasedActorFormat[_]]) - __format.asInstanceOf[SerializerBasedActorFormat[_]] - .serializer - .fromBinary(__actorBytes, Some(actorClass)).asInstanceOf[Actor] - else actorClass.newInstance.asInstanceOf[Actor] - }) + __factory: () => Actor) = { + this(__factory) loader = Some(__loader) isDeserialized = true _uuid = __uuid @@ -721,7 +709,6 @@ class LocalActorRef private[akka]( actorSelfFields._1.set(actor, this) actorSelfFields._2.set(actor, Some(this)) start - __messages.foreach(message => this ! MessageSerializer.deserialize(message.getMessage)) checkReceiveTimeout ActorRegistry.register(this) } @@ -755,21 +742,24 @@ class LocalActorRef private[akka]( /** * Invoking 'makeRemote' means that an actor will be moved to and invoked on a remote host. */ - def makeRemote(hostname: String, port: Int): Unit = + def makeRemote(hostname: String, port: Int): Unit = { + RemoteClientModule.ensureRemotingEnabled if (!isRunning || isBeingRestarted) makeRemote(new InetSocketAddress(hostname, port)) else throw new ActorInitializationException( - "Can't make a running actor remote. Make sure you call 'makeRemote' before 'start'.") + "Can't make a running actor remote. Make sure you call 'makeRemote' before 'start'.") + } /** * Invoking 'makeRemote' means that an actor will be moved to and invoked on a remote host. */ def makeRemote(address: InetSocketAddress): Unit = guard.withGuard { + RemoteClientModule.ensureRemotingEnabled if (!isRunning || isBeingRestarted) { _remoteAddress = Some(address) - RemoteClient.register(address.getHostName, address.getPort, uuid) - homeAddress = (RemoteServer.HOSTNAME, RemoteServer.PORT) + RemoteClientModule.register(address, uuid) + homeAddress = (RemoteServerModule.HOSTNAME, RemoteServerModule.PORT) } else throw new ActorInitializationException( - "Can't make a running actor remote. Make sure you call 'makeRemote' before 'start'.") + "Can't make a running actor remote. Make sure you call 'makeRemote' before 'start'.") } /** @@ -839,9 +829,10 @@ class LocalActorRef private[akka]( _isShutDown = true actor.shutdown ActorRegistry.unregister(this) - remoteAddress.foreach(address => RemoteClient.unregister( - address.getHostName, address.getPort, uuid)) - RemoteNode.unregister(this) + remoteAddress.foreach { address => + RemoteClientModule.unregister(address, uuid) + } + RemoteClientModule.unregister(this) nullOutActorRefReferencesFor(actorInstance.get) } //else if (isBeingRestarted) throw new ActorKilledException("Actor [" + toString + "] is being restarted.") } @@ -896,6 +887,7 @@ class LocalActorRef private[akka]( * To be invoked from within the actor itself. 
*/ def startLinkRemote(actorRef: ActorRef, hostname: String, port: Int) = guard.withGuard { + RemoteClientModule.ensureRemotingEnabled try { actorRef.makeRemote(hostname, port) actorRef.start @@ -921,6 +913,7 @@ class LocalActorRef private[akka]( * To be invoked from within the actor itself. */ def spawnRemote(clazz: Class[_ <: Actor], hostname: String, port: Int): ActorRef = guard.withGuard { + RemoteClientModule.ensureRemotingEnabled val actor = spawnButDoNotStart(clazz) actor.makeRemote(hostname, port) actor.start @@ -948,6 +941,7 @@ class LocalActorRef private[akka]( * To be invoked from within the actor itself. */ def spawnLinkRemote(clazz: Class[_ <: Actor], hostname: String, port: Int): ActorRef = guard.withGuard { + RemoteClientModule.ensureRemotingEnabled val actor = spawnButDoNotStart(clazz) try { actor.makeRemote(hostname, port) @@ -984,10 +978,8 @@ class LocalActorRef private[akka]( protected[akka] def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit = { joinTransaction(message) - if (remoteAddress.isDefined) { - RemoteClient.clientFor(remoteAddress.get).send[Any]( - createRemoteRequestProtocolBuilder(this, message, true, senderOption).build, None) - } else { + if (remoteAddress.isDefined) RemoteClientModule.send(message, senderOption, None, remoteAddress.get, this) + else { val invocation = new MessageInvocation(this, message, senderOption, None, transactionSet.get) invocation.send } @@ -1000,12 +992,9 @@ class LocalActorRef private[akka]( senderFuture: Option[CompletableFuture[T]]): CompletableFuture[T] = { joinTransaction(message) - if (remoteAddress.isDefined) { - val future = RemoteClient.clientFor(remoteAddress.get).send( - createRemoteRequestProtocolBuilder(this, message, false, senderOption).build, senderFuture) - if (future.isDefined) future.get - else throw new IllegalActorStateException("Expected a future from remote call to actor " + toString) - } else { + if (remoteAddress.isDefined) RemoteClientModule.send( + message, senderOption, senderFuture, remoteAddress.get, this) + else { val future = if (senderFuture.isDefined) senderFuture.get else new DefaultCompletableFuture[T](timeout) val invocation = new MessageInvocation( @@ -1088,7 +1077,7 @@ class LocalActorRef private[akka]( Actor.log.debug("Restarting linked actors for actor [%s].", id) restartLinkedActors(reason, maxNrOfRetries, withinTimeRange) Actor.log.debug("Invoking 'preRestart' for failed actor instance [%s].", id) - if (isTypedActorDispatcher(failedActor)) restartTypedActorDispatcher(failedActor, reason) + if (isProxyableDispatcher(failedActor)) restartProxyableDispatcher(failedActor, reason) else restartActor(failedActor, reason) _isBeingRestarted = false } @@ -1107,8 +1096,9 @@ class LocalActorRef private[akka]( } protected[akka] def registerSupervisorAsRemoteActor: Option[String] = guard.withGuard { + RemoteClientModule.ensureRemotingEnabled if (_supervisor.isDefined) { - RemoteClient.clientFor(remoteAddress.get).registerSupervisorForActor(this) + remoteAddress.foreach(address => RemoteClientModule.registerSupervisorForActor(address, this)) Some(_supervisor.get.uuid) } else None } @@ -1126,9 +1116,9 @@ class LocalActorRef private[akka]( // ========= PRIVATE FUNCTIONS ========= - private def isTypedActorDispatcher(a: Actor): Boolean = a.isInstanceOf[TypedActor] + private def isProxyableDispatcher(a: Actor): Boolean = a.isInstanceOf[Proxyable] - private def restartTypedActorDispatcher(failedActor: Actor, reason: Throwable) = { + private def restartProxyableDispatcher(failedActor: 
Actor, reason: Throwable) = { failedActor.preRestart(reason) failedActor.postRestart(reason) } @@ -1140,7 +1130,8 @@ class LocalActorRef private[akka]( freshActor.init freshActor.initTransactionalState actorInstance.set(freshActor) - if (failedActor.isInstanceOf[TypedActor]) failedActor.asInstanceOf[TypedActor].swapInstanceInProxy(freshActor) + if (failedActor.isInstanceOf[Proxyable]) + failedActor.asInstanceOf[Proxyable].swapProxiedActor(freshActor) Actor.log.debug("Invoking 'postRestart' for new actor instance [%s].", id) freshActor.postRestart(reason) } @@ -1316,6 +1307,7 @@ class LocalActorRef private[akka]( checkReceiveTimeout } +/* private def serializeMessage(message: AnyRef): AnyRef = if (Actor.SERIALIZE_MESSAGES) { if (!message.isInstanceOf[String] && !message.isInstanceOf[Byte] && @@ -1339,6 +1331,7 @@ class LocalActorRef private[akka]( Serializer.Java.deepClone(message) } else message } else message + */ } /** @@ -1347,7 +1340,7 @@ class LocalActorRef private[akka]( * @author Jonas Bonér */ object RemoteActorSystemMessage { - val Stop = BinaryString("RemoteActorRef:stop") + val Stop = "RemoteActorRef:stop".intern } /** @@ -1357,26 +1350,31 @@ object RemoteActorSystemMessage { * @author Jonas Bonér */ private[akka] case class RemoteActorRef private[akka] ( - uuuid: String, val className: String, val hostname: String, val port: Int, _timeout: Long, loader: Option[ClassLoader]) - // uuid: String, className: String, hostname: String, port: Int, timeOut: Long, isOnRemoteHost: Boolean) extends ActorRef { + uuuid: String, + val className: String, + val hostname: String, + val port: Int, + _timeout: Long, + loader: Option[ClassLoader]) extends ActorRef with ScalaActorRef { + RemoteClientModule.ensureRemotingEnabled + _uuid = uuuid timeout = _timeout start - lazy val remoteClient = RemoteClient.clientFor(hostname, port, loader) + lazy val remoteClient = RemoteClientModule.clientFor(hostname, port, loader) - def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit = { - remoteClient.send[Any](createRemoteRequestProtocolBuilder(this, message, true, senderOption).build, None) - } + def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit = + RemoteClientModule.send(message, senderOption, None, remoteAddress.get, this) def postMessageToMailboxAndCreateFutureResultWithTimeout[T]( message: Any, timeout: Long, senderOption: Option[ActorRef], senderFuture: Option[CompletableFuture[T]]): CompletableFuture[T] = { - val future = remoteClient.send(createRemoteRequestProtocolBuilder(this, message, false, senderOption).build, senderFuture) + val future = RemoteClientModule.send(message, senderOption, None, remoteAddress.get, this) if (future.isDefined) future.get else throw new IllegalActorStateException("Expected a future from remote call to actor " + toString) } @@ -1582,15 +1580,12 @@ trait ScalaActorRef extends ActorRefShared { ref: ActorRef => def !!(message: Any, timeout: Long = this.timeout)(implicit sender: Option[ActorRef] = None): Option[Any] = { if (isRunning) { val future = postMessageToMailboxAndCreateFutureResultWithTimeout[Any](message, timeout, sender, None) - val isTypedActor = message.isInstanceOf[JoinPoint] - if (isTypedActor && TypedActor.isOneWay(message.asInstanceOf[JoinPoint])) { - future.asInstanceOf[CompletableFuture[Option[_]]].completeWithResult(None) - } + val isMessageJoinPoint = TypedActorModule.resolveFutureIfMessageIsJoinPoint(message, future) try { future.await } catch { case e: FutureTimeoutException => - if (isTypedActor) throw 
e + if (isMessageJoinPoint) throw e else None } if (future.exception.isDefined) throw future.exception.get diff --git a/akka-core/src/main/scala/actor/ActorRegistry.scala b/akka-actors/src/main/scala/actor/ActorRegistry.scala similarity index 100% rename from akka-core/src/main/scala/actor/ActorRegistry.scala rename to akka-actors/src/main/scala/actor/ActorRegistry.scala diff --git a/akka-core/src/main/scala/actor/Agent.scala b/akka-actors/src/main/scala/actor/Agent.scala similarity index 100% rename from akka-core/src/main/scala/actor/Agent.scala rename to akka-actors/src/main/scala/actor/Agent.scala diff --git a/akka-core/src/main/scala/actor/Fsm.scala b/akka-actors/src/main/scala/actor/FSM.scala similarity index 100% rename from akka-core/src/main/scala/actor/Fsm.scala rename to akka-actors/src/main/scala/actor/FSM.scala diff --git a/akka-core/src/main/scala/actor/Implicits.scala b/akka-actors/src/main/scala/actor/Implicits.scala similarity index 100% rename from akka-core/src/main/scala/actor/Implicits.scala rename to akka-actors/src/main/scala/actor/Implicits.scala diff --git a/akka-core/src/main/scala/actor/Scheduler.scala b/akka-actors/src/main/scala/actor/Scheduler.scala similarity index 100% rename from akka-core/src/main/scala/actor/Scheduler.scala rename to akka-actors/src/main/scala/actor/Scheduler.scala diff --git a/akka-core/src/main/scala/actor/Supervisor.scala b/akka-actors/src/main/scala/actor/Supervisor.scala similarity index 96% rename from akka-core/src/main/scala/actor/Supervisor.scala rename to akka-actors/src/main/scala/actor/Supervisor.scala index b146a74c12..27baedbab3 100644 --- a/akka-core/src/main/scala/actor/Supervisor.scala +++ b/akka-actors/src/main/scala/actor/Supervisor.scala @@ -7,7 +7,7 @@ package se.scalablesolutions.akka.actor import se.scalablesolutions.akka.config.ScalaConfig._ import se.scalablesolutions.akka.config.{AllForOneStrategy, OneForOneStrategy, FaultHandlingStrategy} import se.scalablesolutions.akka.util.Logging -import se.scalablesolutions.akka.remote.RemoteServer +import se.scalablesolutions.akka.remote.RemoteServerModule import se.scalablesolutions.akka.AkkaException import Actor._ @@ -162,8 +162,10 @@ sealed class Supervisor private[akka] ( _childActors.put(className, actorRef :: currentActors) actorRef.lifeCycle = Some(lifeCycle) supervisor.link(actorRef) - remoteAddress.foreach(address => RemoteServer.registerActor( - new InetSocketAddress(address.hostname, address.port), actorRef.uuid, actorRef)) + remoteAddress.foreach { address => + RemoteServerModule.registerActor( + new InetSocketAddress(address.hostname, address.port), actorRef.uuid, actorRef) + } case supervisorConfig @ SupervisorConfig(_, _) => // recursive supervisor configuration val childSupervisor = Supervisor(supervisorConfig) supervisor.link(childSupervisor.supervisor) diff --git a/akka-core/src/main/scala/actor/UntypedActor.scala b/akka-actors/src/main/scala/actor/UntypedActor.scala similarity index 100% rename from akka-core/src/main/scala/actor/UntypedActor.scala rename to akka-actors/src/main/scala/actor/UntypedActor.scala diff --git a/akka-core/src/main/scala/config/Config.scala b/akka-actors/src/main/scala/config/Config.scala similarity index 83% rename from akka-core/src/main/scala/config/Config.scala rename to akka-actors/src/main/scala/config/Config.scala index 04fe54848d..bc16ce59c8 100644 --- a/akka-core/src/main/scala/config/Config.scala +++ b/akka-actors/src/main/scala/config/Config.scala @@ -4,19 +4,27 @@ package se.scalablesolutions.akka.config -import 
se.scalablesolutions.akka.util.Logging import se.scalablesolutions.akka.AkkaException +import se.scalablesolutions.akka.util.Logging +import se.scalablesolutions.akka.actor.{ActorRef, IllegalActorStateException} +import se.scalablesolutions.akka.dispatch.CompletableFuture import net.lag.configgy.{Config => CConfig, Configgy, ParseException} +import java.net.InetSocketAddress +import java.lang.reflect.Method + class ConfigurationException(message: String) extends AkkaException(message) +class ModuleNotAvailableException(message: String) extends AkkaException(message) + +object ConfigLogger extends Logging /** * Loads up the configuration (from the akka.conf file). * * @author Jonas Bonér */ -object Config extends Logging { +object Config { val VERSION = "1.0-SNAPSHOT" // Set Multiverse options for max speed @@ -37,7 +45,7 @@ object Config extends Logging { val configFile = System.getProperty("akka.config", "") try { Configgy.configure(configFile) - log.info("Config loaded from -Dakka.config=%s", configFile) + ConfigLogger.log.info("Config loaded from -Dakka.config=%s", configFile) } catch { case e: ParseException => throw new ConfigurationException( "Config could not be loaded from -Dakka.config=" + configFile + @@ -47,7 +55,7 @@ object Config extends Logging { } else if (getClass.getClassLoader.getResource("akka.conf") != null) { try { Configgy.configureFromResource("akka.conf", getClass.getClassLoader) - log.info("Config loaded from the application classpath.") + ConfigLogger.log.info("Config loaded from the application classpath.") } catch { case e: ParseException => throw new ConfigurationException( "Can't load 'akka.conf' config file from application classpath," + @@ -58,7 +66,7 @@ object Config extends Logging { try { val configFile = HOME.get + "/config/akka.conf" Configgy.configure(configFile) - log.info("AKKA_HOME is defined as [%s], config loaded from [%s].", HOME.get, configFile) + ConfigLogger.log.info("AKKA_HOME is defined as [%s], config loaded from [%s].", HOME.get, configFile) } catch { case e: ParseException => throw new ConfigurationException( "AKKA_HOME is defined as [" + HOME.get + "] " + @@ -67,7 +75,7 @@ object Config extends Logging { } Configgy.config } else { - log.warning( + ConfigLogger.log.warning( "\nCan't load 'akka.conf'." + "\nOne of the three ways of locating the 'akka.conf' file needs to be defined:" + "\n\t1. Define the '-Dakka.config=...' system property option." 
+ diff --git a/akka-core/src/main/scala/config/Configuration.scala b/akka-actors/src/main/scala/config/Configuration.scala similarity index 100% rename from akka-core/src/main/scala/config/Configuration.scala rename to akka-actors/src/main/scala/config/Configuration.scala diff --git a/akka-core/src/main/scala/config/Configurator.scala b/akka-actors/src/main/scala/config/Configurator.scala similarity index 100% rename from akka-core/src/main/scala/config/Configurator.scala rename to akka-actors/src/main/scala/config/Configurator.scala diff --git a/akka-core/src/main/scala/config/SupervisionConfig.scala b/akka-actors/src/main/scala/config/SupervisionConfig.scala similarity index 100% rename from akka-core/src/main/scala/config/SupervisionConfig.scala rename to akka-actors/src/main/scala/config/SupervisionConfig.scala diff --git a/akka-core/src/main/scala/dataflow/DataFlowVariable.scala b/akka-actors/src/main/scala/dataflow/DataFlowVariable.scala similarity index 100% rename from akka-core/src/main/scala/dataflow/DataFlowVariable.scala rename to akka-actors/src/main/scala/dataflow/DataFlowVariable.scala diff --git a/akka-core/src/main/scala/dispatch/AbstractReactorBasedEventDrivenDispatcher.scala b/akka-actors/src/main/scala/dispatch/AbstractReactorBasedEventDrivenDispatcher.scala similarity index 100% rename from akka-core/src/main/scala/dispatch/AbstractReactorBasedEventDrivenDispatcher.scala rename to akka-actors/src/main/scala/dispatch/AbstractReactorBasedEventDrivenDispatcher.scala diff --git a/akka-core/src/main/scala/dispatch/Dispatchers.scala b/akka-actors/src/main/scala/dispatch/Dispatchers.scala similarity index 100% rename from akka-core/src/main/scala/dispatch/Dispatchers.scala rename to akka-actors/src/main/scala/dispatch/Dispatchers.scala diff --git a/akka-core/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala b/akka-actors/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala similarity index 100% rename from akka-core/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala rename to akka-actors/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala diff --git a/akka-core/src/main/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala b/akka-actors/src/main/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala similarity index 100% rename from akka-core/src/main/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala rename to akka-actors/src/main/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala diff --git a/akka-core/src/main/scala/dispatch/Future.scala b/akka-actors/src/main/scala/dispatch/Future.scala similarity index 100% rename from akka-core/src/main/scala/dispatch/Future.scala rename to akka-actors/src/main/scala/dispatch/Future.scala diff --git a/akka-core/src/main/scala/dispatch/HawtDispatcher.scala b/akka-actors/src/main/scala/dispatch/HawtDispatcher.scala similarity index 100% rename from akka-core/src/main/scala/dispatch/HawtDispatcher.scala rename to akka-actors/src/main/scala/dispatch/HawtDispatcher.scala diff --git a/akka-core/src/main/scala/dispatch/MessageHandling.scala b/akka-actors/src/main/scala/dispatch/MessageHandling.scala similarity index 100% rename from akka-core/src/main/scala/dispatch/MessageHandling.scala rename to akka-actors/src/main/scala/dispatch/MessageHandling.scala diff --git a/akka-core/src/main/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcher.scala 
b/akka-actors/src/main/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcher.scala similarity index 100% rename from akka-core/src/main/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcher.scala rename to akka-actors/src/main/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcher.scala diff --git a/akka-core/src/main/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcher.scala b/akka-actors/src/main/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcher.scala similarity index 100% rename from akka-core/src/main/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcher.scala rename to akka-actors/src/main/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcher.scala diff --git a/akka-core/src/main/scala/dispatch/ThreadBasedDispatcher.scala b/akka-actors/src/main/scala/dispatch/ThreadBasedDispatcher.scala similarity index 100% rename from akka-core/src/main/scala/dispatch/ThreadBasedDispatcher.scala rename to akka-actors/src/main/scala/dispatch/ThreadBasedDispatcher.scala diff --git a/akka-core/src/main/scala/dispatch/ThreadPoolBuilder.scala b/akka-actors/src/main/scala/dispatch/ThreadPoolBuilder.scala similarity index 100% rename from akka-core/src/main/scala/dispatch/ThreadPoolBuilder.scala rename to akka-actors/src/main/scala/dispatch/ThreadPoolBuilder.scala diff --git a/akka-core/src/main/scala/routing/Iterators.scala b/akka-actors/src/main/scala/routing/Iterators.scala similarity index 100% rename from akka-core/src/main/scala/routing/Iterators.scala rename to akka-actors/src/main/scala/routing/Iterators.scala diff --git a/akka-core/src/main/scala/routing/Listeners.scala b/akka-actors/src/main/scala/routing/Listeners.scala similarity index 100% rename from akka-core/src/main/scala/routing/Listeners.scala rename to akka-actors/src/main/scala/routing/Listeners.scala diff --git a/akka-core/src/main/scala/routing/Routers.scala b/akka-actors/src/main/scala/routing/Routers.scala similarity index 100% rename from akka-core/src/main/scala/routing/Routers.scala rename to akka-actors/src/main/scala/routing/Routers.scala diff --git a/akka-core/src/main/scala/routing/Routing.scala b/akka-actors/src/main/scala/routing/Routing.scala similarity index 100% rename from akka-core/src/main/scala/routing/Routing.scala rename to akka-actors/src/main/scala/routing/Routing.scala diff --git a/akka-core/src/main/scala/stm/JTA.scala b/akka-actors/src/main/scala/stm/JTA.scala similarity index 98% rename from akka-core/src/main/scala/stm/JTA.scala rename to akka-actors/src/main/scala/stm/JTA.scala index 80a0cda4ec..485f3e5104 100644 --- a/akka-core/src/main/scala/stm/JTA.scala +++ b/akka-actors/src/main/scala/stm/JTA.scala @@ -106,7 +106,8 @@ object TransactionContainer extends Logging { * * @author Jonas Bonér */ -class TransactionContainer private (val tm: Either[Option[UserTransaction], Option[TransactionManager]]) { +class TransactionContainer private ( + val tm: Either[Option[UserTransaction], Option[TransactionManager]]) extends Logging { def registerSynchronization(sync: Synchronization) = { TransactionContainer.findSynchronizationRegistry match { // try to use SynchronizationRegistry in JNDI diff --git a/akka-core/src/main/scala/stm/Ref.scala b/akka-actors/src/main/scala/stm/Ref.scala similarity index 100% rename from akka-core/src/main/scala/stm/Ref.scala rename to akka-actors/src/main/scala/stm/Ref.scala diff --git a/akka-core/src/main/scala/stm/Transaction.scala b/akka-actors/src/main/scala/stm/Transaction.scala similarity index 100% rename from 
akka-core/src/main/scala/stm/Transaction.scala rename to akka-actors/src/main/scala/stm/Transaction.scala diff --git a/akka-core/src/main/scala/stm/TransactionFactory.scala b/akka-actors/src/main/scala/stm/TransactionFactory.scala similarity index 100% rename from akka-core/src/main/scala/stm/TransactionFactory.scala rename to akka-actors/src/main/scala/stm/TransactionFactory.scala diff --git a/akka-core/src/main/scala/stm/TransactionFactoryBuilder.scala b/akka-actors/src/main/scala/stm/TransactionFactoryBuilder.scala similarity index 100% rename from akka-core/src/main/scala/stm/TransactionFactoryBuilder.scala rename to akka-actors/src/main/scala/stm/TransactionFactoryBuilder.scala diff --git a/akka-core/src/main/scala/stm/TransactionManagement.scala b/akka-actors/src/main/scala/stm/TransactionManagement.scala similarity index 100% rename from akka-core/src/main/scala/stm/TransactionManagement.scala rename to akka-actors/src/main/scala/stm/TransactionManagement.scala diff --git a/akka-core/src/main/scala/stm/TransactionalMap.scala b/akka-actors/src/main/scala/stm/TransactionalMap.scala similarity index 100% rename from akka-core/src/main/scala/stm/TransactionalMap.scala rename to akka-actors/src/main/scala/stm/TransactionalMap.scala diff --git a/akka-core/src/main/scala/stm/TransactionalVector.scala b/akka-actors/src/main/scala/stm/TransactionalVector.scala similarity index 100% rename from akka-core/src/main/scala/stm/TransactionalVector.scala rename to akka-actors/src/main/scala/stm/TransactionalVector.scala diff --git a/akka-core/src/main/scala/stm/global/Atomic.scala b/akka-actors/src/main/scala/stm/global/Atomic.scala similarity index 100% rename from akka-core/src/main/scala/stm/global/Atomic.scala rename to akka-actors/src/main/scala/stm/global/Atomic.scala diff --git a/akka-core/src/main/scala/stm/global/GlobalStm.scala b/akka-actors/src/main/scala/stm/global/GlobalStm.scala similarity index 78% rename from akka-core/src/main/scala/stm/global/GlobalStm.scala rename to akka-actors/src/main/scala/stm/global/GlobalStm.scala index 1fd53ffe51..76de9d5f57 100644 --- a/akka-core/src/main/scala/stm/global/GlobalStm.scala +++ b/akka-actors/src/main/scala/stm/global/GlobalStm.scala @@ -9,6 +9,8 @@ import se.scalablesolutions.akka.util.Logging import org.multiverse.api.{Transaction => MultiverseTransaction} import org.multiverse.templates.TransactionalCallable +object GlobalStm extends Logging + /** * Global transaction management, global in the context of multiple threads. * Use this if you need to have one transaction span multiple threads (or Actors). 
@@ -23,12 +25,14 @@ import org.multiverse.templates.TransactionalCallable * } * */ -class GlobalStm extends TransactionManagement with Logging { +class GlobalStm extends TransactionManagement { val DefaultGlobalTransactionConfig = TransactionConfig() - val DefaultGlobalTransactionFactory = TransactionFactory(DefaultGlobalTransactionConfig, "DefaultGlobalTransaction") + val DefaultGlobalTransactionFactory = TransactionFactory( + DefaultGlobalTransactionConfig, "DefaultGlobalTransaction") - def atomic[T](body: => T)(implicit factory: TransactionFactory = DefaultGlobalTransactionFactory): T = atomic(factory)(body) + def atomic[T](body: => T)(implicit factory: TransactionFactory = DefaultGlobalTransactionFactory): T = + atomic(factory)(body) def atomic[T](factory: TransactionFactory)(body: => T): T = { factory.boilerplate.execute(new TransactionalCallable[T]() { @@ -37,7 +41,8 @@ class GlobalStm extends TransactionManagement with Logging { factory.addHooks val result = body val txSet = getTransactionSetInScope - log.trace("Committing global transaction [" + mtx + "]\n\tand joining transaction set [" + txSet + "]") + GlobalStm.log.trace( + "Committing global transaction [" + mtx + "]\n\tand joining transaction set [" + txSet + "]") try { txSet.tryJoinCommit( mtx, diff --git a/akka-core/src/main/scala/stm/global/package.scala b/akka-actors/src/main/scala/stm/global/package.scala similarity index 100% rename from akka-core/src/main/scala/stm/global/package.scala rename to akka-actors/src/main/scala/stm/global/package.scala diff --git a/akka-core/src/main/scala/stm/local/Atomic.scala b/akka-actors/src/main/scala/stm/local/Atomic.scala similarity index 100% rename from akka-core/src/main/scala/stm/local/Atomic.scala rename to akka-actors/src/main/scala/stm/local/Atomic.scala diff --git a/akka-core/src/main/scala/stm/local/LocalStm.scala b/akka-actors/src/main/scala/stm/local/LocalStm.scala similarity index 79% rename from akka-core/src/main/scala/stm/local/LocalStm.scala rename to akka-actors/src/main/scala/stm/local/LocalStm.scala index 477027aab2..c24097f9e5 100644 --- a/akka-core/src/main/scala/stm/local/LocalStm.scala +++ b/akka-actors/src/main/scala/stm/local/LocalStm.scala @@ -9,6 +9,8 @@ import se.scalablesolutions.akka.util.Logging import org.multiverse.api.{Transaction => MultiverseTransaction} import org.multiverse.templates.TransactionalCallable +object LocalStm extends Logging + /** * Local transaction management, local in the context of threads. 
* Use this if you do not need to have one transaction span @@ -27,16 +29,18 @@ import org.multiverse.templates.TransactionalCallable class LocalStm extends TransactionManagement with Logging { val DefaultLocalTransactionConfig = TransactionConfig() - val DefaultLocalTransactionFactory = TransactionFactory(DefaultLocalTransactionConfig, "DefaultLocalTransaction") + val DefaultLocalTransactionFactory = TransactionFactory( + DefaultLocalTransactionConfig, "DefaultLocalTransaction") - def atomic[T](body: => T)(implicit factory: TransactionFactory = DefaultLocalTransactionFactory): T = atomic(factory)(body) + def atomic[T](body: => T)(implicit factory: TransactionFactory = DefaultLocalTransactionFactory): T = + atomic(factory)(body) def atomic[T](factory: TransactionFactory)(body: => T): T = { factory.boilerplate.execute(new TransactionalCallable[T]() { def call(mtx: MultiverseTransaction): T = { factory.addHooks val result = body - log.trace("Committing local transaction [" + mtx + "]") + LocalStm.log.trace("Committing local transaction [" + mtx + "]") result } }) diff --git a/akka-core/src/main/scala/stm/local/package.scala b/akka-actors/src/main/scala/stm/local/package.scala similarity index 100% rename from akka-core/src/main/scala/stm/local/package.scala rename to akka-actors/src/main/scala/stm/local/package.scala diff --git a/akka-core/src/main/scala/stm/transactional.scala b/akka-actors/src/main/scala/stm/transactional.scala similarity index 100% rename from akka-core/src/main/scala/stm/transactional.scala rename to akka-actors/src/main/scala/stm/transactional.scala diff --git a/akka-core/src/main/scala/util/AkkaException.scala b/akka-actors/src/main/scala/util/AkkaException.scala similarity index 100% rename from akka-core/src/main/scala/util/AkkaException.scala rename to akka-actors/src/main/scala/util/AkkaException.scala diff --git a/akka-core/src/main/scala/util/Bootable.scala b/akka-actors/src/main/scala/util/Bootable.scala similarity index 100% rename from akka-core/src/main/scala/util/Bootable.scala rename to akka-actors/src/main/scala/util/Bootable.scala diff --git a/akka-core/src/main/scala/util/Duration.scala b/akka-actors/src/main/scala/util/Duration.scala similarity index 100% rename from akka-core/src/main/scala/util/Duration.scala rename to akka-actors/src/main/scala/util/Duration.scala diff --git a/akka-core/src/main/scala/util/HashCode.scala b/akka-actors/src/main/scala/util/HashCode.scala similarity index 100% rename from akka-core/src/main/scala/util/HashCode.scala rename to akka-actors/src/main/scala/util/HashCode.scala diff --git a/akka-core/src/main/scala/util/Helpers.scala b/akka-actors/src/main/scala/util/Helpers.scala similarity index 100% rename from akka-core/src/main/scala/util/Helpers.scala rename to akka-actors/src/main/scala/util/Helpers.scala diff --git a/akka-core/src/main/scala/util/ListenerManagement.scala b/akka-actors/src/main/scala/util/ListenerManagement.scala similarity index 100% rename from akka-core/src/main/scala/util/ListenerManagement.scala rename to akka-actors/src/main/scala/util/ListenerManagement.scala diff --git a/akka-core/src/main/scala/util/LockUtil.scala b/akka-actors/src/main/scala/util/LockUtil.scala similarity index 100% rename from akka-core/src/main/scala/util/LockUtil.scala rename to akka-actors/src/main/scala/util/LockUtil.scala diff --git a/akka-core/src/main/scala/util/Logging.scala b/akka-actors/src/main/scala/util/Logging.scala similarity index 100% rename from akka-core/src/main/scala/util/Logging.scala rename to 
akka-actors/src/main/scala/util/Logging.scala diff --git a/akka-core/src/main/scala/util/Uuid.scala b/akka-actors/src/main/scala/util/Uuid.scala similarity index 100% rename from akka-core/src/main/scala/util/Uuid.scala rename to akka-actors/src/main/scala/util/Uuid.scala diff --git a/akka-actors/src/test/resources/logback-test.xml b/akka-actors/src/test/resources/logback-test.xml new file mode 100644 index 0000000000..78eae40ec4 --- /dev/null +++ b/akka-actors/src/test/resources/logback-test.xml @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + [%4p] [%d{ISO8601}] [%t] %c{1}: %m%n + + + + + + + diff --git a/akka-actors/src/test/scala/Messages.scala b/akka-actors/src/test/scala/Messages.scala new file mode 100644 index 0000000000..ad1fcf8885 --- /dev/null +++ b/akka-actors/src/test/scala/Messages.scala @@ -0,0 +1,44 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka + +import se.scalablesolutions.akka.serialization.Serializable +import sbinary._ +import sbinary.Operations._ + +sealed abstract class TestMessage + +case object Ping extends TestMessage +case object Pong extends TestMessage +case object OneWay extends TestMessage +case object Die extends TestMessage +case object NotifySupervisorExit extends TestMessage + +case class User(val usernamePassword: Tuple2[String, String], + val email: String, + val age: Int) + extends Serializable.SBinary[User] { + def this() = this(null, null, 0) + import sbinary.DefaultProtocol._ + implicit object UserFormat extends Format[User] { + def reads(in : Input) = User( + read[Tuple2[String, String]](in), + read[String](in), + read[Int](in)) + def writes(out: Output, value: User) = { + write[Tuple2[String, String]](out, value.usernamePassword) + write[String](out, value.email) + write[Int](out, value.age) + } + } + def fromBytes(bytes: Array[Byte]) = fromByteArray[User](bytes) + def toBytes: Array[Byte] = toByteArray(this) +} + +case object RemotePing extends TestMessage +case object RemotePong extends TestMessage +case object RemoteOneWay extends TestMessage +case object RemoteDie extends TestMessage +case object RemoteNotifySupervisorExit extends TestMessage diff --git a/akka-actors/src/test/scala/actor/actor/ActorFireForgetRequestReplySpec.scala b/akka-actors/src/test/scala/actor/actor/ActorFireForgetRequestReplySpec.scala new file mode 100644 index 0000000000..9d3ce765ec --- /dev/null +++ b/akka-actors/src/test/scala/actor/actor/ActorFireForgetRequestReplySpec.scala @@ -0,0 +1,92 @@ +package se.scalablesolutions.akka.actor + +import java.util.concurrent.{TimeUnit, CyclicBarrier, TimeoutException} +import se.scalablesolutions.akka.config.ScalaConfig._ +import org.scalatest.junit.JUnitSuite +import org.junit.Test + +import se.scalablesolutions.akka.dispatch.Dispatchers +import Actor._ + +object ActorFireForgetRequestReplySpec { + class ReplyActor extends Actor { + self.dispatcher = Dispatchers.newThreadBasedDispatcher(self) + + def receive = { + case "Send" => + self.reply("Reply") + case "SendImplicit" => + self.sender.get ! "ReplyImplicit" + } + } + + class CrashingTemporaryActor extends Actor { + self.lifeCycle = Some(LifeCycle(Temporary)) + + def receive = { + case "Die" => + state.finished.await + throw new Exception("Expected exception") + } + } + + class SenderActor(replyActor: ActorRef) extends Actor { + self.dispatcher = Dispatchers.newThreadBasedDispatcher(self) + + def receive = { + case "Init" => replyActor ! 
"Send" + case "Reply" => { + state.s = "Reply" + state.finished.await + } + case "InitImplicit" => replyActor ! "SendImplicit" + case "ReplyImplicit" => { + state.s = "ReplyImplicit" + state.finished.await + } + } + } + + object state { + var s = "NIL" + val finished = new CyclicBarrier(2) + } +} + +class ActorFireForgetRequestReplySpec extends JUnitSuite { + import ActorFireForgetRequestReplySpec._ + + @Test + def shouldReplyToBangMessageUsingReply = { + state.finished.reset + val replyActor = actorOf[ReplyActor].start + val senderActor = actorOf(new SenderActor(replyActor)).start + senderActor ! "Init" + try { state.finished.await(1L, TimeUnit.SECONDS) } + catch { case e: TimeoutException => fail("Never got the message") } + assert("Reply" === state.s) + } + + @Test + def shouldReplyToBangMessageUsingImplicitSender = { + state.finished.reset + val replyActor = actorOf[ReplyActor].start + val senderActor = actorOf(new SenderActor(replyActor)).start + senderActor ! "InitImplicit" + try { state.finished.await(1L, TimeUnit.SECONDS) } + catch { case e: TimeoutException => fail("Never got the message") } + assert("ReplyImplicit" === state.s) + } + + @Test + def shouldShutdownCrashedTemporaryActor = { + state.finished.reset + val actor = actorOf[CrashingTemporaryActor].start + assert(actor.isRunning) + actor ! "Die" + try { state.finished.await(1L, TimeUnit.SECONDS) } + catch { case e: TimeoutException => fail("Never got the message") } + Thread.sleep(100) + assert(actor.isShutdown) + } +} diff --git a/akka-actors/src/test/scala/actor/actor/AgentSpec.scala b/akka-actors/src/test/scala/actor/actor/AgentSpec.scala new file mode 100644 index 0000000000..71911c3ad8 --- /dev/null +++ b/akka-actors/src/test/scala/actor/actor/AgentSpec.scala @@ -0,0 +1,111 @@ +package se.scalablesolutions.akka.actor + +import se.scalablesolutions.akka.actor.Actor.transactor +import org.scalatest.Suite +import org.scalatest.junit.JUnitRunner +import org.scalatest.matchers.MustMatchers + +import org.junit.runner.RunWith +import org.junit.Test + +import java.util.concurrent.{TimeUnit, CountDownLatch} + +@RunWith(classOf[JUnitRunner]) +class AgentSpec extends junit.framework.TestCase with Suite with MustMatchers { + + @Test def testSendFun = { + val agent = Agent(5) + agent send (_ + 1) + agent send (_ * 2) + val result = agent() + result must be(12) + agent.close + } + + @Test def testSendValue = { + val agent = Agent(5) + agent send 6 + val result = agent() + result must be(6) + agent.close + } + + @Test def testSendProc = { + val agent = Agent(5) + var result = 0 + val latch = new CountDownLatch(2) + agent sendProc { e => result += e; latch.countDown } + agent sendProc { e => result += e; latch.countDown } + assert(latch.await(5, TimeUnit.SECONDS)) + result must be(10) + agent.close + } + + @Test def testOneAgentsendWithinEnlosingTransactionSuccess = { + case object Go + val agent = Agent(5) + val latch = new CountDownLatch(1) + val tx = transactor { + case Go => agent send { e => latch.countDown; e + 1 } + } + tx ! Go + assert(latch.await(5, TimeUnit.SECONDS)) + val result = agent() + result must be(6) + agent.close + tx.stop + } + +/* + // Strange test - do we really need it? + @Test def testDoingAgentGetInEnlosingTransactionShouldYieldException = { + case object Go + val latch = new CountDownLatch(1) + val agent = Agent(5) + val tx = transactor { + case Go => + agent send (_ * 2) + try { agent() } + catch { + case _ => latch.countDown + } + } + tx ! 
Go + assert(latch.await(5, TimeUnit.SECONDS)) + agent.close + tx.stop + assert(true) + } +*/ + + @Test def testAgentForeach = { + val agent1 = Agent(3) + var result = 0 + for (first <- agent1) { + result = first + 1 + } + result must be(4) + agent1.close + } + + @Test def testAgentMap = { + val agent1 = Agent(3) + val result = for (first <- agent1) yield first + 1 + result() must be(4) + result.close + agent1.close + } + + @Test def testAgentFlatMap = { + val agent1 = Agent(3) + val agent2 = Agent(5) + val result = for { + first <- agent1 + second <- agent2 + } yield second + first + result() must be(8) + result.close + agent1.close + agent2.close + } +} diff --git a/akka-actors/src/test/scala/actor/actor/Bench.scala b/akka-actors/src/test/scala/actor/actor/Bench.scala new file mode 100644 index 0000000000..8e3a44f3a0 --- /dev/null +++ b/akka-actors/src/test/scala/actor/actor/Bench.scala @@ -0,0 +1,119 @@ +/* The Computer Language Benchmarks Game + http://shootout.alioth.debian.org/ + contributed by Julien Gaugaz + inspired by the version contributed by Yura Taras and modified by Isaac Gouy +*/ +package se.scalablesolutions.akka.actor + +import se.scalablesolutions.akka.actor.Actor._ + +object Chameneos { + + sealed trait ChameneosEvent + case class Meet(from: ActorRef, colour: Colour) extends ChameneosEvent + case class Change(colour: Colour) extends ChameneosEvent + case class MeetingCount(count: Int) extends ChameneosEvent + case object Exit extends ChameneosEvent + + abstract class Colour + case object RED extends Colour + case object YELLOW extends Colour + case object BLUE extends Colour + case object FADED extends Colour + + val colours = Array[Colour](BLUE, RED, YELLOW) + + var start = 0L + var end = 0L + + class Chameneo(var mall: ActorRef, var colour: Colour, cid: Int) extends Actor { + var meetings = 0 + self.start + mall ! Meet(self, colour) + + def receive = { + case Meet(from, otherColour) => + colour = complement(otherColour) + meetings = meetings +1 + from ! Change(colour) + mall ! Meet(self, colour) + + case Change(newColour) => + colour = newColour + meetings = meetings +1 + mall ! Meet(self, colour) + + case Exit => + colour = FADED + self.sender.get ! MeetingCount(meetings) + } + + def complement(otherColour: Colour): Colour = colour match { + case RED => otherColour match { + case RED => RED + case YELLOW => BLUE + case BLUE => YELLOW + case FADED => FADED + } + case YELLOW => otherColour match { + case RED => BLUE + case YELLOW => YELLOW + case BLUE => RED + case FADED => FADED + } + case BLUE => otherColour match { + case RED => YELLOW + case YELLOW => RED + case BLUE => BLUE + case FADED => FADED + } + case FADED => FADED + } + + override def toString = cid + "(" + colour + ")" + } + + class Mall(var n: Int, numChameneos: Int) extends Actor { + var waitingChameneo: Option[ActorRef] = None + var sumMeetings = 0 + var numFaded = 0 + + override def init = { + for (i <- 0 until numChameneos) actorOf(new Chameneo(self, colours(i % 3), i)) + } + + def receive = { + case MeetingCount(i) => + numFaded += 1 + sumMeetings += i + if (numFaded == numChameneos) { + Chameneos.end = System.currentTimeMillis + self.stop + } + + case msg @ Meet(a, c) => + if (n > 0) { + waitingChameneo match { + case Some(chameneo) => + n -= 1 + chameneo ! msg + waitingChameneo = None + case None => waitingChameneo = self.sender + } + } else { + waitingChameneo.foreach(_ ! Exit) + self.sender.get ! 
Exit + } + } + } + + def run { +// System.setProperty("akka.config", "akka.conf") + Chameneos.start = System.currentTimeMillis + actorOf(new Mall(1000000, 4)).start + Thread.sleep(10000) + println("Elapsed: " + (end - start)) + } + + def main(args : Array[String]): Unit = run +} diff --git a/akka-actors/src/test/scala/actor/actor/FSMActorSpec.scala b/akka-actors/src/test/scala/actor/actor/FSMActorSpec.scala new file mode 100644 index 0000000000..e4515bd3da --- /dev/null +++ b/akka-actors/src/test/scala/actor/actor/FSMActorSpec.scala @@ -0,0 +1,82 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import org.scalatest.junit.JUnitSuite +import org.junit.Test + +import org.multiverse.api.latches.StandardLatch + +import java.util.concurrent.TimeUnit + +object FSMActorSpec { + + class Lock(code: String, + timeout: Int, + unlockedLatch: StandardLatch, + lockedLatch: StandardLatch) extends Actor with FSM[CodeState] { + + def initialState = State(NextState, locked, CodeState("", code)) + + def locked: StateFunction = { + case Event(digit: Char, CodeState(soFar, code)) => { + soFar + digit match { + case incomplete if incomplete.length < code.length => + State(NextState, locked, CodeState(incomplete, code)) + case codeTry if (codeTry == code) => { + doUnlock + State(NextState, open, CodeState("", code), Some(timeout)) + } + case wrong => { + log.error("Wrong code %s", wrong) + State(NextState, locked, CodeState("", code)) + } + } + } + } + + def open: StateFunction = { + case Event(StateTimeout, stateData) => { + doLock + State(NextState, locked, stateData) + } + } + + private def doLock() { + log.info("Locked") + lockedLatch.open + } + + private def doUnlock = { + log.info("Unlocked") + unlockedLatch.open + } + } + + case class CodeState(soFar: String, code: String) +} + +class FSMActorSpec extends JUnitSuite { + import FSMActorSpec._ + + @Test + def unlockTheLock = { + val unlockedLatch = new StandardLatch + val lockedLatch = new StandardLatch + + // lock that locked after being open for 1 sec + val lock = Actor.actorOf(new Lock("33221", 1000, unlockedLatch, lockedLatch)).start + + lock ! '3' + lock ! '3' + lock ! '2' + lock ! '2' + lock ! '1' + + assert(unlockedLatch.tryAwait(1, TimeUnit.SECONDS)) + assert(lockedLatch.tryAwait(2, TimeUnit.SECONDS)) + } +} + diff --git a/akka-actors/src/test/scala/actor/actor/ForwardActorSpec.scala b/akka-actors/src/test/scala/actor/actor/ForwardActorSpec.scala new file mode 100644 index 0000000000..e3ab0bded7 --- /dev/null +++ b/akka-actors/src/test/scala/actor/actor/ForwardActorSpec.scala @@ -0,0 +1,81 @@ +package se.scalablesolutions.akka.actor + +import java.util.concurrent.{TimeUnit, CountDownLatch} +import org.scalatest.junit.JUnitSuite +import org.junit.Test + +import Actor._ + +object ForwardActorSpec { + object ForwardState { + var sender: Option[ActorRef] = None + } + + class ReceiverActor extends Actor { + val latch = new CountDownLatch(1) + def receive = { + case "SendBang" => { + ForwardState.sender = self.sender + latch.countDown + } + case "SendBangBang" => self.reply("SendBangBang") + } + } + + + class ForwardActor extends Actor { + val receiverActor = actorOf[ReceiverActor] + receiverActor.start + def receive = { + case "SendBang" => receiverActor.forward("SendBang") + case "SendBangBang" => receiverActor.forward("SendBangBang") + } + } + + class BangSenderActor extends Actor { + val forwardActor = actorOf[ForwardActor] + forwardActor.start + forwardActor ! 
"SendBang" + def receive = { + case _ => {} + } + } + + class BangBangSenderActor extends Actor { + val latch = new CountDownLatch(1) + val forwardActor = actorOf[ForwardActor] + forwardActor.start + (forwardActor !! "SendBangBang") match { + case Some(_) => latch.countDown + case None => {} + } + def receive = { + case _ => {} + } + } +} + +class ForwardActorSpec extends JUnitSuite { + import ForwardActorSpec._ + + @Test + def shouldForwardActorReferenceWhenInvokingForwardOnBang { + val senderActor = actorOf[BangSenderActor] + val latch = senderActor.actor.asInstanceOf[BangSenderActor] + .forwardActor.actor.asInstanceOf[ForwardActor] + .receiverActor.actor.asInstanceOf[ReceiverActor] + .latch + senderActor.start + assert(latch.await(1L, TimeUnit.SECONDS)) + assert(ForwardState.sender ne null) + assert(senderActor.toString === ForwardState.sender.get.toString) + } + + @Test + def shouldForwardActorReferenceWhenInvokingForwardOnBangBang { + val senderActor = actorOf[BangBangSenderActor] + senderActor.start + val latch = senderActor.actor.asInstanceOf[BangBangSenderActor].latch + assert(latch.await(1L, TimeUnit.SECONDS)) + } +} diff --git a/akka-actors/src/test/scala/actor/actor/ReceiveTimeoutSpec.scala b/akka-actors/src/test/scala/actor/actor/ReceiveTimeoutSpec.scala new file mode 100644 index 0000000000..ff43467efc --- /dev/null +++ b/akka-actors/src/test/scala/actor/actor/ReceiveTimeoutSpec.scala @@ -0,0 +1,77 @@ +package se.scalablesolutions.akka.actor + +import org.scalatest.junit.JUnitSuite +import org.junit.Test + +import java.util.concurrent.TimeUnit +import org.multiverse.api.latches.StandardLatch +import Actor._ + +class ReceiveTimeoutSpec extends JUnitSuite { + + @Test def receiveShouldGetTimeout= { + + val timeoutLatch = new StandardLatch + + val timeoutActor = actorOf(new Actor { + self.receiveTimeout = Some(500L) + + protected def receive = { + case ReceiveTimeout => timeoutLatch.open + } + }).start + + assert(timeoutLatch.tryAwait(3, TimeUnit.SECONDS)) + } + + @Test def swappedReceiveShouldAlsoGetTimout = { + val timeoutLatch = new StandardLatch + + val timeoutActor = actorOf(new Actor { + self.receiveTimeout = Some(500L) + + protected def receive = { + case ReceiveTimeout => timeoutLatch.open + } + }).start + + // after max 1 second the timeout should already been sent + assert(timeoutLatch.tryAwait(3, TimeUnit.SECONDS)) + + val swappedLatch = new StandardLatch + timeoutActor ! HotSwap(Some{ + case ReceiveTimeout => swappedLatch.open + }) + + assert(swappedLatch.tryAwait(3, TimeUnit.SECONDS)) + } + + @Test def timeoutShouldBeCancelledAfterRegularReceive = { + + val timeoutLatch = new StandardLatch + case object Tick + val timeoutActor = actorOf(new Actor { + self.receiveTimeout = Some(500L) + + protected def receive = { + case Tick => () + case ReceiveTimeout => timeoutLatch.open + } + }).start + timeoutActor ! 
Tick + + assert(timeoutLatch.tryAwait(2, TimeUnit.SECONDS) == false) + } + + @Test def timeoutShouldNotBeSentWhenNotSpecified = { + val timeoutLatch = new StandardLatch + val timeoutActor = actorOf(new Actor { + + protected def receive = { + case ReceiveTimeout => timeoutLatch.open + } + }).start + + assert(timeoutLatch.tryAwait(1, TimeUnit.SECONDS) == false) + } +} diff --git a/akka-actors/src/test/scala/actor/actor/TransactorSpec.scala b/akka-actors/src/test/scala/actor/actor/TransactorSpec.scala new file mode 100644 index 0000000000..dd23a76a88 --- /dev/null +++ b/akka-actors/src/test/scala/actor/actor/TransactorSpec.scala @@ -0,0 +1,255 @@ +package se.scalablesolutions.akka.actor + +import java.util.concurrent.{TimeUnit, CountDownLatch} +import org.scalatest.junit.JUnitSuite +import org.junit.Test + +import se.scalablesolutions.akka.stm.{Ref, TransactionalMap, TransactionalVector} +import Actor._ + +object TransactorSpec { + case class GetMapState(key: String) + case object GetVectorState + case object GetVectorSize + case object GetRefState + + case class SetMapState(key: String, value: String) + case class SetVectorState(key: String) + case class SetRefState(key: String) + case class Success(key: String, value: String) + case class Failure(key: String, value: String, failer: ActorRef) + + case class SetMapStateOneWay(key: String, value: String) + case class SetVectorStateOneWay(key: String) + case class SetRefStateOneWay(key: String) + case class SuccessOneWay(key: String, value: String) + case class FailureOneWay(key: String, value: String, failer: ActorRef) + + case object GetNotifier +} +import TransactorSpec._ + +class StatefulTransactor(expectedInvocationCount: Int) extends Transactor { + def this() = this(0) + self.timeout = 5000 + + val notifier = new CountDownLatch(expectedInvocationCount) + + private val mapState = TransactionalMap[String, String]() + private val vectorState = TransactionalVector[String]() + private val refState = Ref[String]() + + def receive = { + case GetNotifier => + self.reply(notifier) + case GetMapState(key) => + self.reply(mapState.get(key).get) + notifier.countDown + case GetVectorSize => + self.reply(vectorState.length.asInstanceOf[AnyRef]) + notifier.countDown + case GetRefState => + self.reply(refState.get) + notifier.countDown + case SetMapState(key, msg) => + mapState.put(key, msg) + self.reply(msg) + notifier.countDown + case SetVectorState(msg) => + vectorState.add(msg) + self.reply(msg) + notifier.countDown + case SetRefState(msg) => + refState.swap(msg) + self.reply(msg) + notifier.countDown + case Success(key, msg) => + mapState.put(key, msg) + vectorState.add(msg) + refState.swap(msg) + self.reply(msg) + notifier.countDown + case Failure(key, msg, failer) => + mapState.put(key, msg) + vectorState.add(msg) + refState.swap(msg) + failer !! "Failure" + self.reply(msg) + notifier.countDown + case SetMapStateOneWay(key, msg) => + mapState.put(key, msg) + notifier.countDown + case SetVectorStateOneWay(msg) => + vectorState.add(msg) + notifier.countDown + case SetRefStateOneWay(msg) => + refState.swap(msg) + notifier.countDown + case SuccessOneWay(key, msg) => + mapState.put(key, msg) + vectorState.add(msg) + refState.swap(msg) + notifier.countDown + case FailureOneWay(key, msg, failer) => + mapState.put(key, msg) + vectorState.add(msg) + refState.swap(msg) + notifier.countDown + failer ! 
"Failure" + } +} + +@serializable +class FailerTransactor extends Transactor { + + def receive = { + case "Failure" => + throw new RuntimeException("Expected exception; to test fault-tolerance") + } +} + +class TransactorSpec extends JUnitSuite { + + @Test + def shouldOneWayMapShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { + val stateful = actorOf(new StatefulTransactor(2)) + stateful.start + stateful ! SetMapStateOneWay("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init") // set init state + stateful ! SuccessOneWay("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired + val notifier = (stateful !! GetNotifier).as[CountDownLatch] + assert(notifier.get.await(1, TimeUnit.SECONDS)) + assert("new state" === (stateful !! GetMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess")).get) + } + + @Test + def shouldMapShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { + val stateful = actorOf[StatefulTransactor] + stateful.start + stateful !! SetMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init") // set init state + stateful !! Success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired + assert("new state" === (stateful !! GetMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess")).get) + } + + @Test + def shouldOneWayMapShouldRollbackStateForStatefulServerInCaseOfFailure = { + val stateful = actorOf(new StatefulTransactor(2)) + stateful.start + val failer = actorOf[FailerTransactor] + failer.start + stateful ! SetMapStateOneWay("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state + stateful ! FailureOneWay("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) // call failing transactionrequired method + val notifier = (stateful !! GetNotifier).as[CountDownLatch] + assert(notifier.get.await(5, TimeUnit.SECONDS)) + assert("init" === (stateful !! GetMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure")).get) // check that state is == init state + } + + @Test + def shouldMapShouldRollbackStateForStatefulServerInCaseOfFailure = { + val stateful = actorOf[StatefulTransactor] + stateful.start + stateful !! SetMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state + val failer = actorOf[FailerTransactor] + failer.start + try { + stateful !! Failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) // call failing transactionrequired method + fail("should have thrown an exception") + } catch {case e: RuntimeException => {}} + assert("init" === (stateful !! GetMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure")).get) // check that state is == init state + } + + @Test + def shouldOneWayVectorShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { + val stateful = actorOf(new StatefulTransactor(2)) + stateful.start + stateful ! SetVectorStateOneWay("init") // set init state + stateful ! SuccessOneWay("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired + val notifier = (stateful !! GetNotifier).as[CountDownLatch] + assert(notifier.get.await(1, TimeUnit.SECONDS)) + assert(2 === (stateful !! GetVectorSize).get) + } + + @Test + def shouldVectorShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { + val stateful = actorOf[StatefulTransactor] + stateful.start + stateful !! SetVectorState("init") // set init state + stateful !! 
Success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired + assert(2 === (stateful !! GetVectorSize).get) + } + + @Test + def shouldOneWayVectorShouldRollbackStateForStatefulServerInCaseOfFailure = { + val stateful = actorOf(new StatefulTransactor(2)) + stateful.start + stateful ! SetVectorStateOneWay("init") // set init state + Thread.sleep(1000) + val failer = actorOf[FailerTransactor] + failer.start + stateful ! FailureOneWay("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) // call failing transactionrequired method + val notifier = (stateful !! GetNotifier).as[CountDownLatch] + assert(notifier.get.await(1, TimeUnit.SECONDS)) + assert(1 === (stateful !! GetVectorSize).get) + } + + @Test + def shouldVectorShouldRollbackStateForStatefulServerInCaseOfFailure = { + val stateful = actorOf[StatefulTransactor] + stateful.start + stateful !! SetVectorState("init") // set init state + val failer = actorOf[FailerTransactor] + failer.start + try { + stateful !! Failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) // call failing transactionrequired method + fail("should have thrown an exception") + } catch {case e: RuntimeException => {}} + assert(1 === (stateful !! GetVectorSize).get) + } + + @Test + def shouldOneWayRefShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { + val stateful = actorOf(new StatefulTransactor(2)) + stateful.start + stateful ! SetRefStateOneWay("init") // set init state + stateful ! SuccessOneWay("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired + val notifier = (stateful !! GetNotifier).as[CountDownLatch] + assert(notifier.get.await(1, TimeUnit.SECONDS)) + assert("new state" === (stateful !! GetRefState).get) + } + + @Test + def shouldRefShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { + val stateful = actorOf[StatefulTransactor] + stateful.start + stateful !! SetRefState("init") // set init state + stateful !! Success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired + assert("new state" === (stateful !! GetRefState).get) + } + + @Test + def shouldOneWayRefShouldRollbackStateForStatefulServerInCaseOfFailure = { + val stateful = actorOf(new StatefulTransactor(2)) + stateful.start + stateful ! SetRefStateOneWay("init") // set init state + Thread.sleep(1000) + val failer = actorOf[FailerTransactor] + failer.start + stateful ! FailureOneWay("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) // call failing transactionrequired method + val notifier = (stateful !! GetNotifier).as[CountDownLatch] + assert(notifier.get.await(1, TimeUnit.SECONDS)) + assert("init" === (stateful !! (GetRefState, 1000000)).get) // check that state is == init state + } + + @Test + def shouldRefShouldRollbackStateForStatefulServerInCaseOfFailure = { + val stateful = actorOf[StatefulTransactor] + stateful.start + stateful !! SetRefState("init") // set init state + val failer = actorOf[FailerTransactor] + failer.start + try { + stateful !! Failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) // call failing transactionrequired method + fail("should have thrown an exception") + } catch {case e: RuntimeException => {}} + assert("init" === (stateful !! 
GetRefState).get) // check that state is == init state + } +} diff --git a/akka-actors/src/test/scala/actor/supervisor/RestartStrategySpec.scala b/akka-actors/src/test/scala/actor/supervisor/RestartStrategySpec.scala new file mode 100644 index 0000000000..5023c756e1 --- /dev/null +++ b/akka-actors/src/test/scala/actor/supervisor/RestartStrategySpec.scala @@ -0,0 +1,74 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import org.scalatest.junit.JUnitSuite +import org.junit.Test + +import Actor._ +import se.scalablesolutions.akka.config.OneForOneStrategy +import java.util.concurrent.{TimeUnit, CountDownLatch} +import se.scalablesolutions.akka.config.ScalaConfig.{Permanent, LifeCycle} +import org.multiverse.api.latches.StandardLatch + +class RestartStrategySpec extends JUnitSuite { + + object Ping + object Crash + + @Test + def slaveShouldStayDeadAfterMaxRestarts = { + + val boss = actorOf(new Actor{ + self.trapExit = List(classOf[Throwable]) + self.faultHandler = Some(OneForOneStrategy(1, 1000)) + protected def receive = { case _ => () } + }).start + + val restartLatch = new StandardLatch + val secondRestartLatch = new StandardLatch + val countDownLatch = new CountDownLatch(2) + + + val slave = actorOf(new Actor{ + + protected def receive = { + case Ping => countDownLatch.countDown + case Crash => throw new Exception("Crashing...") + } + override def postRestart(reason: Throwable) = { + restartLatch.open + } + + override def shutdown = { + if (restartLatch.isOpen) { + secondRestartLatch.open + } + } + }) + boss.startLink(slave) + + slave ! Ping + slave ! Crash + slave ! Ping + + // test restart and post restart ping + assert(restartLatch.tryAwait(1, TimeUnit.SECONDS)) + assert(countDownLatch.await(1, TimeUnit.SECONDS)) + + // now crash again... should not restart + slave ! Crash + + assert(secondRestartLatch.tryAwait(1, TimeUnit.SECONDS)) + val exceptionLatch = new StandardLatch + try { + slave ! 
Ping // this should fail + } catch { + case e => exceptionLatch.open // expected here + } + assert(exceptionLatch.tryAwait(1, TimeUnit.SECONDS)) + } +} + diff --git a/akka-actors/src/test/scala/actor/supervisor/SupervisorHierarchySpec.scala b/akka-actors/src/test/scala/actor/supervisor/SupervisorHierarchySpec.scala new file mode 100644 index 0000000000..ffc9dbd860 --- /dev/null +++ b/akka-actors/src/test/scala/actor/supervisor/SupervisorHierarchySpec.scala @@ -0,0 +1,81 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import org.scalatest.junit.JUnitSuite +import org.junit.Test + +import Actor._ +import se.scalablesolutions.akka.config.OneForOneStrategy + +import java.util.concurrent.{TimeUnit, CountDownLatch} + +object SupervisorHierarchySpec { + class FireWorkerException(msg: String) extends Exception(msg) + + class CountDownActor(countDown: CountDownLatch) extends Actor { + protected def receive = { case _ => () } + override def postRestart(reason: Throwable) = countDown.countDown + } + + class CrasherActor extends Actor { + protected def receive = { case _ => () } + } +} + +class SupervisorHierarchySpec extends JUnitSuite { + import SupervisorHierarchySpec._ + + @Test + def killWorkerShouldRestartManagerAndOtherWorkers = { + val countDown = new CountDownLatch(4) + + val workerOne = actorOf(new CountDownActor(countDown)) + val workerTwo = actorOf(new CountDownActor(countDown)) + val workerThree = actorOf(new CountDownActor(countDown)) + + val boss = actorOf(new Actor{ + self.trapExit = List(classOf[Throwable]) + self.faultHandler = Some(OneForOneStrategy(5, 1000)) + + protected def receive = { case _ => () } + }).start + + val manager = actorOf(new CountDownActor(countDown)) + boss.startLink(manager) + + manager.startLink(workerOne) + manager.startLink(workerTwo) + manager.startLink(workerThree) + + workerOne ! Exit(workerOne, new FireWorkerException("Fire the worker!")) + + // manager + all workers should be restarted by only killing a worker + // manager doesn't trap exits, so boss will restart manager + + assert(countDown.await(2, TimeUnit.SECONDS)) + } + + @Test + def supervisorShouldReceiveNotificationMessageWhenMaximumNumberOfRestartsWithinTimeRangeIsReached = { + val countDown = new CountDownLatch(2) + val crasher = actorOf(new CountDownActor(countDown)) + val boss = actorOf(new Actor{ + self.trapExit = List(classOf[Throwable]) + self.faultHandler = Some(OneForOneStrategy(1, 5000)) + protected def receive = { + case MaximumNumberOfRestartsWithinTimeRangeReached(_, _, _, _) => + countDown.countDown + } + }).start + boss.startLink(crasher) + + crasher ! Exit(crasher, new FireWorkerException("Fire the worker!")) + crasher ! 
Exit(crasher, new FireWorkerException("Fire the worker!")) + + assert(countDown.await(2, TimeUnit.SECONDS)) + } +} + diff --git a/akka-actors/src/test/scala/actor/supervisor/SupervisorSpec.scala b/akka-actors/src/test/scala/actor/supervisor/SupervisorSpec.scala new file mode 100644 index 0000000000..01eb9cb006 --- /dev/null +++ b/akka-actors/src/test/scala/actor/supervisor/SupervisorSpec.scala @@ -0,0 +1,605 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import se.scalablesolutions.akka.config.ScalaConfig._ +import se.scalablesolutions.akka.config.OneForOneStrategy +import se.scalablesolutions.akka.{OneWay, Die, Ping} +import Actor._ + +import org.scalatest.junit.JUnitSuite +import org.junit.Test +import java.util.concurrent.{TimeUnit, LinkedBlockingQueue} + +object SupervisorSpec { + var messageLog = new LinkedBlockingQueue[String] + var oneWayLog = new LinkedBlockingQueue[String] + + def clearMessageLogs { + messageLog.clear + oneWayLog.clear + } + + class PingPong1Actor extends Actor { + import self._ + //dispatcher = Dispatchers.newThreadBasedDispatcher(self) + def receive = { + case Ping => + messageLog.put("ping") + reply("pong") + + case OneWay => + oneWayLog.put("oneway") + + case Die => + println("******************** GOT DIE 1") + throw new RuntimeException("Expected exception; to test fault-tolerance") + } + override def postRestart(reason: Throwable) { + println("******************** restart 1") + messageLog.put(reason.getMessage) + } + } + + class PingPong2Actor extends Actor { + import self._ + def receive = { + case Ping => + messageLog.put("ping") + reply("pong") + case Die => + println("******************** GOT DIE 2") + throw new RuntimeException("Expected exception; to test fault-tolerance") + } + override def postRestart(reason: Throwable) { + println("******************** restart 2") + messageLog.put(reason.getMessage) + } + } + + class PingPong3Actor extends Actor { + import self._ + def receive = { + case Ping => + messageLog.put("ping") + reply("pong") + case Die => + println("******************** GOT DIE 3") + throw new RuntimeException("Expected exception; to test fault-tolerance") + } + + override def postRestart(reason: Throwable) { + println("******************** restart 3") + messageLog.put(reason.getMessage) + } + } + + class TemporaryActor extends Actor { + import self._ + lifeCycle = Some(LifeCycle(Temporary)) + def receive = { + case Ping => + messageLog.put("ping") + reply("pong") + case Die => + println("******************** GOT DIE 3") + throw new RuntimeException("Expected exception; to test fault-tolerance") + } + + override def postRestart(reason: Throwable) { + println("******************** restart temporary") + messageLog.put(reason.getMessage) + } + } + + class Master extends Actor { + self.trapExit = classOf[Exception] :: Nil + self.faultHandler = Some(OneForOneStrategy(5, 1000)) + val temp = self.spawnLink[TemporaryActor] + override def receive = { + case Die => temp !! (Die, 5000) + } + } +} + +/** + * @author Jonas Bonér + */ +class SupervisorSpec extends JUnitSuite { + import SupervisorSpec._ + + var pingpong1: ActorRef = _ + var pingpong2: ActorRef = _ + var pingpong3: ActorRef = _ + var temporaryActor: ActorRef = _ + +/* + @Test def shouldStartServer = { + clearMessageLogs + val sup = getSingleActorAllForOneSupervisor + sup.start + + expect("pong") { + (pingpong1 !! 
(Ping, 5000)).getOrElse("nil") + } + } +*/ + @Test def shouldNotRestartProgrammaticallyLinkedTemporaryActor = { + clearMessageLogs + val master = actorOf[Master].start + + intercept[RuntimeException] { + master !! (Die, 5000) + } + + Thread.sleep(1000) + assert(messageLog.size === 0) + } + + @Test def shouldNotRestartTemporaryActor = { + clearMessageLogs + val sup = getTemporaryActorAllForOneSupervisor + + intercept[RuntimeException] { + temporaryActor !! (Die, 5000) + } + + Thread.sleep(1000) + assert(messageLog.size === 0) + } + + @Test def shouldStartServerForNestedSupervisorHierarchy = { + clearMessageLogs + val sup = getNestedSupervisorsAllForOneConf + sup.start + + expect("pong") { + (pingpong1 !! (Ping, 5000)).getOrElse("nil") + } + } + + @Test def shouldKillSingleActorOneForOne = { + clearMessageLogs + val sup = getSingleActorOneForOneSupervisor + + intercept[RuntimeException] { + pingpong1 !! (Die, 5000) + } + + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + } + + @Test def shouldCallKillCallSingleActorOneForOne = { + clearMessageLogs + val sup = getSingleActorOneForOneSupervisor + + expect("pong") { + (pingpong1 !! (Ping, 5000)).getOrElse("nil") + } + + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + intercept[RuntimeException] { + pingpong1 !! (Die, 5000) + } + + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("pong") { + (pingpong1 !! (Ping, 5000)).getOrElse("nil") + } + + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + } + + @Test def shouldKillSingleActorAllForOne = { + clearMessageLogs + val sup = getSingleActorAllForOneSupervisor + + intercept[RuntimeException] { + pingpong1 !! (Die, 5000) + } + + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + } + + @Test def shouldCallKillCallSingleActorAllForOne = { + clearMessageLogs + val sup = getSingleActorAllForOneSupervisor + + expect("pong") { + (pingpong1 !! (Ping, 5000)).getOrElse("nil") + } + + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + intercept[RuntimeException] { + pingpong1 !! (Die, 5000) + } + + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("pong") { + (pingpong1 !! (Ping, 5000)).getOrElse("nil") + } + + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + } + + @Test def shouldKillMultipleActorsOneForOne1 = { + clearMessageLogs + val sup = getMultipleActorsOneForOneConf + + intercept[RuntimeException] { + pingpong1 !! (Die, 5000) + } + + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + } + + @Test def shouldKillMultipleActorsOneForOne2 = { + clearMessageLogs + val sup = getMultipleActorsOneForOneConf + + intercept[RuntimeException] { + pingpong3 !! (Die, 5000) + } + + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + } + + @Test def shouldKillCallMultipleActorsOneForOne = { + clearMessageLogs + val sup = getMultipleActorsOneForOneConf + + expect("pong") { + (pingpong1 !! (Ping, 5000)).getOrElse("nil") + } + + expect("pong") { + (pingpong2 !! (Ping, 5000)).getOrElse("nil") + } + + expect("pong") { + (pingpong3 !! 
(Ping, 5000)).getOrElse("nil") + } + + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + intercept[RuntimeException] { + pingpong2 !! (Die, 5000) + } + + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("pong") { + (pingpong1 !! (Ping, 5000)).getOrElse("nil") + } + + expect("pong") { + (pingpong2 !! (Ping, 5000)).getOrElse("nil") + } + + expect("pong") { + (pingpong3 !! (Ping, 5000)).getOrElse("nil") + } + + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + } + + @Test def shouldKillMultipleActorsAllForOne = { + clearMessageLogs + val sup = getMultipleActorsAllForOneConf + + intercept[RuntimeException] { + pingpong2 !! (Die, 5000) + } + + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + } + + @Test def shouldCallKillCallMultipleActorsAllForOne = { + clearMessageLogs + val sup = getMultipleActorsAllForOneConf + + expect("pong") { + (pingpong1 !! (Ping, 5000)).getOrElse("nil") + } + + expect("pong") { + (pingpong2 !! (Ping, 5000)).getOrElse("nil") + } + + expect("pong") { + (pingpong3 !! (Ping, 5000)).getOrElse("nil") + } + + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + intercept[RuntimeException] { + pingpong2 !! (Die, 5000) + } + + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("pong") { + (pingpong1 !! (Ping, 5000)).getOrElse("nil") + } + + expect("pong") { + (pingpong2 !! (Ping, 5000)).getOrElse("nil") + } + + expect("pong") { + (pingpong3 !! (Ping, 5000)).getOrElse("nil") + } + + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + } + + @Test def shouldOneWayKillSingleActorOneForOne = { + clearMessageLogs + val sup = getSingleActorOneForOneSupervisor + + pingpong1 ! Die + + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + } + + @Test def shouldOneWayCallKillCallSingleActorOneForOne = { + clearMessageLogs + val sup = getSingleActorOneForOneSupervisor + + pingpong1 ! OneWay + + expect("oneway") { + oneWayLog.poll(5, TimeUnit.SECONDS) + } + pingpong1 ! Die + + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + pingpong1 ! OneWay + + expect("oneway") { + oneWayLog.poll(5, TimeUnit.SECONDS) + } + } + + @Test def shouldRestartKilledActorsForNestedSupervisorHierarchy = { + clearMessageLogs + val sup = getNestedSupervisorsAllForOneConf + + + expect("pong") { + (pingpong1 !! (Ping, 5000)).getOrElse("nil") + } + + expect("pong") { + (pingpong2 !! 
(Ping, 5000)).getOrElse("nil") + } + + expect("pong") { + (pingpong3 !! (Ping, 5000)).getOrElse("nil") + } + + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + intercept[RuntimeException] { + pingpong2 !! (Die, 5000) + } + + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5 , TimeUnit.SECONDS) + } + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("Expected exception; to test fault-tolerance") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("pong") { + (pingpong1 !! (Ping, 5000)).getOrElse("nil") + } + + expect("pong") { + (pingpong2 !! (Ping, 5000)).getOrElse("nil") + } + + expect("pong") { + (pingpong3 !! (Ping, 5000)).getOrElse("nil") + } + + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + expect("ping") { + messageLog.poll(5, TimeUnit.SECONDS) + } + } + + // ============================================= + // Create some supervisors with different configurations + + def getTemporaryActorAllForOneSupervisor: Supervisor = { + temporaryActor = actorOf[TemporaryActor].start + + Supervisor( + SupervisorConfig( + RestartStrategy(AllForOne, 3, 5000, List(classOf[Exception])), + Supervise( + temporaryActor, + LifeCycle(Temporary)) + :: Nil)) + } + + def getSingleActorAllForOneSupervisor: Supervisor = { + pingpong1 = actorOf[PingPong1Actor].start + + Supervisor( + SupervisorConfig( + RestartStrategy(AllForOne, 3, 5000, List(classOf[Exception])), + Supervise( + pingpong1, + LifeCycle(Permanent)) + :: Nil)) + } + + def getSingleActorOneForOneSupervisor: Supervisor = { + pingpong1 = actorOf[PingPong1Actor].start + + Supervisor( + SupervisorConfig( + RestartStrategy(OneForOne, 3, 5000, List(classOf[Exception])), + Supervise( + pingpong1, + LifeCycle(Permanent)) + :: Nil)) + } + + def getMultipleActorsAllForOneConf: Supervisor = { + pingpong1 = actorOf[PingPong1Actor].start + pingpong2 = actorOf[PingPong2Actor].start + pingpong3 = actorOf[PingPong3Actor].start + + Supervisor( + SupervisorConfig( + RestartStrategy(AllForOne, 3, 5000, List(classOf[Exception])), + Supervise( + pingpong1, + LifeCycle(Permanent)) + :: + Supervise( + pingpong2, + LifeCycle(Permanent)) + :: + Supervise( + pingpong3, + LifeCycle(Permanent)) + :: Nil)) + } + + def getMultipleActorsOneForOneConf: Supervisor = { + pingpong1 = actorOf[PingPong1Actor].start + pingpong2 = actorOf[PingPong2Actor].start + pingpong3 = actorOf[PingPong3Actor].start + + Supervisor( + SupervisorConfig( + RestartStrategy(OneForOne, 3, 5000, List(classOf[Exception])), + Supervise( + pingpong1, + LifeCycle(Permanent)) + :: + Supervise( + pingpong2, + LifeCycle(Permanent)) + :: + Supervise( + pingpong3, + LifeCycle(Permanent)) + :: Nil)) + } + + def getNestedSupervisorsAllForOneConf: Supervisor = { + pingpong1 = actorOf[PingPong1Actor].start + pingpong2 = actorOf[PingPong2Actor].start + pingpong3 = actorOf[PingPong3Actor].start + + Supervisor( + SupervisorConfig( + RestartStrategy(AllForOne, 3, 5000, List(classOf[Exception])), + Supervise( + pingpong1, + LifeCycle(Permanent)) + :: + SupervisorConfig( + RestartStrategy(AllForOne, 3, 5000, Nil), + Supervise( + pingpong2, + LifeCycle(Permanent)) + :: + Supervise( + pingpong3, + LifeCycle(Permanent)) + :: Nil) + :: Nil)) + } +} diff --git a/akka-actors/src/test/scala/dataflow/DataFlowSpec.scala 
b/akka-actors/src/test/scala/dataflow/DataFlowSpec.scala new file mode 100644 index 0000000000..ce54699a6f --- /dev/null +++ b/akka-actors/src/test/scala/dataflow/DataFlowSpec.scala @@ -0,0 +1,173 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.dataflow + +import org.scalatest.Spec +import org.scalatest.Assertions +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.BeforeAndAfterAll +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith + +import java.util.concurrent.{TimeUnit, CountDownLatch} +import java.util.concurrent.atomic.{AtomicLong, AtomicReference, AtomicInteger} + +import scala.annotation.tailrec + +import se.scalablesolutions.akka.dispatch.DefaultCompletableFuture +import se.scalablesolutions.akka.actor.ActorRegistry + +@RunWith(classOf[JUnitRunner]) +class DataFlowTest extends Spec with ShouldMatchers with BeforeAndAfterAll { + describe("DataflowVariable") { + it("should work and generate correct results") { + import DataFlow._ + + val latch = new CountDownLatch(1) + val result = new AtomicInteger(0) + val x, y, z = new DataFlowVariable[Int] + thread { + z << x() + y() + latch.countDown + result.set(z()) + } + thread { x << 40 } + thread { y << 2 } + + latch.await(3,TimeUnit.SECONDS) should equal (true) + List(x,y,z).foreach(_.shutdown) + result.get should equal (42) + ActorRegistry.shutdownAll + } + + it("should be able to transform a stream") { + import DataFlow._ + + def ints(n: Int, max: Int): List[Int] = + if (n == max) Nil + else n :: ints(n + 1, max) + + def sum(s: Int, stream: List[Int]): List[Int] = stream match { + case Nil => s :: Nil + case h :: t => s :: sum(h + s, t) + } + + val latch = new CountDownLatch(1) + val result = new AtomicReference[List[Int]](Nil) + val x = new DataFlowVariable[List[Int]] + val y = new DataFlowVariable[List[Int]] + val z = new DataFlowVariable[List[Int]] + + thread { x << ints(0, 1000) } + thread { y << sum(0, x()) } + + thread { z << y() + result.set(z()) + latch.countDown + } + + latch.await(3,TimeUnit.SECONDS) should equal (true) + List(x,y,z).foreach(_.shutdown) + result.get should equal (sum(0,ints(0,1000))) + ActorRegistry.shutdownAll + } + } + + /*it("should be able to join streams") { + import DataFlow._ + + def ints(n: Int, max: Int, stream: DataFlowStream[Int]): Unit = if (n != max) { + stream <<< n + ints(n + 1, max, stream) + } + + def sum(s: Int, in: DataFlowStream[Int], out: DataFlowStream[Int]): Unit = { + out <<< s + sum(in() + s, in, out) + } + + val producer = new DataFlowStream[Int] + val consumer = new DataFlowStream[Int] + val latch = new CountDownLatch(1) + val result = new AtomicInteger(0) + + thread { ints(0, 1000, producer) } + thread { + Thread.sleep(1000) + result.set(producer.map(x => x * x).foldLeft(0)(_ + _)) + latch.countDown + } + + latch.await(3,TimeUnit.SECONDS) should equal (true) + result.get should equal (332833500) + ActorRegistry.shutdownAll + } + + it("should be able to sum streams recursively") { + import DataFlow._ + + def ints(n: Int, max: Int, stream: DataFlowStream[Int]): Unit = if (n != max) { + stream <<< n + ints(n + 1, max, stream) + } + + def sum(s: Int, in: DataFlowStream[Int], out: DataFlowStream[Int]): Unit = { + out <<< s + sum(in() + s, in, out) + } + + val result = new AtomicLong(0) + + val producer = new DataFlowStream[Int] + val consumer = new DataFlowStream[Int] + val latch = new CountDownLatch(1) + + @tailrec def recurseSum(stream: DataFlowStream[Int]): Unit = { + val x = stream() + + 
if(result.addAndGet(x) == 166666500) + latch.countDown + + recurseSum(stream) + } + + thread { ints(0, 1000, producer) } + thread { sum(0, producer, consumer) } + thread { recurseSum(consumer) } + + latch.await(15,TimeUnit.SECONDS) should equal (true) + ActorRegistry.shutdownAll + }*/ + + /* Test not ready for prime time, causes some sort of deadlock */ + /* it("should be able to conditionally set variables") { + + import DataFlow._ + + val latch = new CountDownLatch(1) + val x, y, z, v = new DataFlowVariable[Int] + + val main = thread { + x << 1 + z << Math.max(x(),y()) + latch.countDown + } + + val setY = thread { + Thread sleep 2000 + y << 2 + } + + val setV = thread { + v << y + } + + latch.await(2,TimeUnit.SECONDS) should equal (true) + List(x,y,z,v) foreach (_.shutdown) + List(main,setY,setV) foreach (_ ! Exit) + println("Foo") + ActorRegistry.shutdownAll + }*/ +} diff --git a/akka-actors/src/test/scala/dispatch/DispatchersSpec.scala b/akka-actors/src/test/scala/dispatch/DispatchersSpec.scala new file mode 100644 index 0000000000..bb548b9251 --- /dev/null +++ b/akka-actors/src/test/scala/dispatch/DispatchersSpec.scala @@ -0,0 +1,74 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ +package se.scalablesolutions.akka.actor.dispatch + +import java.util.concurrent.{CountDownLatch, TimeUnit} +import org.scalatest.junit.JUnitSuite +import org.junit.Test + +import net.lag.configgy.Config +import scala.reflect.{Manifest} +import se.scalablesolutions.akka.dispatch._ + +object DispatchersSpec { + import Dispatchers._ + // + val tipe = "type" + val keepalivems = "keep-alive-ms" + val corepoolsizefactor = "core-pool-size-factor" + val maxpoolsizefactor = "max-pool-size-factor" + val executorbounds = "executor-bounds" + val allowcoretimeout = "allow-core-timeout" + val rejectionpolicy = "rejection-policy" // abort, caller-runs, discard-oldest, discard + val throughput = "throughput" // Throughput for ExecutorBasedEventDrivenDispatcher + val aggregate = "aggregate" // Aggregate on/off for HawtDispatchers + + def instance(dispatcher: MessageDispatcher): (MessageDispatcher) => Boolean = _ == dispatcher + def ofType[T <: MessageDispatcher : Manifest]: (MessageDispatcher) => Boolean = _.getClass == manifest[T].erasure + + def typesAndValidators: Map[String,(MessageDispatcher) => Boolean] = Map( + "ReactorBasedSingleThreadEventDriven" -> ofType[ReactorBasedSingleThreadEventDrivenDispatcher], + "ExecutorBasedEventDrivenWorkStealing" -> ofType[ExecutorBasedEventDrivenWorkStealingDispatcher], + "ExecutorBasedEventDriven" -> ofType[ExecutorBasedEventDrivenDispatcher], + "ReactorBasedThreadPoolEventDriven" -> ofType[ReactorBasedThreadPoolEventDrivenDispatcher], + "Hawt" -> ofType[HawtDispatcher], + "GlobalReactorBasedSingleThreadEventDriven" -> instance(globalReactorBasedSingleThreadEventDrivenDispatcher), + "GlobalReactorBasedThreadPoolEventDriven" -> instance(globalReactorBasedThreadPoolEventDrivenDispatcher), + "GlobalExecutorBasedEventDriven" -> instance(globalExecutorBasedEventDrivenDispatcher), + "GlobalHawt" -> instance(globalHawtDispatcher) + ) + + def validTypes = typesAndValidators.keys.toList + + lazy val allDispatchers: Map[String,Option[MessageDispatcher]] = { + validTypes.map(t => (t,from(Config.fromMap(Map(tipe -> t))))).toMap + } +} + +class DispatchersSpec extends JUnitSuite { + + import Dispatchers._ + import DispatchersSpec._ + + @Test def shouldYieldNoneIfTypeIsMissing { + assert(from(Config.fromMap(Map())) === None) + } + + @Test(expected = 
classOf[IllegalArgumentException]) + def shouldThrowIllegalArgumentExceptionIfTypeDoesntExist { + from(Config.fromMap(Map(tipe -> "typedoesntexist"))) + } + + @Test def shouldGetTheCorrectTypesOfDispatchers { + //It can create/obtain all defined types + assert(allDispatchers.values.forall(_.isDefined)) + //All created/obtained dispatchers are of the expected type/instance + assert(typesAndValidators.forall( tuple => tuple._2(allDispatchers(tuple._1).get) )) + } + + @Test def defaultingToDefaultWhileLoadingTheDefaultShouldWork { + assert(from(Config.fromMap(Map())).getOrElse(defaultGlobalDispatcher) == defaultGlobalDispatcher) + } + +} diff --git a/akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala b/akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala new file mode 100644 index 0000000000..9cdf43682e --- /dev/null +++ b/akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala @@ -0,0 +1,68 @@ +package se.scalablesolutions.akka.actor.dispatch + +import java.util.concurrent.{CountDownLatch, TimeUnit} +import org.scalatest.junit.JUnitSuite +import org.junit.Test +import se.scalablesolutions.akka.dispatch.Dispatchers +import se.scalablesolutions.akka.actor.Actor +import Actor._ + +object ExecutorBasedEventDrivenDispatcherActorSpec { + class TestActor extends Actor { + self.dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher(self.uuid) + def receive = { + case "Hello" => + self.reply("World") + case "Failure" => + throw new RuntimeException("Expected exception; to test fault-tolerance") + } + } + + object OneWayTestActor { + val oneWay = new CountDownLatch(1) + } + class OneWayTestActor extends Actor { + self.dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher(self.uuid) + def receive = { + case "OneWay" => OneWayTestActor.oneWay.countDown + } + } +} +class ExecutorBasedEventDrivenDispatcherActorSpec extends JUnitSuite { + import ExecutorBasedEventDrivenDispatcherActorSpec._ + + private val unit = TimeUnit.MILLISECONDS + + @Test def shouldSendOneWay = { + val actor = actorOf[OneWayTestActor].start + val result = actor ! "OneWay" + assert(OneWayTestActor.oneWay.await(1, TimeUnit.SECONDS)) + actor.stop + } + + @Test def shouldSendReplySync = { + val actor = actorOf[TestActor].start + val result = (actor !! ("Hello", 10000)).as[String] + assert("World" === result.get) + actor.stop + } + + @Test def shouldSendReplyAsync = { + val actor = actorOf[TestActor].start + val result = actor !! "Hello" + assert("World" === result.get.asInstanceOf[String]) + actor.stop + } + + @Test def shouldSendReceiveException = { + val actor = actorOf[TestActor].start + try { + actor !! 
"Failure" + fail("Should have thrown an exception") + } catch { + case e => + assert("Expected exception; to test fault-tolerance" === e.getMessage()) + } + actor.stop + } +} diff --git a/akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorsSpec.scala b/akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorsSpec.scala new file mode 100644 index 0000000000..fc8f1aa37f --- /dev/null +++ b/akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorsSpec.scala @@ -0,0 +1,61 @@ +package se.scalablesolutions.akka.actor.dispatch + +import org.scalatest.junit.JUnitSuite +import org.junit.Test +import org.scalatest.matchers.MustMatchers +import java.util.concurrent.CountDownLatch +import se.scalablesolutions.akka.actor.Actor +import Actor._ + +/** + * Tests the behaviour of the executor based event driven dispatcher when multiple actors are being dispatched on it. + * + * @author Jan Van Besien + */ +class ExecutorBasedEventDrivenDispatcherActorsSpec extends JUnitSuite with MustMatchers { + class SlowActor(finishedCounter: CountDownLatch) extends Actor { + self.id = "SlowActor" + + def receive = { + case x: Int => { + Thread.sleep(50) // slow actor + finishedCounter.countDown + } + } + } + + class FastActor(finishedCounter: CountDownLatch) extends Actor { + self.id = "FastActor" + + def receive = { + case x: Int => { + finishedCounter.countDown + } + } + } + + @Test def slowActorShouldntBlockFastActor { + val sFinished = new CountDownLatch(50) + val fFinished = new CountDownLatch(10) + val s = actorOf(new SlowActor(sFinished)).start + val f = actorOf(new FastActor(fFinished)).start + + // send a lot of stuff to s + for (i <- 1 to 50) { + s ! i + } + + // send some messages to f + for (i <- 1 to 10) { + f ! 
i + } + + // now assert that f is finished while s is still busy + fFinished.await + assert(sFinished.getCount > 0) + sFinished.await + assert(sFinished.getCount === 0) + f.stop + s.stop + } +} diff --git a/akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala b/akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala new file mode 100644 index 0000000000..cde57a0544 --- /dev/null +++ b/akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala @@ -0,0 +1,107 @@ +package se.scalablesolutions.akka.actor.dispatch + +import org.scalatest.matchers.MustMatchers +import org.scalatest.junit.JUnitSuite + +import org.junit.Test + +import se.scalablesolutions.akka.dispatch.Dispatchers + +import java.util.concurrent.{TimeUnit, CountDownLatch} +import se.scalablesolutions.akka.actor.{IllegalActorStateException, Actor} +import Actor._ + +object ExecutorBasedEventDrivenWorkStealingDispatcherSpec { + val delayableActorDispatcher = Dispatchers.newExecutorBasedEventDrivenWorkStealingDispatcher("pooled-dispatcher") + val sharedActorDispatcher = Dispatchers.newExecutorBasedEventDrivenWorkStealingDispatcher("pooled-dispatcher") + val parentActorDispatcher = Dispatchers.newExecutorBasedEventDrivenWorkStealingDispatcher("pooled-dispatcher") + + class DelayableActor(name: String, delay: Int, finishedCounter: CountDownLatch) extends Actor { + self.dispatcher = delayableActorDispatcher + var invocationCount = 0 + self.id = name + + def receive = { + case x: Int => { + Thread.sleep(delay) + invocationCount += 1 + finishedCounter.countDown + } + } + } + + class FirstActor extends Actor { + self.dispatcher = sharedActorDispatcher + def receive = {case _ => {}} + } + + class SecondActor extends Actor { + self.dispatcher = sharedActorDispatcher + def receive = {case _ => {}} + } + + class ParentActor extends Actor { + self.dispatcher = parentActorDispatcher + def receive = {case _ => {}} + } + + class ChildActor extends ParentActor { + } +} + +/** + * @author Jan Van Besien + */ +class ExecutorBasedEventDrivenWorkStealingDispatcherSpec extends JUnitSuite with MustMatchers { + import ExecutorBasedEventDrivenWorkStealingDispatcherSpec._ + + @Test def fastActorShouldStealWorkFromSlowActor { + val finishedCounter = new CountDownLatch(110) + + val slow = actorOf(new DelayableActor("slow", 50, finishedCounter)).start + val fast = actorOf(new DelayableActor("fast", 10, finishedCounter)).start + + for (i <- 1 to 100) { + // send most work to slow actor + if (i % 20 == 0) + fast ! i + else + slow ! i + } + + // now send some messages to actors to keep the dispatcher dispatching messages + for (i <- 1 to 10) { + Thread.sleep(150) + if (i % 2 == 0) + fast ! i + else + slow ! 
i + } + + finishedCounter.await(5, TimeUnit.SECONDS) + fast.actor.asInstanceOf[DelayableActor].invocationCount must be > + (slow.actor.asInstanceOf[DelayableActor].invocationCount) + slow.stop + fast.stop + } + + @Test def canNotUseActorsOfDifferentTypesInSameDispatcher(): Unit = { + val first = actorOf[FirstActor] + val second = actorOf[SecondActor] + + first.start + intercept[IllegalActorStateException] { + second.start + } + } + + @Test def canNotUseActorsOfDifferentSubTypesInSameDispatcher(): Unit = { + val parent = actorOf[ParentActor] + val child = actorOf[ChildActor] + + parent.start + intercept[IllegalActorStateException] { + child.start + } + } +} diff --git a/akka-actors/src/test/scala/dispatch/FutureSpec.scala b/akka-actors/src/test/scala/dispatch/FutureSpec.scala new file mode 100644 index 0000000000..f740763fdf --- /dev/null +++ b/akka-actors/src/test/scala/dispatch/FutureSpec.scala @@ -0,0 +1,106 @@ +package se.scalablesolutions.akka.actor + +import org.scalatest.junit.JUnitSuite +import org.junit.Test +import se.scalablesolutions.akka.dispatch.Futures +import Actor._ + +object FutureSpec { + class TestActor extends Actor { + def receive = { + case "Hello" => + self.reply("World") + case "NoReply" => {} + case "Failure" => + throw new RuntimeException("Expected exception; to test fault-tolerance") + } + } +} + +class FutureSpec extends JUnitSuite { + import FutureSpec._ + + @Test def shouldActorReplyResultThroughExplicitFuture { + val actor = actorOf[TestActor] + actor.start + val future = actor !!! "Hello" + future.await + assert(future.result.isDefined) + assert("World" === future.result.get) + actor.stop + } + + @Test def shouldActorReplyExceptionThroughExplicitFuture { + val actor = actorOf[TestActor] + actor.start + val future = actor !!! "Failure" + future.await + assert(future.exception.isDefined) + assert("Expected exception; to test fault-tolerance" === future.exception.get.getMessage) + actor.stop + } + + /* + // FIXME: implement Futures.awaitEither, and uncomment these two tests + @Test def shouldFutureAwaitEitherLeft = { + val actor1 = actorOf[TestActor].start + val actor2 = actorOf[TestActor].start + val future1 = actor1 !!! "Hello" + val future2 = actor2 !!! "NoReply" + val result = Futures.awaitEither(future1, future2) + assert(result.isDefined) + assert("World" === result.get) + actor1.stop + actor2.stop + } + + @Test def shouldFutureAwaitEitherRight = { + val actor1 = actorOf[TestActor].start + val actor2 = actorOf[TestActor].start + val future1 = actor1 !!! "NoReply" + val future2 = actor2 !!! "Hello" + val result = Futures.awaitEither(future1, future2) + assert(result.isDefined) + assert("World" === result.get) + actor1.stop + actor2.stop + } + */ + @Test def shouldFutureAwaitOneLeft = { + val actor1 = actorOf[TestActor].start + val actor2 = actorOf[TestActor].start + val future1 = actor1 !!! "NoReply" + val future2 = actor2 !!! "Hello" + val result = Futures.awaitOne(List(future1, future2)) + assert(result.result.isDefined) + assert("World" === result.result.get) + actor1.stop + actor2.stop + } + + @Test def shouldFutureAwaitOneRight = { + val actor1 = actorOf[TestActor].start + val actor2 = actorOf[TestActor].start + val future1 = actor1 !!! "Hello" + val future2 = actor2 !!! 
"NoReply" + val result = Futures.awaitOne(List(future1, future2)) + assert(result.result.isDefined) + assert("World" === result.result.get) + actor1.stop + actor2.stop + } + + @Test def shouldFutureAwaitAll = { + val actor1 = actorOf[TestActor].start + val actor2 = actorOf[TestActor].start + val future1 = actor1 !!! "Hello" + val future2 = actor2 !!! "Hello" + Futures.awaitAll(List(future1, future2)) + assert(future1.result.isDefined) + assert("World" === future1.result.get) + assert(future2.result.isDefined) + assert("World" === future2.result.get) + actor1.stop + actor2.stop + } +} diff --git a/akka-actors/src/test/scala/dispatch/HawtDispatcherActorSpec.scala b/akka-actors/src/test/scala/dispatch/HawtDispatcherActorSpec.scala new file mode 100644 index 0000000000..2c45f3388c --- /dev/null +++ b/akka-actors/src/test/scala/dispatch/HawtDispatcherActorSpec.scala @@ -0,0 +1,71 @@ +package se.scalablesolutions.akka.actor.dispatch + +import java.util.concurrent.{CountDownLatch, TimeUnit} + +import org.scalatest.junit.JUnitSuite +import org.junit.Test + +import se.scalablesolutions.akka.dispatch.{HawtDispatcher, Dispatchers} +import se.scalablesolutions.akka.actor.Actor +import Actor._ + +object HawtDispatcherActorSpec { + class TestActor extends Actor { + self.dispatcher = new HawtDispatcher() + def receive = { + case "Hello" => + self.reply("World") + case "Failure" => + throw new RuntimeException("Expected exception; to test fault-tolerance") + } + } + + object OneWayTestActor { + val oneWay = new CountDownLatch(1) + } + class OneWayTestActor extends Actor { + self.dispatcher = new HawtDispatcher() + def receive = { + case "OneWay" => OneWayTestActor.oneWay.countDown + } + } +} + +class HawtDispatcherActorSpec extends JUnitSuite { + import HawtDispatcherActorSpec._ + + private val unit = TimeUnit.MILLISECONDS + + @Test def shouldSendOneWay = { + val actor = actorOf[OneWayTestActor].start + val result = actor ! "OneWay" + assert(OneWayTestActor.oneWay.await(1, TimeUnit.SECONDS)) + actor.stop + } + + @Test def shouldSendReplySync = { + val actor = actorOf[TestActor].start + val result = (actor !! ("Hello", 10000)).as[String] + assert("World" === result.get) + actor.stop + } + + @Test def shouldSendReplyAsync = { + val actor = actorOf[TestActor].start + val result = actor !! "Hello" + assert("World" === result.get.asInstanceOf[String]) + actor.stop + } + + @Test def shouldSendReceiveException = { + val actor = actorOf[TestActor].start + try { + actor !! 
"Failure" + fail("Should have thrown an exception") + } catch { + case e => + assert("Expected exception; to test fault-tolerance" === e.getMessage()) + } + actor.stop + } +} diff --git a/akka-actors/src/test/scala/dispatch/HawtDispatcherEchoServer.scala b/akka-actors/src/test/scala/dispatch/HawtDispatcherEchoServer.scala new file mode 100644 index 0000000000..97f2e0df9d --- /dev/null +++ b/akka-actors/src/test/scala/dispatch/HawtDispatcherEchoServer.scala @@ -0,0 +1,207 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor.dispatch + +import scala.collection.mutable.ListBuffer + +import java.util.concurrent.TimeUnit +import java.net.InetSocketAddress +import java.io.IOException +import java.nio.ByteBuffer +import java.nio.channels.{SocketChannel, SelectionKey, ServerSocketChannel} + +import se.scalablesolutions.akka.actor._ +import se.scalablesolutions.akka.actor.Actor._ +import se.scalablesolutions.akka.dispatch.HawtDispatcher + +import org.fusesource.hawtdispatch.DispatchSource +import org.fusesource.hawtdispatch.ScalaDispatch._ + +/** + * This is an example of how to crate an Akka actor based TCP echo server using + * the HawtDispatch dispatcher and NIO event sources. + */ +object HawtDispatcherEchoServer { + + private val hawt = new HawtDispatcher + var port=4444; + var useReactorPattern=true + + def main(args:Array[String]):Unit = run + + def run() = { + val server = actorOf(new Server(port)) + server.start + Scheduler.schedule(server, DisplayStats, 1, 5, TimeUnit.SECONDS) + + println("Press enter to shutdown."); + System.in.read + server ! Shutdown + } + + case object Shutdown + case object DisplayStats + case class SessionClosed(session:ActorRef) + + class Server(val port: Int) extends Actor { + + self.dispatcher = hawt + + var channel:ServerSocketChannel = _ + var accept_source:DispatchSource = _ + var sessions = ListBuffer[ActorRef]() + + override def init = { + channel = ServerSocketChannel.open(); + channel.socket().bind(new InetSocketAddress(port)); + channel.configureBlocking(false); + + // Setup the accept source, it will callback to the handler methods + // via the actor's mailbox so you don't need to worry about + // synchronizing with the local variables + accept_source = createSource(channel, SelectionKey.OP_ACCEPT, HawtDispatcher.queue(self)); + accept_source.setEventHandler(^{ accept }); + accept_source.setDisposer(^{ + channel.close(); + println("Closed port: "+port); + }); + + accept_source.resume + + println("Listening on port: "+port); + } + + + private def accept() = { + var socket = channel.accept(); + while( socket!=null ) { + try { + socket.configureBlocking(false); + val session = actorOf(new Session(self, socket)) + session.start() + sessions += session + } catch { + case e: Exception => + socket.close + } + socket = channel.accept(); + } + } + + def receive = { + case SessionClosed(session) => + sessions = sessions.filterNot( _ == session ) + session.stop + case DisplayStats => + sessions.foreach { session=> + session ! 
DisplayStats + } + case Shutdown => + sessions.foreach { session=> + session.stop + } + sessions.clear + accept_source.release + self.stop + } + } + + class Session(val server:ActorRef, val channel: SocketChannel) extends Actor { + + self.dispatcher = hawt + + val buffer = ByteBuffer.allocate(1024); + val remote_address = channel.socket.getRemoteSocketAddress.toString + + var read_source:DispatchSource = _ + var write_source:DispatchSource = _ + + var readCounter = 0L + var writeCounter = 0L + var closed = false + + override def init = { + + if(useReactorPattern) { + // Then we will be using the reactor pattern for handling IO: + // Pin this actor to a single thread. The read/write event sources will poll + // a Selector on the pinned thread. Since the IO events are generated on the same + // thread where the Actor is pinned to, it can avoid a substantial amount of + // thread synchronization. Plus your GC will perform better since all the IO + // processing is done on a single thread. + HawtDispatcher.pin(self) + } else { + // Then we will be using the proactor pattern for handling IO: + // Then the actor will not be pinned to a specific thread. The read/write + // event sources will poll a Selector and then asynchronously dispatch the + // events to the actor via the thread pool. + } + + // Setup the sources, they will callback to the handler methods + // via the actor's mailbox so you don't need to worry about + // synchronizing with the local variables + read_source = createSource(channel, SelectionKey.OP_READ, HawtDispatcher.queue(self)); + read_source.setEventHandler(^{ read }) + read_source.setCancelHandler(^{ close }) + + write_source = createSource(channel, SelectionKey.OP_WRITE, HawtDispatcher.queue(self)); + write_source.setEventHandler(^{ write }) + write_source.setCancelHandler(^{ close }) + + read_source.resume + println("Accepted connection from: "+remote_address); + } + + override def shutdown = { + closed = true + read_source.release + write_source.release + channel.close + } + + private def catchio(func: =>Unit):Unit = { + try { + func + } catch { + case e:IOException => close + } + } + + def read():Unit = catchio { + channel.read(buffer) match { + case -1 => + close // peer disconnected. + case 0 => + case count:Int => + readCounter += count + buffer.flip; + read_source.suspend + write_source.resume + write() + } + } + + def write() = catchio { + writeCounter += channel.write(buffer) + if (buffer.remaining == 0) { + buffer.clear + write_source.suspend + read_source.resume + } + } + + def close() = { + if( !closed ) { + closed = true + server ! 
SessionClosed(self) + } + } + + def receive = { + case DisplayStats => + println("connection to %s reads: %,d bytes, writes: %,d".format(remote_address, readCounter, writeCounter)) + } + } +} diff --git a/akka-actors/src/test/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala b/akka-actors/src/test/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala new file mode 100644 index 0000000000..de9b912bf5 --- /dev/null +++ b/akka-actors/src/test/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala @@ -0,0 +1,71 @@ +package se.scalablesolutions.akka.actor.dispatch + +import java.util.concurrent.{CountDownLatch, TimeUnit} +import org.scalatest.junit.JUnitSuite +import org.junit.Test + +import se.scalablesolutions.akka.dispatch.Dispatchers +import se.scalablesolutions.akka.actor.Actor +import Actor._ + +object ReactorBasedSingleThreadEventDrivenDispatcherActorSpec { + class TestActor extends Actor { + self.dispatcher = Dispatchers.newReactorBasedSingleThreadEventDrivenDispatcher(self.uuid) + + def receive = { + case "Hello" => + self.reply("World") + case "Failure" => + throw new RuntimeException("Expected exception; to test fault-tolerance") + } + } + + object OneWayTestActor { + val oneWay = new CountDownLatch(1) + } + class OneWayTestActor extends Actor { + self.dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher(self.uuid) + def receive = { + case "OneWay" => OneWayTestActor.oneWay.countDown + } + } +} + +class ReactorBasedSingleThreadEventDrivenDispatcherActorSpec extends JUnitSuite { + import ReactorBasedSingleThreadEventDrivenDispatcherActorSpec._ + + private val unit = TimeUnit.MILLISECONDS + + @Test def shouldSendOneWay = { + val actor = actorOf[OneWayTestActor].start + val result = actor ! "OneWay" + assert(OneWayTestActor.oneWay.await(1, TimeUnit.SECONDS)) + actor.stop + } + + @Test def shouldSendReplySync = { + val actor = actorOf[TestActor].start + val result = (actor !! ("Hello", 10000)).as[String].get + assert("World" === result) + actor.stop + } + + @Test def shouldSendReplyAsync = { + val actor = actorOf[TestActor].start + val result = actor !! "Hello" + assert("World" === result.get.asInstanceOf[String]) + actor.stop + } + + @Test def shouldSendReceiveException = { + val actor = actorOf[TestActor].start + try { + actor !! 
"Failure" + fail("Should have thrown an exception") + } catch { + case e => + assert("Expected exception; to test fault-tolerance" === e.getMessage()) + } + actor.stop + } +} diff --git a/akka-actors/src/test/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala b/akka-actors/src/test/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala new file mode 100644 index 0000000000..4001df8f56 --- /dev/null +++ b/akka-actors/src/test/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala @@ -0,0 +1,66 @@ +package se.scalablesolutions.akka.actor.dispatch + +import java.util.concurrent.{CountDownLatch, TimeUnit} +import org.scalatest.junit.JUnitSuite +import org.junit.Test + +import se.scalablesolutions.akka.dispatch.Dispatchers +import se.scalablesolutions.akka.actor.Actor +import Actor._ + +object ReactorBasedThreadPoolEventDrivenDispatcherActorSpec { + class TestActor extends Actor { + self.dispatcher = Dispatchers.newReactorBasedThreadPoolEventDrivenDispatcher(self.uuid) + def receive = { + case "Hello" => + self.reply("World") + case "Failure" => + throw new RuntimeException("Expected exception; to test fault-tolerance") + } + } +} + +class ReactorBasedThreadPoolEventDrivenDispatcherActorSpec extends JUnitSuite { + import ReactorBasedThreadPoolEventDrivenDispatcherActorSpec._ + + private val unit = TimeUnit.MILLISECONDS + + @Test def shouldSendOneWay { + val oneWay = new CountDownLatch(1) + val actor = actorOf(new Actor { + self.dispatcher = Dispatchers.newReactorBasedThreadPoolEventDrivenDispatcher(self.uuid) + def receive = { + case "OneWay" => oneWay.countDown + } + }).start + val result = actor ! "OneWay" + assert(oneWay.await(1, TimeUnit.SECONDS)) + actor.stop + } + + @Test def shouldSendReplySync = { + val actor = actorOf[TestActor].start + val result = (actor !! ("Hello", 10000)).as[String].get + assert("World" === result) + actor.stop + } + + @Test def shouldSendReplyAsync = { + val actor = actorOf[TestActor].start + val result = actor !! "Hello" + assert("World" === result.get.asInstanceOf[String]) + actor.stop + } + + @Test def shouldSendReceiveException = { + val actor = actorOf[TestActor].start + try { + actor !! 
"Failure" + fail("Should have thrown an exception") + } catch { + case e => + assert("Expected exception; to test fault-tolerance" === e.getMessage()) + } + actor.stop + } +} diff --git a/akka-actors/src/test/scala/dispatch/ThreadBasedActorSpec.scala b/akka-actors/src/test/scala/dispatch/ThreadBasedActorSpec.scala new file mode 100644 index 0000000000..d69ee984d8 --- /dev/null +++ b/akka-actors/src/test/scala/dispatch/ThreadBasedActorSpec.scala @@ -0,0 +1,67 @@ +package se.scalablesolutions.akka.actor.dispatch + +import java.util.concurrent.{CountDownLatch, TimeUnit} +import org.scalatest.junit.JUnitSuite +import org.junit.Test + +import se.scalablesolutions.akka.dispatch.Dispatchers +import se.scalablesolutions.akka.actor.Actor +import Actor._ + +object ThreadBasedActorSpec { + class TestActor extends Actor { + self.dispatcher = Dispatchers.newThreadBasedDispatcher(self) + + def receive = { + case "Hello" => + self.reply("World") + case "Failure" => + throw new RuntimeException("Expected exception; to test fault-tolerance") + } + } +} + +class ThreadBasedActorSpec extends JUnitSuite { + import ThreadBasedActorSpec._ + + private val unit = TimeUnit.MILLISECONDS + + @Test def shouldSendOneWay { + var oneWay = new CountDownLatch(1) + val actor = actorOf(new Actor { + self.dispatcher = Dispatchers.newThreadBasedDispatcher(self) + def receive = { + case "OneWay" => oneWay.countDown + } + }).start + val result = actor ! "OneWay" + assert(oneWay.await(1, TimeUnit.SECONDS)) + actor.stop + } + + @Test def shouldSendReplySync = { + val actor = actorOf[TestActor].start + val result = (actor !! ("Hello", 10000)).as[String] + assert("World" === result.get) + actor.stop + } + + @Test def shouldSendReplyAsync = { + val actor = actorOf[TestActor].start + val result = actor !! "Hello" + assert("World" === result.get.asInstanceOf[String]) + actor.stop + } + + @Test def shouldSendReceiveException = { + val actor = actorOf[TestActor].start + try { + actor !! 
"Failure" + fail("Should have thrown an exception") + } catch { + case e => + assert("Expected exception; to test fault-tolerance" === e.getMessage()) + } + actor.stop + } +} diff --git a/akka-actors/src/test/scala/dispatch/ThreadBasedDispatcherSpec.scala b/akka-actors/src/test/scala/dispatch/ThreadBasedDispatcherSpec.scala new file mode 100644 index 0000000000..44cd9aade3 --- /dev/null +++ b/akka-actors/src/test/scala/dispatch/ThreadBasedDispatcherSpec.scala @@ -0,0 +1,91 @@ +package se.scalablesolutions.akka.dispatch + +import java.util.concurrent.CountDownLatch +import java.util.concurrent.TimeUnit +import java.util.concurrent.atomic.AtomicBoolean +import java.util.concurrent.locks.Lock +import java.util.concurrent.locks.ReentrantLock + +import org.scalatest.junit.JUnitSuite +import org.junit.{Test, Before} + +import se.scalablesolutions.akka.actor.Actor +import Actor._ + +// FIXME use this test when we have removed the MessageInvoker classes +/* +class ThreadBasedDispatcherSpec extends JUnitSuite { + private var threadingIssueDetected: AtomicBoolean = null + val key1 = actorOf(new Actor { def receive = { case _ => {}} }) + val key2 = actorOf(new Actor { def receive = { case _ => {}} }) + val key3 = actorOf(new Actor { def receive = { case _ => {}} }) + + class TestMessageHandle(handleLatch: CountDownLatch) extends MessageInvoker { + val guardLock: Lock = new ReentrantLock + + def invoke(message: MessageInvocation) { + try { + if (threadingIssueDetected.get) return + if (guardLock.tryLock) { + handleLatch.countDown + } else { + threadingIssueDetected.set(true) + } + } catch { + case e: Exception => threadingIssueDetected.set(true) + } finally { + guardLock.unlock + } + } + } + + @Before + def setUp = { + threadingIssueDetected = new AtomicBoolean(false) + } + + @Test + def shouldMessagesDispatchedToTheSameHandlerAreExecutedSequentially = { + internalTestMessagesDispatchedToTheSameHandlerAreExecutedSequentially + } + + @Test + def shouldMessagesDispatchedToHandlersAreExecutedInFIFOOrder = { + internalTestMessagesDispatchedToHandlersAreExecutedInFIFOOrder + } + + private def internalTestMessagesDispatchedToTheSameHandlerAreExecutedSequentially(): Unit = { + val guardLock = new ReentrantLock + val handleLatch = new CountDownLatch(100) + val dispatcher = new ThreadBasedDispatcher("name", new TestMessageHandle(handleLatch)) + dispatcher.start + for (i <- 0 until 100) { + dispatcher.dispatch(new MessageInvocation(key1, new Object, None, None)) + } + assert(handleLatch.await(5, TimeUnit.SECONDS)) + assert(!threadingIssueDetected.get) + } + + private def internalTestMessagesDispatchedToHandlersAreExecutedInFIFOOrder(): Unit = { + val handleLatch = new CountDownLatch(100) + val dispatcher = new ThreadBasedDispatcher("name", new MessageInvoker { + var currentValue = -1; + def invoke(message: MessageInvocation) { + if (threadingIssueDetected.get) return + val messageValue = message.message.asInstanceOf[Int] + if (messageValue.intValue == currentValue + 1) { + currentValue = messageValue.intValue + handleLatch.countDown + } else threadingIssueDetected.set(true) + } + }) + dispatcher.start + for (i <- 0 until 100) { + dispatcher.dispatch(new MessageInvocation(key1, i, None, None)) + } + assert(handleLatch.await(5, TimeUnit.SECONDS)) + assert(!threadingIssueDetected.get) + dispatcher.shutdown + } +} +*/ diff --git a/akka-actors/src/test/scala/misc/ActorRegistrySpec.scala b/akka-actors/src/test/scala/misc/ActorRegistrySpec.scala new file mode 100644 index 0000000000..8c9e0778ca --- /dev/null +++ 
b/akka-actors/src/test/scala/misc/ActorRegistrySpec.scala @@ -0,0 +1,255 @@ +package se.scalablesolutions.akka.actor + +import org.scalatest.junit.JUnitSuite +import org.junit.Test +import Actor._ +import java.util.concurrent.{CyclicBarrier, TimeUnit, CountDownLatch} + +object ActorRegistrySpec { + var record = "" + class TestActor extends Actor { + self.id = "MyID" + def receive = { + case "ping" => + record = "pong" + record + self.reply("got ping") + } + } + + class TestActor2 extends Actor { + self.id = "MyID2" + def receive = { + case "ping" => + record = "pong" + record + self.reply("got ping") + case "ping2" => + record = "pong" + record + self.reply("got ping") + } + } + +} + +class ActorRegistrySpec extends JUnitSuite { + import ActorRegistrySpec._ + + @Test def shouldGetActorByIdFromActorRegistry { + ActorRegistry.shutdownAll + val actor = actorOf[TestActor] + actor.start + val actors = ActorRegistry.actorsFor("MyID") + assert(actors.size === 1) + assert(actors.head.actor.isInstanceOf[TestActor]) + assert(actors.head.id === "MyID") + actor.stop + } + + @Test def shouldGetActorByUUIDFromActorRegistry { + ActorRegistry.shutdownAll + val actor = actorOf[TestActor] + val uuid = actor.uuid + actor.start + val actorOrNone = ActorRegistry.actorFor(uuid) + assert(actorOrNone.isDefined) + assert(actorOrNone.get.uuid === uuid) + actor.stop + } + + @Test def shouldGetActorByClassFromActorRegistry { + ActorRegistry.shutdownAll + val actor = actorOf[TestActor] + actor.start + val actors = ActorRegistry.actorsFor(classOf[TestActor]) + assert(actors.size === 1) + assert(actors.head.actor.isInstanceOf[TestActor]) + assert(actors.head.id === "MyID") + actor.stop + } + + @Test def shouldGetActorByManifestFromActorRegistry { + ActorRegistry.shutdownAll + val actor = actorOf[TestActor] + actor.start + val actors = ActorRegistry.actorsFor[TestActor] + assert(actors.size === 1) + assert(actors.head.actor.isInstanceOf[TestActor]) + assert(actors.head.id === "MyID") + actor.stop + } + + @Test def shouldFindThingsFromActorRegistry { + ActorRegistry.shutdownAll + val actor = actorOf[TestActor] + actor.start + val found = ActorRegistry.find({ case a: ActorRef if a.actor.isInstanceOf[TestActor] => a }) + assert(found.isDefined) + assert(found.get.actor.isInstanceOf[TestActor]) + assert(found.get.id === "MyID") + actor.stop + } + + @Test def shouldGetActorsByIdFromActorRegistry { + ActorRegistry.shutdownAll + val actor1 = actorOf[TestActor] + actor1.start + val actor2 = actorOf[TestActor] + actor2.start + val actors = ActorRegistry.actorsFor("MyID") + assert(actors.size === 2) + assert(actors.head.actor.isInstanceOf[TestActor]) + assert(actors.head.id === "MyID") + assert(actors.last.actor.isInstanceOf[TestActor]) + assert(actors.last.id === "MyID") + actor1.stop + actor2.stop + } + + @Test def shouldGetActorsByClassFromActorRegistry { + ActorRegistry.shutdownAll + val actor1 = actorOf[TestActor] + actor1.start + val actor2 = actorOf[TestActor] + actor2.start + val actors = ActorRegistry.actorsFor(classOf[TestActor]) + assert(actors.size === 2) + assert(actors.head.actor.isInstanceOf[TestActor]) + assert(actors.head.id === "MyID") + assert(actors.last.actor.isInstanceOf[TestActor]) + assert(actors.last.id === "MyID") + actor1.stop + actor2.stop + } + + @Test def shouldGetActorsByManifestFromActorRegistry { + ActorRegistry.shutdownAll + val actor1 = actorOf[TestActor] + actor1.start + val actor2 = actorOf[TestActor] + actor2.start + val actors = ActorRegistry.actorsFor[TestActor] + assert(actors.size === 2) + 
assert(actors.head.actor.isInstanceOf[TestActor]) + assert(actors.head.id === "MyID") + assert(actors.last.actor.isInstanceOf[TestActor]) + assert(actors.last.id === "MyID") + actor1.stop + actor2.stop + } + + @Test def shouldGetActorsByMessageFromActorRegistry { + + ActorRegistry.shutdownAll + val actor1 = actorOf[TestActor] + actor1.start + val actor2 = actorOf[TestActor2] + actor2.start + + val actorsForAcotrTestActor = ActorRegistry.actorsFor[TestActor] + assert(actorsForAcotrTestActor.size === 1) + + val actorsForAcotrTestActor2 = ActorRegistry.actorsFor[TestActor2] + assert(actorsForAcotrTestActor2.size === 1) + + val actorsForAcotr = ActorRegistry.actorsFor[Actor] + assert(actorsForAcotr.size === 2) + + + val actorsForMessagePing2 = ActorRegistry.actorsFor[Actor]("ping2") + assert(actorsForMessagePing2.size === 1) + + val actorsForMessagePing = ActorRegistry.actorsFor[Actor]("ping") + assert(actorsForMessagePing.size === 2) + + actor1.stop + actor2.stop + } + + @Test def shouldGetAllActorsFromActorRegistry { + ActorRegistry.shutdownAll + val actor1 = actorOf[TestActor] + actor1.start + val actor2 = actorOf[TestActor] + actor2.start + val actors = ActorRegistry.actors + assert(actors.size === 2) + assert(actors.head.actor.isInstanceOf[TestActor]) + assert(actors.head.id === "MyID") + assert(actors.last.actor.isInstanceOf[TestActor]) + assert(actors.last.id === "MyID") + actor1.stop + actor2.stop + } + + @Test def shouldGetResponseByAllActorsInActorRegistryWhenInvokingForeach { + ActorRegistry.shutdownAll + val actor1 = actorOf[TestActor] + actor1.start + val actor2 = actorOf[TestActor] + actor2.start + record = "" + ActorRegistry.foreach(actor => actor !! "ping") + assert(record === "pongpong") + actor1.stop + actor2.stop + } + + @Test def shouldShutdownAllActorsInActorRegistry { + ActorRegistry.shutdownAll + val actor1 = actorOf[TestActor] + actor1.start + val actor2 = actorOf[TestActor] + actor2.start + ActorRegistry.shutdownAll + assert(ActorRegistry.actors.size === 0) + } + + @Test def shouldRemoveUnregisterActorInActorRegistry { + ActorRegistry.shutdownAll + val actor1 = actorOf[TestActor] + actor1.start + val actor2 = actorOf[TestActor] + actor2.start + assert(ActorRegistry.actors.size === 2) + ActorRegistry.unregister(actor1) + assert(ActorRegistry.actors.size === 1) + ActorRegistry.unregister(actor2) + assert(ActorRegistry.actors.size === 0) + } + + @Test def shouldBeAbleToRegisterActorsConcurrently { + ActorRegistry.shutdownAll + + val latch = new CountDownLatch(3) + val barrier = new CyclicBarrier(3) + + def mkTestActor(i:Int) = actorOf( new Actor { + self.id = i.toString + def receive = { case _ => } + }) + + def mkTestActors = for(i <- 1 to 10;j <- 1 to 1000) yield mkTestActor(i) + + def mkThread(actors: Iterable[ActorRef]) = new Thread { + start + override def run { + barrier.await + actors foreach { _.start } + latch.countDown + } + } + + val testActors1 = mkTestActors + val testActors2 = mkTestActors + val testActors3 = mkTestActors + + mkThread(testActors1) + mkThread(testActors2) + mkThread(testActors3) + + assert(latch.await(30,TimeUnit.SECONDS) === true) + + for(i <- 1 to 10) { + assert(ActorRegistry.actorsFor(i.toString).length === 3000) + } + } +} diff --git a/akka-actors/src/test/scala/misc/SchedulerSpec.scala b/akka-actors/src/test/scala/misc/SchedulerSpec.scala new file mode 100644 index 0000000000..16dd21f327 --- /dev/null +++ b/akka-actors/src/test/scala/misc/SchedulerSpec.scala @@ -0,0 +1,127 @@ +package se.scalablesolutions.akka.actor + +import 
org.scalatest.junit.JUnitSuite +import Actor._ +import java.util.concurrent.{CountDownLatch, TimeUnit} +import se.scalablesolutions.akka.config.ScalaConfig._ +import org.multiverse.api.latches.StandardLatch +import org.junit.Test + +class SchedulerSpec extends JUnitSuite { + + def withCleanEndState(action: => Unit) { + action + Scheduler.restart + ActorRegistry.shutdownAll + } + + + @Test def schedulerShouldScheduleMoreThanOnce = withCleanEndState { + + case object Tick + val countDownLatch = new CountDownLatch(3) + val tickActor = actor { + case Tick => countDownLatch.countDown + } + // run every 50 millisec + Scheduler.schedule(tickActor, Tick, 0, 50, TimeUnit.MILLISECONDS) + + // after max 1 second it should be executed at least the 3 times already + assert(countDownLatch.await(1, TimeUnit.SECONDS)) + + val countDownLatch2 = new CountDownLatch(3) + + Scheduler.schedule( () => countDownLatch2.countDown, 0, 50, TimeUnit.MILLISECONDS) + + // after max 1 second it should be executed at least the 3 times already + assert(countDownLatch2.await(1, TimeUnit.SECONDS)) + } + + @Test def schedulerShouldScheduleOnce = withCleanEndState { + case object Tick + val countDownLatch = new CountDownLatch(3) + val tickActor = actor { + case Tick => countDownLatch.countDown + } + // run every 50 millisec + Scheduler.scheduleOnce(tickActor, Tick, 50, TimeUnit.MILLISECONDS) + Scheduler.scheduleOnce( () => countDownLatch.countDown, 50, TimeUnit.MILLISECONDS) + + // after 1 second the wait should fail + assert(countDownLatch.await(1, TimeUnit.SECONDS) == false) + // should still be 1 left + assert(countDownLatch.getCount == 1) + } + + /** + * ticket #372 + */ + @Test def schedulerShouldntCreateActors = withCleanEndState { + object Ping + val ticks = new CountDownLatch(1000) + val actor = actorOf(new Actor { + def receive = { case Ping => ticks.countDown } + }).start + val numActors = ActorRegistry.actors.length + (1 to 1000).foreach( _ => Scheduler.scheduleOnce(actor,Ping,1,TimeUnit.MILLISECONDS) ) + assert(ticks.await(10,TimeUnit.SECONDS)) + assert(ActorRegistry.actors.length === numActors) + } + + /** + * ticket #372 + */ + @Test def schedulerShouldBeCancellable = withCleanEndState { + object Ping + val ticks = new CountDownLatch(1) + + val actor = actorOf(new Actor { + def receive = { case Ping => ticks.countDown } + }).start + + (1 to 10).foreach { i => + val future = Scheduler.scheduleOnce(actor,Ping,1,TimeUnit.SECONDS) + future.cancel(true) + } + assert(ticks.await(3,TimeUnit.SECONDS) == false) //No counting down should've been made + } + + /** + * ticket #307 + */ + @Test def actorRestartShouldPickUpScheduleAgain = withCleanEndState { + + object Ping + object Crash + + val restartLatch = new StandardLatch + val pingLatch = new CountDownLatch(6) + + val actor = actorOf(new Actor { + self.lifeCycle = Some(LifeCycle(Permanent)) + + def receive = { + case Ping => pingLatch.countDown + case Crash => throw new Exception("CRASH") + } + + override def postRestart(reason: Throwable) = restartLatch.open + }) + Supervisor( + SupervisorConfig( + RestartStrategy(AllForOne, 3, 1000, + List(classOf[Exception])), + Supervise( + actor, + LifeCycle(Permanent)) + :: Nil)).start + + Scheduler.schedule(actor, Ping, 500, 500, TimeUnit.MILLISECONDS) + // appx 2 pings before crash + Scheduler.scheduleOnce(actor, Crash, 1000, TimeUnit.MILLISECONDS) + + assert(restartLatch.tryAwait(2, TimeUnit.SECONDS)) + // should be enough time for the ping countdown to recover and reach 6 pings + assert(pingLatch.await(4, TimeUnit.SECONDS)) + 
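The scheduler specs above cover three things: repeated scheduling, one-shot scheduling, and cancelling a scheduled task through the value scheduleOnce returns. A minimal sketch of those calls, assuming a hypothetical Ping message and pinger actor; the signatures are the ones this spec itself uses:

import java.util.concurrent.TimeUnit
import se.scalablesolutions.akka.actor.{Actor, Scheduler}
import se.scalablesolutions.akka.actor.Actor._

case object Ping
val pinger = actorOf(new Actor {             // hypothetical receiver
  def receive = { case Ping => println("ping") }
}).start

// Repeated delivery: first Ping after 0 ms, then every 50 ms.
Scheduler.schedule(pinger, Ping, 0, 50, TimeUnit.MILLISECONDS)

// One-shot delivery; the spec cancels the returned value with cancel(true).
val pending = Scheduler.scheduleOnce(pinger, Ping, 1, TimeUnit.SECONDS)
pending.cancel(true)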
} +} diff --git a/akka-actors/src/test/scala/routing/RoutingSpec.scala b/akka-actors/src/test/scala/routing/RoutingSpec.scala new file mode 100644 index 0000000000..b51fa11a0e --- /dev/null +++ b/akka-actors/src/test/scala/routing/RoutingSpec.scala @@ -0,0 +1,179 @@ +package se.scalablesolutions.akka.actor.routing + +import se.scalablesolutions.akka.actor.Actor +import se.scalablesolutions.akka.actor.Actor._ +import se.scalablesolutions.akka.util.Logging + +import org.scalatest.Suite +import org.junit.runner.RunWith +import org.scalatest.junit.JUnitRunner +import org.scalatest.matchers.MustMatchers +import org.junit.Test + +import java.util.concurrent.atomic.AtomicInteger +import java.util.concurrent.{CountDownLatch, TimeUnit} +import se.scalablesolutions.akka.routing._ + +@RunWith(classOf[JUnitRunner]) +class RoutingSpec extends junit.framework.TestCase with Suite with MustMatchers with Logging { + import Routing._ + + @Test def testDispatcher = { + val (testMsg1,testMsg2,testMsg3,testMsg4) = ("test1","test2","test3","test4") + val targetOk = new AtomicInteger(0) + val t1 = actorOf( new Actor() { + def receive = { + case `testMsg1` => self.reply(3) + case `testMsg2` => self.reply(7) + } + } ).start + + val t2 = actorOf( new Actor() { + def receive = { + case `testMsg3` => self.reply(11) + } + }).start + + val d = dispatcherActor { + case `testMsg1`|`testMsg2` => t1 + case `testMsg3` => t2 + }.start + + val result = for { + a <- (d !! (testMsg1, 5000)).as[Int] + b <- (d !! (testMsg2, 5000)).as[Int] + c <- (d !! (testMsg3, 5000)).as[Int] + } yield a + b + c + + result.isDefined must be (true) + result.get must be(21) + + for(a <- List(t1,t2,d)) a.stop + } + + @Test def testLogger = { + val msgs = new java.util.concurrent.ConcurrentSkipListSet[Any] + val latch = new CountDownLatch(2) + val t1 = actor { + case _ => + } + val l = loggerActor(t1,(x) => { msgs.add(x); latch.countDown }).start + val foo : Any = "foo" + val bar : Any = "bar" + l ! foo + l ! bar + val done = latch.await(5,TimeUnit.SECONDS) + done must be (true) + msgs must ( have size (2) and contain (foo) and contain (bar) ) + t1.stop + l.stop + } + + @Test def testSmallestMailboxFirstDispatcher = { + val t1ProcessedCount = new AtomicInteger(0) + val latch = new CountDownLatch(500) + val t1 = actor { + case x => + Thread.sleep(50) // slow actor + t1ProcessedCount.incrementAndGet + latch.countDown + } + + val t2ProcessedCount = new AtomicInteger(0) + val t2 = actor { + case x => t2ProcessedCount.incrementAndGet + latch.countDown + } + val d = loadBalancerActor(new SmallestMailboxFirstIterator(t1 :: t2 :: Nil)) + for (i <- 1 to 500) d ! i + val done = latch.await(10,TimeUnit.SECONDS) + done must be (true) + t1ProcessedCount.get must be < (t2ProcessedCount.get) // because t1 is much slower and thus has a bigger mailbox all the time + for(a <- List(t1,t2,d)) a.stop + } + + @Test def testListener = { + val latch = new CountDownLatch(2) + val foreachListener = new CountDownLatch(2) + val num = new AtomicInteger(0) + val i = actorOf(new Actor with Listeners { + def receive = listenerManagement orElse { + case "foo" => gossip("bar") + } + }) + i.start + + def newListener = actor { + case "bar" => + num.incrementAndGet + latch.countDown + case "foo" => foreachListener.countDown + } + + val a1 = newListener + val a2 = newListener + val a3 = newListener + + i ! Listen(a1) + i ! Listen(a2) + i ! Listen(a3) + i ! Deafen(a3) + i ! WithListeners(_ ! "foo") + i ! 
"foo" + + val done = latch.await(5,TimeUnit.SECONDS) + done must be (true) + num.get must be (2) + val withListeners = foreachListener.await(5,TimeUnit.SECONDS) + withListeners must be (true) + for(a <- List(i,a1,a2,a3)) a.stop + } + + @Test def testIsDefinedAt = { + import se.scalablesolutions.akka.actor.ActorRef + + val (testMsg1,testMsg2,testMsg3,testMsg4) = ("test1","test2","test3","test4") + + val t1 = actorOf( new Actor() { + def receive = { + case `testMsg1` => self.reply(3) + case `testMsg2` => self.reply(7) + } + } ).start + + val t2 = actorOf( new Actor() { + def receive = { + case `testMsg1` => self.reply(3) + case `testMsg2` => self.reply(7) + } + } ).start + + val t3 = actorOf( new Actor() { + def receive = { + case `testMsg1` => self.reply(3) + case `testMsg2` => self.reply(7) + } + } ).start + + val t4 = actorOf( new Actor() { + def receive = { + case `testMsg1` => self.reply(3) + case `testMsg2` => self.reply(7) + } + } ).start + + val d1 = loadBalancerActor(new SmallestMailboxFirstIterator(t1 :: t2 :: Nil)) + val d2 = loadBalancerActor(new CyclicIterator[ActorRef](t3 :: t4 :: Nil)) + + t1.isDefinedAt(testMsg1) must be (true) + t1.isDefinedAt(testMsg3) must be (false) + t2.isDefinedAt(testMsg1) must be (true) + t2.isDefinedAt(testMsg3) must be (false) + d1.isDefinedAt(testMsg1) must be (true) + d1.isDefinedAt(testMsg3) must be (false) + d2.isDefinedAt(testMsg1) must be (true) + d2.isDefinedAt(testMsg3) must be (false) + + for(a <- List(t1,t2,d1,d2)) a.stop + } +} diff --git a/akka-core/src/test/scala/stm/JavaStmSpec.scala b/akka-actors/src/test/scala/stm/JavaStmSpec.scala similarity index 100% rename from akka-core/src/test/scala/stm/JavaStmSpec.scala rename to akka-actors/src/test/scala/stm/JavaStmSpec.scala diff --git a/akka-core/src/test/scala/stm/RefSpec.scala b/akka-actors/src/test/scala/stm/RefSpec.scala similarity index 100% rename from akka-core/src/test/scala/stm/RefSpec.scala rename to akka-actors/src/test/scala/stm/RefSpec.scala diff --git a/akka-core/src/test/scala/stm/StmSpec.scala b/akka-actors/src/test/scala/stm/StmSpec.scala similarity index 100% rename from akka-core/src/test/scala/stm/StmSpec.scala rename to akka-actors/src/test/scala/stm/StmSpec.scala diff --git a/akka-actors/src/test/scala/ticket/Ticket001Spec.scala b/akka-actors/src/test/scala/ticket/Ticket001Spec.scala new file mode 100644 index 0000000000..b94796d9a3 --- /dev/null +++ b/akka-actors/src/test/scala/ticket/Ticket001Spec.scala @@ -0,0 +1,13 @@ +package se.scalablesolutions.akka.actor.ticket + +import org.scalatest.WordSpec +import org.scalatest.matchers.MustMatchers + +class Ticket001Spec extends WordSpec with MustMatchers { + + "An XXX" should { + "do YYY" in { + 1 must be (1) + } + } +} diff --git a/akka-core/src/main/scala/actor/BootableActorLoaderService.scala b/akka-core/src/main/scala/actor/BootableActorLoaderService.scala deleted file mode 100644 index dfb8541396..0000000000 --- a/akka-core/src/main/scala/actor/BootableActorLoaderService.scala +++ /dev/null @@ -1,101 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor - -import java.io.File -import java.net.{URL, URLClassLoader} -import java.util.jar.JarFile -import java.util.Enumeration - -import se.scalablesolutions.akka.util.{Bootable, Logging} -import se.scalablesolutions.akka.config.Config._ - -class AkkaDeployClassLoader(urls : List[URL], parent : ClassLoader) extends URLClassLoader(urls.toArray.asInstanceOf[Array[URL]],parent) -{ - override def findResources(resource 
: String) = { - val normalResult = super.findResources(resource) - if(normalResult.hasMoreElements) normalResult else findDeployed(resource) - } - - def findDeployed(resource : String) = new Enumeration[URL]{ - private val it = getURLs.flatMap( listClassesInPackage(_,resource) ).iterator - def hasMoreElements = it.hasNext - def nextElement = it.next - } - - def listClassesInPackage(jar : URL, pkg : String) = { - val f = new File(jar.getFile) - val jf = new JarFile(f) - try { - val es = jf.entries - var result = List[URL]() - while(es.hasMoreElements) - { - val e = es.nextElement - if(!e.isDirectory && e.getName.startsWith(pkg) && e.getName.endsWith(".class")) - result ::= new URL("jar:" + f.toURI.toURL + "!/" + e) - } - result - } finally { - jf.close - } - } -} - -/** - * Handles all modules in the deploy directory (load and unload) - */ -trait BootableActorLoaderService extends Bootable with Logging { - - val BOOT_CLASSES = config.getList("akka.boot") - lazy val applicationLoader: Option[ClassLoader] = createApplicationClassLoader - - protected def createApplicationClassLoader : Option[ClassLoader] = { - Some( - if (HOME.isDefined) { - val CONFIG = HOME.get + "/config" - val DEPLOY = HOME.get + "/deploy" - val DEPLOY_DIR = new File(DEPLOY) - if (!DEPLOY_DIR.exists) { - log.error("Could not find a deploy directory at [%s]", DEPLOY) - System.exit(-1) - } - val filesToDeploy = DEPLOY_DIR.listFiles.toArray.toList - .asInstanceOf[List[File]].filter(_.getName.endsWith(".jar")) - var dependencyJars: List[URL] = Nil - filesToDeploy.map { file => - val jarFile = new JarFile(file) - val en = jarFile.entries - while (en.hasMoreElements) { - val name = en.nextElement.getName - if (name.endsWith(".jar")) dependencyJars ::= new File( - String.format("jar:file:%s!/%s", jarFile.getName, name)).toURI.toURL - } - } - val toDeploy = filesToDeploy.map(_.toURI.toURL) - log.info("Deploying applications from [%s]: [%s]", DEPLOY, toDeploy) - log.debug("Loading dependencies [%s]", dependencyJars) - val allJars = toDeploy ::: dependencyJars - - new AkkaDeployClassLoader(allJars,Thread.currentThread.getContextClassLoader) - } else Thread.currentThread.getContextClassLoader) - } - - abstract override def onLoad = { - applicationLoader.foreach(_ => log.info("Creating /deploy class-loader")) - - super.onLoad - - for (loader <- applicationLoader; clazz <- BOOT_CLASSES) { - log.info("Loading boot class [%s]", clazz) - loader.loadClass(clazz).newInstance - } - } - - abstract override def onUnload = { - super.onUnload - ActorRegistry.shutdownAll - } -} diff --git a/akka-core/src/main/scala/actor/SerializationProtocol.scala b/akka-core/src/main/scala/actor/SerializationProtocol.scala deleted file mode 100644 index b3f7caf8c2..0000000000 --- a/akka-core/src/main/scala/actor/SerializationProtocol.scala +++ /dev/null @@ -1,253 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor - -import se.scalablesolutions.akka.config.{AllForOneStrategy, OneForOneStrategy, FaultHandlingStrategy} -import se.scalablesolutions.akka.config.ScalaConfig._ -import se.scalablesolutions.akka.stm.global._ -import se.scalablesolutions.akka.stm.TransactionManagement._ -import se.scalablesolutions.akka.stm.TransactionManagement -import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ -import se.scalablesolutions.akka.remote.{RemoteServer, RemoteRequestProtocolIdFactory, MessageSerializer} -import se.scalablesolutions.akka.serialization.Serializer - -import 
com.google.protobuf.ByteString - -/** - * Type class definition for Actor Serialization - */ -trait FromBinary[T <: Actor] { - def fromBinary(bytes: Array[Byte], act: T): T -} - -trait ToBinary[T <: Actor] { - def toBinary(t: T): Array[Byte] -} - -// client needs to implement Format[] for the respective actor -trait Format[T <: Actor] extends FromBinary[T] with ToBinary[T] - -/** - * A default implementation for a stateless actor - * - * Create a Format object with the client actor as the implementation of the type class - * - *

- * object BinaryFormatMyStatelessActor {
- *   implicit object MyStatelessActorFormat extends StatelessActorFormat[MyStatelessActor]
- * }
- * 
- */
-trait StatelessActorFormat[T <: Actor] extends Format[T] {
-  def fromBinary(bytes: Array[Byte], act: T) = act
-  def toBinary(ac: T) = Array.empty[Byte]
-}
-
-/**
- * A default implementation of the type class for a Format that specifies a serializer
- *
- * Create a Format object with the client actor as the implementation of the type class and
- * a serializer object
- *
- * <pre>
- * object BinaryFormatMyJavaSerializableActor {
- *   implicit object MyJavaSerializableActorFormat extends SerializerBasedActorFormat[MyJavaSerializableActor] {
- *     val serializer = Serializer.Java
- *   }
- * }
- * </pre>
- */ -trait SerializerBasedActorFormat[T <: Actor] extends Format[T] { - val serializer: Serializer - def fromBinary(bytes: Array[Byte], act: T) = serializer.fromBinary(bytes, Some(act.self.actorClass)).asInstanceOf[T] - def toBinary(ac: T) = serializer.toBinary(ac) -} - -/** - * Module for local actor serialization - */ -object ActorSerialization { - - def fromBinary[T <: Actor](bytes: Array[Byte])(implicit format: Format[T]): ActorRef = - fromBinaryToLocalActorRef(bytes, format) - - def toBinary[T <: Actor](a: ActorRef)(implicit format: Format[T]): Array[Byte] = - toSerializedActorRefProtocol(a, format).toByteArray - - // wrapper for implicits to be used by Java - def fromBinaryJ[T <: Actor](bytes: Array[Byte], format: Format[T]): ActorRef = - fromBinary(bytes)(format) - - // wrapper for implicits to be used by Java - def toBinaryJ[T <: Actor](a: ActorRef, format: Format[T]): Array[Byte] = - toBinary(a)(format) - - private def toSerializedActorRefProtocol[T <: Actor](actorRef: ActorRef, format: Format[T]): SerializedActorRefProtocol = { - val lifeCycleProtocol: Option[LifeCycleProtocol] = { - def setScope(builder: LifeCycleProtocol.Builder, scope: Scope) = scope match { - case Permanent => builder.setLifeCycle(LifeCycleType.PERMANENT) - case Temporary => builder.setLifeCycle(LifeCycleType.TEMPORARY) - } - val builder = LifeCycleProtocol.newBuilder - actorRef.lifeCycle match { - case Some(LifeCycle(scope)) => - setScope(builder, scope) - Some(builder.build) - case None => None - } - } - - val originalAddress = AddressProtocol.newBuilder - .setHostname(actorRef.homeAddress.getHostName) - .setPort(actorRef.homeAddress.getPort) - .build - - val builder = SerializedActorRefProtocol.newBuilder - .setUuid(actorRef.uuid) - .setId(actorRef.id) - .setActorClassname(actorRef.actorClass.getName) - .setOriginalAddress(originalAddress) - .setIsTransactor(actorRef.isTransactor) - .setTimeout(actorRef.timeout) - - actorRef.receiveTimeout.foreach(builder.setReceiveTimeout(_)) - builder.setActorInstance(ByteString.copyFrom(format.toBinary(actorRef.actor.asInstanceOf[T]))) - lifeCycleProtocol.foreach(builder.setLifeCycle(_)) - actorRef.supervisor.foreach(s => builder.setSupervisor(RemoteActorSerialization.toRemoteActorRefProtocol(s))) - // FIXME: how to serialize the hotswap PartialFunction ?? 
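The Format type class documented above pairs with the ActorSerialization module defined in this (here removed) file. A minimal sketch of the round trip its scaladoc describes, assuming a hypothetical MyActor and the Java serializer from the example above:

import se.scalablesolutions.akka.actor._
import se.scalablesolutions.akka.actor.Actor._
import se.scalablesolutions.akka.serialization.Serializer

@serializable class MyActor extends Actor { // hypothetical actor; Java serialization needs a Serializable instance
  def receive = { case msg => self.reply(msg) }
}

object BinaryFormatMyActor {
  // Type class instance: serialize the actor instance with plain Java serialization.
  implicit object MyActorFormat extends SerializerBasedActorFormat[MyActor] {
    val serializer = Serializer.Java
  }
}

import BinaryFormatMyActor._
val ref: ActorRef = actorOf[MyActor].start
val bytes = ActorSerialization.toBinary(ref)              // resolves the implicit Format[MyActor]
val copy  = ActorSerialization.fromBinary[MyActor](bytes) // rebuilds a local ActorRef from the bytes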
- //hotswap.foreach(builder.setHotswapStack(_)) - builder.build - } - - private def fromBinaryToLocalActorRef[T <: Actor](bytes: Array[Byte], format: Format[T]): ActorRef = - fromProtobufToLocalActorRef(SerializedActorRefProtocol.newBuilder.mergeFrom(bytes).build, format, None) - - private def fromProtobufToLocalActorRef[T <: Actor]( - protocol: SerializedActorRefProtocol, format: Format[T], loader: Option[ClassLoader]): ActorRef = { - Actor.log.debug("Deserializing SerializedActorRefProtocol to LocalActorRef:\n" + protocol) - - val serializer = - if (format.isInstanceOf[SerializerBasedActorFormat[_]]) - Some(format.asInstanceOf[SerializerBasedActorFormat[_]].serializer) - else None - - val lifeCycle = - if (protocol.hasLifeCycle) { - val lifeCycleProtocol = protocol.getLifeCycle - Some(if (lifeCycleProtocol.getLifeCycle == LifeCycleType.PERMANENT) LifeCycle(Permanent) - else if (lifeCycleProtocol.getLifeCycle == LifeCycleType.TEMPORARY) LifeCycle(Temporary) - else throw new IllegalActorStateException("LifeCycle type is not valid: " + lifeCycleProtocol.getLifeCycle)) - } else None - - val supervisor = - if (protocol.hasSupervisor) - Some(RemoteActorSerialization.fromProtobufToRemoteActorRef(protocol.getSupervisor, loader)) - else None - - val hotswap = - if (serializer.isDefined && protocol.hasHotswapStack) Some(serializer.get - .fromBinary(protocol.getHotswapStack.toByteArray, Some(classOf[PartialFunction[Any, Unit]])) - .asInstanceOf[PartialFunction[Any, Unit]]) - else None - - val ar = new LocalActorRef( - protocol.getUuid, - protocol.getId, - protocol.getActorClassname, - protocol.getActorInstance.toByteArray, - protocol.getOriginalAddress.getHostname, - protocol.getOriginalAddress.getPort, - if (protocol.hasIsTransactor) protocol.getIsTransactor else false, - if (protocol.hasTimeout) protocol.getTimeout else Actor.TIMEOUT, - if (protocol.hasReceiveTimeout) Some(protocol.getReceiveTimeout) else None, - lifeCycle, - supervisor, - hotswap, - loader.getOrElse(getClass.getClassLoader), // TODO: should we fall back to getClass.getClassLoader? - protocol.getMessagesList.toArray.toList.asInstanceOf[List[RemoteRequestProtocol]], format) - - if (format.isInstanceOf[SerializerBasedActorFormat[_]] == false) - format.fromBinary(protocol.getActorInstance.toByteArray, ar.actor.asInstanceOf[T]) - ar - } -} - -object RemoteActorSerialization { - /** - * Deserializes a byte array (Array[Byte]) into an RemoteActorRef instance. - */ - def fromBinaryToRemoteActorRef(bytes: Array[Byte]): ActorRef = - fromProtobufToRemoteActorRef(RemoteActorRefProtocol.newBuilder.mergeFrom(bytes).build, None) - - /** - * Deserializes a byte array (Array[Byte]) into an RemoteActorRef instance. - */ - def fromBinaryToRemoteActorRef(bytes: Array[Byte], loader: ClassLoader): ActorRef = - fromProtobufToRemoteActorRef(RemoteActorRefProtocol.newBuilder.mergeFrom(bytes).build, Some(loader)) - - /** - * Deserializes a RemoteActorRefProtocol Protocol Buffers (protobuf) Message into an RemoteActorRef instance. - */ - private[akka] def fromProtobufToRemoteActorRef(protocol: RemoteActorRefProtocol, loader: Option[ClassLoader]): ActorRef = { - Actor.log.debug("Deserializing RemoteActorRefProtocol to RemoteActorRef:\n" + protocol) - RemoteActorRef( - protocol.getUuid, - protocol.getActorClassname, - protocol.getHomeAddress.getHostname, - protocol.getHomeAddress.getPort, - protocol.getTimeout, - loader) - } - - /** - * Serializes the ActorRef instance into a Protocol Buffers (protobuf) Message. 
- */ - def toRemoteActorRefProtocol(ar: ActorRef): RemoteActorRefProtocol = { - import ar._ - val host = homeAddress.getHostName - val port = homeAddress.getPort - - if (!registeredInRemoteNodeDuringSerialization) { - Actor.log.debug("Register serialized Actor [%s] as remote @ [%s:%s]", actorClass.getName, host, port) - RemoteServer.getOrCreateServer(homeAddress) - RemoteServer.registerActor(homeAddress, uuid, ar) - registeredInRemoteNodeDuringSerialization = true - } - - RemoteActorRefProtocol.newBuilder - .setUuid(uuid) - .setActorClassname(actorClass.getName) - .setHomeAddress(AddressProtocol.newBuilder.setHostname(host).setPort(port).build) - .setTimeout(timeout) - .build - } - - def createRemoteRequestProtocolBuilder(actorRef: ActorRef, message: Any, isOneWay: Boolean, senderOption: Option[ActorRef]): - RemoteRequestProtocol.Builder = { - import actorRef._ - - val actorInfo = ActorInfoProtocol.newBuilder - .setUuid(uuid) - .setTarget(actorClassName) - .setTimeout(timeout) - .setActorType(ActorType.SCALA_ACTOR) - .build - - val request = RemoteRequestProtocol.newBuilder - .setId(RemoteRequestProtocolIdFactory.nextId) - .setMessage(MessageSerializer.serialize(message)) - .setActorInfo(actorInfo) - .setIsOneWay(isOneWay) - - val id = registerSupervisorAsRemoteActor - if (id.isDefined) request.setSupervisorUuid(id.get) - - senderOption.foreach { sender => - RemoteServer.getOrCreateServer(sender.homeAddress).register(sender.uuid, sender) - request.setSender(toRemoteActorRefProtocol(sender)) - } - request - } -} diff --git a/akka-core/src/main/scala/remote/MessageSerializer.scala b/akka-core/src/main/scala/remote/MessageSerializer.scala index 8ef6f5d590..49f38524f9 100644 --- a/akka-core/src/main/scala/remote/MessageSerializer.scala +++ b/akka-core/src/main/scala/remote/MessageSerializer.scala @@ -6,9 +6,9 @@ package se.scalablesolutions.akka.remote import se.scalablesolutions.akka.serialization.{Serializer, Serializable} import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ +import se.scalablesolutions.akka.util._ import com.google.protobuf.{Message, ByteString} -import se.scalablesolutions.akka.util._ object MessageSerializer extends Logging { private var SERIALIZER_JAVA: Serializer.Java = Serializer.Java diff --git a/akka-core/src/main/scala/remote/RemoteServer.scala b/akka-core/src/main/scala/remote/RemoteServer.scala index 5f3c12d5a4..9c8f7454fa 100644 --- a/akka-core/src/main/scala/remote/RemoteServer.scala +++ b/akka-core/src/main/scala/remote/RemoteServer.scala @@ -63,7 +63,7 @@ object RemoteNode extends RemoteServer */ object RemoteServer { val HOSTNAME = config.getString("akka.remote.server.hostname", "localhost") - val PORT = config.getInt("akka.remote.server.port", 9999) + val PORT = config.getInt("akka.remote.server.port", 9999) val CONNECTION_TIMEOUT_MILLIS = Duration(config.getInt("akka.remote.server.connection-timeout", 1), TIME_UNIT) diff --git a/akka-core/src/main/scala/serialization/Compression.scala b/akka-core/src/main/scala/serialization/Compression.scala index 5b8df9ada7..bbb8d95421 100644 --- a/akka-core/src/main/scala/serialization/Compression.scala +++ b/akka-core/src/main/scala/serialization/Compression.scala @@ -14,8 +14,8 @@ object Compression { */ object LZF { import voldemort.store.compress.lzf._ - def compress(bytes: Array[Byte]): Array[Byte] = LZFEncoder.encode(bytes) - def uncompress(bytes: Array[Byte]): Array[Byte] = LZFDecoder.decode(bytes) + def compress(bytes: Array[Byte]): Array[Byte] = LZFEncoder encode bytes + def uncompress(bytes: 
Array[Byte]): Array[Byte] = LZFDecoder decode bytes } } diff --git a/akka-core/src/test/scala/TestClasses.bak b/akka-core/src/test/scala/TestClasses.bak deleted file mode 100644 index 5a0ec08c19..0000000000 --- a/akka-core/src/test/scala/TestClasses.bak +++ /dev/null @@ -1,102 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor - -import se.scalablesolutions.akka.serialization.Serializable -import se.scalablesolutions.akka.actor.annotation.transactionrequired -import se.scalablesolutions.akka.actor.annotation.prerestart -import se.scalablesolutions.akka.actor.annotation.postrestart -import se.scalablesolutions.akka.actor.annotation.inittransactionalstate -import se.scalablesolutions.akka.actor.annotation.oneway -import se.scalablesolutions.akka.stm._ - -import com.google.inject.Inject - -trait Bar { - @oneway - def bar(msg: String): String - def getExt: Ext -} - -class BarImpl extends Bar { - @Inject private var ext: Ext = _ - def getExt: Ext = ext - def bar(msg: String) = msg -} - -trait Ext -class ExtImpl extends Ext - -class Foo extends Serializable.JavaJSON { - @Inject - private var bar: Bar = _ - def body = this - def getBar = bar - def foo(msg: String): String = msg + "_foo " - def bar(msg: String): String = bar.bar(msg) - def longRunning = { - Thread.sleep(10000) - "test" - } - def throwsException: String = { - if (true) throw new RuntimeException("Expected exception; to test fault-tolerance") - "test" - } -} - -@serializable class InMemFailer { - def fail = throw new RuntimeException("Expected exception; to test fault-tolerance") -} - -@transactionrequired -class InMemStateful { - private lazy val mapState = TransactionalState.newMap[String, String] - private lazy val vectorState = TransactionalState.newVector[String] - private lazy val refState = TransactionalState.newRef[String] - - def getMapState(key: String): String = mapState.get(key).get - def getVectorState: String = vectorState.last - def getRefState: String = refState.get.get - def setMapState(key: String, msg: String): Unit = mapState.put(key, msg) - def setVectorState(msg: String): Unit = vectorState.add(msg) - def setRefState(msg: String): Unit = refState.swap(msg) - def success(key: String, msg: String): Unit = { - mapState.put(key, msg) - vectorState.add(msg) - refState.swap(msg) - } - - def success(key: String, msg: String, nested: InMemStatefulNested): Unit = { - mapState.put(key, msg) - vectorState.add(msg) - refState.swap(msg) - nested.success(key, msg) - } - - def failure(key: String, msg: String, failer: InMemFailer): String = { - mapState.put(key, msg) - vectorState.add(msg) - refState.swap(msg) - failer.fail - msg - } - - def failure(key: String, msg: String, nested: InMemStatefulNested, failer: InMemFailer): String = { - mapState.put(key, msg) - vectorState.add(msg) - refState.swap(msg) - nested.failure(key, msg, failer) - msg - } - - def thisMethodHangs(key: String, msg: String, failer: InMemFailer) = setMapState(key, msg) - - @prerestart def preRestart = println("################ PRE RESTART") - @postrestart def postRestart = println("################ POST RESTART") -} - -@transactionrequired -class InMemStatefulNested extends InMemStateful - diff --git a/akka-core/src/main/java/se/scalablesolutions/akka/config/DependencyBinding.java b/akka-typed-actors/src/main/java/se/scalablesolutions/akka/config/DependencyBinding.java similarity index 100% rename from akka-core/src/main/java/se/scalablesolutions/akka/config/DependencyBinding.java rename to 
akka-typed-actors/src/main/java/se/scalablesolutions/akka/config/DependencyBinding.java diff --git a/akka-core/src/main/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java b/akka-typed-actors/src/main/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java similarity index 100% rename from akka-core/src/main/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java rename to akka-typed-actors/src/main/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java diff --git a/akka-typed-actors/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java b/akka-typed-actors/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java new file mode 100644 index 0000000000..0ab1a0aa10 --- /dev/null +++ b/akka-typed-actors/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java @@ -0,0 +1,5190 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: RemoteProtocol.proto + +package se.scalablesolutions.akka.remote.protocol; + +public final class RemoteProtocol { + private RemoteProtocol() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public enum ActorType + implements com.google.protobuf.ProtocolMessageEnum { + SCALA_ACTOR(0, 1), + JAVA_ACTOR(1, 2), + TYPED_ACTOR(2, 3), + ; + + + public final int getNumber() { return value; } + + public static ActorType valueOf(int value) { + switch (value) { + case 1: return SCALA_ACTOR; + case 2: return JAVA_ACTOR; + case 3: return TYPED_ACTOR; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public ActorType findValueByNumber(int number) { + return ActorType.valueOf(number) + ; } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.getDescriptor().getEnumTypes().get(0); + } + + private static final ActorType[] VALUES = { + SCALA_ACTOR, JAVA_ACTOR, TYPED_ACTOR, + }; + public static ActorType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + private final int index; + private final int value; + private ActorType(int index, int value) { + this.index = index; + this.value = value; + } + + static { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.getDescriptor(); + } + + // @@protoc_insertion_point(enum_scope:ActorType) + } + + public enum SerializationSchemeType + implements com.google.protobuf.ProtocolMessageEnum { + JAVA(0, 1), + SBINARY(1, 2), + SCALA_JSON(2, 3), + JAVA_JSON(3, 4), + PROTOBUF(4, 5), + ; + + + public final int getNumber() { return value; } + + public static SerializationSchemeType valueOf(int value) { + switch (value) { + case 1: return JAVA; + case 2: return SBINARY; + case 3: return SCALA_JSON; + case 4: return JAVA_JSON; + case 5: return PROTOBUF; + default: return null; + } + } + + 
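The generated message classes in this file are the ones RemoteActorSerialization drives from Scala. A minimal sketch of building and re-parsing a RemoteActorRefProtocol with made-up values; the setters and the three required fields match the generated builder shown below:

import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._

// Made-up address and actor values, purely illustrative.
val address = AddressProtocol.newBuilder
  .setHostname("localhost")
  .setPort(9999)
  .build

val refProtocol = RemoteActorRefProtocol.newBuilder
  .setUuid("some-uuid")                      // required
  .setActorClassname("com.example.MyActor")  // required
  .setHomeAddress(address)                   // required
  .setTimeout(5000L)                         // optional
  .build

val bytes    = refProtocol.toByteArray
val reparsed = RemoteActorRefProtocol.parseFrom(bytes)  // round trip through the generated parser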
public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public SerializationSchemeType findValueByNumber(int number) { + return SerializationSchemeType.valueOf(number) + ; } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.getDescriptor().getEnumTypes().get(1); + } + + private static final SerializationSchemeType[] VALUES = { + JAVA, SBINARY, SCALA_JSON, JAVA_JSON, PROTOBUF, + }; + public static SerializationSchemeType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + private final int index; + private final int value; + private SerializationSchemeType(int index, int value) { + this.index = index; + this.value = value; + } + + static { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.getDescriptor(); + } + + // @@protoc_insertion_point(enum_scope:SerializationSchemeType) + } + + public enum LifeCycleType + implements com.google.protobuf.ProtocolMessageEnum { + PERMANENT(0, 1), + TEMPORARY(1, 2), + ; + + + public final int getNumber() { return value; } + + public static LifeCycleType valueOf(int value) { + switch (value) { + case 1: return PERMANENT; + case 2: return TEMPORARY; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public LifeCycleType findValueByNumber(int number) { + return LifeCycleType.valueOf(number) + ; } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.getDescriptor().getEnumTypes().get(2); + } + + private static final LifeCycleType[] VALUES = { + PERMANENT, TEMPORARY, + }; + public static LifeCycleType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + private final int index; + private final int value; + private LifeCycleType(int index, int value) { + this.index = index; + this.value = value; + } + + static { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.getDescriptor(); + } + + // @@protoc_insertion_point(enum_scope:LifeCycleType) + } + + public static final class RemoteActorRefProtocol extends + com.google.protobuf.GeneratedMessage { + // Use RemoteActorRefProtocol.newBuilder() to construct. 
+ private RemoteActorRefProtocol() { + initFields(); + } + private RemoteActorRefProtocol(boolean noInit) {} + + private static final RemoteActorRefProtocol defaultInstance; + public static RemoteActorRefProtocol getDefaultInstance() { + return defaultInstance; + } + + public RemoteActorRefProtocol getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_RemoteActorRefProtocol_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_RemoteActorRefProtocol_fieldAccessorTable; + } + + // required string uuid = 1; + public static final int UUID_FIELD_NUMBER = 1; + private boolean hasUuid; + private java.lang.String uuid_ = ""; + public boolean hasUuid() { return hasUuid; } + public java.lang.String getUuid() { return uuid_; } + + // required string actorClassname = 2; + public static final int ACTORCLASSNAME_FIELD_NUMBER = 2; + private boolean hasActorClassname; + private java.lang.String actorClassname_ = ""; + public boolean hasActorClassname() { return hasActorClassname; } + public java.lang.String getActorClassname() { return actorClassname_; } + + // required .AddressProtocol homeAddress = 3; + public static final int HOMEADDRESS_FIELD_NUMBER = 3; + private boolean hasHomeAddress; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol homeAddress_; + public boolean hasHomeAddress() { return hasHomeAddress; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol getHomeAddress() { return homeAddress_; } + + // optional uint64 timeout = 4; + public static final int TIMEOUT_FIELD_NUMBER = 4; + private boolean hasTimeout; + private long timeout_ = 0L; + public boolean hasTimeout() { return hasTimeout; } + public long getTimeout() { return timeout_; } + + private void initFields() { + homeAddress_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.getDefaultInstance(); + } + public final boolean isInitialized() { + if (!hasUuid) return false; + if (!hasActorClassname) return false; + if (!hasHomeAddress) return false; + if (!getHomeAddress().isInitialized()) return false; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (hasUuid()) { + output.writeString(1, getUuid()); + } + if (hasActorClassname()) { + output.writeString(2, getActorClassname()); + } + if (hasHomeAddress()) { + output.writeMessage(3, getHomeAddress()); + } + if (hasTimeout()) { + output.writeUInt64(4, getTimeout()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (hasUuid()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(1, getUuid()); + } + if (hasActorClassname()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(2, getActorClassname()); + } + if (hasHomeAddress()) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getHomeAddress()); + } + if (hasTimeout()) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(4, getTimeout()); + } + size += getUnknownFields().getSerializedSize(); + 
memoizedSerializedSize = size; + return size; + } + + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder { + private 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol result; + + // Construct using se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.newBuilder() + private Builder() {} + + private static Builder create() { + Builder builder = new Builder(); + builder.result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol(); + return builder; + } + + protected se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol internalGetResult() { + return result; + } + + public Builder clear() { + if (result == null) { + throw new IllegalStateException( + "Cannot call clear() after build()."); + } + result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol(); + return this; + } + + public Builder clone() { + return create().mergeFrom(result); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.getDescriptor(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol getDefaultInstanceForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.getDefaultInstance(); + } + + public boolean isInitialized() { + return result.isInitialized(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol build() { + if (result != null && !isInitialized()) { + throw newUninitializedMessageException(result); + } + return buildPartial(); + } + + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + if (!isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return buildPartial(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol buildPartial() { + if (result == null) { + throw new IllegalStateException( + "build() has already been called on this Builder."); + } + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol returnMe = result; + result = null; + return returnMe; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol) { + return mergeFrom((se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol other) { + if (other == se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.getDefaultInstance()) return this; + if (other.hasUuid()) { + setUuid(other.getUuid()); + } + if (other.hasActorClassname()) { + setActorClassname(other.getActorClassname()); + } + if (other.hasHomeAddress()) { + mergeHomeAddress(other.getHomeAddress()); + } + if (other.hasTimeout()) { + setTimeout(other.getTimeout()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); 
+ while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + return this; + } + break; + } + case 10: { + setUuid(input.readString()); + break; + } + case 18: { + setActorClassname(input.readString()); + break; + } + case 26: { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.newBuilder(); + if (hasHomeAddress()) { + subBuilder.mergeFrom(getHomeAddress()); + } + input.readMessage(subBuilder, extensionRegistry); + setHomeAddress(subBuilder.buildPartial()); + break; + } + case 32: { + setTimeout(input.readUInt64()); + break; + } + } + } + } + + + // required string uuid = 1; + public boolean hasUuid() { + return result.hasUuid(); + } + public java.lang.String getUuid() { + return result.getUuid(); + } + public Builder setUuid(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasUuid = true; + result.uuid_ = value; + return this; + } + public Builder clearUuid() { + result.hasUuid = false; + result.uuid_ = getDefaultInstance().getUuid(); + return this; + } + + // required string actorClassname = 2; + public boolean hasActorClassname() { + return result.hasActorClassname(); + } + public java.lang.String getActorClassname() { + return result.getActorClassname(); + } + public Builder setActorClassname(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasActorClassname = true; + result.actorClassname_ = value; + return this; + } + public Builder clearActorClassname() { + result.hasActorClassname = false; + result.actorClassname_ = getDefaultInstance().getActorClassname(); + return this; + } + + // required .AddressProtocol homeAddress = 3; + public boolean hasHomeAddress() { + return result.hasHomeAddress(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol getHomeAddress() { + return result.getHomeAddress(); + } + public Builder setHomeAddress(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasHomeAddress = true; + result.homeAddress_ = value; + return this; + } + public Builder setHomeAddress(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.Builder builderForValue) { + result.hasHomeAddress = true; + result.homeAddress_ = builderForValue.build(); + return this; + } + public Builder mergeHomeAddress(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol value) { + if (result.hasHomeAddress() && + result.homeAddress_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.getDefaultInstance()) { + result.homeAddress_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.newBuilder(result.homeAddress_).mergeFrom(value).buildPartial(); + } else { + result.homeAddress_ = value; + } + result.hasHomeAddress = true; + return this; + } + public Builder clearHomeAddress() { + result.hasHomeAddress = false; + result.homeAddress_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.getDefaultInstance(); + return this; + } + + // optional uint64 timeout = 4; + public boolean hasTimeout() { + return result.hasTimeout(); + } + public long 
getTimeout() { + return result.getTimeout(); + } + public Builder setTimeout(long value) { + result.hasTimeout = true; + result.timeout_ = value; + return this; + } + public Builder clearTimeout() { + result.hasTimeout = false; + result.timeout_ = 0L; + return this; + } + + // @@protoc_insertion_point(builder_scope:RemoteActorRefProtocol) + } + + static { + defaultInstance = new RemoteActorRefProtocol(true); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internalForceInit(); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RemoteActorRefProtocol) + } + + public static final class SerializedActorRefProtocol extends + com.google.protobuf.GeneratedMessage { + // Use SerializedActorRefProtocol.newBuilder() to construct. + private SerializedActorRefProtocol() { + initFields(); + } + private SerializedActorRefProtocol(boolean noInit) {} + + private static final SerializedActorRefProtocol defaultInstance; + public static SerializedActorRefProtocol getDefaultInstance() { + return defaultInstance; + } + + public SerializedActorRefProtocol getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_SerializedActorRefProtocol_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_SerializedActorRefProtocol_fieldAccessorTable; + } + + // required string uuid = 1; + public static final int UUID_FIELD_NUMBER = 1; + private boolean hasUuid; + private java.lang.String uuid_ = ""; + public boolean hasUuid() { return hasUuid; } + public java.lang.String getUuid() { return uuid_; } + + // required string id = 2; + public static final int ID_FIELD_NUMBER = 2; + private boolean hasId; + private java.lang.String id_ = ""; + public boolean hasId() { return hasId; } + public java.lang.String getId() { return id_; } + + // required string actorClassname = 3; + public static final int ACTORCLASSNAME_FIELD_NUMBER = 3; + private boolean hasActorClassname; + private java.lang.String actorClassname_ = ""; + public boolean hasActorClassname() { return hasActorClassname; } + public java.lang.String getActorClassname() { return actorClassname_; } + + // required .AddressProtocol originalAddress = 4; + public static final int ORIGINALADDRESS_FIELD_NUMBER = 4; + private boolean hasOriginalAddress; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol originalAddress_; + public boolean hasOriginalAddress() { return hasOriginalAddress; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol getOriginalAddress() { return originalAddress_; } + + // optional bytes actorInstance = 5; + public static final int ACTORINSTANCE_FIELD_NUMBER = 5; + private boolean hasActorInstance; + private com.google.protobuf.ByteString actorInstance_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasActorInstance() { return hasActorInstance; } + public com.google.protobuf.ByteString getActorInstance() { return actorInstance_; } + + // optional string serializerClassname = 6; + public static final int SERIALIZERCLASSNAME_FIELD_NUMBER = 6; + private boolean hasSerializerClassname; + private java.lang.String serializerClassname_ = ""; + public boolean hasSerializerClassname() { return hasSerializerClassname; } + public 
java.lang.String getSerializerClassname() { return serializerClassname_; } + + // optional bool isTransactor = 7; + public static final int ISTRANSACTOR_FIELD_NUMBER = 7; + private boolean hasIsTransactor; + private boolean isTransactor_ = false; + public boolean hasIsTransactor() { return hasIsTransactor; } + public boolean getIsTransactor() { return isTransactor_; } + + // optional uint64 timeout = 8; + public static final int TIMEOUT_FIELD_NUMBER = 8; + private boolean hasTimeout; + private long timeout_ = 0L; + public boolean hasTimeout() { return hasTimeout; } + public long getTimeout() { return timeout_; } + + // optional uint64 receiveTimeout = 9; + public static final int RECEIVETIMEOUT_FIELD_NUMBER = 9; + private boolean hasReceiveTimeout; + private long receiveTimeout_ = 0L; + public boolean hasReceiveTimeout() { return hasReceiveTimeout; } + public long getReceiveTimeout() { return receiveTimeout_; } + + // optional .LifeCycleProtocol lifeCycle = 10; + public static final int LIFECYCLE_FIELD_NUMBER = 10; + private boolean hasLifeCycle; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol lifeCycle_; + public boolean hasLifeCycle() { return hasLifeCycle; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol getLifeCycle() { return lifeCycle_; } + + // optional .RemoteActorRefProtocol supervisor = 11; + public static final int SUPERVISOR_FIELD_NUMBER = 11; + private boolean hasSupervisor; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol supervisor_; + public boolean hasSupervisor() { return hasSupervisor; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol getSupervisor() { return supervisor_; } + + // optional bytes hotswapStack = 12; + public static final int HOTSWAPSTACK_FIELD_NUMBER = 12; + private boolean hasHotswapStack; + private com.google.protobuf.ByteString hotswapStack_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasHotswapStack() { return hasHotswapStack; } + public com.google.protobuf.ByteString getHotswapStack() { return hotswapStack_; } + + // repeated .RemoteRequestProtocol messages = 13; + public static final int MESSAGES_FIELD_NUMBER = 13; + private java.util.List messages_ = + java.util.Collections.emptyList(); + public java.util.List getMessagesList() { + return messages_; + } + public int getMessagesCount() { return messages_.size(); } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol getMessages(int index) { + return messages_.get(index); + } + + private void initFields() { + originalAddress_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.getDefaultInstance(); + lifeCycle_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.getDefaultInstance(); + supervisor_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.getDefaultInstance(); + } + public final boolean isInitialized() { + if (!hasUuid) return false; + if (!hasId) return false; + if (!hasActorClassname) return false; + if (!hasOriginalAddress) return false; + if (!getOriginalAddress().isInitialized()) return false; + if (hasLifeCycle()) { + if (!getLifeCycle().isInitialized()) return false; + } + if (hasSupervisor()) { + if (!getSupervisor().isInitialized()) return false; + } + for (se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol element : getMessagesList()) { + if (!element.isInitialized()) return 
false; + } + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (hasUuid()) { + output.writeString(1, getUuid()); + } + if (hasId()) { + output.writeString(2, getId()); + } + if (hasActorClassname()) { + output.writeString(3, getActorClassname()); + } + if (hasOriginalAddress()) { + output.writeMessage(4, getOriginalAddress()); + } + if (hasActorInstance()) { + output.writeBytes(5, getActorInstance()); + } + if (hasSerializerClassname()) { + output.writeString(6, getSerializerClassname()); + } + if (hasIsTransactor()) { + output.writeBool(7, getIsTransactor()); + } + if (hasTimeout()) { + output.writeUInt64(8, getTimeout()); + } + if (hasReceiveTimeout()) { + output.writeUInt64(9, getReceiveTimeout()); + } + if (hasLifeCycle()) { + output.writeMessage(10, getLifeCycle()); + } + if (hasSupervisor()) { + output.writeMessage(11, getSupervisor()); + } + if (hasHotswapStack()) { + output.writeBytes(12, getHotswapStack()); + } + for (se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol element : getMessagesList()) { + output.writeMessage(13, element); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (hasUuid()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(1, getUuid()); + } + if (hasId()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(2, getId()); + } + if (hasActorClassname()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(3, getActorClassname()); + } + if (hasOriginalAddress()) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, getOriginalAddress()); + } + if (hasActorInstance()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(5, getActorInstance()); + } + if (hasSerializerClassname()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(6, getSerializerClassname()); + } + if (hasIsTransactor()) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(7, getIsTransactor()); + } + if (hasTimeout()) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(8, getTimeout()); + } + if (hasReceiveTimeout()) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(9, getReceiveTimeout()); + } + if (hasLifeCycle()) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(10, getLifeCycle()); + } + if (hasSupervisor()) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(11, getSupervisor()); + } + if (hasHotswapStack()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(12, getHotswapStack()); + } + for (se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol element : getMessagesList()) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(13, element); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol parseFrom( + 
com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder { + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol result; + + // Construct using se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol.newBuilder() + private Builder() {} + + private static Builder create() { + Builder builder = new Builder(); + builder.result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol(); + return 
builder; + } + + protected se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol internalGetResult() { + return result; + } + + public Builder clear() { + if (result == null) { + throw new IllegalStateException( + "Cannot call clear() after build()."); + } + result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol(); + return this; + } + + public Builder clone() { + return create().mergeFrom(result); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol.getDescriptor(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol getDefaultInstanceForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol.getDefaultInstance(); + } + + public boolean isInitialized() { + return result.isInitialized(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol build() { + if (result != null && !isInitialized()) { + throw newUninitializedMessageException(result); + } + return buildPartial(); + } + + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + if (!isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return buildPartial(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol buildPartial() { + if (result == null) { + throw new IllegalStateException( + "build() has already been called on this Builder."); + } + if (result.messages_ != java.util.Collections.EMPTY_LIST) { + result.messages_ = + java.util.Collections.unmodifiableList(result.messages_); + } + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol returnMe = result; + result = null; + return returnMe; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol) { + return mergeFrom((se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol other) { + if (other == se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol.getDefaultInstance()) return this; + if (other.hasUuid()) { + setUuid(other.getUuid()); + } + if (other.hasId()) { + setId(other.getId()); + } + if (other.hasActorClassname()) { + setActorClassname(other.getActorClassname()); + } + if (other.hasOriginalAddress()) { + mergeOriginalAddress(other.getOriginalAddress()); + } + if (other.hasActorInstance()) { + setActorInstance(other.getActorInstance()); + } + if (other.hasSerializerClassname()) { + setSerializerClassname(other.getSerializerClassname()); + } + if (other.hasIsTransactor()) { + setIsTransactor(other.getIsTransactor()); + } + if (other.hasTimeout()) { + setTimeout(other.getTimeout()); + } + if (other.hasReceiveTimeout()) { + setReceiveTimeout(other.getReceiveTimeout()); + } + if (other.hasLifeCycle()) { + mergeLifeCycle(other.getLifeCycle()); + } + if (other.hasSupervisor()) { + mergeSupervisor(other.getSupervisor()); + } + if (other.hasHotswapStack()) 
{ + setHotswapStack(other.getHotswapStack()); + } + if (!other.messages_.isEmpty()) { + if (result.messages_.isEmpty()) { + result.messages_ = new java.util.ArrayList(); + } + result.messages_.addAll(other.messages_); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + return this; + } + break; + } + case 10: { + setUuid(input.readString()); + break; + } + case 18: { + setId(input.readString()); + break; + } + case 26: { + setActorClassname(input.readString()); + break; + } + case 34: { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.newBuilder(); + if (hasOriginalAddress()) { + subBuilder.mergeFrom(getOriginalAddress()); + } + input.readMessage(subBuilder, extensionRegistry); + setOriginalAddress(subBuilder.buildPartial()); + break; + } + case 42: { + setActorInstance(input.readBytes()); + break; + } + case 50: { + setSerializerClassname(input.readString()); + break; + } + case 56: { + setIsTransactor(input.readBool()); + break; + } + case 64: { + setTimeout(input.readUInt64()); + break; + } + case 72: { + setReceiveTimeout(input.readUInt64()); + break; + } + case 82: { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.newBuilder(); + if (hasLifeCycle()) { + subBuilder.mergeFrom(getLifeCycle()); + } + input.readMessage(subBuilder, extensionRegistry); + setLifeCycle(subBuilder.buildPartial()); + break; + } + case 90: { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.newBuilder(); + if (hasSupervisor()) { + subBuilder.mergeFrom(getSupervisor()); + } + input.readMessage(subBuilder, extensionRegistry); + setSupervisor(subBuilder.buildPartial()); + break; + } + case 98: { + setHotswapStack(input.readBytes()); + break; + } + case 106: { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addMessages(subBuilder.buildPartial()); + break; + } + } + } + } + + + // required string uuid = 1; + public boolean hasUuid() { + return result.hasUuid(); + } + public java.lang.String getUuid() { + return result.getUuid(); + } + public Builder setUuid(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasUuid = true; + result.uuid_ = value; + return this; + } + public Builder clearUuid() { + result.hasUuid = false; + result.uuid_ = getDefaultInstance().getUuid(); + return this; + } + + // required string id = 2; + public boolean hasId() { + return result.hasId(); + } + public java.lang.String getId() 
{ + return result.getId(); + } + public Builder setId(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasId = true; + result.id_ = value; + return this; + } + public Builder clearId() { + result.hasId = false; + result.id_ = getDefaultInstance().getId(); + return this; + } + + // required string actorClassname = 3; + public boolean hasActorClassname() { + return result.hasActorClassname(); + } + public java.lang.String getActorClassname() { + return result.getActorClassname(); + } + public Builder setActorClassname(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasActorClassname = true; + result.actorClassname_ = value; + return this; + } + public Builder clearActorClassname() { + result.hasActorClassname = false; + result.actorClassname_ = getDefaultInstance().getActorClassname(); + return this; + } + + // required .AddressProtocol originalAddress = 4; + public boolean hasOriginalAddress() { + return result.hasOriginalAddress(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol getOriginalAddress() { + return result.getOriginalAddress(); + } + public Builder setOriginalAddress(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasOriginalAddress = true; + result.originalAddress_ = value; + return this; + } + public Builder setOriginalAddress(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.Builder builderForValue) { + result.hasOriginalAddress = true; + result.originalAddress_ = builderForValue.build(); + return this; + } + public Builder mergeOriginalAddress(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol value) { + if (result.hasOriginalAddress() && + result.originalAddress_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.getDefaultInstance()) { + result.originalAddress_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.newBuilder(result.originalAddress_).mergeFrom(value).buildPartial(); + } else { + result.originalAddress_ = value; + } + result.hasOriginalAddress = true; + return this; + } + public Builder clearOriginalAddress() { + result.hasOriginalAddress = false; + result.originalAddress_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.getDefaultInstance(); + return this; + } + + // optional bytes actorInstance = 5; + public boolean hasActorInstance() { + return result.hasActorInstance(); + } + public com.google.protobuf.ByteString getActorInstance() { + return result.getActorInstance(); + } + public Builder setActorInstance(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasActorInstance = true; + result.actorInstance_ = value; + return this; + } + public Builder clearActorInstance() { + result.hasActorInstance = false; + result.actorInstance_ = getDefaultInstance().getActorInstance(); + return this; + } + + // optional string serializerClassname = 6; + public boolean hasSerializerClassname() { + return result.hasSerializerClassname(); + } + public java.lang.String getSerializerClassname() { + return result.getSerializerClassname(); + } + public Builder setSerializerClassname(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasSerializerClassname = true; + result.serializerClassname_ = value; + return 
this; + } + public Builder clearSerializerClassname() { + result.hasSerializerClassname = false; + result.serializerClassname_ = getDefaultInstance().getSerializerClassname(); + return this; + } + + // optional bool isTransactor = 7; + public boolean hasIsTransactor() { + return result.hasIsTransactor(); + } + public boolean getIsTransactor() { + return result.getIsTransactor(); + } + public Builder setIsTransactor(boolean value) { + result.hasIsTransactor = true; + result.isTransactor_ = value; + return this; + } + public Builder clearIsTransactor() { + result.hasIsTransactor = false; + result.isTransactor_ = false; + return this; + } + + // optional uint64 timeout = 8; + public boolean hasTimeout() { + return result.hasTimeout(); + } + public long getTimeout() { + return result.getTimeout(); + } + public Builder setTimeout(long value) { + result.hasTimeout = true; + result.timeout_ = value; + return this; + } + public Builder clearTimeout() { + result.hasTimeout = false; + result.timeout_ = 0L; + return this; + } + + // optional uint64 receiveTimeout = 9; + public boolean hasReceiveTimeout() { + return result.hasReceiveTimeout(); + } + public long getReceiveTimeout() { + return result.getReceiveTimeout(); + } + public Builder setReceiveTimeout(long value) { + result.hasReceiveTimeout = true; + result.receiveTimeout_ = value; + return this; + } + public Builder clearReceiveTimeout() { + result.hasReceiveTimeout = false; + result.receiveTimeout_ = 0L; + return this; + } + + // optional .LifeCycleProtocol lifeCycle = 10; + public boolean hasLifeCycle() { + return result.hasLifeCycle(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol getLifeCycle() { + return result.getLifeCycle(); + } + public Builder setLifeCycle(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasLifeCycle = true; + result.lifeCycle_ = value; + return this; + } + public Builder setLifeCycle(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.Builder builderForValue) { + result.hasLifeCycle = true; + result.lifeCycle_ = builderForValue.build(); + return this; + } + public Builder mergeLifeCycle(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol value) { + if (result.hasLifeCycle() && + result.lifeCycle_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.getDefaultInstance()) { + result.lifeCycle_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.newBuilder(result.lifeCycle_).mergeFrom(value).buildPartial(); + } else { + result.lifeCycle_ = value; + } + result.hasLifeCycle = true; + return this; + } + public Builder clearLifeCycle() { + result.hasLifeCycle = false; + result.lifeCycle_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.getDefaultInstance(); + return this; + } + + // optional .RemoteActorRefProtocol supervisor = 11; + public boolean hasSupervisor() { + return result.hasSupervisor(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol getSupervisor() { + return result.getSupervisor(); + } + public Builder setSupervisor(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasSupervisor = true; + result.supervisor_ = value; + return this; + } + public Builder 
setSupervisor(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.Builder builderForValue) { + result.hasSupervisor = true; + result.supervisor_ = builderForValue.build(); + return this; + } + public Builder mergeSupervisor(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol value) { + if (result.hasSupervisor() && + result.supervisor_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.getDefaultInstance()) { + result.supervisor_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.newBuilder(result.supervisor_).mergeFrom(value).buildPartial(); + } else { + result.supervisor_ = value; + } + result.hasSupervisor = true; + return this; + } + public Builder clearSupervisor() { + result.hasSupervisor = false; + result.supervisor_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.getDefaultInstance(); + return this; + } + + // optional bytes hotswapStack = 12; + public boolean hasHotswapStack() { + return result.hasHotswapStack(); + } + public com.google.protobuf.ByteString getHotswapStack() { + return result.getHotswapStack(); + } + public Builder setHotswapStack(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasHotswapStack = true; + result.hotswapStack_ = value; + return this; + } + public Builder clearHotswapStack() { + result.hasHotswapStack = false; + result.hotswapStack_ = getDefaultInstance().getHotswapStack(); + return this; + } + + // repeated .RemoteRequestProtocol messages = 13; + public java.util.List getMessagesList() { + return java.util.Collections.unmodifiableList(result.messages_); + } + public int getMessagesCount() { + return result.getMessagesCount(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol getMessages(int index) { + return result.getMessages(index); + } + public Builder setMessages(int index, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol value) { + if (value == null) { + throw new NullPointerException(); + } + result.messages_.set(index, value); + return this; + } + public Builder setMessages(int index, se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.Builder builderForValue) { + result.messages_.set(index, builderForValue.build()); + return this; + } + public Builder addMessages(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol value) { + if (value == null) { + throw new NullPointerException(); + } + if (result.messages_.isEmpty()) { + result.messages_ = new java.util.ArrayList(); + } + result.messages_.add(value); + return this; + } + public Builder addMessages(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.Builder builderForValue) { + if (result.messages_.isEmpty()) { + result.messages_ = new java.util.ArrayList(); + } + result.messages_.add(builderForValue.build()); + return this; + } + public Builder addAllMessages( + java.lang.Iterable values) { + if (result.messages_.isEmpty()) { + result.messages_ = new java.util.ArrayList(); + } + super.addAll(values, result.messages_); + return this; + } + public Builder clearMessages() { + result.messages_ = java.util.Collections.emptyList(); + return this; + } + + // @@protoc_insertion_point(builder_scope:SerializedActorRefProtocol) + } + + static { + defaultInstance = new SerializedActorRefProtocol(true); + 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internalForceInit(); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SerializedActorRefProtocol) + } + + public static final class MessageProtocol extends + com.google.protobuf.GeneratedMessage { + // Use MessageProtocol.newBuilder() to construct. + private MessageProtocol() { + initFields(); + } + private MessageProtocol(boolean noInit) {} + + private static final MessageProtocol defaultInstance; + public static MessageProtocol getDefaultInstance() { + return defaultInstance; + } + + public MessageProtocol getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_MessageProtocol_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_MessageProtocol_fieldAccessorTable; + } + + // required .SerializationSchemeType serializationScheme = 1; + public static final int SERIALIZATIONSCHEME_FIELD_NUMBER = 1; + private boolean hasSerializationScheme; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializationSchemeType serializationScheme_; + public boolean hasSerializationScheme() { return hasSerializationScheme; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializationSchemeType getSerializationScheme() { return serializationScheme_; } + + // required bytes message = 2; + public static final int MESSAGE_FIELD_NUMBER = 2; + private boolean hasMessage; + private com.google.protobuf.ByteString message_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasMessage() { return hasMessage; } + public com.google.protobuf.ByteString getMessage() { return message_; } + + // optional bytes messageManifest = 3; + public static final int MESSAGEMANIFEST_FIELD_NUMBER = 3; + private boolean hasMessageManifest; + private com.google.protobuf.ByteString messageManifest_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasMessageManifest() { return hasMessageManifest; } + public com.google.protobuf.ByteString getMessageManifest() { return messageManifest_; } + + private void initFields() { + serializationScheme_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializationSchemeType.JAVA; + } + public final boolean isInitialized() { + if (!hasSerializationScheme) return false; + if (!hasMessage) return false; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (hasSerializationScheme()) { + output.writeEnum(1, getSerializationScheme().getNumber()); + } + if (hasMessage()) { + output.writeBytes(2, getMessage()); + } + if (hasMessageManifest()) { + output.writeBytes(3, getMessageManifest()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (hasSerializationScheme()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, getSerializationScheme().getNumber()); + } + if (hasMessage()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getMessage()); + } + if (hasMessageManifest()) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, 
getMessageManifest()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder { + private 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol result; + + // Construct using se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.newBuilder() + private Builder() {} + + private static Builder create() { + Builder builder = new Builder(); + builder.result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol(); + return builder; + } + + protected se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol internalGetResult() { + return result; + } + + public Builder clear() { + if (result == null) { + throw new IllegalStateException( + "Cannot call clear() after build()."); + } + result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol(); + return this; + } + + public Builder clone() { + return create().mergeFrom(result); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.getDescriptor(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol getDefaultInstanceForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.getDefaultInstance(); + } + + public boolean isInitialized() { + return result.isInitialized(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol build() { + if (result != null && !isInitialized()) { + throw newUninitializedMessageException(result); + } + return buildPartial(); + } + + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + if (!isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return buildPartial(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol buildPartial() { + if (result == null) { + throw new IllegalStateException( + "build() has already been called on this Builder."); + } + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol returnMe = result; + result = null; + return returnMe; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol) { + return mergeFrom((se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol other) { + if (other == se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.getDefaultInstance()) return this; + if (other.hasSerializationScheme()) { + setSerializationScheme(other.getSerializationScheme()); + } + if (other.hasMessage()) { + setMessage(other.getMessage()); + } + if (other.hasMessageManifest()) { + setMessageManifest(other.getMessageManifest()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + return this; + 
default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + return this; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializationSchemeType value = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializationSchemeType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + setSerializationScheme(value); + } + break; + } + case 18: { + setMessage(input.readBytes()); + break; + } + case 26: { + setMessageManifest(input.readBytes()); + break; + } + } + } + } + + + // required .SerializationSchemeType serializationScheme = 1; + public boolean hasSerializationScheme() { + return result.hasSerializationScheme(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializationSchemeType getSerializationScheme() { + return result.getSerializationScheme(); + } + public Builder setSerializationScheme(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializationSchemeType value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasSerializationScheme = true; + result.serializationScheme_ = value; + return this; + } + public Builder clearSerializationScheme() { + result.hasSerializationScheme = false; + result.serializationScheme_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializationSchemeType.JAVA; + return this; + } + + // required bytes message = 2; + public boolean hasMessage() { + return result.hasMessage(); + } + public com.google.protobuf.ByteString getMessage() { + return result.getMessage(); + } + public Builder setMessage(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasMessage = true; + result.message_ = value; + return this; + } + public Builder clearMessage() { + result.hasMessage = false; + result.message_ = getDefaultInstance().getMessage(); + return this; + } + + // optional bytes messageManifest = 3; + public boolean hasMessageManifest() { + return result.hasMessageManifest(); + } + public com.google.protobuf.ByteString getMessageManifest() { + return result.getMessageManifest(); + } + public Builder setMessageManifest(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasMessageManifest = true; + result.messageManifest_ = value; + return this; + } + public Builder clearMessageManifest() { + result.hasMessageManifest = false; + result.messageManifest_ = getDefaultInstance().getMessageManifest(); + return this; + } + + // @@protoc_insertion_point(builder_scope:MessageProtocol) + } + + static { + defaultInstance = new MessageProtocol(true); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internalForceInit(); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MessageProtocol) + } + + public static final class ActorInfoProtocol extends + com.google.protobuf.GeneratedMessage { + // Use ActorInfoProtocol.newBuilder() to construct. 
+ private ActorInfoProtocol() { + initFields(); + } + private ActorInfoProtocol(boolean noInit) {} + + private static final ActorInfoProtocol defaultInstance; + public static ActorInfoProtocol getDefaultInstance() { + return defaultInstance; + } + + public ActorInfoProtocol getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_ActorInfoProtocol_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_ActorInfoProtocol_fieldAccessorTable; + } + + // required string uuid = 1; + public static final int UUID_FIELD_NUMBER = 1; + private boolean hasUuid; + private java.lang.String uuid_ = ""; + public boolean hasUuid() { return hasUuid; } + public java.lang.String getUuid() { return uuid_; } + + // required string target = 2; + public static final int TARGET_FIELD_NUMBER = 2; + private boolean hasTarget; + private java.lang.String target_ = ""; + public boolean hasTarget() { return hasTarget; } + public java.lang.String getTarget() { return target_; } + + // required uint64 timeout = 3; + public static final int TIMEOUT_FIELD_NUMBER = 3; + private boolean hasTimeout; + private long timeout_ = 0L; + public boolean hasTimeout() { return hasTimeout; } + public long getTimeout() { return timeout_; } + + // required .ActorType actorType = 4; + public static final int ACTORTYPE_FIELD_NUMBER = 4; + private boolean hasActorType; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType actorType_; + public boolean hasActorType() { return hasActorType; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType getActorType() { return actorType_; } + + // optional .TypedActorInfoProtocol typedActorInfo = 5; + public static final int TYPEDACTORINFO_FIELD_NUMBER = 5; + private boolean hasTypedActorInfo; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol typedActorInfo_; + public boolean hasTypedActorInfo() { return hasTypedActorInfo; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol getTypedActorInfo() { return typedActorInfo_; } + + private void initFields() { + actorType_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType.SCALA_ACTOR; + typedActorInfo_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.getDefaultInstance(); + } + public final boolean isInitialized() { + if (!hasUuid) return false; + if (!hasTarget) return false; + if (!hasTimeout) return false; + if (!hasActorType) return false; + if (hasTypedActorInfo()) { + if (!getTypedActorInfo().isInitialized()) return false; + } + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (hasUuid()) { + output.writeString(1, getUuid()); + } + if (hasTarget()) { + output.writeString(2, getTarget()); + } + if (hasTimeout()) { + output.writeUInt64(3, getTimeout()); + } + if (hasActorType()) { + output.writeEnum(4, getActorType().getNumber()); + } + if (hasTypedActorInfo()) { + output.writeMessage(5, getTypedActorInfo()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if 
(size != -1) return size; + + size = 0; + if (hasUuid()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(1, getUuid()); + } + if (hasTarget()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(2, getTarget()); + } + if (hasTimeout()) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, getTimeout()); + } + if (hasActorType()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(4, getActorType().getNumber()); + } + if (hasTypedActorInfo()) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, getTypedActorInfo()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder { + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol result; + + // Construct using se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.newBuilder() + private Builder() {} + + private static Builder create() { + Builder builder = new Builder(); + builder.result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol(); + return builder; + } + + protected se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol internalGetResult() { + return result; + } + + public Builder clear() { + if (result == null) { + throw new IllegalStateException( + "Cannot call clear() after build()."); + } + result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol(); + return this; + } + + public Builder clone() { + return create().mergeFrom(result); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.getDescriptor(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol getDefaultInstanceForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.getDefaultInstance(); + } + + public boolean isInitialized() { + return result.isInitialized(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol build() { + if (result != null && !isInitialized()) { + throw newUninitializedMessageException(result); + } + return buildPartial(); + } + + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + if (!isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return buildPartial(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol buildPartial() { + if (result == null) { + throw new IllegalStateException( + "build() has already been called on this Builder."); + } + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol returnMe = result; + result = null; + return returnMe; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol) { + return mergeFrom((se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol other) { + if (other == se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.getDefaultInstance()) return this; + if (other.hasUuid()) { + setUuid(other.getUuid()); + } + if (other.hasTarget()) { + setTarget(other.getTarget()); + } + if (other.hasTimeout()) { + 
setTimeout(other.getTimeout()); + } + if (other.hasActorType()) { + setActorType(other.getActorType()); + } + if (other.hasTypedActorInfo()) { + mergeTypedActorInfo(other.getTypedActorInfo()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + return this; + } + break; + } + case 10: { + setUuid(input.readString()); + break; + } + case 18: { + setTarget(input.readString()); + break; + } + case 24: { + setTimeout(input.readUInt64()); + break; + } + case 32: { + int rawValue = input.readEnum(); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType value = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(4, rawValue); + } else { + setActorType(value); + } + break; + } + case 42: { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.newBuilder(); + if (hasTypedActorInfo()) { + subBuilder.mergeFrom(getTypedActorInfo()); + } + input.readMessage(subBuilder, extensionRegistry); + setTypedActorInfo(subBuilder.buildPartial()); + break; + } + } + } + } + + + // required string uuid = 1; + public boolean hasUuid() { + return result.hasUuid(); + } + public java.lang.String getUuid() { + return result.getUuid(); + } + public Builder setUuid(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasUuid = true; + result.uuid_ = value; + return this; + } + public Builder clearUuid() { + result.hasUuid = false; + result.uuid_ = getDefaultInstance().getUuid(); + return this; + } + + // required string target = 2; + public boolean hasTarget() { + return result.hasTarget(); + } + public java.lang.String getTarget() { + return result.getTarget(); + } + public Builder setTarget(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasTarget = true; + result.target_ = value; + return this; + } + public Builder clearTarget() { + result.hasTarget = false; + result.target_ = getDefaultInstance().getTarget(); + return this; + } + + // required uint64 timeout = 3; + public boolean hasTimeout() { + return result.hasTimeout(); + } + public long getTimeout() { + return result.getTimeout(); + } + public Builder setTimeout(long value) { + result.hasTimeout = true; + result.timeout_ = value; + return this; + } + public Builder clearTimeout() { + result.hasTimeout = false; + result.timeout_ = 0L; + return this; + } + + // required .ActorType actorType = 4; + public boolean hasActorType() { + return result.hasActorType(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType getActorType() { + return result.getActorType(); + } + public Builder setActorType(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType value) { + if (value == null) { + throw new 
NullPointerException();
+        }
+        result.hasActorType = true;
+        result.actorType_ = value;
+        return this;
+      }
+      public Builder clearActorType() {
+        result.hasActorType = false;
+        result.actorType_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType.SCALA_ACTOR;
+        return this;
+      }
+
+      // optional .TypedActorInfoProtocol typedActorInfo = 5;
+      public boolean hasTypedActorInfo() {
+        return result.hasTypedActorInfo();
+      }
+      public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol getTypedActorInfo() {
+        return result.getTypedActorInfo();
+      }
+      public Builder setTypedActorInfo(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol value) {
+        if (value == null) {
+          throw new NullPointerException();
+        }
+        result.hasTypedActorInfo = true;
+        result.typedActorInfo_ = value;
+        return this;
+      }
+      public Builder setTypedActorInfo(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.Builder builderForValue) {
+        result.hasTypedActorInfo = true;
+        result.typedActorInfo_ = builderForValue.build();
+        return this;
+      }
+      public Builder mergeTypedActorInfo(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol value) {
+        if (result.hasTypedActorInfo() &&
+            result.typedActorInfo_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.getDefaultInstance()) {
+          result.typedActorInfo_ =
+            se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.newBuilder(result.typedActorInfo_).mergeFrom(value).buildPartial();
+        } else {
+          result.typedActorInfo_ = value;
+        }
+        result.hasTypedActorInfo = true;
+        return this;
+      }
+      public Builder clearTypedActorInfo() {
+        result.hasTypedActorInfo = false;
+        result.typedActorInfo_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.getDefaultInstance();
+        return this;
+      }
+
+      // @@protoc_insertion_point(builder_scope:ActorInfoProtocol)
+    }
+
+    static {
+      defaultInstance = new ActorInfoProtocol(true);
+      se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internalForceInit();
+      defaultInstance.initFields();
+    }
+
+    // @@protoc_insertion_point(class_scope:ActorInfoProtocol)
+  }
+
+  public static final class TypedActorInfoProtocol extends
+      com.google.protobuf.GeneratedMessage {
+    // Use TypedActorInfoProtocol.newBuilder() to construct.
+ private TypedActorInfoProtocol() { + initFields(); + } + private TypedActorInfoProtocol(boolean noInit) {} + + private static final TypedActorInfoProtocol defaultInstance; + public static TypedActorInfoProtocol getDefaultInstance() { + return defaultInstance; + } + + public TypedActorInfoProtocol getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_TypedActorInfoProtocol_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_TypedActorInfoProtocol_fieldAccessorTable; + } + + // required string interface = 1; + public static final int INTERFACE_FIELD_NUMBER = 1; + private boolean hasInterface; + private java.lang.String interface_ = ""; + public boolean hasInterface() { return hasInterface; } + public java.lang.String getInterface() { return interface_; } + + // required string method = 2; + public static final int METHOD_FIELD_NUMBER = 2; + private boolean hasMethod; + private java.lang.String method_ = ""; + public boolean hasMethod() { return hasMethod; } + public java.lang.String getMethod() { return method_; } + + private void initFields() { + } + public final boolean isInitialized() { + if (!hasInterface) return false; + if (!hasMethod) return false; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (hasInterface()) { + output.writeString(1, getInterface()); + } + if (hasMethod()) { + output.writeString(2, getMethod()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (hasInterface()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(1, getInterface()); + } + if (hasMethod()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(2, getMethod()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder { + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol result; + + // Construct using se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.newBuilder() + private Builder() {} + + private static Builder create() { + Builder builder = new Builder(); + builder.result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol(); + return builder; + } + + protected se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol internalGetResult() { + return result; + } + + public Builder clear() { + if (result == null) { + throw new IllegalStateException( + "Cannot call clear() after build()."); + } + result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol(); + return this; + } + + public Builder clone() { + return create().mergeFrom(result); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.getDescriptor(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol getDefaultInstanceForType() { + return 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.getDefaultInstance(); + } + + public boolean isInitialized() { + return result.isInitialized(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol build() { + if (result != null && !isInitialized()) { + throw newUninitializedMessageException(result); + } + return buildPartial(); + } + + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + if (!isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return buildPartial(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol buildPartial() { + if (result == null) { + throw new IllegalStateException( + "build() has already been called on this Builder."); + } + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol returnMe = result; + result = null; + return returnMe; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol) { + return mergeFrom((se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol other) { + if (other == se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.getDefaultInstance()) return this; + if (other.hasInterface()) { + setInterface(other.getInterface()); + } + if (other.hasMethod()) { + setMethod(other.getMethod()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + return this; + } + break; + } + case 10: { + setInterface(input.readString()); + break; + } + case 18: { + setMethod(input.readString()); + break; + } + } + } + } + + + // required string interface = 1; + public boolean hasInterface() { + return result.hasInterface(); + } + public java.lang.String getInterface() { + return result.getInterface(); + } + public Builder setInterface(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasInterface = true; + result.interface_ = value; + return this; + } + public Builder clearInterface() { + result.hasInterface = false; + result.interface_ = getDefaultInstance().getInterface(); + return this; + } + + // required string method = 2; + public boolean hasMethod() { + return result.hasMethod(); + } + public java.lang.String getMethod() { + return result.getMethod(); + } + public Builder setMethod(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasMethod = true; + result.method_ = value; + return this; + } + public Builder clearMethod() { 
+ result.hasMethod = false; + result.method_ = getDefaultInstance().getMethod(); + return this; + } + + // @@protoc_insertion_point(builder_scope:TypedActorInfoProtocol) + } + + static { + defaultInstance = new TypedActorInfoProtocol(true); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internalForceInit(); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:TypedActorInfoProtocol) + } + + public static final class RemoteRequestProtocol extends + com.google.protobuf.GeneratedMessage { + // Use RemoteRequestProtocol.newBuilder() to construct. + private RemoteRequestProtocol() { + initFields(); + } + private RemoteRequestProtocol(boolean noInit) {} + + private static final RemoteRequestProtocol defaultInstance; + public static RemoteRequestProtocol getDefaultInstance() { + return defaultInstance; + } + + public RemoteRequestProtocol getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_RemoteRequestProtocol_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_RemoteRequestProtocol_fieldAccessorTable; + } + + // required uint64 id = 1; + public static final int ID_FIELD_NUMBER = 1; + private boolean hasId; + private long id_ = 0L; + public boolean hasId() { return hasId; } + public long getId() { return id_; } + + // required .MessageProtocol message = 2; + public static final int MESSAGE_FIELD_NUMBER = 2; + private boolean hasMessage; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol message_; + public boolean hasMessage() { return hasMessage; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol getMessage() { return message_; } + + // required .ActorInfoProtocol actorInfo = 3; + public static final int ACTORINFO_FIELD_NUMBER = 3; + private boolean hasActorInfo; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol actorInfo_; + public boolean hasActorInfo() { return hasActorInfo; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol getActorInfo() { return actorInfo_; } + + // required bool isOneWay = 4; + public static final int ISONEWAY_FIELD_NUMBER = 4; + private boolean hasIsOneWay; + private boolean isOneWay_ = false; + public boolean hasIsOneWay() { return hasIsOneWay; } + public boolean getIsOneWay() { return isOneWay_; } + + // optional string supervisorUuid = 5; + public static final int SUPERVISORUUID_FIELD_NUMBER = 5; + private boolean hasSupervisorUuid; + private java.lang.String supervisorUuid_ = ""; + public boolean hasSupervisorUuid() { return hasSupervisorUuid; } + public java.lang.String getSupervisorUuid() { return supervisorUuid_; } + + // optional .RemoteActorRefProtocol sender = 6; + public static final int SENDER_FIELD_NUMBER = 6; + private boolean hasSender; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol sender_; + public boolean hasSender() { return hasSender; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol getSender() { return sender_; } + + private void initFields() { + message_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.getDefaultInstance(); + actorInfo_ = 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.getDefaultInstance(); + sender_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.getDefaultInstance(); + } + public final boolean isInitialized() { + if (!hasId) return false; + if (!hasMessage) return false; + if (!hasActorInfo) return false; + if (!hasIsOneWay) return false; + if (!getMessage().isInitialized()) return false; + if (!getActorInfo().isInitialized()) return false; + if (hasSender()) { + if (!getSender().isInitialized()) return false; + } + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (hasId()) { + output.writeUInt64(1, getId()); + } + if (hasMessage()) { + output.writeMessage(2, getMessage()); + } + if (hasActorInfo()) { + output.writeMessage(3, getActorInfo()); + } + if (hasIsOneWay()) { + output.writeBool(4, getIsOneWay()); + } + if (hasSupervisorUuid()) { + output.writeString(5, getSupervisorUuid()); + } + if (hasSender()) { + output.writeMessage(6, getSender()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (hasId()) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, getId()); + } + if (hasMessage()) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getMessage()); + } + if (hasActorInfo()) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getActorInfo()); + } + if (hasIsOneWay()) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(4, getIsOneWay()); + } + if (hasSupervisorUuid()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(5, getSupervisorUuid()); + } + if (hasSender()) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, getSender()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder { + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol result; + + // Construct using se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.newBuilder() + private Builder() {} + + private static Builder create() { + Builder builder = new Builder(); + builder.result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol(); + return builder; + } + + protected se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol internalGetResult() { + return result; + } + + public Builder clear() { + if (result == null) { + throw new IllegalStateException( + "Cannot call clear() after build()."); + } + result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol(); + return this; + } + + public Builder clone() { + return create().mergeFrom(result); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.getDescriptor(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol getDefaultInstanceForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.getDefaultInstance(); + } + + public boolean isInitialized() { + return result.isInitialized(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol build() { + if (result != null && 
!isInitialized()) { + throw newUninitializedMessageException(result); + } + return buildPartial(); + } + + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + if (!isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return buildPartial(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol buildPartial() { + if (result == null) { + throw new IllegalStateException( + "build() has already been called on this Builder."); + } + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol returnMe = result; + result = null; + return returnMe; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol) { + return mergeFrom((se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol other) { + if (other == se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.getDefaultInstance()) return this; + if (other.hasId()) { + setId(other.getId()); + } + if (other.hasMessage()) { + mergeMessage(other.getMessage()); + } + if (other.hasActorInfo()) { + mergeActorInfo(other.getActorInfo()); + } + if (other.hasIsOneWay()) { + setIsOneWay(other.getIsOneWay()); + } + if (other.hasSupervisorUuid()) { + setSupervisorUuid(other.getSupervisorUuid()); + } + if (other.hasSender()) { + mergeSender(other.getSender()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + return this; + } + break; + } + case 8: { + setId(input.readUInt64()); + break; + } + case 18: { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.newBuilder(); + if (hasMessage()) { + subBuilder.mergeFrom(getMessage()); + } + input.readMessage(subBuilder, extensionRegistry); + setMessage(subBuilder.buildPartial()); + break; + } + case 26: { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.newBuilder(); + if (hasActorInfo()) { + subBuilder.mergeFrom(getActorInfo()); + } + input.readMessage(subBuilder, extensionRegistry); + setActorInfo(subBuilder.buildPartial()); + break; + } + case 32: { + setIsOneWay(input.readBool()); + break; + } + case 42: { + setSupervisorUuid(input.readString()); + break; + } + case 50: { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.Builder subBuilder = 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.newBuilder(); + if (hasSender()) { + subBuilder.mergeFrom(getSender()); + } + input.readMessage(subBuilder, extensionRegistry); + setSender(subBuilder.buildPartial()); + break; + } + } + } + } + + + // required uint64 id = 1; + public boolean hasId() { + return result.hasId(); + } + public long getId() { + return result.getId(); + } + public Builder setId(long value) { + result.hasId = true; + result.id_ = value; + return this; + } + public Builder clearId() { + result.hasId = false; + result.id_ = 0L; + return this; + } + + // required .MessageProtocol message = 2; + public boolean hasMessage() { + return result.hasMessage(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol getMessage() { + return result.getMessage(); + } + public Builder setMessage(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasMessage = true; + result.message_ = value; + return this; + } + public Builder setMessage(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.Builder builderForValue) { + result.hasMessage = true; + result.message_ = builderForValue.build(); + return this; + } + public Builder mergeMessage(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol value) { + if (result.hasMessage() && + result.message_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.getDefaultInstance()) { + result.message_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.newBuilder(result.message_).mergeFrom(value).buildPartial(); + } else { + result.message_ = value; + } + result.hasMessage = true; + return this; + } + public Builder clearMessage() { + result.hasMessage = false; + result.message_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.getDefaultInstance(); + return this; + } + + // required .ActorInfoProtocol actorInfo = 3; + public boolean hasActorInfo() { + return result.hasActorInfo(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol getActorInfo() { + return result.getActorInfo(); + } + public Builder setActorInfo(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasActorInfo = true; + result.actorInfo_ = value; + return this; + } + public Builder setActorInfo(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.Builder builderForValue) { + result.hasActorInfo = true; + result.actorInfo_ = builderForValue.build(); + return this; + } + public Builder mergeActorInfo(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol value) { + if (result.hasActorInfo() && + result.actorInfo_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.getDefaultInstance()) { + result.actorInfo_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.newBuilder(result.actorInfo_).mergeFrom(value).buildPartial(); + } else { + result.actorInfo_ = value; + } + result.hasActorInfo = true; + return this; + } + public Builder clearActorInfo() { + result.hasActorInfo = false; + result.actorInfo_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.getDefaultInstance(); + return this; + } + + // required bool isOneWay = 4; + public boolean hasIsOneWay() { 
+ return result.hasIsOneWay(); + } + public boolean getIsOneWay() { + return result.getIsOneWay(); + } + public Builder setIsOneWay(boolean value) { + result.hasIsOneWay = true; + result.isOneWay_ = value; + return this; + } + public Builder clearIsOneWay() { + result.hasIsOneWay = false; + result.isOneWay_ = false; + return this; + } + + // optional string supervisorUuid = 5; + public boolean hasSupervisorUuid() { + return result.hasSupervisorUuid(); + } + public java.lang.String getSupervisorUuid() { + return result.getSupervisorUuid(); + } + public Builder setSupervisorUuid(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasSupervisorUuid = true; + result.supervisorUuid_ = value; + return this; + } + public Builder clearSupervisorUuid() { + result.hasSupervisorUuid = false; + result.supervisorUuid_ = getDefaultInstance().getSupervisorUuid(); + return this; + } + + // optional .RemoteActorRefProtocol sender = 6; + public boolean hasSender() { + return result.hasSender(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol getSender() { + return result.getSender(); + } + public Builder setSender(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasSender = true; + result.sender_ = value; + return this; + } + public Builder setSender(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.Builder builderForValue) { + result.hasSender = true; + result.sender_ = builderForValue.build(); + return this; + } + public Builder mergeSender(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol value) { + if (result.hasSender() && + result.sender_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.getDefaultInstance()) { + result.sender_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.newBuilder(result.sender_).mergeFrom(value).buildPartial(); + } else { + result.sender_ = value; + } + result.hasSender = true; + return this; + } + public Builder clearSender() { + result.hasSender = false; + result.sender_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.getDefaultInstance(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RemoteRequestProtocol) + } + + static { + defaultInstance = new RemoteRequestProtocol(true); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internalForceInit(); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RemoteRequestProtocol) + } + + public static final class RemoteReplyProtocol extends + com.google.protobuf.GeneratedMessage { + // Use RemoteReplyProtocol.newBuilder() to construct. 
+ private RemoteReplyProtocol() { + initFields(); + } + private RemoteReplyProtocol(boolean noInit) {} + + private static final RemoteReplyProtocol defaultInstance; + public static RemoteReplyProtocol getDefaultInstance() { + return defaultInstance; + } + + public RemoteReplyProtocol getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_RemoteReplyProtocol_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_RemoteReplyProtocol_fieldAccessorTable; + } + + // required uint64 id = 1; + public static final int ID_FIELD_NUMBER = 1; + private boolean hasId; + private long id_ = 0L; + public boolean hasId() { return hasId; } + public long getId() { return id_; } + + // optional .MessageProtocol message = 2; + public static final int MESSAGE_FIELD_NUMBER = 2; + private boolean hasMessage; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol message_; + public boolean hasMessage() { return hasMessage; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol getMessage() { return message_; } + + // optional .ExceptionProtocol exception = 3; + public static final int EXCEPTION_FIELD_NUMBER = 3; + private boolean hasException; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol exception_; + public boolean hasException() { return hasException; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol getException() { return exception_; } + + // optional string supervisorUuid = 4; + public static final int SUPERVISORUUID_FIELD_NUMBER = 4; + private boolean hasSupervisorUuid; + private java.lang.String supervisorUuid_ = ""; + public boolean hasSupervisorUuid() { return hasSupervisorUuid; } + public java.lang.String getSupervisorUuid() { return supervisorUuid_; } + + // required bool isActor = 5; + public static final int ISACTOR_FIELD_NUMBER = 5; + private boolean hasIsActor; + private boolean isActor_ = false; + public boolean hasIsActor() { return hasIsActor; } + public boolean getIsActor() { return isActor_; } + + // required bool isSuccessful = 6; + public static final int ISSUCCESSFUL_FIELD_NUMBER = 6; + private boolean hasIsSuccessful; + private boolean isSuccessful_ = false; + public boolean hasIsSuccessful() { return hasIsSuccessful; } + public boolean getIsSuccessful() { return isSuccessful_; } + + private void initFields() { + message_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.getDefaultInstance(); + exception_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol.getDefaultInstance(); + } + public final boolean isInitialized() { + if (!hasId) return false; + if (!hasIsActor) return false; + if (!hasIsSuccessful) return false; + if (hasMessage()) { + if (!getMessage().isInitialized()) return false; + } + if (hasException()) { + if (!getException().isInitialized()) return false; + } + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (hasId()) { + output.writeUInt64(1, getId()); + } + if (hasMessage()) { + output.writeMessage(2, getMessage()); + } + if (hasException()) { + output.writeMessage(3, 
getException()); + } + if (hasSupervisorUuid()) { + output.writeString(4, getSupervisorUuid()); + } + if (hasIsActor()) { + output.writeBool(5, getIsActor()); + } + if (hasIsSuccessful()) { + output.writeBool(6, getIsSuccessful()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (hasId()) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, getId()); + } + if (hasMessage()) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, getMessage()); + } + if (hasException()) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, getException()); + } + if (hasSupervisorUuid()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(4, getSupervisorUuid()); + } + if (hasIsActor()) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, getIsActor()); + } + if (hasIsSuccessful()) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(6, getIsSuccessful()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, 
extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder { + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol result; + + // Construct using se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol.newBuilder() + private Builder() {} + + private static Builder create() { + Builder builder = new Builder(); + builder.result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol(); + return builder; + } + + protected se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol internalGetResult() { + return result; + } + + public Builder clear() { + if (result == null) { + throw new IllegalStateException( + "Cannot call clear() after build()."); + } + result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol(); + return this; + } + + public Builder clone() { + return create().mergeFrom(result); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol.getDescriptor(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol getDefaultInstanceForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol.getDefaultInstance(); + } + + public boolean isInitialized() { + return result.isInitialized(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol build() { + if (result != null && !isInitialized()) { + throw newUninitializedMessageException(result); + } + return buildPartial(); + } + + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + if (!isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return buildPartial(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol buildPartial() { + if (result == null) { + throw new IllegalStateException( + "build() has already been called on this Builder."); + } + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol returnMe = result; + result = null; + return returnMe; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol) { + return 
mergeFrom((se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol other) { + if (other == se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol.getDefaultInstance()) return this; + if (other.hasId()) { + setId(other.getId()); + } + if (other.hasMessage()) { + mergeMessage(other.getMessage()); + } + if (other.hasException()) { + mergeException(other.getException()); + } + if (other.hasSupervisorUuid()) { + setSupervisorUuid(other.getSupervisorUuid()); + } + if (other.hasIsActor()) { + setIsActor(other.getIsActor()); + } + if (other.hasIsSuccessful()) { + setIsSuccessful(other.getIsSuccessful()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + return this; + } + break; + } + case 8: { + setId(input.readUInt64()); + break; + } + case 18: { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.newBuilder(); + if (hasMessage()) { + subBuilder.mergeFrom(getMessage()); + } + input.readMessage(subBuilder, extensionRegistry); + setMessage(subBuilder.buildPartial()); + break; + } + case 26: { + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol.Builder subBuilder = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol.newBuilder(); + if (hasException()) { + subBuilder.mergeFrom(getException()); + } + input.readMessage(subBuilder, extensionRegistry); + setException(subBuilder.buildPartial()); + break; + } + case 34: { + setSupervisorUuid(input.readString()); + break; + } + case 40: { + setIsActor(input.readBool()); + break; + } + case 48: { + setIsSuccessful(input.readBool()); + break; + } + } + } + } + + + // required uint64 id = 1; + public boolean hasId() { + return result.hasId(); + } + public long getId() { + return result.getId(); + } + public Builder setId(long value) { + result.hasId = true; + result.id_ = value; + return this; + } + public Builder clearId() { + result.hasId = false; + result.id_ = 0L; + return this; + } + + // optional .MessageProtocol message = 2; + public boolean hasMessage() { + return result.hasMessage(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol getMessage() { + return result.getMessage(); + } + public Builder setMessage(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasMessage = true; + result.message_ = value; + return this; + } + public Builder setMessage(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.Builder builderForValue) { + result.hasMessage = true; + result.message_ = builderForValue.build(); + return this; + } + 
public Builder mergeMessage(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol value) { + if (result.hasMessage() && + result.message_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.getDefaultInstance()) { + result.message_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.newBuilder(result.message_).mergeFrom(value).buildPartial(); + } else { + result.message_ = value; + } + result.hasMessage = true; + return this; + } + public Builder clearMessage() { + result.hasMessage = false; + result.message_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.getDefaultInstance(); + return this; + } + + // optional .ExceptionProtocol exception = 3; + public boolean hasException() { + return result.hasException(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol getException() { + return result.getException(); + } + public Builder setException(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasException = true; + result.exception_ = value; + return this; + } + public Builder setException(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol.Builder builderForValue) { + result.hasException = true; + result.exception_ = builderForValue.build(); + return this; + } + public Builder mergeException(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol value) { + if (result.hasException() && + result.exception_ != se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol.getDefaultInstance()) { + result.exception_ = + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol.newBuilder(result.exception_).mergeFrom(value).buildPartial(); + } else { + result.exception_ = value; + } + result.hasException = true; + return this; + } + public Builder clearException() { + result.hasException = false; + result.exception_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol.getDefaultInstance(); + return this; + } + + // optional string supervisorUuid = 4; + public boolean hasSupervisorUuid() { + return result.hasSupervisorUuid(); + } + public java.lang.String getSupervisorUuid() { + return result.getSupervisorUuid(); + } + public Builder setSupervisorUuid(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasSupervisorUuid = true; + result.supervisorUuid_ = value; + return this; + } + public Builder clearSupervisorUuid() { + result.hasSupervisorUuid = false; + result.supervisorUuid_ = getDefaultInstance().getSupervisorUuid(); + return this; + } + + // required bool isActor = 5; + public boolean hasIsActor() { + return result.hasIsActor(); + } + public boolean getIsActor() { + return result.getIsActor(); + } + public Builder setIsActor(boolean value) { + result.hasIsActor = true; + result.isActor_ = value; + return this; + } + public Builder clearIsActor() { + result.hasIsActor = false; + result.isActor_ = false; + return this; + } + + // required bool isSuccessful = 6; + public boolean hasIsSuccessful() { + return result.hasIsSuccessful(); + } + public boolean getIsSuccessful() { + return result.getIsSuccessful(); + } + public Builder setIsSuccessful(boolean value) { + result.hasIsSuccessful = true; + result.isSuccessful_ = value; + return this; + } + public Builder clearIsSuccessful() { + result.hasIsSuccessful = false; + 
result.isSuccessful_ = false; + return this; + } + + // @@protoc_insertion_point(builder_scope:RemoteReplyProtocol) + } + + static { + defaultInstance = new RemoteReplyProtocol(true); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internalForceInit(); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RemoteReplyProtocol) + } + + public static final class LifeCycleProtocol extends + com.google.protobuf.GeneratedMessage { + // Use LifeCycleProtocol.newBuilder() to construct. + private LifeCycleProtocol() { + initFields(); + } + private LifeCycleProtocol(boolean noInit) {} + + private static final LifeCycleProtocol defaultInstance; + public static LifeCycleProtocol getDefaultInstance() { + return defaultInstance; + } + + public LifeCycleProtocol getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_LifeCycleProtocol_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_LifeCycleProtocol_fieldAccessorTable; + } + + // required .LifeCycleType lifeCycle = 1; + public static final int LIFECYCLE_FIELD_NUMBER = 1; + private boolean hasLifeCycle; + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleType lifeCycle_; + public boolean hasLifeCycle() { return hasLifeCycle; } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleType getLifeCycle() { return lifeCycle_; } + + // optional string preRestart = 2; + public static final int PRERESTART_FIELD_NUMBER = 2; + private boolean hasPreRestart; + private java.lang.String preRestart_ = ""; + public boolean hasPreRestart() { return hasPreRestart; } + public java.lang.String getPreRestart() { return preRestart_; } + + // optional string postRestart = 3; + public static final int POSTRESTART_FIELD_NUMBER = 3; + private boolean hasPostRestart; + private java.lang.String postRestart_ = ""; + public boolean hasPostRestart() { return hasPostRestart; } + public java.lang.String getPostRestart() { return postRestart_; } + + // optional string init = 4; + public static final int INIT_FIELD_NUMBER = 4; + private boolean hasInit; + private java.lang.String init_ = ""; + public boolean hasInit() { return hasInit; } + public java.lang.String getInit() { return init_; } + + // optional string shutdown = 5; + public static final int SHUTDOWN_FIELD_NUMBER = 5; + private boolean hasShutdown; + private java.lang.String shutdown_ = ""; + public boolean hasShutdown() { return hasShutdown; } + public java.lang.String getShutdown() { return shutdown_; } + + private void initFields() { + lifeCycle_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleType.PERMANENT; + } + public final boolean isInitialized() { + if (!hasLifeCycle) return false; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (hasLifeCycle()) { + output.writeEnum(1, getLifeCycle().getNumber()); + } + if (hasPreRestart()) { + output.writeString(2, getPreRestart()); + } + if (hasPostRestart()) { + output.writeString(3, getPostRestart()); + } + if (hasInit()) { + output.writeString(4, getInit()); + } + if (hasShutdown()) { + output.writeString(5, getShutdown()); + } + 
getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (hasLifeCycle()) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, getLifeCycle().getNumber()); + } + if (hasPreRestart()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(2, getPreRestart()); + } + if (hasPostRestart()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(3, getPostRestart()); + } + if (hasInit()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(4, getInit()); + } + if (hasShutdown()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(5, getShutdown()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder { + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol result; + + // Construct using se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.newBuilder() + private Builder() {} + + private static Builder create() { + Builder builder = new Builder(); + builder.result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol(); + return builder; + } + + protected se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol internalGetResult() { + return result; + } + + public Builder clear() { + if (result == null) { + throw new IllegalStateException( + "Cannot call clear() after build()."); + } + result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol(); + return this; + } + + public Builder clone() { + return create().mergeFrom(result); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.getDescriptor(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol getDefaultInstanceForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.getDefaultInstance(); + } + + public boolean isInitialized() { + return result.isInitialized(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol build() { + if (result != null && !isInitialized()) { + throw newUninitializedMessageException(result); + } + return buildPartial(); + } + + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + if (!isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return buildPartial(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol buildPartial() { + if (result == null) { + throw new IllegalStateException( + "build() has already been called on this Builder."); + } + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol returnMe = result; + result = null; + return returnMe; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol) { + return mergeFrom((se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol other) { + if (other == 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.getDefaultInstance()) return this; + if (other.hasLifeCycle()) { + setLifeCycle(other.getLifeCycle()); + } + if (other.hasPreRestart()) { + setPreRestart(other.getPreRestart()); + } + if (other.hasPostRestart()) { + setPostRestart(other.getPostRestart()); + } + if (other.hasInit()) { + setInit(other.getInit()); + } + if (other.hasShutdown()) { + setShutdown(other.getShutdown()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + return this; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleType value = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + setLifeCycle(value); + } + break; + } + case 18: { + setPreRestart(input.readString()); + break; + } + case 26: { + setPostRestart(input.readString()); + break; + } + case 34: { + setInit(input.readString()); + break; + } + case 42: { + setShutdown(input.readString()); + break; + } + } + } + } + + + // required .LifeCycleType lifeCycle = 1; + public boolean hasLifeCycle() { + return result.hasLifeCycle(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleType getLifeCycle() { + return result.getLifeCycle(); + } + public Builder setLifeCycle(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleType value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasLifeCycle = true; + result.lifeCycle_ = value; + return this; + } + public Builder clearLifeCycle() { + result.hasLifeCycle = false; + result.lifeCycle_ = se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleType.PERMANENT; + return this; + } + + // optional string preRestart = 2; + public boolean hasPreRestart() { + return result.hasPreRestart(); + } + public java.lang.String getPreRestart() { + return result.getPreRestart(); + } + public Builder setPreRestart(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasPreRestart = true; + result.preRestart_ = value; + return this; + } + public Builder clearPreRestart() { + result.hasPreRestart = false; + result.preRestart_ = getDefaultInstance().getPreRestart(); + return this; + } + + // optional string postRestart = 3; + public boolean hasPostRestart() { + return result.hasPostRestart(); + } + public java.lang.String getPostRestart() { + return result.getPostRestart(); + } + public Builder setPostRestart(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasPostRestart = true; + result.postRestart_ = value; + return this; + } + public Builder clearPostRestart() { + result.hasPostRestart = false; + result.postRestart_ = getDefaultInstance().getPostRestart(); + return this; + } + + // optional string 
init = 4; + public boolean hasInit() { + return result.hasInit(); + } + public java.lang.String getInit() { + return result.getInit(); + } + public Builder setInit(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasInit = true; + result.init_ = value; + return this; + } + public Builder clearInit() { + result.hasInit = false; + result.init_ = getDefaultInstance().getInit(); + return this; + } + + // optional string shutdown = 5; + public boolean hasShutdown() { + return result.hasShutdown(); + } + public java.lang.String getShutdown() { + return result.getShutdown(); + } + public Builder setShutdown(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasShutdown = true; + result.shutdown_ = value; + return this; + } + public Builder clearShutdown() { + result.hasShutdown = false; + result.shutdown_ = getDefaultInstance().getShutdown(); + return this; + } + + // @@protoc_insertion_point(builder_scope:LifeCycleProtocol) + } + + static { + defaultInstance = new LifeCycleProtocol(true); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internalForceInit(); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:LifeCycleProtocol) + } + + public static final class AddressProtocol extends + com.google.protobuf.GeneratedMessage { + // Use AddressProtocol.newBuilder() to construct. + private AddressProtocol() { + initFields(); + } + private AddressProtocol(boolean noInit) {} + + private static final AddressProtocol defaultInstance; + public static AddressProtocol getDefaultInstance() { + return defaultInstance; + } + + public AddressProtocol getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_AddressProtocol_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_AddressProtocol_fieldAccessorTable; + } + + // required string hostname = 1; + public static final int HOSTNAME_FIELD_NUMBER = 1; + private boolean hasHostname; + private java.lang.String hostname_ = ""; + public boolean hasHostname() { return hasHostname; } + public java.lang.String getHostname() { return hostname_; } + + // required uint32 port = 2; + public static final int PORT_FIELD_NUMBER = 2; + private boolean hasPort; + private int port_ = 0; + public boolean hasPort() { return hasPort; } + public int getPort() { return port_; } + + private void initFields() { + } + public final boolean isInitialized() { + if (!hasHostname) return false; + if (!hasPort) return false; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (hasHostname()) { + output.writeString(1, getHostname()); + } + if (hasPort()) { + output.writeUInt32(2, getPort()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (hasHostname()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(1, getHostname()); + } + if (hasPort()) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, getPort()); + } + size += 
getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder { + private 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol result; + + // Construct using se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.newBuilder() + private Builder() {} + + private static Builder create() { + Builder builder = new Builder(); + builder.result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol(); + return builder; + } + + protected se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol internalGetResult() { + return result; + } + + public Builder clear() { + if (result == null) { + throw new IllegalStateException( + "Cannot call clear() after build()."); + } + result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol(); + return this; + } + + public Builder clone() { + return create().mergeFrom(result); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.getDescriptor(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol getDefaultInstanceForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.getDefaultInstance(); + } + + public boolean isInitialized() { + return result.isInitialized(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol build() { + if (result != null && !isInitialized()) { + throw newUninitializedMessageException(result); + } + return buildPartial(); + } + + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + if (!isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return buildPartial(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol buildPartial() { + if (result == null) { + throw new IllegalStateException( + "build() has already been called on this Builder."); + } + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol returnMe = result; + result = null; + return returnMe; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol) { + return mergeFrom((se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol other) { + if (other == se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.getDefaultInstance()) return this; + if (other.hasHostname()) { + setHostname(other.getHostname()); + } + if (other.hasPort()) { + setPort(other.getPort()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + 
return this; + } + break; + } + case 10: { + setHostname(input.readString()); + break; + } + case 16: { + setPort(input.readUInt32()); + break; + } + } + } + } + + + // required string hostname = 1; + public boolean hasHostname() { + return result.hasHostname(); + } + public java.lang.String getHostname() { + return result.getHostname(); + } + public Builder setHostname(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasHostname = true; + result.hostname_ = value; + return this; + } + public Builder clearHostname() { + result.hasHostname = false; + result.hostname_ = getDefaultInstance().getHostname(); + return this; + } + + // required uint32 port = 2; + public boolean hasPort() { + return result.hasPort(); + } + public int getPort() { + return result.getPort(); + } + public Builder setPort(int value) { + result.hasPort = true; + result.port_ = value; + return this; + } + public Builder clearPort() { + result.hasPort = false; + result.port_ = 0; + return this; + } + + // @@protoc_insertion_point(builder_scope:AddressProtocol) + } + + static { + defaultInstance = new AddressProtocol(true); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internalForceInit(); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:AddressProtocol) + } + + public static final class ExceptionProtocol extends + com.google.protobuf.GeneratedMessage { + // Use ExceptionProtocol.newBuilder() to construct. + private ExceptionProtocol() { + initFields(); + } + private ExceptionProtocol(boolean noInit) {} + + private static final ExceptionProtocol defaultInstance; + public static ExceptionProtocol getDefaultInstance() { + return defaultInstance; + } + + public ExceptionProtocol getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_ExceptionProtocol_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internal_static_ExceptionProtocol_fieldAccessorTable; + } + + // required string classname = 1; + public static final int CLASSNAME_FIELD_NUMBER = 1; + private boolean hasClassname; + private java.lang.String classname_ = ""; + public boolean hasClassname() { return hasClassname; } + public java.lang.String getClassname() { return classname_; } + + // required string message = 2; + public static final int MESSAGE_FIELD_NUMBER = 2; + private boolean hasMessage; + private java.lang.String message_ = ""; + public boolean hasMessage() { return hasMessage; } + public java.lang.String getMessage() { return message_; } + + private void initFields() { + } + public final boolean isInitialized() { + if (!hasClassname) return false; + if (!hasMessage) return false; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (hasClassname()) { + output.writeString(1, getClassname()); + } + if (hasMessage()) { + output.writeString(2, getMessage()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (hasClassname()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(1, 
getClassname()); + } + if (hasMessage()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(2, getMessage()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + public 
static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder { + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol result; + + // Construct using se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol.newBuilder() + private Builder() {} + + private static Builder create() { + Builder builder = new Builder(); + builder.result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol(); + return builder; + } + + protected se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol internalGetResult() { + return result; + } + + public Builder clear() { + if (result == null) { + throw new IllegalStateException( + "Cannot call clear() after build()."); + } + result = new se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol(); + return this; + } + + public Builder clone() { + return create().mergeFrom(result); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol.getDescriptor(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol getDefaultInstanceForType() { + return se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol.getDefaultInstance(); + } + + public boolean isInitialized() { + return result.isInitialized(); + } + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol build() { + if (result != null && !isInitialized()) { + throw newUninitializedMessageException(result); + } + return buildPartial(); + } + + private se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + if (!isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return buildPartial(); + } + + public se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol buildPartial() { + if (result == null) { + throw new IllegalStateException( + "build() has already been called on this Builder."); + } + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol returnMe = result; + result = null; + return returnMe; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol) { + return mergeFrom((se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol other) { + if (other == se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol.getDefaultInstance()) return this; + if (other.hasClassname()) { + setClassname(other.getClassname()); + } + if (other.hasMessage()) { + setMessage(other.getMessage()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + return this; + 
default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + return this; + } + break; + } + case 10: { + setClassname(input.readString()); + break; + } + case 18: { + setMessage(input.readString()); + break; + } + } + } + } + + + // required string classname = 1; + public boolean hasClassname() { + return result.hasClassname(); + } + public java.lang.String getClassname() { + return result.getClassname(); + } + public Builder setClassname(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasClassname = true; + result.classname_ = value; + return this; + } + public Builder clearClassname() { + result.hasClassname = false; + result.classname_ = getDefaultInstance().getClassname(); + return this; + } + + // required string message = 2; + public boolean hasMessage() { + return result.hasMessage(); + } + public java.lang.String getMessage() { + return result.getMessage(); + } + public Builder setMessage(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasMessage = true; + result.message_ = value; + return this; + } + public Builder clearMessage() { + result.hasMessage = false; + result.message_ = getDefaultInstance().getMessage(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ExceptionProtocol) + } + + static { + defaultInstance = new ExceptionProtocol(true); + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.internalForceInit(); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ExceptionProtocol) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RemoteActorRefProtocol_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RemoteActorRefProtocol_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SerializedActorRefProtocol_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SerializedActorRefProtocol_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MessageProtocol_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MessageProtocol_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ActorInfoProtocol_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ActorInfoProtocol_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_TypedActorInfoProtocol_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_TypedActorInfoProtocol_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RemoteRequestProtocol_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RemoteRequestProtocol_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RemoteReplyProtocol_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RemoteReplyProtocol_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_LifeCycleProtocol_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable 
+ internal_static_LifeCycleProtocol_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_AddressProtocol_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_AddressProtocol_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ExceptionProtocol_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ExceptionProtocol_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\024RemoteProtocol.proto\"v\n\026RemoteActorRef" + + "Protocol\022\014\n\004uuid\030\001 \002(\t\022\026\n\016actorClassname" + + "\030\002 \002(\t\022%\n\013homeAddress\030\003 \002(\0132\020.AddressPro" + + "tocol\022\017\n\007timeout\030\004 \001(\004\"\200\003\n\032SerializedAct" + + "orRefProtocol\022\014\n\004uuid\030\001 \002(\t\022\n\n\002id\030\002 \002(\t\022" + + "\026\n\016actorClassname\030\003 \002(\t\022)\n\017originalAddre" + + "ss\030\004 \002(\0132\020.AddressProtocol\022\025\n\ractorInsta" + + "nce\030\005 \001(\014\022\033\n\023serializerClassname\030\006 \001(\t\022\024" + + "\n\014isTransactor\030\007 \001(\010\022\017\n\007timeout\030\010 \001(\004\022\026\n" + + "\016receiveTimeout\030\t \001(\004\022%\n\tlifeCycle\030\n \001(\013", + "2\022.LifeCycleProtocol\022+\n\nsupervisor\030\013 \001(\013" + + "2\027.RemoteActorRefProtocol\022\024\n\014hotswapStac" + + "k\030\014 \001(\014\022(\n\010messages\030\r \003(\0132\026.RemoteReques" + + "tProtocol\"r\n\017MessageProtocol\0225\n\023serializ" + + "ationScheme\030\001 \002(\0162\030.SerializationSchemeT" + + "ype\022\017\n\007message\030\002 \002(\014\022\027\n\017messageManifest\030" + + "\003 \001(\014\"\222\001\n\021ActorInfoProtocol\022\014\n\004uuid\030\001 \002(" + + "\t\022\016\n\006target\030\002 \002(\t\022\017\n\007timeout\030\003 \002(\004\022\035\n\tac" + + "torType\030\004 \002(\0162\n.ActorType\022/\n\016typedActorI" + + "nfo\030\005 \001(\0132\027.TypedActorInfoProtocol\";\n\026Ty", + "pedActorInfoProtocol\022\021\n\tinterface\030\001 \002(\t\022" + + "\016\n\006method\030\002 \002(\t\"\300\001\n\025RemoteRequestProtoco" + + "l\022\n\n\002id\030\001 \002(\004\022!\n\007message\030\002 \002(\0132\020.Message" + + "Protocol\022%\n\tactorInfo\030\003 \002(\0132\022.ActorInfoP" + + "rotocol\022\020\n\010isOneWay\030\004 \002(\010\022\026\n\016supervisorU" + + "uid\030\005 \001(\t\022\'\n\006sender\030\006 \001(\0132\027.RemoteActorR" + + "efProtocol\"\252\001\n\023RemoteReplyProtocol\022\n\n\002id" + + "\030\001 \002(\004\022!\n\007message\030\002 \001(\0132\020.MessageProtoco" + + "l\022%\n\texception\030\003 \001(\0132\022.ExceptionProtocol" + + "\022\026\n\016supervisorUuid\030\004 \001(\t\022\017\n\007isActor\030\005 \002(", + "\010\022\024\n\014isSuccessful\030\006 \002(\010\"\177\n\021LifeCycleProt" + + "ocol\022!\n\tlifeCycle\030\001 \002(\0162\016.LifeCycleType\022" + + "\022\n\npreRestart\030\002 \001(\t\022\023\n\013postRestart\030\003 \001(\t" + + "\022\014\n\004init\030\004 \001(\t\022\020\n\010shutdown\030\005 \001(\t\"1\n\017Addr" + + "essProtocol\022\020\n\010hostname\030\001 \002(\t\022\014\n\004port\030\002 " + + "\002(\r\"7\n\021ExceptionProtocol\022\021\n\tclassname\030\001 " + + "\002(\t\022\017\n\007message\030\002 
\002(\t*=\n\tActorType\022\017\n\013SCA" + + "LA_ACTOR\020\001\022\016\n\nJAVA_ACTOR\020\002\022\017\n\013TYPED_ACTO" + + "R\020\003*]\n\027SerializationSchemeType\022\010\n\004JAVA\020\001" + + "\022\013\n\007SBINARY\020\002\022\016\n\nSCALA_JSON\020\003\022\r\n\tJAVA_JS", + "ON\020\004\022\014\n\010PROTOBUF\020\005*-\n\rLifeCycleType\022\r\n\tP" + + "ERMANENT\020\001\022\r\n\tTEMPORARY\020\002B-\n)se.scalable" + + "solutions.akka.remote.protocolH\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_RemoteActorRefProtocol_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_RemoteActorRefProtocol_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RemoteActorRefProtocol_descriptor, + new java.lang.String[] { "Uuid", "ActorClassname", "HomeAddress", "Timeout", }, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.class, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteActorRefProtocol.Builder.class); + internal_static_SerializedActorRefProtocol_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_SerializedActorRefProtocol_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SerializedActorRefProtocol_descriptor, + new java.lang.String[] { "Uuid", "Id", "ActorClassname", "OriginalAddress", "ActorInstance", "SerializerClassname", "IsTransactor", "Timeout", "ReceiveTimeout", "LifeCycle", "Supervisor", "HotswapStack", "Messages", }, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol.class, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.SerializedActorRefProtocol.Builder.class); + internal_static_MessageProtocol_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_MessageProtocol_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MessageProtocol_descriptor, + new java.lang.String[] { "SerializationScheme", "Message", "MessageManifest", }, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.class, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.MessageProtocol.Builder.class); + internal_static_ActorInfoProtocol_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_ActorInfoProtocol_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ActorInfoProtocol_descriptor, + new java.lang.String[] { "Uuid", "Target", "Timeout", "ActorType", "TypedActorInfo", }, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.class, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorInfoProtocol.Builder.class); + internal_static_TypedActorInfoProtocol_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_TypedActorInfoProtocol_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_TypedActorInfoProtocol_descriptor, + new java.lang.String[] { "Interface", "Method", }, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.class, + 
se.scalablesolutions.akka.remote.protocol.RemoteProtocol.TypedActorInfoProtocol.Builder.class); + internal_static_RemoteRequestProtocol_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_RemoteRequestProtocol_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RemoteRequestProtocol_descriptor, + new java.lang.String[] { "Id", "Message", "ActorInfo", "IsOneWay", "SupervisorUuid", "Sender", }, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.class, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteRequestProtocol.Builder.class); + internal_static_RemoteReplyProtocol_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_RemoteReplyProtocol_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RemoteReplyProtocol_descriptor, + new java.lang.String[] { "Id", "Message", "Exception", "SupervisorUuid", "IsActor", "IsSuccessful", }, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol.class, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.RemoteReplyProtocol.Builder.class); + internal_static_LifeCycleProtocol_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_LifeCycleProtocol_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_LifeCycleProtocol_descriptor, + new java.lang.String[] { "LifeCycle", "PreRestart", "PostRestart", "Init", "Shutdown", }, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.class, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.LifeCycleProtocol.Builder.class); + internal_static_AddressProtocol_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_AddressProtocol_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_AddressProtocol_descriptor, + new java.lang.String[] { "Hostname", "Port", }, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.class, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol.Builder.class); + internal_static_ExceptionProtocol_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_ExceptionProtocol_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ExceptionProtocol_descriptor, + new java.lang.String[] { "Classname", "Message", }, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol.class, + se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ExceptionProtocol.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + public static void internalForceInit() {} + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/akka-core/src/main/scala/actor/TypedActor.scala b/akka-typed-actors/src/main/scala/actor/TypedActor.scala similarity index 98% rename from akka-core/src/main/scala/actor/TypedActor.scala rename to akka-typed-actors/src/main/scala/actor/TypedActor.scala index 77473fe4d1..c3d2444e55 100644 --- a/akka-core/src/main/scala/actor/TypedActor.scala +++ b/akka-typed-actors/src/main/scala/actor/TypedActor.scala @@ -109,7 +109,7 @@ import scala.reflect.BeanProperty * * @author Jonas Bonér */ -abstract class TypedActor extends Actor { 
+abstract class TypedActor extends Actor with Proxyable { val DELEGATE_FIELD_NAME = "DELEGATE_0".intern @volatile private[actor] var proxy: AnyRef = _ @@ -192,7 +192,7 @@ abstract class TypedActor extends Actor { /** * Rewrite target instance in AspectWerkz Proxy. */ - private[actor] def swapInstanceInProxy(newInstance: Actor) = proxyDelegate.set(proxy, newInstance) + private[actor] def swapProxiedActor(newInstance: Actor) = proxyDelegate.set(proxy, newInstance) private[akka] def initialize(typedActorProxy: AnyRef) = { proxy = typedActorProxy @@ -537,6 +537,12 @@ object TypedActor extends Logging { private[akka] def supervise(restartStrategy: RestartStrategy, components: List[Supervise]): Supervisor = Supervisor(SupervisorConfig(restartStrategy, components)) + + private[akka] def isJoinPointAndOneWay(message: AnyRef): Boolean = if (isJoinPoint(message)) + isOneWay(message.asInstanceOf[JoinPoint].getRtti.asInstanceOf[MethodRtti]) + else false + + private[akka] def isJoinPoint(message: AnyRef): Boolean = message.isInstanceOf[JoinPoint] } /** diff --git a/akka-core/src/main/scala/config/TypedActorConfigurator.scala b/akka-typed-actors/src/main/scala/config/TypedActorConfigurator.scala similarity index 100% rename from akka-core/src/main/scala/config/TypedActorConfigurator.scala rename to akka-typed-actors/src/main/scala/config/TypedActorConfigurator.scala diff --git a/akka-core/src/main/scala/config/TypedActorGuiceConfigurator.scala b/akka-typed-actors/src/main/scala/config/TypedActorGuiceConfigurator.scala similarity index 100% rename from akka-core/src/main/scala/config/TypedActorGuiceConfigurator.scala rename to akka-typed-actors/src/main/scala/config/TypedActorGuiceConfigurator.scala diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/Bar.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Bar.java similarity index 61% rename from akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/Bar.java rename to akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Bar.java index fb31de7a55..906476b789 100644 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/Bar.java +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Bar.java @@ -1,4 +1,4 @@ -package se.scalablesolutions.akka.api; +package se.scalablesolutions.akka.actor; public interface Bar { void bar(String msg); diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/BarImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/BarImpl.java similarity index 50% rename from akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/BarImpl.java rename to akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/BarImpl.java index bb93a1ad03..9cb41a85cf 100644 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/BarImpl.java +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/BarImpl.java @@ -1,13 +1,16 @@ -package se.scalablesolutions.akka.api; +package se.scalablesolutions.akka.actor; import com.google.inject.Inject; +import se.scalablesolutions.akka.actor.*; -public class BarImpl implements Bar { +public class BarImpl extends TypedActor implements Bar { @Inject private Ext ext; + public Ext getExt() { return ext; } + public void bar(String msg) { } } diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/Ext.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Ext.java similarity index 50%
rename from akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/Ext.java rename to akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Ext.java index 1929058fac..c37219cf00 100644 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/Ext.java +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Ext.java @@ -1,4 +1,4 @@ -package se.scalablesolutions.akka.api; +package se.scalablesolutions.akka.actor; public interface Ext { void ext(); diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/ExtImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/ExtImpl.java similarity index 62% rename from akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/ExtImpl.java rename to akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/ExtImpl.java index 3c9c9fd3f4..dd8ca55089 100644 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/ExtImpl.java +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/ExtImpl.java @@ -1,4 +1,4 @@ -package se.scalablesolutions.akka.api; +package se.scalablesolutions.akka.actor; public class ExtImpl implements Ext { public void ext() { diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Foo.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Foo.java new file mode 100644 index 0000000000..a64f975bce --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Foo.java @@ -0,0 +1,14 @@ +package se.scalablesolutions.akka.actor; + +public interface Foo { + public Foo body(); + public Bar getBar(); + + public String foo(String msg); + public void bar(String msg); + + public String longRunning(); + public String throwsException(); + + public int $tag() throws java.rmi.RemoteException; +} diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/Foo.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/FooImpl.java similarity index 70% rename from akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/Foo.java rename to akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/FooImpl.java index 5849eb902d..ded09f4e07 100644 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/Foo.java +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/FooImpl.java @@ -1,34 +1,40 @@ -package se.scalablesolutions.akka.api; +package se.scalablesolutions.akka.actor; import com.google.inject.Inject; +import se.scalablesolutions.akka.actor.*; -public class Foo extends se.scalablesolutions.akka.serialization.Serializable.JavaJSON { +public class FooImpl extends TypedActor implements Foo { @Inject private Bar bar; + public Foo body() { return this; } + public Bar getBar() { return bar; } + public String foo(String msg) { return msg + "return_foo "; } + public void bar(String msg) { bar.bar(msg); } + public String longRunning() { try { - Thread.sleep(10000); + Thread.sleep(1200); } catch (InterruptedException e) { } return "test"; } + public String throwsException() { if (true) throw new RuntimeException("Expected exception; to test fault-tolerance"); return "test"; } - public int $tag() throws java.rmi.RemoteException - { + public int $tag() throws java.rmi.RemoteException { return 0; } } diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActor.java 
b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActor.java new file mode 100644 index 0000000000..fbd241763f --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActor.java @@ -0,0 +1,12 @@ +package se.scalablesolutions.akka.actor; + +public interface NestedTransactionalTypedActor { + public String getMapState(String key); + public String getVectorState(); + public String getRefState(); + public void setMapState(String key, String msg); + public void setVectorState(String msg); + public void setRefState(String msg); + public void success(String key, String msg); + public String failure(String key, String msg, TypedActorFailer failer); +} diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemStatefulNested.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActorImpl.java similarity index 58% rename from akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemStatefulNested.java rename to akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActorImpl.java index 9cd92bd075..1b95517c22 100644 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemStatefulNested.java +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActorImpl.java @@ -1,17 +1,15 @@ -package se.scalablesolutions.akka.api; +package se.scalablesolutions.akka.actor; -import se.scalablesolutions.akka.actor.annotation.transactionrequired; -import se.scalablesolutions.akka.actor.annotation.inittransactionalstate; +import se.scalablesolutions.akka.actor.*; import se.scalablesolutions.akka.stm.*; -@transactionrequired -public class InMemStatefulNested { +public class NestedTransactionalTypedActorImpl extends TypedTransactor implements NestedTransactionalTypedActor { private TransactionalMap mapState; private TransactionalVector vectorState; private Ref refState; private boolean isInitialized = false; - @inittransactionalstate + @Override public void init() { if (!isInitialized) { mapState = new TransactionalMap(); @@ -25,62 +23,37 @@ public class InMemStatefulNested { return (String) mapState.get(key).get(); } - public String getVectorState() { return (String) vectorState.last(); } - public String getRefState() { - return (String) refState.get().get(); + return (String) refState.get(); } - public void setMapState(String key, String msg) { mapState.put(key, msg); } - public void setVectorState(String msg) { vectorState.add(msg); } - public void setRefState(String msg) { refState.swap(msg); } - public void success(String key, String msg) { mapState.put(key, msg); vectorState.add(msg); refState.swap(msg); } - - public String failure(String key, String msg, InMemFailer failer) { + public String failure(String key, String msg, TypedActorFailer failer) { mapState.put(key, msg); vectorState.add(msg); refState.swap(msg); failer.fail(); return msg; } - - - public void thisMethodHangs(String key, String msg, InMemFailer failer) { - setMapState(key, msg); - } - - /* - public void clashOk(String key, String msg, InMemClasher clasher) { - mapState.put(key, msg); - clasher.clash(); - } - - public void clashNotOk(String key, String msg, InMemClasher clasher) { - mapState.put(key, msg); - clasher.clash(); - this.success("clash", "clash"); - } - */ } diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/ProtobufProtocol.java 
b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/ProtobufProtocol.java new file mode 100644 index 0000000000..683f008729 --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/ProtobufProtocol.java @@ -0,0 +1,1060 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: ProtobufProtocol.proto + +package se.scalablesolutions.akka.actor; + +public final class ProtobufProtocol { + private ProtobufProtocol() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public static final class ProtobufPOJO extends + com.google.protobuf.GeneratedMessage { + // Use ProtobufPOJO.newBuilder() to construct. + private ProtobufPOJO() { + initFields(); + } + private ProtobufPOJO(boolean noInit) {} + + private static final ProtobufPOJO defaultInstance; + public static ProtobufPOJO getDefaultInstance() { + return defaultInstance; + } + + public ProtobufPOJO getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_fieldAccessorTable; + } + + // required uint64 id = 1; + public static final int ID_FIELD_NUMBER = 1; + private boolean hasId; + private long id_ = 0L; + public boolean hasId() { return hasId; } + public long getId() { return id_; } + + // required string name = 2; + public static final int NAME_FIELD_NUMBER = 2; + private boolean hasName; + private java.lang.String name_ = ""; + public boolean hasName() { return hasName; } + public java.lang.String getName() { return name_; } + + // required bool status = 3; + public static final int STATUS_FIELD_NUMBER = 3; + private boolean hasStatus; + private boolean status_ = false; + public boolean hasStatus() { return hasStatus; } + public boolean getStatus() { return status_; } + + private void initFields() { + } + public final boolean isInitialized() { + if (!hasId) return false; + if (!hasName) return false; + if (!hasStatus) return false; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (hasId()) { + output.writeUInt64(1, getId()); + } + if (hasName()) { + output.writeString(2, getName()); + } + if (hasStatus()) { + output.writeBool(3, getStatus()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (hasId()) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, getId()); + } + if (hasName()) { + size += com.google.protobuf.CodedOutputStream + .computeStringSize(2, getName()); + } + if (hasStatus()) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, getStatus()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder { + private se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO result; + + // Construct using se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO.newBuilder() + private Builder() {} + + private static Builder create() { + Builder builder = new Builder(); + builder.result = new se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO(); + return builder; + } + + protected se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO internalGetResult() { + return 
result; + } + + public Builder clear() { + if (result == null) { + throw new IllegalStateException( + "Cannot call clear() after build()."); + } + result = new se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO(); + return this; + } + + public Builder clone() { + return create().mergeFrom(result); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO.getDescriptor(); + } + + public se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO getDefaultInstanceForType() { + return se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO.getDefaultInstance(); + } + + public boolean isInitialized() { + return result.isInitialized(); + } + public se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO build() { + if (result != null && !isInitialized()) { + throw newUninitializedMessageException(result); + } + return buildPartial(); + } + + private se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + if (!isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return buildPartial(); + } + + public se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO buildPartial() { + if (result == null) { + throw new IllegalStateException( + "build() has already been called on this Builder."); + } + se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO returnMe = result; + result = null; + return returnMe; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO) { + return mergeFrom((se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO other) { + if (other == se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO.getDefaultInstance()) return this; + if (other.hasId()) { + setId(other.getId()); + } + if (other.hasName()) { + setName(other.getName()); + } + if (other.hasStatus()) { + setStatus(other.getStatus()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + return this; + } + break; + } + case 8: { + setId(input.readUInt64()); + break; + } + case 18: { + setName(input.readString()); + break; + } + case 24: { + setStatus(input.readBool()); + break; + } + } + } + } + + + // required uint64 id = 1; + public boolean hasId() { + return result.hasId(); + } + public long getId() { + return result.getId(); + } + public Builder setId(long value) { + result.hasId = true; + result.id_ = value; + return this; + } + public Builder clearId() { + result.hasId = false; + result.id_ = 0L; + return this; + } + + // required string name = 2; + public boolean hasName() { + 
return result.hasName(); + } + public java.lang.String getName() { + return result.getName(); + } + public Builder setName(java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + result.hasName = true; + result.name_ = value; + return this; + } + public Builder clearName() { + result.hasName = false; + result.name_ = getDefaultInstance().getName(); + return this; + } + + // required bool status = 3; + public boolean hasStatus() { + return result.hasStatus(); + } + public boolean getStatus() { + return result.getStatus(); + } + public Builder setStatus(boolean value) { + result.hasStatus = true; + result.status_ = value; + return this; + } + public Builder clearStatus() { + result.hasStatus = false; + result.status_ = false; + return this; + } + + // @@protoc_insertion_point(builder_scope:se.scalablesolutions.akka.actor.ProtobufPOJO) + } + + static { + defaultInstance = new ProtobufPOJO(true); + se.scalablesolutions.akka.actor.ProtobufProtocol.internalForceInit(); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:se.scalablesolutions.akka.actor.ProtobufPOJO) + } + + public static final class Counter extends + com.google.protobuf.GeneratedMessage { + // Use Counter.newBuilder() to construct. + private Counter() { + initFields(); + } + private Counter(boolean noInit) {} + + private static final Counter defaultInstance; + public static Counter getDefaultInstance() { + return defaultInstance; + } + + public Counter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_Counter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_Counter_fieldAccessorTable; + } + + // required uint32 count = 1; + public static final int COUNT_FIELD_NUMBER = 1; + private boolean hasCount; + private int count_ = 0; + public boolean hasCount() { return hasCount; } + public int getCount() { return count_; } + + private void initFields() { + } + public final boolean isInitialized() { + if (!hasCount) return false; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (hasCount()) { + output.writeUInt32(1, getCount()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (hasCount()) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(1, getCount()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + 
public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(se.scalablesolutions.akka.actor.ProtobufProtocol.Counter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder { + private se.scalablesolutions.akka.actor.ProtobufProtocol.Counter result; + + // Construct using se.scalablesolutions.akka.actor.ProtobufProtocol.Counter.newBuilder() + private Builder() {} + + private static Builder create() { + Builder builder = new Builder(); + builder.result = new se.scalablesolutions.akka.actor.ProtobufProtocol.Counter(); + return builder; + } + + protected se.scalablesolutions.akka.actor.ProtobufProtocol.Counter internalGetResult() { + return result; + } + + public Builder clear() { + if (result == null) { + throw new IllegalStateException( + "Cannot call clear() after build()."); + } + result = new se.scalablesolutions.akka.actor.ProtobufProtocol.Counter(); + return this; + } + + public Builder clone() { + return create().mergeFrom(result); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return 
se.scalablesolutions.akka.actor.ProtobufProtocol.Counter.getDescriptor(); + } + + public se.scalablesolutions.akka.actor.ProtobufProtocol.Counter getDefaultInstanceForType() { + return se.scalablesolutions.akka.actor.ProtobufProtocol.Counter.getDefaultInstance(); + } + + public boolean isInitialized() { + return result.isInitialized(); + } + public se.scalablesolutions.akka.actor.ProtobufProtocol.Counter build() { + if (result != null && !isInitialized()) { + throw newUninitializedMessageException(result); + } + return buildPartial(); + } + + private se.scalablesolutions.akka.actor.ProtobufProtocol.Counter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + if (!isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return buildPartial(); + } + + public se.scalablesolutions.akka.actor.ProtobufProtocol.Counter buildPartial() { + if (result == null) { + throw new IllegalStateException( + "build() has already been called on this Builder."); + } + se.scalablesolutions.akka.actor.ProtobufProtocol.Counter returnMe = result; + result = null; + return returnMe; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof se.scalablesolutions.akka.actor.ProtobufProtocol.Counter) { + return mergeFrom((se.scalablesolutions.akka.actor.ProtobufProtocol.Counter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(se.scalablesolutions.akka.actor.ProtobufProtocol.Counter other) { + if (other == se.scalablesolutions.akka.actor.ProtobufProtocol.Counter.getDefaultInstance()) return this; + if (other.hasCount()) { + setCount(other.getCount()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + return this; + } + break; + } + case 8: { + setCount(input.readUInt32()); + break; + } + } + } + } + + + // required uint32 count = 1; + public boolean hasCount() { + return result.hasCount(); + } + public int getCount() { + return result.getCount(); + } + public Builder setCount(int value) { + result.hasCount = true; + result.count_ = value; + return this; + } + public Builder clearCount() { + result.hasCount = false; + result.count_ = 0; + return this; + } + + // @@protoc_insertion_point(builder_scope:se.scalablesolutions.akka.actor.Counter) + } + + static { + defaultInstance = new Counter(true); + se.scalablesolutions.akka.actor.ProtobufProtocol.internalForceInit(); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:se.scalablesolutions.akka.actor.Counter) + } + + public static final class DualCounter extends + com.google.protobuf.GeneratedMessage { + // Use DualCounter.newBuilder() to construct. 
+ private DualCounter() { + initFields(); + } + private DualCounter(boolean noInit) {} + + private static final DualCounter defaultInstance; + public static DualCounter getDefaultInstance() { + return defaultInstance; + } + + public DualCounter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_DualCounter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_DualCounter_fieldAccessorTable; + } + + // required uint32 count1 = 1; + public static final int COUNT1_FIELD_NUMBER = 1; + private boolean hasCount1; + private int count1_ = 0; + public boolean hasCount1() { return hasCount1; } + public int getCount1() { return count1_; } + + // required uint32 count2 = 2; + public static final int COUNT2_FIELD_NUMBER = 2; + private boolean hasCount2; + private int count2_ = 0; + public boolean hasCount2() { return hasCount2; } + public int getCount2() { return count2_; } + + private void initFields() { + } + public final boolean isInitialized() { + if (!hasCount1) return false; + if (!hasCount2) return false; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (hasCount1()) { + output.writeUInt32(1, getCount1()); + } + if (hasCount2()) { + output.writeUInt32(2, getCount2()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (hasCount1()) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(1, getCount1()); + } + if (hasCount2()) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, getCount2()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static 
se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder { + private se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter result; + + // Construct using se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter.newBuilder() + private Builder() {} + + private static Builder create() { + Builder builder = new Builder(); + builder.result = new se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter(); + return builder; + } + + protected se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter internalGetResult() { + return result; + } + + public Builder clear() { + if (result == null) { + throw new IllegalStateException( + "Cannot call clear() after build()."); + } + result = new se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter(); + return this; + } + + public Builder clone() { + return create().mergeFrom(result); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter.getDescriptor(); + } + + public se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter getDefaultInstanceForType() { + return se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter.getDefaultInstance(); + } + + public boolean isInitialized() { + return result.isInitialized(); + } + public se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter build() { + if (result != null && !isInitialized()) { + throw newUninitializedMessageException(result); + } + return buildPartial(); + } + + private se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + if (!isInitialized()) { + 
throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return buildPartial(); + } + + public se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter buildPartial() { + if (result == null) { + throw new IllegalStateException( + "build() has already been called on this Builder."); + } + se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter returnMe = result; + result = null; + return returnMe; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter) { + return mergeFrom((se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter other) { + if (other == se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter.getDefaultInstance()) return this; + if (other.hasCount1()) { + setCount1(other.getCount1()); + } + if (other.hasCount2()) { + setCount2(other.getCount2()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + return this; + } + break; + } + case 8: { + setCount1(input.readUInt32()); + break; + } + case 16: { + setCount2(input.readUInt32()); + break; + } + } + } + } + + + // required uint32 count1 = 1; + public boolean hasCount1() { + return result.hasCount1(); + } + public int getCount1() { + return result.getCount1(); + } + public Builder setCount1(int value) { + result.hasCount1 = true; + result.count1_ = value; + return this; + } + public Builder clearCount1() { + result.hasCount1 = false; + result.count1_ = 0; + return this; + } + + // required uint32 count2 = 2; + public boolean hasCount2() { + return result.hasCount2(); + } + public int getCount2() { + return result.getCount2(); + } + public Builder setCount2(int value) { + result.hasCount2 = true; + result.count2_ = value; + return this; + } + public Builder clearCount2() { + result.hasCount2 = false; + result.count2_ = 0; + return this; + } + + // @@protoc_insertion_point(builder_scope:se.scalablesolutions.akka.actor.DualCounter) + } + + static { + defaultInstance = new DualCounter(true); + se.scalablesolutions.akka.actor.ProtobufProtocol.internalForceInit(); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:se.scalablesolutions.akka.actor.DualCounter) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_se_scalablesolutions_akka_actor_Counter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
internal_static_se_scalablesolutions_akka_actor_Counter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_se_scalablesolutions_akka_actor_DualCounter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_se_scalablesolutions_akka_actor_DualCounter_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\026ProtobufProtocol.proto\022\037se.scalablesol" + + "utions.akka.actor\"8\n\014ProtobufPOJO\022\n\n\002id\030" + + "\001 \002(\004\022\014\n\004name\030\002 \002(\t\022\016\n\006status\030\003 \002(\010\"\030\n\007C" + + "ounter\022\r\n\005count\030\001 \002(\r\"-\n\013DualCounter\022\016\n\006" + + "count1\030\001 \002(\r\022\016\n\006count2\030\002 \002(\r" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_descriptor, + new java.lang.String[] { "Id", "Name", "Status", }, + se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO.class, + se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO.Builder.class); + internal_static_se_scalablesolutions_akka_actor_Counter_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_se_scalablesolutions_akka_actor_Counter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_se_scalablesolutions_akka_actor_Counter_descriptor, + new java.lang.String[] { "Count", }, + se.scalablesolutions.akka.actor.ProtobufProtocol.Counter.class, + se.scalablesolutions.akka.actor.ProtobufProtocol.Counter.Builder.class); + internal_static_se_scalablesolutions_akka_actor_DualCounter_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_se_scalablesolutions_akka_actor_DualCounter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_se_scalablesolutions_akka_actor_DualCounter_descriptor, + new java.lang.String[] { "Count1", "Count2", }, + se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter.class, + se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + public static void internalForceInit() {} + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOne.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOne.java new file mode 100644 index 0000000000..dd03a45d12 --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOne.java @@ -0,0 +1,6 @@ 
+package se.scalablesolutions.akka.actor; + +public interface RemoteTypedActorOne { + public String requestReply(String s) throws Exception; + public void oneWay() throws Exception; +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOneImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOneImpl.java new file mode 100644 index 0000000000..715e5366a4 --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOneImpl.java @@ -0,0 +1,29 @@ +package se.scalablesolutions.akka.actor.remote; + +import se.scalablesolutions.akka.actor.*; + +import java.util.concurrent.CountDownLatch; + +public class RemoteTypedActorOneImpl extends TypedActor implements RemoteTypedActorOne { + + public static CountDownLatch latch = new CountDownLatch(1); + + public String requestReply(String s) throws Exception { + if (s.equals("ping")) { + RemoteTypedActorLog.messageLog().put("ping"); + return "pong"; + } else if (s.equals("die")) { + throw new RuntimeException("Expected exception; to test fault-tolerance"); + } else return null; + } + + public void oneWay() throws Exception { + RemoteTypedActorLog.oneWayLog().put("oneway"); + } + + @Override + public void preRestart(Throwable e) { + try { RemoteTypedActorLog.messageLog().put(e.getMessage()); } catch(Exception ex) {} + latch.countDown(); + } +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwo.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwo.java new file mode 100644 index 0000000000..5fd289b8c2 --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwo.java @@ -0,0 +1,6 @@ +package se.scalablesolutions.akka.actor; + +public interface RemoteTypedActorTwo { + public String requestReply(String s) throws Exception; + public void oneWay() throws Exception; +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwoImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwoImpl.java new file mode 100644 index 0000000000..a5882fd4e6 --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwoImpl.java @@ -0,0 +1,29 @@ +package se.scalablesolutions.akka.actor.remote; + +import se.scalablesolutions.akka.actor.*; + +import java.util.concurrent.CountDownLatch; + +public class RemoteTypedActorTwoImpl extends TypedActor implements RemoteTypedActorTwo { + + public static CountDownLatch latch = new CountDownLatch(1); + + public String requestReply(String s) throws Exception { + if (s.equals("ping")) { + RemoteTypedActorLog.messageLog().put("ping"); + return "pong"; + } else if (s.equals("die")) { + throw new RuntimeException("Expected exception; to test fault-tolerance"); + } else return null; + } + + public void oneWay() throws Exception { + RemoteTypedActorLog.oneWayLog().put("oneway"); + } + + @Override + public void preRestart(Throwable e) { + try { RemoteTypedActorLog.messageLog().put(e.getMessage()); } catch(Exception ex) {} + latch.countDown(); + } +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java new file mode 100644 index 0000000000..5d06afdc9c --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java @@ -0,0 +1,8 @@ +package 
se.scalablesolutions.akka.actor; + +import java.util.concurrent.CountDownLatch; + +public interface SamplePojo { + public String greet(String s); + public String fail(); +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SamplePojoImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SamplePojoImpl.java new file mode 100644 index 0000000000..12985c72ce --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SamplePojoImpl.java @@ -0,0 +1,45 @@ +package se.scalablesolutions.akka.actor; + +import se.scalablesolutions.akka.actor.*; + +import java.util.concurrent.CountDownLatch; + +public class SamplePojoImpl extends TypedActor implements SamplePojo { + + public static CountDownLatch latch = new CountDownLatch(1); + + public static boolean _pre = false; + public static boolean _post = false; + public static boolean _down = false; + public static void reset() { + _pre = false; + _post = false; + _down = false; + } + + public String greet(String s) { + return "hello " + s; + } + + public String fail() { + throw new RuntimeException("expected"); + } + + @Override + public void preRestart(Throwable e) { + _pre = true; + latch.countDown(); + } + + @Override + public void postRestart(Throwable e) { + _post = true; + latch.countDown(); + } + + @Override + public void shutdown() { + _down = true; + latch.countDown(); + } +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java new file mode 100644 index 0000000000..d3a18abbd9 --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java @@ -0,0 +1,14 @@ +package se.scalablesolutions.akka.actor; + +import se.scalablesolutions.akka.dispatch.Future; +import se.scalablesolutions.akka.dispatch.CompletableFuture; +import se.scalablesolutions.akka.dispatch.Future; + +public interface SimpleJavaPojo { + public Object getSender(); + public Object getSenderFuture(); + public Future square(int value); + public void setName(String name); + public String getName(); + public void throwException(); +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCaller.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCaller.java new file mode 100644 index 0000000000..e35702846f --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCaller.java @@ -0,0 +1,9 @@ +package se.scalablesolutions.akka.actor; + +import se.scalablesolutions.akka.dispatch.CompletableFuture; + +public interface SimpleJavaPojoCaller { + public void setPojo(SimpleJavaPojo pojo); + public Object getSenderFromSimpleJavaPojo(); + public Object getSenderFutureFromSimpleJavaPojo(); +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCallerImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCallerImpl.java new file mode 100644 index 0000000000..760b69f8b9 --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCallerImpl.java @@ -0,0 +1,26 @@ +package se.scalablesolutions.akka.actor; + +import se.scalablesolutions.akka.actor.*; +import se.scalablesolutions.akka.dispatch.Future; + +public class SimpleJavaPojoCallerImpl extends TypedActor implements SimpleJavaPojoCaller { + + SimpleJavaPojo pojo; + + public void 
setPojo(SimpleJavaPojo pojo) { + this.pojo = pojo; + } + + public Object getSenderFromSimpleJavaPojo() { + Object sender = pojo.getSender(); + return sender; + } + + public Object getSenderFutureFromSimpleJavaPojo() { + return pojo.getSenderFuture(); + } + + public Future square(int value) { + return future(value * value); + } +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoImpl.java new file mode 100644 index 0000000000..c02d266ce8 --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoImpl.java @@ -0,0 +1,53 @@ +package se.scalablesolutions.akka.actor; + +import se.scalablesolutions.akka.actor.*; +import se.scalablesolutions.akka.dispatch.Future; +import se.scalablesolutions.akka.dispatch.CompletableFuture; + +public class SimpleJavaPojoImpl extends TypedActor implements SimpleJavaPojo { + + public static boolean _pre = false; + public static boolean _post = false; + public static boolean _down = false; + public static void reset() { + _pre = false; + _post = false; + _down = false; + } + + private String name; + + public Future square(int value) { + return future(value * value); + } + + public Object getSender() { + return getContext().getSender(); + } + + public CompletableFuture getSenderFuture() { + return getContext().getSenderFuture().get(); + } + + public void setName(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + @Override + public void preRestart(Throwable e) { + _pre = true; + } + + @Override + public void postRestart(Throwable e) { + _post = true; + } + + public void throwException() { + throw new RuntimeException(); + } +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActor.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActor.java new file mode 100644 index 0000000000..6e7c43745b --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActor.java @@ -0,0 +1,14 @@ +package se.scalablesolutions.akka.actor; + +public interface TransactionalTypedActor { + public String getMapState(String key); + public String getVectorState(); + public String getRefState(); + public void setMapState(String key, String msg); + public void setVectorState(String msg); + public void setRefState(String msg); + public void success(String key, String msg); + public void success(String key, String msg, NestedTransactionalTypedActor nested); + public String failure(String key, String msg, TypedActorFailer failer); + public String failure(String key, String msg, NestedTransactionalTypedActor nested, TypedActorFailer failer); +} diff --git a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemStateful.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActorImpl.java similarity index 62% rename from akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemStateful.java rename to akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActorImpl.java index fdd1fd5b93..9b32f5d329 100644 --- a/akka-active-object-test/src/test/java/se/scalablesolutions/akka/api/InMemStateful.java +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActorImpl.java @@ -1,20 +1,16 @@ -package se.scalablesolutions.akka.api; +package 
se.scalablesolutions.akka.actor; -import se.scalablesolutions.akka.actor.annotation.transactionrequired; -import se.scalablesolutions.akka.actor.annotation.prerestart; -import se.scalablesolutions.akka.actor.annotation.postrestart; -import se.scalablesolutions.akka.actor.annotation.inittransactionalstate; +import se.scalablesolutions.akka.actor.*; import se.scalablesolutions.akka.stm.*; -@transactionrequired -public class InMemStateful { +public class TransactionalTypedActorImpl extends TypedTransactor implements TransactionalTypedActor { private TransactionalMap mapState; private TransactionalVector vectorState; private Ref refState; private boolean isInitialized = false; - @inittransactionalstate - public void init() { + @Override + public void initTransactionalState() { if (!isInitialized) { mapState = new TransactionalMap(); vectorState = new TransactionalVector(); @@ -32,7 +28,7 @@ public class InMemStateful { } public String getRefState() { - return (String)refState.get().get(); + return (String)refState.get(); } public void setMapState(String key, String msg) { @@ -53,14 +49,14 @@ public class InMemStateful { refState.swap(msg); } - public void success(String key, String msg, InMemStatefulNested nested) { + public void success(String key, String msg, NestedTransactionalTypedActor nested) { mapState.put(key, msg); vectorState.add(msg); refState.swap(msg); nested.success(key, msg); } - public String failure(String key, String msg, InMemFailer failer) { + public String failure(String key, String msg, TypedActorFailer failer) { mapState.put(key, msg); vectorState.add(msg); refState.swap(msg); @@ -68,7 +64,7 @@ public class InMemStateful { return msg; } - public String failure(String key, String msg, InMemStatefulNested nested, InMemFailer failer) { + public String failure(String key, String msg, NestedTransactionalTypedActor nested, TypedActorFailer failer) { mapState.put(key, msg); vectorState.add(msg); refState.swap(msg); @@ -76,17 +72,13 @@ public class InMemStateful { return msg; } - public void thisMethodHangs(String key, String msg, InMemFailer failer) { - setMapState(key, msg); - } - - @prerestart - public void preRestart() { + @Override + public void preRestart(Throwable e) { System.out.println("################ PRE RESTART"); } - @postrestart - public void postRestart() { + @Override + public void postRestart(Throwable e) { System.out.println("################ POST RESTART"); } } diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailer.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailer.java new file mode 100644 index 0000000000..e0b1e72c33 --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailer.java @@ -0,0 +1,5 @@ +package se.scalablesolutions.akka.actor; + +public interface TypedActorFailer extends java.io.Serializable { + public int fail(); +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailerImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailerImpl.java new file mode 100644 index 0000000000..89a97330df --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailerImpl.java @@ -0,0 +1,9 @@ +package se.scalablesolutions.akka.actor; + +import se.scalablesolutions.akka.actor.*; + +public class TypedActorFailerImpl extends TypedActor implements TypedActorFailer { + public int fail() { + throw new RuntimeException("expected"); + } +} diff --git 
a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/Address.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/Address.java new file mode 100644 index 0000000000..cb3057929f --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/Address.java @@ -0,0 +1,13 @@ +package se.scalablesolutions.akka.stm; + +public class Address { + private String location; + + public Address(String location) { + this.location = location; + } + + @Override public String toString() { + return "Address(" + location + ")"; + } +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/CounterExample.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/CounterExample.java new file mode 100644 index 0000000000..57a9a07daa --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/CounterExample.java @@ -0,0 +1,26 @@ +package se.scalablesolutions.akka.stm; + +import se.scalablesolutions.akka.stm.Ref; +import se.scalablesolutions.akka.stm.local.Atomic; + +public class CounterExample { + final static Ref ref = new Ref(0); + + public static int counter() { + return new Atomic() { + public Integer atomically() { + int inc = ref.get() + 1; + ref.set(inc); + return inc; + } + }.execute(); + } + + public static void main(String[] args) { + System.out.println(); + System.out.println("Counter example"); + System.out.println(); + System.out.println("counter 1: " + counter()); + System.out.println("counter 2: " + counter()); + } +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/JavaStmTests.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/JavaStmTests.java new file mode 100644 index 0000000000..7204013808 --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/JavaStmTests.java @@ -0,0 +1,91 @@ +package se.scalablesolutions.akka.stm; + +import static org.junit.Assert.*; +import org.junit.Test; +import org.junit.Before; + +import se.scalablesolutions.akka.stm.*; +import se.scalablesolutions.akka.stm.local.Atomic; + +import org.multiverse.api.ThreadLocalTransaction; +import org.multiverse.api.TransactionConfiguration; +import org.multiverse.api.exceptions.ReadonlyException; + +public class JavaStmTests { + + private Ref ref; + + private int getRefValue() { + return new Atomic() { + public Integer atomically() { + return ref.get(); + } + }.execute(); + } + + public int increment() { + return new Atomic() { + public Integer atomically() { + int inc = ref.get() + 1; + ref.set(inc); + return inc; + } + }.execute(); + } + + @Before public void initialise() { + ref = new Ref(0); + } + + @Test public void incrementRef() { + assertEquals(0, getRefValue()); + increment(); + increment(); + increment(); + assertEquals(3, getRefValue()); + } + + @Test public void failSetRef() { + assertEquals(0, getRefValue()); + try { + new Atomic() { + public Object atomically() { + ref.set(3); + throw new RuntimeException(); + } + }.execute(); + } catch(RuntimeException e) {} + assertEquals(0, getRefValue()); + } + + @Test public void configureTransaction() { + TransactionFactory txFactory = new TransactionFactoryBuilder() + .setFamilyName("example") + .setReadonly(true) + .build(); + + // get transaction config from multiverse + TransactionConfiguration config = new Atomic(txFactory) { + public TransactionConfiguration atomically() { + ref.get(); + return ThreadLocalTransaction.getThreadLocalTransaction().getConfiguration(); + } + }.execute(); + + assertEquals("example", 
config.getFamilyName()); + assertEquals(true, config.isReadonly()); + } + + @Test(expected=ReadonlyException.class) public void failReadonlyTransaction() { + TransactionFactory txFactory = new TransactionFactoryBuilder() + .setFamilyName("example") + .setReadonly(true) + .build(); + + new Atomic(txFactory) { + public Object atomically() { + return ref.set(3); + } + }.execute(); + } +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/RefExample.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/RefExample.java new file mode 100644 index 0000000000..f590524fd7 --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/RefExample.java @@ -0,0 +1,36 @@ +package se.scalablesolutions.akka.stm; + +import se.scalablesolutions.akka.stm.Ref; +import se.scalablesolutions.akka.stm.local.Atomic; + +public class RefExample { + public static void main(String[] args) { + System.out.println(); + System.out.println("Ref example"); + System.out.println(); + + final Ref ref = new Ref(0); + + Integer value1 = new Atomic() { + public Integer atomically() { + return ref.get(); + } + }.execute(); + + System.out.println("value 1: " + value1); + + new Atomic() { + public Object atomically() { + return ref.set(5); + } + }.execute(); + + Integer value2 = new Atomic() { + public Integer atomically() { + return ref.get(); + } + }.execute(); + + System.out.println("value 2: " + value2); + } +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/StmExamples.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/StmExamples.java new file mode 100644 index 0000000000..a8526f2dd0 --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/StmExamples.java @@ -0,0 +1,18 @@ +package se.scalablesolutions.akka.stm; + +import se.scalablesolutions.akka.stm.Ref; +import se.scalablesolutions.akka.stm.local.Atomic; + +public class StmExamples { + public static void main(String[] args) { + System.out.println(); + System.out.println("STM examples"); + System.out.println(); + + CounterExample.main(args); + RefExample.main(args); + TransactionFactoryExample.main(args); + TransactionalMapExample.main(args); + TransactionalVectorExample.main(args); + } +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionFactoryExample.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionFactoryExample.java new file mode 100644 index 0000000000..00dd87b7c5 --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionFactoryExample.java @@ -0,0 +1,30 @@ +package se.scalablesolutions.akka.stm; + +import se.scalablesolutions.akka.stm.*; +import se.scalablesolutions.akka.stm.local.Atomic; + +import org.multiverse.api.ThreadLocalTransaction; +import org.multiverse.api.TransactionConfiguration; + +public class TransactionFactoryExample { + public static void main(String[] args) { + System.out.println(); + System.out.println("TransactionFactory example"); + System.out.println(); + + TransactionFactory txFactory = new TransactionFactoryBuilder() + .setFamilyName("example") + .setReadonly(true) + .build(); + + new Atomic(txFactory) { + public Object atomically() { + // check config has been passed to multiverse + TransactionConfiguration config = ThreadLocalTransaction.getThreadLocalTransaction().getConfiguration(); + System.out.println("family name: " + config.getFamilyName()); + System.out.println("readonly: " + config.isReadonly()); + return 
null; + } + }.execute(); + } +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionalMapExample.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionalMapExample.java new file mode 100644 index 0000000000..7c4940c7a5 --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionalMapExample.java @@ -0,0 +1,35 @@ +package se.scalablesolutions.akka.stm; + +import se.scalablesolutions.akka.stm.*; +import se.scalablesolutions.akka.stm.local.Atomic; + +public class TransactionalMapExample { + public static void main(String[] args) { + System.out.println(); + System.out.println("TransactionalMap example"); + System.out.println(); + + final TransactionalMap users = new TransactionalMap(); + + // fill users map (in a transaction) + new Atomic() { + public Object atomically() { + users.put("bill", new User("bill")); + users.put("mary", new User("mary")); + users.put("john", new User("john")); + return null; + } + }.execute(); + + System.out.println("users: " + users); + + // access users map (in a transaction) + User user = new Atomic() { + public User atomically() { + return users.get("bill").get(); + } + }.execute(); + + System.out.println("user: " + user); + } +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionalVectorExample.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionalVectorExample.java new file mode 100644 index 0000000000..7274848beb --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionalVectorExample.java @@ -0,0 +1,34 @@ +package se.scalablesolutions.akka.stm; + +import se.scalablesolutions.akka.stm.*; +import se.scalablesolutions.akka.stm.local.Atomic; + +public class TransactionalVectorExample { + public static void main(String[] args) { + System.out.println(); + System.out.println("TransactionalVector example"); + System.out.println(); + + final TransactionalVector
<Address> addresses = new TransactionalVector<Address>
(); + + // fill addresses vector (in a transaction) + new Atomic() { + public Object atomically() { + addresses.add(new Address("somewhere")); + addresses.add(new Address("somewhere else")); + return null; + } + }.execute(); + + System.out.println("addresses: " + addresses); + + // access addresses vector (in a transaction) + Address address = new Atomic<Address>
() { + public Address atomically() { + return addresses.get(0); + } + }.execute(); + + System.out.println("address: " + address); + } +} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/User.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/User.java new file mode 100644 index 0000000000..c9dc4b3723 --- /dev/null +++ b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/User.java @@ -0,0 +1,13 @@ +package se.scalablesolutions.akka.stm; + +public class User { + private String name; + + public User(String name) { + this.name = name; + } + + @Override public String toString() { + return "User(" + name + ")"; + } +} diff --git a/akka-core/src/main/resources/META-INF/aop.xml b/akka-typed-actors/src/test/resources/META-INF/aop.xml similarity index 100% rename from akka-core/src/main/resources/META-INF/aop.xml rename to akka-typed-actors/src/test/resources/META-INF/aop.xml diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/NestedTransactionalTypedActorSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/NestedTransactionalTypedActorSpec.scala new file mode 100644 index 0000000000..7338e8df41 --- /dev/null +++ b/akka-typed-actors/src/test/scala/actor/typed-actor/NestedTransactionalTypedActorSpec.scala @@ -0,0 +1,102 @@ + /** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import org.scalatest.Spec +import org.scalatest.Assertions +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.BeforeAndAfterAll +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith + +import se.scalablesolutions.akka.actor._ + +@RunWith(classOf[JUnitRunner]) +class NestedTransactionalTypedActorSpec extends + Spec with + ShouldMatchers with + BeforeAndAfterAll { + + private var messageLog = "" + + override def afterAll { + // ActorRegistry.shutdownAll + } + + describe("Declaratively nested supervised transactional in-memory TypedActor") { + + it("map should not rollback state for stateful server in case of success") { + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) + stateful.setMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init") // set init state + val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) + nested.setMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init") // set init state + stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state", nested) // transactionrequired + stateful.getMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess") should equal("new state") + nested.getMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess") should equal("new state") + } + + it("map should rollback state for stateful server in case of failure") { + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) + stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state + val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) + nested.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state + val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl]) + try { + 
stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer) + fail("should have thrown an exception") + } catch { case e => {} } + stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init") + nested.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init") + } + + it("vector should not rollback state for stateful server in case of success") { + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) + stateful.setVectorState("init") // set init state + val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) + nested.setVectorState("init") // set init state + stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state", nested) // transactionrequired + stateful.getVectorState should equal("new state") + nested.getVectorState should equal("new state") + } + + it("vector should rollback state for stateful server in case of failure") { + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) + stateful.setVectorState("init") // set init state + val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) + nested.setVectorState("init") // set init state + val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer) + fail("should have thrown an exception") + } catch { case e => {} } + stateful.getVectorState should equal("init") + nested.getVectorState should equal("init") + } + + it("ref should not rollback state for stateful server in case of success") { + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) + val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) + stateful.setRefState("init") // set init state + nested.setRefState("init") // set init state + stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state", nested) + stateful.getRefState should equal("new state") + nested.getRefState should equal("new state") + } + + it("ref should rollback state for stateful server in case of failure") { + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) + val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) + stateful.setRefState("init") // set init state + nested.setRefState("init") // set init state + val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer) + fail("should have thrown an exception") + } catch { case e => {} } + stateful.getRefState should equal("init") + nested.getRefState should equal("init") + } + } +} diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/RestartNestedTransactionalTypedActorSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/RestartNestedTransactionalTypedActorSpec.scala new file mode 100644 index 0000000000..1769a5c47b --- /dev/null +++ 
b/akka-typed-actors/src/test/scala/actor/typed-actor/RestartNestedTransactionalTypedActorSpec.scala @@ -0,0 +1,118 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import org.scalatest.Spec +import org.scalatest.Assertions +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.BeforeAndAfterAll +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith + +import se.scalablesolutions.akka.config.Config +import se.scalablesolutions.akka.config._ +import se.scalablesolutions.akka.config.TypedActorConfigurator +import se.scalablesolutions.akka.config.JavaConfig._ +import se.scalablesolutions.akka.actor._ + +@RunWith(classOf[JUnitRunner]) +class RestartNestedTransactionalTypedActorSpec extends + Spec with + ShouldMatchers with + BeforeAndAfterAll { + + private val conf = new TypedActorConfigurator + private var messageLog = "" + + override def beforeAll { + /* + Config.config + conf.configure( + new RestartStrategy(new AllForOne, 3, 5000, List(classOf[Exception]).toArray), + List( + new Component(classOf[TransactionalTypedActor], + new LifeCycle(new Permanent), + 10000), + new Component(classOf[NestedTransactionalTypedActor], + new LifeCycle(new Permanent), + 10000), + new Component(classOf[TypedActorFailer], + new LifeCycle(new Permanent), + 10000) + ).toArray).supervise + */ + } + + override def afterAll { + /* + conf.stop + ActorRegistry.shutdownAll + */ + } + + describe("Restart nested supervised transactional Typed Actor") { +/* + it("map should rollback state for stateful server in case of failure") { + val stateful = conf.getInstance(classOf[TransactionalTypedActor]) + stateful.init + stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state + + val nested = conf.getInstance(classOf[NestedTransactionalTypedActor]) + nested.init + nested.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state + + val failer = conf.getInstance(classOf[TypedActorFailer]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer) + + fail("should have thrown an exception") + } catch { case e => {} } + stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init") + + nested.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init") + } + + it("vector should rollback state for stateful server in case of failure") { + val stateful = conf.getInstance(classOf[TransactionalTypedActor]) + stateful.init + stateful.setVectorState("init") // set init state + + val nested = conf.getInstance(classOf[NestedTransactionalTypedActor]) + nested.init + nested.setVectorState("init") // set init state + + val failer = conf.getInstance(classOf[TypedActorFailer]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer) + + fail("should have thrown an exception") + } catch { case e => {} } + stateful.getVectorState should equal("init") + + nested.getVectorState should equal("init") + } + + it("ref should rollback state for stateful server in case of failure") { + val stateful = conf.getInstance(classOf[TransactionalTypedActor]) + stateful.init + val nested = conf.getInstance(classOf[NestedTransactionalTypedActor]) + nested.init + stateful.setRefState("init") // set init state + + nested.setRefState("init") // set init state + + val failer = 
conf.getInstance(classOf[TypedActorFailer]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer) + + fail("should have thrown an exception") + } catch { case e => {} } + stateful.getRefState should equal("init") + + nested.getRefState should equal("init") + } + */ + } +} diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/RestartTransactionalTypedActorSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/RestartTransactionalTypedActorSpec.scala new file mode 100644 index 0000000000..56b1e6ec5b --- /dev/null +++ b/akka-typed-actors/src/test/scala/actor/typed-actor/RestartTransactionalTypedActorSpec.scala @@ -0,0 +1,92 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import org.scalatest.Spec +import org.scalatest.Assertions +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.BeforeAndAfterAll +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith + +import se.scalablesolutions.akka.config.Config +import se.scalablesolutions.akka.config._ +import se.scalablesolutions.akka.config.TypedActorConfigurator +import se.scalablesolutions.akka.config.JavaConfig._ +import se.scalablesolutions.akka.actor._ + +@RunWith(classOf[JUnitRunner]) +class RestartTransactionalTypedActorSpec extends + Spec with + ShouldMatchers with + BeforeAndAfterAll { + + private val conf = new TypedActorConfigurator + private var messageLog = "" + + def before { + Config.config + conf.configure( + new RestartStrategy(new AllForOne, 3, 5000, List(classOf[Exception]).toArray), + List( + new Component( + classOf[TransactionalTypedActor], + new LifeCycle(new Temporary), + 10000), + new Component( + classOf[TypedActorFailer], + new LifeCycle(new Temporary), + 10000) + ).toArray).supervise + } + + def after { + conf.stop + ActorRegistry.shutdownAll + } + + describe("Restart supervised transactional Typed Actor ") { +/* + it("map should rollback state for stateful server in case of failure") { + before + val stateful = conf.getInstance(classOf[TransactionalTypedActor]) + stateful.init + stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") + val failer = conf.getInstance(classOf[TypedActorFailer]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) + fail("should have thrown an exception") + } catch { case e => {} } + stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init") + after + } + + it("vector should rollback state for stateful server in case of failure") { + before + val stateful = conf.getInstance(classOf[TransactionalTypedActor]) + stateful.init + stateful.setVectorState("init") // set init state + val failer = conf.getInstance(classOf[TypedActorFailer]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) + fail("should have thrown an exception") + } catch { case e => {} } + stateful.getVectorState should equal("init") + after + } + + it("ref should rollback state for stateful server in case of failure") { + val stateful = conf.getInstance(classOf[TransactionalTypedActor]) + stateful.init + stateful.setRefState("init") // set init state + val failer = conf.getInstance(classOf[TypedActorFailer]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) + fail("should have thrown an exception") + } catch { case e => 
{} } + stateful.getRefState should equal("init") + } +*/ } +} diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/TransactionalTypedActorSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/TransactionalTypedActorSpec.scala new file mode 100644 index 0000000000..b55f52c875 --- /dev/null +++ b/akka-typed-actors/src/test/scala/actor/typed-actor/TransactionalTypedActorSpec.scala @@ -0,0 +1,83 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import org.scalatest.Spec +import org.scalatest.Assertions +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.BeforeAndAfterAll +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith + +import se.scalablesolutions.akka.actor._ + +@RunWith(classOf[JUnitRunner]) +class TransactionalTypedActorSpec extends + Spec with + ShouldMatchers with + BeforeAndAfterAll { + + private var messageLog = "" + + override def afterAll { +// ActorRegistry.shutdownAll + } + + describe("Declaratively supervised transactional in-memory Typed Actor ") { + it("map should not rollback state for stateful server in case of success") { + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) + stateful.setMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init") + stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") + stateful.getMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess") should equal("new state") + } + + it("map should rollback state for stateful server in case of failure") { + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) + stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") + val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) + fail("should have thrown an exception") + } catch { case e => {} } + stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init") + } + + it("vector should not rollback state for stateful server in case of success") { + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) + stateful.setVectorState("init") // set init state + stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") + stateful.getVectorState should equal("new state") + } + + it("vector should rollback state for stateful server in case of failure") { + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) + stateful.setVectorState("init") // set init state + val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) + fail("should have thrown an exception") + } catch { case e => {} } + stateful.getVectorState should equal("init") + } + + it("ref should not rollback state for stateful server in case of success") { + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) + stateful.setRefState("init") // set init state + stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") + 
stateful.getRefState should equal("new state") + } + + it("ref should rollback state for stateful server in case of failure") { + val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) + stateful.setRefState("init") // set init state + val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl]) + try { + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) + fail("should have thrown an exception") + } catch { case e => {} } + stateful.getRefState should equal("init") + } + } +} diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorContextSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorContextSpec.scala new file mode 100644 index 0000000000..adc0879c84 --- /dev/null +++ b/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorContextSpec.scala @@ -0,0 +1,38 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import org.scalatest.Spec +import org.scalatest.Assertions +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.BeforeAndAfterAll +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith + +import se.scalablesolutions.akka.dispatch.DefaultCompletableFuture; + +@RunWith(classOf[JUnitRunner]) +class TypedActorContextSpec extends + Spec with + ShouldMatchers with + BeforeAndAfterAll { + + describe("TypedActorContext") { + it("context.sender should return the sender TypedActor reference") { + val pojo = TypedActor.newInstance(classOf[SimpleJavaPojo], classOf[SimpleJavaPojoImpl]) + val pojoCaller = TypedActor.newInstance(classOf[SimpleJavaPojoCaller], classOf[SimpleJavaPojoCallerImpl]) + pojoCaller.setPojo(pojo) + pojoCaller.getSenderFromSimpleJavaPojo.isInstanceOf[Option[_]] should equal (true) + pojoCaller.getSenderFromSimpleJavaPojo.asInstanceOf[Option[_]].isDefined should equal (true) + pojoCaller.getSenderFromSimpleJavaPojo.asInstanceOf[Option[_]].get should equal (pojoCaller) + } + it("context.senderFuture should return the senderFuture TypedActor reference") { + val pojo = TypedActor.newInstance(classOf[SimpleJavaPojo], classOf[SimpleJavaPojoImpl]) + val pojoCaller = TypedActor.newInstance(classOf[SimpleJavaPojoCaller], classOf[SimpleJavaPojoCallerImpl]) + pojoCaller.setPojo(pojo) + pojoCaller.getSenderFutureFromSimpleJavaPojo.getClass.getName should equal (classOf[DefaultCompletableFuture[_]].getName) + } + } +} diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorGuiceConfiguratorSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorGuiceConfiguratorSpec.scala new file mode 100644 index 0000000000..d076ec52cf --- /dev/null +++ b/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorGuiceConfiguratorSpec.scala @@ -0,0 +1,131 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import com.google.inject.AbstractModule +import com.google.inject.Scopes + +import org.scalatest.Spec +import org.scalatest.Assertions +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.BeforeAndAfterAll +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith + +import se.scalablesolutions.akka.config.Config +import se.scalablesolutions.akka.config.TypedActorConfigurator +import se.scalablesolutions.akka.config.JavaConfig._ +import se.scalablesolutions.akka.dispatch._ +import 
se.scalablesolutions.akka.dispatch.FutureTimeoutException + +@RunWith(classOf[JUnitRunner]) +class TypedActorGuiceConfiguratorSpec extends + Spec with + ShouldMatchers with + BeforeAndAfterAll { + + private val conf = new TypedActorConfigurator + private var messageLog = "" + + override def beforeAll { + Config.config + val dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher("test") + + conf.addExternalGuiceModule(new AbstractModule { + def configure = bind(classOf[Ext]).to(classOf[ExtImpl]).in(Scopes.SINGLETON) + }).configure( + new RestartStrategy(new AllForOne, 3, 5000, List(classOf[Exception]).toArray), + List( + new Component( + classOf[Foo], + classOf[FooImpl], + new LifeCycle(new Permanent), + 1000, + dispatcher), + new Component( + classOf[Bar], + classOf[BarImpl], + new LifeCycle(new Permanent), + 1000, + dispatcher) + ).toArray).inject.supervise + + } + + override def afterAll = conf.stop + + describe("TypedActorGuiceConfigurator") { +/* + it("should inject typed actor using guice") { + messageLog = "" + val foo = conf.getInstance(classOf[Foo]) + val bar = conf.getInstance(classOf[Bar]) + bar should equal(foo.getBar) + } + + it("should inject external dependency using guice") { + messageLog = "" + val bar = conf.getInstance(classOf[Bar]) + val ext = conf.getExternalDependency(classOf[Ext]) + ext.toString should equal(bar.getExt.toString) + } + + it("should lookup non-supervised instance") { + try { + val str = conf.getInstance(classOf[String]) + fail("exception should have been thrown") + } catch { + case e: Exception => + classOf[IllegalStateException] should equal(e.getClass) + } + } + + it("should be able to invoke typed actor") { + messageLog = "" + val foo = conf.getInstance(classOf[Foo]) + messageLog += foo.foo("foo ") + foo.bar("bar ") + messageLog += "before_bar " + Thread.sleep(500) + messageLog should equal("foo return_foo before_bar ") + } + + it("should be able to invoke typed actor's invocation") { + messageLog = "" + val foo = conf.getInstance(classOf[Foo]) + val bar = conf.getInstance(classOf[Bar]) + messageLog += foo.foo("foo ") + foo.bar("bar ") + messageLog += "before_bar " + Thread.sleep(500) + messageLog should equal("foo return_foo before_bar ") + } + + it("should throw FutureTimeoutException on time-out") { + messageLog = "" + val foo = conf.getInstance(classOf[Foo]) + try { + foo.longRunning + fail("exception should have been thrown") + } catch { + case e: FutureTimeoutException => + classOf[FutureTimeoutException] should equal(e.getClass) + } + } + + it("should propagate exception") { + messageLog = "" + val foo = conf.getInstance(classOf[Foo]) + try { + foo.throwsException + fail("exception should have been thrown") + } catch { + case e: RuntimeException => + classOf[RuntimeException] should equal(e.getClass) + } + } + */ + } +} diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorLifecycleSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorLifecycleSpec.scala new file mode 100644 index 0000000000..10fc40493b --- /dev/null +++ b/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorLifecycleSpec.scala @@ -0,0 +1,169 @@ +package se.scalablesolutions.akka.actor + +import org.junit.runner.RunWith +import org.scalatest.{BeforeAndAfterAll, Spec} +import org.scalatest.junit.JUnitRunner +import org.scalatest.matchers.ShouldMatchers + +import se.scalablesolutions.akka.actor.TypedActor._ + +import se.scalablesolutions.akka.config.{OneForOneStrategy, TypedActorConfigurator} +import 
se.scalablesolutions.akka.config.JavaConfig._ + +import java.util.concurrent.CountDownLatch + +/** + * @author Martin Krasser + */ +@RunWith(classOf[JUnitRunner]) +class TypedActorLifecycleSpec extends Spec with ShouldMatchers with BeforeAndAfterAll { + var conf1: TypedActorConfigurator = _ + var conf2: TypedActorConfigurator = _ + + override protected def beforeAll() = { + val strategy = new RestartStrategy(new AllForOne(), 3, 1000, Array(classOf[Exception])) + val comp3 = new Component(classOf[SamplePojo], classOf[SamplePojoImpl], new LifeCycle(new Permanent()), 1000) + val comp4 = new Component(classOf[SamplePojo], classOf[SamplePojoImpl], new LifeCycle(new Temporary()), 1000) + conf1 = new TypedActorConfigurator().configure(strategy, Array(comp3)).supervise + conf2 = new TypedActorConfigurator().configure(strategy, Array(comp4)).supervise + } + + override protected def afterAll() = { + conf1.stop + conf2.stop + } + + describe("TypedActor lifecycle management") { + it("should restart supervised, non-annotated typed actor on failure") { + SamplePojoImpl.reset + val obj = conf1.getInstance[SamplePojo](classOf[SamplePojo]) + val cdl = new CountDownLatch(2) + SamplePojoImpl.latch = cdl + assert(AspectInitRegistry.initFor(obj) ne null) + try { + obj.fail + fail("expected exception not thrown") + } catch { + case e: RuntimeException => { + cdl.await + assert(SamplePojoImpl._pre) + assert(SamplePojoImpl._post) + assert(!SamplePojoImpl._down) +// assert(AspectInitRegistry.initFor(obj) ne null) + } + } + } + + it("should shutdown supervised, non-annotated typed actor on failure") { + SamplePojoImpl.reset + val obj = conf2.getInstance[SamplePojo](classOf[SamplePojo]) + val cdl = new CountDownLatch(1) + SamplePojoImpl.latch = cdl + assert(AspectInitRegistry.initFor(obj) ne null) + try { + obj.fail + fail("expected exception not thrown") + } catch { + case e: RuntimeException => { + cdl.await + assert(!SamplePojoImpl._pre) + assert(!SamplePojoImpl._post) + assert(SamplePojoImpl._down) + // assert(AspectInitRegistry.initFor(obj) eq null) + } + } + } + + it("should shutdown non-supervised, non-initialized typed actor on TypedActor.stop") { + SamplePojoImpl.reset + val obj = TypedActor.newInstance(classOf[SamplePojo], classOf[SamplePojoImpl]) + TypedActor.stop(obj) + assert(!SamplePojoImpl._pre) + assert(!SamplePojoImpl._post) + assert(SamplePojoImpl._down) + } + + it("both preRestart and postRestart methods should be invoked when an actor is restarted") { + SamplePojoImpl.reset + val pojo = TypedActor.newInstance(classOf[SimpleJavaPojo], classOf[SimpleJavaPojoImpl]) + val supervisor = TypedActor.newInstance(classOf[SimpleJavaPojo], classOf[SimpleJavaPojoImpl]) + link(supervisor, pojo, new OneForOneStrategy(3, 2000), Array(classOf[Throwable])) + pojo.throwException + Thread.sleep(500) + SimpleJavaPojoImpl._pre should be(true) + SimpleJavaPojoImpl._post should be(true) + } + + /* + it("should shutdown non-supervised, annotated typed actor on TypedActor.stop") { + val obj = TypedActor.newInstance(classOf[SamplePojoAnnotated]) + assert(AspectInitRegistry.initFor(obj) ne null) + assert("hello akka" === obj.greet("akka")) + TypedActor.stop(obj) + assert(AspectInitRegistry.initFor(obj) eq null) + assert(!obj.pre) + assert(!obj.post) + assert(obj.down) + try { + obj.greet("akka") + fail("access to stopped typed actor") + } catch { + case e: Exception => {} + } + } + + it("should shutdown non-supervised, annotated typed actor on ActorRegistry.shutdownAll") { + val obj = 
TypedActor.newInstance(classOf[SamplePojoAnnotated]) + assert(AspectInitRegistry.initFor(obj) ne null) + assert("hello akka" === obj.greet("akka")) + ActorRegistry.shutdownAll + assert(AspectInitRegistry.initFor(obj) eq null) + assert(!obj.pre) + assert(!obj.post) + assert(obj.down) + try { + obj.greet("akka") + fail("access to stopped typed actor") + } catch { + case e: Exception => { } + } + } + + it("should restart supervised, annotated typed actor on failure") { + val obj = conf1.getInstance[SamplePojoAnnotated](classOf[SamplePojoAnnotated]) + val cdl = obj.newCountdownLatch(2) + assert(AspectInitRegistry.initFor(obj) ne null) + try { + obj.fail + fail("expected exception not thrown") + } catch { + case e: RuntimeException => { + cdl.await + assert(obj.pre) + assert(obj.post) + assert(!obj.down) + assert(AspectInitRegistry.initFor(obj) ne null) + } + } + } + + it("should shutdown supervised, annotated typed actor on failure") { + val obj = conf2.getInstance[SamplePojoAnnotated](classOf[SamplePojoAnnotated]) + val cdl = obj.newCountdownLatch(1) + assert(AspectInitRegistry.initFor(obj) ne null) + try { + obj.fail + fail("expected exception not thrown") + } catch { + case e: RuntimeException => { + cdl.await + assert(!obj.pre) + assert(!obj.post) + assert(obj.down) + assert(AspectInitRegistry.initFor(obj) eq null) + } + } + } + */ + } +} diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorSpec.scala new file mode 100644 index 0000000000..7de0a8f5df --- /dev/null +++ b/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorSpec.scala @@ -0,0 +1,31 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import org.scalatest.Spec +import org.scalatest.Assertions +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.BeforeAndAfterAll +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith + +import se.scalablesolutions.akka.dispatch.DefaultCompletableFuture; + +@RunWith(classOf[JUnitRunner]) +class TypedActorSpec extends + Spec with + ShouldMatchers with + BeforeAndAfterAll { + + describe("TypedActor") { + it("should resolve Future return from method defined to return a Future") { + val pojo = TypedActor.newInstance(classOf[SimpleJavaPojo], classOf[SimpleJavaPojoImpl]) + val future = pojo.square(10) + future.await + future.result.isDefined should equal (true) + future.result.get should equal (100) + } + } +} diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorUtilFunctionsSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorUtilFunctionsSpec.scala new file mode 100644 index 0000000000..48424f3c17 --- /dev/null +++ b/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorUtilFunctionsSpec.scala @@ -0,0 +1,23 @@ +package se.scalablesolutions.akka.actor + +import org.scalatest.Suite +import org.junit.runner.RunWith +import org.scalatest.junit.JUnitRunner +import org.scalatest.matchers.MustMatchers +import org.junit.{Before, After, Test} +import java.util.concurrent.{ CountDownLatch, TimeUnit } + +@RunWith(classOf[JUnitRunner]) +class ActorObjectUtilFunctionsSpec extends junit.framework.TestCase with Suite with MustMatchers { + import Actor._ + @Test def testSpawn = { + val latch = new CountDownLatch(1) + + spawn { + latch.countDown + } + + val done = latch.await(10,TimeUnit.SECONDS) + done must be (true) + } +} diff --git a/project/build/AkkaProject.scala 
b/project/build/AkkaProject.scala index a06e5ec3d8..e6c243c201 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -216,17 +216,19 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // Subprojects // ------------------------------------------------------------------------------------------------------------------- - lazy val akka_core = project("akka-core", "akka-core", new AkkaCoreProject(_)) - lazy val akka_amqp = project("akka-amqp", "akka-amqp", new AkkaAMQPProject(_), akka_core) - lazy val akka_http = project("akka-http", "akka-http", new AkkaHttpProject(_), akka_core, akka_camel) - lazy val akka_camel = project("akka-camel", "akka-camel", new AkkaCamelProject(_), akka_core) - lazy val akka_persistence = project("akka-persistence", "akka-persistence", new AkkaPersistenceParentProject(_)) - lazy val akka_spring = project("akka-spring", "akka-spring", new AkkaSpringProject(_), akka_core, akka_camel) - lazy val akka_jta = project("akka-jta", "akka-jta", new AkkaJTAProject(_), akka_core) - lazy val akka_kernel = project("akka-kernel", "akka-kernel", new AkkaKernelProject(_), - akka_core, akka_http, akka_spring, akka_camel, akka_persistence, akka_amqp) - lazy val akka_osgi = project("akka-osgi", "akka-osgi", new AkkaOSGiParentProject(_)) - lazy val akka_samples = project("akka-samples", "akka-samples", new AkkaSamplesParentProject(_)) + lazy val akka_actors = project("akka-actors", "akka-actors", new AkkaCoreProject(_)) + lazy val akka_typed_actors = project("akka-typed-actors", "akka-typed-actors", new AkkaCoreProject(_), akka_actors) + lazy val akka_core = project("akka-core", "akka-core", new AkkaCoreProject(_), akka_typed_actors) + lazy val akka_amqp = project("akka-amqp", "akka-amqp", new AkkaAMQPProject(_), akka_core) + lazy val akka_http = project("akka-http", "akka-http", new AkkaHttpProject(_), akka_core, akka_camel) + lazy val akka_camel = project("akka-camel", "akka-camel", new AkkaCamelProject(_), akka_core) + lazy val akka_persistence = project("akka-persistence", "akka-persistence", new AkkaPersistenceParentProject(_)) + lazy val akka_spring = project("akka-spring", "akka-spring", new AkkaSpringProject(_), akka_core, akka_camel) + lazy val akka_jta = project("akka-jta", "akka-jta", new AkkaJTAProject(_), akka_core) + lazy val akka_kernel = project("akka-kernel", "akka-kernel", new AkkaKernelProject(_), + akka_core, akka_http, akka_spring, akka_camel, akka_persistence, akka_amqp) + lazy val akka_osgi = project("akka-osgi", "akka-osgi", new AkkaOSGiParentProject(_)) + lazy val akka_samples = project("akka-samples", "akka-samples", new AkkaSamplesParentProject(_)) // ------------------------------------------------------------------------------------------------------------------- // Miscellaneous @@ -307,53 +309,71 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { - // publish to local mvn - import Process._ - lazy val publishLocalMvn = runMvnInstall - def runMvnInstall = task { - for (absPath <- akkaArtifacts.getPaths) { - val artifactRE = """(.*)/dist/(.*)-(.*).jar""".r - val artifactRE(path, artifactId, artifactVersion) = absPath - val command = "mvn install:install-file" + - " -Dfile=" + absPath + - " -DgroupId=se.scalablesolutions.akka" + - " -DartifactId=" + artifactId + - " -Dversion=" + version + - " -Dpackaging=jar -DgeneratePom=true" - command ! 
log - } - None - } dependsOn(dist) describedAs("Run mvn install for artifacts in dist.") + // publish to local mvn + import Process._ + lazy val publishLocalMvn = runMvnInstall + def runMvnInstall = task { + for (absPath <- akkaArtifacts.getPaths) { + val artifactRE = """(.*)/dist/(.*)-(.*).jar""".r + val artifactRE(path, artifactId, artifactVersion) = absPath + val command = "mvn install:install-file" + + " -Dfile=" + absPath + + " -DgroupId=se.scalablesolutions.akka" + + " -DartifactId=" + artifactId + + " -Dversion=" + version + + " -Dpackaging=jar -DgeneratePom=true" + command ! log + } + None + } dependsOn(dist) describedAs("Run mvn install for artifacts in dist.") + + // ------------------------------------------------------------------------------------------------------------------- + // akka-actors subproject + // ------------------------------------------------------------------------------------------------------------------- + + class AkkaActorsProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { + val configgy = Dependencies.configgy + val hawtdispatch = Dependencies.hawtdispatch + val multiverse = Dependencies.multiverse + val jsr166x = Dependencies.jsr166x + val slf4j = Dependencies.slf4j + val logback = Dependencies.logback + val logback_core = Dependencies.logback_core + + // testing + val junit = Dependencies.junit + val scalatest = Dependencies.scalatest + } + + class AkkaTypedActorsProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { + val aopalliance = Dependencies.aopalliance + val werkz = Dependencies.werkz + val werkz_core = Dependencies.werkz_core + + // testing + val junit = Dependencies.junit + val scalatest = Dependencies.scalatest + } // ------------------------------------------------------------------------------------------------------------------- // akka-core subproject // ------------------------------------------------------------------------------------------------------------------- class AkkaCoreProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val aopalliance = Dependencies.aopalliance val commons_codec = Dependencies.commons_codec val commons_io = Dependencies.commons_io - val configgy = Dependencies.configgy val dispatch_http = Dependencies.dispatch_http val dispatch_json = Dependencies.dispatch_json val guicey = Dependencies.guicey val h2_lzf = Dependencies.h2_lzf - val hawtdispatch = Dependencies.hawtdispatch val jackson = Dependencies.jackson val jackson_core = Dependencies.jackson_core val jgroups = Dependencies.jgroups - val jsr166x = Dependencies.jsr166x val jta_1_1 = Dependencies.jta_1_1 - val multiverse = Dependencies.multiverse val netty = Dependencies.netty val protobuf = Dependencies.protobuf val sbinary = Dependencies.sbinary val sjson = Dependencies.sjson - val werkz = Dependencies.werkz - val werkz_core = Dependencies.werkz_core - val slf4j = Dependencies.slf4j - val logback = Dependencies.logback - val logback_core = Dependencies.logback_core // testing val junit = Dependencies.junit From 35cc621863ff5ec139e1f98c12e468f67ceb469a Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Fri, 27 Aug 2010 15:58:51 +0200 Subject: [PATCH 3/8] Adding a guard to dispatcher_= in ActorRef --- akka-core/src/main/scala/actor/ActorRef.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/akka-core/src/main/scala/actor/ActorRef.scala b/akka-core/src/main/scala/actor/ActorRef.scala index 0cf81083f4..dbb72b5cbb 100644 --- a/akka-core/src/main/scala/actor/ActorRef.scala +++ 
b/akka-core/src/main/scala/actor/ActorRef.scala @@ -741,7 +741,7 @@ class LocalActorRef private[akka]( /** * Sets the dispatcher for this actor. Needs to be invoked before the actor is started. */ - def dispatcher_=(md: MessageDispatcher): Unit = { + def dispatcher_=(md: MessageDispatcher): Unit = guard.withGuard { if (!isRunning || isBeingRestarted) _dispatcher = md else throw new ActorInitializationException( "Can not swap dispatcher for " + toString + " after it has been started") From a23159fecebdef9d586fa0b96dacea01170a2f3d Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Fri, 27 Aug 2010 16:20:21 +0200 Subject: [PATCH 4/8] Make sure dispatcher isn't changed on actor restart --- akka-core/src/main/scala/actor/ActorRef.scala | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/akka-core/src/main/scala/actor/ActorRef.scala b/akka-core/src/main/scala/actor/ActorRef.scala index dbb72b5cbb..1f025bb3d1 100644 --- a/akka-core/src/main/scala/actor/ActorRef.scala +++ b/akka-core/src/main/scala/actor/ActorRef.scala @@ -742,9 +742,11 @@ class LocalActorRef private[akka]( * Sets the dispatcher for this actor. Needs to be invoked before the actor is started. */ def dispatcher_=(md: MessageDispatcher): Unit = guard.withGuard { - if (!isRunning || isBeingRestarted) _dispatcher = md - else throw new ActorInitializationException( + if (!isBeingRestarted) { + if (!isRunning) _dispatcher = md + else throw new ActorInitializationException( "Can not swap dispatcher for " + toString + " after it has been started") + } } /** @@ -1141,7 +1143,6 @@ class LocalActorRef private[akka]( freshActor.initTransactionalState actorInstance.set(freshActor) if (failedActor.isInstanceOf[TypedActor]) failedActor.asInstanceOf[TypedActor].swapInstanceInProxy(freshActor) - if (dispatcher.isShutdown) dispatcher.start Actor.log.debug("Invoking 'postRestart' for new actor instance [%s].", id) freshActor.postRestart(reason) } From 76097ee10a5bb576a7a3040e1fe41f421b3db58d Mon Sep 17 00:00:00 2001 From: Martin Krasser Date: Fri, 27 Aug 2010 21:27:37 +0200 Subject: [PATCH 5/8] remove logback.xml from akka-core jar and exclude logback-test.xml from distribution. 
--- akka-core/src/main/resources/logback.xml | 31 ------------------- akka-core/src/test/resources/logback-test.xml | 21 ------------- config/logback.xml | 2 +- project/build/AkkaProject.scala | 8 +++-- 4 files changed, 6 insertions(+), 56 deletions(-) delete mode 100644 akka-core/src/main/resources/logback.xml delete mode 100644 akka-core/src/test/resources/logback-test.xml diff --git a/akka-core/src/main/resources/logback.xml b/akka-core/src/main/resources/logback.xml deleted file mode 100644 index 4635396601..0000000000 --- a/akka-core/src/main/resources/logback.xml +++ /dev/null @@ -1,31 +0,0 @@ - - - - - - - - - - - - - - [%4p] [%d{ISO8601}] [%t] %c{1}: %m%n - - - - ./logs/akka.log - - [%4p] [%d{ISO8601}] [%t] %c{1}: %m%n - - - ./logs/akka.log.%d{yyyy-MM-dd-HH} - - - - - - - - diff --git a/akka-core/src/test/resources/logback-test.xml b/akka-core/src/test/resources/logback-test.xml deleted file mode 100644 index 78eae40ec4..0000000000 --- a/akka-core/src/test/resources/logback-test.xml +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - - - - - - - - [%4p] [%d{ISO8601}] [%t] %c{1}: %m%n - - - - - - - diff --git a/config/logback.xml b/config/logback.xml index 1ace0bfd8f..3e6ba75548 100644 --- a/config/logback.xml +++ b/config/logback.xml @@ -22,7 +22,7 @@ ./logs/akka.log.%d{yyyy-MM-dd-HH} - + diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index 4503357db6..0d0a9e00cb 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -250,6 +250,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { .filter(!_.getName.contains("scala-library")) .map("lib_managed/scala_%s/compile/".format(buildScalaVersion) + _.getName) .mkString(" ") + + " config/" + " scala-library.jar" + " dist/akka-core_%s-%s.jar".format(buildScalaVersion, version) + " dist/akka-http_%s-%s.jar".format(buildScalaVersion, version) + @@ -271,10 +272,9 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { } override def mainResources = super.mainResources +++ - descendents(info.projectPath / "config", "*") --- - (super.mainResources ** "logback-test.xml") + (info.projectPath / "config").descendentsExcept("*", "logback-test.xml") - override def testResources = super.testResources --- (super.testResources ** "logback-test.xml") + override def runClasspath = super.runClasspath +++ "config" // ------------------------------------------------------------ // publishing @@ -730,6 +730,8 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { // ------------------------------------------------------------ class AkkaDefaultProject(info: ProjectInfo, val deployPath: Path) extends DefaultProject(info) with DeployProject with OSGiProject { + override def runClasspath = super.runClasspath +++ (AkkaParentProject.this.info.projectPath / "config") + override def testClasspath = super.testClasspath +++ (AkkaParentProject.this.info.projectPath / "config") override def packageDocsJar = this.defaultJarPath("-docs.jar") override def packageSrcJar = this.defaultJarPath("-sources.jar") } From 7586fcf26d19e0679fb5ee3c63ebd63081640d00 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jonas=20Bon=C3=A9r?= Date: Sat, 28 Aug 2010 16:48:27 +0200 Subject: [PATCH 6/8] Completed refactoring into lightweight modules akka-actor akka-typed-actor and akka-remote --- .../src/main/resources/logback.xml | 0 .../src/main/scala/actor/Actor.scala | 18 +- .../src/main/scala/actor/ActorRef.scala | 70 +- .../src/main/scala/actor/ActorRegistry.scala | 7 +- 
.../src/main/scala/actor/Agent.scala | 0 .../actor/BootableActorLoaderService.scala | 101 ++ .../src/main/scala/actor/FSM.scala | 9 +- .../src/main/scala/actor/Implicits.scala | 0 .../src/main/scala/actor/Scheduler.scala | 1 + .../src/main/scala/actor/Supervisor.scala | 4 +- .../src/main/scala/actor/UntypedActor.scala | 0 .../src/main/scala/config/Config.scala | 0 .../src/main/scala/config/Configuration.scala | 0 .../src/main/scala/config/Configurator.scala | 0 .../main/scala/config/SupervisionConfig.scala | 0 .../scala/dataflow/DataFlowVariable.scala | 0 ...actReactorBasedEventDrivenDispatcher.scala | 0 .../src/main/scala/dispatch/Dispatchers.scala | 0 .../ExecutorBasedEventDrivenDispatcher.scala | 0 ...sedEventDrivenWorkStealingDispatcher.scala | 0 .../src/main/scala/dispatch/Future.scala | 0 .../main/scala/dispatch/HawtDispatcher.scala | 0 .../main/scala/dispatch/MessageHandling.scala | 0 ...sedSingleThreadEventDrivenDispatcher.scala | 0 ...BasedThreadPoolEventDrivenDispatcher.scala | 0 .../dispatch/ThreadBasedDispatcher.scala | 0 .../scala/dispatch/ThreadPoolBuilder.scala | 0 .../src/main/scala/routing/Iterators.scala | 0 .../src/main/scala/routing/Listeners.scala | 0 .../src/main/scala/routing/Routers.scala | 0 .../src/main/scala/routing/Routing.scala | 0 .../src/main/scala/stm/JTA.scala | 0 .../src/main/scala/stm/Ref.scala | 0 .../src/main/scala/stm/Transaction.scala | 0 .../main/scala/stm/TransactionFactory.scala | 0 .../scala/stm/TransactionFactoryBuilder.scala | 0 .../scala/stm/TransactionManagement.scala | 0 .../src/main/scala/stm/TransactionalMap.scala | 0 .../main/scala/stm/TransactionalVector.scala | 0 .../src/main/scala/stm/global/Atomic.scala | 0 .../src/main/scala/stm/global/GlobalStm.scala | 0 .../src/main/scala/stm/global/package.scala | 0 .../src/main/scala/stm/local/Atomic.scala | 0 .../src/main/scala/stm/local/LocalStm.scala | 0 .../src/main/scala/stm/local/package.scala | 0 .../src/main/scala/stm/transactional.scala | 0 .../src/main/scala/util/AkkaException.scala | 0 .../src/main/scala/util/Bootable.scala | 0 .../src/main/scala/util/Duration.scala | 0 .../src/main/scala/util/HashCode.scala | 0 .../src/main/scala/util/Helpers.scala | 0 .../main/scala/util/ListenerManagement.scala | 0 .../src/main/scala/util/LockUtil.scala | 0 .../src/main/scala/util/Logging.scala | 0 .../main/scala/util/ReflectiveAccess.scala | 193 +++ .../src/main/scala/util/Uuid.scala | 0 .../scalablesolutions/akka/stm/Address.java | 0 .../akka/stm/CounterExample.java | 0 .../akka/stm/JavaStmTests.java | 0 .../akka/stm/RefExample.java | 0 .../akka/stm/StmExamples.java | 0 .../akka/stm/TransactionFactoryExample.java | 0 .../akka/stm/TransactionalMapExample.java | 0 .../akka/stm/TransactionalVectorExample.java | 0 .../se/scalablesolutions/akka/stm/User.java | 0 .../src/test/resources/logback-test.xml | 0 akka-actor/src/test/scala/Messages.scala | 13 + .../ActorFireForgetRequestReplySpec.scala | 0 .../test/scala/actor/actor/AgentSpec.scala | 0 .../src/test/scala/actor/actor/Bench.scala | 0 .../test/scala/actor/actor/FSMActorSpec.scala | 0 .../scala/actor/actor/ForwardActorSpec.scala | 0 .../actor/actor/ReceiveTimeoutSpec.scala | 0 .../scala/actor/actor/TransactorSpec.scala | 0 .../supervisor/RestartStrategySpec.scala | 0 .../supervisor/SupervisorHierarchySpec.scala | 0 .../actor/supervisor/SupervisorSpec.scala | 0 .../test/scala/dataflow/DataFlowSpec.scala | 4 +- .../test/scala/dispatch/DispatchersSpec.scala | 0 ...rBasedEventDrivenDispatcherActorSpec.scala | 0 
...BasedEventDrivenDispatcherActorsSpec.scala | 0 ...ventDrivenWorkStealingDispatcherSpec.scala | 0 .../src/test/scala/dispatch/FutureSpec.scala | 0 .../dispatch/HawtDispatcherActorSpec.scala | 0 .../dispatch/HawtDispatcherEchoServer.scala | 0 ...ThreadEventDrivenDispatcherActorSpec.scala | 0 ...adPoolEventDrivenDispatcherActorSpec.scala | 0 .../scala/dispatch/ThreadBasedActorSpec.scala | 0 .../dispatch/ThreadBasedDispatcherSpec.scala | 0 .../test/scala/misc/ActorRegistrySpec.scala | 0 .../src/test/scala/misc/SchedulerSpec.scala | 0 .../src/test/scala/routing/RoutingSpec.scala | 0 .../src/test/scala/stm/JavaStmSpec.scala | 0 .../src/test/scala/stm/RefSpec.scala | 0 .../src/test/scala/stm/StmSpec.scala | 0 .../src/test/scala/ticket/Ticket001Spec.scala | 0 .../test/scala/dataflow/DataFlowSpec.scala | 173 --- akka-core/.ensime | 79 -- .../akka/actor/ProtobufProtocol.java | 1060 ----------------- akka-core/src/test/scala/Messages.scala | 44 - .../ActorFireForgetRequestReplySpec.scala | 92 -- .../test/scala/actor/actor/AgentSpec.scala | 111 -- .../src/test/scala/actor/actor/Bench.scala | 119 -- .../scala/actor/actor/ForwardActorSpec.scala | 81 -- .../test/scala/actor/actor/FsmActorSpec.scala | 82 -- .../actor/actor/ReceiveTimeoutSpec.scala | 77 -- .../scala/actor/actor/TransactorSpec.scala | 255 ---- .../supervisor/RestartStrategySpec.scala | 74 -- .../supervisor/SupervisorHierarchySpec.scala | 81 -- .../actor/supervisor/SupervisorSpec.scala | 605 ---------- .../test/scala/dispatch/DispatchersSpec.scala | 74 -- ...rBasedEventDrivenDispatcherActorSpec.scala | 68 -- ...BasedEventDrivenDispatcherActorsSpec.scala | 61 - ...ventDrivenWorkStealingDispatcherSpec.scala | 107 -- .../src/test/scala/dispatch/FutureSpec.scala | 106 -- .../dispatch/HawtDispatcherActorSpec.scala | 71 -- .../dispatch/HawtDispatcherEchoServer.scala | 207 ---- ...ThreadEventDrivenDispatcherActorSpec.scala | 71 -- ...adPoolEventDrivenDispatcherActorSpec.scala | 66 - .../scala/dispatch/ThreadBasedActorSpec.scala | 67 -- .../dispatch/ThreadBasedDispatcherSpec.scala | 91 -- .../test/scala/misc/ActorRegistrySpec.scala | 255 ---- .../src/test/scala/misc/SchedulerSpec.scala | 127 -- .../src/test/scala/routing/RoutingSpec.scala | 179 --- .../src/main/resources/features.xml | 4 +- .../src/main/protocol/RemoteProtocol.proto | 2 +- .../remote/BootableRemoteActorService.scala | 0 .../src/main/scala/remote/Cluster.scala | 0 .../scala/remote/JGroupsClusterActor.scala | 0 .../main/scala/remote/MessageSerializer.scala | 0 .../src/main/scala/remote/RemoteClient.scala | 37 +- .../src/main/scala/remote/RemoteServer.scala | 7 +- .../src/main/scala/serialization/Binary.scala | 0 .../scala/serialization/Compression.scala | 0 .../scala/serialization/Serializable.scala | 0 .../serialization/SerializationProtocol.scala | 284 +++++ .../main/scala/serialization/Serializer.scala | 0 .../akka/actor/ProtobufProtocol.java | 204 ++-- .../akka/actor/RemoteTypedActorOne.java | 0 .../akka/actor/RemoteTypedActorOneImpl.java | 0 .../akka/actor/RemoteTypedActorTwo.java | 0 .../akka/actor/RemoteTypedActorTwoImpl.java | 0 .../akka/config/DependencyBinding.java | 0 .../akka/config/TypedActorGuiceModule.java | 0 .../akka/remote/protocol/RemoteProtocol.java | 0 .../src/test/protocol/ProtobufProtocol.proto | 2 +- .../src/test/resources/META-INF/aop.xml | 0 .../src/test/resources/logback-test.xml | 0 .../src/test/scala/Messages.scala | 8 - .../ClientInitiatedRemoteActorSpec.scala | 0 .../scala/remote/RemoteSupervisorSpec.scala | 0 
.../RemoteTransactionalTypedActorSpec.scala | 0 .../scala/remote/RemoteTypedActorSpec.scala | 0 .../ServerInitiatedRemoteActorSample.scala | 0 .../ServerInitiatedRemoteActorSpec.scala | 0 .../src/test/scala/remote/ShutdownSpec.scala | 0 ...rotobufActorMessageSerializationSpec.scala | 0 .../SerializableTypeClassActorSpec.scala | 0 .../scala/serialization/SerializerSpec.scala | 0 .../UntypedActorSerializationSpec.scala | 0 .../src/test/scala/ticket/Ticket001Spec.scala | 0 akka-samples/akka-sample-chat/Buildfile | 2 +- .../src/main/scala/AkkaProject.scala | 4 +- .../akka/config/DependencyBinding.java | 24 + .../akka/config/TypedActorGuiceModule.java | 32 + .../akka/remote/protocol/RemoteProtocol.java | 0 .../src/main/scala/actor/TypedActor.scala | 48 +- .../scala/config/TypedActorConfigurator.scala | 0 .../config/TypedActorGuiceConfigurator.scala | 6 +- .../se/scalablesolutions/akka/actor/Bar.java | 0 .../scalablesolutions/akka/actor/BarImpl.java | 0 .../se/scalablesolutions/akka/actor/Ext.java | 0 .../scalablesolutions/akka/actor/ExtImpl.java | 0 .../se/scalablesolutions/akka/actor/Foo.java | 0 .../scalablesolutions/akka/actor/FooImpl.java | 0 .../actor/NestedTransactionalTypedActor.java | 0 .../NestedTransactionalTypedActorImpl.java | 0 .../akka/actor/SamplePojo.java | 0 .../akka/actor/SamplePojoImpl.java | 0 .../akka/actor/SimpleJavaPojo.java | 0 .../akka/actor/SimpleJavaPojoCaller.java | 0 .../akka/actor/SimpleJavaPojoCallerImpl.java | 0 .../akka/actor/SimpleJavaPojoImpl.java | 0 .../akka/actor/TransactionalTypedActor.java | 0 .../actor/TransactionalTypedActorImpl.java | 0 .../akka/actor/TypedActorFailer.java | 0 .../akka/actor/TypedActorFailerImpl.java | 0 .../src/test/resources/META-INF/aop.xml | 0 .../NestedTransactionalTypedActorSpec.scala | 0 ...artNestedTransactionalTypedActorSpec.scala | 0 .../RestartTransactionalTypedActorSpec.scala | 0 .../TransactionalTypedActorSpec.scala | 0 .../typed-actor/TypedActorContextSpec.scala | 0 .../TypedActorGuiceConfiguratorSpec.scala | 0 .../typed-actor/TypedActorLifecycleSpec.scala | 0 .../actor/typed-actor/TypedActorSpec.scala | 0 .../TypedActorUtilFunctionsSpec.scala | 0 .../se/scalablesolutions/akka/actor/Bar.java | 6 - .../scalablesolutions/akka/actor/BarImpl.java | 16 - .../se/scalablesolutions/akka/actor/Ext.java | 6 - .../scalablesolutions/akka/actor/ExtImpl.java | 6 - .../se/scalablesolutions/akka/actor/Foo.java | 14 - .../scalablesolutions/akka/actor/FooImpl.java | 40 - .../actor/NestedTransactionalTypedActor.java | 12 - .../NestedTransactionalTypedActorImpl.java | 59 - .../akka/actor/RemoteTypedActorOne.java | 6 - .../akka/actor/RemoteTypedActorOneImpl.java | 29 - .../akka/actor/RemoteTypedActorTwo.java | 6 - .../akka/actor/RemoteTypedActorTwoImpl.java | 29 - .../akka/actor/SamplePojo.java | 8 - .../akka/actor/SamplePojoImpl.java | 45 - .../akka/actor/SimpleJavaPojo.java | 14 - .../akka/actor/SimpleJavaPojoCaller.java | 9 - .../akka/actor/SimpleJavaPojoCallerImpl.java | 26 - .../akka/actor/SimpleJavaPojoImpl.java | 53 - .../akka/actor/TransactionalTypedActor.java | 14 - .../actor/TransactionalTypedActorImpl.java | 84 -- .../akka/actor/TypedActorFailer.java | 5 - .../akka/actor/TypedActorFailerImpl.java | 9 - .../scalablesolutions/akka/stm/Address.java | 13 - .../akka/stm/CounterExample.java | 26 - .../akka/stm/JavaStmTests.java | 91 -- .../akka/stm/RefExample.java | 36 - .../akka/stm/StmExamples.java | 18 - .../akka/stm/TransactionFactoryExample.java | 30 - .../akka/stm/TransactionalMapExample.java | 35 - 
.../akka/stm/TransactionalVectorExample.java | 34 - .../se/scalablesolutions/akka/stm/User.java | 13 - .../NestedTransactionalTypedActorSpec.scala | 102 -- ...artNestedTransactionalTypedActorSpec.scala | 118 -- .../RestartTransactionalTypedActorSpec.scala | 92 -- .../TransactionalTypedActorSpec.scala | 83 -- .../typed-actor/TypedActorContextSpec.scala | 38 - .../TypedActorGuiceConfiguratorSpec.scala | 131 -- .../typed-actor/TypedActorLifecycleSpec.scala | 169 --- .../actor/typed-actor/TypedActorSpec.scala | 31 - .../TypedActorUtilFunctionsSpec.scala | 23 - project/build/AkkaProject.scala | 52 +- 238 files changed, 914 insertions(+), 6284 deletions(-) rename {akka-actors => akka-actor}/src/main/resources/logback.xml (100%) rename {akka-actors => akka-actor}/src/main/scala/actor/Actor.scala (97%) rename {akka-actors => akka-actor}/src/main/scala/actor/ActorRef.scala (97%) rename {akka-actors => akka-actor}/src/main/scala/actor/ActorRegistry.scala (95%) rename {akka-actors => akka-actor}/src/main/scala/actor/Agent.scala (100%) create mode 100644 akka-actor/src/main/scala/actor/BootableActorLoaderService.scala rename {akka-actors => akka-actor}/src/main/scala/actor/FSM.scala (93%) rename {akka-actors => akka-actor}/src/main/scala/actor/Implicits.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/actor/Scheduler.scala (99%) rename {akka-actors => akka-actor}/src/main/scala/actor/Supervisor.scala (98%) rename {akka-actors => akka-actor}/src/main/scala/actor/UntypedActor.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/config/Config.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/config/Configuration.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/config/Configurator.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/config/SupervisionConfig.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/dataflow/DataFlowVariable.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/dispatch/AbstractReactorBasedEventDrivenDispatcher.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/dispatch/Dispatchers.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/dispatch/Future.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/dispatch/HawtDispatcher.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/dispatch/MessageHandling.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcher.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcher.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/dispatch/ThreadBasedDispatcher.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/dispatch/ThreadPoolBuilder.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/routing/Iterators.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/routing/Listeners.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/routing/Routers.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/routing/Routing.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/stm/JTA.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/stm/Ref.scala (100%) rename {akka-actors => 
akka-actor}/src/main/scala/stm/Transaction.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/stm/TransactionFactory.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/stm/TransactionFactoryBuilder.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/stm/TransactionManagement.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/stm/TransactionalMap.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/stm/TransactionalVector.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/stm/global/Atomic.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/stm/global/GlobalStm.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/stm/global/package.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/stm/local/Atomic.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/stm/local/LocalStm.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/stm/local/package.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/stm/transactional.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/util/AkkaException.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/util/Bootable.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/util/Duration.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/util/HashCode.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/util/Helpers.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/util/ListenerManagement.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/util/LockUtil.scala (100%) rename {akka-actors => akka-actor}/src/main/scala/util/Logging.scala (100%) create mode 100644 akka-actor/src/main/scala/util/ReflectiveAccess.scala rename {akka-actors => akka-actor}/src/main/scala/util/Uuid.scala (100%) rename {akka-core => akka-actor}/src/test/java/se/scalablesolutions/akka/stm/Address.java (100%) rename {akka-core => akka-actor}/src/test/java/se/scalablesolutions/akka/stm/CounterExample.java (100%) rename {akka-core => akka-actor}/src/test/java/se/scalablesolutions/akka/stm/JavaStmTests.java (100%) rename {akka-core => akka-actor}/src/test/java/se/scalablesolutions/akka/stm/RefExample.java (100%) rename {akka-core => akka-actor}/src/test/java/se/scalablesolutions/akka/stm/StmExamples.java (100%) rename {akka-core => akka-actor}/src/test/java/se/scalablesolutions/akka/stm/TransactionFactoryExample.java (100%) rename {akka-core => akka-actor}/src/test/java/se/scalablesolutions/akka/stm/TransactionalMapExample.java (100%) rename {akka-core => akka-actor}/src/test/java/se/scalablesolutions/akka/stm/TransactionalVectorExample.java (100%) rename {akka-core => akka-actor}/src/test/java/se/scalablesolutions/akka/stm/User.java (100%) rename {akka-actors => akka-actor}/src/test/resources/logback-test.xml (100%) create mode 100644 akka-actor/src/test/scala/Messages.scala rename {akka-actors => akka-actor}/src/test/scala/actor/actor/ActorFireForgetRequestReplySpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/actor/actor/AgentSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/actor/actor/Bench.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/actor/actor/FSMActorSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/actor/actor/ForwardActorSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/actor/actor/ReceiveTimeoutSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/actor/actor/TransactorSpec.scala 
(100%) rename {akka-actors => akka-actor}/src/test/scala/actor/supervisor/RestartStrategySpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/actor/supervisor/SupervisorHierarchySpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/actor/supervisor/SupervisorSpec.scala (100%) rename {akka-core => akka-actor}/src/test/scala/dataflow/DataFlowSpec.scala (98%) rename {akka-actors => akka-actor}/src/test/scala/dispatch/DispatchersSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorsSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/dispatch/FutureSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/dispatch/HawtDispatcherActorSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/dispatch/HawtDispatcherEchoServer.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/dispatch/ThreadBasedActorSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/dispatch/ThreadBasedDispatcherSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/misc/ActorRegistrySpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/misc/SchedulerSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/routing/RoutingSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/stm/JavaStmSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/stm/RefSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/stm/StmSpec.scala (100%) rename {akka-actors => akka-actor}/src/test/scala/ticket/Ticket001Spec.scala (100%) delete mode 100644 akka-actors/src/test/scala/dataflow/DataFlowSpec.scala delete mode 100644 akka-core/.ensime delete mode 100644 akka-core/src/test/java/se/scalablesolutions/akka/actor/ProtobufProtocol.java delete mode 100644 akka-core/src/test/scala/Messages.scala delete mode 100644 akka-core/src/test/scala/actor/actor/ActorFireForgetRequestReplySpec.scala delete mode 100644 akka-core/src/test/scala/actor/actor/AgentSpec.scala delete mode 100644 akka-core/src/test/scala/actor/actor/Bench.scala delete mode 100644 akka-core/src/test/scala/actor/actor/ForwardActorSpec.scala delete mode 100644 akka-core/src/test/scala/actor/actor/FsmActorSpec.scala delete mode 100644 akka-core/src/test/scala/actor/actor/ReceiveTimeoutSpec.scala delete mode 100644 akka-core/src/test/scala/actor/actor/TransactorSpec.scala delete mode 100644 akka-core/src/test/scala/actor/supervisor/RestartStrategySpec.scala delete mode 100644 akka-core/src/test/scala/actor/supervisor/SupervisorHierarchySpec.scala delete mode 100644 akka-core/src/test/scala/actor/supervisor/SupervisorSpec.scala delete mode 100644 akka-core/src/test/scala/dispatch/DispatchersSpec.scala delete mode 100644 akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala delete mode 100644 akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorsSpec.scala delete mode 100644 
akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala delete mode 100644 akka-core/src/test/scala/dispatch/FutureSpec.scala delete mode 100644 akka-core/src/test/scala/dispatch/HawtDispatcherActorSpec.scala delete mode 100644 akka-core/src/test/scala/dispatch/HawtDispatcherEchoServer.scala delete mode 100644 akka-core/src/test/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala delete mode 100644 akka-core/src/test/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala delete mode 100644 akka-core/src/test/scala/dispatch/ThreadBasedActorSpec.scala delete mode 100644 akka-core/src/test/scala/dispatch/ThreadBasedDispatcherSpec.scala delete mode 100644 akka-core/src/test/scala/misc/ActorRegistrySpec.scala delete mode 100644 akka-core/src/test/scala/misc/SchedulerSpec.scala delete mode 100644 akka-core/src/test/scala/routing/RoutingSpec.scala rename {akka-core => akka-remote}/src/main/protocol/RemoteProtocol.proto (98%) rename {akka-core => akka-remote}/src/main/scala/remote/BootableRemoteActorService.scala (100%) rename {akka-core => akka-remote}/src/main/scala/remote/Cluster.scala (100%) rename {akka-core => akka-remote}/src/main/scala/remote/JGroupsClusterActor.scala (100%) rename {akka-core => akka-remote}/src/main/scala/remote/MessageSerializer.scala (100%) rename {akka-core => akka-remote}/src/main/scala/remote/RemoteClient.scala (92%) rename {akka-core => akka-remote}/src/main/scala/remote/RemoteServer.scala (98%) rename {akka-core => akka-remote}/src/main/scala/serialization/Binary.scala (100%) rename {akka-core => akka-remote}/src/main/scala/serialization/Compression.scala (100%) rename {akka-core => akka-remote}/src/main/scala/serialization/Serializable.scala (100%) create mode 100644 akka-remote/src/main/scala/serialization/SerializationProtocol.scala rename {akka-core => akka-remote}/src/main/scala/serialization/Serializer.scala (100%) rename {akka-typed-actors => akka-remote}/src/test/java/se/scalablesolutions/akka/actor/ProtobufProtocol.java (98%) rename {akka-core => akka-remote}/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOne.java (100%) rename {akka-core => akka-remote}/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOneImpl.java (100%) rename {akka-core => akka-remote}/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwo.java (100%) rename {akka-core => akka-remote}/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwoImpl.java (100%) rename {akka-typed-actors/src/main => akka-remote/src/test}/java/se/scalablesolutions/akka/config/DependencyBinding.java (100%) rename {akka-typed-actors/src/main => akka-remote/src/test}/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java (100%) rename {akka-core/src/main => akka-remote/src/test}/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java (100%) rename {akka-core => akka-remote}/src/test/protocol/ProtobufProtocol.proto (92%) rename {akka-core => akka-remote}/src/test/resources/META-INF/aop.xml (100%) rename {akka-core => akka-remote}/src/test/resources/logback-test.xml (100%) rename {akka-actors => akka-remote}/src/test/scala/Messages.scala (83%) rename {akka-core => akka-remote}/src/test/scala/remote/ClientInitiatedRemoteActorSpec.scala (100%) rename {akka-core => akka-remote}/src/test/scala/remote/RemoteSupervisorSpec.scala (100%) rename {akka-core => akka-remote}/src/test/scala/remote/RemoteTransactionalTypedActorSpec.scala (100%) rename {akka-core => 
akka-remote}/src/test/scala/remote/RemoteTypedActorSpec.scala (100%) rename {akka-core => akka-remote}/src/test/scala/remote/ServerInitiatedRemoteActorSample.scala (100%) rename {akka-core => akka-remote}/src/test/scala/remote/ServerInitiatedRemoteActorSpec.scala (100%) rename {akka-core => akka-remote}/src/test/scala/remote/ShutdownSpec.scala (100%) rename {akka-core => akka-remote}/src/test/scala/serialization/ProtobufActorMessageSerializationSpec.scala (100%) rename {akka-core => akka-remote}/src/test/scala/serialization/SerializableTypeClassActorSpec.scala (100%) rename {akka-core => akka-remote}/src/test/scala/serialization/SerializerSpec.scala (100%) rename {akka-core => akka-remote}/src/test/scala/serialization/UntypedActorSerializationSpec.scala (100%) rename {akka-core => akka-remote}/src/test/scala/ticket/Ticket001Spec.scala (100%) create mode 100644 akka-typed-actor/src/main/java/se/scalablesolutions/akka/config/DependencyBinding.java create mode 100644 akka-typed-actor/src/main/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java rename {akka-typed-actors => akka-typed-actor}/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java (100%) rename {akka-typed-actors => akka-typed-actor}/src/main/scala/actor/TypedActor.scala (94%) rename {akka-typed-actors => akka-typed-actor}/src/main/scala/config/TypedActorConfigurator.scala (100%) rename {akka-typed-actors => akka-typed-actor}/src/main/scala/config/TypedActorGuiceConfigurator.scala (97%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/Bar.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/BarImpl.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/Ext.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/ExtImpl.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/Foo.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/FooImpl.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActor.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActorImpl.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/SamplePojoImpl.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCaller.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCallerImpl.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoImpl.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActor.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActorImpl.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailer.java (100%) rename {akka-core => akka-typed-actor}/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailerImpl.java (100%) rename 
{akka-typed-actors => akka-typed-actor}/src/test/resources/META-INF/aop.xml (100%) rename {akka-core => akka-typed-actor}/src/test/scala/actor/typed-actor/NestedTransactionalTypedActorSpec.scala (100%) rename {akka-core => akka-typed-actor}/src/test/scala/actor/typed-actor/RestartNestedTransactionalTypedActorSpec.scala (100%) rename {akka-core => akka-typed-actor}/src/test/scala/actor/typed-actor/RestartTransactionalTypedActorSpec.scala (100%) rename {akka-core => akka-typed-actor}/src/test/scala/actor/typed-actor/TransactionalTypedActorSpec.scala (100%) rename {akka-core => akka-typed-actor}/src/test/scala/actor/typed-actor/TypedActorContextSpec.scala (100%) rename {akka-core => akka-typed-actor}/src/test/scala/actor/typed-actor/TypedActorGuiceConfiguratorSpec.scala (100%) rename {akka-core => akka-typed-actor}/src/test/scala/actor/typed-actor/TypedActorLifecycleSpec.scala (100%) rename {akka-core => akka-typed-actor}/src/test/scala/actor/typed-actor/TypedActorSpec.scala (100%) rename {akka-core => akka-typed-actor}/src/test/scala/actor/typed-actor/TypedActorUtilFunctionsSpec.scala (100%) delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Bar.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/BarImpl.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Ext.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/ExtImpl.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Foo.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/FooImpl.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActor.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActorImpl.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOne.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOneImpl.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwo.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwoImpl.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SamplePojoImpl.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCaller.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCallerImpl.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoImpl.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActor.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActorImpl.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailer.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailerImpl.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/Address.java delete mode 100644 
akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/CounterExample.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/JavaStmTests.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/RefExample.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/StmExamples.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionFactoryExample.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionalMapExample.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionalVectorExample.java delete mode 100644 akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/User.java delete mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/NestedTransactionalTypedActorSpec.scala delete mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/RestartNestedTransactionalTypedActorSpec.scala delete mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/RestartTransactionalTypedActorSpec.scala delete mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/TransactionalTypedActorSpec.scala delete mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorContextSpec.scala delete mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorGuiceConfiguratorSpec.scala delete mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorLifecycleSpec.scala delete mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorSpec.scala delete mode 100644 akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorUtilFunctionsSpec.scala diff --git a/akka-actors/src/main/resources/logback.xml b/akka-actor/src/main/resources/logback.xml similarity index 100% rename from akka-actors/src/main/resources/logback.xml rename to akka-actor/src/main/resources/logback.xml diff --git a/akka-actors/src/main/scala/actor/Actor.scala b/akka-actor/src/main/scala/actor/Actor.scala similarity index 97% rename from akka-actors/src/main/scala/actor/Actor.scala rename to akka-actor/src/main/scala/actor/Actor.scala index 6db8304fb4..24a04c3eb3 100644 --- a/akka-actors/src/main/scala/actor/Actor.scala +++ b/akka-actor/src/main/scala/actor/Actor.scala @@ -502,6 +502,22 @@ private[actor] class AnyOptionAsTypedOption(anyOption: Option[Any]) { def asSilently[T: Manifest]: Option[T] = narrowSilently[T](anyOption) } +/** + * Marker interface for proxyable actors (such as typed actor). + * + * @author Jonas Bonér + */ trait Proxyable { - def swapProxiedActor(newInstance: Actor) + private[actor] def swapProxiedActor(newInstance: Actor) +} + +/** + * Represents the different Actor types. 
+ * + * @author Jonas Bonér + */ +sealed trait ActorType +object ActorType { + case object ScalaActor extends ActorType + case object TypedActor extends ActorType } diff --git a/akka-actors/src/main/scala/actor/ActorRef.scala b/akka-actor/src/main/scala/actor/ActorRef.scala similarity index 97% rename from akka-actors/src/main/scala/actor/ActorRef.scala rename to akka-actor/src/main/scala/actor/ActorRef.scala index 2d687e3738..cec8d18d84 100644 --- a/akka-actors/src/main/scala/actor/ActorRef.scala +++ b/akka-actor/src/main/scala/actor/ActorRef.scala @@ -11,9 +11,9 @@ import se.scalablesolutions.akka.config.ScalaConfig._ import se.scalablesolutions.akka.stm.global._ import se.scalablesolutions.akka.stm.TransactionManagement._ import se.scalablesolutions.akka.stm.{TransactionManagement, TransactionSetAbortedException} -import se.scalablesolutions.akka.util.{HashCode, Logging, UUID, ReentrantGuard} -import se.scalablesolutions.akka.remote.{RemoteClientModule, RemoteServerModule} import se.scalablesolutions.akka.AkkaException +import se.scalablesolutions.akka.util._ +import ReflectiveAccess._ import org.multiverse.api.ThreadLocalTransaction._ import org.multiverse.commitbarriers.CountDownCommitBarrier @@ -743,7 +743,7 @@ class LocalActorRef private[akka]( * Invoking 'makeRemote' means that an actor will be moved to and invoked on a remote host. */ def makeRemote(hostname: String, port: Int): Unit = { - RemoteClientModule.ensureRemotingEnabled + ensureRemotingEnabled if (!isRunning || isBeingRestarted) makeRemote(new InetSocketAddress(hostname, port)) else throw new ActorInitializationException( "Can't make a running actor remote. Make sure you call 'makeRemote' before 'start'.") @@ -753,7 +753,7 @@ class LocalActorRef private[akka]( * Invoking 'makeRemote' means that an actor will be moved to and invoked on a remote host. */ def makeRemote(address: InetSocketAddress): Unit = guard.withGuard { - RemoteClientModule.ensureRemotingEnabled + ensureRemotingEnabled if (!isRunning || isBeingRestarted) { _remoteAddress = Some(address) RemoteClientModule.register(address, uuid) @@ -829,10 +829,12 @@ class LocalActorRef private[akka]( _isShutDown = true actor.shutdown ActorRegistry.unregister(this) - remoteAddress.foreach { address => - RemoteClientModule.unregister(address, uuid) + if (isRemotingEnabled) { + remoteAddress.foreach { address => + RemoteClientModule.unregister(address, uuid) + } + RemoteServerModule.unregister(this) } - RemoteClientModule.unregister(this) nullOutActorRefReferencesFor(actorInstance.get) } //else if (isBeingRestarted) throw new ActorKilledException("Actor [" + toString + "] is being restarted.") } @@ -887,7 +889,7 @@ class LocalActorRef private[akka]( * To be invoked from within the actor itself. */ def startLinkRemote(actorRef: ActorRef, hostname: String, port: Int) = guard.withGuard { - RemoteClientModule.ensureRemotingEnabled + ensureRemotingEnabled try { actorRef.makeRemote(hostname, port) actorRef.start @@ -913,7 +915,7 @@ class LocalActorRef private[akka]( * To be invoked from within the actor itself. */ def spawnRemote(clazz: Class[_ <: Actor], hostname: String, port: Int): ActorRef = guard.withGuard { - RemoteClientModule.ensureRemotingEnabled + ensureRemotingEnabled val actor = spawnButDoNotStart(clazz) actor.makeRemote(hostname, port) actor.start @@ -941,7 +943,7 @@ class LocalActorRef private[akka]( * To be invoked from within the actor itself. 
*/ def spawnLinkRemote(clazz: Class[_ <: Actor], hostname: String, port: Int): ActorRef = guard.withGuard { - RemoteClientModule.ensureRemotingEnabled + ensureRemotingEnabled val actor = spawnButDoNotStart(clazz) try { actor.makeRemote(hostname, port) @@ -978,8 +980,10 @@ class LocalActorRef private[akka]( protected[akka] def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit = { joinTransaction(message) - if (remoteAddress.isDefined) RemoteClientModule.send(message, senderOption, None, remoteAddress.get, this) - else { + if (isRemotingEnabled && remoteAddress.isDefined) { + RemoteClientModule.send[Any]( + message, senderOption, None, remoteAddress.get, timeout, true, this, None, ActorType.ScalaActor) + } else { val invocation = new MessageInvocation(this, message, senderOption, None, transactionSet.get) invocation.send } @@ -992,9 +996,12 @@ class LocalActorRef private[akka]( senderFuture: Option[CompletableFuture[T]]): CompletableFuture[T] = { joinTransaction(message) - if (remoteAddress.isDefined) RemoteClientModule.send( - message, senderOption, senderFuture, remoteAddress.get, this) - else { + if (isRemotingEnabled && remoteAddress.isDefined) { + val future = RemoteClientModule.send[T]( + message, senderOption, senderFuture, remoteAddress.get, timeout, false, this, None, ActorType.ScalaActor) + if (future.isDefined) future.get + else throw new IllegalActorStateException("Expected a future from remote call to actor " + toString) + } else { val future = if (senderFuture.isDefined) senderFuture.get else new DefaultCompletableFuture[T](timeout) val invocation = new MessageInvocation( @@ -1096,7 +1103,7 @@ class LocalActorRef private[akka]( } protected[akka] def registerSupervisorAsRemoteActor: Option[String] = guard.withGuard { - RemoteClientModule.ensureRemotingEnabled + ensureRemotingEnabled if (_supervisor.isDefined) { remoteAddress.foreach(address => RemoteClientModule.registerSupervisorForActor(address, this)) Some(_supervisor.get.uuid) @@ -1358,7 +1365,7 @@ private[akka] case class RemoteActorRef private[akka] ( loader: Option[ClassLoader]) extends ActorRef with ScalaActorRef { - RemoteClientModule.ensureRemotingEnabled + ensureRemotingEnabled _uuid = uuuid timeout = _timeout @@ -1367,14 +1374,16 @@ private[akka] case class RemoteActorRef private[akka] ( lazy val remoteClient = RemoteClientModule.clientFor(hostname, port, loader) def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]): Unit = - RemoteClientModule.send(message, senderOption, None, remoteAddress.get, this) + RemoteClientModule.send[Any]( + message, senderOption, None, remoteAddress.get, timeout, true, this, None, ActorType.ScalaActor) def postMessageToMailboxAndCreateFutureResultWithTimeout[T]( message: Any, timeout: Long, senderOption: Option[ActorRef], senderFuture: Option[CompletableFuture[T]]): CompletableFuture[T] = { - val future = RemoteClientModule.send(message, senderOption, None, remoteAddress.get, this) + val future = RemoteClientModule.send[T]( + message, senderOption, senderFuture, remoteAddress.get, timeout, false, this, None, ActorType.ScalaActor) if (future.isDefined) future.get else throw new IllegalActorStateException("Expected a future from remote call to actor " + toString) } @@ -1397,6 +1406,8 @@ private[akka] case class RemoteActorRef private[akka] ( protected[akka] def registerSupervisorAsRemoteActor: Option[String] = None + val remoteAddress: Option[InetSocketAddress] = Some(new InetSocketAddress(hostname, port)) + // ==== NOT SUPPORTED ==== def actorClass: 
Class[_ <: Actor] = unsupported
   def dispatcher_=(md: MessageDispatcher): Unit = unsupported
@@ -1407,7 +1418,6 @@ private[akka] case class RemoteActorRef private[akka] (
   def makeRemote(hostname: String, port: Int): Unit = unsupported
   def makeRemote(address: InetSocketAddress): Unit = unsupported
   def homeAddress_=(address: InetSocketAddress): Unit = unsupported
-  def remoteAddress: Option[InetSocketAddress] = unsupported
   def link(actorRef: ActorRef): Unit = unsupported
   def unlink(actorRef: ActorRef): Unit = unsupported
   def startLink(actorRef: ActorRef): Unit = unsupported
@@ -1460,6 +1470,7 @@ trait ActorRefShared {
  * from ActorRef -> ScalaActorRef and back
  */
 trait ScalaActorRef extends ActorRefShared { ref: ActorRef =>
+
   /**
    * Identifier for actor, does not have to be a unique one. Default is the 'uuid'.
    *
@@ -1469,7 +1480,8 @@ trait ScalaActorRef extends ActorRefShared { ref: ActorRef =>
    * upon restart, remote restart etc.
    */
   def id: String
-  def id_=(id: String):Unit
+
+  def id_=(id: String): Unit

   /**
    * User overridable callback/setting.
@@ -1529,7 +1541,7 @@ trait ScalaActorRef extends ActorRefShared { ref: ActorRef =>
   def sender: Option[ActorRef] = {
     // Five lines of map-performance-avoidance, could be just: currentMessage map { _.sender }
     val msg = currentMessage
-    if(msg.isEmpty) None
+    if (msg.isEmpty) None
     else msg.get.sender
   }

@@ -1540,7 +1552,7 @@ trait ScalaActorRef extends ActorRefShared { ref: ActorRef =>
   def senderFuture(): Option[CompletableFuture[Any]] = {
     // Five lines of map-performance-avoidance, could be just: currentMessage map { _.senderFuture }
     val msg = currentMessage
-    if(msg.isEmpty) None
+    if (msg.isEmpty) None
     else msg.get.senderFuture
   }

@@ -1580,7 +1592,8 @@ trait ScalaActorRef extends ActorRefShared { ref: ActorRef =>
   def !!(message: Any, timeout: Long = this.timeout)(implicit sender: Option[ActorRef] = None): Option[Any] = {
     if (isRunning) {
       val future = postMessageToMailboxAndCreateFutureResultWithTimeout[Any](message, timeout, sender, None)
-      val isMessageJoinPoint = TypedActorModule.resolveFutureIfMessageIsJoinPoint(message, future)
+      val isMessageJoinPoint = if (isTypedActorEnabled) TypedActorModule.resolveFutureIfMessageIsJoinPoint(message, future)
+        else false
       try {
         future.await
       } catch {
@@ -1664,8 +1677,10 @@ trait ScalaActorRef extends ActorRefShared { ref: ActorRef =>

   /**
    * Atomically create (from actor class), start and make an actor remote.
    */
-  def spawnRemote[T <: Actor: Manifest](hostname: String, port: Int): ActorRef =
+  def spawnRemote[T <: Actor: Manifest](hostname: String, port: Int): ActorRef = {
+    ensureRemotingEnabled
     spawnRemote(manifest[T].erasure.asInstanceOf[Class[_ <: Actor]],hostname,port)
+  }

   /**
@@ -1674,10 +1689,11 @@ trait ScalaActorRef extends ActorRefShared { ref: ActorRef =>
   def spawnLink[T <: Actor: Manifest]: ActorRef =
     spawnLink(manifest[T].erasure.asInstanceOf[Class[_ <: Actor]])

-
   /**
    * Atomically create (from actor class), start, link and make an actor remote.
    */
-  def spawnLinkRemote[T <: Actor : Manifest](hostname: String, port: Int): ActorRef =
+  def spawnLinkRemote[T <: Actor : Manifest](hostname: String, port: Int): ActorRef = {
+    ensureRemotingEnabled
     spawnLinkRemote(manifest[T].erasure.asInstanceOf[Class[_ <: Actor]],hostname,port)
+  }
 }
diff --git a/akka-actors/src/main/scala/actor/ActorRegistry.scala b/akka-actor/src/main/scala/actor/ActorRegistry.scala
similarity index 95%
rename from akka-actors/src/main/scala/actor/ActorRegistry.scala
rename to akka-actor/src/main/scala/actor/ActorRegistry.scala
index 51de155723..b14ff45f48 100644
--- a/akka-actors/src/main/scala/actor/ActorRegistry.scala
+++ b/akka-actor/src/main/scala/actor/ActorRegistry.scala
@@ -8,10 +8,15 @@ import scala.collection.mutable.ListBuffer
 import scala.reflect.Manifest

 import java.util.concurrent.{ConcurrentSkipListSet, ConcurrentHashMap}
-import java.util.{Set=>JSet}
+import java.util.{Set => JSet}

 import se.scalablesolutions.akka.util.ListenerManagement

+/**
+ * Base trait for ActorRegistry events, allows listening for when an actor is added to or removed from the ActorRegistry.
+ * + * @author Jonas Bonér + */ sealed trait ActorRegistryEvent case class ActorRegistered(actor: ActorRef) extends ActorRegistryEvent case class ActorUnregistered(actor: ActorRef) extends ActorRegistryEvent diff --git a/akka-actors/src/main/scala/actor/Agent.scala b/akka-actor/src/main/scala/actor/Agent.scala similarity index 100% rename from akka-actors/src/main/scala/actor/Agent.scala rename to akka-actor/src/main/scala/actor/Agent.scala diff --git a/akka-actor/src/main/scala/actor/BootableActorLoaderService.scala b/akka-actor/src/main/scala/actor/BootableActorLoaderService.scala new file mode 100644 index 0000000000..dfb8541396 --- /dev/null +++ b/akka-actor/src/main/scala/actor/BootableActorLoaderService.scala @@ -0,0 +1,101 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import java.io.File +import java.net.{URL, URLClassLoader} +import java.util.jar.JarFile +import java.util.Enumeration + +import se.scalablesolutions.akka.util.{Bootable, Logging} +import se.scalablesolutions.akka.config.Config._ + +class AkkaDeployClassLoader(urls : List[URL], parent : ClassLoader) extends URLClassLoader(urls.toArray.asInstanceOf[Array[URL]],parent) +{ + override def findResources(resource : String) = { + val normalResult = super.findResources(resource) + if(normalResult.hasMoreElements) normalResult else findDeployed(resource) + } + + def findDeployed(resource : String) = new Enumeration[URL]{ + private val it = getURLs.flatMap( listClassesInPackage(_,resource) ).iterator + def hasMoreElements = it.hasNext + def nextElement = it.next + } + + def listClassesInPackage(jar : URL, pkg : String) = { + val f = new File(jar.getFile) + val jf = new JarFile(f) + try { + val es = jf.entries + var result = List[URL]() + while(es.hasMoreElements) + { + val e = es.nextElement + if(!e.isDirectory && e.getName.startsWith(pkg) && e.getName.endsWith(".class")) + result ::= new URL("jar:" + f.toURI.toURL + "!/" + e) + } + result + } finally { + jf.close + } + } +} + +/** + * Handles all modules in the deploy directory (load and unload) + */ +trait BootableActorLoaderService extends Bootable with Logging { + + val BOOT_CLASSES = config.getList("akka.boot") + lazy val applicationLoader: Option[ClassLoader] = createApplicationClassLoader + + protected def createApplicationClassLoader : Option[ClassLoader] = { + Some( + if (HOME.isDefined) { + val CONFIG = HOME.get + "/config" + val DEPLOY = HOME.get + "/deploy" + val DEPLOY_DIR = new File(DEPLOY) + if (!DEPLOY_DIR.exists) { + log.error("Could not find a deploy directory at [%s]", DEPLOY) + System.exit(-1) + } + val filesToDeploy = DEPLOY_DIR.listFiles.toArray.toList + .asInstanceOf[List[File]].filter(_.getName.endsWith(".jar")) + var dependencyJars: List[URL] = Nil + filesToDeploy.map { file => + val jarFile = new JarFile(file) + val en = jarFile.entries + while (en.hasMoreElements) { + val name = en.nextElement.getName + if (name.endsWith(".jar")) dependencyJars ::= new File( + String.format("jar:file:%s!/%s", jarFile.getName, name)).toURI.toURL + } + } + val toDeploy = filesToDeploy.map(_.toURI.toURL) + log.info("Deploying applications from [%s]: [%s]", DEPLOY, toDeploy) + log.debug("Loading dependencies [%s]", dependencyJars) + val allJars = toDeploy ::: dependencyJars + + new AkkaDeployClassLoader(allJars,Thread.currentThread.getContextClassLoader) + } else Thread.currentThread.getContextClassLoader) + } + + abstract override def onLoad = { + applicationLoader.foreach(_ => log.info("Creating /deploy 
class-loader")) + + super.onLoad + + for (loader <- applicationLoader; clazz <- BOOT_CLASSES) { + log.info("Loading boot class [%s]", clazz) + loader.loadClass(clazz).newInstance + } + } + + abstract override def onUnload = { + super.onUnload + ActorRegistry.shutdownAll + } +} diff --git a/akka-actors/src/main/scala/actor/FSM.scala b/akka-actor/src/main/scala/actor/FSM.scala similarity index 93% rename from akka-actors/src/main/scala/actor/FSM.scala rename to akka-actor/src/main/scala/actor/FSM.scala index 6248a2575c..0bdc04fc48 100644 --- a/akka-actors/src/main/scala/actor/FSM.scala +++ b/akka-actor/src/main/scala/actor/FSM.scala @@ -1,11 +1,15 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + package se.scalablesolutions.akka.actor import se.scalablesolutions.akka.stm.Ref import se.scalablesolutions.akka.stm.local._ + import java.util.concurrent.{ScheduledFuture, TimeUnit} -trait FSM[S] { - this: Actor => +trait FSM[S] { this: Actor => type StateFunction = scala.PartialFunction[Event, State] @@ -20,7 +24,6 @@ trait FSM[S] { State(NextState, currentState.stateFunction, stateData, currentState.timeout) } - override final protected def receive: Receive = { case value => { timeoutFuture = timeoutFuture.flatMap {ref => ref.cancel(true); None} diff --git a/akka-actors/src/main/scala/actor/Implicits.scala b/akka-actor/src/main/scala/actor/Implicits.scala similarity index 100% rename from akka-actors/src/main/scala/actor/Implicits.scala rename to akka-actor/src/main/scala/actor/Implicits.scala diff --git a/akka-actors/src/main/scala/actor/Scheduler.scala b/akka-actor/src/main/scala/actor/Scheduler.scala similarity index 99% rename from akka-actors/src/main/scala/actor/Scheduler.scala rename to akka-actor/src/main/scala/actor/Scheduler.scala index 50db44a1d0..5dc57056cc 100644 --- a/akka-actors/src/main/scala/actor/Scheduler.scala +++ b/akka-actor/src/main/scala/actor/Scheduler.scala @@ -16,6 +16,7 @@ package se.scalablesolutions.akka.actor import scala.collection.JavaConversions + import java.util.concurrent._ import se.scalablesolutions.akka.util.Logging diff --git a/akka-actors/src/main/scala/actor/Supervisor.scala b/akka-actor/src/main/scala/actor/Supervisor.scala similarity index 98% rename from akka-actors/src/main/scala/actor/Supervisor.scala rename to akka-actor/src/main/scala/actor/Supervisor.scala index 27baedbab3..5493f35c56 100644 --- a/akka-actors/src/main/scala/actor/Supervisor.scala +++ b/akka-actor/src/main/scala/actor/Supervisor.scala @@ -6,9 +6,9 @@ package se.scalablesolutions.akka.actor import se.scalablesolutions.akka.config.ScalaConfig._ import se.scalablesolutions.akka.config.{AllForOneStrategy, OneForOneStrategy, FaultHandlingStrategy} -import se.scalablesolutions.akka.util.Logging -import se.scalablesolutions.akka.remote.RemoteServerModule import se.scalablesolutions.akka.AkkaException +import se.scalablesolutions.akka.util._ +import ReflectiveAccess._ import Actor._ import java.util.concurrent.{CopyOnWriteArrayList, ConcurrentHashMap} diff --git a/akka-actors/src/main/scala/actor/UntypedActor.scala b/akka-actor/src/main/scala/actor/UntypedActor.scala similarity index 100% rename from akka-actors/src/main/scala/actor/UntypedActor.scala rename to akka-actor/src/main/scala/actor/UntypedActor.scala diff --git a/akka-actors/src/main/scala/config/Config.scala b/akka-actor/src/main/scala/config/Config.scala similarity index 100% rename from akka-actors/src/main/scala/config/Config.scala rename to akka-actor/src/main/scala/config/Config.scala diff --git 
a/akka-actors/src/main/scala/config/Configuration.scala b/akka-actor/src/main/scala/config/Configuration.scala similarity index 100% rename from akka-actors/src/main/scala/config/Configuration.scala rename to akka-actor/src/main/scala/config/Configuration.scala diff --git a/akka-actors/src/main/scala/config/Configurator.scala b/akka-actor/src/main/scala/config/Configurator.scala similarity index 100% rename from akka-actors/src/main/scala/config/Configurator.scala rename to akka-actor/src/main/scala/config/Configurator.scala diff --git a/akka-actors/src/main/scala/config/SupervisionConfig.scala b/akka-actor/src/main/scala/config/SupervisionConfig.scala similarity index 100% rename from akka-actors/src/main/scala/config/SupervisionConfig.scala rename to akka-actor/src/main/scala/config/SupervisionConfig.scala diff --git a/akka-actors/src/main/scala/dataflow/DataFlowVariable.scala b/akka-actor/src/main/scala/dataflow/DataFlowVariable.scala similarity index 100% rename from akka-actors/src/main/scala/dataflow/DataFlowVariable.scala rename to akka-actor/src/main/scala/dataflow/DataFlowVariable.scala diff --git a/akka-actors/src/main/scala/dispatch/AbstractReactorBasedEventDrivenDispatcher.scala b/akka-actor/src/main/scala/dispatch/AbstractReactorBasedEventDrivenDispatcher.scala similarity index 100% rename from akka-actors/src/main/scala/dispatch/AbstractReactorBasedEventDrivenDispatcher.scala rename to akka-actor/src/main/scala/dispatch/AbstractReactorBasedEventDrivenDispatcher.scala diff --git a/akka-actors/src/main/scala/dispatch/Dispatchers.scala b/akka-actor/src/main/scala/dispatch/Dispatchers.scala similarity index 100% rename from akka-actors/src/main/scala/dispatch/Dispatchers.scala rename to akka-actor/src/main/scala/dispatch/Dispatchers.scala diff --git a/akka-actors/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala b/akka-actor/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala similarity index 100% rename from akka-actors/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala rename to akka-actor/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala diff --git a/akka-actors/src/main/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala b/akka-actor/src/main/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala similarity index 100% rename from akka-actors/src/main/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala rename to akka-actor/src/main/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcher.scala diff --git a/akka-actors/src/main/scala/dispatch/Future.scala b/akka-actor/src/main/scala/dispatch/Future.scala similarity index 100% rename from akka-actors/src/main/scala/dispatch/Future.scala rename to akka-actor/src/main/scala/dispatch/Future.scala diff --git a/akka-actors/src/main/scala/dispatch/HawtDispatcher.scala b/akka-actor/src/main/scala/dispatch/HawtDispatcher.scala similarity index 100% rename from akka-actors/src/main/scala/dispatch/HawtDispatcher.scala rename to akka-actor/src/main/scala/dispatch/HawtDispatcher.scala diff --git a/akka-actors/src/main/scala/dispatch/MessageHandling.scala b/akka-actor/src/main/scala/dispatch/MessageHandling.scala similarity index 100% rename from akka-actors/src/main/scala/dispatch/MessageHandling.scala rename to akka-actor/src/main/scala/dispatch/MessageHandling.scala diff --git a/akka-actors/src/main/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcher.scala 
b/akka-actor/src/main/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcher.scala similarity index 100% rename from akka-actors/src/main/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcher.scala rename to akka-actor/src/main/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcher.scala diff --git a/akka-actors/src/main/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcher.scala b/akka-actor/src/main/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcher.scala similarity index 100% rename from akka-actors/src/main/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcher.scala rename to akka-actor/src/main/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcher.scala diff --git a/akka-actors/src/main/scala/dispatch/ThreadBasedDispatcher.scala b/akka-actor/src/main/scala/dispatch/ThreadBasedDispatcher.scala similarity index 100% rename from akka-actors/src/main/scala/dispatch/ThreadBasedDispatcher.scala rename to akka-actor/src/main/scala/dispatch/ThreadBasedDispatcher.scala diff --git a/akka-actors/src/main/scala/dispatch/ThreadPoolBuilder.scala b/akka-actor/src/main/scala/dispatch/ThreadPoolBuilder.scala similarity index 100% rename from akka-actors/src/main/scala/dispatch/ThreadPoolBuilder.scala rename to akka-actor/src/main/scala/dispatch/ThreadPoolBuilder.scala diff --git a/akka-actors/src/main/scala/routing/Iterators.scala b/akka-actor/src/main/scala/routing/Iterators.scala similarity index 100% rename from akka-actors/src/main/scala/routing/Iterators.scala rename to akka-actor/src/main/scala/routing/Iterators.scala diff --git a/akka-actors/src/main/scala/routing/Listeners.scala b/akka-actor/src/main/scala/routing/Listeners.scala similarity index 100% rename from akka-actors/src/main/scala/routing/Listeners.scala rename to akka-actor/src/main/scala/routing/Listeners.scala diff --git a/akka-actors/src/main/scala/routing/Routers.scala b/akka-actor/src/main/scala/routing/Routers.scala similarity index 100% rename from akka-actors/src/main/scala/routing/Routers.scala rename to akka-actor/src/main/scala/routing/Routers.scala diff --git a/akka-actors/src/main/scala/routing/Routing.scala b/akka-actor/src/main/scala/routing/Routing.scala similarity index 100% rename from akka-actors/src/main/scala/routing/Routing.scala rename to akka-actor/src/main/scala/routing/Routing.scala diff --git a/akka-actors/src/main/scala/stm/JTA.scala b/akka-actor/src/main/scala/stm/JTA.scala similarity index 100% rename from akka-actors/src/main/scala/stm/JTA.scala rename to akka-actor/src/main/scala/stm/JTA.scala diff --git a/akka-actors/src/main/scala/stm/Ref.scala b/akka-actor/src/main/scala/stm/Ref.scala similarity index 100% rename from akka-actors/src/main/scala/stm/Ref.scala rename to akka-actor/src/main/scala/stm/Ref.scala diff --git a/akka-actors/src/main/scala/stm/Transaction.scala b/akka-actor/src/main/scala/stm/Transaction.scala similarity index 100% rename from akka-actors/src/main/scala/stm/Transaction.scala rename to akka-actor/src/main/scala/stm/Transaction.scala diff --git a/akka-actors/src/main/scala/stm/TransactionFactory.scala b/akka-actor/src/main/scala/stm/TransactionFactory.scala similarity index 100% rename from akka-actors/src/main/scala/stm/TransactionFactory.scala rename to akka-actor/src/main/scala/stm/TransactionFactory.scala diff --git a/akka-actors/src/main/scala/stm/TransactionFactoryBuilder.scala b/akka-actor/src/main/scala/stm/TransactionFactoryBuilder.scala similarity index 100% rename from 
akka-actors/src/main/scala/stm/TransactionFactoryBuilder.scala rename to akka-actor/src/main/scala/stm/TransactionFactoryBuilder.scala diff --git a/akka-actors/src/main/scala/stm/TransactionManagement.scala b/akka-actor/src/main/scala/stm/TransactionManagement.scala similarity index 100% rename from akka-actors/src/main/scala/stm/TransactionManagement.scala rename to akka-actor/src/main/scala/stm/TransactionManagement.scala diff --git a/akka-actors/src/main/scala/stm/TransactionalMap.scala b/akka-actor/src/main/scala/stm/TransactionalMap.scala similarity index 100% rename from akka-actors/src/main/scala/stm/TransactionalMap.scala rename to akka-actor/src/main/scala/stm/TransactionalMap.scala diff --git a/akka-actors/src/main/scala/stm/TransactionalVector.scala b/akka-actor/src/main/scala/stm/TransactionalVector.scala similarity index 100% rename from akka-actors/src/main/scala/stm/TransactionalVector.scala rename to akka-actor/src/main/scala/stm/TransactionalVector.scala diff --git a/akka-actors/src/main/scala/stm/global/Atomic.scala b/akka-actor/src/main/scala/stm/global/Atomic.scala similarity index 100% rename from akka-actors/src/main/scala/stm/global/Atomic.scala rename to akka-actor/src/main/scala/stm/global/Atomic.scala diff --git a/akka-actors/src/main/scala/stm/global/GlobalStm.scala b/akka-actor/src/main/scala/stm/global/GlobalStm.scala similarity index 100% rename from akka-actors/src/main/scala/stm/global/GlobalStm.scala rename to akka-actor/src/main/scala/stm/global/GlobalStm.scala diff --git a/akka-actors/src/main/scala/stm/global/package.scala b/akka-actor/src/main/scala/stm/global/package.scala similarity index 100% rename from akka-actors/src/main/scala/stm/global/package.scala rename to akka-actor/src/main/scala/stm/global/package.scala diff --git a/akka-actors/src/main/scala/stm/local/Atomic.scala b/akka-actor/src/main/scala/stm/local/Atomic.scala similarity index 100% rename from akka-actors/src/main/scala/stm/local/Atomic.scala rename to akka-actor/src/main/scala/stm/local/Atomic.scala diff --git a/akka-actors/src/main/scala/stm/local/LocalStm.scala b/akka-actor/src/main/scala/stm/local/LocalStm.scala similarity index 100% rename from akka-actors/src/main/scala/stm/local/LocalStm.scala rename to akka-actor/src/main/scala/stm/local/LocalStm.scala diff --git a/akka-actors/src/main/scala/stm/local/package.scala b/akka-actor/src/main/scala/stm/local/package.scala similarity index 100% rename from akka-actors/src/main/scala/stm/local/package.scala rename to akka-actor/src/main/scala/stm/local/package.scala diff --git a/akka-actors/src/main/scala/stm/transactional.scala b/akka-actor/src/main/scala/stm/transactional.scala similarity index 100% rename from akka-actors/src/main/scala/stm/transactional.scala rename to akka-actor/src/main/scala/stm/transactional.scala diff --git a/akka-actors/src/main/scala/util/AkkaException.scala b/akka-actor/src/main/scala/util/AkkaException.scala similarity index 100% rename from akka-actors/src/main/scala/util/AkkaException.scala rename to akka-actor/src/main/scala/util/AkkaException.scala diff --git a/akka-actors/src/main/scala/util/Bootable.scala b/akka-actor/src/main/scala/util/Bootable.scala similarity index 100% rename from akka-actors/src/main/scala/util/Bootable.scala rename to akka-actor/src/main/scala/util/Bootable.scala diff --git a/akka-actors/src/main/scala/util/Duration.scala b/akka-actor/src/main/scala/util/Duration.scala similarity index 100% rename from akka-actors/src/main/scala/util/Duration.scala rename to 
akka-actor/src/main/scala/util/Duration.scala
diff --git a/akka-actors/src/main/scala/util/HashCode.scala b/akka-actor/src/main/scala/util/HashCode.scala
similarity index 100%
rename from akka-actors/src/main/scala/util/HashCode.scala
rename to akka-actor/src/main/scala/util/HashCode.scala
diff --git a/akka-actors/src/main/scala/util/Helpers.scala b/akka-actor/src/main/scala/util/Helpers.scala
similarity index 100%
rename from akka-actors/src/main/scala/util/Helpers.scala
rename to akka-actor/src/main/scala/util/Helpers.scala
diff --git a/akka-actors/src/main/scala/util/ListenerManagement.scala b/akka-actor/src/main/scala/util/ListenerManagement.scala
similarity index 100%
rename from akka-actors/src/main/scala/util/ListenerManagement.scala
rename to akka-actor/src/main/scala/util/ListenerManagement.scala
diff --git a/akka-actors/src/main/scala/util/LockUtil.scala b/akka-actor/src/main/scala/util/LockUtil.scala
similarity index 100%
rename from akka-actors/src/main/scala/util/LockUtil.scala
rename to akka-actor/src/main/scala/util/LockUtil.scala
diff --git a/akka-actors/src/main/scala/util/Logging.scala b/akka-actor/src/main/scala/util/Logging.scala
similarity index 100%
rename from akka-actors/src/main/scala/util/Logging.scala
rename to akka-actor/src/main/scala/util/Logging.scala
diff --git a/akka-actor/src/main/scala/util/ReflectiveAccess.scala b/akka-actor/src/main/scala/util/ReflectiveAccess.scala
new file mode 100644
index 0000000000..72909457b0
--- /dev/null
+++ b/akka-actor/src/main/scala/util/ReflectiveAccess.scala
@@ -0,0 +1,193 @@
+/**
+ * Copyright (C) 2009-2010 Scalable Solutions AB
+ */
+
+package se.scalablesolutions.akka.util
+
+import se.scalablesolutions.akka.actor.{ActorRef, IllegalActorStateException, ActorType}
+import se.scalablesolutions.akka.dispatch.{Future, CompletableFuture}
+import se.scalablesolutions.akka.config.{Config, ModuleNotAvailableException}
+
+import java.net.InetSocketAddress
+
+/**
+ * Helper class for reflective access to different modules in order to allow optional loading of modules.
+ *
+ * @author Jonas Bonér
+ */
+object ReflectiveAccess {
+
+  val loader = getClass.getClassLoader
+
+  lazy val isRemotingEnabled = RemoteClientModule.isRemotingEnabled
+  lazy val isTypedActorEnabled = TypedActorModule.isTypedActorEnabled
+
+  def ensureRemotingEnabled = RemoteClientModule.ensureRemotingEnabled
+  def ensureTypedActorEnabled = TypedActorModule.ensureTypedActorEnabled
+
+  /**
+   * Reflective access to the RemoteClient module.
+ * + * @author Jonas Bonér + */ + object RemoteClientModule { + + type RemoteClient = { + def send[T]( + message: Any, + senderOption: Option[ActorRef], + senderFuture: Option[CompletableFuture[_]], + remoteAddress: InetSocketAddress, + timeout: Long, + isOneWay: Boolean, + actorRef: ActorRef, + typedActorInfo: Option[Tuple2[String, String]], + actorType: ActorType): Option[CompletableFuture[T]] + def registerSupervisorForActor(actorRef: ActorRef) + } + + type RemoteClientObject = { + def register(hostname: String, port: Int, uuid: String): Unit + def unregister(hostname: String, port: Int, uuid: String): Unit + def clientFor(address: InetSocketAddress): RemoteClient + def clientFor(hostname: String, port: Int, loader: Option[ClassLoader]): RemoteClient + } + + lazy val isRemotingEnabled = remoteClientObjectInstance.isDefined + + def ensureRemotingEnabled = if (!isRemotingEnabled) throw new ModuleNotAvailableException( + "Can't load the remoting module, make sure that akka-remote.jar is on the classpath") + + val remoteClientObjectInstance: Option[RemoteClientObject] = { + try { + val clazz = loader.loadClass("se.scalablesolutions.akka.remote.RemoteClient$") + val ctor = clazz.getDeclaredConstructor(Array[Class[_]](): _*) + ctor.setAccessible(true) + Some(ctor.newInstance(Array[AnyRef](): _*).asInstanceOf[RemoteClientObject]) + } catch { case e => None } + } + + def register(address: InetSocketAddress, uuid: String) = { + ensureRemotingEnabled + remoteClientObjectInstance.get.register(address.getHostName, address.getPort, uuid) + } + + def unregister(address: InetSocketAddress, uuid: String) = { + ensureRemotingEnabled + remoteClientObjectInstance.get.unregister(address.getHostName, address.getPort, uuid) + } + + def registerSupervisorForActor(remoteAddress: InetSocketAddress, actorRef: ActorRef) = { + ensureRemotingEnabled + val remoteClient = remoteClientObjectInstance.get.clientFor(remoteAddress) + remoteClient.registerSupervisorForActor(actorRef) + } + + def clientFor(hostname: String, port: Int, loader: Option[ClassLoader]): RemoteClient = { + ensureRemotingEnabled + remoteClientObjectInstance.get.clientFor(hostname, port, loader) + } + + def send[T]( + message: Any, + senderOption: Option[ActorRef], + senderFuture: Option[CompletableFuture[_]], + remoteAddress: InetSocketAddress, + timeout: Long, + isOneWay: Boolean, + actorRef: ActorRef, + typedActorInfo: Option[Tuple2[String, String]], + actorType: ActorType): Option[CompletableFuture[T]] = { + ensureRemotingEnabled + clientFor(remoteAddress.getHostName, remoteAddress.getPort, None).send[T]( + message, senderOption, senderFuture, remoteAddress, timeout, isOneWay, actorRef, typedActorInfo, actorType) + } + } + + /** + * Reflective access to the RemoteServer module. 
+ * + * @author Jonas Bonér + */ + object RemoteServerModule { + val HOSTNAME = Config.config.getString("akka.remote.server.hostname", "localhost") + val PORT = Config.config.getInt("akka.remote.server.port", 9999) + + type RemoteServerObject = { + def registerActor(address: InetSocketAddress, uuid: String, actor: ActorRef): Unit + def registerTypedActor(address: InetSocketAddress, name: String, typedActor: AnyRef): Unit + } + + type RemoteNodeObject = { + def unregister(actorRef: ActorRef): Unit + } + + val remoteServerObjectInstance: Option[RemoteServerObject] = { + try { + val clazz = loader.loadClass("se.scalablesolutions.akka.remote.RemoteServer$") + val ctor = clazz.getDeclaredConstructor(Array[Class[_]](): _*) + ctor.setAccessible(true) + Some(ctor.newInstance(Array[AnyRef](): _*).asInstanceOf[RemoteServerObject]) + } catch { case e => None } + } + + val remoteNodeObjectInstance: Option[RemoteNodeObject] = { + try { + val clazz = loader.loadClass("se.scalablesolutions.akka.remote.RemoteNode$") + val ctor = clazz.getDeclaredConstructor(Array[Class[_]](): _*) + ctor.setAccessible(true) + Some(ctor.newInstance(Array[AnyRef](): _*).asInstanceOf[RemoteNodeObject]) + } catch { case e => None } + } + + def registerActor(address: InetSocketAddress, uuid: String, actorRef: ActorRef) = { + ensureRemotingEnabled + remoteServerObjectInstance.get.registerActor(address, uuid, actorRef) + } + + def registerTypedActor(address: InetSocketAddress, implementationClassName: String, proxy: AnyRef) = { + ensureRemotingEnabled + remoteServerObjectInstance.get.registerTypedActor(address, implementationClassName, proxy) + } + + def unregister(actorRef: ActorRef) = { + ensureRemotingEnabled + remoteNodeObjectInstance.get.unregister(actorRef) + } + } + + /** + * Reflective access to the TypedActors module. 
+ * + * @author Jonas Bonér + */ + object TypedActorModule { + + type TypedActorObject = { + def isJoinPoint(message: Any): Boolean + def isJoinPointAndOneWay(message: Any): Boolean + } + + lazy val isTypedActorEnabled = typedActorObjectInstance.isDefined + + def ensureTypedActorEnabled = if (!isTypedActorEnabled) throw new ModuleNotAvailableException( + "Can't load the typed actor module, make sure that akka-typed-actor.jar is on the classpath") + + val typedActorObjectInstance: Option[TypedActorObject] = { + try { + val clazz = loader.loadClass("se.scalablesolutions.akka.actor.TypedActor$") + val ctor = clazz.getDeclaredConstructor(Array[Class[_]](): _*) + ctor.setAccessible(true) + Some(ctor.newInstance(Array[AnyRef](): _*).asInstanceOf[TypedActorObject]) + } catch { case e => None } + } + + def resolveFutureIfMessageIsJoinPoint(message: Any, future: Future[_]): Boolean = { + ensureTypedActorEnabled + if (typedActorObjectInstance.get.isJoinPointAndOneWay(message)) { + future.asInstanceOf[CompletableFuture[Option[_]]].completeWithResult(None) + } + typedActorObjectInstance.get.isJoinPoint(message) + } + } +} diff --git a/akka-actors/src/main/scala/util/Uuid.scala b/akka-actor/src/main/scala/util/Uuid.scala similarity index 100% rename from akka-actors/src/main/scala/util/Uuid.scala rename to akka-actor/src/main/scala/util/Uuid.scala diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/stm/Address.java b/akka-actor/src/test/java/se/scalablesolutions/akka/stm/Address.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/stm/Address.java rename to akka-actor/src/test/java/se/scalablesolutions/akka/stm/Address.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/stm/CounterExample.java b/akka-actor/src/test/java/se/scalablesolutions/akka/stm/CounterExample.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/stm/CounterExample.java rename to akka-actor/src/test/java/se/scalablesolutions/akka/stm/CounterExample.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/stm/JavaStmTests.java b/akka-actor/src/test/java/se/scalablesolutions/akka/stm/JavaStmTests.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/stm/JavaStmTests.java rename to akka-actor/src/test/java/se/scalablesolutions/akka/stm/JavaStmTests.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/stm/RefExample.java b/akka-actor/src/test/java/se/scalablesolutions/akka/stm/RefExample.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/stm/RefExample.java rename to akka-actor/src/test/java/se/scalablesolutions/akka/stm/RefExample.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/stm/StmExamples.java b/akka-actor/src/test/java/se/scalablesolutions/akka/stm/StmExamples.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/stm/StmExamples.java rename to akka-actor/src/test/java/se/scalablesolutions/akka/stm/StmExamples.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/stm/TransactionFactoryExample.java b/akka-actor/src/test/java/se/scalablesolutions/akka/stm/TransactionFactoryExample.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/stm/TransactionFactoryExample.java rename to akka-actor/src/test/java/se/scalablesolutions/akka/stm/TransactionFactoryExample.java diff --git 
a/akka-core/src/test/java/se/scalablesolutions/akka/stm/TransactionalMapExample.java b/akka-actor/src/test/java/se/scalablesolutions/akka/stm/TransactionalMapExample.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/stm/TransactionalMapExample.java rename to akka-actor/src/test/java/se/scalablesolutions/akka/stm/TransactionalMapExample.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/stm/TransactionalVectorExample.java b/akka-actor/src/test/java/se/scalablesolutions/akka/stm/TransactionalVectorExample.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/stm/TransactionalVectorExample.java rename to akka-actor/src/test/java/se/scalablesolutions/akka/stm/TransactionalVectorExample.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/stm/User.java b/akka-actor/src/test/java/se/scalablesolutions/akka/stm/User.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/stm/User.java rename to akka-actor/src/test/java/se/scalablesolutions/akka/stm/User.java diff --git a/akka-actors/src/test/resources/logback-test.xml b/akka-actor/src/test/resources/logback-test.xml similarity index 100% rename from akka-actors/src/test/resources/logback-test.xml rename to akka-actor/src/test/resources/logback-test.xml diff --git a/akka-actor/src/test/scala/Messages.scala b/akka-actor/src/test/scala/Messages.scala new file mode 100644 index 0000000000..7e22dd9d7c --- /dev/null +++ b/akka-actor/src/test/scala/Messages.scala @@ -0,0 +1,13 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka + +abstract class TestMessage + +case object Ping extends TestMessage +case object Pong extends TestMessage +case object OneWay extends TestMessage +case object Die extends TestMessage +case object NotifySupervisorExit extends TestMessage diff --git a/akka-actors/src/test/scala/actor/actor/ActorFireForgetRequestReplySpec.scala b/akka-actor/src/test/scala/actor/actor/ActorFireForgetRequestReplySpec.scala similarity index 100% rename from akka-actors/src/test/scala/actor/actor/ActorFireForgetRequestReplySpec.scala rename to akka-actor/src/test/scala/actor/actor/ActorFireForgetRequestReplySpec.scala diff --git a/akka-actors/src/test/scala/actor/actor/AgentSpec.scala b/akka-actor/src/test/scala/actor/actor/AgentSpec.scala similarity index 100% rename from akka-actors/src/test/scala/actor/actor/AgentSpec.scala rename to akka-actor/src/test/scala/actor/actor/AgentSpec.scala diff --git a/akka-actors/src/test/scala/actor/actor/Bench.scala b/akka-actor/src/test/scala/actor/actor/Bench.scala similarity index 100% rename from akka-actors/src/test/scala/actor/actor/Bench.scala rename to akka-actor/src/test/scala/actor/actor/Bench.scala diff --git a/akka-actors/src/test/scala/actor/actor/FSMActorSpec.scala b/akka-actor/src/test/scala/actor/actor/FSMActorSpec.scala similarity index 100% rename from akka-actors/src/test/scala/actor/actor/FSMActorSpec.scala rename to akka-actor/src/test/scala/actor/actor/FSMActorSpec.scala diff --git a/akka-actors/src/test/scala/actor/actor/ForwardActorSpec.scala b/akka-actor/src/test/scala/actor/actor/ForwardActorSpec.scala similarity index 100% rename from akka-actors/src/test/scala/actor/actor/ForwardActorSpec.scala rename to akka-actor/src/test/scala/actor/actor/ForwardActorSpec.scala diff --git a/akka-actors/src/test/scala/actor/actor/ReceiveTimeoutSpec.scala b/akka-actor/src/test/scala/actor/actor/ReceiveTimeoutSpec.scala 
similarity index 100% rename from akka-actors/src/test/scala/actor/actor/ReceiveTimeoutSpec.scala rename to akka-actor/src/test/scala/actor/actor/ReceiveTimeoutSpec.scala diff --git a/akka-actors/src/test/scala/actor/actor/TransactorSpec.scala b/akka-actor/src/test/scala/actor/actor/TransactorSpec.scala similarity index 100% rename from akka-actors/src/test/scala/actor/actor/TransactorSpec.scala rename to akka-actor/src/test/scala/actor/actor/TransactorSpec.scala diff --git a/akka-actors/src/test/scala/actor/supervisor/RestartStrategySpec.scala b/akka-actor/src/test/scala/actor/supervisor/RestartStrategySpec.scala similarity index 100% rename from akka-actors/src/test/scala/actor/supervisor/RestartStrategySpec.scala rename to akka-actor/src/test/scala/actor/supervisor/RestartStrategySpec.scala diff --git a/akka-actors/src/test/scala/actor/supervisor/SupervisorHierarchySpec.scala b/akka-actor/src/test/scala/actor/supervisor/SupervisorHierarchySpec.scala similarity index 100% rename from akka-actors/src/test/scala/actor/supervisor/SupervisorHierarchySpec.scala rename to akka-actor/src/test/scala/actor/supervisor/SupervisorHierarchySpec.scala diff --git a/akka-actors/src/test/scala/actor/supervisor/SupervisorSpec.scala b/akka-actor/src/test/scala/actor/supervisor/SupervisorSpec.scala similarity index 100% rename from akka-actors/src/test/scala/actor/supervisor/SupervisorSpec.scala rename to akka-actor/src/test/scala/actor/supervisor/SupervisorSpec.scala diff --git a/akka-core/src/test/scala/dataflow/DataFlowSpec.scala b/akka-actor/src/test/scala/dataflow/DataFlowSpec.scala similarity index 98% rename from akka-core/src/test/scala/dataflow/DataFlowSpec.scala rename to akka-actor/src/test/scala/dataflow/DataFlowSpec.scala index ce54699a6f..c5d3c32e63 100644 --- a/akka-core/src/test/scala/dataflow/DataFlowSpec.scala +++ b/akka-actor/src/test/scala/dataflow/DataFlowSpec.scala @@ -22,7 +22,7 @@ import se.scalablesolutions.akka.actor.ActorRegistry @RunWith(classOf[JUnitRunner]) class DataFlowTest extends Spec with ShouldMatchers with BeforeAndAfterAll { describe("DataflowVariable") { - it("should work and generate correct results") { + /* it("should work and generate correct results") { import DataFlow._ val latch = new CountDownLatch(1) @@ -73,8 +73,8 @@ class DataFlowTest extends Spec with ShouldMatchers with BeforeAndAfterAll { result.get should equal (sum(0,ints(0,1000))) ActorRegistry.shutdownAll } + */ } - /*it("should be able to join streams") { import DataFlow._ diff --git a/akka-actors/src/test/scala/dispatch/DispatchersSpec.scala b/akka-actor/src/test/scala/dispatch/DispatchersSpec.scala similarity index 100% rename from akka-actors/src/test/scala/dispatch/DispatchersSpec.scala rename to akka-actor/src/test/scala/dispatch/DispatchersSpec.scala diff --git a/akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala b/akka-actor/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala similarity index 100% rename from akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala rename to akka-actor/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala diff --git a/akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorsSpec.scala b/akka-actor/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorsSpec.scala similarity index 100% rename from akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorsSpec.scala rename to 
akka-actor/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorsSpec.scala diff --git a/akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala b/akka-actor/src/test/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala similarity index 100% rename from akka-actors/src/test/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala rename to akka-actor/src/test/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala diff --git a/akka-actors/src/test/scala/dispatch/FutureSpec.scala b/akka-actor/src/test/scala/dispatch/FutureSpec.scala similarity index 100% rename from akka-actors/src/test/scala/dispatch/FutureSpec.scala rename to akka-actor/src/test/scala/dispatch/FutureSpec.scala diff --git a/akka-actors/src/test/scala/dispatch/HawtDispatcherActorSpec.scala b/akka-actor/src/test/scala/dispatch/HawtDispatcherActorSpec.scala similarity index 100% rename from akka-actors/src/test/scala/dispatch/HawtDispatcherActorSpec.scala rename to akka-actor/src/test/scala/dispatch/HawtDispatcherActorSpec.scala diff --git a/akka-actors/src/test/scala/dispatch/HawtDispatcherEchoServer.scala b/akka-actor/src/test/scala/dispatch/HawtDispatcherEchoServer.scala similarity index 100% rename from akka-actors/src/test/scala/dispatch/HawtDispatcherEchoServer.scala rename to akka-actor/src/test/scala/dispatch/HawtDispatcherEchoServer.scala diff --git a/akka-actors/src/test/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala b/akka-actor/src/test/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala similarity index 100% rename from akka-actors/src/test/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala rename to akka-actor/src/test/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala diff --git a/akka-actors/src/test/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala b/akka-actor/src/test/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala similarity index 100% rename from akka-actors/src/test/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala rename to akka-actor/src/test/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala diff --git a/akka-actors/src/test/scala/dispatch/ThreadBasedActorSpec.scala b/akka-actor/src/test/scala/dispatch/ThreadBasedActorSpec.scala similarity index 100% rename from akka-actors/src/test/scala/dispatch/ThreadBasedActorSpec.scala rename to akka-actor/src/test/scala/dispatch/ThreadBasedActorSpec.scala diff --git a/akka-actors/src/test/scala/dispatch/ThreadBasedDispatcherSpec.scala b/akka-actor/src/test/scala/dispatch/ThreadBasedDispatcherSpec.scala similarity index 100% rename from akka-actors/src/test/scala/dispatch/ThreadBasedDispatcherSpec.scala rename to akka-actor/src/test/scala/dispatch/ThreadBasedDispatcherSpec.scala diff --git a/akka-actors/src/test/scala/misc/ActorRegistrySpec.scala b/akka-actor/src/test/scala/misc/ActorRegistrySpec.scala similarity index 100% rename from akka-actors/src/test/scala/misc/ActorRegistrySpec.scala rename to akka-actor/src/test/scala/misc/ActorRegistrySpec.scala diff --git a/akka-actors/src/test/scala/misc/SchedulerSpec.scala b/akka-actor/src/test/scala/misc/SchedulerSpec.scala similarity index 100% rename from akka-actors/src/test/scala/misc/SchedulerSpec.scala rename to akka-actor/src/test/scala/misc/SchedulerSpec.scala diff --git 
a/akka-actors/src/test/scala/routing/RoutingSpec.scala b/akka-actor/src/test/scala/routing/RoutingSpec.scala similarity index 100% rename from akka-actors/src/test/scala/routing/RoutingSpec.scala rename to akka-actor/src/test/scala/routing/RoutingSpec.scala diff --git a/akka-actors/src/test/scala/stm/JavaStmSpec.scala b/akka-actor/src/test/scala/stm/JavaStmSpec.scala similarity index 100% rename from akka-actors/src/test/scala/stm/JavaStmSpec.scala rename to akka-actor/src/test/scala/stm/JavaStmSpec.scala diff --git a/akka-actors/src/test/scala/stm/RefSpec.scala b/akka-actor/src/test/scala/stm/RefSpec.scala similarity index 100% rename from akka-actors/src/test/scala/stm/RefSpec.scala rename to akka-actor/src/test/scala/stm/RefSpec.scala diff --git a/akka-actors/src/test/scala/stm/StmSpec.scala b/akka-actor/src/test/scala/stm/StmSpec.scala similarity index 100% rename from akka-actors/src/test/scala/stm/StmSpec.scala rename to akka-actor/src/test/scala/stm/StmSpec.scala diff --git a/akka-actors/src/test/scala/ticket/Ticket001Spec.scala b/akka-actor/src/test/scala/ticket/Ticket001Spec.scala similarity index 100% rename from akka-actors/src/test/scala/ticket/Ticket001Spec.scala rename to akka-actor/src/test/scala/ticket/Ticket001Spec.scala diff --git a/akka-actors/src/test/scala/dataflow/DataFlowSpec.scala b/akka-actors/src/test/scala/dataflow/DataFlowSpec.scala deleted file mode 100644 index ce54699a6f..0000000000 --- a/akka-actors/src/test/scala/dataflow/DataFlowSpec.scala +++ /dev/null @@ -1,173 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.dataflow - -import org.scalatest.Spec -import org.scalatest.Assertions -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.BeforeAndAfterAll -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith - -import java.util.concurrent.{TimeUnit, CountDownLatch} -import java.util.concurrent.atomic.{AtomicLong, AtomicReference, AtomicInteger} - -import scala.annotation.tailrec - -import se.scalablesolutions.akka.dispatch.DefaultCompletableFuture -import se.scalablesolutions.akka.actor.ActorRegistry - -@RunWith(classOf[JUnitRunner]) -class DataFlowTest extends Spec with ShouldMatchers with BeforeAndAfterAll { - describe("DataflowVariable") { - it("should work and generate correct results") { - import DataFlow._ - - val latch = new CountDownLatch(1) - val result = new AtomicInteger(0) - val x, y, z = new DataFlowVariable[Int] - thread { - z << x() + y() - latch.countDown - result.set(z()) - } - thread { x << 40 } - thread { y << 2 } - - latch.await(3,TimeUnit.SECONDS) should equal (true) - List(x,y,z).foreach(_.shutdown) - result.get should equal (42) - ActorRegistry.shutdownAll - } - - it("should be able to transform a stream") { - import DataFlow._ - - def ints(n: Int, max: Int): List[Int] = - if (n == max) Nil - else n :: ints(n + 1, max) - - def sum(s: Int, stream: List[Int]): List[Int] = stream match { - case Nil => s :: Nil - case h :: t => s :: sum(h + s, t) - } - - val latch = new CountDownLatch(1) - val result = new AtomicReference[List[Int]](Nil) - val x = new DataFlowVariable[List[Int]] - val y = new DataFlowVariable[List[Int]] - val z = new DataFlowVariable[List[Int]] - - thread { x << ints(0, 1000) } - thread { y << sum(0, x()) } - - thread { z << y() - result.set(z()) - latch.countDown - } - - latch.await(3,TimeUnit.SECONDS) should equal (true) - List(x,y,z).foreach(_.shutdown) - result.get should equal (sum(0,ints(0,1000))) - ActorRegistry.shutdownAll - } - 
} - - /*it("should be able to join streams") { - import DataFlow._ - - def ints(n: Int, max: Int, stream: DataFlowStream[Int]): Unit = if (n != max) { - stream <<< n - ints(n + 1, max, stream) - } - - def sum(s: Int, in: DataFlowStream[Int], out: DataFlowStream[Int]): Unit = { - out <<< s - sum(in() + s, in, out) - } - - val producer = new DataFlowStream[Int] - val consumer = new DataFlowStream[Int] - val latch = new CountDownLatch(1) - val result = new AtomicInteger(0) - - thread { ints(0, 1000, producer) } - thread { - Thread.sleep(1000) - result.set(producer.map(x => x * x).foldLeft(0)(_ + _)) - latch.countDown - } - - latch.await(3,TimeUnit.SECONDS) should equal (true) - result.get should equal (332833500) - ActorRegistry.shutdownAll - } - - it("should be able to sum streams recursively") { - import DataFlow._ - - def ints(n: Int, max: Int, stream: DataFlowStream[Int]): Unit = if (n != max) { - stream <<< n - ints(n + 1, max, stream) - } - - def sum(s: Int, in: DataFlowStream[Int], out: DataFlowStream[Int]): Unit = { - out <<< s - sum(in() + s, in, out) - } - - val result = new AtomicLong(0) - - val producer = new DataFlowStream[Int] - val consumer = new DataFlowStream[Int] - val latch = new CountDownLatch(1) - - @tailrec def recurseSum(stream: DataFlowStream[Int]): Unit = { - val x = stream() - - if(result.addAndGet(x) == 166666500) - latch.countDown - - recurseSum(stream) - } - - thread { ints(0, 1000, producer) } - thread { sum(0, producer, consumer) } - thread { recurseSum(consumer) } - - latch.await(15,TimeUnit.SECONDS) should equal (true) - ActorRegistry.shutdownAll - }*/ - - /* Test not ready for prime time, causes some sort of deadlock */ - /* it("should be able to conditionally set variables") { - - import DataFlow._ - - val latch = new CountDownLatch(1) - val x, y, z, v = new DataFlowVariable[Int] - - val main = thread { - x << 1 - z << Math.max(x(),y()) - latch.countDown - } - - val setY = thread { - Thread sleep 2000 - y << 2 - } - - val setV = thread { - v << y - } - - latch.await(2,TimeUnit.SECONDS) should equal (true) - List(x,y,z,v) foreach (_.shutdown) - List(main,setY,setV) foreach (_ ! Exit) - println("Foo") - ActorRegistry.shutdownAll - }*/ -} diff --git a/akka-core/.ensime b/akka-core/.ensime deleted file mode 100644 index 0b21e8eb5c..0000000000 --- a/akka-core/.ensime +++ /dev/null @@ -1,79 +0,0 @@ -( - - ;; Where you unpacked the ENSIME distribution. - :server-root "/Users/jboner/config/emacs-config/lib/ensime" - - ;; The command with which to invoke the ENSIME server. Change this to - ;; "bin/server.bat" if your're on Windows. - :server-cmd "bin/server.sh" - - - ;; The host to connect to. Connecting to remote ENSIME servers is not - ;; currently supported. - ;; ------------------------------ - ;; :server-host "localhost" - - - ;; Assume a standard sbt directory structure. Look in default sbt - ;; locations for dependencies, sources, target, etc. - ;; - ;; Note for sbt subprojects: Each subproject needs it's own .ensime - ;; file. - ;; ----------------------------- - :use-sbt t - :sbt-compile-conf "compile" - - - ;; Use an existing pom.xml to determine the dependencies - ;; for the project. A Maven-style directory structure is assumed. - ;; ----------------------------- - ;; :use-maven t - ;; :maven-compile-scopes "compile" - ;; :maven-runtime-scopes "runtime" - - - ;; Use an existing ivy.xml to determine the dependencies - ;; for the project. A Maven-style directory structure is assumed. 
- ;; ----------------------------- - ;; :use-ivy t - ;; :ivy-compile-conf "compile" - ;; :ivy-runtime-conf "compile" - - - ;; The home package for your project. - ;; Used by ENSIME to populate the project outline view. - ;; ------------------------------ - :project-package "se.scalablesolutions.akka" - - - ;; :sources ([dir | file]*) - ;; Include source files by directory(recursively) or by filename. - ;; ------------------------------ - :sources ("src/main/") - - - ;; :dependency-jars ([dir | file]*) - ;; Include jars by directory(recursively) or by filename. - ;; ------------------------------ - ;; :dependency-jars ("lib") - - - ;; :dependency-dirs ([dir | file]*) - ;; Include directories of .class files. - ;; ------------------------------ - ;; :dependency-dirs ("target/classes") - - - ;; :target dir - ;; Specify the target of the project build process. Should be - ;; the directory where .class files are written - ;; - ;; The target is used to populate the classpath when launching - ;; the inferior scala repl. - ;; ------------------------------ - ;; :target "target/classes" - - ) - - - diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/ProtobufProtocol.java b/akka-core/src/test/java/se/scalablesolutions/akka/actor/ProtobufProtocol.java deleted file mode 100644 index 683f008729..0000000000 --- a/akka-core/src/test/java/se/scalablesolutions/akka/actor/ProtobufProtocol.java +++ /dev/null @@ -1,1060 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// source: ProtobufProtocol.proto - -package se.scalablesolutions.akka.actor; - -public final class ProtobufProtocol { - private ProtobufProtocol() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public static final class ProtobufPOJO extends - com.google.protobuf.GeneratedMessage { - // Use ProtobufPOJO.newBuilder() to construct. 
- private ProtobufPOJO() { - initFields(); - } - private ProtobufPOJO(boolean noInit) {} - - private static final ProtobufPOJO defaultInstance; - public static ProtobufPOJO getDefaultInstance() { - return defaultInstance; - } - - public ProtobufPOJO getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_fieldAccessorTable; - } - - // required uint64 id = 1; - public static final int ID_FIELD_NUMBER = 1; - private boolean hasId; - private long id_ = 0L; - public boolean hasId() { return hasId; } - public long getId() { return id_; } - - // required string name = 2; - public static final int NAME_FIELD_NUMBER = 2; - private boolean hasName; - private java.lang.String name_ = ""; - public boolean hasName() { return hasName; } - public java.lang.String getName() { return name_; } - - // required bool status = 3; - public static final int STATUS_FIELD_NUMBER = 3; - private boolean hasStatus; - private boolean status_ = false; - public boolean hasStatus() { return hasStatus; } - public boolean getStatus() { return status_; } - - private void initFields() { - } - public final boolean isInitialized() { - if (!hasId) return false; - if (!hasName) return false; - if (!hasStatus) return false; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (hasId()) { - output.writeUInt64(1, getId()); - } - if (hasName()) { - output.writeString(2, getName()); - } - if (hasStatus()) { - output.writeBool(3, getStatus()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (hasId()) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(1, getId()); - } - if (hasName()) { - size += com.google.protobuf.CodedOutputStream - .computeStringSize(2, getName()); - } - if (hasStatus()) { - size += com.google.protobuf.CodedOutputStream - .computeBoolSize(3, getStatus()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder { - private se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO result; - - // Construct using se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO.newBuilder() - private Builder() {} - - private static Builder create() { - Builder builder = new Builder(); - builder.result = new se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO(); - return builder; - } - - protected se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO internalGetResult() { - return result; - } - - public Builder clear() { - if (result == null) { - throw new IllegalStateException( - "Cannot call clear() after build()."); - } - result = new se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO(); - return this; - } - - public Builder clone() { - return create().mergeFrom(result); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO.getDescriptor(); - } - - public se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO getDefaultInstanceForType() { - return se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO.getDefaultInstance(); - } - - public boolean isInitialized() { - return 
result.isInitialized(); - } - public se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO build() { - if (result != null && !isInitialized()) { - throw newUninitializedMessageException(result); - } - return buildPartial(); - } - - private se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - if (!isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return buildPartial(); - } - - public se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO buildPartial() { - if (result == null) { - throw new IllegalStateException( - "build() has already been called on this Builder."); - } - se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO returnMe = result; - result = null; - return returnMe; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO) { - return mergeFrom((se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO other) { - if (other == se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO.getDefaultInstance()) return this; - if (other.hasId()) { - setId(other.getId()); - } - if (other.hasName()) { - setName(other.getName()); - } - if (other.hasStatus()) { - setStatus(other.getStatus()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - return this; - } - break; - } - case 8: { - setId(input.readUInt64()); - break; - } - case 18: { - setName(input.readString()); - break; - } - case 24: { - setStatus(input.readBool()); - break; - } - } - } - } - - - // required uint64 id = 1; - public boolean hasId() { - return result.hasId(); - } - public long getId() { - return result.getId(); - } - public Builder setId(long value) { - result.hasId = true; - result.id_ = value; - return this; - } - public Builder clearId() { - result.hasId = false; - result.id_ = 0L; - return this; - } - - // required string name = 2; - public boolean hasName() { - return result.hasName(); - } - public java.lang.String getName() { - return result.getName(); - } - public Builder setName(java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - result.hasName = true; - result.name_ = value; - return this; - } - public Builder clearName() { - result.hasName = false; - result.name_ = getDefaultInstance().getName(); - return this; - } - - // required bool status = 3; - public boolean hasStatus() { - return result.hasStatus(); - } - public boolean getStatus() { - return result.getStatus(); - } - public Builder setStatus(boolean value) { - result.hasStatus = true; - result.status_ = value; - return this; - } - public Builder clearStatus() { - result.hasStatus = 
false; - result.status_ = false; - return this; - } - - // @@protoc_insertion_point(builder_scope:se.scalablesolutions.akka.actor.ProtobufPOJO) - } - - static { - defaultInstance = new ProtobufPOJO(true); - se.scalablesolutions.akka.actor.ProtobufProtocol.internalForceInit(); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:se.scalablesolutions.akka.actor.ProtobufPOJO) - } - - public static final class Counter extends - com.google.protobuf.GeneratedMessage { - // Use Counter.newBuilder() to construct. - private Counter() { - initFields(); - } - private Counter(boolean noInit) {} - - private static final Counter defaultInstance; - public static Counter getDefaultInstance() { - return defaultInstance; - } - - public Counter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_Counter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_Counter_fieldAccessorTable; - } - - // required uint32 count = 1; - public static final int COUNT_FIELD_NUMBER = 1; - private boolean hasCount; - private int count_ = 0; - public boolean hasCount() { return hasCount; } - public int getCount() { return count_; } - - private void initFields() { - } - public final boolean isInitialized() { - if (!hasCount) return false; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (hasCount()) { - output.writeUInt32(1, getCount()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (hasCount()) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(1, getCount()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static 
se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(se.scalablesolutions.akka.actor.ProtobufProtocol.Counter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder { - private se.scalablesolutions.akka.actor.ProtobufProtocol.Counter result; - - // Construct using se.scalablesolutions.akka.actor.ProtobufProtocol.Counter.newBuilder() - private Builder() {} - - private static Builder create() { - Builder builder = new Builder(); - builder.result = new se.scalablesolutions.akka.actor.ProtobufProtocol.Counter(); - return builder; - } - - protected se.scalablesolutions.akka.actor.ProtobufProtocol.Counter internalGetResult() { - return result; - } - - public Builder clear() { - if (result == null) { - throw new IllegalStateException( - "Cannot call clear() after build()."); - } - result = new se.scalablesolutions.akka.actor.ProtobufProtocol.Counter(); - return this; - } - - public Builder clone() { - return create().mergeFrom(result); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return se.scalablesolutions.akka.actor.ProtobufProtocol.Counter.getDescriptor(); - } - - public se.scalablesolutions.akka.actor.ProtobufProtocol.Counter getDefaultInstanceForType() { - return se.scalablesolutions.akka.actor.ProtobufProtocol.Counter.getDefaultInstance(); - } - - public boolean isInitialized() { - return result.isInitialized(); - } - public se.scalablesolutions.akka.actor.ProtobufProtocol.Counter build() { - if (result != null && !isInitialized()) { - throw newUninitializedMessageException(result); - } - return buildPartial(); - } - - private se.scalablesolutions.akka.actor.ProtobufProtocol.Counter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - if (!isInitialized()) { - throw newUninitializedMessageException( - 
result).asInvalidProtocolBufferException(); - } - return buildPartial(); - } - - public se.scalablesolutions.akka.actor.ProtobufProtocol.Counter buildPartial() { - if (result == null) { - throw new IllegalStateException( - "build() has already been called on this Builder."); - } - se.scalablesolutions.akka.actor.ProtobufProtocol.Counter returnMe = result; - result = null; - return returnMe; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof se.scalablesolutions.akka.actor.ProtobufProtocol.Counter) { - return mergeFrom((se.scalablesolutions.akka.actor.ProtobufProtocol.Counter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(se.scalablesolutions.akka.actor.ProtobufProtocol.Counter other) { - if (other == se.scalablesolutions.akka.actor.ProtobufProtocol.Counter.getDefaultInstance()) return this; - if (other.hasCount()) { - setCount(other.getCount()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - return this; - } - break; - } - case 8: { - setCount(input.readUInt32()); - break; - } - } - } - } - - - // required uint32 count = 1; - public boolean hasCount() { - return result.hasCount(); - } - public int getCount() { - return result.getCount(); - } - public Builder setCount(int value) { - result.hasCount = true; - result.count_ = value; - return this; - } - public Builder clearCount() { - result.hasCount = false; - result.count_ = 0; - return this; - } - - // @@protoc_insertion_point(builder_scope:se.scalablesolutions.akka.actor.Counter) - } - - static { - defaultInstance = new Counter(true); - se.scalablesolutions.akka.actor.ProtobufProtocol.internalForceInit(); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:se.scalablesolutions.akka.actor.Counter) - } - - public static final class DualCounter extends - com.google.protobuf.GeneratedMessage { - // Use DualCounter.newBuilder() to construct. 
- private DualCounter() { - initFields(); - } - private DualCounter(boolean noInit) {} - - private static final DualCounter defaultInstance; - public static DualCounter getDefaultInstance() { - return defaultInstance; - } - - public DualCounter getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_DualCounter_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_DualCounter_fieldAccessorTable; - } - - // required uint32 count1 = 1; - public static final int COUNT1_FIELD_NUMBER = 1; - private boolean hasCount1; - private int count1_ = 0; - public boolean hasCount1() { return hasCount1; } - public int getCount1() { return count1_; } - - // required uint32 count2 = 2; - public static final int COUNT2_FIELD_NUMBER = 2; - private boolean hasCount2; - private int count2_ = 0; - public boolean hasCount2() { return hasCount2; } - public int getCount2() { return count2_; } - - private void initFields() { - } - public final boolean isInitialized() { - if (!hasCount1) return false; - if (!hasCount2) return false; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (hasCount1()) { - output.writeUInt32(1, getCount1()); - } - if (hasCount2()) { - output.writeUInt32(2, getCount2()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (hasCount1()) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(1, getCount1()); - } - if (hasCount2()) { - size += com.google.protobuf.CodedOutputStream - .computeUInt32Size(2, getCount2()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static 
se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder { - private se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter result; - - // Construct using se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter.newBuilder() - private Builder() {} - - private static Builder create() { - Builder builder = new Builder(); - builder.result = new se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter(); - return builder; - } - - protected se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter internalGetResult() { - return result; - } - - public Builder clear() { - if (result == null) { - throw new IllegalStateException( - "Cannot call clear() after build()."); - } - result = new se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter(); - return this; - } - - public Builder clone() { - return create().mergeFrom(result); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter.getDescriptor(); - } - - public se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter getDefaultInstanceForType() { - return se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter.getDefaultInstance(); - } - - public boolean isInitialized() { - return result.isInitialized(); - } - public se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter build() { - if (result != null && !isInitialized()) { - throw newUninitializedMessageException(result); - } - return buildPartial(); - } - - private se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - if (!isInitialized()) { - 
throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return buildPartial(); - } - - public se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter buildPartial() { - if (result == null) { - throw new IllegalStateException( - "build() has already been called on this Builder."); - } - se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter returnMe = result; - result = null; - return returnMe; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter) { - return mergeFrom((se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter other) { - if (other == se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter.getDefaultInstance()) return this; - if (other.hasCount1()) { - setCount1(other.getCount1()); - } - if (other.hasCount2()) { - setCount2(other.getCount2()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - return this; - } - break; - } - case 8: { - setCount1(input.readUInt32()); - break; - } - case 16: { - setCount2(input.readUInt32()); - break; - } - } - } - } - - - // required uint32 count1 = 1; - public boolean hasCount1() { - return result.hasCount1(); - } - public int getCount1() { - return result.getCount1(); - } - public Builder setCount1(int value) { - result.hasCount1 = true; - result.count1_ = value; - return this; - } - public Builder clearCount1() { - result.hasCount1 = false; - result.count1_ = 0; - return this; - } - - // required uint32 count2 = 2; - public boolean hasCount2() { - return result.hasCount2(); - } - public int getCount2() { - return result.getCount2(); - } - public Builder setCount2(int value) { - result.hasCount2 = true; - result.count2_ = value; - return this; - } - public Builder clearCount2() { - result.hasCount2 = false; - result.count2_ = 0; - return this; - } - - // @@protoc_insertion_point(builder_scope:se.scalablesolutions.akka.actor.DualCounter) - } - - static { - defaultInstance = new DualCounter(true); - se.scalablesolutions.akka.actor.ProtobufProtocol.internalForceInit(); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:se.scalablesolutions.akka.actor.DualCounter) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_se_scalablesolutions_akka_actor_Counter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - 
internal_static_se_scalablesolutions_akka_actor_Counter_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_se_scalablesolutions_akka_actor_DualCounter_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_se_scalablesolutions_akka_actor_DualCounter_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\026ProtobufProtocol.proto\022\037se.scalablesol" + - "utions.akka.actor\"8\n\014ProtobufPOJO\022\n\n\002id\030" + - "\001 \002(\004\022\014\n\004name\030\002 \002(\t\022\016\n\006status\030\003 \002(\010\"\030\n\007C" + - "ounter\022\r\n\005count\030\001 \002(\r\"-\n\013DualCounter\022\016\n\006" + - "count1\030\001 \002(\r\022\016\n\006count2\030\002 \002(\r" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_descriptor, - new java.lang.String[] { "Id", "Name", "Status", }, - se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO.class, - se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO.Builder.class); - internal_static_se_scalablesolutions_akka_actor_Counter_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_se_scalablesolutions_akka_actor_Counter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_se_scalablesolutions_akka_actor_Counter_descriptor, - new java.lang.String[] { "Count", }, - se.scalablesolutions.akka.actor.ProtobufProtocol.Counter.class, - se.scalablesolutions.akka.actor.ProtobufProtocol.Counter.Builder.class); - internal_static_se_scalablesolutions_akka_actor_DualCounter_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_se_scalablesolutions_akka_actor_DualCounter_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_se_scalablesolutions_akka_actor_DualCounter_descriptor, - new java.lang.String[] { "Count1", "Count2", }, - se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter.class, - se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }, assigner); - } - - public static void internalForceInit() {} - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/akka-core/src/test/scala/Messages.scala b/akka-core/src/test/scala/Messages.scala deleted file mode 100644 index ad1fcf8885..0000000000 --- a/akka-core/src/test/scala/Messages.scala +++ /dev/null @@ -1,44 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka - -import 
se.scalablesolutions.akka.serialization.Serializable -import sbinary._ -import sbinary.Operations._ - -sealed abstract class TestMessage - -case object Ping extends TestMessage -case object Pong extends TestMessage -case object OneWay extends TestMessage -case object Die extends TestMessage -case object NotifySupervisorExit extends TestMessage - -case class User(val usernamePassword: Tuple2[String, String], - val email: String, - val age: Int) - extends Serializable.SBinary[User] { - def this() = this(null, null, 0) - import sbinary.DefaultProtocol._ - implicit object UserFormat extends Format[User] { - def reads(in : Input) = User( - read[Tuple2[String, String]](in), - read[String](in), - read[Int](in)) - def writes(out: Output, value: User) = { - write[Tuple2[String, String]](out, value.usernamePassword) - write[String](out, value.email) - write[Int](out, value.age) - } - } - def fromBytes(bytes: Array[Byte]) = fromByteArray[User](bytes) - def toBytes: Array[Byte] = toByteArray(this) -} - -case object RemotePing extends TestMessage -case object RemotePong extends TestMessage -case object RemoteOneWay extends TestMessage -case object RemoteDie extends TestMessage -case object RemoteNotifySupervisorExit extends TestMessage diff --git a/akka-core/src/test/scala/actor/actor/ActorFireForgetRequestReplySpec.scala b/akka-core/src/test/scala/actor/actor/ActorFireForgetRequestReplySpec.scala deleted file mode 100644 index 9d3ce765ec..0000000000 --- a/akka-core/src/test/scala/actor/actor/ActorFireForgetRequestReplySpec.scala +++ /dev/null @@ -1,92 +0,0 @@ -package se.scalablesolutions.akka.actor - -import java.util.concurrent.{TimeUnit, CyclicBarrier, TimeoutException} -import se.scalablesolutions.akka.config.ScalaConfig._ -import org.scalatest.junit.JUnitSuite -import org.junit.Test - -import se.scalablesolutions.akka.dispatch.Dispatchers -import Actor._ - -object ActorFireForgetRequestReplySpec { - class ReplyActor extends Actor { - self.dispatcher = Dispatchers.newThreadBasedDispatcher(self) - - def receive = { - case "Send" => - self.reply("Reply") - case "SendImplicit" => - self.sender.get ! "ReplyImplicit" - } - } - - class CrashingTemporaryActor extends Actor { - self.lifeCycle = Some(LifeCycle(Temporary)) - - def receive = { - case "Die" => - state.finished.await - throw new Exception("Expected exception") - } - } - - class SenderActor(replyActor: ActorRef) extends Actor { - self.dispatcher = Dispatchers.newThreadBasedDispatcher(self) - - def receive = { - case "Init" => replyActor ! "Send" - case "Reply" => { - state.s = "Reply" - state.finished.await - } - case "InitImplicit" => replyActor ! "SendImplicit" - case "ReplyImplicit" => { - state.s = "ReplyImplicit" - state.finished.await - } - } - } - - object state { - var s = "NIL" - val finished = new CyclicBarrier(2) - } -} - -class ActorFireForgetRequestReplySpec extends JUnitSuite { - import ActorFireForgetRequestReplySpec._ - - @Test - def shouldReplyToBangMessageUsingReply = { - state.finished.reset - val replyActor = actorOf[ReplyActor].start - val senderActor = actorOf(new SenderActor(replyActor)).start - senderActor ! "Init" - try { state.finished.await(1L, TimeUnit.SECONDS) } - catch { case e: TimeoutException => fail("Never got the message") } - assert("Reply" === state.s) - } - - @Test - def shouldReplyToBangMessageUsingImplicitSender = { - state.finished.reset - val replyActor = actorOf[ReplyActor].start - val senderActor = actorOf(new SenderActor(replyActor)).start - senderActor ! 
"InitImplicit" - try { state.finished.await(1L, TimeUnit.SECONDS) } - catch { case e: TimeoutException => fail("Never got the message") } - assert("ReplyImplicit" === state.s) - } - - @Test - def shouldShutdownCrashedTemporaryActor = { - state.finished.reset - val actor = actorOf[CrashingTemporaryActor].start - assert(actor.isRunning) - actor ! "Die" - try { state.finished.await(1L, TimeUnit.SECONDS) } - catch { case e: TimeoutException => fail("Never got the message") } - Thread.sleep(100) - assert(actor.isShutdown) - } -} diff --git a/akka-core/src/test/scala/actor/actor/AgentSpec.scala b/akka-core/src/test/scala/actor/actor/AgentSpec.scala deleted file mode 100644 index 71911c3ad8..0000000000 --- a/akka-core/src/test/scala/actor/actor/AgentSpec.scala +++ /dev/null @@ -1,111 +0,0 @@ -package se.scalablesolutions.akka.actor - -import se.scalablesolutions.akka.actor.Actor.transactor -import org.scalatest.Suite -import org.scalatest.junit.JUnitRunner -import org.scalatest.matchers.MustMatchers - -import org.junit.runner.RunWith -import org.junit.Test - -import java.util.concurrent.{TimeUnit, CountDownLatch} - -@RunWith(classOf[JUnitRunner]) -class AgentSpec extends junit.framework.TestCase with Suite with MustMatchers { - - @Test def testSendFun = { - val agent = Agent(5) - agent send (_ + 1) - agent send (_ * 2) - val result = agent() - result must be(12) - agent.close - } - - @Test def testSendValue = { - val agent = Agent(5) - agent send 6 - val result = agent() - result must be(6) - agent.close - } - - @Test def testSendProc = { - val agent = Agent(5) - var result = 0 - val latch = new CountDownLatch(2) - agent sendProc { e => result += e; latch.countDown } - agent sendProc { e => result += e; latch.countDown } - assert(latch.await(5, TimeUnit.SECONDS)) - result must be(10) - agent.close - } - - @Test def testOneAgentsendWithinEnlosingTransactionSuccess = { - case object Go - val agent = Agent(5) - val latch = new CountDownLatch(1) - val tx = transactor { - case Go => agent send { e => latch.countDown; e + 1 } - } - tx ! Go - assert(latch.await(5, TimeUnit.SECONDS)) - val result = agent() - result must be(6) - agent.close - tx.stop - } - -/* - // Strange test - do we really need it? - @Test def testDoingAgentGetInEnlosingTransactionShouldYieldException = { - case object Go - val latch = new CountDownLatch(1) - val agent = Agent(5) - val tx = transactor { - case Go => - agent send (_ * 2) - try { agent() } - catch { - case _ => latch.countDown - } - } - tx ! 
Go - assert(latch.await(5, TimeUnit.SECONDS)) - agent.close - tx.stop - assert(true) - } -*/ - - @Test def testAgentForeach = { - val agent1 = Agent(3) - var result = 0 - for (first <- agent1) { - result = first + 1 - } - result must be(4) - agent1.close - } - - @Test def testAgentMap = { - val agent1 = Agent(3) - val result = for (first <- agent1) yield first + 1 - result() must be(4) - result.close - agent1.close - } - - @Test def testAgentFlatMap = { - val agent1 = Agent(3) - val agent2 = Agent(5) - val result = for { - first <- agent1 - second <- agent2 - } yield second + first - result() must be(8) - result.close - agent1.close - agent2.close - } -} diff --git a/akka-core/src/test/scala/actor/actor/Bench.scala b/akka-core/src/test/scala/actor/actor/Bench.scala deleted file mode 100644 index 8e3a44f3a0..0000000000 --- a/akka-core/src/test/scala/actor/actor/Bench.scala +++ /dev/null @@ -1,119 +0,0 @@ -/* The Computer Language Benchmarks Game - http://shootout.alioth.debian.org/ - contributed by Julien Gaugaz - inspired by the version contributed by Yura Taras and modified by Isaac Gouy -*/ -package se.scalablesolutions.akka.actor - -import se.scalablesolutions.akka.actor.Actor._ - -object Chameneos { - - sealed trait ChameneosEvent - case class Meet(from: ActorRef, colour: Colour) extends ChameneosEvent - case class Change(colour: Colour) extends ChameneosEvent - case class MeetingCount(count: Int) extends ChameneosEvent - case object Exit extends ChameneosEvent - - abstract class Colour - case object RED extends Colour - case object YELLOW extends Colour - case object BLUE extends Colour - case object FADED extends Colour - - val colours = Array[Colour](BLUE, RED, YELLOW) - - var start = 0L - var end = 0L - - class Chameneo(var mall: ActorRef, var colour: Colour, cid: Int) extends Actor { - var meetings = 0 - self.start - mall ! Meet(self, colour) - - def receive = { - case Meet(from, otherColour) => - colour = complement(otherColour) - meetings = meetings +1 - from ! Change(colour) - mall ! Meet(self, colour) - - case Change(newColour) => - colour = newColour - meetings = meetings +1 - mall ! Meet(self, colour) - - case Exit => - colour = FADED - self.sender.get ! MeetingCount(meetings) - } - - def complement(otherColour: Colour): Colour = colour match { - case RED => otherColour match { - case RED => RED - case YELLOW => BLUE - case BLUE => YELLOW - case FADED => FADED - } - case YELLOW => otherColour match { - case RED => BLUE - case YELLOW => YELLOW - case BLUE => RED - case FADED => FADED - } - case BLUE => otherColour match { - case RED => YELLOW - case YELLOW => RED - case BLUE => BLUE - case FADED => FADED - } - case FADED => FADED - } - - override def toString = cid + "(" + colour + ")" - } - - class Mall(var n: Int, numChameneos: Int) extends Actor { - var waitingChameneo: Option[ActorRef] = None - var sumMeetings = 0 - var numFaded = 0 - - override def init = { - for (i <- 0 until numChameneos) actorOf(new Chameneo(self, colours(i % 3), i)) - } - - def receive = { - case MeetingCount(i) => - numFaded += 1 - sumMeetings += i - if (numFaded == numChameneos) { - Chameneos.end = System.currentTimeMillis - self.stop - } - - case msg @ Meet(a, c) => - if (n > 0) { - waitingChameneo match { - case Some(chameneo) => - n -= 1 - chameneo ! msg - waitingChameneo = None - case None => waitingChameneo = self.sender - } - } else { - waitingChameneo.foreach(_ ! Exit) - self.sender.get ! 
Exit - } - } - } - - def run { -// System.setProperty("akka.config", "akka.conf") - Chameneos.start = System.currentTimeMillis - actorOf(new Mall(1000000, 4)).start - Thread.sleep(10000) - println("Elapsed: " + (end - start)) - } - - def main(args : Array[String]): Unit = run -} diff --git a/akka-core/src/test/scala/actor/actor/ForwardActorSpec.scala b/akka-core/src/test/scala/actor/actor/ForwardActorSpec.scala deleted file mode 100644 index e3ab0bded7..0000000000 --- a/akka-core/src/test/scala/actor/actor/ForwardActorSpec.scala +++ /dev/null @@ -1,81 +0,0 @@ -package se.scalablesolutions.akka.actor - -import java.util.concurrent.{TimeUnit, CountDownLatch} -import org.scalatest.junit.JUnitSuite -import org.junit.Test - -import Actor._ - -object ForwardActorSpec { - object ForwardState { - var sender: Option[ActorRef] = None - } - - class ReceiverActor extends Actor { - val latch = new CountDownLatch(1) - def receive = { - case "SendBang" => { - ForwardState.sender = self.sender - latch.countDown - } - case "SendBangBang" => self.reply("SendBangBang") - } - } - - - class ForwardActor extends Actor { - val receiverActor = actorOf[ReceiverActor] - receiverActor.start - def receive = { - case "SendBang" => receiverActor.forward("SendBang") - case "SendBangBang" => receiverActor.forward("SendBangBang") - } - } - - class BangSenderActor extends Actor { - val forwardActor = actorOf[ForwardActor] - forwardActor.start - forwardActor ! "SendBang" - def receive = { - case _ => {} - } - } - - class BangBangSenderActor extends Actor { - val latch = new CountDownLatch(1) - val forwardActor = actorOf[ForwardActor] - forwardActor.start - (forwardActor !! "SendBangBang") match { - case Some(_) => latch.countDown - case None => {} - } - def receive = { - case _ => {} - } - } -} - -class ForwardActorSpec extends JUnitSuite { - import ForwardActorSpec._ - - @Test - def shouldForwardActorReferenceWhenInvokingForwardOnBang { - val senderActor = actorOf[BangSenderActor] - val latch = senderActor.actor.asInstanceOf[BangSenderActor] - .forwardActor.actor.asInstanceOf[ForwardActor] - .receiverActor.actor.asInstanceOf[ReceiverActor] - .latch - senderActor.start - assert(latch.await(1L, TimeUnit.SECONDS)) - assert(ForwardState.sender ne null) - assert(senderActor.toString === ForwardState.sender.get.toString) - } - - @Test - def shouldForwardActorReferenceWhenInvokingForwardOnBangBang { - val senderActor = actorOf[BangBangSenderActor] - senderActor.start - val latch = senderActor.actor.asInstanceOf[BangBangSenderActor].latch - assert(latch.await(1L, TimeUnit.SECONDS)) - } -} diff --git a/akka-core/src/test/scala/actor/actor/FsmActorSpec.scala b/akka-core/src/test/scala/actor/actor/FsmActorSpec.scala deleted file mode 100644 index e4515bd3da..0000000000 --- a/akka-core/src/test/scala/actor/actor/FsmActorSpec.scala +++ /dev/null @@ -1,82 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor - -import org.scalatest.junit.JUnitSuite -import org.junit.Test - -import org.multiverse.api.latches.StandardLatch - -import java.util.concurrent.TimeUnit - -object FSMActorSpec { - - class Lock(code: String, - timeout: Int, - unlockedLatch: StandardLatch, - lockedLatch: StandardLatch) extends Actor with FSM[CodeState] { - - def initialState = State(NextState, locked, CodeState("", code)) - - def locked: StateFunction = { - case Event(digit: Char, CodeState(soFar, code)) => { - soFar + digit match { - case incomplete if incomplete.length < code.length => - State(NextState, 
locked, CodeState(incomplete, code)) - case codeTry if (codeTry == code) => { - doUnlock - State(NextState, open, CodeState("", code), Some(timeout)) - } - case wrong => { - log.error("Wrong code %s", wrong) - State(NextState, locked, CodeState("", code)) - } - } - } - } - - def open: StateFunction = { - case Event(StateTimeout, stateData) => { - doLock - State(NextState, locked, stateData) - } - } - - private def doLock() { - log.info("Locked") - lockedLatch.open - } - - private def doUnlock = { - log.info("Unlocked") - unlockedLatch.open - } - } - - case class CodeState(soFar: String, code: String) -} - -class FSMActorSpec extends JUnitSuite { - import FSMActorSpec._ - - @Test - def unlockTheLock = { - val unlockedLatch = new StandardLatch - val lockedLatch = new StandardLatch - - // lock that locked after being open for 1 sec - val lock = Actor.actorOf(new Lock("33221", 1000, unlockedLatch, lockedLatch)).start - - lock ! '3' - lock ! '3' - lock ! '2' - lock ! '2' - lock ! '1' - - assert(unlockedLatch.tryAwait(1, TimeUnit.SECONDS)) - assert(lockedLatch.tryAwait(2, TimeUnit.SECONDS)) - } -} - diff --git a/akka-core/src/test/scala/actor/actor/ReceiveTimeoutSpec.scala b/akka-core/src/test/scala/actor/actor/ReceiveTimeoutSpec.scala deleted file mode 100644 index ff43467efc..0000000000 --- a/akka-core/src/test/scala/actor/actor/ReceiveTimeoutSpec.scala +++ /dev/null @@ -1,77 +0,0 @@ -package se.scalablesolutions.akka.actor - -import org.scalatest.junit.JUnitSuite -import org.junit.Test - -import java.util.concurrent.TimeUnit -import org.multiverse.api.latches.StandardLatch -import Actor._ - -class ReceiveTimeoutSpec extends JUnitSuite { - - @Test def receiveShouldGetTimeout= { - - val timeoutLatch = new StandardLatch - - val timeoutActor = actorOf(new Actor { - self.receiveTimeout = Some(500L) - - protected def receive = { - case ReceiveTimeout => timeoutLatch.open - } - }).start - - assert(timeoutLatch.tryAwait(3, TimeUnit.SECONDS)) - } - - @Test def swappedReceiveShouldAlsoGetTimout = { - val timeoutLatch = new StandardLatch - - val timeoutActor = actorOf(new Actor { - self.receiveTimeout = Some(500L) - - protected def receive = { - case ReceiveTimeout => timeoutLatch.open - } - }).start - - // after max 1 second the timeout should already been sent - assert(timeoutLatch.tryAwait(3, TimeUnit.SECONDS)) - - val swappedLatch = new StandardLatch - timeoutActor ! HotSwap(Some{ - case ReceiveTimeout => swappedLatch.open - }) - - assert(swappedLatch.tryAwait(3, TimeUnit.SECONDS)) - } - - @Test def timeoutShouldBeCancelledAfterRegularReceive = { - - val timeoutLatch = new StandardLatch - case object Tick - val timeoutActor = actorOf(new Actor { - self.receiveTimeout = Some(500L) - - protected def receive = { - case Tick => () - case ReceiveTimeout => timeoutLatch.open - } - }).start - timeoutActor ! 
Tick - - assert(timeoutLatch.tryAwait(2, TimeUnit.SECONDS) == false) - } - - @Test def timeoutShouldNotBeSentWhenNotSpecified = { - val timeoutLatch = new StandardLatch - val timeoutActor = actorOf(new Actor { - - protected def receive = { - case ReceiveTimeout => timeoutLatch.open - } - }).start - - assert(timeoutLatch.tryAwait(1, TimeUnit.SECONDS) == false) - } -} diff --git a/akka-core/src/test/scala/actor/actor/TransactorSpec.scala b/akka-core/src/test/scala/actor/actor/TransactorSpec.scala deleted file mode 100644 index dd23a76a88..0000000000 --- a/akka-core/src/test/scala/actor/actor/TransactorSpec.scala +++ /dev/null @@ -1,255 +0,0 @@ -package se.scalablesolutions.akka.actor - -import java.util.concurrent.{TimeUnit, CountDownLatch} -import org.scalatest.junit.JUnitSuite -import org.junit.Test - -import se.scalablesolutions.akka.stm.{Ref, TransactionalMap, TransactionalVector} -import Actor._ - -object TransactorSpec { - case class GetMapState(key: String) - case object GetVectorState - case object GetVectorSize - case object GetRefState - - case class SetMapState(key: String, value: String) - case class SetVectorState(key: String) - case class SetRefState(key: String) - case class Success(key: String, value: String) - case class Failure(key: String, value: String, failer: ActorRef) - - case class SetMapStateOneWay(key: String, value: String) - case class SetVectorStateOneWay(key: String) - case class SetRefStateOneWay(key: String) - case class SuccessOneWay(key: String, value: String) - case class FailureOneWay(key: String, value: String, failer: ActorRef) - - case object GetNotifier -} -import TransactorSpec._ - -class StatefulTransactor(expectedInvocationCount: Int) extends Transactor { - def this() = this(0) - self.timeout = 5000 - - val notifier = new CountDownLatch(expectedInvocationCount) - - private val mapState = TransactionalMap[String, String]() - private val vectorState = TransactionalVector[String]() - private val refState = Ref[String]() - - def receive = { - case GetNotifier => - self.reply(notifier) - case GetMapState(key) => - self.reply(mapState.get(key).get) - notifier.countDown - case GetVectorSize => - self.reply(vectorState.length.asInstanceOf[AnyRef]) - notifier.countDown - case GetRefState => - self.reply(refState.get) - notifier.countDown - case SetMapState(key, msg) => - mapState.put(key, msg) - self.reply(msg) - notifier.countDown - case SetVectorState(msg) => - vectorState.add(msg) - self.reply(msg) - notifier.countDown - case SetRefState(msg) => - refState.swap(msg) - self.reply(msg) - notifier.countDown - case Success(key, msg) => - mapState.put(key, msg) - vectorState.add(msg) - refState.swap(msg) - self.reply(msg) - notifier.countDown - case Failure(key, msg, failer) => - mapState.put(key, msg) - vectorState.add(msg) - refState.swap(msg) - failer !! "Failure" - self.reply(msg) - notifier.countDown - case SetMapStateOneWay(key, msg) => - mapState.put(key, msg) - notifier.countDown - case SetVectorStateOneWay(msg) => - vectorState.add(msg) - notifier.countDown - case SetRefStateOneWay(msg) => - refState.swap(msg) - notifier.countDown - case SuccessOneWay(key, msg) => - mapState.put(key, msg) - vectorState.add(msg) - refState.swap(msg) - notifier.countDown - case FailureOneWay(key, msg, failer) => - mapState.put(key, msg) - vectorState.add(msg) - refState.swap(msg) - notifier.countDown - failer ! 
"Failure" - } -} - -@serializable -class FailerTransactor extends Transactor { - - def receive = { - case "Failure" => - throw new RuntimeException("Expected exception; to test fault-tolerance") - } -} - -class TransactorSpec extends JUnitSuite { - - @Test - def shouldOneWayMapShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { - val stateful = actorOf(new StatefulTransactor(2)) - stateful.start - stateful ! SetMapStateOneWay("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init") // set init state - stateful ! SuccessOneWay("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired - val notifier = (stateful !! GetNotifier).as[CountDownLatch] - assert(notifier.get.await(1, TimeUnit.SECONDS)) - assert("new state" === (stateful !! GetMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess")).get) - } - - @Test - def shouldMapShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { - val stateful = actorOf[StatefulTransactor] - stateful.start - stateful !! SetMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init") // set init state - stateful !! Success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired - assert("new state" === (stateful !! GetMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess")).get) - } - - @Test - def shouldOneWayMapShouldRollbackStateForStatefulServerInCaseOfFailure = { - val stateful = actorOf(new StatefulTransactor(2)) - stateful.start - val failer = actorOf[FailerTransactor] - failer.start - stateful ! SetMapStateOneWay("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state - stateful ! FailureOneWay("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) // call failing transactionrequired method - val notifier = (stateful !! GetNotifier).as[CountDownLatch] - assert(notifier.get.await(5, TimeUnit.SECONDS)) - assert("init" === (stateful !! GetMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure")).get) // check that state is == init state - } - - @Test - def shouldMapShouldRollbackStateForStatefulServerInCaseOfFailure = { - val stateful = actorOf[StatefulTransactor] - stateful.start - stateful !! SetMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state - val failer = actorOf[FailerTransactor] - failer.start - try { - stateful !! Failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) // call failing transactionrequired method - fail("should have thrown an exception") - } catch {case e: RuntimeException => {}} - assert("init" === (stateful !! GetMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure")).get) // check that state is == init state - } - - @Test - def shouldOneWayVectorShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { - val stateful = actorOf(new StatefulTransactor(2)) - stateful.start - stateful ! SetVectorStateOneWay("init") // set init state - stateful ! SuccessOneWay("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired - val notifier = (stateful !! GetNotifier).as[CountDownLatch] - assert(notifier.get.await(1, TimeUnit.SECONDS)) - assert(2 === (stateful !! GetVectorSize).get) - } - - @Test - def shouldVectorShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { - val stateful = actorOf[StatefulTransactor] - stateful.start - stateful !! SetVectorState("init") // set init state - stateful !! 
Success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired - assert(2 === (stateful !! GetVectorSize).get) - } - - @Test - def shouldOneWayVectorShouldRollbackStateForStatefulServerInCaseOfFailure = { - val stateful = actorOf(new StatefulTransactor(2)) - stateful.start - stateful ! SetVectorStateOneWay("init") // set init state - Thread.sleep(1000) - val failer = actorOf[FailerTransactor] - failer.start - stateful ! FailureOneWay("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) // call failing transactionrequired method - val notifier = (stateful !! GetNotifier).as[CountDownLatch] - assert(notifier.get.await(1, TimeUnit.SECONDS)) - assert(1 === (stateful !! GetVectorSize).get) - } - - @Test - def shouldVectorShouldRollbackStateForStatefulServerInCaseOfFailure = { - val stateful = actorOf[StatefulTransactor] - stateful.start - stateful !! SetVectorState("init") // set init state - val failer = actorOf[FailerTransactor] - failer.start - try { - stateful !! Failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) // call failing transactionrequired method - fail("should have thrown an exception") - } catch {case e: RuntimeException => {}} - assert(1 === (stateful !! GetVectorSize).get) - } - - @Test - def shouldOneWayRefShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { - val stateful = actorOf(new StatefulTransactor(2)) - stateful.start - stateful ! SetRefStateOneWay("init") // set init state - stateful ! SuccessOneWay("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired - val notifier = (stateful !! GetNotifier).as[CountDownLatch] - assert(notifier.get.await(1, TimeUnit.SECONDS)) - assert("new state" === (stateful !! GetRefState).get) - } - - @Test - def shouldRefShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { - val stateful = actorOf[StatefulTransactor] - stateful.start - stateful !! SetRefState("init") // set init state - stateful !! Success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired - assert("new state" === (stateful !! GetRefState).get) - } - - @Test - def shouldOneWayRefShouldRollbackStateForStatefulServerInCaseOfFailure = { - val stateful = actorOf(new StatefulTransactor(2)) - stateful.start - stateful ! SetRefStateOneWay("init") // set init state - Thread.sleep(1000) - val failer = actorOf[FailerTransactor] - failer.start - stateful ! FailureOneWay("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) // call failing transactionrequired method - val notifier = (stateful !! GetNotifier).as[CountDownLatch] - assert(notifier.get.await(1, TimeUnit.SECONDS)) - assert("init" === (stateful !! (GetRefState, 1000000)).get) // check that state is == init state - } - - @Test - def shouldRefShouldRollbackStateForStatefulServerInCaseOfFailure = { - val stateful = actorOf[StatefulTransactor] - stateful.start - stateful !! SetRefState("init") // set init state - val failer = actorOf[FailerTransactor] - failer.start - try { - stateful !! Failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) // call failing transactionrequired method - fail("should have thrown an exception") - } catch {case e: RuntimeException => {}} - assert("init" === (stateful !! 
GetRefState).get) // check that state is == init state - } -} diff --git a/akka-core/src/test/scala/actor/supervisor/RestartStrategySpec.scala b/akka-core/src/test/scala/actor/supervisor/RestartStrategySpec.scala deleted file mode 100644 index 5023c756e1..0000000000 --- a/akka-core/src/test/scala/actor/supervisor/RestartStrategySpec.scala +++ /dev/null @@ -1,74 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor - -import org.scalatest.junit.JUnitSuite -import org.junit.Test - -import Actor._ -import se.scalablesolutions.akka.config.OneForOneStrategy -import java.util.concurrent.{TimeUnit, CountDownLatch} -import se.scalablesolutions.akka.config.ScalaConfig.{Permanent, LifeCycle} -import org.multiverse.api.latches.StandardLatch - -class RestartStrategySpec extends JUnitSuite { - - object Ping - object Crash - - @Test - def slaveShouldStayDeadAfterMaxRestarts = { - - val boss = actorOf(new Actor{ - self.trapExit = List(classOf[Throwable]) - self.faultHandler = Some(OneForOneStrategy(1, 1000)) - protected def receive = { case _ => () } - }).start - - val restartLatch = new StandardLatch - val secondRestartLatch = new StandardLatch - val countDownLatch = new CountDownLatch(2) - - - val slave = actorOf(new Actor{ - - protected def receive = { - case Ping => countDownLatch.countDown - case Crash => throw new Exception("Crashing...") - } - override def postRestart(reason: Throwable) = { - restartLatch.open - } - - override def shutdown = { - if (restartLatch.isOpen) { - secondRestartLatch.open - } - } - }) - boss.startLink(slave) - - slave ! Ping - slave ! Crash - slave ! Ping - - // test restart and post restart ping - assert(restartLatch.tryAwait(1, TimeUnit.SECONDS)) - assert(countDownLatch.await(1, TimeUnit.SECONDS)) - - // now crash again... should not restart - slave ! Crash - - assert(secondRestartLatch.tryAwait(1, TimeUnit.SECONDS)) - val exceptionLatch = new StandardLatch - try { - slave ! 
Ping // this should fail - } catch { - case e => exceptionLatch.open // expected here - } - assert(exceptionLatch.tryAwait(1, TimeUnit.SECONDS)) - } -} - diff --git a/akka-core/src/test/scala/actor/supervisor/SupervisorHierarchySpec.scala b/akka-core/src/test/scala/actor/supervisor/SupervisorHierarchySpec.scala deleted file mode 100644 index ffc9dbd860..0000000000 --- a/akka-core/src/test/scala/actor/supervisor/SupervisorHierarchySpec.scala +++ /dev/null @@ -1,81 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor - -import org.scalatest.junit.JUnitSuite -import org.junit.Test - -import Actor._ -import se.scalablesolutions.akka.config.OneForOneStrategy - -import java.util.concurrent.{TimeUnit, CountDownLatch} - -object SupervisorHierarchySpec { - class FireWorkerException(msg: String) extends Exception(msg) - - class CountDownActor(countDown: CountDownLatch) extends Actor { - protected def receive = { case _ => () } - override def postRestart(reason: Throwable) = countDown.countDown - } - - class CrasherActor extends Actor { - protected def receive = { case _ => () } - } -} - -class SupervisorHierarchySpec extends JUnitSuite { - import SupervisorHierarchySpec._ - - @Test - def killWorkerShouldRestartMangerAndOtherWorkers = { - val countDown = new CountDownLatch(4) - - val workerOne = actorOf(new CountDownActor(countDown)) - val workerTwo = actorOf(new CountDownActor(countDown)) - val workerThree = actorOf(new CountDownActor(countDown)) - - val boss = actorOf(new Actor{ - self.trapExit = List(classOf[Throwable]) - self.faultHandler = Some(OneForOneStrategy(5, 1000)) - - protected def receive = { case _ => () } - }).start - - val manager = actorOf(new CountDownActor(countDown)) - boss.startLink(manager) - - manager.startLink(workerOne) - manager.startLink(workerTwo) - manager.startLink(workerThree) - - workerOne ! Exit(workerOne, new FireWorkerException("Fire the worker!")) - - // manager + all workers should be restarted by only killing a worker - // manager doesn't trap exits, so boss will restart manager - - assert(countDown.await(2, TimeUnit.SECONDS)) - } - - @Test - def supervisorShouldReceiveNotificationMessageWhenMaximumNumberOfRestartsWithinTimeRangeIsReached = { - val countDown = new CountDownLatch(2) - val crasher = actorOf(new CountDownActor(countDown)) - val boss = actorOf(new Actor{ - self.trapExit = List(classOf[Throwable]) - self.faultHandler = Some(OneForOneStrategy(1, 5000)) - protected def receive = { - case MaximumNumberOfRestartsWithinTimeRangeReached(_, _, _, _) => - countDown.countDown - } - }).start - boss.startLink(crasher) - - crasher ! Exit(crasher, new FireWorkerException("Fire the worker!")) - crasher ! 
Exit(crasher, new FireWorkerException("Fire the worker!")) - - assert(countDown.await(2, TimeUnit.SECONDS)) - } -} - diff --git a/akka-core/src/test/scala/actor/supervisor/SupervisorSpec.scala b/akka-core/src/test/scala/actor/supervisor/SupervisorSpec.scala deleted file mode 100644 index 01eb9cb006..0000000000 --- a/akka-core/src/test/scala/actor/supervisor/SupervisorSpec.scala +++ /dev/null @@ -1,605 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor - -import se.scalablesolutions.akka.config.ScalaConfig._ -import se.scalablesolutions.akka.config.OneForOneStrategy -import se.scalablesolutions.akka.{OneWay, Die, Ping} -import Actor._ - -import org.scalatest.junit.JUnitSuite -import org.junit.Test -import java.util.concurrent.{TimeUnit, LinkedBlockingQueue} - -object SupervisorSpec { - var messageLog = new LinkedBlockingQueue[String] - var oneWayLog = new LinkedBlockingQueue[String] - - def clearMessageLogs { - messageLog.clear - oneWayLog.clear - } - - class PingPong1Actor extends Actor { - import self._ - //dispatcher = Dispatchers.newThreadBasedDispatcher(self) - def receive = { - case Ping => - messageLog.put("ping") - reply("pong") - - case OneWay => - oneWayLog.put("oneway") - - case Die => - println("******************** GOT DIE 1") - throw new RuntimeException("Expected exception; to test fault-tolerance") - } - override def postRestart(reason: Throwable) { - println("******************** restart 1") - messageLog.put(reason.getMessage) - } - } - - class PingPong2Actor extends Actor { - import self._ - def receive = { - case Ping => - messageLog.put("ping") - reply("pong") - case Die => - println("******************** GOT DIE 2") - throw new RuntimeException("Expected exception; to test fault-tolerance") - } - override def postRestart(reason: Throwable) { - println("******************** restart 2") - messageLog.put(reason.getMessage) - } - } - - class PingPong3Actor extends Actor { - import self._ - def receive = { - case Ping => - messageLog.put("ping") - reply("pong") - case Die => - println("******************** GOT DIE 3") - throw new RuntimeException("Expected exception; to test fault-tolerance") - } - - override def postRestart(reason: Throwable) { - println("******************** restart 3") - messageLog.put(reason.getMessage) - } - } - - class TemporaryActor extends Actor { - import self._ - lifeCycle = Some(LifeCycle(Temporary)) - def receive = { - case Ping => - messageLog.put("ping") - reply("pong") - case Die => - println("******************** GOT DIE 3") - throw new RuntimeException("Expected exception; to test fault-tolerance") - } - - override def postRestart(reason: Throwable) { - println("******************** restart temporary") - messageLog.put(reason.getMessage) - } - } - - class Master extends Actor { - self.trapExit = classOf[Exception] :: Nil - self.faultHandler = Some(OneForOneStrategy(5, 1000)) - val temp = self.spawnLink[TemporaryActor] - override def receive = { - case Die => temp !! (Die, 5000) - } - } -} - -/** - * @author Jonas Bonér - */ -class SupervisorSpec extends JUnitSuite { - import SupervisorSpec._ - - var pingpong1: ActorRef = _ - var pingpong2: ActorRef = _ - var pingpong3: ActorRef = _ - var temporaryActor: ActorRef = _ - -/* - @Test def shouldStartServer = { - clearMessageLogs - val sup = getSingleActorAllForOneSupervisor - sup.start - - expect("pong") { - (pingpong1 !! 
(Ping, 5000)).getOrElse("nil") - } - } -*/ - @Test def shoulNotRestartProgrammaticallyLinkedTemporaryActor = { - clearMessageLogs - val master = actorOf[Master].start - - intercept[RuntimeException] { - master !! (Die, 5000) - } - - Thread.sleep(1000) - assert(messageLog.size === 0) - } - - @Test def shoulNotRestartTemporaryActor = { - clearMessageLogs - val sup = getTemporaryActorAllForOneSupervisor - - intercept[RuntimeException] { - temporaryActor !! (Die, 5000) - } - - Thread.sleep(1000) - assert(messageLog.size === 0) - } - - @Test def shouldStartServerForNestedSupervisorHierarchy = { - clearMessageLogs - val sup = getNestedSupervisorsAllForOneConf - sup.start - - expect("pong") { - (pingpong1 !! (Ping, 5000)).getOrElse("nil") - } - } - - @Test def shouldKillSingleActorOneForOne = { - clearMessageLogs - val sup = getSingleActorOneForOneSupervisor - - intercept[RuntimeException] { - pingpong1 !! (Die, 5000) - } - - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - } - - @Test def shouldCallKillCallSingleActorOneForOne = { - clearMessageLogs - val sup = getSingleActorOneForOneSupervisor - - expect("pong") { - (pingpong1 !! (Ping, 5000)).getOrElse("nil") - } - - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - intercept[RuntimeException] { - pingpong1 !! (Die, 5000) - } - - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("pong") { - (pingpong1 !! (Ping, 5000)).getOrElse("nil") - } - - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - } - - @Test def shouldKillSingleActorAllForOne = { - clearMessageLogs - val sup = getSingleActorAllForOneSupervisor - - intercept[RuntimeException] { - pingpong1 !! (Die, 5000) - } - - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - } - - @Test def shouldCallKillCallSingleActorAllForOne = { - clearMessageLogs - val sup = getSingleActorAllForOneSupervisor - - expect("pong") { - (pingpong1 !! (Ping, 5000)).getOrElse("nil") - } - - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - intercept[RuntimeException] { - pingpong1 !! (Die, 5000) - } - - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("pong") { - (pingpong1 !! (Ping, 5000)).getOrElse("nil") - } - - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - } - - @Test def shouldKillMultipleActorsOneForOne1 = { - clearMessageLogs - val sup = getMultipleActorsOneForOneConf - - intercept[RuntimeException] { - pingpong1 !! (Die, 5000) - } - - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - } - - @Test def shouldKillMultipleActorsOneForOne2 = { - clearMessageLogs - val sup = getMultipleActorsOneForOneConf - - intercept[RuntimeException] { - pingpong3 !! (Die, 5000) - } - - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - } - - @Test def shouldKillCallMultipleActorsOneForOne = { - clearMessageLogs - val sup = getMultipleActorsOneForOneConf - - expect("pong") { - (pingpong1 !! (Ping, 5000)).getOrElse("nil") - } - - expect("pong") { - (pingpong2 !! (Ping, 5000)).getOrElse("nil") - } - - expect("pong") { - (pingpong3 !! 
(Ping, 5000)).getOrElse("nil") - } - - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - intercept[RuntimeException] { - pingpong2 !! (Die, 5000) - } - - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("pong") { - (pingpong1 !! (Ping, 5000)).getOrElse("nil") - } - - expect("pong") { - (pingpong2 !! (Ping, 5000)).getOrElse("nil") - } - - expect("pong") { - (pingpong3 !! (Ping, 5000)).getOrElse("nil") - } - - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - } - - @Test def shouldKillMultipleActorsAllForOne = { - clearMessageLogs - val sup = getMultipleActorsAllForOneConf - - intercept[RuntimeException] { - pingpong2 !! (Die, 5000) - } - - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - } - - @Test def shouldCallKillCallMultipleActorsAllForOne = { - clearMessageLogs - val sup = getMultipleActorsAllForOneConf - - expect("pong") { - (pingpong1 !! (Ping, 5000)).getOrElse("nil") - } - - expect("pong") { - (pingpong2 !! (Ping, 5000)).getOrElse("nil") - } - - expect("pong") { - (pingpong3 !! (Ping, 5000)).getOrElse("nil") - } - - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - intercept[RuntimeException] { - pingpong2 !! (Die, 5000) - } - - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("pong") { - (pingpong1 !! (Ping, 5000)).getOrElse("nil") - } - - expect("pong") { - (pingpong2 !! (Ping, 5000)).getOrElse("nil") - } - - expect("pong") { - (pingpong3 !! (Ping, 5000)).getOrElse("nil") - } - - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - } - - @Test def shouldOneWayKillSingleActorOneForOne = { - clearMessageLogs - val sup = getSingleActorOneForOneSupervisor - - pingpong1 ! Die - - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - } - - @Test def shouldOneWayCallKillCallSingleActorOneForOne = { - clearMessageLogs - val sup = getSingleActorOneForOneSupervisor - - pingpong1 ! OneWay - - expect("oneway") { - oneWayLog.poll(5, TimeUnit.SECONDS) - } - pingpong1 ! Die - - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - pingpong1 ! OneWay - - expect("oneway") { - oneWayLog.poll(5, TimeUnit.SECONDS) - } - } - - @Test def shouldRestartKilledActorsForNestedSupervisorHierarchy = { - clearMessageLogs - val sup = getNestedSupervisorsAllForOneConf - - - expect("pong") { - (pingpong1 !! (Ping, 5000)).getOrElse("nil") - } - - expect("pong") { - (pingpong2 !! 
(Ping, 5000)).getOrElse("nil") - } - - expect("pong") { - (pingpong3 !! (Ping, 5000)).getOrElse("nil") - } - - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - intercept[RuntimeException] { - pingpong2 !! (Die, 5000) - } - - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5 , TimeUnit.SECONDS) - } - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("Expected exception; to test fault-tolerance") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("pong") { - (pingpong1 !! (Ping, 5000)).getOrElse("nil") - } - - expect("pong") { - (pingpong2 !! (Ping, 5000)).getOrElse("nil") - } - - expect("pong") { - (pingpong3 !! (Ping, 5000)).getOrElse("nil") - } - - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - expect("ping") { - messageLog.poll(5, TimeUnit.SECONDS) - } - } - - // ============================================= - // Create some supervisors with different configurations - - def getTemporaryActorAllForOneSupervisor: Supervisor = { - temporaryActor = actorOf[TemporaryActor].start - - Supervisor( - SupervisorConfig( - RestartStrategy(AllForOne, 3, 5000, List(classOf[Exception])), - Supervise( - temporaryActor, - LifeCycle(Temporary)) - :: Nil)) - } - - def getSingleActorAllForOneSupervisor: Supervisor = { - pingpong1 = actorOf[PingPong1Actor].start - - Supervisor( - SupervisorConfig( - RestartStrategy(AllForOne, 3, 5000, List(classOf[Exception])), - Supervise( - pingpong1, - LifeCycle(Permanent)) - :: Nil)) - } - - def getSingleActorOneForOneSupervisor: Supervisor = { - pingpong1 = actorOf[PingPong1Actor].start - - Supervisor( - SupervisorConfig( - RestartStrategy(OneForOne, 3, 5000, List(classOf[Exception])), - Supervise( - pingpong1, - LifeCycle(Permanent)) - :: Nil)) - } - - def getMultipleActorsAllForOneConf: Supervisor = { - pingpong1 = actorOf[PingPong1Actor].start - pingpong2 = actorOf[PingPong2Actor].start - pingpong3 = actorOf[PingPong3Actor].start - - Supervisor( - SupervisorConfig( - RestartStrategy(AllForOne, 3, 5000, List(classOf[Exception])), - Supervise( - pingpong1, - LifeCycle(Permanent)) - :: - Supervise( - pingpong2, - LifeCycle(Permanent)) - :: - Supervise( - pingpong3, - LifeCycle(Permanent)) - :: Nil)) - } - - def getMultipleActorsOneForOneConf: Supervisor = { - pingpong1 = actorOf[PingPong1Actor].start - pingpong2 = actorOf[PingPong2Actor].start - pingpong3 = actorOf[PingPong3Actor].start - - Supervisor( - SupervisorConfig( - RestartStrategy(OneForOne, 3, 5000, List(classOf[Exception])), - Supervise( - pingpong1, - LifeCycle(Permanent)) - :: - Supervise( - pingpong2, - LifeCycle(Permanent)) - :: - Supervise( - pingpong3, - LifeCycle(Permanent)) - :: Nil)) - } - - def getNestedSupervisorsAllForOneConf: Supervisor = { - pingpong1 = actorOf[PingPong1Actor].start - pingpong2 = actorOf[PingPong2Actor].start - pingpong3 = actorOf[PingPong3Actor].start - - Supervisor( - SupervisorConfig( - RestartStrategy(AllForOne, 3, 5000, List(classOf[Exception])), - Supervise( - pingpong1, - LifeCycle(Permanent)) - :: - SupervisorConfig( - RestartStrategy(AllForOne, 3, 5000, Nil), - Supervise( - pingpong2, - LifeCycle(Permanent)) - :: - Supervise( - pingpong3, - LifeCycle(Permanent)) - :: Nil) - :: Nil)) - } -} diff --git a/akka-core/src/test/scala/dispatch/DispatchersSpec.scala 
b/akka-core/src/test/scala/dispatch/DispatchersSpec.scala deleted file mode 100644 index bb548b9251..0000000000 --- a/akka-core/src/test/scala/dispatch/DispatchersSpec.scala +++ /dev/null @@ -1,74 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package se.scalablesolutions.akka.actor.dispatch - -import java.util.concurrent.{CountDownLatch, TimeUnit} -import org.scalatest.junit.JUnitSuite -import org.junit.Test - -import net.lag.configgy.Config -import scala.reflect.{Manifest} -import se.scalablesolutions.akka.dispatch._ - -object DispatchersSpec { - import Dispatchers._ - // - val tipe = "type" - val keepalivems = "keep-alive-ms" - val corepoolsizefactor = "core-pool-size-factor" - val maxpoolsizefactor = "max-pool-size-factor" - val executorbounds = "executor-bounds" - val allowcoretimeout = "allow-core-timeout" - val rejectionpolicy = "rejection-policy" // abort, caller-runs, discard-oldest, discard - val throughput = "throughput" // Throughput for ExecutorBasedEventDrivenDispatcher - val aggregate = "aggregate" // Aggregate on/off for HawtDispatchers - - def instance(dispatcher: MessageDispatcher): (MessageDispatcher) => Boolean = _ == dispatcher - def ofType[T <: MessageDispatcher : Manifest]: (MessageDispatcher) => Boolean = _.getClass == manifest[T].erasure - - def typesAndValidators: Map[String,(MessageDispatcher) => Boolean] = Map( - "ReactorBasedSingleThreadEventDriven" -> ofType[ReactorBasedSingleThreadEventDrivenDispatcher], - "ExecutorBasedEventDrivenWorkStealing" -> ofType[ExecutorBasedEventDrivenWorkStealingDispatcher], - "ExecutorBasedEventDriven" -> ofType[ExecutorBasedEventDrivenDispatcher], - "ReactorBasedThreadPoolEventDriven" -> ofType[ReactorBasedThreadPoolEventDrivenDispatcher], - "Hawt" -> ofType[HawtDispatcher], - "GlobalReactorBasedSingleThreadEventDriven" -> instance(globalReactorBasedSingleThreadEventDrivenDispatcher), - "GlobalReactorBasedThreadPoolEventDriven" -> instance(globalReactorBasedThreadPoolEventDrivenDispatcher), - "GlobalExecutorBasedEventDriven" -> instance(globalExecutorBasedEventDrivenDispatcher), - "GlobalHawt" -> instance(globalHawtDispatcher) - ) - - def validTypes = typesAndValidators.keys.toList - - lazy val allDispatchers: Map[String,Option[MessageDispatcher]] = { - validTypes.map(t => (t,from(Config.fromMap(Map(tipe -> t))))).toMap - } -} - -class DispatchersSpec extends JUnitSuite { - - import Dispatchers._ - import DispatchersSpec._ - - @Test def shouldYieldNoneIfTypeIsMissing { - assert(from(Config.fromMap(Map())) === None) - } - - @Test(expected = classOf[IllegalArgumentException]) - def shouldThrowIllegalArgumentExceptionIfTypeDoesntExist { - from(Config.fromMap(Map(tipe -> "typedoesntexist"))) - } - - @Test def shouldGetTheCorrectTypesOfDispatchers { - //It can create/obtain all defined types - assert(allDispatchers.values.forall(_.isDefined)) - //All created/obtained dispatchers are of the expeced type/instance - assert(typesAndValidators.forall( tuple => tuple._2(allDispatchers(tuple._1).get) )) - } - - @Test def defaultingToDefaultWhileLoadingTheDefaultShouldWork { - assert(from(Config.fromMap(Map())).getOrElse(defaultGlobalDispatcher) == defaultGlobalDispatcher) - } - -} diff --git a/akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala b/akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala deleted file mode 100644 index 9cdf43682e..0000000000 --- a/akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorSpec.scala +++ /dev/null @@ 
-1,68 +0,0 @@ -package se.scalablesolutions.akka.actor.dispatch - -import java.util.concurrent.{CountDownLatch, TimeUnit} -import org.scalatest.junit.JUnitSuite -import org.junit.Test -import se.scalablesolutions.akka.dispatch.Dispatchers -import se.scalablesolutions.akka.actor.Actor -import Actor._ - -object ExecutorBasedEventDrivenDispatcherActorSpec { - class TestActor extends Actor { - self.dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher(self.uuid) - def receive = { - case "Hello" => - self.reply("World") - case "Failure" => - throw new RuntimeException("Expected exception; to test fault-tolerance") - } - } - - object OneWayTestActor { - val oneWay = new CountDownLatch(1) - } - class OneWayTestActor extends Actor { - self.dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher(self.uuid) - def receive = { - case "OneWay" => OneWayTestActor.oneWay.countDown - } - } -} -class ExecutorBasedEventDrivenDispatcherActorSpec extends JUnitSuite { - import ExecutorBasedEventDrivenDispatcherActorSpec._ - - private val unit = TimeUnit.MILLISECONDS - - @Test def shouldSendOneWay = { - val actor = actorOf[OneWayTestActor].start - val result = actor ! "OneWay" - assert(OneWayTestActor.oneWay.await(1, TimeUnit.SECONDS)) - actor.stop - } - - @Test def shouldSendReplySync = { - val actor = actorOf[TestActor].start - val result = (actor !! ("Hello", 10000)).as[String] - assert("World" === result.get) - actor.stop - } - - @Test def shouldSendReplyAsync = { - val actor = actorOf[TestActor].start - val result = actor !! "Hello" - assert("World" === result.get.asInstanceOf[String]) - actor.stop - } - - @Test def shouldSendReceiveException = { - val actor = actorOf[TestActor].start - try { - actor !! "Failure" - fail("Should have thrown an exception") - } catch { - case e => - assert("Expected exception; to test fault-tolerance" === e.getMessage()) - } - actor.stop - } -} diff --git a/akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorsSpec.scala b/akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorsSpec.scala deleted file mode 100644 index fc8f1aa37f..0000000000 --- a/akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenDispatcherActorsSpec.scala +++ /dev/null @@ -1,61 +0,0 @@ -package se.scalablesolutions.akka.actor.dispatch - -import org.scalatest.junit.JUnitSuite -import org.junit.Test -import org.scalatest.matchers.MustMatchers -import java.util.concurrent.CountDownLatch -import se.scalablesolutions.akka.actor.Actor -import Actor._ - -/** - * Tests the behaviour of the executor based event driven dispatcher when multiple actors are being dispatched on it. - * - * @author Jan Van Besien - */ -class ExecutorBasedEventDrivenDispatcherActorsSpec extends JUnitSuite with MustMatchers { - class SlowActor(finishedCounter: CountDownLatch) extends Actor { - self.id = "SlowActor" - - def receive = { - case x: Int => { - Thread.sleep(50) // slow actor - finishedCounter.countDown - } - } - } - - class FastActor(finishedCounter: CountDownLatch) extends Actor { - self.id = "FastActor" - - def receive = { - case x: Int => { - finishedCounter.countDown - } - } - } - - @Test def slowActorShouldntBlockFastActor { - val sFinished = new CountDownLatch(50) - val fFinished = new CountDownLatch(10) - val s = actorOf(new SlowActor(sFinished)).start - val f = actorOf(new FastActor(fFinished)).start - - // send a lot of stuff to s - for (i <- 1 to 50) { - s ! i - } - - // send some messages to f - for (i <- 1 to 10) { - f ! 
i - } - - // now assert that f is finished while s is still busy - fFinished.await - assert(sFinished.getCount > 0) - sFinished.await - assert(sFinished.getCount === 0) - f.stop - s.stop - } -} diff --git a/akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala b/akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala deleted file mode 100644 index cde57a0544..0000000000 --- a/akka-core/src/test/scala/dispatch/ExecutorBasedEventDrivenWorkStealingDispatcherSpec.scala +++ /dev/null @@ -1,107 +0,0 @@ -package se.scalablesolutions.akka.actor.dispatch - -import org.scalatest.matchers.MustMatchers -import org.scalatest.junit.JUnitSuite - -import org.junit.Test - -import se.scalablesolutions.akka.dispatch.Dispatchers - -import java.util.concurrent.{TimeUnit, CountDownLatch} -import se.scalablesolutions.akka.actor.{IllegalActorStateException, Actor} -import Actor._ - -object ExecutorBasedEventDrivenWorkStealingDispatcherSpec { - val delayableActorDispatcher = Dispatchers.newExecutorBasedEventDrivenWorkStealingDispatcher("pooled-dispatcher") - val sharedActorDispatcher = Dispatchers.newExecutorBasedEventDrivenWorkStealingDispatcher("pooled-dispatcher") - val parentActorDispatcher = Dispatchers.newExecutorBasedEventDrivenWorkStealingDispatcher("pooled-dispatcher") - - class DelayableActor(name: String, delay: Int, finishedCounter: CountDownLatch) extends Actor { - self.dispatcher = delayableActorDispatcher - var invocationCount = 0 - self.id = name - - def receive = { - case x: Int => { - Thread.sleep(delay) - invocationCount += 1 - finishedCounter.countDown - } - } - } - - class FirstActor extends Actor { - self.dispatcher = sharedActorDispatcher - def receive = {case _ => {}} - } - - class SecondActor extends Actor { - self.dispatcher = sharedActorDispatcher - def receive = {case _ => {}} - } - - class ParentActor extends Actor { - self.dispatcher = parentActorDispatcher - def receive = {case _ => {}} - } - - class ChildActor extends ParentActor { - } -} - -/** - * @author Jan Van Besien - */ -class ExecutorBasedEventDrivenWorkStealingDispatcherSpec extends JUnitSuite with MustMatchers { - import ExecutorBasedEventDrivenWorkStealingDispatcherSpec._ - - @Test def fastActorShouldStealWorkFromSlowActor { - val finishedCounter = new CountDownLatch(110) - - val slow = actorOf(new DelayableActor("slow", 50, finishedCounter)).start - val fast = actorOf(new DelayableActor("fast", 10, finishedCounter)).start - - for (i <- 1 to 100) { - // send most work to slow actor - if (i % 20 == 0) - fast ! i - else - slow ! i - } - - // now send some messages to actors to keep the dispatcher dispatching messages - for (i <- 1 to 10) { - Thread.sleep(150) - if (i % 2 == 0) - fast ! i - else - slow ! 
i - } - - finishedCounter.await(5, TimeUnit.SECONDS) - fast.actor.asInstanceOf[DelayableActor].invocationCount must be > - (slow.actor.asInstanceOf[DelayableActor].invocationCount) - slow.stop - fast.stop - } - - @Test def canNotUseActorsOfDifferentTypesInSameDispatcher(): Unit = { - val first = actorOf[FirstActor] - val second = actorOf[SecondActor] - - first.start - intercept[IllegalActorStateException] { - second.start - } - } - - @Test def canNotUseActorsOfDifferentSubTypesInSameDispatcher(): Unit = { - val parent = actorOf[ParentActor] - val child = actorOf[ChildActor] - - parent.start - intercept[IllegalActorStateException] { - child.start - } - } -} diff --git a/akka-core/src/test/scala/dispatch/FutureSpec.scala b/akka-core/src/test/scala/dispatch/FutureSpec.scala deleted file mode 100644 index f740763fdf..0000000000 --- a/akka-core/src/test/scala/dispatch/FutureSpec.scala +++ /dev/null @@ -1,106 +0,0 @@ -package se.scalablesolutions.akka.actor - -import org.scalatest.junit.JUnitSuite -import org.junit.Test -import se.scalablesolutions.akka.dispatch.Futures -import Actor._ - -object FutureSpec { - class TestActor extends Actor { - def receive = { - case "Hello" => - self.reply("World") - case "NoReply" => {} - case "Failure" => - throw new RuntimeException("Expected exception; to test fault-tolerance") - } - } -} - -class FutureSpec extends JUnitSuite { - import FutureSpec._ - - @Test def shouldActorReplyResultThroughExplicitFuture { - val actor = actorOf[TestActor] - actor.start - val future = actor !!! "Hello" - future.await - assert(future.result.isDefined) - assert("World" === future.result.get) - actor.stop - } - - @Test def shouldActorReplyExceptionThroughExplicitFuture { - val actor = actorOf[TestActor] - actor.start - val future = actor !!! "Failure" - future.await - assert(future.exception.isDefined) - assert("Expected exception; to test fault-tolerance" === future.exception.get.getMessage) - actor.stop - } - - /* - // FIXME: implement Futures.awaitEither, and uncomment these two tests - @Test def shouldFutureAwaitEitherLeft = { - val actor1 = actorOf[TestActor].start - val actor2 = actorOf[TestActor].start - val future1 = actor1 !!! "Hello" - val future2 = actor2 !!! "NoReply" - val result = Futures.awaitEither(future1, future2) - assert(result.isDefined) - assert("World" === result.get) - actor1.stop - actor2.stop - } - - @Test def shouldFutureAwaitEitherRight = { - val actor1 = actorOf[TestActor].start - val actor2 = actorOf[TestActor].start - val future1 = actor1 !!! "NoReply" - val future2 = actor2 !!! "Hello" - val result = Futures.awaitEither(future1, future2) - assert(result.isDefined) - assert("World" === result.get) - actor1.stop - actor2.stop - } - */ - @Test def shouldFutureAwaitOneLeft = { - val actor1 = actorOf[TestActor].start - val actor2 = actorOf[TestActor].start - val future1 = actor1 !!! "NoReply" - val future2 = actor2 !!! "Hello" - val result = Futures.awaitOne(List(future1, future2)) - assert(result.result.isDefined) - assert("World" === result.result.get) - actor1.stop - actor2.stop - } - - @Test def shouldFutureAwaitOneRight = { - val actor1 = actorOf[TestActor].start - val actor2 = actorOf[TestActor].start - val future1 = actor1 !!! "Hello" - val future2 = actor2 !!! 
"NoReply" - val result = Futures.awaitOne(List(future1, future2)) - assert(result.result.isDefined) - assert("World" === result.result.get) - actor1.stop - actor2.stop - } - - @Test def shouldFutureAwaitAll = { - val actor1 = actorOf[TestActor].start - val actor2 = actorOf[TestActor].start - val future1 = actor1 !!! "Hello" - val future2 = actor2 !!! "Hello" - Futures.awaitAll(List(future1, future2)) - assert(future1.result.isDefined) - assert("World" === future1.result.get) - assert(future2.result.isDefined) - assert("World" === future2.result.get) - actor1.stop - actor2.stop - } -} diff --git a/akka-core/src/test/scala/dispatch/HawtDispatcherActorSpec.scala b/akka-core/src/test/scala/dispatch/HawtDispatcherActorSpec.scala deleted file mode 100644 index 2c45f3388c..0000000000 --- a/akka-core/src/test/scala/dispatch/HawtDispatcherActorSpec.scala +++ /dev/null @@ -1,71 +0,0 @@ -package se.scalablesolutions.akka.actor.dispatch - -import java.util.concurrent.{CountDownLatch, TimeUnit} - -import org.scalatest.junit.JUnitSuite -import org.junit.Test - -import se.scalablesolutions.akka.dispatch.{HawtDispatcher, Dispatchers} -import se.scalablesolutions.akka.actor.Actor -import Actor._ - -object HawtDispatcherActorSpec { - class TestActor extends Actor { - self.dispatcher = new HawtDispatcher() - def receive = { - case "Hello" => - self.reply("World") - case "Failure" => - throw new RuntimeException("Expected exception; to test fault-tolerance") - } - } - - object OneWayTestActor { - val oneWay = new CountDownLatch(1) - } - class OneWayTestActor extends Actor { - self.dispatcher = new HawtDispatcher() - def receive = { - case "OneWay" => OneWayTestActor.oneWay.countDown - } - } -} - -class HawtDispatcherActorSpec extends JUnitSuite { - import HawtDispatcherActorSpec._ - - private val unit = TimeUnit.MILLISECONDS - - @Test def shouldSendOneWay = { - val actor = actorOf[OneWayTestActor].start - val result = actor ! "OneWay" - assert(OneWayTestActor.oneWay.await(1, TimeUnit.SECONDS)) - actor.stop - } - - @Test def shouldSendReplySync = { - val actor = actorOf[TestActor].start - val result = (actor !! ("Hello", 10000)).as[String] - assert("World" === result.get) - actor.stop - } - - @Test def shouldSendReplyAsync = { - val actor = actorOf[TestActor].start - val result = actor !! "Hello" - assert("World" === result.get.asInstanceOf[String]) - actor.stop - } - - @Test def shouldSendReceiveException = { - val actor = actorOf[TestActor].start - try { - actor !! 
"Failure" - fail("Should have thrown an exception") - } catch { - case e => - assert("Expected exception; to test fault-tolerance" === e.getMessage()) - } - actor.stop - } -} diff --git a/akka-core/src/test/scala/dispatch/HawtDispatcherEchoServer.scala b/akka-core/src/test/scala/dispatch/HawtDispatcherEchoServer.scala deleted file mode 100644 index 97f2e0df9d..0000000000 --- a/akka-core/src/test/scala/dispatch/HawtDispatcherEchoServer.scala +++ /dev/null @@ -1,207 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor.dispatch - -import scala.collection.mutable.ListBuffer - -import java.util.concurrent.TimeUnit -import java.net.InetSocketAddress -import java.io.IOException -import java.nio.ByteBuffer -import java.nio.channels.{SocketChannel, SelectionKey, ServerSocketChannel} - -import se.scalablesolutions.akka.actor._ -import se.scalablesolutions.akka.actor.Actor._ -import se.scalablesolutions.akka.dispatch.HawtDispatcher - -import org.fusesource.hawtdispatch.DispatchSource -import org.fusesource.hawtdispatch.ScalaDispatch._ - -/** - * This is an example of how to crate an Akka actor based TCP echo server using - * the HawtDispatch dispatcher and NIO event sources. - */ -object HawtDispatcherEchoServer { - - private val hawt = new HawtDispatcher - var port=4444; - var useReactorPattern=true - - def main(args:Array[String]):Unit = run - - def run() = { - val server = actorOf(new Server(port)) - server.start - Scheduler.schedule(server, DisplayStats, 1, 5, TimeUnit.SECONDS) - - println("Press enter to shutdown."); - System.in.read - server ! Shutdown - } - - case object Shutdown - case object DisplayStats - case class SessionClosed(session:ActorRef) - - class Server(val port: Int) extends Actor { - - self.dispatcher = hawt - - var channel:ServerSocketChannel = _ - var accept_source:DispatchSource = _ - var sessions = ListBuffer[ActorRef]() - - override def init = { - channel = ServerSocketChannel.open(); - channel.socket().bind(new InetSocketAddress(port)); - channel.configureBlocking(false); - - // Setup the accept source, it will callback to the handler methods - // via the actor's mailbox so you don't need to worry about - // synchronizing with the local variables - accept_source = createSource(channel, SelectionKey.OP_ACCEPT, HawtDispatcher.queue(self)); - accept_source.setEventHandler(^{ accept }); - accept_source.setDisposer(^{ - channel.close(); - println("Closed port: "+port); - }); - - accept_source.resume - - println("Listening on port: "+port); - } - - - private def accept() = { - var socket = channel.accept(); - while( socket!=null ) { - try { - socket.configureBlocking(false); - val session = actorOf(new Session(self, socket)) - session.start() - sessions += session - } catch { - case e: Exception => - socket.close - } - socket = channel.accept(); - } - } - - def receive = { - case SessionClosed(session) => - sessions = sessions.filterNot( _ == session ) - session.stop - case DisplayStats => - sessions.foreach { session=> - session ! 
DisplayStats - } - case Shutdown => - sessions.foreach { session=> - session.stop - } - sessions.clear - accept_source.release - self.stop - } - } - - class Session(val server:ActorRef, val channel: SocketChannel) extends Actor { - - self.dispatcher = hawt - - val buffer = ByteBuffer.allocate(1024); - val remote_address = channel.socket.getRemoteSocketAddress.toString - - var read_source:DispatchSource = _ - var write_source:DispatchSource = _ - - var readCounter = 0L - var writeCounter = 0L - var closed = false - - override def init = { - - if(useReactorPattern) { - // Then we will be using the reactor pattern for handling IO: - // Pin this actor to a single thread. The read/write event sources will poll - // a Selector on the pinned thread. Since the IO events are generated on the same - // thread as where the Actor is pinned to, it can avoid a substantial amount - // thread synchronization. Plus your GC will perform better since all the IO - // processing is done on a single thread. - HawtDispatcher.pin(self) - } else { - // Then we will be using sing the proactor pattern for handling IO: - // Then the actor will not be pinned to a specific thread. The read/write - // event sources will poll a Selector and then asynchronously dispatch the - // event's to the actor via the thread pool. - } - - // Setup the sources, they will callback to the handler methods - // via the actor's mailbox so you don't need to worry about - // synchronizing with the local variables - read_source = createSource(channel, SelectionKey.OP_READ, HawtDispatcher.queue(self)); - read_source.setEventHandler(^{ read }) - read_source.setCancelHandler(^{ close }) - - write_source = createSource(channel, SelectionKey.OP_READ, HawtDispatcher.queue(self)); - write_source.setEventHandler(^{ write }) - write_source.setCancelHandler(^{ close }) - - read_source.resume - println("Accepted connection from: "+remote_address); - } - - override def shutdown = { - closed = true - read_source.release - write_source.release - channel.close - } - - private def catchio(func: =>Unit):Unit = { - try { - func - } catch { - case e:IOException => close - } - } - - def read():Unit = catchio { - channel.read(buffer) match { - case -1 => - close // peer disconnected. - case 0 => - case count:Int => - readCounter += count - buffer.flip; - read_source.suspend - write_source.resume - write() - } - } - - def write() = catchio { - writeCounter += channel.write(buffer) - if (buffer.remaining == 0) { - buffer.clear - write_source.suspend - read_source.resume - } - } - - def close() = { - if( !closed ) { - closed = true - server ! 
SessionClosed(self) - } - } - - def receive = { - case DisplayStats => - println("connection to %s reads: %,d bytes, writes: %,d".format(remote_address, readCounter, writeCounter)) - } - } -} diff --git a/akka-core/src/test/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala b/akka-core/src/test/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala deleted file mode 100644 index de9b912bf5..0000000000 --- a/akka-core/src/test/scala/dispatch/ReactorBasedSingleThreadEventDrivenDispatcherActorSpec.scala +++ /dev/null @@ -1,71 +0,0 @@ -package se.scalablesolutions.akka.actor.dispatch - -import java.util.concurrent.{CountDownLatch, TimeUnit} -import org.scalatest.junit.JUnitSuite -import org.junit.Test - -import se.scalablesolutions.akka.dispatch.Dispatchers -import se.scalablesolutions.akka.actor.Actor -import Actor._ - -object ReactorBasedSingleThreadEventDrivenDispatcherActorSpec { - class TestActor extends Actor { - self.dispatcher = Dispatchers.newReactorBasedSingleThreadEventDrivenDispatcher(self.uuid) - - def receive = { - case "Hello" => - self.reply("World") - case "Failure" => - throw new RuntimeException("Expected exception; to test fault-tolerance") - } - } - - object OneWayTestActor { - val oneWay = new CountDownLatch(1) - } - class OneWayTestActor extends Actor { - self.dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher(self.uuid) - def receive = { - case "OneWay" => OneWayTestActor.oneWay.countDown - } - } -} - -class ReactorBasedSingleThreadEventDrivenDispatcherActorSpec extends JUnitSuite { - import ReactorBasedSingleThreadEventDrivenDispatcherActorSpec._ - - private val unit = TimeUnit.MILLISECONDS - - @Test def shouldSendOneWay = { - val actor = actorOf[OneWayTestActor].start - val result = actor ! "OneWay" - assert(OneWayTestActor.oneWay.await(1, TimeUnit.SECONDS)) - actor.stop - } - - @Test def shouldSendReplySync = { - val actor = actorOf[TestActor].start - val result = (actor !! ("Hello", 10000)).as[String].get - assert("World" === result) - actor.stop - } - - @Test def shouldSendReplyAsync = { - val actor = actorOf[TestActor].start - val result = actor !! "Hello" - assert("World" === result.get.asInstanceOf[String]) - actor.stop - } - - @Test def shouldSendReceiveException = { - val actor = actorOf[TestActor].start - try { - actor !! 
"Failure" - fail("Should have thrown an exception") - } catch { - case e => - assert("Expected exception; to test fault-tolerance" === e.getMessage()) - } - actor.stop - } -} diff --git a/akka-core/src/test/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala b/akka-core/src/test/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala deleted file mode 100644 index 4001df8f56..0000000000 --- a/akka-core/src/test/scala/dispatch/ReactorBasedThreadPoolEventDrivenDispatcherActorSpec.scala +++ /dev/null @@ -1,66 +0,0 @@ -package se.scalablesolutions.akka.actor.dispatch - -import java.util.concurrent.{CountDownLatch, TimeUnit} -import org.scalatest.junit.JUnitSuite -import org.junit.Test - -import se.scalablesolutions.akka.dispatch.Dispatchers -import se.scalablesolutions.akka.actor.Actor -import Actor._ - -object ReactorBasedThreadPoolEventDrivenDispatcherActorSpec { - class TestActor extends Actor { - self.dispatcher = Dispatchers.newReactorBasedThreadPoolEventDrivenDispatcher(self.uuid) - def receive = { - case "Hello" => - self.reply("World") - case "Failure" => - throw new RuntimeException("Expected exception; to test fault-tolerance") - } - } -} - -class ReactorBasedThreadPoolEventDrivenDispatcherActorSpec extends JUnitSuite { - import ReactorBasedThreadPoolEventDrivenDispatcherActorSpec._ - - private val unit = TimeUnit.MILLISECONDS - - @Test def shouldSendOneWay { - val oneWay = new CountDownLatch(1) - val actor = actorOf(new Actor { - self.dispatcher = Dispatchers.newReactorBasedThreadPoolEventDrivenDispatcher(self.uuid) - def receive = { - case "OneWay" => oneWay.countDown - } - }).start - val result = actor ! "OneWay" - assert(oneWay.await(1, TimeUnit.SECONDS)) - actor.stop - } - - @Test def shouldSendReplySync = { - val actor = actorOf[TestActor].start - val result = (actor !! ("Hello", 10000)).as[String].get - assert("World" === result) - actor.stop - } - - @Test def shouldSendReplyAsync = { - val actor = actorOf[TestActor].start - val result = actor !! "Hello" - assert("World" === result.get.asInstanceOf[String]) - actor.stop - } - - @Test def shouldSendReceiveException = { - val actor = actorOf[TestActor].start - try { - actor !! 
"Failure" - fail("Should have thrown an exception") - } catch { - case e => - assert("Expected exception; to test fault-tolerance" === e.getMessage()) - } - actor.stop - } -} diff --git a/akka-core/src/test/scala/dispatch/ThreadBasedActorSpec.scala b/akka-core/src/test/scala/dispatch/ThreadBasedActorSpec.scala deleted file mode 100644 index d69ee984d8..0000000000 --- a/akka-core/src/test/scala/dispatch/ThreadBasedActorSpec.scala +++ /dev/null @@ -1,67 +0,0 @@ -package se.scalablesolutions.akka.actor.dispatch - -import java.util.concurrent.{CountDownLatch, TimeUnit} -import org.scalatest.junit.JUnitSuite -import org.junit.Test - -import se.scalablesolutions.akka.dispatch.Dispatchers -import se.scalablesolutions.akka.actor.Actor -import Actor._ - -object ThreadBasedActorSpec { - class TestActor extends Actor { - self.dispatcher = Dispatchers.newThreadBasedDispatcher(self) - - def receive = { - case "Hello" => - self.reply("World") - case "Failure" => - throw new RuntimeException("Expected exception; to test fault-tolerance") - } - } -} - -class ThreadBasedActorSpec extends JUnitSuite { - import ThreadBasedActorSpec._ - - private val unit = TimeUnit.MILLISECONDS - - @Test def shouldSendOneWay { - var oneWay = new CountDownLatch(1) - val actor = actorOf(new Actor { - self.dispatcher = Dispatchers.newThreadBasedDispatcher(self) - def receive = { - case "OneWay" => oneWay.countDown - } - }).start - val result = actor ! "OneWay" - assert(oneWay.await(1, TimeUnit.SECONDS)) - actor.stop - } - - @Test def shouldSendReplySync = { - val actor = actorOf[TestActor].start - val result = (actor !! ("Hello", 10000)).as[String] - assert("World" === result.get) - actor.stop - } - - @Test def shouldSendReplyAsync = { - val actor = actorOf[TestActor].start - val result = actor !! "Hello" - assert("World" === result.get.asInstanceOf[String]) - actor.stop - } - - @Test def shouldSendReceiveException = { - val actor = actorOf[TestActor].start - try { - actor !! 
"Failure" - fail("Should have thrown an exception") - } catch { - case e => - assert("Expected exception; to test fault-tolerance" === e.getMessage()) - } - actor.stop - } -} diff --git a/akka-core/src/test/scala/dispatch/ThreadBasedDispatcherSpec.scala b/akka-core/src/test/scala/dispatch/ThreadBasedDispatcherSpec.scala deleted file mode 100644 index 44cd9aade3..0000000000 --- a/akka-core/src/test/scala/dispatch/ThreadBasedDispatcherSpec.scala +++ /dev/null @@ -1,91 +0,0 @@ -package se.scalablesolutions.akka.dispatch - -import java.util.concurrent.CountDownLatch -import java.util.concurrent.TimeUnit -import java.util.concurrent.atomic.AtomicBoolean -import java.util.concurrent.locks.Lock -import java.util.concurrent.locks.ReentrantLock - -import org.scalatest.junit.JUnitSuite -import org.junit.{Test, Before} - -import se.scalablesolutions.akka.actor.Actor -import Actor._ - -// FIXME use this test when we have removed the MessageInvoker classes -/* -class ThreadBasedDispatcherSpec extends JUnitSuite { - private var threadingIssueDetected: AtomicBoolean = null - val key1 = actorOf(new Actor { def receive = { case _ => {}} }) - val key2 = actorOf(new Actor { def receive = { case _ => {}} }) - val key3 = actorOf(new Actor { def receive = { case _ => {}} }) - - class TestMessageHandle(handleLatch: CountDownLatch) extends MessageInvoker { - val guardLock: Lock = new ReentrantLock - - def invoke(message: MessageInvocation) { - try { - if (threadingIssueDetected.get) return - if (guardLock.tryLock) { - handleLatch.countDown - } else { - threadingIssueDetected.set(true) - } - } catch { - case e: Exception => threadingIssueDetected.set(true) - } finally { - guardLock.unlock - } - } - } - - @Before - def setUp = { - threadingIssueDetected = new AtomicBoolean(false) - } - - @Test - def shouldMessagesDispatchedToTheSameHandlerAreExecutedSequentially = { - internalTestMessagesDispatchedToTheSameHandlerAreExecutedSequentially - } - - @Test - def shouldMessagesDispatchedToHandlersAreExecutedInFIFOOrder = { - internalTestMessagesDispatchedToHandlersAreExecutedInFIFOOrder - } - - private def internalTestMessagesDispatchedToTheSameHandlerAreExecutedSequentially(): Unit = { - val guardLock = new ReentrantLock - val handleLatch = new CountDownLatch(100) - val dispatcher = new ThreadBasedDispatcher("name", new TestMessageHandle(handleLatch)) - dispatcher.start - for (i <- 0 until 100) { - dispatcher.dispatch(new MessageInvocation(key1, new Object, None, None)) - } - assert(handleLatch.await(5, TimeUnit.SECONDS)) - assert(!threadingIssueDetected.get) - } - - private def internalTestMessagesDispatchedToHandlersAreExecutedInFIFOOrder(): Unit = { - val handleLatch = new CountDownLatch(100) - val dispatcher = new ThreadBasedDispatcher("name", new MessageInvoker { - var currentValue = -1; - def invoke(message: MessageInvocation) { - if (threadingIssueDetected.get) return - val messageValue = message.message.asInstanceOf[Int] - if (messageValue.intValue == currentValue + 1) { - currentValue = messageValue.intValue - handleLatch.countDown - } else threadingIssueDetected.set(true) - } - }) - dispatcher.start - for (i <- 0 until 100) { - dispatcher.dispatch(new MessageInvocation(key1, i, None, None)) - } - assert(handleLatch.await(5, TimeUnit.SECONDS)) - assert(!threadingIssueDetected.get) - dispatcher.shutdown - } -} -*/ diff --git a/akka-core/src/test/scala/misc/ActorRegistrySpec.scala b/akka-core/src/test/scala/misc/ActorRegistrySpec.scala deleted file mode 100644 index 8c9e0778ca..0000000000 --- 
a/akka-core/src/test/scala/misc/ActorRegistrySpec.scala +++ /dev/null @@ -1,255 +0,0 @@ -package se.scalablesolutions.akka.actor - -import org.scalatest.junit.JUnitSuite -import org.junit.Test -import Actor._ -import java.util.concurrent.{CyclicBarrier, TimeUnit, CountDownLatch} - -object ActorRegistrySpec { - var record = "" - class TestActor extends Actor { - self.id = "MyID" - def receive = { - case "ping" => - record = "pong" + record - self.reply("got ping") - } - } - - class TestActor2 extends Actor { - self.id = "MyID2" - def receive = { - case "ping" => - record = "pong" + record - self.reply("got ping") - case "ping2" => - record = "pong" + record - self.reply("got ping") - } - } - -} - -class ActorRegistrySpec extends JUnitSuite { - import ActorRegistrySpec._ - - @Test def shouldGetActorByIdFromActorRegistry { - ActorRegistry.shutdownAll - val actor = actorOf[TestActor] - actor.start - val actors = ActorRegistry.actorsFor("MyID") - assert(actors.size === 1) - assert(actors.head.actor.isInstanceOf[TestActor]) - assert(actors.head.id === "MyID") - actor.stop - } - - @Test def shouldGetActorByUUIDFromActorRegistry { - ActorRegistry.shutdownAll - val actor = actorOf[TestActor] - val uuid = actor.uuid - actor.start - val actorOrNone = ActorRegistry.actorFor(uuid) - assert(actorOrNone.isDefined) - assert(actorOrNone.get.uuid === uuid) - actor.stop - } - - @Test def shouldGetActorByClassFromActorRegistry { - ActorRegistry.shutdownAll - val actor = actorOf[TestActor] - actor.start - val actors = ActorRegistry.actorsFor(classOf[TestActor]) - assert(actors.size === 1) - assert(actors.head.actor.isInstanceOf[TestActor]) - assert(actors.head.id === "MyID") - actor.stop - } - - @Test def shouldGetActorByManifestFromActorRegistry { - ActorRegistry.shutdownAll - val actor = actorOf[TestActor] - actor.start - val actors = ActorRegistry.actorsFor[TestActor] - assert(actors.size === 1) - assert(actors.head.actor.isInstanceOf[TestActor]) - assert(actors.head.id === "MyID") - actor.stop - } - - @Test def shouldFindThingsFromActorRegistry { - ActorRegistry.shutdownAll - val actor = actorOf[TestActor] - actor.start - val found = ActorRegistry.find({ case a: ActorRef if a.actor.isInstanceOf[TestActor] => a }) - assert(found.isDefined) - assert(found.get.actor.isInstanceOf[TestActor]) - assert(found.get.id === "MyID") - actor.stop - } - - @Test def shouldGetActorsByIdFromActorRegistry { - ActorRegistry.shutdownAll - val actor1 = actorOf[TestActor] - actor1.start - val actor2 = actorOf[TestActor] - actor2.start - val actors = ActorRegistry.actorsFor("MyID") - assert(actors.size === 2) - assert(actors.head.actor.isInstanceOf[TestActor]) - assert(actors.head.id === "MyID") - assert(actors.last.actor.isInstanceOf[TestActor]) - assert(actors.last.id === "MyID") - actor1.stop - actor2.stop - } - - @Test def shouldGetActorsByClassFromActorRegistry { - ActorRegistry.shutdownAll - val actor1 = actorOf[TestActor] - actor1.start - val actor2 = actorOf[TestActor] - actor2.start - val actors = ActorRegistry.actorsFor(classOf[TestActor]) - assert(actors.size === 2) - assert(actors.head.actor.isInstanceOf[TestActor]) - assert(actors.head.id === "MyID") - assert(actors.last.actor.isInstanceOf[TestActor]) - assert(actors.last.id === "MyID") - actor1.stop - actor2.stop - } - - @Test def shouldGetActorsByManifestFromActorRegistry { - ActorRegistry.shutdownAll - val actor1 = actorOf[TestActor] - actor1.start - val actor2 = actorOf[TestActor] - actor2.start - val actors = ActorRegistry.actorsFor[TestActor] - 
assert(actors.size === 2) - assert(actors.head.actor.isInstanceOf[TestActor]) - assert(actors.head.id === "MyID") - assert(actors.last.actor.isInstanceOf[TestActor]) - assert(actors.last.id === "MyID") - actor1.stop - actor2.stop - } - - @Test def shouldGetActorsByMessageFromActorRegistry { - - ActorRegistry.shutdownAll - val actor1 = actorOf[TestActor] - actor1.start - val actor2 = actorOf[TestActor2] - actor2.start - - val actorsForAcotrTestActor = ActorRegistry.actorsFor[TestActor] - assert(actorsForAcotrTestActor.size === 1) - - val actorsForAcotrTestActor2 = ActorRegistry.actorsFor[TestActor2] - assert(actorsForAcotrTestActor2.size === 1) - - val actorsForAcotr = ActorRegistry.actorsFor[Actor] - assert(actorsForAcotr.size === 2) - - - val actorsForMessagePing2 = ActorRegistry.actorsFor[Actor]("ping2") - assert(actorsForMessagePing2.size === 1) - - val actorsForMessagePing = ActorRegistry.actorsFor[Actor]("ping") - assert(actorsForMessagePing.size === 2) - - actor1.stop - actor2.stop - } - - @Test def shouldGetAllActorsFromActorRegistry { - ActorRegistry.shutdownAll - val actor1 = actorOf[TestActor] - actor1.start - val actor2 = actorOf[TestActor] - actor2.start - val actors = ActorRegistry.actors - assert(actors.size === 2) - assert(actors.head.actor.isInstanceOf[TestActor]) - assert(actors.head.id === "MyID") - assert(actors.last.actor.isInstanceOf[TestActor]) - assert(actors.last.id === "MyID") - actor1.stop - actor2.stop - } - - @Test def shouldGetResponseByAllActorsInActorRegistryWhenInvokingForeach { - ActorRegistry.shutdownAll - val actor1 = actorOf[TestActor] - actor1.start - val actor2 = actorOf[TestActor] - actor2.start - record = "" - ActorRegistry.foreach(actor => actor !! "ping") - assert(record === "pongpong") - actor1.stop - actor2.stop - } - - @Test def shouldShutdownAllActorsInActorRegistry { - ActorRegistry.shutdownAll - val actor1 = actorOf[TestActor] - actor1.start - val actor2 = actorOf[TestActor] - actor2.start - ActorRegistry.shutdownAll - assert(ActorRegistry.actors.size === 0) - } - - @Test def shouldRemoveUnregisterActorInActorRegistry { - ActorRegistry.shutdownAll - val actor1 = actorOf[TestActor] - actor1.start - val actor2 = actorOf[TestActor] - actor2.start - assert(ActorRegistry.actors.size === 2) - ActorRegistry.unregister(actor1) - assert(ActorRegistry.actors.size === 1) - ActorRegistry.unregister(actor2) - assert(ActorRegistry.actors.size === 0) - } - - @Test def shouldBeAbleToRegisterActorsConcurrently { - ActorRegistry.shutdownAll - - val latch = new CountDownLatch(3) - val barrier = new CyclicBarrier(3) - - def mkTestActor(i:Int) = actorOf( new Actor { - self.id = i.toString - def receive = { case _ => } - }) - - def mkTestActors = for(i <- 1 to 10;j <- 1 to 1000) yield mkTestActor(i) - - def mkThread(actors: Iterable[ActorRef]) = new Thread { - start - override def run { - barrier.await - actors foreach { _.start } - latch.countDown - } - } - - val testActors1 = mkTestActors - val testActors2 = mkTestActors - val testActors3 = mkTestActors - - mkThread(testActors1) - mkThread(testActors2) - mkThread(testActors3) - - assert(latch.await(30,TimeUnit.SECONDS) === true) - - for(i <- 1 to 10) { - assert(ActorRegistry.actorsFor(i.toString).length === 3000) - } - } -} diff --git a/akka-core/src/test/scala/misc/SchedulerSpec.scala b/akka-core/src/test/scala/misc/SchedulerSpec.scala deleted file mode 100644 index 16dd21f327..0000000000 --- a/akka-core/src/test/scala/misc/SchedulerSpec.scala +++ /dev/null @@ -1,127 +0,0 @@ -package 
se.scalablesolutions.akka.actor - -import org.scalatest.junit.JUnitSuite -import Actor._ -import java.util.concurrent.{CountDownLatch, TimeUnit} -import se.scalablesolutions.akka.config.ScalaConfig._ -import org.multiverse.api.latches.StandardLatch -import org.junit.Test - -class SchedulerSpec extends JUnitSuite { - - def withCleanEndState(action: => Unit) { - action - Scheduler.restart - ActorRegistry.shutdownAll - } - - - @Test def schedulerShouldScheduleMoreThanOnce = withCleanEndState { - - case object Tick - val countDownLatch = new CountDownLatch(3) - val tickActor = actor { - case Tick => countDownLatch.countDown - } - // run every 50 millisec - Scheduler.schedule(tickActor, Tick, 0, 50, TimeUnit.MILLISECONDS) - - // after max 1 second it should be executed at least the 3 times already - assert(countDownLatch.await(1, TimeUnit.SECONDS)) - - val countDownLatch2 = new CountDownLatch(3) - - Scheduler.schedule( () => countDownLatch2.countDown, 0, 50, TimeUnit.MILLISECONDS) - - // after max 1 second it should be executed at least the 3 times already - assert(countDownLatch2.await(1, TimeUnit.SECONDS)) - } - - @Test def schedulerShouldScheduleOnce = withCleanEndState { - case object Tick - val countDownLatch = new CountDownLatch(3) - val tickActor = actor { - case Tick => countDownLatch.countDown - } - // run every 50 millisec - Scheduler.scheduleOnce(tickActor, Tick, 50, TimeUnit.MILLISECONDS) - Scheduler.scheduleOnce( () => countDownLatch.countDown, 50, TimeUnit.MILLISECONDS) - - // after 1 second the wait should fail - assert(countDownLatch.await(1, TimeUnit.SECONDS) == false) - // should still be 1 left - assert(countDownLatch.getCount == 1) - } - - /** - * ticket #372 - */ - @Test def schedulerShouldntCreateActors = withCleanEndState { - object Ping - val ticks = new CountDownLatch(1000) - val actor = actorOf(new Actor { - def receive = { case Ping => ticks.countDown } - }).start - val numActors = ActorRegistry.actors.length - (1 to 1000).foreach( _ => Scheduler.scheduleOnce(actor,Ping,1,TimeUnit.MILLISECONDS) ) - assert(ticks.await(10,TimeUnit.SECONDS)) - assert(ActorRegistry.actors.length === numActors) - } - - /** - * ticket #372 - */ - @Test def schedulerShouldBeCancellable = withCleanEndState { - object Ping - val ticks = new CountDownLatch(1) - - val actor = actorOf(new Actor { - def receive = { case Ping => ticks.countDown } - }).start - - (1 to 10).foreach { i => - val future = Scheduler.scheduleOnce(actor,Ping,1,TimeUnit.SECONDS) - future.cancel(true) - } - assert(ticks.await(3,TimeUnit.SECONDS) == false) //No counting down should've been made - } - - /** - * ticket #307 - */ - @Test def actorRestartShouldPickUpScheduleAgain = withCleanEndState { - - object Ping - object Crash - - val restartLatch = new StandardLatch - val pingLatch = new CountDownLatch(6) - - val actor = actorOf(new Actor { - self.lifeCycle = Some(LifeCycle(Permanent)) - - def receive = { - case Ping => pingLatch.countDown - case Crash => throw new Exception("CRASH") - } - - override def postRestart(reason: Throwable) = restartLatch.open - }) - Supervisor( - SupervisorConfig( - RestartStrategy(AllForOne, 3, 1000, - List(classOf[Exception])), - Supervise( - actor, - LifeCycle(Permanent)) - :: Nil)).start - - Scheduler.schedule(actor, Ping, 500, 500, TimeUnit.MILLISECONDS) - // appx 2 pings before crash - Scheduler.scheduleOnce(actor, Crash, 1000, TimeUnit.MILLISECONDS) - - assert(restartLatch.tryAwait(2, TimeUnit.SECONDS)) - // should be enough time for the ping countdown to recover and reach 6 pings - 
assert(pingLatch.await(4, TimeUnit.SECONDS)) - } -} diff --git a/akka-core/src/test/scala/routing/RoutingSpec.scala b/akka-core/src/test/scala/routing/RoutingSpec.scala deleted file mode 100644 index b51fa11a0e..0000000000 --- a/akka-core/src/test/scala/routing/RoutingSpec.scala +++ /dev/null @@ -1,179 +0,0 @@ -package se.scalablesolutions.akka.actor.routing - -import se.scalablesolutions.akka.actor.Actor -import se.scalablesolutions.akka.actor.Actor._ -import se.scalablesolutions.akka.util.Logging - -import org.scalatest.Suite -import org.junit.runner.RunWith -import org.scalatest.junit.JUnitRunner -import org.scalatest.matchers.MustMatchers -import org.junit.Test - -import java.util.concurrent.atomic.AtomicInteger -import java.util.concurrent.{CountDownLatch, TimeUnit} -import se.scalablesolutions.akka.routing._ - -@RunWith(classOf[JUnitRunner]) -class RoutingSpec extends junit.framework.TestCase with Suite with MustMatchers with Logging { - import Routing._ - - @Test def testDispatcher = { - val (testMsg1,testMsg2,testMsg3,testMsg4) = ("test1","test2","test3","test4") - val targetOk = new AtomicInteger(0) - val t1 = actorOf( new Actor() { - def receive = { - case `testMsg1` => self.reply(3) - case `testMsg2` => self.reply(7) - } - } ).start - - val t2 = actorOf( new Actor() { - def receive = { - case `testMsg3` => self.reply(11) - } - }).start - - val d = dispatcherActor { - case `testMsg1`|`testMsg2` => t1 - case `testMsg3` => t2 - }.start - - val result = for { - a <- (d !! (testMsg1, 5000)).as[Int] - b <- (d !! (testMsg2, 5000)).as[Int] - c <- (d !! (testMsg3, 5000)).as[Int] - } yield a + b + c - - result.isDefined must be (true) - result.get must be(21) - - for(a <- List(t1,t2,d)) a.stop - } - - @Test def testLogger = { - val msgs = new java.util.concurrent.ConcurrentSkipListSet[Any] - val latch = new CountDownLatch(2) - val t1 = actor { - case _ => - } - val l = loggerActor(t1,(x) => { msgs.add(x); latch.countDown }).start - val foo : Any = "foo" - val bar : Any = "bar" - l ! foo - l ! bar - val done = latch.await(5,TimeUnit.SECONDS) - done must be (true) - msgs must ( have size (2) and contain (foo) and contain (bar) ) - t1.stop - l.stop - } - - @Test def testSmallestMailboxFirstDispatcher = { - val t1ProcessedCount = new AtomicInteger(0) - val latch = new CountDownLatch(500) - val t1 = actor { - case x => - Thread.sleep(50) // slow actor - t1ProcessedCount.incrementAndGet - latch.countDown - } - - val t2ProcessedCount = new AtomicInteger(0) - val t2 = actor { - case x => t2ProcessedCount.incrementAndGet - latch.countDown - } - val d = loadBalancerActor(new SmallestMailboxFirstIterator(t1 :: t2 :: Nil)) - for (i <- 1 to 500) d ! i - val done = latch.await(10,TimeUnit.SECONDS) - done must be (true) - t1ProcessedCount.get must be < (t2ProcessedCount.get) // because t1 is much slower and thus has a bigger mailbox all the time - for(a <- List(t1,t2,d)) a.stop - } - - @Test def testListener = { - val latch = new CountDownLatch(2) - val foreachListener = new CountDownLatch(2) - val num = new AtomicInteger(0) - val i = actorOf(new Actor with Listeners { - def receive = listenerManagement orElse { - case "foo" => gossip("bar") - } - }) - i.start - - def newListener = actor { - case "bar" => - num.incrementAndGet - latch.countDown - case "foo" => foreachListener.countDown - } - - val a1 = newListener - val a2 = newListener - val a3 = newListener - - i ! Listen(a1) - i ! Listen(a2) - i ! Listen(a3) - i ! Deafen(a3) - i ! WithListeners(_ ! "foo") - i ! 
"foo" - - val done = latch.await(5,TimeUnit.SECONDS) - done must be (true) - num.get must be (2) - val withListeners = foreachListener.await(5,TimeUnit.SECONDS) - withListeners must be (true) - for(a <- List(i,a1,a2,a3)) a.stop - } - - @Test def testIsDefinedAt = { - import se.scalablesolutions.akka.actor.ActorRef - - val (testMsg1,testMsg2,testMsg3,testMsg4) = ("test1","test2","test3","test4") - - val t1 = actorOf( new Actor() { - def receive = { - case `testMsg1` => self.reply(3) - case `testMsg2` => self.reply(7) - } - } ).start - - val t2 = actorOf( new Actor() { - def receive = { - case `testMsg1` => self.reply(3) - case `testMsg2` => self.reply(7) - } - } ).start - - val t3 = actorOf( new Actor() { - def receive = { - case `testMsg1` => self.reply(3) - case `testMsg2` => self.reply(7) - } - } ).start - - val t4 = actorOf( new Actor() { - def receive = { - case `testMsg1` => self.reply(3) - case `testMsg2` => self.reply(7) - } - } ).start - - val d1 = loadBalancerActor(new SmallestMailboxFirstIterator(t1 :: t2 :: Nil)) - val d2 = loadBalancerActor(new CyclicIterator[ActorRef](t3 :: t4 :: Nil)) - - t1.isDefinedAt(testMsg1) must be (true) - t1.isDefinedAt(testMsg3) must be (false) - t2.isDefinedAt(testMsg1) must be (true) - t2.isDefinedAt(testMsg3) must be (false) - d1.isDefinedAt(testMsg1) must be (true) - d1.isDefinedAt(testMsg3) must be (false) - d2.isDefinedAt(testMsg1) must be (true) - d2.isDefinedAt(testMsg3) must be (false) - - for(a <- List(t1,t2,d1,d2)) a.stop - } -} diff --git a/akka-karaf/akka-features/src/main/resources/features.xml b/akka-karaf/akka-features/src/main/resources/features.xml index db48a13785..067e64f193 100644 --- a/akka-karaf/akka-features/src/main/resources/features.xml +++ b/akka-karaf/akka-features/src/main/resources/features.xml @@ -13,10 +13,10 @@ mvn:sjson.json/sjson/0.6-SNAPSHOT - + sjson mvn:se.scalablesolutions.akka.akka-wrap/jgroups-wrapper_2.8.0.RC3_osgi/2.9.0.GA mvn:org.jboss.netty/netty/3.2.0.CR1 - mvn:se.scalablesolutions.akka/akka-core_2.8.0.RC3_osgi/0.9 + mvn:se.scalablesolutions.akka/akka-remote_2.8.0.RC3_osgi/0.9 diff --git a/akka-core/src/main/protocol/RemoteProtocol.proto b/akka-remote/src/main/protocol/RemoteProtocol.proto similarity index 98% rename from akka-core/src/main/protocol/RemoteProtocol.proto rename to akka-remote/src/main/protocol/RemoteProtocol.proto index 6cf9bfd534..567bf54eba 100644 --- a/akka-core/src/main/protocol/RemoteProtocol.proto +++ b/akka-remote/src/main/protocol/RemoteProtocol.proto @@ -7,7 +7,7 @@ option optimize_for = SPEED; /****************************************** Compile with: - cd ./akka-core/src/main/protocol + cd ./akka-remote/src/main/protocol protoc RemoteProtocol.proto --java_out ../java *******************************************/ diff --git a/akka-core/src/main/scala/remote/BootableRemoteActorService.scala b/akka-remote/src/main/scala/remote/BootableRemoteActorService.scala similarity index 100% rename from akka-core/src/main/scala/remote/BootableRemoteActorService.scala rename to akka-remote/src/main/scala/remote/BootableRemoteActorService.scala diff --git a/akka-core/src/main/scala/remote/Cluster.scala b/akka-remote/src/main/scala/remote/Cluster.scala similarity index 100% rename from akka-core/src/main/scala/remote/Cluster.scala rename to akka-remote/src/main/scala/remote/Cluster.scala diff --git a/akka-core/src/main/scala/remote/JGroupsClusterActor.scala b/akka-remote/src/main/scala/remote/JGroupsClusterActor.scala similarity index 100% rename from 
akka-core/src/main/scala/remote/JGroupsClusterActor.scala rename to akka-remote/src/main/scala/remote/JGroupsClusterActor.scala diff --git a/akka-core/src/main/scala/remote/MessageSerializer.scala b/akka-remote/src/main/scala/remote/MessageSerializer.scala similarity index 100% rename from akka-core/src/main/scala/remote/MessageSerializer.scala rename to akka-remote/src/main/scala/remote/MessageSerializer.scala diff --git a/akka-core/src/main/scala/remote/RemoteClient.scala b/akka-remote/src/main/scala/remote/RemoteClient.scala similarity index 92% rename from akka-core/src/main/scala/remote/RemoteClient.scala rename to akka-remote/src/main/scala/remote/RemoteClient.scala index 6a8245e8b8..8a555bf5a1 100644 --- a/akka-core/src/main/scala/remote/RemoteClient.scala +++ b/akka-remote/src/main/scala/remote/RemoteClient.scala @@ -5,12 +5,14 @@ package se.scalablesolutions.akka.remote import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ -import se.scalablesolutions.akka.actor.{Exit, Actor, ActorRef, RemoteActorRef, IllegalActorStateException} +import se.scalablesolutions.akka.actor.{Exit, Actor, ActorRef, ActorType, RemoteActorRef, RemoteActorSerialization, IllegalActorStateException} import se.scalablesolutions.akka.dispatch.{DefaultCompletableFuture, CompletableFuture} import se.scalablesolutions.akka.util.{ListenerManagement, UUID, Logging, Duration} import se.scalablesolutions.akka.config.Config._ import se.scalablesolutions.akka.AkkaException import Actor._ +import RemoteActorSerialization._ + import org.jboss.netty.channel._ import group.DefaultChannelGroup import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory @@ -118,7 +120,8 @@ object RemoteClient extends Logging { private[akka] def clientFor(hostname: String, port: Int, loader: Option[ClassLoader]): RemoteClient = clientFor(new InetSocketAddress(hostname, port), loader) - private[akka] def clientFor(address: InetSocketAddress, loader: Option[ClassLoader]): RemoteClient = synchronized { + private[akka] def clientFor( + address: InetSocketAddress, loader: Option[ClassLoader]): RemoteClient = synchronized { val hostname = address.getHostName val port = address.getPort val hash = hostname + ':' + port @@ -151,7 +154,7 @@ object RemoteClient extends Logging { remoteClients.clear } - private[akka] def register(hostname: String, port: Int, uuid: String) = synchronized { + def register(hostname: String, port: Int, uuid: String) = synchronized { actorsFor(RemoteServer.Address(hostname, port)) += uuid } @@ -177,7 +180,8 @@ object RemoteClient extends Logging { * * @author Jonas Bonér */ -class RemoteClient private[akka] (val hostname: String, val port: Int, val loader: Option[ClassLoader] = None) +class RemoteClient private[akka] ( + val hostname: String, val port: Int, val loader: Option[ClassLoader] = None) extends Logging with ListenerManagement { val name = "RemoteClient@" + hostname + "::" + port @@ -200,7 +204,8 @@ class RemoteClient private[akka] (val hostname: String, val port: Int, val loade Duration(config.getInt("akka.remote.client.reconnection-time-window", 600), TIME_UNIT).toMillis @volatile private var reconnectionTimeWindowStart = 0L - bootstrap.setPipelineFactory(new RemoteClientPipelineFactory(name, futures, supervisors, bootstrap, remoteAddress, timer, this)) + bootstrap.setPipelineFactory(new RemoteClientPipelineFactory( + name, futures, supervisors, bootstrap, remoteAddress, timer, this)) bootstrap.setOption("tcpNoDelay", true) bootstrap.setOption("keepAlive", true) @@ -242,7 +247,24 @@ class 
RemoteClient private[akka] (val hostname: String, val port: Int, val loade protected override def manageLifeCycleOfListeners = false - def send[T](request: RemoteRequestProtocol, senderFuture: Option[CompletableFuture[T]]): Option[CompletableFuture[T]] = if (isRunning) { + def send[T]( + message: Any, + senderOption: Option[ActorRef], + senderFuture: Option[CompletableFuture[T]], + remoteAddress: InetSocketAddress, + timeout: Long, + isOneWay: Boolean, + actorRef: ActorRef, + typedActorInfo: Option[Tuple2[String, String]], + actorType: ActorType): Option[CompletableFuture[T]] = { + send(createRemoteRequestProtocolBuilder( + actorRef, message, isOneWay, senderOption, typedActorInfo, actorType).build, senderFuture) + } + + def send[T]( + request: RemoteRequestProtocol, + senderFuture: Option[CompletableFuture[T]]): + Option[CompletableFuture[T]] = if (isRunning) { if (request.getIsOneWay) { connection.getChannel.write(request) None @@ -256,7 +278,8 @@ class RemoteClient private[akka] (val hostname: String, val port: Int, val loade } } } else { - val exception = new RemoteClientException("Remote client is not running, make sure you have invoked 'RemoteClient.connect' before using it.", this) + val exception = new RemoteClientException( + "Remote client is not running, make sure you have invoked 'RemoteClient.connect' before using it.", this) foreachListener(l => l ! RemoteClientError(exception, this)) throw exception } diff --git a/akka-core/src/main/scala/remote/RemoteServer.scala b/akka-remote/src/main/scala/remote/RemoteServer.scala similarity index 98% rename from akka-core/src/main/scala/remote/RemoteServer.scala rename to akka-remote/src/main/scala/remote/RemoteServer.scala index 9c8f7454fa..f20f78cc96 100644 --- a/akka-core/src/main/scala/remote/RemoteServer.scala +++ b/akka-remote/src/main/scala/remote/RemoteServer.scala @@ -13,6 +13,7 @@ import se.scalablesolutions.akka.actor._ import se.scalablesolutions.akka.actor.Actor._ import se.scalablesolutions.akka.util._ import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ +import se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType._ import se.scalablesolutions.akka.config.Config._ import org.jboss.netty.bootstrap.ServerBootstrap @@ -443,9 +444,9 @@ class RemoteServerHandler( private def handleRemoteRequestProtocol(request: RemoteRequestProtocol, channel: Channel) = { log.debug("Received RemoteRequestProtocol[\n%s]", request.toString) val actorType = request.getActorInfo.getActorType - if (actorType == ActorType.SCALA_ACTOR) dispatchToActor(request, channel) - else if (actorType == ActorType.JAVA_ACTOR) throw new IllegalActorStateException("ActorType JAVA_ACTOR is currently not supported") - else if (actorType == ActorType.TYPED_ACTOR) dispatchToTypedActor(request, channel) + if (actorType == SCALA_ACTOR) dispatchToActor(request, channel) + else if (actorType == JAVA_ACTOR) throw new IllegalActorStateException("ActorType JAVA_ACTOR is currently not supported") + else if (actorType == TYPED_ACTOR) dispatchToTypedActor(request, channel) else throw new IllegalActorStateException("Unknown ActorType [" + actorType + "]") } diff --git a/akka-core/src/main/scala/serialization/Binary.scala b/akka-remote/src/main/scala/serialization/Binary.scala similarity index 100% rename from akka-core/src/main/scala/serialization/Binary.scala rename to akka-remote/src/main/scala/serialization/Binary.scala diff --git a/akka-core/src/main/scala/serialization/Compression.scala b/akka-remote/src/main/scala/serialization/Compression.scala 
similarity index 100% rename from akka-core/src/main/scala/serialization/Compression.scala rename to akka-remote/src/main/scala/serialization/Compression.scala diff --git a/akka-core/src/main/scala/serialization/Serializable.scala b/akka-remote/src/main/scala/serialization/Serializable.scala similarity index 100% rename from akka-core/src/main/scala/serialization/Serializable.scala rename to akka-remote/src/main/scala/serialization/Serializable.scala diff --git a/akka-remote/src/main/scala/serialization/SerializationProtocol.scala b/akka-remote/src/main/scala/serialization/SerializationProtocol.scala new file mode 100644 index 0000000000..e7bee8e9b9 --- /dev/null +++ b/akka-remote/src/main/scala/serialization/SerializationProtocol.scala @@ -0,0 +1,284 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.actor + +import se.scalablesolutions.akka.config.{AllForOneStrategy, OneForOneStrategy, FaultHandlingStrategy} +import se.scalablesolutions.akka.config.ScalaConfig._ +import se.scalablesolutions.akka.stm.global._ +import se.scalablesolutions.akka.stm.TransactionManagement._ +import se.scalablesolutions.akka.stm.TransactionManagement +import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ +import se.scalablesolutions.akka.remote.{RemoteServer, RemoteRequestProtocolIdFactory, MessageSerializer} +import se.scalablesolutions.akka.remote.protocol.RemoteProtocol.ActorType._ +import se.scalablesolutions.akka.serialization.Serializer + +import com.google.protobuf.ByteString + +/** + * Type class definition for Actor Serialization + */ +trait FromBinary[T <: Actor] { + def fromBinary(bytes: Array[Byte], act: T): T +} + +trait ToBinary[T <: Actor] { + def toBinary(t: T): Array[Byte] +} + +// client needs to implement Format[] for the respective actor +trait Format[T <: Actor] extends FromBinary[T] with ToBinary[T] + +/** + * A default implementation for a stateless actor + * + * Create a Format object with the client actor as the implementation of the type class + * + *

+ * object BinaryFormatMyStatelessActor {
+ *   implicit object MyStatelessActorFormat extends StatelessActorFormat[MyStatelessActor]
+ * }
+ * </pre>
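// ---------------------------------------------------------------------------------------
// Editor's sketch (not part of this patch): how the StatelessActorFormat above could be
// used together with the ActorSerialization module defined further down in this file.
// MyStatelessActor is a hypothetical actor; explicit type arguments are used so the
// example does not depend on implicit-driven type inference.
// ---------------------------------------------------------------------------------------
import se.scalablesolutions.akka.actor._
import se.scalablesolutions.akka.actor.Actor._

class MyStatelessActor extends Actor {
  def receive = { case msg => self.reply(msg) }
}

object BinaryFormatMyStatelessActor {
  implicit object MyStatelessActorFormat extends StatelessActorFormat[MyStatelessActor]
}

object StatelessRoundTrip {
  import BinaryFormatMyStatelessActor._
  def roundTrip(): ActorRef = {
    val ref   = actorOf[MyStatelessActor].start
    val bytes = ActorSerialization.toBinary[MyStatelessActor](ref)  // actor state is ignored (Array.empty)
    ActorSerialization.fromBinary[MyStatelessActor](bytes)          // rebuilt from the class name alone
  }
}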
+ */ +trait StatelessActorFormat[T <: Actor] extends Format[T] { + def fromBinary(bytes: Array[Byte], act: T) = act + def toBinary(ac: T) = Array.empty[Byte] +} + +/** + * A default implementation of the type class for a Format that specifies a serializer + * + * Create a Format object with the client actor as the implementation of the type class and + * a serializer object + * + * <pre>
+ * object BinaryFormatMyJavaSerializableActor {
+ *   implicit object MyJavaSerializableActorFormat extends SerializerBasedActorFormat[MyJavaSerializableActor] {
+ *     val serializer = Serializer.Java
+ *   }
+ * }
+ * </pre>
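// ---------------------------------------------------------------------------------------
// Editor's sketch (not part of this patch): a SerializerBasedActorFormat that delegates to
// Serializer.Java, for a hypothetical actor whose state is java.io.Serializable. With this
// format both the ActorRef metadata and the actor instance itself survive the round trip.
// ---------------------------------------------------------------------------------------
import se.scalablesolutions.akka.actor._
import se.scalablesolutions.akka.actor.Actor._
import se.scalablesolutions.akka.serialization.Serializer

class MyJavaSerializableActor extends Actor with java.io.Serializable {
  var count = 0
  def receive = { case "hit" => count += 1; self.reply(count) }
}

object BinaryFormatMyJavaSerializableActor {
  implicit object MyJavaSerializableActorFormat
      extends SerializerBasedActorFormat[MyJavaSerializableActor] {
    val serializer = Serializer.Java
  }
}

object SerializerBasedRoundTrip {
  import BinaryFormatMyJavaSerializableActor._
  def roundTrip(): ActorRef = {
    val ref = actorOf[MyJavaSerializableActor].start
    ref !! "hit"                                                     // mutate some state first
    val bytes = ActorSerialization.toBinary[MyJavaSerializableActor](ref)
    ActorSerialization.fromBinary[MyJavaSerializableActor](bytes)    // state is restored as well
  }
}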
+ */ +trait SerializerBasedActorFormat[T <: Actor] extends Format[T] { + val serializer: Serializer + def fromBinary(bytes: Array[Byte], act: T) = serializer.fromBinary(bytes, Some(act.self.actorClass)).asInstanceOf[T] + def toBinary(ac: T) = serializer.toBinary(ac) +} + +/** + * Module for local actor serialization + */ +object ActorSerialization { + + def fromBinary[T <: Actor](bytes: Array[Byte])(implicit format: Format[T]): ActorRef = + fromBinaryToLocalActorRef(bytes, format) + + def toBinary[T <: Actor](a: ActorRef)(implicit format: Format[T]): Array[Byte] = + toSerializedActorRefProtocol(a, format).toByteArray + + // wrapper for implicits to be used by Java + def fromBinaryJ[T <: Actor](bytes: Array[Byte], format: Format[T]): ActorRef = + fromBinary(bytes)(format) + + // wrapper for implicits to be used by Java + def toBinaryJ[T <: Actor](a: ActorRef, format: Format[T]): Array[Byte] = + toBinary(a)(format) + + private def toSerializedActorRefProtocol[T <: Actor]( + actorRef: ActorRef, format: Format[T]): SerializedActorRefProtocol = { + val lifeCycleProtocol: Option[LifeCycleProtocol] = { + def setScope(builder: LifeCycleProtocol.Builder, scope: Scope) = scope match { + case Permanent => builder.setLifeCycle(LifeCycleType.PERMANENT) + case Temporary => builder.setLifeCycle(LifeCycleType.TEMPORARY) + } + val builder = LifeCycleProtocol.newBuilder + actorRef.lifeCycle match { + case Some(LifeCycle(scope)) => + setScope(builder, scope) + Some(builder.build) + case None => None + } + } + + val originalAddress = AddressProtocol.newBuilder + .setHostname(actorRef.homeAddress.getHostName) + .setPort(actorRef.homeAddress.getPort) + .build + + val builder = SerializedActorRefProtocol.newBuilder + .setUuid(actorRef.uuid) + .setId(actorRef.id) + .setActorClassname(actorRef.actorClass.getName) + .setOriginalAddress(originalAddress) + .setIsTransactor(actorRef.isTransactor) + .setTimeout(actorRef.timeout) + + actorRef.receiveTimeout.foreach(builder.setReceiveTimeout(_)) + builder.setActorInstance(ByteString.copyFrom(format.toBinary(actorRef.actor.asInstanceOf[T]))) + lifeCycleProtocol.foreach(builder.setLifeCycle(_)) + actorRef.supervisor.foreach(s => builder.setSupervisor(RemoteActorSerialization.toRemoteActorRefProtocol(s))) + // FIXME: how to serialize the hotswap PartialFunction ?? 
+ //hotswap.foreach(builder.setHotswapStack(_)) + builder.build + } + + private def fromBinaryToLocalActorRef[T <: Actor](bytes: Array[Byte], format: Format[T]): ActorRef = + fromProtobufToLocalActorRef(SerializedActorRefProtocol.newBuilder.mergeFrom(bytes).build, format, None) + + private def fromProtobufToLocalActorRef[T <: Actor]( + protocol: SerializedActorRefProtocol, format: Format[T], loader: Option[ClassLoader]): ActorRef = { + Actor.log.debug("Deserializing SerializedActorRefProtocol to LocalActorRef:\n" + protocol) + + val serializer = + if (format.isInstanceOf[SerializerBasedActorFormat[_]]) + Some(format.asInstanceOf[SerializerBasedActorFormat[_]].serializer) + else None + + val lifeCycle = + if (protocol.hasLifeCycle) { + val lifeCycleProtocol = protocol.getLifeCycle + Some(if (lifeCycleProtocol.getLifeCycle == LifeCycleType.PERMANENT) LifeCycle(Permanent) + else if (lifeCycleProtocol.getLifeCycle == LifeCycleType.TEMPORARY) LifeCycle(Temporary) + else throw new IllegalActorStateException("LifeCycle type is not valid: " + lifeCycleProtocol.getLifeCycle)) + } else None + + val supervisor = + if (protocol.hasSupervisor) + Some(RemoteActorSerialization.fromProtobufToRemoteActorRef(protocol.getSupervisor, loader)) + else None + + val hotswap = + if (serializer.isDefined && protocol.hasHotswapStack) Some(serializer.get + .fromBinary(protocol.getHotswapStack.toByteArray, Some(classOf[PartialFunction[Any, Unit]])) + .asInstanceOf[PartialFunction[Any, Unit]]) + else None + + val classLoader = loader.getOrElse(getClass.getClassLoader) + + val factory = () => { + val actorClass = classLoader.loadClass(protocol.getActorClassname) + if (format.isInstanceOf[SerializerBasedActorFormat[_]]) + format.asInstanceOf[SerializerBasedActorFormat[_]].serializer.fromBinary( + protocol.getActorInstance.toByteArray, Some(actorClass)).asInstanceOf[Actor] + else actorClass.newInstance.asInstanceOf[Actor] + } + + val ar = new LocalActorRef( + protocol.getUuid, + protocol.getId, + protocol.getOriginalAddress.getHostname, + protocol.getOriginalAddress.getPort, + if (protocol.hasIsTransactor) protocol.getIsTransactor else false, + if (protocol.hasTimeout) protocol.getTimeout else Actor.TIMEOUT, + if (protocol.hasReceiveTimeout) Some(protocol.getReceiveTimeout) else None, + lifeCycle, + supervisor, + hotswap, + classLoader, // TODO: should we fall back to getClass.getClassLoader? + factory) + + val messages = protocol.getMessagesList.toArray.toList.asInstanceOf[List[RemoteRequestProtocol]] + messages.foreach(message => ar ! MessageSerializer.deserialize(message.getMessage)) + + if (format.isInstanceOf[SerializerBasedActorFormat[_]] == false) + format.fromBinary(protocol.getActorInstance.toByteArray, ar.actor.asInstanceOf[T]) + ar + } +} + +object RemoteActorSerialization { + /** + * Deserializes a byte array (Array[Byte]) into an RemoteActorRef instance. + */ + def fromBinaryToRemoteActorRef(bytes: Array[Byte]): ActorRef = + fromProtobufToRemoteActorRef(RemoteActorRefProtocol.newBuilder.mergeFrom(bytes).build, None) + + /** + * Deserializes a byte array (Array[Byte]) into an RemoteActorRef instance. + */ + def fromBinaryToRemoteActorRef(bytes: Array[Byte], loader: ClassLoader): ActorRef = + fromProtobufToRemoteActorRef(RemoteActorRefProtocol.newBuilder.mergeFrom(bytes).build, Some(loader)) + + /** + * Deserializes a RemoteActorRefProtocol Protocol Buffers (protobuf) Message into an RemoteActorRef instance. 
+ */ + private[akka] def fromProtobufToRemoteActorRef(protocol: RemoteActorRefProtocol, loader: Option[ClassLoader]): ActorRef = { + Actor.log.debug("Deserializing RemoteActorRefProtocol to RemoteActorRef:\n" + protocol) + RemoteActorRef( + protocol.getUuid, + protocol.getActorClassname, + protocol.getHomeAddress.getHostname, + protocol.getHomeAddress.getPort, + protocol.getTimeout, + loader) + } + + /** + * Serializes the ActorRef instance into a Protocol Buffers (protobuf) Message. + */ + def toRemoteActorRefProtocol(ar: ActorRef): RemoteActorRefProtocol = { + import ar._ + val host = homeAddress.getHostName + val port = homeAddress.getPort + + if (!registeredInRemoteNodeDuringSerialization) { + Actor.log.debug("Register serialized Actor [%s] as remote @ [%s:%s]", actorClass.getName, host, port) + RemoteServer.getOrCreateServer(homeAddress) + RemoteServer.registerActor(homeAddress, uuid, ar) + registeredInRemoteNodeDuringSerialization = true + } + + RemoteActorRefProtocol.newBuilder + .setUuid(uuid) + .setActorClassname(actorClass.getName) + .setHomeAddress(AddressProtocol.newBuilder.setHostname(host).setPort(port).build) + .setTimeout(timeout) + .build + } + + def createRemoteRequestProtocolBuilder( + actorRef: ActorRef, + message: Any, + isOneWay: Boolean, + senderOption: Option[ActorRef], + typedActorInfo: Option[Tuple2[String, String]], + actorType: ActorType): + RemoteRequestProtocol.Builder = { + import actorRef._ + + val actorInfoBuilder = ActorInfoProtocol.newBuilder + .setUuid(uuid) + .setTarget(actorClassName) + .setTimeout(timeout) + + typedActorInfo.foreach { typedActor => + actorInfoBuilder.setTypedActorInfo( + TypedActorInfoProtocol.newBuilder + .setInterface(typedActor._1) + .setMethod(typedActor._2) + .build) + } + + actorType match { + case ActorType.ScalaActor => actorInfoBuilder.setActorType(SCALA_ACTOR) + case ActorType.TypedActor => actorInfoBuilder.setActorType(TYPED_ACTOR) + } + val actorInfo = actorInfoBuilder.build + + val requestBuilder = RemoteRequestProtocol.newBuilder + .setId(RemoteRequestProtocolIdFactory.nextId) + .setMessage(MessageSerializer.serialize(message)) + .setActorInfo(actorInfo) + .setIsOneWay(isOneWay) + + val id = registerSupervisorAsRemoteActor + if (id.isDefined) requestBuilder.setSupervisorUuid(id.get) + + senderOption.foreach { sender => + RemoteServer.getOrCreateServer(sender.homeAddress).register(sender.uuid, sender) + requestBuilder.setSender(toRemoteActorRefProtocol(sender)) + } + requestBuilder + } +} diff --git a/akka-core/src/main/scala/serialization/Serializer.scala b/akka-remote/src/main/scala/serialization/Serializer.scala similarity index 100% rename from akka-core/src/main/scala/serialization/Serializer.scala rename to akka-remote/src/main/scala/serialization/Serializer.scala diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/ProtobufProtocol.java b/akka-remote/src/test/java/se/scalablesolutions/akka/actor/ProtobufProtocol.java similarity index 98% rename from akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/ProtobufProtocol.java rename to akka-remote/src/test/java/se/scalablesolutions/akka/actor/ProtobufProtocol.java index 683f008729..183d2025d0 100644 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/ProtobufProtocol.java +++ b/akka-remote/src/test/java/se/scalablesolutions/akka/actor/ProtobufProtocol.java @@ -15,47 +15,47 @@ public final class ProtobufProtocol { initFields(); } private ProtobufPOJO(boolean noInit) {} - + private static final ProtobufPOJO 
defaultInstance; public static ProtobufPOJO getDefaultInstance() { return defaultInstance; } - + public ProtobufPOJO getDefaultInstanceForType() { return defaultInstance; } - + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_fieldAccessorTable; } - + // required uint64 id = 1; public static final int ID_FIELD_NUMBER = 1; private boolean hasId; private long id_ = 0L; public boolean hasId() { return hasId; } public long getId() { return id_; } - + // required string name = 2; public static final int NAME_FIELD_NUMBER = 2; private boolean hasName; private java.lang.String name_ = ""; public boolean hasName() { return hasName; } public java.lang.String getName() { return name_; } - + // required bool status = 3; public static final int STATUS_FIELD_NUMBER = 3; private boolean hasStatus; private boolean status_ = false; public boolean hasStatus() { return hasStatus; } public boolean getStatus() { return status_; } - + private void initFields() { } public final boolean isInitialized() { @@ -64,7 +64,7 @@ public final class ProtobufProtocol { if (!hasStatus) return false; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -79,12 +79,12 @@ public final class ProtobufProtocol { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (hasId()) { size += com.google.protobuf.CodedOutputStream @@ -102,7 +102,7 @@ public final class ProtobufProtocol { memoizedSerializedSize = size; return size; } - + public static se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { @@ -169,31 +169,31 @@ public final class ProtobufProtocol { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder { private se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO result; - + // Construct using se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO.newBuilder() private Builder() {} - + private static Builder create() { Builder builder = new Builder(); builder.result = new se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO(); return builder; } - + protected se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO internalGetResult() { return result; } - + public Builder clear() { if (result == null) { throw new IllegalStateException( @@ -202,20 +202,20 @@ public final class ProtobufProtocol { result = new se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO(); return this; } - + public Builder clone() { return 
create().mergeFrom(result); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO.getDescriptor(); } - + public se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO getDefaultInstanceForType() { return se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO.getDefaultInstance(); } - + public boolean isInitialized() { return result.isInitialized(); } @@ -225,7 +225,7 @@ public final class ProtobufProtocol { } return buildPartial(); } - + private se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { if (!isInitialized()) { @@ -234,7 +234,7 @@ public final class ProtobufProtocol { } return buildPartial(); } - + public se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO buildPartial() { if (result == null) { throw new IllegalStateException( @@ -244,7 +244,7 @@ public final class ProtobufProtocol { result = null; return returnMe; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO) { return mergeFrom((se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO)other); @@ -253,7 +253,7 @@ public final class ProtobufProtocol { return this; } } - + public Builder mergeFrom(se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO other) { if (other == se.scalablesolutions.akka.actor.ProtobufProtocol.ProtobufPOJO.getDefaultInstance()) return this; if (other.hasId()) { @@ -268,7 +268,7 @@ public final class ProtobufProtocol { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) @@ -305,8 +305,8 @@ public final class ProtobufProtocol { } } } - - + + // required uint64 id = 1; public boolean hasId() { return result.hasId(); @@ -324,7 +324,7 @@ public final class ProtobufProtocol { result.id_ = 0L; return this; } - + // required string name = 2; public boolean hasName() { return result.hasName(); @@ -345,7 +345,7 @@ public final class ProtobufProtocol { result.name_ = getDefaultInstance().getName(); return this; } - + // required bool status = 3; public boolean hasStatus() { return result.hasStatus(); @@ -363,19 +363,19 @@ public final class ProtobufProtocol { result.status_ = false; return this; } - + // @@protoc_insertion_point(builder_scope:se.scalablesolutions.akka.actor.ProtobufPOJO) } - + static { defaultInstance = new ProtobufPOJO(true); se.scalablesolutions.akka.actor.ProtobufProtocol.internalForceInit(); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:se.scalablesolutions.akka.actor.ProtobufPOJO) } - + public static final class Counter extends com.google.protobuf.GeneratedMessage { // Use Counter.newBuilder() to construct. 
@@ -383,40 +383,40 @@ public final class ProtobufProtocol { initFields(); } private Counter(boolean noInit) {} - + private static final Counter defaultInstance; public static Counter getDefaultInstance() { return defaultInstance; } - + public Counter getDefaultInstanceForType() { return defaultInstance; } - + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_Counter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_Counter_fieldAccessorTable; } - + // required uint32 count = 1; public static final int COUNT_FIELD_NUMBER = 1; private boolean hasCount; private int count_ = 0; public boolean hasCount() { return hasCount; } public int getCount() { return count_; } - + private void initFields() { } public final boolean isInitialized() { if (!hasCount) return false; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -425,12 +425,12 @@ public final class ProtobufProtocol { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (hasCount()) { size += com.google.protobuf.CodedOutputStream @@ -440,7 +440,7 @@ public final class ProtobufProtocol { memoizedSerializedSize = size; return size; } - + public static se.scalablesolutions.akka.actor.ProtobufProtocol.Counter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { @@ -507,31 +507,31 @@ public final class ProtobufProtocol { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(se.scalablesolutions.akka.actor.ProtobufProtocol.Counter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder { private se.scalablesolutions.akka.actor.ProtobufProtocol.Counter result; - + // Construct using se.scalablesolutions.akka.actor.ProtobufProtocol.Counter.newBuilder() private Builder() {} - + private static Builder create() { Builder builder = new Builder(); builder.result = new se.scalablesolutions.akka.actor.ProtobufProtocol.Counter(); return builder; } - + protected se.scalablesolutions.akka.actor.ProtobufProtocol.Counter internalGetResult() { return result; } - + public Builder clear() { if (result == null) { throw new IllegalStateException( @@ -540,20 +540,20 @@ public final class ProtobufProtocol { result = new se.scalablesolutions.akka.actor.ProtobufProtocol.Counter(); return this; } - + public Builder clone() { return create().mergeFrom(result); } - + public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return se.scalablesolutions.akka.actor.ProtobufProtocol.Counter.getDescriptor(); } - + public se.scalablesolutions.akka.actor.ProtobufProtocol.Counter getDefaultInstanceForType() { return se.scalablesolutions.akka.actor.ProtobufProtocol.Counter.getDefaultInstance(); } - + public boolean isInitialized() { return 
result.isInitialized(); } @@ -563,7 +563,7 @@ public final class ProtobufProtocol { } return buildPartial(); } - + private se.scalablesolutions.akka.actor.ProtobufProtocol.Counter buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { if (!isInitialized()) { @@ -572,7 +572,7 @@ public final class ProtobufProtocol { } return buildPartial(); } - + public se.scalablesolutions.akka.actor.ProtobufProtocol.Counter buildPartial() { if (result == null) { throw new IllegalStateException( @@ -582,7 +582,7 @@ public final class ProtobufProtocol { result = null; return returnMe; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof se.scalablesolutions.akka.actor.ProtobufProtocol.Counter) { return mergeFrom((se.scalablesolutions.akka.actor.ProtobufProtocol.Counter)other); @@ -591,7 +591,7 @@ public final class ProtobufProtocol { return this; } } - + public Builder mergeFrom(se.scalablesolutions.akka.actor.ProtobufProtocol.Counter other) { if (other == se.scalablesolutions.akka.actor.ProtobufProtocol.Counter.getDefaultInstance()) return this; if (other.hasCount()) { @@ -600,7 +600,7 @@ public final class ProtobufProtocol { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) @@ -629,8 +629,8 @@ public final class ProtobufProtocol { } } } - - + + // required uint32 count = 1; public boolean hasCount() { return result.hasCount(); @@ -648,19 +648,19 @@ public final class ProtobufProtocol { result.count_ = 0; return this; } - + // @@protoc_insertion_point(builder_scope:se.scalablesolutions.akka.actor.Counter) } - + static { defaultInstance = new Counter(true); se.scalablesolutions.akka.actor.ProtobufProtocol.internalForceInit(); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:se.scalablesolutions.akka.actor.Counter) } - + public static final class DualCounter extends com.google.protobuf.GeneratedMessage { // Use DualCounter.newBuilder() to construct. 
@@ -668,40 +668,40 @@ public final class ProtobufProtocol { initFields(); } private DualCounter(boolean noInit) {} - + private static final DualCounter defaultInstance; public static DualCounter getDefaultInstance() { return defaultInstance; } - + public DualCounter getDefaultInstanceForType() { return defaultInstance; } - + public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_DualCounter_descriptor; } - + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return se.scalablesolutions.akka.actor.ProtobufProtocol.internal_static_se_scalablesolutions_akka_actor_DualCounter_fieldAccessorTable; } - + // required uint32 count1 = 1; public static final int COUNT1_FIELD_NUMBER = 1; private boolean hasCount1; private int count1_ = 0; public boolean hasCount1() { return hasCount1; } public int getCount1() { return count1_; } - + // required uint32 count2 = 2; public static final int COUNT2_FIELD_NUMBER = 2; private boolean hasCount2; private int count2_ = 0; public boolean hasCount2() { return hasCount2; } public int getCount2() { return count2_; } - + private void initFields() { } public final boolean isInitialized() { @@ -709,7 +709,7 @@ public final class ProtobufProtocol { if (!hasCount2) return false; return true; } - + public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); @@ -721,12 +721,12 @@ public final class ProtobufProtocol { } getUnknownFields().writeTo(output); } - + private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; - + size = 0; if (hasCount1()) { size += com.google.protobuf.CodedOutputStream @@ -740,7 +740,7 @@ public final class ProtobufProtocol { memoizedSerializedSize = size; return size; } - + public static se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { @@ -807,31 +807,31 @@ public final class ProtobufProtocol { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } - + public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } - + public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder { private se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter result; - + // Construct using se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter.newBuilder() private Builder() {} - + private static Builder create() { Builder builder = new Builder(); builder.result = new se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter(); return builder; } - + protected se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter internalGetResult() { return result; } - + public Builder clear() { if (result == null) { throw new IllegalStateException( @@ -840,20 +840,20 @@ public final class ProtobufProtocol { result = new se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter(); return this; } - + public Builder clone() { return create().mergeFrom(result); } - + public com.google.protobuf.Descriptors.Descriptor 
getDescriptorForType() { return se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter.getDescriptor(); } - + public se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter getDefaultInstanceForType() { return se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter.getDefaultInstance(); } - + public boolean isInitialized() { return result.isInitialized(); } @@ -863,7 +863,7 @@ public final class ProtobufProtocol { } return buildPartial(); } - + private se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { if (!isInitialized()) { @@ -872,7 +872,7 @@ public final class ProtobufProtocol { } return buildPartial(); } - + public se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter buildPartial() { if (result == null) { throw new IllegalStateException( @@ -882,7 +882,7 @@ public final class ProtobufProtocol { result = null; return returnMe; } - + public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter) { return mergeFrom((se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter)other); @@ -891,7 +891,7 @@ public final class ProtobufProtocol { return this; } } - + public Builder mergeFrom(se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter other) { if (other == se.scalablesolutions.akka.actor.ProtobufProtocol.DualCounter.getDefaultInstance()) return this; if (other.hasCount1()) { @@ -903,7 +903,7 @@ public final class ProtobufProtocol { this.mergeUnknownFields(other.getUnknownFields()); return this; } - + public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) @@ -936,8 +936,8 @@ public final class ProtobufProtocol { } } } - - + + // required uint32 count1 = 1; public boolean hasCount1() { return result.hasCount1(); @@ -955,7 +955,7 @@ public final class ProtobufProtocol { result.count1_ = 0; return this; } - + // required uint32 count2 = 2; public boolean hasCount2() { return result.hasCount2(); @@ -973,19 +973,19 @@ public final class ProtobufProtocol { result.count2_ = 0; return this; } - + // @@protoc_insertion_point(builder_scope:se.scalablesolutions.akka.actor.DualCounter) } - + static { defaultInstance = new DualCounter(true); se.scalablesolutions.akka.actor.ProtobufProtocol.internalForceInit(); defaultInstance.initFields(); } - + // @@protoc_insertion_point(class_scope:se.scalablesolutions.akka.actor.DualCounter) } - + private static com.google.protobuf.Descriptors.Descriptor internal_static_se_scalablesolutions_akka_actor_ProtobufPOJO_descriptor; private static @@ -1001,7 +1001,7 @@ public final class ProtobufProtocol { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_se_scalablesolutions_akka_actor_DualCounter_fieldAccessorTable; - + public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; @@ -1053,8 +1053,8 @@ public final class ProtobufProtocol { new com.google.protobuf.Descriptors.FileDescriptor[] { }, assigner); } - + public static void internalForceInit() {} - + // @@protoc_insertion_point(outer_class_scope) } diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOne.java b/akka-remote/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOne.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOne.java rename to 
akka-remote/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOne.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOneImpl.java b/akka-remote/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOneImpl.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOneImpl.java rename to akka-remote/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOneImpl.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwo.java b/akka-remote/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwo.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwo.java rename to akka-remote/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwo.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwoImpl.java b/akka-remote/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwoImpl.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwoImpl.java rename to akka-remote/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwoImpl.java diff --git a/akka-typed-actors/src/main/java/se/scalablesolutions/akka/config/DependencyBinding.java b/akka-remote/src/test/java/se/scalablesolutions/akka/config/DependencyBinding.java similarity index 100% rename from akka-typed-actors/src/main/java/se/scalablesolutions/akka/config/DependencyBinding.java rename to akka-remote/src/test/java/se/scalablesolutions/akka/config/DependencyBinding.java diff --git a/akka-typed-actors/src/main/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java b/akka-remote/src/test/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java similarity index 100% rename from akka-typed-actors/src/main/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java rename to akka-remote/src/test/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java diff --git a/akka-core/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java b/akka-remote/src/test/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java similarity index 100% rename from akka-core/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java rename to akka-remote/src/test/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java diff --git a/akka-core/src/test/protocol/ProtobufProtocol.proto b/akka-remote/src/test/protocol/ProtobufProtocol.proto similarity index 92% rename from akka-core/src/test/protocol/ProtobufProtocol.proto rename to akka-remote/src/test/protocol/ProtobufProtocol.proto index 35ffec95e3..a63ebdff62 100644 --- a/akka-core/src/test/protocol/ProtobufProtocol.proto +++ b/akka-remote/src/test/protocol/ProtobufProtocol.proto @@ -6,7 +6,7 @@ package se.scalablesolutions.akka.actor; /* Compile with: - cd ./akka-core/src/test/protocol + cd ./akka-remote/src/test/protocol protoc ProtobufProtocol.proto --java_out ../java */ diff --git a/akka-core/src/test/resources/META-INF/aop.xml b/akka-remote/src/test/resources/META-INF/aop.xml similarity index 100% rename from akka-core/src/test/resources/META-INF/aop.xml rename to akka-remote/src/test/resources/META-INF/aop.xml diff --git a/akka-core/src/test/resources/logback-test.xml b/akka-remote/src/test/resources/logback-test.xml similarity index 100% rename from akka-core/src/test/resources/logback-test.xml 
rename to akka-remote/src/test/resources/logback-test.xml diff --git a/akka-actors/src/test/scala/Messages.scala b/akka-remote/src/test/scala/Messages.scala similarity index 83% rename from akka-actors/src/test/scala/Messages.scala rename to akka-remote/src/test/scala/Messages.scala index ad1fcf8885..2b99155626 100644 --- a/akka-actors/src/test/scala/Messages.scala +++ b/akka-remote/src/test/scala/Messages.scala @@ -8,14 +8,6 @@ import se.scalablesolutions.akka.serialization.Serializable import sbinary._ import sbinary.Operations._ -sealed abstract class TestMessage - -case object Ping extends TestMessage -case object Pong extends TestMessage -case object OneWay extends TestMessage -case object Die extends TestMessage -case object NotifySupervisorExit extends TestMessage - case class User(val usernamePassword: Tuple2[String, String], val email: String, val age: Int) diff --git a/akka-core/src/test/scala/remote/ClientInitiatedRemoteActorSpec.scala b/akka-remote/src/test/scala/remote/ClientInitiatedRemoteActorSpec.scala similarity index 100% rename from akka-core/src/test/scala/remote/ClientInitiatedRemoteActorSpec.scala rename to akka-remote/src/test/scala/remote/ClientInitiatedRemoteActorSpec.scala diff --git a/akka-core/src/test/scala/remote/RemoteSupervisorSpec.scala b/akka-remote/src/test/scala/remote/RemoteSupervisorSpec.scala similarity index 100% rename from akka-core/src/test/scala/remote/RemoteSupervisorSpec.scala rename to akka-remote/src/test/scala/remote/RemoteSupervisorSpec.scala diff --git a/akka-core/src/test/scala/remote/RemoteTransactionalTypedActorSpec.scala b/akka-remote/src/test/scala/remote/RemoteTransactionalTypedActorSpec.scala similarity index 100% rename from akka-core/src/test/scala/remote/RemoteTransactionalTypedActorSpec.scala rename to akka-remote/src/test/scala/remote/RemoteTransactionalTypedActorSpec.scala diff --git a/akka-core/src/test/scala/remote/RemoteTypedActorSpec.scala b/akka-remote/src/test/scala/remote/RemoteTypedActorSpec.scala similarity index 100% rename from akka-core/src/test/scala/remote/RemoteTypedActorSpec.scala rename to akka-remote/src/test/scala/remote/RemoteTypedActorSpec.scala diff --git a/akka-core/src/test/scala/remote/ServerInitiatedRemoteActorSample.scala b/akka-remote/src/test/scala/remote/ServerInitiatedRemoteActorSample.scala similarity index 100% rename from akka-core/src/test/scala/remote/ServerInitiatedRemoteActorSample.scala rename to akka-remote/src/test/scala/remote/ServerInitiatedRemoteActorSample.scala diff --git a/akka-core/src/test/scala/remote/ServerInitiatedRemoteActorSpec.scala b/akka-remote/src/test/scala/remote/ServerInitiatedRemoteActorSpec.scala similarity index 100% rename from akka-core/src/test/scala/remote/ServerInitiatedRemoteActorSpec.scala rename to akka-remote/src/test/scala/remote/ServerInitiatedRemoteActorSpec.scala diff --git a/akka-core/src/test/scala/remote/ShutdownSpec.scala b/akka-remote/src/test/scala/remote/ShutdownSpec.scala similarity index 100% rename from akka-core/src/test/scala/remote/ShutdownSpec.scala rename to akka-remote/src/test/scala/remote/ShutdownSpec.scala diff --git a/akka-core/src/test/scala/serialization/ProtobufActorMessageSerializationSpec.scala b/akka-remote/src/test/scala/serialization/ProtobufActorMessageSerializationSpec.scala similarity index 100% rename from akka-core/src/test/scala/serialization/ProtobufActorMessageSerializationSpec.scala rename to akka-remote/src/test/scala/serialization/ProtobufActorMessageSerializationSpec.scala diff --git 
a/akka-core/src/test/scala/serialization/SerializableTypeClassActorSpec.scala b/akka-remote/src/test/scala/serialization/SerializableTypeClassActorSpec.scala similarity index 100% rename from akka-core/src/test/scala/serialization/SerializableTypeClassActorSpec.scala rename to akka-remote/src/test/scala/serialization/SerializableTypeClassActorSpec.scala diff --git a/akka-core/src/test/scala/serialization/SerializerSpec.scala b/akka-remote/src/test/scala/serialization/SerializerSpec.scala similarity index 100% rename from akka-core/src/test/scala/serialization/SerializerSpec.scala rename to akka-remote/src/test/scala/serialization/SerializerSpec.scala diff --git a/akka-core/src/test/scala/serialization/UntypedActorSerializationSpec.scala b/akka-remote/src/test/scala/serialization/UntypedActorSerializationSpec.scala similarity index 100% rename from akka-core/src/test/scala/serialization/UntypedActorSerializationSpec.scala rename to akka-remote/src/test/scala/serialization/UntypedActorSerializationSpec.scala diff --git a/akka-core/src/test/scala/ticket/Ticket001Spec.scala b/akka-remote/src/test/scala/ticket/Ticket001Spec.scala similarity index 100% rename from akka-core/src/test/scala/ticket/Ticket001Spec.scala rename to akka-remote/src/test/scala/ticket/Ticket001Spec.scala diff --git a/akka-samples/akka-sample-chat/Buildfile b/akka-samples/akka-sample-chat/Buildfile index fecc093964..814e6e4149 100644 --- a/akka-samples/akka-sample-chat/Buildfile +++ b/akka-samples/akka-sample-chat/Buildfile @@ -7,7 +7,7 @@ repositories.remote << "http://www.ibiblio.org/maven2/" repositories.remote << "http://www.lag.net/repo" repositories.remote << "http://multiverse.googlecode.com/svn/maven-repository/releases" -AKKA = group('akka-core', 'akka-comet', 'akka-util','akka-kernel', 'akka-rest', 'akka-util-java', +AKKA = group('akka-remote', 'akka-comet', 'akka-util','akka-kernel', 'akka-rest', 'akka-util-java', 'akka-security','akka-persistence-common', 'akka-persistence-redis', 'akka-amqp', :under=> 'se.scalablesolutions.akka', diff --git a/akka-sbt-plugin/src/main/scala/AkkaProject.scala b/akka-sbt-plugin/src/main/scala/AkkaProject.scala index ecf19ae135..ad0f98eee1 100644 --- a/akka-sbt-plugin/src/main/scala/AkkaProject.scala +++ b/akka-sbt-plugin/src/main/scala/AkkaProject.scala @@ -46,6 +46,6 @@ trait AkkaProject extends AkkaBaseProject { // convenience method def akkaModule(module: String) = "se.scalablesolutions.akka" %% ("akka-" + module) % akkaVersion - // akka core dependency by default - val akkaCore = akkaModule("core") + // akka remote dependency by default + val akkaRemote = akkaModule("remote") } diff --git a/akka-typed-actor/src/main/java/se/scalablesolutions/akka/config/DependencyBinding.java b/akka-typed-actor/src/main/java/se/scalablesolutions/akka/config/DependencyBinding.java new file mode 100644 index 0000000000..2956e6860f --- /dev/null +++ b/akka-typed-actor/src/main/java/se/scalablesolutions/akka/config/DependencyBinding.java @@ -0,0 +1,24 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.config; + +/** + * @author Jonas Bonér + */ +public class DependencyBinding { + private final Class intf; + private final Object target; + + public DependencyBinding(final Class intf, final Object target) { + this.intf = intf; + this.target = target; + } + public Class getInterface() { + return intf; + } + public Object getTarget() { + return target; + } +} diff --git 
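For downstream builds, the AkkaProject.scala hunk above means sbt projects now pull in akka-remote instead of akka-core by default. A minimal sketch of a downstream sbt 0.7 project definition consuming the split modules follows; the "typed-actor" artifact id is an assumption, since this patch only shows the default akkaRemote dependency:

    import sbt._

    // Hypothetical downstream project definition mixing in the AkkaProject trait.
    class MyProject(info: ProjectInfo) extends DefaultProject(info) with AkkaProject {
      // akkaRemote is already provided by AkkaProject; further modules are
      // resolved with the same akkaModule helper shown in the hunk above.
      val akkaTypedActor = akkaModule("typed-actor")  // assumed artifact id
    }
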
a/akka-typed-actor/src/main/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java b/akka-typed-actor/src/main/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java new file mode 100644 index 0000000000..0c2ed11402 --- /dev/null +++ b/akka-typed-actor/src/main/java/se/scalablesolutions/akka/config/TypedActorGuiceModule.java @@ -0,0 +1,32 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.config; + +import java.util.List; + +import com.google.inject.AbstractModule; +import com.google.inject.Singleton; +//import com.google.inject.jsr250.ResourceProviderFactory; + +/** + * @author Jonas Bonér + */ +public class TypedActorGuiceModule extends AbstractModule { + private final List bindings; + + public TypedActorGuiceModule(final List bindings) { + this.bindings = bindings; + } + + protected void configure() { + //bind(ResourceProviderFactory.class); + for (int i = 0; i < bindings.size(); i++) { + final DependencyBinding db = bindings.get(i); + //if (db.getInterface() ne null) bind((Class) db.getInterface()).to((Class) db.getTarget()).in(Singleton.class); + //else + this.bind(db.getInterface()).toInstance(db.getTarget()); + } + } +} diff --git a/akka-typed-actors/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java b/akka-typed-actor/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java similarity index 100% rename from akka-typed-actors/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java rename to akka-typed-actor/src/main/java/se/scalablesolutions/akka/remote/protocol/RemoteProtocol.java diff --git a/akka-typed-actors/src/main/scala/actor/TypedActor.scala b/akka-typed-actor/src/main/scala/actor/TypedActor.scala similarity index 94% rename from akka-typed-actors/src/main/scala/actor/TypedActor.scala rename to akka-typed-actor/src/main/scala/actor/TypedActor.scala index c3d2444e55..96790b590b 100644 --- a/akka-typed-actors/src/main/scala/actor/TypedActor.scala +++ b/akka-typed-actor/src/main/scala/actor/TypedActor.scala @@ -6,12 +6,10 @@ package se.scalablesolutions.akka.actor import Actor._ import se.scalablesolutions.akka.config.FaultHandlingStrategy -import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ -import se.scalablesolutions.akka.remote.{MessageSerializer, RemoteClient, RemoteRequestProtocolIdFactory} import se.scalablesolutions.akka.dispatch.{MessageDispatcher, Future, CompletableFuture, Dispatchers} import se.scalablesolutions.akka.config.ScalaConfig._ -import se.scalablesolutions.akka.serialization.Serializer import se.scalablesolutions.akka.util._ +import ReflectiveAccess._ import org.codehaus.aspectwerkz.joinpoint.{MethodRtti, JoinPoint} import org.codehaus.aspectwerkz.proxy.Proxy @@ -226,8 +224,11 @@ abstract class TypedActor extends Actor with Proxyable { if (arg.getClass.getName.contains(TypedActor.AW_PROXY_PREFIX)) unserializable = true } if (!unserializable && hasMutableArgument) { - val copyOfArgs = Serializer.Java.deepClone(args) - joinPoint.getRtti.asInstanceOf[MethodRtti].setParameterValues(copyOfArgs.asInstanceOf[Array[AnyRef]]) + + //FIXME serializeArguments + // val copyOfArgs = Serializer.Java.deepClone(args) + // joinPoint.getRtti.asInstanceOf[MethodRtti].setParameterValues(copyOfArgs.asInstanceOf[Array[AnyRef]]) + joinPoint } } } @@ -538,11 +539,11 @@ object TypedActor extends Logging { private[akka] def supervise(restartStrategy: RestartStrategy, components: List[Supervise]): Supervisor = 
Supervisor(SupervisorConfig(restartStrategy, components)) - private[akka] def isJoinPointAndOneWay(message: AnyRef): Boolean = if (isJoinPoint(message)) + def isJoinPointAndOneWay(message: Any): Boolean = if (isJoinPoint(message)) isOneWay(message.asInstanceOf[JoinPoint].getRtti.asInstanceOf[MethodRtti]) else false - private[akka] def isJoinPoint(message: AnyRef): Boolean = message.isInstanceOf[JoinPoint]) + private[akka] def isJoinPoint(message: Any): Boolean = message.isInstanceOf[JoinPoint] } /** @@ -604,33 +605,14 @@ private[akka] sealed class TypedActorAspect { private def remoteDispatch(joinPoint: JoinPoint): AnyRef = { val methodRtti = joinPoint.getRtti.asInstanceOf[MethodRtti] val isOneWay = TypedActor.isOneWay(methodRtti) + val (message: Array[AnyRef], isEscaped) = escapeArguments(methodRtti.getParameterValues) - - val typedActorInfo = TypedActorInfoProtocol.newBuilder - .setInterface(interfaceClass.getName) - .setMethod(methodRtti.getMethod.getName) - .build - - val actorInfo = ActorInfoProtocol.newBuilder - .setUuid(uuid) - .setTarget(typedActor.getClass.getName) - .setTimeout(timeout) - .setActorType(ActorType.TYPED_ACTOR) - .setTypedActorInfo(typedActorInfo) - .build - - val requestBuilder = RemoteRequestProtocol.newBuilder - .setId(RemoteRequestProtocolIdFactory.nextId) - .setMessage(MessageSerializer.serialize(message)) - .setActorInfo(actorInfo) - .setIsOneWay(isOneWay) - - val id = actorRef.registerSupervisorAsRemoteActor - if (id.isDefined) requestBuilder.setSupervisorUuid(id.get) - - val remoteMessage = requestBuilder.build - - val future = RemoteClient.clientFor(remoteAddress.get).send(remoteMessage, None) + + val future = RemoteClientModule.send[AnyRef]( + message, None, None, remoteAddress.get, + timeout, isOneWay, actorRef, + Some((interfaceClass.getName, methodRtti.getMethod.getName)), + ActorType.TypedActor) if (isOneWay) null // for void methods else { diff --git a/akka-typed-actors/src/main/scala/config/TypedActorConfigurator.scala b/akka-typed-actor/src/main/scala/config/TypedActorConfigurator.scala similarity index 100% rename from akka-typed-actors/src/main/scala/config/TypedActorConfigurator.scala rename to akka-typed-actor/src/main/scala/config/TypedActorConfigurator.scala diff --git a/akka-typed-actors/src/main/scala/config/TypedActorGuiceConfigurator.scala b/akka-typed-actor/src/main/scala/config/TypedActorGuiceConfigurator.scala similarity index 97% rename from akka-typed-actors/src/main/scala/config/TypedActorGuiceConfigurator.scala rename to akka-typed-actor/src/main/scala/config/TypedActorGuiceConfigurator.scala index 718050655c..339c4d297d 100644 --- a/akka-typed-actors/src/main/scala/config/TypedActorGuiceConfigurator.scala +++ b/akka-typed-actor/src/main/scala/config/TypedActorGuiceConfigurator.scala @@ -6,8 +6,8 @@ package se.scalablesolutions.akka.config import se.scalablesolutions.akka.actor._ import se.scalablesolutions.akka.config.ScalaConfig._ -import se.scalablesolutions.akka.remote.RemoteServer -import se.scalablesolutions.akka.util.Logging +import se.scalablesolutions.akka.util._ +import ReflectiveAccess._ import org.codehaus.aspectwerkz.proxy.Proxy @@ -122,7 +122,7 @@ private[akka] class TypedActorGuiceConfigurator extends TypedActorConfiguratorBa remoteAddress.foreach { address => actorRef.makeRemote(remoteAddress.get) - RemoteServer.registerTypedActor(address, implementationClass.getName, proxy) + RemoteServerModule.registerTypedActor(address, implementationClass.getName, proxy) } AspectInitRegistry.register( diff --git 
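The remoteDispatch rewrite above replaces direct RemoteRequestProtocol/RemoteClient construction with RemoteClientModule, and the Guice configurator now registers through RemoteServerModule, so akka-typed-actor no longer needs a compile-time path to the remoting internals. A rough sketch of the reflective lookup pattern this relies on; it is illustrative only and not the actual ReflectiveAccess implementation, whose members are not shown in this patch:

    // Illustrative only: resolve the remoting layer by class name so the
    // typed-actor module can still load without akka-remote on the classpath.
    object RemoteModuleSketch {
      private lazy val remoteClientClass: Option[Class[_]] =
        try Some(Class.forName("se.scalablesolutions.akka.remote.RemoteClient"))
        catch { case _: ClassNotFoundException => None }

      def isRemotingEnabled: Boolean = remoteClientClass.isDefined

      def ensureRemotingEnabled(): Unit =
        if (!isRemotingEnabled)
          throw new IllegalStateException("akka-remote is not on the classpath")
    }
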
a/akka-core/src/test/java/se/scalablesolutions/akka/actor/Bar.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/Bar.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/Bar.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/Bar.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/BarImpl.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/BarImpl.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/BarImpl.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/BarImpl.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/Ext.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/Ext.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/Ext.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/Ext.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/ExtImpl.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/ExtImpl.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/ExtImpl.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/ExtImpl.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/Foo.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/Foo.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/Foo.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/Foo.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/FooImpl.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/FooImpl.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/FooImpl.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/FooImpl.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActor.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActor.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActor.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActor.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActorImpl.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActorImpl.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActorImpl.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActorImpl.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SamplePojoImpl.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/SamplePojoImpl.java similarity index 100% rename from 
akka-core/src/test/java/se/scalablesolutions/akka/actor/SamplePojoImpl.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/SamplePojoImpl.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCaller.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCaller.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCaller.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCaller.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCallerImpl.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCallerImpl.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCallerImpl.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCallerImpl.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoImpl.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoImpl.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoImpl.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoImpl.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActor.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActor.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActor.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActor.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActorImpl.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActorImpl.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActorImpl.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActorImpl.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailer.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailer.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailer.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailer.java diff --git a/akka-core/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailerImpl.java b/akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailerImpl.java similarity index 100% rename from akka-core/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailerImpl.java rename to akka-typed-actor/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailerImpl.java diff --git a/akka-typed-actors/src/test/resources/META-INF/aop.xml b/akka-typed-actor/src/test/resources/META-INF/aop.xml similarity index 100% rename from 
akka-typed-actors/src/test/resources/META-INF/aop.xml rename to akka-typed-actor/src/test/resources/META-INF/aop.xml diff --git a/akka-core/src/test/scala/actor/typed-actor/NestedTransactionalTypedActorSpec.scala b/akka-typed-actor/src/test/scala/actor/typed-actor/NestedTransactionalTypedActorSpec.scala similarity index 100% rename from akka-core/src/test/scala/actor/typed-actor/NestedTransactionalTypedActorSpec.scala rename to akka-typed-actor/src/test/scala/actor/typed-actor/NestedTransactionalTypedActorSpec.scala diff --git a/akka-core/src/test/scala/actor/typed-actor/RestartNestedTransactionalTypedActorSpec.scala b/akka-typed-actor/src/test/scala/actor/typed-actor/RestartNestedTransactionalTypedActorSpec.scala similarity index 100% rename from akka-core/src/test/scala/actor/typed-actor/RestartNestedTransactionalTypedActorSpec.scala rename to akka-typed-actor/src/test/scala/actor/typed-actor/RestartNestedTransactionalTypedActorSpec.scala diff --git a/akka-core/src/test/scala/actor/typed-actor/RestartTransactionalTypedActorSpec.scala b/akka-typed-actor/src/test/scala/actor/typed-actor/RestartTransactionalTypedActorSpec.scala similarity index 100% rename from akka-core/src/test/scala/actor/typed-actor/RestartTransactionalTypedActorSpec.scala rename to akka-typed-actor/src/test/scala/actor/typed-actor/RestartTransactionalTypedActorSpec.scala diff --git a/akka-core/src/test/scala/actor/typed-actor/TransactionalTypedActorSpec.scala b/akka-typed-actor/src/test/scala/actor/typed-actor/TransactionalTypedActorSpec.scala similarity index 100% rename from akka-core/src/test/scala/actor/typed-actor/TransactionalTypedActorSpec.scala rename to akka-typed-actor/src/test/scala/actor/typed-actor/TransactionalTypedActorSpec.scala diff --git a/akka-core/src/test/scala/actor/typed-actor/TypedActorContextSpec.scala b/akka-typed-actor/src/test/scala/actor/typed-actor/TypedActorContextSpec.scala similarity index 100% rename from akka-core/src/test/scala/actor/typed-actor/TypedActorContextSpec.scala rename to akka-typed-actor/src/test/scala/actor/typed-actor/TypedActorContextSpec.scala diff --git a/akka-core/src/test/scala/actor/typed-actor/TypedActorGuiceConfiguratorSpec.scala b/akka-typed-actor/src/test/scala/actor/typed-actor/TypedActorGuiceConfiguratorSpec.scala similarity index 100% rename from akka-core/src/test/scala/actor/typed-actor/TypedActorGuiceConfiguratorSpec.scala rename to akka-typed-actor/src/test/scala/actor/typed-actor/TypedActorGuiceConfiguratorSpec.scala diff --git a/akka-core/src/test/scala/actor/typed-actor/TypedActorLifecycleSpec.scala b/akka-typed-actor/src/test/scala/actor/typed-actor/TypedActorLifecycleSpec.scala similarity index 100% rename from akka-core/src/test/scala/actor/typed-actor/TypedActorLifecycleSpec.scala rename to akka-typed-actor/src/test/scala/actor/typed-actor/TypedActorLifecycleSpec.scala diff --git a/akka-core/src/test/scala/actor/typed-actor/TypedActorSpec.scala b/akka-typed-actor/src/test/scala/actor/typed-actor/TypedActorSpec.scala similarity index 100% rename from akka-core/src/test/scala/actor/typed-actor/TypedActorSpec.scala rename to akka-typed-actor/src/test/scala/actor/typed-actor/TypedActorSpec.scala diff --git a/akka-core/src/test/scala/actor/typed-actor/TypedActorUtilFunctionsSpec.scala b/akka-typed-actor/src/test/scala/actor/typed-actor/TypedActorUtilFunctionsSpec.scala similarity index 100% rename from akka-core/src/test/scala/actor/typed-actor/TypedActorUtilFunctionsSpec.scala rename to 
akka-typed-actor/src/test/scala/actor/typed-actor/TypedActorUtilFunctionsSpec.scala diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Bar.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Bar.java deleted file mode 100644 index 906476b789..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Bar.java +++ /dev/null @@ -1,6 +0,0 @@ -package se.scalablesolutions.akka.actor; - -public interface Bar { - void bar(String msg); - Ext getExt(); -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/BarImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/BarImpl.java deleted file mode 100644 index 9cb41a85cf..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/BarImpl.java +++ /dev/null @@ -1,16 +0,0 @@ -package se.scalablesolutions.akka.actor; - -import com.google.inject.Inject; -import se.scalablesolutions.akka.actor.*; - -public class BarImpl extends TypedActor implements Bar { - @Inject - private Ext ext; - - public Ext getExt() { - return ext; - } - - public void bar(String msg) { - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Ext.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Ext.java deleted file mode 100644 index c37219cf00..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Ext.java +++ /dev/null @@ -1,6 +0,0 @@ -package se.scalablesolutions.akka.actor; - -public interface Ext { - void ext(); -} - diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/ExtImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/ExtImpl.java deleted file mode 100644 index dd8ca55089..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/ExtImpl.java +++ /dev/null @@ -1,6 +0,0 @@ -package se.scalablesolutions.akka.actor; - -public class ExtImpl implements Ext { - public void ext() { - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Foo.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Foo.java deleted file mode 100644 index a64f975bce..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/Foo.java +++ /dev/null @@ -1,14 +0,0 @@ -package se.scalablesolutions.akka.actor; - -public interface Foo { - public Foo body(); - public Bar getBar(); - - public String foo(String msg); - public void bar(String msg); - - public String longRunning(); - public String throwsException(); - - public int $tag() throws java.rmi.RemoteException; -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/FooImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/FooImpl.java deleted file mode 100644 index ded09f4e07..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/FooImpl.java +++ /dev/null @@ -1,40 +0,0 @@ -package se.scalablesolutions.akka.actor; - -import com.google.inject.Inject; -import se.scalablesolutions.akka.actor.*; - -public class FooImpl extends TypedActor implements Foo { - @Inject - private Bar bar; - - public Foo body() { return this; } - - public Bar getBar() { - return bar; - } - - public String foo(String msg) { - return msg + "return_foo "; - } - - public void bar(String msg) { - bar.bar(msg); - } - - public String longRunning() { - try { - Thread.sleep(1200); - } catch (InterruptedException e) { - } - return "test"; - } - - public String throwsException() 
{ - if (true) throw new RuntimeException("Expected exception; to test fault-tolerance"); - return "test"; - } - - public int $tag() throws java.rmi.RemoteException { - return 0; - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActor.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActor.java deleted file mode 100644 index fbd241763f..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActor.java +++ /dev/null @@ -1,12 +0,0 @@ -package se.scalablesolutions.akka.actor; - -public interface NestedTransactionalTypedActor { - public String getMapState(String key); - public String getVectorState(); - public String getRefState(); - public void setMapState(String key, String msg); - public void setVectorState(String msg); - public void setRefState(String msg); - public void success(String key, String msg); - public String failure(String key, String msg, TypedActorFailer failer); -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActorImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActorImpl.java deleted file mode 100644 index 1b95517c22..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/NestedTransactionalTypedActorImpl.java +++ /dev/null @@ -1,59 +0,0 @@ -package se.scalablesolutions.akka.actor; - -import se.scalablesolutions.akka.actor.*; -import se.scalablesolutions.akka.stm.*; - -public class NestedTransactionalTypedActorImpl extends TypedTransactor implements NestedTransactionalTypedActor { - private TransactionalMap mapState; - private TransactionalVector vectorState; - private Ref refState; - private boolean isInitialized = false; - - @Override - public void init() { - if (!isInitialized) { - mapState = new TransactionalMap(); - vectorState = new TransactionalVector(); - refState = new Ref(); - isInitialized = true; - } - } - - public String getMapState(String key) { - return (String) mapState.get(key).get(); - } - - public String getVectorState() { - return (String) vectorState.last(); - } - - public String getRefState() { - return (String) refState.get(); - } - - public void setMapState(String key, String msg) { - mapState.put(key, msg); - } - - public void setVectorState(String msg) { - vectorState.add(msg); - } - - public void setRefState(String msg) { - refState.swap(msg); - } - - public void success(String key, String msg) { - mapState.put(key, msg); - vectorState.add(msg); - refState.swap(msg); - } - - public String failure(String key, String msg, TypedActorFailer failer) { - mapState.put(key, msg); - vectorState.add(msg); - refState.swap(msg); - failer.fail(); - return msg; - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOne.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOne.java deleted file mode 100644 index dd03a45d12..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOne.java +++ /dev/null @@ -1,6 +0,0 @@ -package se.scalablesolutions.akka.actor; - -public interface RemoteTypedActorOne { - public String requestReply(String s) throws Exception; - public void oneWay() throws Exception; -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOneImpl.java 
b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOneImpl.java deleted file mode 100644 index 715e5366a4..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorOneImpl.java +++ /dev/null @@ -1,29 +0,0 @@ -package se.scalablesolutions.akka.actor.remote; - -import se.scalablesolutions.akka.actor.*; - -import java.util.concurrent.CountDownLatch; - -public class RemoteTypedActorOneImpl extends TypedActor implements RemoteTypedActorOne { - - public static CountDownLatch latch = new CountDownLatch(1); - - public String requestReply(String s) throws Exception { - if (s.equals("ping")) { - RemoteTypedActorLog.messageLog().put("ping"); - return "pong"; - } else if (s.equals("die")) { - throw new RuntimeException("Expected exception; to test fault-tolerance"); - } else return null; - } - - public void oneWay() throws Exception { - RemoteTypedActorLog.oneWayLog().put("oneway"); - } - - @Override - public void preRestart(Throwable e) { - try { RemoteTypedActorLog.messageLog().put(e.getMessage()); } catch(Exception ex) {} - latch.countDown(); - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwo.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwo.java deleted file mode 100644 index 5fd289b8c2..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwo.java +++ /dev/null @@ -1,6 +0,0 @@ -package se.scalablesolutions.akka.actor; - -public interface RemoteTypedActorTwo { - public String requestReply(String s) throws Exception; - public void oneWay() throws Exception; -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwoImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwoImpl.java deleted file mode 100644 index a5882fd4e6..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/RemoteTypedActorTwoImpl.java +++ /dev/null @@ -1,29 +0,0 @@ -package se.scalablesolutions.akka.actor.remote; - -import se.scalablesolutions.akka.actor.*; - -import java.util.concurrent.CountDownLatch; - -public class RemoteTypedActorTwoImpl extends TypedActor implements RemoteTypedActorTwo { - - public static CountDownLatch latch = new CountDownLatch(1); - - public String requestReply(String s) throws Exception { - if (s.equals("ping")) { - RemoteTypedActorLog.messageLog().put("ping"); - return "pong"; - } else if (s.equals("die")) { - throw new RuntimeException("Expected exception; to test fault-tolerance"); - } else return null; - } - - public void oneWay() throws Exception { - RemoteTypedActorLog.oneWayLog().put("oneway"); - } - - @Override - public void preRestart(Throwable e) { - try { RemoteTypedActorLog.messageLog().put(e.getMessage()); } catch(Exception ex) {} - latch.countDown(); - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java deleted file mode 100644 index 5d06afdc9c..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SamplePojo.java +++ /dev/null @@ -1,8 +0,0 @@ -package se.scalablesolutions.akka.actor; - -import java.util.concurrent.CountDownLatch; - -public interface SamplePojo { - public String greet(String s); - public String fail(); -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SamplePojoImpl.java 
b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SamplePojoImpl.java deleted file mode 100644 index 12985c72ce..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SamplePojoImpl.java +++ /dev/null @@ -1,45 +0,0 @@ -package se.scalablesolutions.akka.actor; - -import se.scalablesolutions.akka.actor.*; - -import java.util.concurrent.CountDownLatch; - -public class SamplePojoImpl extends TypedActor implements SamplePojo { - - public static CountDownLatch latch = new CountDownLatch(1); - - public static boolean _pre = false; - public static boolean _post = false; - public static boolean _down = false; - public static void reset() { - _pre = false; - _post = false; - _down = false; - } - - public String greet(String s) { - return "hello " + s; - } - - public String fail() { - throw new RuntimeException("expected"); - } - - @Override - public void preRestart(Throwable e) { - _pre = true; - latch.countDown(); - } - - @Override - public void postRestart(Throwable e) { - _post = true; - latch.countDown(); - } - - @Override - public void shutdown() { - _down = true; - latch.countDown(); - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java deleted file mode 100644 index d3a18abbd9..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojo.java +++ /dev/null @@ -1,14 +0,0 @@ -package se.scalablesolutions.akka.actor; - -import se.scalablesolutions.akka.dispatch.Future; -import se.scalablesolutions.akka.dispatch.CompletableFuture; -import se.scalablesolutions.akka.dispatch.Future; - -public interface SimpleJavaPojo { - public Object getSender(); - public Object getSenderFuture(); - public Future square(int value); - public void setName(String name); - public String getName(); - public void throwException(); -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCaller.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCaller.java deleted file mode 100644 index e35702846f..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCaller.java +++ /dev/null @@ -1,9 +0,0 @@ -package se.scalablesolutions.akka.actor; - -import se.scalablesolutions.akka.dispatch.CompletableFuture; - -public interface SimpleJavaPojoCaller { - public void setPojo(SimpleJavaPojo pojo); - public Object getSenderFromSimpleJavaPojo(); - public Object getSenderFutureFromSimpleJavaPojo(); -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCallerImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCallerImpl.java deleted file mode 100644 index 760b69f8b9..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoCallerImpl.java +++ /dev/null @@ -1,26 +0,0 @@ -package se.scalablesolutions.akka.actor; - -import se.scalablesolutions.akka.actor.*; -import se.scalablesolutions.akka.dispatch.Future; - -public class SimpleJavaPojoCallerImpl extends TypedActor implements SimpleJavaPojoCaller { - - SimpleJavaPojo pojo; - - public void setPojo(SimpleJavaPojo pojo) { - this.pojo = pojo; - } - - public Object getSenderFromSimpleJavaPojo() { - Object sender = pojo.getSender(); - return sender; - } - - public Object getSenderFutureFromSimpleJavaPojo() { - return pojo.getSenderFuture(); - } - - public Future 
square(int value) { - return future(value * value); - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoImpl.java deleted file mode 100644 index c02d266ce8..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/SimpleJavaPojoImpl.java +++ /dev/null @@ -1,53 +0,0 @@ -package se.scalablesolutions.akka.actor; - -import se.scalablesolutions.akka.actor.*; -import se.scalablesolutions.akka.dispatch.Future; -import se.scalablesolutions.akka.dispatch.CompletableFuture; - -public class SimpleJavaPojoImpl extends TypedActor implements SimpleJavaPojo { - - public static boolean _pre = false; - public static boolean _post = false; - public static boolean _down = false; - public static void reset() { - _pre = false; - _post = false; - _down = false; - } - - private String name; - - public Future square(int value) { - return future(value * value); - } - - public Object getSender() { - return getContext().getSender(); - } - - public CompletableFuture getSenderFuture() { - return getContext().getSenderFuture().get(); - } - - public void setName(String name) { - this.name = name; - } - - public String getName() { - return name; - } - - @Override - public void preRestart(Throwable e) { - _pre = true; - } - - @Override - public void postRestart(Throwable e) { - _post = true; - } - - public void throwException() { - throw new RuntimeException(); - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActor.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActor.java deleted file mode 100644 index 6e7c43745b..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActor.java +++ /dev/null @@ -1,14 +0,0 @@ -package se.scalablesolutions.akka.actor; - -public interface TransactionalTypedActor { - public String getMapState(String key); - public String getVectorState(); - public String getRefState(); - public void setMapState(String key, String msg); - public void setVectorState(String msg); - public void setRefState(String msg); - public void success(String key, String msg); - public void success(String key, String msg, NestedTransactionalTypedActor nested); - public String failure(String key, String msg, TypedActorFailer failer); - public String failure(String key, String msg, NestedTransactionalTypedActor nested, TypedActorFailer failer); -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActorImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActorImpl.java deleted file mode 100644 index 9b32f5d329..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TransactionalTypedActorImpl.java +++ /dev/null @@ -1,84 +0,0 @@ -package se.scalablesolutions.akka.actor; - -import se.scalablesolutions.akka.actor.*; -import se.scalablesolutions.akka.stm.*; - -public class TransactionalTypedActorImpl extends TypedTransactor implements TransactionalTypedActor { - private TransactionalMap mapState; - private TransactionalVector vectorState; - private Ref refState; - private boolean isInitialized = false; - - @Override - public void initTransactionalState() { - if (!isInitialized) { - mapState = new TransactionalMap(); - vectorState = new TransactionalVector(); - refState = new Ref(); - isInitialized = true; - } - } - - public 
String getMapState(String key) { - return (String)mapState.get(key).get(); - } - - public String getVectorState() { - return (String)vectorState.last(); - } - - public String getRefState() { - return (String)refState.get(); - } - - public void setMapState(String key, String msg) { - mapState.put(key, msg); - } - - public void setVectorState(String msg) { - vectorState.add(msg); - } - - public void setRefState(String msg) { - refState.swap(msg); - } - - public void success(String key, String msg) { - mapState.put(key, msg); - vectorState.add(msg); - refState.swap(msg); - } - - public void success(String key, String msg, NestedTransactionalTypedActor nested) { - mapState.put(key, msg); - vectorState.add(msg); - refState.swap(msg); - nested.success(key, msg); - } - - public String failure(String key, String msg, TypedActorFailer failer) { - mapState.put(key, msg); - vectorState.add(msg); - refState.swap(msg); - failer.fail(); - return msg; - } - - public String failure(String key, String msg, NestedTransactionalTypedActor nested, TypedActorFailer failer) { - mapState.put(key, msg); - vectorState.add(msg); - refState.swap(msg); - nested.failure(key, msg, failer); - return msg; - } - - @Override - public void preRestart(Throwable e) { - System.out.println("################ PRE RESTART"); - } - - @Override - public void postRestart(Throwable e) { - System.out.println("################ POST RESTART"); - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailer.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailer.java deleted file mode 100644 index e0b1e72c33..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailer.java +++ /dev/null @@ -1,5 +0,0 @@ -package se.scalablesolutions.akka.actor; - -public interface TypedActorFailer extends java.io.Serializable { - public int fail(); -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailerImpl.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailerImpl.java deleted file mode 100644 index 89a97330df..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/actor/TypedActorFailerImpl.java +++ /dev/null @@ -1,9 +0,0 @@ -package se.scalablesolutions.akka.actor; - -import se.scalablesolutions.akka.actor.*; - -public class TypedActorFailerImpl extends TypedActor implements TypedActorFailer { - public int fail() { - throw new RuntimeException("expected"); - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/Address.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/Address.java deleted file mode 100644 index cb3057929f..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/Address.java +++ /dev/null @@ -1,13 +0,0 @@ -package se.scalablesolutions.akka.stm; - -public class Address { - private String location; - - public Address(String location) { - this.location = location; - } - - @Override public String toString() { - return "Address(" + location + ")"; - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/CounterExample.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/CounterExample.java deleted file mode 100644 index 57a9a07daa..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/CounterExample.java +++ /dev/null @@ -1,26 +0,0 @@ -package se.scalablesolutions.akka.stm; - -import 
se.scalablesolutions.akka.stm.Ref; -import se.scalablesolutions.akka.stm.local.Atomic; - -public class CounterExample { - final static Ref ref = new Ref(0); - - public static int counter() { - return new Atomic() { - public Integer atomically() { - int inc = ref.get() + 1; - ref.set(inc); - return inc; - } - }.execute(); - } - - public static void main(String[] args) { - System.out.println(); - System.out.println("Counter example"); - System.out.println(); - System.out.println("counter 1: " + counter()); - System.out.println("counter 2: " + counter()); - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/JavaStmTests.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/JavaStmTests.java deleted file mode 100644 index 7204013808..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/JavaStmTests.java +++ /dev/null @@ -1,91 +0,0 @@ -package se.scalablesolutions.akka.stm; - -import static org.junit.Assert.*; -import org.junit.Test; -import org.junit.Before; - -import se.scalablesolutions.akka.stm.*; -import se.scalablesolutions.akka.stm.local.Atomic; - -import org.multiverse.api.ThreadLocalTransaction; -import org.multiverse.api.TransactionConfiguration; -import org.multiverse.api.exceptions.ReadonlyException; - -public class JavaStmTests { - - private Ref ref; - - private int getRefValue() { - return new Atomic() { - public Integer atomically() { - return ref.get(); - } - }.execute(); - } - - public int increment() { - return new Atomic() { - public Integer atomically() { - int inc = ref.get() + 1; - ref.set(inc); - return inc; - } - }.execute(); - } - - @Before public void initialise() { - ref = new Ref(0); - } - - @Test public void incrementRef() { - assertEquals(0, getRefValue()); - increment(); - increment(); - increment(); - assertEquals(3, getRefValue()); - } - - @Test public void failSetRef() { - assertEquals(0, getRefValue()); - try { - new Atomic() { - public Object atomically() { - ref.set(3); - throw new RuntimeException(); - } - }.execute(); - } catch(RuntimeException e) {} - assertEquals(0, getRefValue()); - } - - @Test public void configureTransaction() { - TransactionFactory txFactory = new TransactionFactoryBuilder() - .setFamilyName("example") - .setReadonly(true) - .build(); - - // get transaction config from multiverse - TransactionConfiguration config = new Atomic(txFactory) { - public TransactionConfiguration atomically() { - ref.get(); - return ThreadLocalTransaction.getThreadLocalTransaction().getConfiguration(); - } - }.execute(); - - assertEquals("example", config.getFamilyName()); - assertEquals(true, config.isReadonly()); - } - - @Test(expected=ReadonlyException.class) public void failReadonlyTransaction() { - TransactionFactory txFactory = new TransactionFactoryBuilder() - .setFamilyName("example") - .setReadonly(true) - .build(); - - new Atomic(txFactory) { - public Object atomically() { - return ref.set(3); - } - }.execute(); - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/RefExample.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/RefExample.java deleted file mode 100644 index f590524fd7..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/RefExample.java +++ /dev/null @@ -1,36 +0,0 @@ -package se.scalablesolutions.akka.stm; - -import se.scalablesolutions.akka.stm.Ref; -import se.scalablesolutions.akka.stm.local.Atomic; - -public class RefExample { - public static void main(String[] args) { - 
System.out.println(); - System.out.println("Ref example"); - System.out.println(); - - final Ref ref = new Ref(0); - - Integer value1 = new Atomic() { - public Integer atomically() { - return ref.get(); - } - }.execute(); - - System.out.println("value 1: " + value1); - - new Atomic() { - public Object atomically() { - return ref.set(5); - } - }.execute(); - - Integer value2 = new Atomic() { - public Integer atomically() { - return ref.get(); - } - }.execute(); - - System.out.println("value 2: " + value2); - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/StmExamples.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/StmExamples.java deleted file mode 100644 index a8526f2dd0..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/StmExamples.java +++ /dev/null @@ -1,18 +0,0 @@ -package se.scalablesolutions.akka.stm; - -import se.scalablesolutions.akka.stm.Ref; -import se.scalablesolutions.akka.stm.local.Atomic; - -public class StmExamples { - public static void main(String[] args) { - System.out.println(); - System.out.println("STM examples"); - System.out.println(); - - CounterExample.main(args); - RefExample.main(args); - TransactionFactoryExample.main(args); - TransactionalMapExample.main(args); - TransactionalVectorExample.main(args); - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionFactoryExample.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionFactoryExample.java deleted file mode 100644 index 00dd87b7c5..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionFactoryExample.java +++ /dev/null @@ -1,30 +0,0 @@ -package se.scalablesolutions.akka.stm; - -import se.scalablesolutions.akka.stm.*; -import se.scalablesolutions.akka.stm.local.Atomic; - -import org.multiverse.api.ThreadLocalTransaction; -import org.multiverse.api.TransactionConfiguration; - -public class TransactionFactoryExample { - public static void main(String[] args) { - System.out.println(); - System.out.println("TransactionFactory example"); - System.out.println(); - - TransactionFactory txFactory = new TransactionFactoryBuilder() - .setFamilyName("example") - .setReadonly(true) - .build(); - - new Atomic(txFactory) { - public Object atomically() { - // check config has been passed to multiverse - TransactionConfiguration config = ThreadLocalTransaction.getThreadLocalTransaction().getConfiguration(); - System.out.println("family name: " + config.getFamilyName()); - System.out.println("readonly: " + config.isReadonly()); - return null; - } - }.execute(); - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionalMapExample.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionalMapExample.java deleted file mode 100644 index 7c4940c7a5..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionalMapExample.java +++ /dev/null @@ -1,35 +0,0 @@ -package se.scalablesolutions.akka.stm; - -import se.scalablesolutions.akka.stm.*; -import se.scalablesolutions.akka.stm.local.Atomic; - -public class TransactionalMapExample { - public static void main(String[] args) { - System.out.println(); - System.out.println("TransactionalMap example"); - System.out.println(); - - final TransactionalMap users = new TransactionalMap(); - - // fill users map (in a transaction) - new Atomic() { - public Object atomically() { - users.put("bill", new User("bill")); - 
users.put("mary", new User("mary")); - users.put("john", new User("john")); - return null; - } - }.execute(); - - System.out.println("users: " + users); - - // access users map (in a transaction) - User user = new Atomic() { - public User atomically() { - return users.get("bill").get(); - } - }.execute(); - - System.out.println("user: " + user); - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionalVectorExample.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionalVectorExample.java deleted file mode 100644 index 7274848beb..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/TransactionalVectorExample.java +++ /dev/null @@ -1,34 +0,0 @@ -package se.scalablesolutions.akka.stm; - -import se.scalablesolutions.akka.stm.*; -import se.scalablesolutions.akka.stm.local.Atomic; - -public class TransactionalVectorExample { - public static void main(String[] args) { - System.out.println(); - System.out.println("TransactionalVector example"); - System.out.println(); - - final TransactionalVector
<Address> addresses = new TransactionalVector<Address>(); - - // fill addresses vector (in a transaction) - new Atomic() { - public Object atomically() { - addresses.add(new Address("somewhere")); - addresses.add(new Address("somewhere else")); - return null; - } - }.execute(); - - System.out.println("addresses: " + addresses); - - // access addresses vector (in a transaction) - Address address = new Atomic<Address>
() { - public Address atomically() { - return addresses.get(0); - } - }.execute(); - - System.out.println("address: " + address); - } -} diff --git a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/User.java b/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/User.java deleted file mode 100644 index c9dc4b3723..0000000000 --- a/akka-typed-actors/src/test/java/se/scalablesolutions/akka/stm/User.java +++ /dev/null @@ -1,13 +0,0 @@ -package se.scalablesolutions.akka.stm; - -public class User { - private String name; - - public User(String name) { - this.name = name; - } - - @Override public String toString() { - return "User(" + name + ")"; - } -} diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/NestedTransactionalTypedActorSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/NestedTransactionalTypedActorSpec.scala deleted file mode 100644 index 7338e8df41..0000000000 --- a/akka-typed-actors/src/test/scala/actor/typed-actor/NestedTransactionalTypedActorSpec.scala +++ /dev/null @@ -1,102 +0,0 @@ - /** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor - -import org.scalatest.Spec -import org.scalatest.Assertions -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.BeforeAndAfterAll -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith - -import se.scalablesolutions.akka.actor._ - -@RunWith(classOf[JUnitRunner]) -class NestedTransactionalTypedActorSpec extends - Spec with - ShouldMatchers with - BeforeAndAfterAll { - - private var messageLog = "" - - override def afterAll { - // ActorRegistry.shutdownAll - } - - describe("Declaratively nested supervised transactional in-memory TypedActor") { - - it("map should not rollback state for stateful server in case of success") { - val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) - stateful.setMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init") // set init state - val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) - nested.setMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init") // set init state - stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state", nested) // transactionrequired - stateful.getMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess") should equal("new state") - nested.getMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess") should equal("new state") - } - - it("map should rollback state for stateful server in case of failure") { - val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) - stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state - val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) - nested.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state - val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl]) - try { - stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer) - fail("should have thrown an exception") - } catch { case e => {} } - stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init") - 
nested.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init") - } - - it("vector should not rollback state for stateful server in case of success") { - val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) - stateful.setVectorState("init") // set init state - val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) - nested.setVectorState("init") // set init state - stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state", nested) // transactionrequired - stateful.getVectorState should equal("new state") - nested.getVectorState should equal("new state") - } - - it("vector should rollback state for stateful server in case of failure") { - val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) - stateful.setVectorState("init") // set init state - val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) - nested.setVectorState("init") // set init state - val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl]) - try { - stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer) - fail("should have thrown an exception") - } catch { case e => {} } - stateful.getVectorState should equal("init") - nested.getVectorState should equal("init") - } - - it("ref should not rollback state for stateful server in case of success") { - val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) - val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) - stateful.setRefState("init") // set init state - nested.setRefState("init") // set init state - stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state", nested) - stateful.getRefState should equal("new state") - nested.getRefState should equal("new state") - } - - it("ref should rollback state for stateful server in case of failure") { - val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl]) - val nested = TypedActor.newInstance(classOf[NestedTransactionalTypedActor], classOf[NestedTransactionalTypedActorImpl]) - stateful.setRefState("init") // set init state - nested.setRefState("init") // set init state - val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl]) - try { - stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer) - fail("should have thrown an exception") - } catch { case e => {} } - stateful.getRefState should equal("init") - nested.getRefState should equal("init") - } - } -} diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/RestartNestedTransactionalTypedActorSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/RestartNestedTransactionalTypedActorSpec.scala deleted file mode 100644 index 1769a5c47b..0000000000 --- a/akka-typed-actors/src/test/scala/actor/typed-actor/RestartNestedTransactionalTypedActorSpec.scala +++ /dev/null @@ -1,118 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.actor - -import org.scalatest.Spec -import org.scalatest.Assertions -import org.scalatest.matchers.ShouldMatchers 
-import org.scalatest.BeforeAndAfterAll
-import org.scalatest.junit.JUnitRunner
-import org.junit.runner.RunWith
-
-import se.scalablesolutions.akka.config.Config
-import se.scalablesolutions.akka.config._
-import se.scalablesolutions.akka.config.TypedActorConfigurator
-import se.scalablesolutions.akka.config.JavaConfig._
-import se.scalablesolutions.akka.actor._
-
-@RunWith(classOf[JUnitRunner])
-class RestartNestedTransactionalTypedActorSpec extends
-  Spec with
-  ShouldMatchers with
-  BeforeAndAfterAll {
-
-  private val conf = new TypedActorConfigurator
-  private var messageLog = ""
-
-  override def beforeAll {
-    /*
-    Config.config
-    conf.configure(
-      new RestartStrategy(new AllForOne, 3, 5000, List(classOf[Exception]).toArray),
-      List(
-        new Component(classOf[TransactionalTypedActor],
-          new LifeCycle(new Permanent),
-          10000),
-        new Component(classOf[NestedTransactionalTypedActor],
-          new LifeCycle(new Permanent),
-          10000),
-        new Component(classOf[TypedActorFailer],
-          new LifeCycle(new Permanent),
-          10000)
-      ).toArray).supervise
-    */
-  }
-
-  override def afterAll {
-    /*
-    conf.stop
-    ActorRegistry.shutdownAll
-    */
-  }
-
-  describe("Restart nested supervised transactional Typed Actor") {
-/*
-    it("map should rollback state for stateful server in case of failure") {
-      val stateful = conf.getInstance(classOf[TransactionalTypedActor])
-      stateful.init
-      stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state
-
-      val nested = conf.getInstance(classOf[NestedTransactionalTypedActor])
-      nested.init
-      nested.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state
-
-      val failer = conf.getInstance(classOf[TypedActorFailer])
-      try {
-        stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer)
-
-        fail("should have thrown an exception")
-      } catch { case e => {} }
-      stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init")
-
-      nested.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init")
-    }
-
-    it("vector should rollback state for stateful server in case of failure") {
-      val stateful = conf.getInstance(classOf[TransactionalTypedActor])
-      stateful.init
-      stateful.setVectorState("init") // set init state
-
-      val nested = conf.getInstance(classOf[NestedTransactionalTypedActor])
-      nested.init
-      nested.setVectorState("init") // set init state
-
-      val failer = conf.getInstance(classOf[TypedActorFailer])
-      try {
-        stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer)
-
-        fail("should have thrown an exception")
-      } catch { case e => {} }
-      stateful.getVectorState should equal("init")
-
-      nested.getVectorState should equal("init")
-    }
-
-    it("ref should rollback state for stateful server in case of failure") {
-      val stateful = conf.getInstance(classOf[TransactionalTypedActor])
-      stateful.init
-      val nested = conf.getInstance(classOf[NestedTransactionalTypedActor])
-      nested.init
-      stateful.setRefState("init") // set init state
-
-      nested.setRefState("init") // set init state
-
-      val failer = conf.getInstance(classOf[TypedActorFailer])
-      try {
-        stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", nested, failer)
-
-        fail("should have thrown an exception")
-      } catch { case e => {} }
-      stateful.getRefState should equal("init")
-
-      nested.getRefState should equal("init")
-    }
-  */
-  }
-}
diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/RestartTransactionalTypedActorSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/RestartTransactionalTypedActorSpec.scala
deleted file mode 100644
index 56b1e6ec5b..0000000000
--- a/akka-typed-actors/src/test/scala/actor/typed-actor/RestartTransactionalTypedActorSpec.scala
+++ /dev/null
@@ -1,92 +0,0 @@
-/**
- * Copyright (C) 2009-2010 Scalable Solutions AB
- */
-
-package se.scalablesolutions.akka.actor
-
-import org.scalatest.Spec
-import org.scalatest.Assertions
-import org.scalatest.matchers.ShouldMatchers
-import org.scalatest.BeforeAndAfterAll
-import org.scalatest.junit.JUnitRunner
-import org.junit.runner.RunWith
-
-import se.scalablesolutions.akka.config.Config
-import se.scalablesolutions.akka.config._
-import se.scalablesolutions.akka.config.TypedActorConfigurator
-import se.scalablesolutions.akka.config.JavaConfig._
-import se.scalablesolutions.akka.actor._
-
-@RunWith(classOf[JUnitRunner])
-class RestartTransactionalTypedActorSpec extends
-  Spec with
-  ShouldMatchers with
-  BeforeAndAfterAll {
-
-  private val conf = new TypedActorConfigurator
-  private var messageLog = ""
-
-  def before {
-    Config.config
-    conf.configure(
-      new RestartStrategy(new AllForOne, 3, 5000, List(classOf[Exception]).toArray),
-      List(
-        new Component(
-          classOf[TransactionalTypedActor],
-          new LifeCycle(new Temporary),
-          10000),
-        new Component(
-          classOf[TypedActorFailer],
-          new LifeCycle(new Temporary),
-          10000)
-      ).toArray).supervise
-  }
-
-  def after {
-    conf.stop
-    ActorRegistry.shutdownAll
-  }
-
-  describe("Restart supervised transactional Typed Actor ") {
-/*
-    it("map should rollback state for stateful server in case of failure") {
-      before
-      val stateful = conf.getInstance(classOf[TransactionalTypedActor])
-      stateful.init
-      stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init")
-      val failer = conf.getInstance(classOf[TypedActorFailer])
-      try {
-        stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer)
-        fail("should have thrown an exception")
-      } catch { case e => {} }
-      stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init")
-      after
-    }
-
-    it("vector should rollback state for stateful server in case of failure") {
-      before
-      val stateful = conf.getInstance(classOf[TransactionalTypedActor])
-      stateful.init
-      stateful.setVectorState("init") // set init state
-      val failer = conf.getInstance(classOf[TypedActorFailer])
-      try {
-        stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer)
-        fail("should have thrown an exception")
-      } catch { case e => {} }
-      stateful.getVectorState should equal("init")
-      after
-    }
-
-    it("ref should rollback state for stateful server in case of failure") {
-      val stateful = conf.getInstance(classOf[TransactionalTypedActor])
-      stateful.init
-      stateful.setRefState("init") // set init state
-      val failer = conf.getInstance(classOf[TypedActorFailer])
-      try {
-        stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer)
-        fail("should have thrown an exception")
-      } catch { case e => {} }
-      stateful.getRefState should equal("init")
-    }
-*/ }
-}
diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/TransactionalTypedActorSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/TransactionalTypedActorSpec.scala
deleted file mode 100644
index b55f52c875..0000000000
--- a/akka-typed-actors/src/test/scala/actor/typed-actor/TransactionalTypedActorSpec.scala
+++ /dev/null
@@ -1,83 +0,0 @@
-/**
- * Copyright (C) 2009-2010 Scalable Solutions AB
- */
-
-package se.scalablesolutions.akka.actor
-
-import org.scalatest.Spec
-import org.scalatest.Assertions
-import org.scalatest.matchers.ShouldMatchers
-import org.scalatest.BeforeAndAfterAll
-import org.scalatest.junit.JUnitRunner
-import org.junit.runner.RunWith
-
-import se.scalablesolutions.akka.actor._
-
-@RunWith(classOf[JUnitRunner])
-class TransactionalTypedActorSpec extends
-  Spec with
-  ShouldMatchers with
-  BeforeAndAfterAll {
-
-  private var messageLog = ""
-
-  override def afterAll {
-// ActorRegistry.shutdownAll
-  }
-
-  describe("Declaratively supervised transactional in-memory Typed Actor ") {
-    it("map should not rollback state for stateful server in case of success") {
-      val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl])
-      stateful.setMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init")
-      stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state")
-      stateful.getMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess") should equal("new state")
-    }
-
-    it("map should rollback state for stateful server in case of failure") {
-      val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl])
-      stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init")
-      val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl])
-      try {
-        stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer)
-        fail("should have thrown an exception")
-      } catch { case e => {} }
-      stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init")
-    }
-
-    it("vector should not rollback state for stateful server in case of success") {
-      val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl])
-      stateful.setVectorState("init") // set init state
-      stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state")
-      stateful.getVectorState should equal("new state")
-    }
-
-    it("vector should rollback state for stateful server in case of failure") {
-      val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl])
-      stateful.setVectorState("init") // set init state
-      val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl])
-      try {
-        stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer)
-        fail("should have thrown an exception")
-      } catch { case e => {} }
-      stateful.getVectorState should equal("init")
-    }
-
-    it("ref should not rollback state for stateful server in case of success") {
-      val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl])
-      stateful.setRefState("init") // set init state
-      stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state")
-      stateful.getRefState should equal("new state")
-    }
-
-    it("ref should rollback state for stateful server in case of failure") {
-      val stateful = TypedActor.newInstance(classOf[TransactionalTypedActor], classOf[TransactionalTypedActorImpl])
-      stateful.setRefState("init") // set init state
-      val failer = TypedActor.newInstance(classOf[TypedActorFailer], classOf[TypedActorFailerImpl])
-      try {
-        stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer)
-        fail("should have thrown an exception")
-      } catch { case e => {} }
-      stateful.getRefState should equal("init")
-    }
-  }
-}
diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorContextSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorContextSpec.scala
deleted file mode 100644
index adc0879c84..0000000000
--- a/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorContextSpec.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Copyright (C) 2009-2010 Scalable Solutions AB
- */
-
-package se.scalablesolutions.akka.actor
-
-import org.scalatest.Spec
-import org.scalatest.Assertions
-import org.scalatest.matchers.ShouldMatchers
-import org.scalatest.BeforeAndAfterAll
-import org.scalatest.junit.JUnitRunner
-import org.junit.runner.RunWith
-
-import se.scalablesolutions.akka.dispatch.DefaultCompletableFuture;
-
-@RunWith(classOf[JUnitRunner])
-class TypedActorContextSpec extends
-  Spec with
-  ShouldMatchers with
-  BeforeAndAfterAll {
-
-  describe("TypedActorContext") {
-    it("context.sender should return the sender TypedActor reference") {
-      val pojo = TypedActor.newInstance(classOf[SimpleJavaPojo], classOf[SimpleJavaPojoImpl])
-      val pojoCaller = TypedActor.newInstance(classOf[SimpleJavaPojoCaller], classOf[SimpleJavaPojoCallerImpl])
-      pojoCaller.setPojo(pojo)
-      pojoCaller.getSenderFromSimpleJavaPojo.isInstanceOf[Option[_]] should equal (true)
-      pojoCaller.getSenderFromSimpleJavaPojo.asInstanceOf[Option[_]].isDefined should equal (true)
-      pojoCaller.getSenderFromSimpleJavaPojo.asInstanceOf[Option[_]].get should equal (pojoCaller)
-    }
-    it("context.senderFuture should return the senderFuture TypedActor reference") {
-      val pojo = TypedActor.newInstance(classOf[SimpleJavaPojo], classOf[SimpleJavaPojoImpl])
-      val pojoCaller = TypedActor.newInstance(classOf[SimpleJavaPojoCaller], classOf[SimpleJavaPojoCallerImpl])
-      pojoCaller.setPojo(pojo)
-      pojoCaller.getSenderFutureFromSimpleJavaPojo.getClass.getName should equal (classOf[DefaultCompletableFuture[_]].getName)
-    }
-  }
-}
diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorGuiceConfiguratorSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorGuiceConfiguratorSpec.scala
deleted file mode 100644
index d076ec52cf..0000000000
--- a/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorGuiceConfiguratorSpec.scala
+++ /dev/null
@@ -1,131 +0,0 @@
-/**
- * Copyright (C) 2009-2010 Scalable Solutions AB
- */
-
-package se.scalablesolutions.akka.actor
-
-import com.google.inject.AbstractModule
-import com.google.inject.Scopes
-
-import org.scalatest.Spec
-import org.scalatest.Assertions
-import org.scalatest.matchers.ShouldMatchers
-import org.scalatest.BeforeAndAfterAll
-import org.scalatest.junit.JUnitRunner
-import org.junit.runner.RunWith
-
-import se.scalablesolutions.akka.config.Config
-import se.scalablesolutions.akka.config.TypedActorConfigurator
-import se.scalablesolutions.akka.config.JavaConfig._
-import se.scalablesolutions.akka.dispatch._
-import se.scalablesolutions.akka.dispatch.FutureTimeoutException
-
-@RunWith(classOf[JUnitRunner])
-class TypedActorGuiceConfiguratorSpec extends
-  Spec with
-  ShouldMatchers with
-  BeforeAndAfterAll {
-
-  private val conf = new TypedActorConfigurator
-  private var messageLog = ""
-
-  override def beforeAll {
-    Config.config
-    val dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher("test")
-
-    conf.addExternalGuiceModule(new AbstractModule {
-      def configure = bind(classOf[Ext]).to(classOf[ExtImpl]).in(Scopes.SINGLETON)
-    }).configure(
-      new RestartStrategy(new AllForOne, 3, 5000, List(classOf[Exception]).toArray),
-      List(
-        new Component(
-          classOf[Foo],
-          classOf[FooImpl],
-          new LifeCycle(new Permanent),
-          1000,
-          dispatcher),
-        new Component(
-          classOf[Bar],
-          classOf[BarImpl],
-          new LifeCycle(new Permanent),
-          1000,
-          dispatcher)
-      ).toArray).inject.supervise
-
-  }
-
-  override def afterAll = conf.stop
-
-  describe("TypedActorGuiceConfigurator") {
-/*
-    it("should inject typed actor using guice") {
-      messageLog = ""
-      val foo = conf.getInstance(classOf[Foo])
-      val bar = conf.getInstance(classOf[Bar])
-      bar should equal(foo.getBar)
-    }
-
-    it("should inject external dependency using guice") {
-      messageLog = ""
-      val bar = conf.getInstance(classOf[Bar])
-      val ext = conf.getExternalDependency(classOf[Ext])
-      ext.toString should equal(bar.getExt.toString)
-    }
-
-    it("should lookup non-supervised instance") {
-      try {
-        val str = conf.getInstance(classOf[String])
-        fail("exception should have been thrown")
-      } catch {
-        case e: Exception =>
-          classOf[IllegalStateException] should equal(e.getClass)
-      }
-    }
-
-    it("should be able to invoke typed actor") {
-      messageLog = ""
-      val foo = conf.getInstance(classOf[Foo])
-      messageLog += foo.foo("foo ")
-      foo.bar("bar ")
-      messageLog += "before_bar "
-      Thread.sleep(500)
-      messageLog should equal("foo return_foo before_bar ")
-    }
-
-    it("should be able to invoke typed actor's invocation") {
-      messageLog = ""
-      val foo = conf.getInstance(classOf[Foo])
-      val bar = conf.getInstance(classOf[Bar])
-      messageLog += foo.foo("foo ")
-      foo.bar("bar ")
-      messageLog += "before_bar "
-      Thread.sleep(500)
-      messageLog should equal("foo return_foo before_bar ")
-    }
-
-    it("should throw FutureTimeoutException on time-out") {
-      messageLog = ""
-      val foo = conf.getInstance(classOf[Foo])
-      try {
-        foo.longRunning
-        fail("exception should have been thrown")
-      } catch {
-        case e: FutureTimeoutException =>
-          classOf[FutureTimeoutException] should equal(e.getClass)
-      }
-    }
-
-    it("should propagate exception") {
-      messageLog = ""
-      val foo = conf.getInstance(classOf[Foo])
-      try {
-        foo.throwsException
-        fail("exception should have been thrown")
-      } catch {
-        case e: RuntimeException =>
-          classOf[RuntimeException] should equal(e.getClass)
-      }
-    }
-  */
-  }
-}
diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorLifecycleSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorLifecycleSpec.scala
deleted file mode 100644
index 10fc40493b..0000000000
--- a/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorLifecycleSpec.scala
+++ /dev/null
@@ -1,169 +0,0 @@
-package se.scalablesolutions.akka.actor
-
-import org.junit.runner.RunWith
-import org.scalatest.{BeforeAndAfterAll, Spec}
-import org.scalatest.junit.JUnitRunner
-import org.scalatest.matchers.ShouldMatchers
-
-import se.scalablesolutions.akka.actor.TypedActor._
-
-import se.scalablesolutions.akka.config.{OneForOneStrategy, TypedActorConfigurator}
-import se.scalablesolutions.akka.config.JavaConfig._
-
-import java.util.concurrent.CountDownLatch
-
-/**
- * @author Martin Krasser
- */
-@RunWith(classOf[JUnitRunner])
-class TypedActorLifecycleSpec extends Spec with ShouldMatchers with BeforeAndAfterAll {
-  var conf1: TypedActorConfigurator = _
-  var conf2: TypedActorConfigurator = _
-
-  override protected def beforeAll() = {
-    val strategy = new RestartStrategy(new AllForOne(), 3, 1000, Array(classOf[Exception]))
-    val comp3 = new Component(classOf[SamplePojo], classOf[SamplePojoImpl], new LifeCycle(new Permanent()), 1000)
-    val comp4 = new Component(classOf[SamplePojo], classOf[SamplePojoImpl], new LifeCycle(new Temporary()), 1000)
-    conf1 = new TypedActorConfigurator().configure(strategy, Array(comp3)).supervise
-    conf2 = new TypedActorConfigurator().configure(strategy, Array(comp4)).supervise
-  }
-
-  override protected def afterAll() = {
-    conf1.stop
-    conf2.stop
-  }
-
-  describe("TypedActor lifecycle management") {
-    it("should restart supervised, non-annotated typed actor on failure") {
-      SamplePojoImpl.reset
-      val obj = conf1.getInstance[SamplePojo](classOf[SamplePojo])
-      val cdl = new CountDownLatch(2)
-      SamplePojoImpl.latch = cdl
-      assert(AspectInitRegistry.initFor(obj) ne null)
-      try {
-        obj.fail
-        fail("expected exception not thrown")
-      } catch {
-        case e: RuntimeException => {
-          cdl.await
-          assert(SamplePojoImpl._pre)
-          assert(SamplePojoImpl._post)
-          assert(!SamplePojoImpl._down)
-// assert(AspectInitRegistry.initFor(obj) ne null)
-        }
-      }
-    }
-
-    it("should shutdown supervised, non-annotated typed actor on failure") {
-      SamplePojoImpl.reset
-      val obj = conf2.getInstance[SamplePojo](classOf[SamplePojo])
-      val cdl = new CountDownLatch(1)
-      SamplePojoImpl.latch = cdl
-      assert(AspectInitRegistry.initFor(obj) ne null)
-      try {
-        obj.fail
-        fail("expected exception not thrown")
-      } catch {
-        case e: RuntimeException => {
-          cdl.await
-          assert(!SamplePojoImpl._pre)
-          assert(!SamplePojoImpl._post)
-          assert(SamplePojoImpl._down)
-          // assert(AspectInitRegistry.initFor(obj) eq null)
-        }
-      }
-    }
-
-    it("should shutdown non-supervised, non-initialized typed actor on TypedActor.stop") {
-      SamplePojoImpl.reset
-      val obj = TypedActor.newInstance(classOf[SamplePojo], classOf[SamplePojoImpl])
-      TypedActor.stop(obj)
-      assert(!SamplePojoImpl._pre)
-      assert(!SamplePojoImpl._post)
-      assert(SamplePojoImpl._down)
-    }
-
-    it("both preRestart and postRestart methods should be invoked when an actor is restarted") {
-      SamplePojoImpl.reset
-      val pojo = TypedActor.newInstance(classOf[SimpleJavaPojo], classOf[SimpleJavaPojoImpl])
-      val supervisor = TypedActor.newInstance(classOf[SimpleJavaPojo], classOf[SimpleJavaPojoImpl])
-      link(supervisor, pojo, new OneForOneStrategy(3, 2000), Array(classOf[Throwable]))
-      pojo.throwException
-      Thread.sleep(500)
-      SimpleJavaPojoImpl._pre should be(true)
-      SimpleJavaPojoImpl._post should be(true)
-    }
-
-    /*
-    it("should shutdown non-supervised, annotated typed actor on TypedActor.stop") {
-      val obj = TypedActor.newInstance(classOf[SamplePojoAnnotated])
-      assert(AspectInitRegistry.initFor(obj) ne null)
-      assert("hello akka" === obj.greet("akka"))
-      TypedActor.stop(obj)
-      assert(AspectInitRegistry.initFor(obj) eq null)
-      assert(!obj.pre)
-      assert(!obj.post)
-      assert(obj.down)
-      try {
-        obj.greet("akka")
-        fail("access to stopped typed actor")
-      } catch {
-        case e: Exception => {}
-      }
-    }
-
-    it("should shutdown non-supervised, annotated typed actor on ActorRegistry.shutdownAll") {
-      val obj = TypedActor.newInstance(classOf[SamplePojoAnnotated])
-      assert(AspectInitRegistry.initFor(obj) ne null)
-      assert("hello akka" === obj.greet("akka"))
-      ActorRegistry.shutdownAll
-      assert(AspectInitRegistry.initFor(obj) eq null)
-      assert(!obj.pre)
-      assert(!obj.post)
-      assert(obj.down)
-      try {
-        obj.greet("akka")
-        fail("access to stopped typed actor")
-      } catch {
-        case e: Exception => { }
-      }
-    }
-
-    it("should restart supervised, annotated typed actor on failure") {
-      val obj = conf1.getInstance[SamplePojoAnnotated](classOf[SamplePojoAnnotated])
-      val cdl = obj.newCountdownLatch(2)
-      assert(AspectInitRegistry.initFor(obj) ne null)
-      try {
-        obj.fail
-        fail("expected exception not thrown")
-      } catch {
-        case e: RuntimeException => {
-          cdl.await
-          assert(obj.pre)
-          assert(obj.post)
-          assert(!obj.down)
-          assert(AspectInitRegistry.initFor(obj) ne null)
-        }
-      }
-    }
-
-    it("should shutdown supervised, annotated typed actor on failure") {
-      val obj = conf2.getInstance[SamplePojoAnnotated](classOf[SamplePojoAnnotated])
-      val cdl = obj.newCountdownLatch(1)
-      assert(AspectInitRegistry.initFor(obj) ne null)
-      try {
-        obj.fail
-        fail("expected exception not thrown")
-      } catch {
-        case e: RuntimeException => {
-          cdl.await
-          assert(!obj.pre)
-          assert(!obj.post)
-          assert(obj.down)
-          assert(AspectInitRegistry.initFor(obj) eq null)
-        }
-      }
-    }
-    */
-  }
-}
diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorSpec.scala
deleted file mode 100644
index 7de0a8f5df..0000000000
--- a/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorSpec.scala
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Copyright (C) 2009-2010 Scalable Solutions AB
- */
-
-package se.scalablesolutions.akka.actor
-
-import org.scalatest.Spec
-import org.scalatest.Assertions
-import org.scalatest.matchers.ShouldMatchers
-import org.scalatest.BeforeAndAfterAll
-import org.scalatest.junit.JUnitRunner
-import org.junit.runner.RunWith
-
-import se.scalablesolutions.akka.dispatch.DefaultCompletableFuture;
-
-@RunWith(classOf[JUnitRunner])
-class TypedActorSpec extends
-  Spec with
-  ShouldMatchers with
-  BeforeAndAfterAll {
-
-  describe("TypedActor") {
-    it("should resolve Future return from method defined to return a Future") {
-      val pojo = TypedActor.newInstance(classOf[SimpleJavaPojo], classOf[SimpleJavaPojoImpl])
-      val future = pojo.square(10)
-      future.await
-      future.result.isDefined should equal (true)
-      future.result.get should equal (100)
-    }
-  }
-}
diff --git a/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorUtilFunctionsSpec.scala b/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorUtilFunctionsSpec.scala
deleted file mode 100644
index 48424f3c17..0000000000
--- a/akka-typed-actors/src/test/scala/actor/typed-actor/TypedActorUtilFunctionsSpec.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package se.scalablesolutions.akka.actor
-
-import org.scalatest.Suite
-import org.junit.runner.RunWith
-import org.scalatest.junit.JUnitRunner
-import org.scalatest.matchers.MustMatchers
-import org.junit.{Before, After, Test}
-import java.util.concurrent.{ CountDownLatch, TimeUnit }
-
-@RunWith(classOf[JUnitRunner])
-class ActorObjectUtilFunctionsSpec extends junit.framework.TestCase with Suite with MustMatchers {
-  import Actor._
-  @Test def testSpawn = {
-    val latch = new CountDownLatch(1)
-
-    spawn {
-      latch.countDown
-    }
-
-    val done = latch.await(10,TimeUnit.SECONDS)
-    done must be (true)
-  }
-}
diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala
index e6c243c201..59e190114f 100644
--- a/project/build/AkkaProject.scala
+++ b/project/build/AkkaProject.scala
@@ -216,19 +216,19 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
   // Subprojects
   // -------------------------------------------------------------------------------------------------------------------
 
-  lazy val akka_actors = project("akka-actors", "akka-actors", new AkkaCoreProject(_))
-  lazy val akka_typed_actors = project("akka-typed-actors", "akka-typed-actors", new AkkaCoreProject(_), akka_actors)
-  lazy val akka_core = project("akka-core", "akka-core", new AkkaCoreProject(_), akka_typed_actors)
-  lazy val akka_amqp = project("akka-amqp", "akka-amqp", new AkkaAMQPProject(_), akka_core)
-  lazy val akka_http = project("akka-http", "akka-http", new AkkaHttpProject(_), akka_core, akka_camel)
-  lazy val akka_camel = project("akka-camel", "akka-camel", new AkkaCamelProject(_), akka_core)
-  lazy val akka_persistence = project("akka-persistence", "akka-persistence", new AkkaPersistenceParentProject(_))
-  lazy val akka_spring = project("akka-spring", "akka-spring", new AkkaSpringProject(_), akka_core, akka_camel)
-  lazy val akka_jta = project("akka-jta", "akka-jta", new AkkaJTAProject(_), akka_core)
-  lazy val akka_kernel = project("akka-kernel", "akka-kernel", new AkkaKernelProject(_),
-    akka_core, akka_http, akka_spring, akka_camel, akka_persistence, akka_amqp)
-  lazy val akka_osgi = project("akka-osgi", "akka-osgi", new AkkaOSGiParentProject(_))
-  lazy val akka_samples = project("akka-samples", "akka-samples", new AkkaSamplesParentProject(_))
+  lazy val akka_actor = project("akka-actor", "akka-actor", new AkkaActorProject(_))
+  lazy val akka_typed_actor = project("akka-typed-actor", "akka-typed-actor", new AkkaTypedActorProject(_), akka_actor)
+  lazy val akka_remote = project("akka-remote", "akka-remote", new AkkaRemoteProject(_), akka_typed_actor)
+  lazy val akka_amqp = project("akka-amqp", "akka-amqp", new AkkaAMQPProject(_), akka_remote)
+  lazy val akka_http = project("akka-http", "akka-http", new AkkaHttpProject(_), akka_remote, akka_camel)
+  lazy val akka_camel = project("akka-camel", "akka-camel", new AkkaCamelProject(_), akka_remote)
+  lazy val akka_persistence = project("akka-persistence", "akka-persistence", new AkkaPersistenceParentProject(_))
+  lazy val akka_spring = project("akka-spring", "akka-spring", new AkkaSpringProject(_), akka_remote, akka_camel)
+  lazy val akka_jta = project("akka-jta", "akka-jta", new AkkaJTAProject(_), akka_remote)
+  lazy val akka_kernel = project("akka-kernel", "akka-kernel", new AkkaKernelProject(_),
+    akka_remote, akka_http, akka_spring, akka_camel, akka_persistence, akka_amqp)
+  lazy val akka_osgi = project("akka-osgi", "akka-osgi", new AkkaOSGiParentProject(_))
+  lazy val akka_samples = project("akka-samples", "akka-samples", new AkkaSamplesParentProject(_))
 
   // -------------------------------------------------------------------------------------------------------------------
   // Miscellaneous
@@ -253,7 +253,9 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
         .map("lib_managed/scala_%s/compile/".format(buildScalaVersion) + _.getName)
         .mkString(" ") +
         " scala-library.jar" +
-        " dist/akka-core_%s-%s.jar".format(buildScalaVersion, version) +
+        " dist/akka-actor_%s-%s.jar".format(buildScalaVersion, version) +
+        " dist/akka-typed-actor_%s-%s.jar".format(buildScalaVersion, version) +
+        " dist/akka-remote_%s-%s.jar".format(buildScalaVersion, version) +
         " dist/akka-http_%s-%s.jar".format(buildScalaVersion, version) +
         " dist/akka-camel_%s-%s.jar".format(buildScalaVersion, version) +
         " dist/akka-amqp_%s-%s.jar".format(buildScalaVersion, version) +
@@ -328,10 +330,10 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
   } dependsOn(dist) describedAs("Run mvn install for artifacts in dist.")
 
   // -------------------------------------------------------------------------------------------------------------------
-  // akka-actors subproject
+  // akka-actor subproject
   // -------------------------------------------------------------------------------------------------------------------
 
-  class AkkaActorsProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
+  class AkkaActorProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
     val configgy = Dependencies.configgy
     val hawtdispatch = Dependencies.hawtdispatch
     val multiverse = Dependencies.multiverse
@@ -345,7 +347,11 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
     val scalatest = Dependencies.scalatest
   }
 
-  class AkkaTypedActorsProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
+  // -------------------------------------------------------------------------------------------------------------------
+  // akka-typed-actor subproject
+  // -------------------------------------------------------------------------------------------------------------------
+
+  class AkkaTypedActorProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
    val aopalliance = Dependencies.aopalliance
    val werkz = Dependencies.werkz
    val werkz_core = Dependencies.werkz_core
@@ -356,10 +362,10 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
   }
 
   // -------------------------------------------------------------------------------------------------------------------
-  // akka-core subproject
+  // akka-remote subproject
   // -------------------------------------------------------------------------------------------------------------------
 
-  class AkkaCoreProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
+  class AkkaRemoteProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
    val commons_codec = Dependencies.commons_codec
    val commons_io = Dependencies.commons_io
    val dispatch_http = Dependencies.dispatch_http
@@ -439,7 +445,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
 
   class AkkaPersistenceParentProject(info: ProjectInfo) extends ParentProject(info) {
     lazy val akka_persistence_common = project("akka-persistence-common", "akka-persistence-common",
-      new AkkaPersistenceCommonProject(_), akka_core)
+      new AkkaPersistenceCommonProject(_), akka_remote)
     lazy val akka_persistence_redis = project("akka-persistence-redis", "akka-persistence-redis",
       new AkkaRedisProject(_), akka_persistence_common)
     lazy val akka_persistence_mongo = project("akka-persistence-mongo", "akka-persistence-mongo",
@@ -534,7 +540,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
     lazy val akka_osgi_dependencies_bundle = project("akka-osgi-dependencies-bundle", "akka-osgi-dependencies-bundle",
       new AkkaOSGiDependenciesBundleProject(_), akka_kernel, akka_jta) // akka_kernel does not depend on akka_jta (why?) therefore we list akka_jta here
     lazy val akka_osgi_assembly = project("akka-osgi-assembly", "akka-osgi-assembly",
-      new AkkaOSGiAssemblyProject(_), akka_osgi_dependencies_bundle, akka_core, akka_amqp, akka_http,
+      new AkkaOSGiAssemblyProject(_), akka_osgi_dependencies_bundle, akka_remote, akka_amqp, akka_http,
       akka_camel, akka_spring, akka_jta, akka_persistence.akka_persistence_common,
       akka_persistence.akka_persistence_redis, akka_persistence.akka_persistence_mongo,
       akka_persistence.akka_persistence_cassandra)
@@ -695,7 +701,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
 
   class AkkaSamplesParentProject(info: ProjectInfo) extends ParentProject(info) {
     lazy val akka_sample_ants = project("akka-sample-ants", "akka-sample-ants",
-      new AkkaSampleAntsProject(_), akka_core)
+      new AkkaSampleAntsProject(_), akka_remote)
     lazy val akka_sample_chat = project("akka-sample-chat", "akka-sample-chat",
       new AkkaSampleChatProject(_), akka_kernel)
     lazy val akka_sample_pubsub = project("akka-sample-pubsub", "akka-sample-pubsub",
@@ -715,7 +721,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
     lazy val akka_sample_remote = project("akka-sample-remote", "akka-sample-remote",
       new AkkaSampleRemoteProject(_), akka_kernel)
     lazy val akka_sample_osgi = project("akka-sample-osgi", "akka-sample-osgi",
-      new AkkaSampleOSGiProject(_), akka_core)
+      new AkkaSampleOSGiProject(_), akka_remote)
   }
 
   // -------------------------------------------------------------------------------------------------------------------

From fc7068282dc84f59e98031f6497dc90fd659ed8c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jonas=20Bon=C3=A9r?=
Date: Sat, 28 Aug 2010 19:26:56 +0200
Subject: [PATCH 7/8] renamed cassandra storage-conf.xml

---
 ...nf.xml => cassandra-akka-storage-conf.xml} |  0
 config/multiverse-properties-reference.txt    | 38 -------------------
 2 files changed, 38 deletions(-)
 rename config/{storage-conf.xml => cassandra-akka-storage-conf.xml} (100%)
 delete mode 100644 config/multiverse-properties-reference.txt

diff --git a/config/storage-conf.xml b/config/cassandra-akka-storage-conf.xml
similarity index 100%
rename from config/storage-conf.xml
rename to config/cassandra-akka-storage-conf.xml
diff --git a/config/multiverse-properties-reference.txt b/config/multiverse-properties-reference.txt
deleted file mode 100644
index 7503cec167..0000000000
--- a/config/multiverse-properties-reference.txt
+++ /dev/null
@@ -1,38 +0,0 @@
-# ============================================
-# ===== Multiverse JVM Options Reference =====
-# ============================================
-
-# All these properties can be set on the commandline using '-D='
-
-# a flag that is used to enable sanity checks.
-# default=true
-org.multiverse.MuliverseConstants.sanityChecks=
-
-# a flag that enables to dump the bytecode of the instrumented classes to the tmp directory
-# This is a very interesting feature for debugging of the instrumentation
-# default=false
-org.multiverse.stms.alpha.instrumentation.MultiverseJavaAgent.dumpBytecode=
-
-# a string containing the full path to a static no-arg factory method that is used to create the global stm.
-# default = org.multiverse.stms.alpha.AlphaStm.createDebug
-org.multiverse.api.GlobalStmInstance.factorymethod=
-
-# a flag that enables the reuse of the FailedToObtainLocksException exception instance
-# default = true
-org.multiverse.api.exceptions.FailedToObtainLocksException.reuse=
-
-# a flag that enables the reuse of the LoadLockedException exception instance
-# default = true
-org.multiverse.api.exceptions.LoadLockedException.reuse=
-
-# a flag that enables the reuse of the LoadLockedException exception instance
-# default = true
-org.multiverse.api.exceptions.LoadTooOldVersionException.reuse=
-
-# a flag that enables the reuse of the RetryError exception instance
-# default = true
-org.multiverse.api.exceptions.RetryError.reuse=
-
-# a flag that enables the reuse of the WriteConflictException exception instance
-# default = true
-org.multiverse.api.exceptions.WriteConflictException.reuse=

From ee47eaee5bd1891f525c075a72d3a2353b1eb5f3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jonas=20Bon=C3=A9r?=
Date: Sat, 28 Aug 2010 19:27:42 +0200
Subject: [PATCH 8/8] removed trailing whitespace

---
 akka-actor/src/main/scala/actor/Actor.scala   |  6 +--
 .../src/main/scala/actor/ActorRef.scala       | 32 +++++++-------
 .../src/main/scala/actor/ActorRegistry.scala  |  2 +-
 .../src/main/scala/actor/Supervisor.scala     |  2 +-
 akka-actor/src/main/scala/config/Config.scala |  2 +-
 .../scala/dataflow/DataFlowVariable.scala     |  2 +-
 .../src/main/scala/stm/global/GlobalStm.scala |  2 +-
 .../src/main/scala/stm/local/LocalStm.scala   |  2 +-
 .../main/scala/util/ReflectiveAccess.scala    | 44 +++++++++----------
 .../test/scala/dataflow/DataFlowSpec.scala    |  4 +-
 .../src/main/scala/remote/RemoteClient.scala  | 28 ++----------
 .../serialization/SerializationProtocol.scala |  6 +--
 .../src/main/scala/actor/TypedActor.scala     | 14 +++---
 13 files changed, 62 insertions(+), 84 deletions(-)

diff --git a/akka-actor/src/main/scala/actor/Actor.scala b/akka-actor/src/main/scala/actor/Actor.scala
index 24a04c3eb3..6a5365f3fb 100644
--- a/akka-actor/src/main/scala/actor/Actor.scala
+++ b/akka-actor/src/main/scala/actor/Actor.scala
@@ -506,7 +506,7 @@ private[actor] class AnyOptionAsTypedOption(anyOption: Option[Any]) {
  * Marker interface for proxyable actors (such as typed actor).
  *
  * @author Jonas Bonér
- */ 
+ */
 trait Proxyable {
   private[actor] def swapProxiedActor(newInstance: Actor)
 }
@@ -515,9 +515,9 @@ trait Proxyable {
  * Represents the different Actor types.
  *
  * @author Jonas Bonér
- */ 
+ */
 sealed trait ActorType
 object ActorType {
   case object ScalaActor extends ActorType
-  case object TypedActor extends ActorType 
+  case object TypedActor extends ActorType
 }
diff --git a/akka-actor/src/main/scala/actor/ActorRef.scala b/akka-actor/src/main/scala/actor/ActorRef.scala
index cec8d18d84..7f2c5c4c01 100644
--- a/akka-actor/src/main/scala/actor/ActorRef.scala
+++ b/akka-actor/src/main/scala/actor/ActorRef.scala
@@ -62,9 +62,9 @@ import scala.reflect.BeanProperty
  *
  * @author Jonas Bonér
  */
-trait ActorRef extends 
-  ActorRefShared with 
-  TransactionManagement with 
+trait ActorRef extends
+  ActorRefShared with
+  TransactionManagement with
   Logging with
   java.lang.Comparable[ActorRef] { scalaRef: ScalaActorRef =>
 
@@ -78,7 +78,7 @@ trait ActorRef extends
   @volatile protected[akka] var startOnCreation = false
   @volatile protected[akka] var registeredInRemoteNodeDuringSerialization = false
   protected[akka] val guard = new ReentrantGuard
-  
+
   /**
    * User overridable callback/setting.
    *
@@ -746,7 +746,7 @@ class LocalActorRef private[akka](
     ensureRemotingEnabled
     if (!isRunning || isBeingRestarted) makeRemote(new InetSocketAddress(hostname, port))
     else throw new ActorInitializationException(
-      "Can't make a running actor remote. Make sure you call 'makeRemote' before 'start'.") 
+      "Can't make a running actor remote. Make sure you call 'makeRemote' before 'start'.")
   }
 
   /**
@@ -759,7 +759,7 @@ class LocalActorRef private[akka](
       RemoteClientModule.register(address, uuid)
       homeAddress = (RemoteServerModule.HOSTNAME, RemoteServerModule.PORT)
     } else throw new ActorInitializationException(
-      "Can't make a running actor remote. Make sure you call 'makeRemote' before 'start'.") 
+      "Can't make a running actor remote. Make sure you call 'makeRemote' before 'start'.")
   }
 
   /**
@@ -830,10 +830,10 @@ class LocalActorRef private[akka](
       actor.shutdown
       ActorRegistry.unregister(this)
       if (isRemotingEnabled) {
-        remoteAddress.foreach { address => 
+        remoteAddress.foreach { address =>
           RemoteClientModule.unregister(address, uuid)
         }
-        RemoteServerModule.unregister(this) 
+        RemoteServerModule.unregister(this)
       }
       nullOutActorRefReferencesFor(actorInstance.get)
     } //else if (isBeingRestarted) throw new ActorKilledException("Actor [" + toString + "] is being restarted.")
@@ -1137,7 +1137,7 @@ class LocalActorRef private[akka](
         freshActor.init
         freshActor.initTransactionalState
         actorInstance.set(freshActor)
-        if (failedActor.isInstanceOf[Proxyable]) 
+        if (failedActor.isInstanceOf[Proxyable])
           failedActor.asInstanceOf[Proxyable].swapProxiedActor(freshActor)
         Actor.log.debug("Invoking 'postRestart' for new actor instance [%s].", id)
         freshActor.postRestart(reason)
@@ -1357,16 +1357,16 @@ object RemoteActorSystemMessage {
  * @author Jonas Bonér
  */
 private[akka] case class RemoteActorRef private[akka] (
-  uuuid: String, 
-  val className: String, 
-  val hostname: String, 
-  val port: Int, 
-  _timeout: Long, 
+  uuuid: String,
+  val className: String,
+  val hostname: String,
+  val port: Int,
+  _timeout: Long,
   loader: Option[ClassLoader])
   extends ActorRef with ScalaActorRef {
 
   ensureRemotingEnabled
-  
+
   _uuid = uuuid
   timeout = _timeout
 
@@ -1480,7 +1480,7 @@ trait ScalaActorRef extends ActorRefShared { ref: ActorRef =>
    * upon restart, remote restart etc.
    */
   def id: String
-  
+
   def id_=(id: String): Unit
 
   /**
diff --git a/akka-actor/src/main/scala/actor/ActorRegistry.scala b/akka-actor/src/main/scala/actor/ActorRegistry.scala
index b14ff45f48..e8c38f2b76 100644
--- a/akka-actor/src/main/scala/actor/ActorRegistry.scala
+++ b/akka-actor/src/main/scala/actor/ActorRegistry.scala
@@ -16,7 +16,7 @@ import se.scalablesolutions.akka.util.ListenerManagement
 * Base trait for ActorRegistry events, allows listen to when an actor is added and removed from the ActorRegistry.
 *
 * @author Jonas Bonér
- */ 
+ */
 sealed trait ActorRegistryEvent
 case class ActorRegistered(actor: ActorRef) extends ActorRegistryEvent
 case class ActorUnregistered(actor: ActorRef) extends ActorRegistryEvent
diff --git a/akka-actor/src/main/scala/actor/Supervisor.scala b/akka-actor/src/main/scala/actor/Supervisor.scala
index 5493f35c56..1af351a33d 100644
--- a/akka-actor/src/main/scala/actor/Supervisor.scala
+++ b/akka-actor/src/main/scala/actor/Supervisor.scala
@@ -162,7 +162,7 @@ sealed class Supervisor private[akka] (
       _childActors.put(className, actorRef :: currentActors)
       actorRef.lifeCycle = Some(lifeCycle)
       supervisor.link(actorRef)
-      remoteAddress.foreach { address => 
+      remoteAddress.foreach { address =>
         RemoteServerModule.registerActor(
           new InetSocketAddress(address.hostname, address.port), actorRef.uuid, actorRef)
       }
diff --git a/akka-actor/src/main/scala/config/Config.scala b/akka-actor/src/main/scala/config/Config.scala
index bc16ce59c8..3b50d613c1 100644
--- a/akka-actor/src/main/scala/config/Config.scala
+++ b/akka-actor/src/main/scala/config/Config.scala
@@ -24,7 +24,7 @@ object ConfigLogger extends Logging
 *
 * @author Jonas Bonér
 */
-object Config { 
+object Config {
   val VERSION = "1.0-SNAPSHOT"
 
   // Set Multiverse options for max speed
diff --git a/akka-actor/src/main/scala/dataflow/DataFlowVariable.scala b/akka-actor/src/main/scala/dataflow/DataFlowVariable.scala
index f35d0f898e..bddfd8597f 100644
--- a/akka-actor/src/main/scala/dataflow/DataFlowVariable.scala
+++ b/akka-actor/src/main/scala/dataflow/DataFlowVariable.scala
@@ -137,4 +137,4 @@ object DataFlow {
  * @author Jonas Bonér
  */
 class DataFlowVariableException(msg: String) extends AkkaException(msg)
-}
\ No newline at end of file
+}
diff --git a/akka-actor/src/main/scala/stm/global/GlobalStm.scala b/akka-actor/src/main/scala/stm/global/GlobalStm.scala
index 76de9d5f57..f2dfce8a96 100644
--- a/akka-actor/src/main/scala/stm/global/GlobalStm.scala
+++ b/akka-actor/src/main/scala/stm/global/GlobalStm.scala
@@ -31,7 +31,7 @@ class GlobalStm extends TransactionManagement {
   val DefaultGlobalTransactionFactory = TransactionFactory(
     DefaultGlobalTransactionConfig, "DefaultGlobalTransaction")
 
-  def atomic[T](body: => T)(implicit factory: TransactionFactory = DefaultGlobalTransactionFactory): T = 
+  def atomic[T](body: => T)(implicit factory: TransactionFactory = DefaultGlobalTransactionFactory): T =
     atomic(factory)(body)
 
   def atomic[T](factory: TransactionFactory)(body: => T): T = {
diff --git a/akka-actor/src/main/scala/stm/local/LocalStm.scala b/akka-actor/src/main/scala/stm/local/LocalStm.scala
index c24097f9e5..f0e60206f6 100644
--- a/akka-actor/src/main/scala/stm/local/LocalStm.scala
+++ b/akka-actor/src/main/scala/stm/local/LocalStm.scala
@@ -32,7 +32,7 @@ class LocalStm extends TransactionManagement with Logging {
   val DefaultLocalTransactionFactory = TransactionFactory(
     DefaultLocalTransactionConfig, "DefaultLocalTransaction")
 
-  def atomic[T](body: => T)(implicit factory: TransactionFactory = DefaultLocalTransactionFactory): T = 
+  def atomic[T](body: => T)(implicit factory: TransactionFactory = DefaultLocalTransactionFactory): T =
     atomic(factory)(body)
 
   def atomic[T](factory: TransactionFactory)(body: => T): T = {
diff --git a/akka-actor/src/main/scala/util/ReflectiveAccess.scala b/akka-actor/src/main/scala/util/ReflectiveAccess.scala
index 72909457b0..da9fb2f3c6 100644
--- a/akka-actor/src/main/scala/util/ReflectiveAccess.scala
+++ b/akka-actor/src/main/scala/util/ReflectiveAccess.scala
@@ -14,7 +14,7 @@ import java.net.InetSocketAddress
 * Helper class for reflective access to different modules in order to allow optional loading of modules.
 *
 * @author Jonas Bonér
- */ 
+ */
 object ReflectiveAccess {
 
   val loader = getClass.getClassLoader
@@ -26,15 +26,15 @@ object ReflectiveAccess {
   def ensureTypedActorEnabled = TypedActorModule.ensureTypedActorEnabled
 
   /**
-   * Reflective access to the RemoteClient module. 
+   * Reflective access to the RemoteClient module.
    *
    * @author Jonas Bonér
    */
   object RemoteClientModule {
 
-    type RemoteClient = { 
+    type RemoteClient = {
       def send[T](
-        message: Any, 
+        message: Any,
         senderOption: Option[ActorRef],
         senderFuture: Option[CompletableFuture[_]],
         remoteAddress: InetSocketAddress,
@@ -43,18 +43,18 @@ object ReflectiveAccess {
         actorRef: ActorRef,
         typedActorInfo: Option[Tuple2[String, String]],
         actorType: ActorType): Option[CompletableFuture[T]]
-      def registerSupervisorForActor(actorRef: ActorRef) 
+      def registerSupervisorForActor(actorRef: ActorRef)
     }
-    
-    type RemoteClientObject = { 
-      def register(hostname: String, port: Int, uuid: String): Unit 
+
+    type RemoteClientObject = {
+      def register(hostname: String, port: Int, uuid: String): Unit
       def unregister(hostname: String, port: Int, uuid: String): Unit
       def clientFor(address: InetSocketAddress): RemoteClient
       def clientFor(hostname: String, port: Int, loader: Option[ClassLoader]): RemoteClient
     }
-    
+
     lazy val isRemotingEnabled = remoteClientObjectInstance.isDefined
-    
+
     def ensureRemotingEnabled = if (!isRemotingEnabled) throw new ModuleNotAvailableException(
       "Can't load the remoting module, make sure that akka-remote.jar is on the classpath")
 
@@ -87,9 +87,9 @@ object ReflectiveAccess {
       ensureRemotingEnabled
       remoteClientObjectInstance.get.clientFor(hostname, port, loader)
     }
-    
+
     def send[T](
-      message: Any, 
+      message: Any,
       senderOption: Option[ActorRef],
       senderFuture: Option[CompletableFuture[_]],
       remoteAddress: InetSocketAddress,
@@ -101,11 +101,11 @@ object ReflectiveAccess {
       ensureRemotingEnabled
       clientFor(remoteAddress.getHostName, remoteAddress.getPort, None).send[T](
         message, senderOption, senderFuture, remoteAddress, timeout, isOneWay, actorRef, typedActorInfo, actorType)
-    } 
+    }
   }
 
   /**
-   * Reflective access to the RemoteServer module. 
+   * Reflective access to the RemoteServer module.
    *
    * @author Jonas Bonér
    */
@@ -113,15 +113,15 @@ object ReflectiveAccess {
     val HOSTNAME = Config.config.getString("akka.remote.server.hostname", "localhost")
     val PORT = Config.config.getInt("akka.remote.server.port", 9999)
 
-    type RemoteServerObject = { 
+    type RemoteServerObject = {
       def registerActor(address: InetSocketAddress, uuid: String, actor: ActorRef): Unit
       def registerTypedActor(address: InetSocketAddress, name: String, typedActor: AnyRef): Unit
     }
-    
-    type RemoteNodeObject = { 
-      def unregister(actorRef: ActorRef): Unit 
+
+    type RemoteNodeObject = {
+      def unregister(actorRef: ActorRef): Unit
     }
-    
+
     val remoteServerObjectInstance: Option[RemoteServerObject] = {
       try {
         val clazz = loader.loadClass("se.scalablesolutions.akka.remote.RemoteServer$")
@@ -157,7 +157,7 @@
   }
 
   /**
-   * Reflective access to the TypedActors module. 
+   * Reflective access to the TypedActors module.
    *
    * @author Jonas Bonér
    */
@@ -167,7 +167,7 @@
     def isJoinPoint(message: Any): Boolean
     def isJoinPointAndOneWay(message: Any): Boolean
   }
-  
+
   lazy val isTypedActorEnabled = typedActorObjectInstance.isDefined
 
   def ensureTypedActorEnabled = if (!isTypedActorEnabled) throw new ModuleNotAvailableException(
@@ -185,7 +185,7 @@
   def resolveFutureIfMessageIsJoinPoint(message: Any, future: Future[_]): Boolean = {
     ensureTypedActorEnabled
     if (typedActorObjectInstance.get.isJoinPointAndOneWay(message)) {
-      future.asInstanceOf[CompletableFuture[Option[_]]].completeWithResult(None) 
+      future.asInstanceOf[CompletableFuture[Option[_]]].completeWithResult(None)
     }
     typedActorObjectInstance.get.isJoinPoint(message)
   }
diff --git a/akka-actor/src/test/scala/dataflow/DataFlowSpec.scala b/akka-actor/src/test/scala/dataflow/DataFlowSpec.scala
index c5d3c32e63..518d50fc36 100644
--- a/akka-actor/src/test/scala/dataflow/DataFlowSpec.scala
+++ b/akka-actor/src/test/scala/dataflow/DataFlowSpec.scala
@@ -99,7 +99,7 @@ class DataFlowTest extends Spec with ShouldMatchers with BeforeAndAfterAll {
         result.set(producer.map(x => x * x).foldLeft(0)(_ + _))
         latch.countDown
       }
-      
+
       latch.await(3,TimeUnit.SECONDS) should equal (true)
      result.get should equal (332833500)
      ActorRegistry.shutdownAll
@@ -136,7 +136,7 @@ class DataFlowTest extends Spec with ShouldMatchers with BeforeAndAfterAll {
      thread { ints(0, 1000, producer) }
      thread { sum(0, producer, consumer) }
      thread { recurseSum(consumer) }
-      
+
      latch.await(15,TimeUnit.SECONDS) should equal (true)
      ActorRegistry.shutdownAll
    }*/
diff --git a/akka-remote/src/main/scala/remote/RemoteClient.scala b/akka-remote/src/main/scala/remote/RemoteClient.scala
index 8a555bf5a1..2b3a3a41ce 100644
--- a/akka-remote/src/main/scala/remote/RemoteClient.scala
+++ b/akka-remote/src/main/scala/remote/RemoteClient.scala
@@ -248,7 +248,7 @@ class RemoteClient private[akka] (
   protected override def manageLifeCycleOfListeners = false
 
   def send[T](
-    message: Any, 
+    message: Any,
     senderOption: Option[ActorRef],
     senderFuture: Option[CompletableFuture[T]],
     remoteAddress: InetSocketAddress,
@@ -262,8 +262,8 @@ class RemoteClient private[akka] (
   }
 
   def send[T](
-    request: RemoteRequestProtocol, 
-    senderFuture: Option[CompletableFuture[T]]): 
+    request: RemoteRequestProtocol,
+    senderFuture: Option[CompletableFuture[T]]):
     Option[CompletableFuture[T]] = if (isRunning) {
     if (request.getIsOneWay) {
       connection.getChannel.write(request)
@@ -464,25 +464,3 @@ class RemoteClientHandler(
       .newInstance(exception.getMessage).asInstanceOf[Throwable]
   }
 }
-
-object RemoteDisconnectTest {
-import se.scalablesolutions.akka.actor.{Actor,ActorRef}
-
-  class TestClientActor extends Actor {
-    def receive = {
-      case ("send ping",akt:ActorRef) => akt ! "ping"
-      case "pong" => {
-        log.debug("got pong")
-      }
-    }
-  }
-
-  class TestServerActor extends Actor {
-    def receive = {
-      case "ping" => {
-        log.debug("got ping")
-        self reply "pong"
-      }
-    }
-  }
-}
diff --git a/akka-remote/src/main/scala/serialization/SerializationProtocol.scala b/akka-remote/src/main/scala/serialization/SerializationProtocol.scala
index e7bee8e9b9..3f54f8e921 100644
--- a/akka-remote/src/main/scala/serialization/SerializationProtocol.scala
+++ b/akka-remote/src/main/scala/serialization/SerializationProtocol.scala
@@ -155,7 +155,7 @@ object ActorSerialization {
       else None
 
     val classLoader = loader.getOrElse(getClass.getClassLoader)
-    
+
     val factory = () => {
       val actorClass = classLoader.loadClass(protocol.getActorClassname)
       if (format.isInstanceOf[SerializerBasedActorFormat[_]])
@@ -180,7 +180,7 @@ object ActorSerialization {
     val messages = protocol.getMessagesList.toArray.toList.asInstanceOf[List[RemoteRequestProtocol]]
     messages.foreach(message => ar ! MessageSerializer.deserialize(message.getMessage))
 
-    
+
     if (format.isInstanceOf[SerializerBasedActorFormat[_]] == false)
       format.fromBinary(protocol.getActorInstance.toByteArray, ar.actor.asInstanceOf[T])
     ar
@@ -252,7 +252,7 @@ object RemoteActorSerialization {
       .setTarget(actorClassName)
       .setTimeout(timeout)
 
-    typedActorInfo.foreach { typedActor => 
+    typedActorInfo.foreach { typedActor =>
       actorInfoBuilder.setTypedActorInfo(
         TypedActorInfoProtocol.newBuilder
           .setInterface(typedActor._1)
diff --git a/akka-typed-actor/src/main/scala/actor/TypedActor.scala b/akka-typed-actor/src/main/scala/actor/TypedActor.scala
index 96790b590b..b27f5b4b4d 100644
--- a/akka-typed-actor/src/main/scala/actor/TypedActor.scala
+++ b/akka-typed-actor/src/main/scala/actor/TypedActor.scala
@@ -224,7 +224,7 @@ abstract class TypedActor extends Actor with Proxyable {
       if (arg.getClass.getName.contains(TypedActor.AW_PROXY_PREFIX)) unserializable = true
     }
     if (!unserializable && hasMutableArgument) {
-      
+
       //FIXME serializeArguments
       // val copyOfArgs = Serializer.Java.deepClone(args)
       // joinPoint.getRtti.asInstanceOf[MethodRtti].setParameterValues(copyOfArgs.asInstanceOf[Array[AnyRef]])
@@ -539,11 +539,11 @@ object TypedActor extends Logging {
   private[akka] def supervise(restartStrategy: RestartStrategy, components: List[Supervise]): Supervisor =
     Supervisor(SupervisorConfig(restartStrategy, components))
 
-  def isJoinPointAndOneWay(message: Any): Boolean = if (isJoinPoint(message)) 
+  def isJoinPointAndOneWay(message: Any): Boolean = if (isJoinPoint(message))
     isOneWay(message.asInstanceOf[JoinPoint].getRtti.asInstanceOf[MethodRtti])
   else false
 
-  private[akka] def isJoinPoint(message: Any): Boolean = message.isInstanceOf[JoinPoint] 
+  private[akka] def isJoinPoint(message: Any): Boolean = message.isInstanceOf[JoinPoint]
 }
 
 /**
@@ -607,11 +607,11 @@ private[akka] sealed class TypedActorAspect {
     val isOneWay = TypedActor.isOneWay(methodRtti)
     val (message: Array[AnyRef], isEscaped) = escapeArguments(methodRtti.getParameterValues)
 
-    
+
     val future = RemoteClientModule.send[AnyRef](
-      message, None, None, remoteAddress.get, 
-      timeout, isOneWay, actorRef, 
-      Some((interfaceClass.getName, methodRtti.getMethod.getName)), 
+      message, None, None, remoteAddress.get,
+      timeout, isOneWay, actorRef,
+      Some((interfaceClass.getName, methodRtti.getMethod.getName)),
       ActorType.TypedActor)
 
     if (isOneWay) null // for void methods