diff --git a/akka-amqp/src/main/scala/AMQP.scala b/akka-amqp/src/main/scala/AMQP.scala deleted file mode 100644 index 0d94c7a7dc..0000000000 --- a/akka-amqp/src/main/scala/AMQP.scala +++ /dev/null @@ -1,150 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.amqp - -import se.scalablesolutions.akka.actor.{Actor, ActorRef} -import se.scalablesolutions.akka.actor.Actor._ -import se.scalablesolutions.akka.config.OneForOneStrategy -import com.rabbitmq.client.{ReturnListener, ShutdownListener, ConnectionFactory} -import java.lang.IllegalArgumentException -import se.scalablesolutions.akka.util.Logging -/** - * AMQP Actor API. Implements Connection, Producer and Consumer materialized as Actors. - * - * @see se.scalablesolutions.akka.amqp.ExampleSession - * - * @author Irmo Manie - */ -object AMQP { - case class ConnectionParameters( - host: String = ConnectionFactory.DEFAULT_HOST, - port: Int = ConnectionFactory.DEFAULT_AMQP_PORT, - username: String = ConnectionFactory.DEFAULT_USER, - password: String = ConnectionFactory.DEFAULT_PASS, - virtualHost: String = ConnectionFactory.DEFAULT_VHOST, - initReconnectDelay: Long = 5000, - connectionCallback: Option[ActorRef] = None) - - case class ChannelParameters( - shutdownListener: Option[ShutdownListener] = None, - channelCallback: Option[ActorRef] = None) - - case class ExchangeParameters( - exchangeName: String, - exchangeType: ExchangeType, - exchangeDurable: Boolean = false, - exchangeAutoDelete: Boolean = true, - exchangePassive: Boolean = false, - configurationArguments: Map[String, AnyRef] = Map()) - - case class ProducerParameters( - exchangeParameters: ExchangeParameters, - producerId: Option[String] = None, - returnListener: Option[ReturnListener] = None, - channelParameters: Option[ChannelParameters] = None) - - case class ConsumerParameters( - exchangeParameters: ExchangeParameters, - routingKey: String, - deliveryHandler: ActorRef, - queueName: Option[String] = None, - queueDurable: Boolean = false, - queueAutoDelete: Boolean = true, - queuePassive: Boolean = false, - queueExclusive: Boolean = false, - selfAcknowledging: Boolean = true, - channelParameters: Option[ChannelParameters] = None) { - if (queueDurable && queueName.isEmpty) { - throw new IllegalArgumentException("A queue name is required when requesting a durable queue.") - } - } - - def newConnection(connectionParameters: ConnectionParameters = new ConnectionParameters): ActorRef = { - val connection: ActorRef = supervisor.newConnection(connectionParameters) - connection ! Connect - connection - } - - def newProducer(connection: ActorRef, producerParameters: ProducerParameters): ActorRef = { - val producer: ActorRef = Actor.actorOf(new ProducerActor(producerParameters)) - connection.startLink(producer) - producer ! Start - producer - } - - def newConsumer(connection: ActorRef, consumerParameters: ConsumerParameters): ActorRef = { - val consumer: ActorRef = actorOf(new ConsumerActor(consumerParameters)) - val handler = consumerParameters.deliveryHandler - if (handler.supervisor.isEmpty) consumer.startLink(handler) - connection.startLink(consumer) - consumer ! 
Start - consumer - } - - def newRpcClient[O,I]( - connection: ActorRef, - exchangeParameters: ExchangeParameters, - routingKey: String, - serializer: RpcClientSerializer[O,I], - channelParameters: Option[ChannelParameters] = None): ActorRef = { - val rpcActor: ActorRef = actorOf(new RpcClientActor[O,I](exchangeParameters, routingKey, serializer, channelParameters)) - connection.startLink(rpcActor) - rpcActor ! Start - rpcActor - } - - def newRpcServer[I,O]( - connection: ActorRef, - exchangeParameters: ExchangeParameters, - routingKey: String, - serializer: RpcServerSerializer[I,O], - requestHandler: I => O, - queueName: Option[String] = None, - channelParameters: Option[ChannelParameters] = None) = { - val producer = newProducer(connection, new ProducerParameters(new ExchangeParameters("", ExchangeType.Direct), channelParameters = channelParameters)) - val rpcServer = actorOf(new RpcServerActor[I,O](producer, serializer, requestHandler)) - val consumer = newConsumer(connection, new ConsumerParameters(exchangeParameters, routingKey, rpcServer - , channelParameters = channelParameters - , selfAcknowledging = false - , queueName = queueName)) - - } - - private val supervisor = new AMQPSupervisor - - class AMQPSupervisor extends Logging { - class AMQPSupervisorActor extends Actor { - import self._ - - faultHandler = Some(OneForOneStrategy(5, 5000)) - trapExit = List(classOf[Throwable]) - - def receive = { - case _ => {} // ignore all messages - } - } - - private val supervisor = actorOf(new AMQPSupervisorActor).start - - def newConnection(connectionParameters: ConnectionParameters): ActorRef = { - val connectionActor = actorOf(new FaultTolerantConnectionActor(connectionParameters)) - supervisor.startLink(connectionActor) - connectionActor - } - } - - trait FromBinary[T] { - def fromBinary(bytes: Array[Byte]): T - } - - trait ToBinary[T] { - def toBinary(t: T): Array[Byte] - } - - - case class RpcClientSerializer[O,I](toBinary: ToBinary[O], fromBinary: FromBinary[I]) - - case class RpcServerSerializer[I,O](fromBinary: FromBinary[I], toBinary: ToBinary[O]) -} diff --git a/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/AMQP.scala b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/AMQP.scala new file mode 100644 index 0000000000..cd73d27e03 --- /dev/null +++ b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/AMQP.scala @@ -0,0 +1,218 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +package se.scalablesolutions.akka.amqp + +import se.scalablesolutions.akka.actor.{Actor, ActorRef} +import se.scalablesolutions.akka.actor.Actor._ +import se.scalablesolutions.akka.config.OneForOneStrategy +import com.rabbitmq.client.{ReturnListener, ShutdownListener, ConnectionFactory} +import com.rabbitmq.client.AMQP.BasicProperties +import java.lang.{String, IllegalArgumentException} + +/** + * AMQP Actor API. Implements Connection, Producer and Consumer materialized as Actors. 
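A minimal usage sketch of the core API this object exposes, assuming a local RabbitMQ broker with default credentials; the exchange, queue and routing-key names below are illustrative and not taken from the patch:

import se.scalablesolutions.akka.actor.Actor._
import se.scalablesolutions.akka.amqp._
import se.scalablesolutions.akka.amqp.AMQP._

// connects to amqp://guest:guest@localhost:5672/ by default
val connection = newConnection()

val exchange = ExchangeParameters("my.exchange", ExchangeType.Direct)

// the delivery handler is a plain actor receiving Delivery messages
val consumer = newConsumer(connection, ConsumerParameters(exchange, "my.routing.key", actor {
  case Delivery(payload, _, _, _, _) => println("received: " + new String(payload))
}))

// the producer accepts Message(payload, routingKey) with raw byte payloads
val producer = newProducer(connection, ProducerParameters(exchange))
producer ! Message("hello".getBytes, "my.routing.key")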
+ * + * @see se.scalablesolutions.akka.amqp.ExampleSession + * + * @author Irmo Manie + */ +object AMQP { + case class ConnectionParameters( + host: String = ConnectionFactory.DEFAULT_HOST, + port: Int = ConnectionFactory.DEFAULT_AMQP_PORT, + username: String = ConnectionFactory.DEFAULT_USER, + password: String = ConnectionFactory.DEFAULT_PASS, + virtualHost: String = ConnectionFactory.DEFAULT_VHOST, + initReconnectDelay: Long = 5000, + connectionCallback: Option[ActorRef] = None) + + case class ChannelParameters( + shutdownListener: Option[ShutdownListener] = None, + channelCallback: Option[ActorRef] = None) + + case class ExchangeParameters( + exchangeName: String, + exchangeType: ExchangeType, + exchangeDurable: Boolean = false, + exchangeAutoDelete: Boolean = true, + exchangePassive: Boolean = false, + configurationArguments: Map[String, AnyRef] = Map()) + + case class ProducerParameters( + exchangeParameters: ExchangeParameters, + producerId: Option[String] = None, + returnListener: Option[ReturnListener] = None, + channelParameters: Option[ChannelParameters] = None) + + case class ConsumerParameters( + exchangeParameters: ExchangeParameters, + routingKey: String, + deliveryHandler: ActorRef, + queueName: Option[String] = None, + queueDurable: Boolean = false, + queueAutoDelete: Boolean = true, + queuePassive: Boolean = false, + queueExclusive: Boolean = false, + selfAcknowledging: Boolean = true, + channelParameters: Option[ChannelParameters] = None) { + if (queueDurable && queueName.isEmpty) { + throw new IllegalArgumentException("A queue name is required when requesting a durable queue.") + } + } + + def newConnection(connectionParameters: ConnectionParameters = new ConnectionParameters): ActorRef = { + val connection = actorOf(new FaultTolerantConnectionActor(connectionParameters)) + supervisor.startLink(connection) + connection ! Connect + connection + } + + def newProducer(connection: ActorRef, producerParameters: ProducerParameters): ActorRef = { + val producer: ActorRef = Actor.actorOf(new ProducerActor(producerParameters)) + connection.startLink(producer) + producer ! Start + producer + } + + def newConsumer(connection: ActorRef, consumerParameters: ConsumerParameters): ActorRef = { + val consumer: ActorRef = actorOf(new ConsumerActor(consumerParameters)) + val handler = consumerParameters.deliveryHandler + if (handler.supervisor.isEmpty) consumer.startLink(handler) + connection.startLink(consumer) + consumer ! Start + consumer + } + + /** + * Convenience + */ + class ProducerClient[O](client: ActorRef, routingKey: String, toBinary: ToBinary[O]) { + def send(request: O, replyTo: Option[String] = None) = { + val basicProperties = new BasicProperties + basicProperties.setReplyTo(replyTo.getOrElse(null)) + client ! 
Message(toBinary.toBinary(request), routingKey, false, false, Some(basicProperties)) + } + + def stop = client.stop + } + + def newStringProducer(connection: ActorRef, + exchange: String, + routingKey: Option[String] = None, + producerId: Option[String] = None, + durable: Boolean = false, + autoDelete: Boolean = true, + passive: Boolean = true): ProducerClient[String] = { + + val exchangeParameters = ExchangeParameters(exchange, ExchangeType.Topic, + exchangeDurable = durable, exchangeAutoDelete = autoDelete) + val rKey = routingKey.getOrElse("%s.request".format(exchange)) + + val producerRef = newProducer(connection, ProducerParameters(exchangeParameters, producerId)) + val toBinary = new ToBinary[String] { + def toBinary(t: String) = t.getBytes + } + new ProducerClient(producerRef, rKey, toBinary) + } + + def newStringConsumer(connection: ActorRef, + exchange: String, + handler: String => Unit, + routingKey: Option[String] = None, + queueName: Option[String] = None, + durable: Boolean = false, + autoDelete: Boolean = true): ActorRef = { + + val deliveryHandler = actor { + case Delivery(payload, _, _, _, _) => handler.apply(new String(payload)) + } + + val exchangeParameters = ExchangeParameters(exchange, ExchangeType.Topic, + exchangeDurable = durable, exchangeAutoDelete = autoDelete) + val rKey = routingKey.getOrElse("%s.request".format(exchange)) + val qName = queueName.getOrElse("%s.in".format(rKey)) + + newConsumer(connection, ConsumerParameters(exchangeParameters, rKey, deliveryHandler, Some(qName), durable, autoDelete)) + } + + def newProtobufProducer[O <: com.google.protobuf.Message](connection: ActorRef, + exchange: String, + routingKey: Option[String] = None, + producerId: Option[String] = None, + durable: Boolean = false, + autoDelete: Boolean = true, + passive: Boolean = true): ProducerClient[O] = { + + val exchangeParameters = ExchangeParameters(exchange, ExchangeType.Topic, + exchangeDurable = durable, exchangeAutoDelete = autoDelete) + val rKey = routingKey.getOrElse("%s.request".format(exchange)) + + val producerRef = newProducer(connection, ProducerParameters(exchangeParameters, producerId)) + new ProducerClient(producerRef, rKey, new ToBinary[O] { + def toBinary(t: O) = t.toByteArray + }) + } + + def newProtobufConsumer[I <: com.google.protobuf.Message](connection: ActorRef, + exchange: String, + handler: I => Unit, + routingKey: Option[String] = None, + queueName: Option[String] = None, + durable: Boolean = false, + autoDelete: Boolean = true)(implicit manifest: Manifest[I]): ActorRef = { + + val deliveryHandler = actor { + case Delivery(payload, _, _, _, _) => { + handler.apply(createProtobufFromBytes[I](payload)) + } + } + + val exchangeParameters = ExchangeParameters(exchange, ExchangeType.Topic, + exchangeDurable = durable, exchangeAutoDelete = autoDelete) + val rKey = routingKey.getOrElse("%s.request".format(exchange)) + val qName = queueName.getOrElse("%s.in".format(rKey)) + + newConsumer(connection, ConsumerParameters(exchangeParameters, rKey, deliveryHandler, Some(qName), durable, autoDelete)) + } + + /** + * Main supervisor + */ + + class AMQPSupervisorActor extends Actor { + import self._ + + faultHandler = Some(OneForOneStrategy(5, 5000)) + trapExit = List(classOf[Throwable]) + + def receive = { + case _ => {} // ignore all messages + } + } + + private val supervisor = actorOf(new AMQPSupervisorActor).start + + def shutdownAll = { + supervisor.shutdownLinkedActors + } + + /** + * Serialization stuff + */ + + trait FromBinary[T] { + def fromBinary(bytes: 
Array[Byte]): T + } + + trait ToBinary[T] { + def toBinary(t: T): Array[Byte] + } + + private val ARRAY_OF_BYTE_ARRAY = Array[Class[_]](classOf[Array[Byte]]) + + private[amqp] def createProtobufFromBytes[I <: com.google.protobuf.Message](bytes: Array[Byte])(implicit manifest: Manifest[I]): I = { + manifest.erasure.getDeclaredMethod("parseFrom", ARRAY_OF_BYTE_ARRAY: _*).invoke(null, bytes).asInstanceOf[I] + } +} diff --git a/akka-amqp/src/main/scala/AMQPMessage.scala b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/AMQPMessage.scala similarity index 91% rename from akka-amqp/src/main/scala/AMQPMessage.scala rename to akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/AMQPMessage.scala index 92cd95906a..34eb37aa14 100644 --- a/akka-amqp/src/main/scala/AMQPMessage.scala +++ b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/AMQPMessage.scala @@ -44,6 +44,9 @@ case object Stopped extends AMQPMessage // delivery messages case class Acknowledge(deliveryTag: Long) extends AMQPMessage case class Acknowledged(deliveryTag: Long) extends AMQPMessage +case class Reject(deliveryTag: Long) extends AMQPMessage +case class Rejected(deliveryTag: Long) extends AMQPMessage +class RejectionException(deliveryTag: Long) extends RuntimeException // internal messages private[akka] case class Failure(cause: Throwable) extends InternalAMQPMessage diff --git a/akka-amqp/src/main/scala/ConsumerActor.scala b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/ConsumerActor.scala similarity index 83% rename from akka-amqp/src/main/scala/ConsumerActor.scala rename to akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/ConsumerActor.scala index 90c8d7deec..b01f79f949 100644 --- a/akka-amqp/src/main/scala/ConsumerActor.scala +++ b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/ConsumerActor.scala @@ -23,6 +23,7 @@ private[amqp] class ConsumerActor(consumerParameters: ConsumerParameters) def specificMessageHandler = { case Acknowledge(deliveryTag) => acknowledgeDeliveryTag(deliveryTag, true) + case Reject(deliveryTag) => rejectDeliveryTag(deliveryTag, true) case message: Message => handleIllegalMessage("%s can't be used to send messages, ignoring message [%s]".format(this, message)) case unknown => @@ -82,6 +83,19 @@ private[amqp] class ConsumerActor(consumerParameters: ConsumerParameters) } } + private def rejectDeliveryTag(deliveryTag: Long, remoteAcknowledgement: Boolean) = { + log.debug("Rejecting message with delivery tag [%s]", deliveryTag) + // FIXME: when rabbitmq 1.9 arrives, basicReject should be available on the API and implemented instead of this + log.warning("Consumer is rejecting delivery with tag [%s] - " + + "for now this means we have to self terminate and kill the channel - see you in a second.") + channel.foreach{ch => + if (remoteAcknowledgement) { + deliveryHandler ! 
Rejected(deliveryTag) + } + } + throw new RejectionException(deliveryTag) + } + private def handleIllegalMessage(errorMessage: String) = { log.error(errorMessage) throw new IllegalArgumentException(errorMessage) @@ -94,7 +108,7 @@ private[amqp] class ConsumerActor(consumerParameters: ConsumerParameters) override def shutdown = { listenerTag.foreach(tag => channel.foreach(_.basicCancel(tag))) - self.linkedActorsAsList.foreach(_.stop) + self.shutdownLinkedActors super.shutdown } diff --git a/akka-amqp/src/main/scala/ExampleSession.scala b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/ExampleSession.scala similarity index 58% rename from akka-amqp/src/main/scala/ExampleSession.scala rename to akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/ExampleSession.scala index 4fa1358a29..f3a8197abf 100644 --- a/akka-amqp/src/main/scala/ExampleSession.scala +++ b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/ExampleSession.scala @@ -4,44 +4,64 @@ package se.scalablesolutions.akka.amqp +import rpc.RPC +import rpc.RPC.{RpcClientSerializer, RpcServerSerializer} import se.scalablesolutions.akka.actor.{Actor, ActorRegistry} import Actor._ import java.util.concurrent.{CountDownLatch, TimeUnit} -import se.scalablesolutions.akka.amqp.AMQP._ import java.lang.String +import se.scalablesolutions.akka.amqp.AMQP._ +import se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol object ExampleSession { def main(args: Array[String]) = { - println("==== DIRECT ===") + + printTopic("DIRECT") direct - TimeUnit.SECONDS.sleep(2) - - println("==== FANOUT ===") + printTopic("FANOUT") fanout - TimeUnit.SECONDS.sleep(2) - - println("==== TOPIC ===") + printTopic("TOPIC") topic - TimeUnit.SECONDS.sleep(2) - - println("==== CALLBACK ===") + printTopic("CALLBACK") callback - TimeUnit.SECONDS.sleep(2) + printTopic("EASY STRING PRODUCER AND CONSUMER") + easyStringProducerConsumer - println("==== RPC ===") + printTopic("EASY PROTOBUF PRODUCER AND CONSUMER") + easyProtobufProducerConsumer + + printTopic("RPC") rpc - TimeUnit.SECONDS.sleep(2) + printTopic("EASY STRING RPC") + easyStringRpc + + printTopic("EASY PROTOBUF RPC") + easyProtobufRpc + + printTopic("Happy hAkking :-)") + + // shutdown everything the amqp tree except the main AMQP supervisor + // all connections/consumers/producers will be stopped + AMQP.shutdownAll ActorRegistry.shutdownAll System.exit(0) } + def printTopic(topic: String) { + + println("") + println("==== " + topic + " ===") + println("") + TimeUnit.SECONDS.sleep(2) + } + def direct = { // defaults to amqp://guest:guest@localhost:5672/ @@ -115,7 +135,7 @@ object ExampleSession { case Restarting => // not used, sent when channel or connection fails and initiates a restart case Stopped => log.info("Channel callback: Stopped") } - val exchangeParameters = ExchangeParameters("my_direct_exchange", ExchangeType.Direct) + val exchangeParameters = ExchangeParameters("my_callback_exchange", ExchangeType.Direct) val channelParameters = ChannelParameters(channelCallback = Some(channelCallback)) val consumer = AMQP.newConsumer(connection, ConsumerParameters(exchangeParameters, "callback.routing", actor { @@ -129,6 +149,40 @@ object ExampleSession { connection.stop } + def easyStringProducerConsumer = { + val connection = AMQP.newConnection() + + val exchangeName = "easy.string" + + // listen by default to: + // exchange = exchangeName + // routingKey = .request + // queueName = .in + AMQP.newStringConsumer(connection, exchangeName, message => println("Received message: 
"+message)) + + // send by default to: + // exchange = exchangeName + // routingKey = .request + val producer = AMQP.newStringProducer(connection, exchangeName) + + producer.send("This shit is easy!") + } + + def easyProtobufProducerConsumer = { + val connection = AMQP.newConnection() + + val exchangeName = "easy.protobuf" + + def protobufMessageHandler(message: AddressProtocol) = { + log.info("Received "+message) + } + + AMQP.newProtobufConsumer(connection, exchangeName, protobufMessageHandler) + + val producerClient = AMQP.newProtobufProducer[AddressProtocol](connection, exchangeName) + producerClient.send(AddressProtocol.newBuilder.setHostname("akkarocks.com").setPort(1234).build) + } + def rpc = { val connection = AMQP.newConnection() @@ -146,7 +200,7 @@ object ExampleSession { def requestHandler(request: String) = 3 - val rpcServer = AMQP.newRpcServer[String,Int](connection, exchangeParameters, "rpc.in.key", rpcServerSerializer, + val rpcServer = RPC.newRpcServer[String,Int](connection, exchangeParameters, "rpc.in.key", rpcServerSerializer, requestHandler, queueName = Some("rpc.in.key.queue")) @@ -159,9 +213,56 @@ object ExampleSession { } val rpcClientSerializer = new RpcClientSerializer[String, Int](clientToBinary, clientFromBinary) - val rpcClient = AMQP.newRpcClient[String,Int](connection, exchangeParameters, "rpc.in.key", rpcClientSerializer) + val rpcClient = RPC.newRpcClient[String,Int](connection, exchangeParameters, "rpc.in.key", rpcClientSerializer) val response = (rpcClient !! "rpc_request") log.info("Response: " + response) } + + def easyStringRpc = { + + val connection = AMQP.newConnection() + + val exchangeName = "easy.stringrpc" + + // listen by default to: + // exchange = exchangeName + // routingKey = .request + // queueName = .in + RPC.newStringRpcServer(connection, exchangeName, request => { + log.info("Got request: "+request) + "Response to: '"+request+"'" + }) + + // send by default to: + // exchange = exchangeName + // routingKey = .request + val stringRpcClient = RPC.newStringRpcClient(connection, exchangeName) + + val response = stringRpcClient.call("AMQP Rocks!") + log.info("Got response: "+response) + + stringRpcClient.callAsync("AMQP is dead easy") { + case response => log.info("This is handled async: "+response) + } + } + + def easyProtobufRpc = { + + val connection = AMQP.newConnection() + + val exchangeName = "easy.protobuf.rpc" + + def protobufRequestHandler(request: AddressProtocol): AddressProtocol = { + AddressProtocol.newBuilder.setHostname(request.getHostname.reverse).setPort(request.getPort).build + } + + RPC.newProtobufRpcServer(connection, exchangeName, protobufRequestHandler) + + val protobufRpcClient = RPC.newProtobufRpcClient[AddressProtocol, AddressProtocol](connection, exchangeName) + + val response = protobufRpcClient.call(AddressProtocol.newBuilder.setHostname("localhost").setPort(4321).build) + + log.info("Got response: "+response) + } } diff --git a/akka-amqp/src/main/scala/ExchangeType.scala b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/ExchangeType.scala similarity index 100% rename from akka-amqp/src/main/scala/ExchangeType.scala rename to akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/ExchangeType.scala diff --git a/akka-amqp/src/main/scala/FaultTolerantChannelActor.scala b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/FaultTolerantChannelActor.scala similarity index 100% rename from akka-amqp/src/main/scala/FaultTolerantChannelActor.scala rename to 
akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/FaultTolerantChannelActor.scala diff --git a/akka-amqp/src/main/scala/FaultTolerantConnectionActor.scala b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/FaultTolerantConnectionActor.scala similarity index 98% rename from akka-amqp/src/main/scala/FaultTolerantConnectionActor.scala rename to akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/FaultTolerantConnectionActor.scala index 97c3074700..1e50a985be 100644 --- a/akka-amqp/src/main/scala/FaultTolerantConnectionActor.scala +++ b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/FaultTolerantConnectionActor.scala @@ -107,7 +107,7 @@ private[amqp] class FaultTolerantConnectionActor(connectionParameters: Connectio override def shutdown = { reconnectionTimer.cancel // make sure shutdown is called on all linked actors so they can do channel cleanup before connection is killed - self.linkedActorsAsList.foreach(_.stop) + self.shutdownLinkedActors disconnect } diff --git a/akka-amqp/src/main/scala/ProducerActor.scala b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/ProducerActor.scala similarity index 100% rename from akka-amqp/src/main/scala/ProducerActor.scala rename to akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/ProducerActor.scala diff --git a/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/rpc/RPC.scala b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/rpc/RPC.scala new file mode 100644 index 0000000000..b51cbe407f --- /dev/null +++ b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/rpc/RPC.scala @@ -0,0 +1,182 @@ +package se.scalablesolutions.akka.amqp.rpc + +import se.scalablesolutions.akka.amqp.AMQP._ +import com.google.protobuf.Message +import se.scalablesolutions.akka.actor.{Actor, ActorRef} +import Actor._ +import se.scalablesolutions.akka.amqp._ + +object RPC { + + def newRpcClient[O, I](connection: ActorRef, + exchangeParameters: ExchangeParameters, + routingKey: String, + serializer: RpcClientSerializer[O, I], + channelParameters: Option[ChannelParameters] = None): ActorRef = { + val rpcActor: ActorRef = actorOf(new RpcClientActor[O, I]( + exchangeParameters, routingKey, serializer, channelParameters)) + connection.startLink(rpcActor) + rpcActor ! 
Start + rpcActor + } + + def newRpcServer[I, O](connection: ActorRef, + exchangeParameters: ExchangeParameters, + routingKey: String, + serializer: RpcServerSerializer[I, O], + requestHandler: I => O, + queueName: Option[String] = None, + channelParameters: Option[ChannelParameters] = None): RpcServerHandle = { + val producer = newProducer(connection, ProducerParameters( + ExchangeParameters("", ExchangeType.Direct), channelParameters = channelParameters)) + val rpcServer = actorOf(new RpcServerActor[I, O](producer, serializer, requestHandler)) + val consumer = newConsumer(connection, ConsumerParameters(exchangeParameters, routingKey, rpcServer, + channelParameters = channelParameters, selfAcknowledging = false, queueName = queueName)) + RpcServerHandle(producer, consumer) + } + + case class RpcServerHandle(producer: ActorRef, consumer: ActorRef) { + def stop = { + consumer.stop + producer.stop + } + } + + case class RpcClientSerializer[O, I](toBinary: ToBinary[O], fromBinary: FromBinary[I]) + + case class RpcServerSerializer[I, O](fromBinary: FromBinary[I], toBinary: ToBinary[O]) + + + /** + * RPC convenience + */ + class RpcClient[O, I](client: ActorRef){ + def call(request: O, timeout: Long = 5000): Option[I] = { + (client.!!(request, timeout)).as[I] + } + + def callAsync(request: O, timeout: Long = 5000)(responseHandler: PartialFunction[Option[I],Unit]) = { + spawn { + val result = call(request, timeout) + responseHandler.apply(result) + } + } + def stop = client.stop + } + + def newProtobufRpcServer[I <: Message, O <: Message]( + connection: ActorRef, + exchange: String, + requestHandler: I => O, + routingKey: Option[String] = None, + queueName: Option[String] = None, + durable: Boolean = false, + autoDelete: Boolean = true)(implicit manifest: Manifest[I]): RpcServerHandle = { + + val serializer = new RpcServerSerializer[I, O]( + new FromBinary[I] { + def fromBinary(bytes: Array[Byte]): I = { + createProtobufFromBytes[I](bytes) + } + }, new ToBinary[O] { + def toBinary(t: O) = t.toByteArray + }) + + startServer(connection, exchange, requestHandler, routingKey, queueName, durable, autoDelete, serializer) + } + + def newProtobufRpcClient[O <: Message, I <: Message]( + connection: ActorRef, + exchange: String, + routingKey: Option[String] = None, + durable: Boolean = false, + autoDelete: Boolean = true, + passive: Boolean = true)(implicit manifest: Manifest[I]): RpcClient[O, I] = { + + + val serializer = new RpcClientSerializer[O, I]( + new ToBinary[O] { + def toBinary(t: O) = t.toByteArray + }, new FromBinary[I] { + def fromBinary(bytes: Array[Byte]): I = { + createProtobufFromBytes[I](bytes) + } + }) + + startClient(connection, exchange, routingKey, durable, autoDelete, passive, serializer) + } + + def newStringRpcServer(connection: ActorRef, + exchange: String, + requestHandler: String => String, + routingKey: Option[String] = None, + queueName: Option[String] = None, + durable: Boolean = false, + autoDelete: Boolean = true): RpcServerHandle = { + + val serializer = new RpcServerSerializer[String, String]( + new FromBinary[String] { + def fromBinary(bytes: Array[Byte]): String = { + new String(bytes) + } + }, new ToBinary[String] { + def toBinary(t: String) = t.getBytes + }) + + startServer(connection, exchange, requestHandler, routingKey, queueName, durable, autoDelete, serializer) + } + + def newStringRpcClient(connection: ActorRef, + exchange: String, + routingKey: Option[String] = None, + durable: Boolean = false, + autoDelete: Boolean = true, + passive: Boolean = true): 
RpcClient[String, String] = { + + + val serializer = new RpcClientSerializer[String, String]( + new ToBinary[String] { + def toBinary(t: String) = t.getBytes + }, new FromBinary[String] { + def fromBinary(bytes: Array[Byte]): String = { + new String(bytes) + } + }) + + startClient(connection, exchange, routingKey, durable, autoDelete, passive, serializer) + } + + private def startClient[O, I](connection: ActorRef, + exchange: String, + routingKey: Option[String] = None, + durable: Boolean = false, + autoDelete: Boolean = true, + passive: Boolean = true, + serializer: RpcClientSerializer[O, I]): RpcClient[O, I] = { + + val exchangeParameters = ExchangeParameters(exchange, ExchangeType.Topic, + exchangeDurable = durable, exchangeAutoDelete = autoDelete, exchangePassive = passive) + val rKey = routingKey.getOrElse("%s.request".format(exchange)) + + val client = newRpcClient(connection, exchangeParameters, rKey, serializer) + new RpcClient(client) + } + + private def startServer[I, O](connection: ActorRef, + exchange: String, + requestHandler: I => O, + routingKey: Option[String] = None, + queueName: Option[String] = None, + durable: Boolean = false, + autoDelete: Boolean = true, + serializer: RpcServerSerializer[I, O]): RpcServerHandle = { + + val exchangeParameters = ExchangeParameters(exchange, ExchangeType.Topic, + exchangeDurable = durable, exchangeAutoDelete = autoDelete) + val rKey = routingKey.getOrElse("%s.request".format(exchange)) + val qName = queueName.getOrElse("%s.in".format(rKey)) + + newRpcServer(connection, exchangeParameters, rKey, serializer, requestHandler, queueName = Some(qName)) + } +} + diff --git a/akka-amqp/src/main/scala/RpcClientActor.scala b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/rpc/RpcClientActor.scala similarity index 89% rename from akka-amqp/src/main/scala/RpcClientActor.scala rename to akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/rpc/RpcClientActor.scala index 0691e76884..5c717cb8bb 100644 --- a/akka-amqp/src/main/scala/RpcClientActor.scala +++ b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/rpc/RpcClientActor.scala @@ -4,11 +4,9 @@ package se.scalablesolutions.akka.amqp -import se.scalablesolutions.akka.serialization.Serializer -import se.scalablesolutions.akka.amqp.AMQP.{ChannelParameters, ExchangeParameters} - import com.rabbitmq.client.{Channel, RpcClient} -import se.scalablesolutions.akka.amqp.AMQP.{RpcClientSerializer, ChannelParameters, ExchangeParameters} +import rpc.RPC.RpcClientSerializer +import se.scalablesolutions.akka.amqp.AMQP.{ChannelParameters, ExchangeParameters} class RpcClientActor[I,O]( exchangeParameters: ExchangeParameters, @@ -41,5 +39,11 @@ class RpcClientActor[I,O]( super.preRestart(reason) } + + override def shutdown = { + rpcClient.foreach(rpc => rpc.close) + super.shutdown + } + override def toString = "AMQP.RpcClient[exchange=" +exchangeName + ", routingKey=" + routingKey+ "]" } diff --git a/akka-amqp/src/main/scala/RpcServerActor.scala b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/rpc/RpcServerActor.scala similarity index 94% rename from akka-amqp/src/main/scala/RpcServerActor.scala rename to akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/rpc/RpcServerActor.scala index 309c7fa40c..5f6b4b713c 100644 --- a/akka-amqp/src/main/scala/RpcServerActor.scala +++ b/akka-amqp/src/main/scala/se/scalablesolutions/akka/amqp/rpc/RpcServerActor.scala @@ -4,9 +4,9 @@ package se.scalablesolutions.akka.amqp +import rpc.RPC.RpcServerSerializer import 
se.scalablesolutions.akka.actor.{ActorRef, Actor} import com.rabbitmq.client.AMQP.BasicProperties -import se.scalablesolutions.akka.amqp.AMQP.RpcServerSerializer class RpcServerActor[I,O]( producer: ActorRef, diff --git a/akka-amqp/src/test/scala/AMQPRpcClientServerTest.scala b/akka-amqp/src/test/scala/AMQPRpcClientServerTest.scala deleted file mode 100644 index c585675098..0000000000 --- a/akka-amqp/src/test/scala/AMQPRpcClientServerTest.scala +++ /dev/null @@ -1,69 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package se.scalablesolutions.akka.amqp.test - -import se.scalablesolutions.akka.util.Logging -import org.scalatest.junit.JUnitSuite -import org.junit.Test -import se.scalablesolutions.akka.amqp._ -import se.scalablesolutions.akka.actor.Actor._ -import org.scalatest.matchers.MustMatchers -import java.util.concurrent.{CountDownLatch, TimeUnit} -import se.scalablesolutions.akka.amqp.AMQP._ - -class AMQPRpcClientServerTest extends JUnitSuite with MustMatchers with Logging { - @Test - def consumerMessage = if (AMQPTest.enabled) { - val connection = AMQP.newConnection() - try { - - val countDown = new CountDownLatch(3) - val channelCallback = actor { - case Started => countDown.countDown - case Restarting => () - case Stopped => () - } - - val exchangeParameters = ExchangeParameters("text_topic_exchange", ExchangeType.Topic) - val channelParameters = ChannelParameters(channelCallback - = Some(channelCallback)) - - val rpcServerSerializer = new RpcServerSerializer[String, Int]( - new FromBinary[String] { - def fromBinary(bytes: Array[Byte]) = new String(bytes) - }, new ToBinary[Int] { - def toBinary(t: Int) = Array(t.toByte) - }) - - def requestHandler(request: String) = 3 - - val rpcServer = AMQP.newRpcServer[String, Int](connection, exchangeParameters, "rpc.routing", rpcServerSerializer, - requestHandler, channelParameters = Some(channelParameters)) - - val rpcClientSerializer = new RpcClientSerializer[String, Int]( - new ToBinary[String] { - def toBinary(t: String) = t.getBytes - }, new FromBinary[Int] { - def fromBinary(bytes: Array[Byte]) = bytes.head.toInt - }) - - val rpcClient = AMQP.newRpcClient[String, Int](connection, exchangeParameters, "rpc.routing", rpcClientSerializer, - channelParameters = Some(channelParameters)) - - countDown.await(2, TimeUnit.SECONDS) must be(true) - val response = rpcClient !! "some_payload" - response must be(Some(3)) - } finally { - connection.stop - } - } - - @Test - def dummy { - // amqp tests need local rabbitmq server running, so a disabled by default. 
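The new rpc.RPC convenience layer wraps these actors behind RpcServerHandle and RpcClient handles. A minimal string-based round trip might look like the following sketch; the "echo" exchange name is illustrative, and by the defaults in RPC.scala the routing key becomes "echo.request" and the queue "echo.request.in":

import se.scalablesolutions.akka.amqp.AMQP
import se.scalablesolutions.akka.amqp.rpc.RPC

val connection = AMQP.newConnection()

// server side: a String => String request handler
val server = RPC.newStringRpcServer(connection, "echo", request => request.toUpperCase)

// client side: blocking call with timeout, or handler-based async call
val client = RPC.newStringRpcClient(connection, "echo")
val reply: Option[String] = client.call("ping", timeout = 5000)
client.callAsync("ping") {
  case Some(response) => println("async reply: " + response)
  case None           => println("no reply within timeout")
}

client.stop
server.stop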
- // this dummy test makes sure that the whole test class doesn't fail because of missing tests - assert(true) - } -} diff --git a/akka-amqp/src/test/scala/AMQPConnectionRecoveryTest.scala b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConnectionRecoveryTest.scala similarity index 81% rename from akka-amqp/src/test/scala/AMQPConnectionRecoveryTest.scala rename to akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConnectionRecoveryTest.scala index c1af35546a..f9d30227f0 100644 --- a/akka-amqp/src/test/scala/AMQPConnectionRecoveryTest.scala +++ b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConnectionRecoveryTest.scala @@ -1,12 +1,9 @@ +package se.scalablesolutions.akka.amqp.test + /** * Copyright (C) 2009-2010 Scalable Solutions AB */ -package se.scalablesolutions.akka.amqp.test - -import se.scalablesolutions.akka.util.Logging -import org.scalatest.junit.JUnitSuite -import org.junit.Test import java.util.concurrent.TimeUnit import se.scalablesolutions.akka.actor.{Actor, ActorRef} import org.multiverse.api.latches.StandardLatch @@ -14,11 +11,13 @@ import com.rabbitmq.client.ShutdownSignalException import se.scalablesolutions.akka.amqp._ import se.scalablesolutions.akka.amqp.AMQP.ConnectionParameters import org.scalatest.matchers.MustMatchers +import org.scalatest.junit.JUnitSuite +import org.junit.Test -class AMQPConnectionRecoveryTest extends JUnitSuite with MustMatchers with Logging { +class AMQPConnectionRecoveryTest extends JUnitSuite with MustMatchers { @Test - def connectionAndRecovery = if (AMQPTest.enabled) { + def connectionAndRecovery = if (AMQPTest.enabled) AMQPTest.withCleanEndState { val connectedLatch = new StandardLatch val reconnectingLatch = new StandardLatch @@ -45,15 +44,9 @@ class AMQPConnectionRecoveryTest extends JUnitSuite with MustMatchers with Loggi reconnectedLatch.tryAwait(2, TimeUnit.SECONDS) must be(true) } finally { - connection.stop + AMQP.shutdownAll disconnectedLatch.tryAwait(2, TimeUnit.SECONDS) must be(true) } } - @Test - def dummy { - // amqp tests need local rabbitmq server running, so a disabled by default. 
- // this dummy test makes sure that the whole test class doesn't fail because of missing tests - assert(true) - } } diff --git a/akka-amqp/src/test/scala/AMQPConsumerChannelRecoveryTest.scala b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConsumerChannelRecoveryTest.scala similarity index 86% rename from akka-amqp/src/test/scala/AMQPConsumerChannelRecoveryTest.scala rename to akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConsumerChannelRecoveryTest.scala index a0b44f4739..31a90c8200 100644 --- a/akka-amqp/src/test/scala/AMQPConsumerChannelRecoveryTest.scala +++ b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConsumerChannelRecoveryTest.scala @@ -1,12 +1,9 @@ +package se.scalablesolutions.akka.amqp.test + /** * Copyright (C) 2009-2010 Scalable Solutions AB */ -package se.scalablesolutions.akka.amqp.test - -import se.scalablesolutions.akka.util.Logging -import org.scalatest.junit.JUnitSuite -import se.scalablesolutions.akka.actor.Actor._ import org.multiverse.api.latches.StandardLatch import com.rabbitmq.client.ShutdownSignalException import se.scalablesolutions.akka.amqp._ @@ -15,11 +12,13 @@ import java.util.concurrent.TimeUnit import se.scalablesolutions.akka.actor.ActorRef import org.junit.Test import se.scalablesolutions.akka.amqp.AMQP._ +import org.scalatest.junit.JUnitSuite +import se.scalablesolutions.akka.actor.Actor._ -class AMQPConsumerChannelRecoveryTest extends JUnitSuite with MustMatchers with Logging { +class AMQPConsumerChannelRecoveryTest extends JUnitSuite with MustMatchers { @Test - def consumerChannelRecovery = if (AMQPTest.enabled) { + def consumerChannelRecovery = if (AMQPTest.enabled) AMQPTest.withCleanEndState { val connection = AMQP.newConnection(ConnectionParameters(initReconnectDelay = 50)) try { @@ -60,11 +59,4 @@ class AMQPConsumerChannelRecoveryTest extends JUnitSuite with MustMatchers with connection.stop } } - - @Test - def dummy { - // amqp tests need local rabbitmq server running, so a disabled by default. 
- // this dummy test makes sure that the whole test class doesn't fail because of missing tests - assert(true) - } } diff --git a/akka-amqp/src/test/scala/AMQPConsumerConnectionRecoveryTest.scala b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConsumerConnectionRecoveryTest.scala similarity index 86% rename from akka-amqp/src/test/scala/AMQPConsumerConnectionRecoveryTest.scala rename to akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConsumerConnectionRecoveryTest.scala index bf4885fea5..50c078a13a 100644 --- a/akka-amqp/src/test/scala/AMQPConsumerConnectionRecoveryTest.scala +++ b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConsumerConnectionRecoveryTest.scala @@ -1,25 +1,24 @@ +package se.scalablesolutions.akka.amqp.test + /** * Copyright (C) 2009-2010 Scalable Solutions AB */ -package se.scalablesolutions.akka.amqp.test - -import se.scalablesolutions.akka.util.Logging -import org.scalatest.junit.JUnitSuite -import se.scalablesolutions.akka.actor.Actor._ import org.multiverse.api.latches.StandardLatch import com.rabbitmq.client.ShutdownSignalException import se.scalablesolutions.akka.amqp._ import org.scalatest.matchers.MustMatchers import java.util.concurrent.TimeUnit -import se.scalablesolutions.akka.actor.ActorRef import org.junit.Test import se.scalablesolutions.akka.amqp.AMQP._ +import org.scalatest.junit.JUnitSuite +import se.scalablesolutions.akka.actor.{Actor, ActorRef} +import Actor._ -class AMQPConsumerConnectionRecoveryTest extends JUnitSuite with MustMatchers with Logging { +class AMQPConsumerConnectionRecoveryTest extends JUnitSuite with MustMatchers { @Test - def consumerConnectionRecovery = if (AMQPTest.enabled) { + def consumerConnectionRecovery = if (AMQPTest.enabled) AMQPTest.withCleanEndState { val connection = AMQP.newConnection(ConnectionParameters(initReconnectDelay = 50)) try { @@ -79,11 +78,4 @@ class AMQPConsumerConnectionRecoveryTest extends JUnitSuite with MustMatchers wi connection.stop } } - - @Test - def dummy { - // amqp tests need local rabbitmq server running, so a disabled by default. 
- // this dummy test makes sure that the whole test class doesn't fail because of missing tests - assert(true) - } } diff --git a/akka-amqp/src/test/scala/AMQPConsumerManualAcknowledgeTest.scala b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConsumerManualAcknowledgeTest.scala similarity index 86% rename from akka-amqp/src/test/scala/AMQPConsumerManualAcknowledgeTest.scala rename to akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConsumerManualAcknowledgeTest.scala index 2dc4ee939b..011f287636 100644 --- a/akka-amqp/src/test/scala/AMQPConsumerManualAcknowledgeTest.scala +++ b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConsumerManualAcknowledgeTest.scala @@ -1,12 +1,9 @@ +package se.scalablesolutions.akka.amqp.test + /** * Copyright (C) 2009-2010 Scalable Solutions AB */ -package se.scalablesolutions.akka.amqp.test - -import se.scalablesolutions.akka.util.Logging -import org.scalatest.junit.JUnitSuite -import org.multiverse.api.latches.StandardLatch import se.scalablesolutions.akka.actor.Actor._ import org.scalatest.matchers.MustMatchers import se.scalablesolutions.akka.amqp._ @@ -14,11 +11,13 @@ import org.junit.Test import se.scalablesolutions.akka.actor.ActorRef import java.util.concurrent.{CountDownLatch, TimeUnit} import se.scalablesolutions.akka.amqp.AMQP.{ExchangeParameters, ConsumerParameters, ChannelParameters, ProducerParameters} +import org.multiverse.api.latches.StandardLatch +import org.scalatest.junit.JUnitSuite -class AMQPConsumerManualAcknowledgeTest extends JUnitSuite with MustMatchers with Logging { +class AMQPConsumerManualAcknowledgeTest extends JUnitSuite with MustMatchers { @Test - def consumerMessageManualAcknowledge = if (AMQPTest.enabled) { + def consumerMessageManualAcknowledge = if (AMQPTest.enabled) AMQPTest.withCleanEndState { val connection = AMQP.newConnection() try { val countDown = new CountDownLatch(2) @@ -57,11 +56,4 @@ class AMQPConsumerManualAcknowledgeTest extends JUnitSuite with MustMatchers wit connection.stop } } - - @Test - def dummy { - // amqp tests need local rabbitmq server running, so a disabled by default. 
- // this dummy test makes sure that the whole test class doesn't fail because of missing tests - assert(true) - } } diff --git a/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConsumerManualRejectTest.scala b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConsumerManualRejectTest.scala new file mode 100644 index 0000000000..d00d09b480 --- /dev/null +++ b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConsumerManualRejectTest.scala @@ -0,0 +1,53 @@ +package se.scalablesolutions.akka.amqp.test + +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +import se.scalablesolutions.akka.actor.Actor._ +import org.scalatest.matchers.MustMatchers +import se.scalablesolutions.akka.amqp._ +import org.junit.Test +import se.scalablesolutions.akka.actor.ActorRef +import java.util.concurrent.{CountDownLatch, TimeUnit} +import se.scalablesolutions.akka.amqp.AMQP.{ExchangeParameters, ConsumerParameters, ChannelParameters, ProducerParameters} +import org.multiverse.api.latches.StandardLatch +import org.scalatest.junit.JUnitSuite + +class AMQPConsumerManualRejectTest extends JUnitSuite with MustMatchers { + + @Test + def consumerMessageManualAcknowledge = if (AMQPTest.enabled) AMQPTest.withCleanEndState { + val connection = AMQP.newConnection() + try { + val countDown = new CountDownLatch(2) + val restartingLatch = new StandardLatch + val channelCallback = actor { + case Started => countDown.countDown + case Restarting => restartingLatch.open + case Stopped => () + } + val exchangeParameters = ExchangeParameters("text_exchange",ExchangeType.Direct) + val channelParameters = ChannelParameters(channelCallback = Some(channelCallback)) + + val rejectedLatch = new StandardLatch + val consumer:ActorRef = AMQP.newConsumer(connection, ConsumerParameters(exchangeParameters, "manual.reject.this", actor { + case Delivery(payload, _, deliveryTag, _, sender) => { + sender.foreach(_ ! Reject(deliveryTag)) + } + case Rejected(deliveryTag) => rejectedLatch.open + }, queueName = Some("self.reject.queue"), selfAcknowledging = false, queueAutoDelete = false, channelParameters = Some(channelParameters))) + + val producer = AMQP.newProducer(connection, + ProducerParameters(exchangeParameters, channelParameters = Some(channelParameters))) + + countDown.await(2, TimeUnit.SECONDS) must be (true) + producer ! 
Message("some_payload".getBytes, "manual.reject.this") + + rejectedLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) + restartingLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) + } finally { + connection.stop + } + } +} \ No newline at end of file diff --git a/akka-amqp/src/test/scala/AMQPConsumerMessageTest.scala b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConsumerMessageTest.scala similarity index 83% rename from akka-amqp/src/test/scala/AMQPConsumerMessageTest.scala rename to akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConsumerMessageTest.scala index 5d34f867d6..88661de58d 100644 --- a/akka-amqp/src/test/scala/AMQPConsumerMessageTest.scala +++ b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPConsumerMessageTest.scala @@ -1,23 +1,22 @@ +package se.scalablesolutions.akka.amqp.test + /** * Copyright (C) 2009-2010 Scalable Solutions AB */ -package se.scalablesolutions.akka.amqp.test - -import se.scalablesolutions.akka.util.Logging -import org.scalatest.junit.JUnitSuite -import org.junit.Test import se.scalablesolutions.akka.amqp._ import org.multiverse.api.latches.StandardLatch import se.scalablesolutions.akka.actor.Actor._ import org.scalatest.matchers.MustMatchers import java.util.concurrent.{CountDownLatch, TimeUnit} import se.scalablesolutions.akka.amqp.AMQP.{ExchangeParameters, ConsumerParameters, ChannelParameters, ProducerParameters} +import org.scalatest.junit.JUnitSuite +import org.junit.Test -class AMQPConsumerMessageTest extends JUnitSuite with MustMatchers with Logging { +class AMQPConsumerMessageTest extends JUnitSuite with MustMatchers { @Test - def consumerMessage = if (AMQPTest.enabled) { + def consumerMessage = if (AMQPTest.enabled) AMQPTest.withCleanEndState { val connection = AMQP.newConnection() try { @@ -46,11 +45,4 @@ class AMQPConsumerMessageTest extends JUnitSuite with MustMatchers with Logging connection.stop } } - - @Test - def dummy { - // amqp tests need local rabbitmq server running, so a disabled by default. 
- // this dummy test makes sure that the whole test class doesn't fail because of missing tests - assert(true) - } } diff --git a/akka-amqp/src/test/scala/AMQPProducerChannelRecoveryTest.scala b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPProducerChannelRecoveryTest.scala similarity index 84% rename from akka-amqp/src/test/scala/AMQPProducerChannelRecoveryTest.scala rename to akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPProducerChannelRecoveryTest.scala index 26b2d78393..e0ede02de3 100644 --- a/akka-amqp/src/test/scala/AMQPProducerChannelRecoveryTest.scala +++ b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPProducerChannelRecoveryTest.scala @@ -1,12 +1,9 @@ +package se.scalablesolutions.akka.amqp.test + /** * Copyright (C) 2009-2010 Scalable Solutions AB */ -package se.scalablesolutions.akka.amqp.test - -import se.scalablesolutions.akka.util.Logging -import org.scalatest.junit.JUnitSuite -import org.junit.Test import java.util.concurrent.TimeUnit import se.scalablesolutions.akka.actor.{Actor, ActorRef} import org.multiverse.api.latches.StandardLatch @@ -14,11 +11,13 @@ import com.rabbitmq.client.ShutdownSignalException import se.scalablesolutions.akka.amqp._ import org.scalatest.matchers.MustMatchers import se.scalablesolutions.akka.amqp.AMQP.{ExchangeParameters, ChannelParameters, ProducerParameters, ConnectionParameters} +import org.scalatest.junit.JUnitSuite +import org.junit.Test -class AMQPProducerChannelRecoveryTest extends JUnitSuite with MustMatchers with Logging { +class AMQPProducerChannelRecoveryTest extends JUnitSuite with MustMatchers { @Test - def producerChannelRecovery = if (AMQPTest.enabled) { + def producerChannelRecovery = if (AMQPTest.enabled) AMQPTest.withCleanEndState { val connection = AMQP.newConnection(ConnectionParameters(initReconnectDelay = 50)) @@ -53,11 +52,4 @@ class AMQPProducerChannelRecoveryTest extends JUnitSuite with MustMatchers with connection.stop } } - - @Test - def dummy { - // amqp tests need local rabbitmq server running, so a disabled by default. 
- // this dummy test makes sure that the whole test class doesn't fail because of missing tests - assert(true) - } } diff --git a/akka-amqp/src/test/scala/AMQPProducerConnectionRecoveryTest.scala b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPProducerConnectionRecoveryTest.scala similarity index 84% rename from akka-amqp/src/test/scala/AMQPProducerConnectionRecoveryTest.scala rename to akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPProducerConnectionRecoveryTest.scala index fe8259b208..ad756ff5f0 100644 --- a/akka-amqp/src/test/scala/AMQPProducerConnectionRecoveryTest.scala +++ b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPProducerConnectionRecoveryTest.scala @@ -1,12 +1,9 @@ +package se.scalablesolutions.akka.amqp.test + /** * Copyright (C) 2009-2010 Scalable Solutions AB */ -package se.scalablesolutions.akka.amqp.test - -import se.scalablesolutions.akka.util.Logging -import org.scalatest.junit.JUnitSuite -import org.junit.Test import java.util.concurrent.TimeUnit import se.scalablesolutions.akka.actor.{Actor, ActorRef} import org.multiverse.api.latches.StandardLatch @@ -14,11 +11,13 @@ import com.rabbitmq.client.ShutdownSignalException import se.scalablesolutions.akka.amqp._ import org.scalatest.matchers.MustMatchers import se.scalablesolutions.akka.amqp.AMQP.{ExchangeParameters, ChannelParameters, ProducerParameters, ConnectionParameters} +import org.scalatest.junit.JUnitSuite +import org.junit.Test -class AMQPProducerConnectionRecoveryTest extends JUnitSuite with MustMatchers with Logging { +class AMQPProducerConnectionRecoveryTest extends JUnitSuite with MustMatchers { @Test - def producerConnectionRecovery = if (AMQPTest.enabled) { + def producerConnectionRecovery = if (AMQPTest.enabled) AMQPTest.withCleanEndState { val connection = AMQP.newConnection(ConnectionParameters(initReconnectDelay = 50)) try { @@ -52,11 +51,4 @@ class AMQPProducerConnectionRecoveryTest extends JUnitSuite with MustMatchers wi connection.stop } } - - @Test - def dummy { - // amqp tests need local rabbitmq server running, so a disabled by default. 
- // this dummy test makes sure that the whole test class doesn't fail because of missing tests - assert(true) - } } diff --git a/akka-amqp/src/test/scala/AMQPProducerMessageTest.scala b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPProducerMessageTest.scala similarity index 81% rename from akka-amqp/src/test/scala/AMQPProducerMessageTest.scala rename to akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPProducerMessageTest.scala index 5b19df351f..7d485b1b8f 100644 --- a/akka-amqp/src/test/scala/AMQPProducerMessageTest.scala +++ b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPProducerMessageTest.scala @@ -1,12 +1,9 @@ +package se.scalablesolutions.akka.amqp.test + /** * Copyright (C) 2009-2010 Scalable Solutions AB */ -package se.scalablesolutions.akka.amqp.test - -import se.scalablesolutions.akka.util.Logging -import org.scalatest.junit.JUnitSuite -import org.junit.Test import java.util.concurrent.TimeUnit import se.scalablesolutions.akka.actor.ActorRef import org.multiverse.api.latches.StandardLatch @@ -16,11 +13,13 @@ import com.rabbitmq.client.AMQP.BasicProperties import java.lang.String import org.scalatest.matchers.MustMatchers import se.scalablesolutions.akka.amqp.AMQP.{ExchangeParameters, ProducerParameters} +import org.scalatest.junit.JUnitSuite +import org.junit.Test -class AMQPProducerMessageTest extends JUnitSuite with MustMatchers with Logging { +class AMQPProducerMessageTest extends JUnitSuite with MustMatchers { @Test - def producerMessage = if (AMQPTest.enabled) { + def producerMessage = if (AMQPTest.enabled) AMQPTest.withCleanEndState { val connection: ActorRef = AMQP.newConnection() try { @@ -41,11 +40,4 @@ class AMQPProducerMessageTest extends JUnitSuite with MustMatchers with Logging connection.stop } } - - @Test - def dummy { - // amqp tests need local rabbitmq server running, so a disabled by default. 
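The protobuf helpers parse payloads back through reflection on the generated parseFrom method (createProtobufFromBytes), so only an exchange name and a typed handler are needed. A sketch using the AddressProtocol message that the examples and tests use; the exchange name is illustrative:

import se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol

val connection = AMQP.newConnection()

// typed consumer: the handler receives already-parsed protobuf messages
AMQP.newProtobufConsumer[AddressProtocol](connection, "address.exchange",
  message => println("received host: " + message.getHostname))

// typed producer: payloads are serialized with toByteArray
val producer = AMQP.newProtobufProducer[AddressProtocol](connection, "address.exchange")
producer.send(AddressProtocol.newBuilder.setHostname("somehost").setPort(1234).build)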
- // this dummy test makes sure that the whole test class doesn't fail because of missing tests - assert(true) - } } diff --git a/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPProtobufProducerConsumerTest.scala b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPProtobufProducerConsumerTest.scala new file mode 100644 index 0000000000..5d03dae5c2 --- /dev/null +++ b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPProtobufProducerConsumerTest.scala @@ -0,0 +1,43 @@ +package se.scalablesolutions.akka.amqp.test + +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ +import org.scalatest.matchers.MustMatchers +import org.scalatest.junit.JUnitSuite +import se.scalablesolutions.akka.amqp.AMQP +import org.junit.Test +import org.multiverse.api.latches.StandardLatch +import java.util.concurrent.TimeUnit +import se.scalablesolutions.akka.amqp.rpc.RPC +import se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol + +class AMQPProtobufProducerConsumerTest extends JUnitSuite with MustMatchers { + + @Test + def consumerMessage = if (AMQPTest.enabled) AMQPTest.withCleanEndState { + + val connection = AMQP.newConnection() + + val responseLatch = new StandardLatch + + RPC.newProtobufRpcServer(connection, "protoexchange", requestHandler) + + val request = AddressProtocol.newBuilder.setHostname("testhost").setPort(4321).build + + def responseHandler(response: AddressProtocol) = { + assert(response.getHostname == request.getHostname.reverse) + responseLatch.open + } + AMQP.newProtobufConsumer(connection, "", responseHandler, Some("proto.reply.key")) + + val producer = AMQP.newProtobufProducer[AddressProtocol](connection, "protoexchange") + producer.send(request, Some("proto.reply.key")) + + responseLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) + } + + def requestHandler(request: AddressProtocol): AddressProtocol = { + AddressProtocol.newBuilder.setHostname(request.getHostname.reverse).setPort(request.getPort).build + } +} \ No newline at end of file diff --git a/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPRpcClientServerTest.scala b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPRpcClientServerTest.scala new file mode 100644 index 0000000000..7de8044314 --- /dev/null +++ b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPRpcClientServerTest.scala @@ -0,0 +1,61 @@ +package se.scalablesolutions.akka.amqp.test + +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ + +import se.scalablesolutions.akka.amqp._ +import rpc.RPC +import rpc.RPC.{RpcClientSerializer, RpcServerSerializer} +import se.scalablesolutions.akka.actor.Actor._ +import org.scalatest.matchers.MustMatchers +import java.util.concurrent.{CountDownLatch, TimeUnit} +import se.scalablesolutions.akka.amqp.AMQP._ +import org.scalatest.junit.JUnitSuite +import org.junit.Test + +class AMQPRpcClientServerTest extends JUnitSuite with MustMatchers { + + @Test + def consumerMessage = if (AMQPTest.enabled) AMQPTest.withCleanEndState { + + val connection = AMQP.newConnection() + + val countDown = new CountDownLatch(3) + val channelCallback = actor { + case Started => countDown.countDown + case Restarting => () + case Stopped => () + } + + val exchangeParameters = ExchangeParameters("text_topic_exchange", ExchangeType.Topic) + val channelParameters = ChannelParameters(channelCallback + = Some(channelCallback)) + + val rpcServerSerializer = new RpcServerSerializer[String, Int]( + new FromBinary[String] { + def 
fromBinary(bytes: Array[Byte]) = new String(bytes) + }, new ToBinary[Int] { + def toBinary(t: Int) = Array(t.toByte) + }) + + def requestHandler(request: String) = 3 + + val rpcServer = RPC.newRpcServer[String, Int](connection, exchangeParameters, "rpc.routing", rpcServerSerializer, + requestHandler, channelParameters = Some(channelParameters)) + + val rpcClientSerializer = new RpcClientSerializer[String, Int]( + new ToBinary[String] { + def toBinary(t: String) = t.getBytes + }, new FromBinary[Int] { + def fromBinary(bytes: Array[Byte]) = bytes.head.toInt + }) + + val rpcClient = RPC.newRpcClient[String, Int](connection, exchangeParameters, "rpc.routing", rpcClientSerializer, + channelParameters = Some(channelParameters)) + + countDown.await(2, TimeUnit.SECONDS) must be(true) + val response = rpcClient !! "some_payload" + response must be(Some(3)) + } +} diff --git a/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPRpcProtobufTest.scala b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPRpcProtobufTest.scala new file mode 100644 index 0000000000..6b796374a6 --- /dev/null +++ b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPRpcProtobufTest.scala @@ -0,0 +1,49 @@ +package se.scalablesolutions.akka.amqp.test + +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ +import org.scalatest.matchers.MustMatchers +import org.scalatest.junit.JUnitSuite +import se.scalablesolutions.akka.amqp.AMQP +import se.scalablesolutions.akka.remote.protocol.RemoteProtocol.AddressProtocol +import org.junit.Test +import se.scalablesolutions.akka.amqp.rpc.RPC +import org.multiverse.api.latches.StandardLatch +import java.util.concurrent.TimeUnit + +class AMQPRpcProtobufTest extends JUnitSuite with MustMatchers { + + @Test + def consumerMessage = if (AMQPTest.enabled) AMQPTest.withCleanEndState { + + val connection = AMQP.newConnection() + + RPC.newProtobufRpcServer(connection, "protoservice", requestHandler) + + val protobufClient = RPC.newProtobufRpcClient[AddressProtocol, AddressProtocol](connection, "protoservice") + + val request = AddressProtocol.newBuilder.setHostname("testhost").setPort(4321).build + + protobufClient.call(request) match { + case Some(response) => assert(response.getHostname == request.getHostname.reverse) + case None => fail("no response") + } + + val aSyncLatch = new StandardLatch + protobufClient.callAsync(request) { + case Some(response) => { + assert(response.getHostname == request.getHostname.reverse) + aSyncLatch.open + } + case None => fail("no response") + } + + aSyncLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) + + } + + def requestHandler(request: AddressProtocol): AddressProtocol = { + AddressProtocol.newBuilder.setHostname(request.getHostname.reverse).setPort(request.getPort).build + } +} \ No newline at end of file diff --git a/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPRpcStringTest.scala b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPRpcStringTest.scala new file mode 100644 index 0000000000..0a55fda954 --- /dev/null +++ b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPRpcStringTest.scala @@ -0,0 +1,47 @@ +package se.scalablesolutions.akka.amqp.test + +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ +import org.scalatest.matchers.MustMatchers +import org.scalatest.junit.JUnitSuite +import se.scalablesolutions.akka.amqp.AMQP +import org.junit.Test +import se.scalablesolutions.akka.amqp.rpc.RPC +import org.multiverse.api.latches.StandardLatch +import 
java.util.concurrent.TimeUnit + +class AMQPRpcStringTest extends JUnitSuite with MustMatchers { + + @Test + def consumerMessage = if (AMQPTest.enabled) AMQPTest.withCleanEndState { + + val connection = AMQP.newConnection() + + RPC.newStringRpcServer(connection, "stringservice", requestHandler) + + val protobufClient = RPC.newStringRpcClient(connection, "stringservice") + + val request = "teststring" + + protobufClient.call(request) match { + case Some(response) => assert(response == request.reverse) + case None => fail("no response") + } + + val aSyncLatch = new StandardLatch + protobufClient.callAsync(request) { + case Some(response) => { + assert(response == request.reverse) + aSyncLatch.open + } + case None => fail("no response") + } + + aSyncLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) + } + + def requestHandler(request: String): String= { + request.reverse + } +} \ No newline at end of file diff --git a/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPStringProducerConsumerTest.scala b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPStringProducerConsumerTest.scala new file mode 100644 index 0000000000..bbb77c51a7 --- /dev/null +++ b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPStringProducerConsumerTest.scala @@ -0,0 +1,44 @@ +package se.scalablesolutions.akka.amqp.test + +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ +import org.scalatest.matchers.MustMatchers +import org.scalatest.junit.JUnitSuite +import se.scalablesolutions.akka.amqp.AMQP +import org.junit.Test +import org.multiverse.api.latches.StandardLatch +import java.util.concurrent.TimeUnit +import se.scalablesolutions.akka.amqp.rpc.RPC + +class AMQPStringProducerConsumerTest extends JUnitSuite with MustMatchers { + + @Test + def consumerMessage = if (AMQPTest.enabled) AMQPTest.withCleanEndState { + + val connection = AMQP.newConnection() + + val responseLatch = new StandardLatch + + RPC.newStringRpcServer(connection, "stringexchange", requestHandler) + + val request = "somemessage" + + def responseHandler(response: String) = { + + assert(response == request.reverse) + responseLatch.open + } + AMQP.newStringConsumer(connection, "", responseHandler, Some("string.reply.key")) + + val producer = AMQP.newStringProducer(connection, "stringexchange") + producer.send(request, Some("string.reply.key")) + + responseLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) + } + + def requestHandler(request: String): String= { + println("###### Reverse") + request.reverse + } +} \ No newline at end of file diff --git a/akka-amqp/src/test/scala/AMQPTest.scala b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPTest.scala similarity index 51% rename from akka-amqp/src/test/scala/AMQPTest.scala rename to akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPTest.scala index 5ff9157bc5..2930ce4e68 100644 --- a/akka-amqp/src/test/scala/AMQPTest.scala +++ b/akka-amqp/src/test/scala/se/scalablesolutions/akka/amqp/test/AMQPTest.scala @@ -4,6 +4,16 @@ package se.scalablesolutions.akka.amqp.test +import se.scalablesolutions.akka.amqp.AMQP object AMQPTest { + def enabled = false + + def withCleanEndState(action: => Unit) { + try { + action + } finally { + AMQP.shutdownAll + } + } } diff --git a/akka-core/src/main/scala/actor/ActorRef.scala b/akka-core/src/main/scala/actor/ActorRef.scala index 40d7ad8544..e0c26a4bf7 100644 --- a/akka-core/src/main/scala/actor/ActorRef.scala +++ b/akka-core/src/main/scala/actor/ActorRef.scala @@ -13,7 +13,7 @@ import 
se.scalablesolutions.akka.stm.TransactionManagement._ import se.scalablesolutions.akka.stm.{TransactionManagement, TransactionSetAbortedException} import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ import se.scalablesolutions.akka.remote.{RemoteNode, RemoteServer, RemoteClient, MessageSerializer, RemoteRequestProtocolIdFactory} -import se.scalablesolutions.akka.serialization.Serializer +import se.scalablesolutions.akka.serialization.{Serializer, BinaryString} import se.scalablesolutions.akka.util.{HashCode, Logging, UUID, ReentrantGuard} import RemoteActorSerialization._ @@ -1253,6 +1253,15 @@ class LocalActorRef private[akka]( } else message } +/** + * System messages for RemoteActorRef. + * + * @author Jonas Bonér + */ +object RemoteActorSystemMessage { + val Stop = BinaryString("RemoteActorRef:stop") +} + /** * Remote ActorRef that is used when referencing the Actor on a different node than its "home" node. * This reference is network-aware (remembers its origin) and immutable. @@ -1263,6 +1272,7 @@ private[akka] case class RemoteActorRef private[akka] ( uuuid: String, val className: String, val hostname: String, val port: Int, _timeout: Long, loader: Option[ClassLoader]) // uuid: String, className: String, hostname: String, port: Int, timeOut: Long, isOnRemoteHost: Boolean) extends ActorRef { extends ActorRef { + _uuid = uuuid timeout = _timeout @@ -1291,6 +1301,7 @@ private[akka] case class RemoteActorRef private[akka] ( def stop: Unit = { _isRunning = false _isShutDown = true + postMessageToMailbox(RemoteActorSystemMessage.Stop, None) } /** diff --git a/akka-core/src/main/scala/actor/TypedActor.scala b/akka-core/src/main/scala/actor/TypedActor.scala index c171a75211..78b5740344 100644 --- a/akka-core/src/main/scala/actor/TypedActor.scala +++ b/akka-core/src/main/scala/actor/TypedActor.scala @@ -8,7 +8,7 @@ import Actor._ import se.scalablesolutions.akka.config.FaultHandlingStrategy import se.scalablesolutions.akka.remote.protocol.RemoteProtocol._ import se.scalablesolutions.akka.remote.{MessageSerializer, RemoteClient, RemoteRequestProtocolIdFactory} -import se.scalablesolutions.akka.dispatch.{MessageDispatcher, Future, CompletableFuture} +import se.scalablesolutions.akka.dispatch.{MessageDispatcher, Future, CompletableFuture, Dispatchers} import se.scalablesolutions.akka.config.ScalaConfig._ import se.scalablesolutions.akka.serialization.Serializer import se.scalablesolutions.akka.util._ @@ -202,6 +202,7 @@ final class TypedActorConfiguration { private[akka] var _transactionRequired = false private[akka] var _host: Option[InetSocketAddress] = None private[akka] var _messageDispatcher: Option[MessageDispatcher] = None + private[akka] var _threadBasedDispatcher: Option[Boolean] = None def timeout = _timeout def timeout(timeout: Duration) : TypedActorConfiguration = { @@ -220,9 +221,16 @@ final class TypedActorConfiguration { } def dispatcher(messageDispatcher: MessageDispatcher) : TypedActorConfiguration = { + if(_threadBasedDispatcher.isDefined) throw new IllegalArgumentException("Cannot specify both 'threadBasedDispatcher()' and 'dispatcher()'") _messageDispatcher = Some(messageDispatcher) this } + + def threadBasedDispatcher() : TypedActorConfiguration = { + if(_messageDispatcher.isDefined) throw new IllegalArgumentException("Cannot specify both 'threadBasedDispatcher()' and 'dispatcher()'") + _threadBasedDispatcher = Some(true) + this + } } /** @@ -324,6 +332,7 @@ object TypedActor extends Logging { def newInstance[T](intfClass: Class[T], targetClass: Class[_], 
config: TypedActorConfiguration): T = { val actor = actorOf(new Dispatcher(config._transactionRequired)) if (config._messageDispatcher.isDefined) actor.dispatcher = config._messageDispatcher.get + if (config._threadBasedDispatcher.isDefined) actor.dispatcher = Dispatchers.newThreadBasedDispatcher(actor) newInstance(intfClass, newTypedActor(targetClass), actor, config._host, config.timeout) } diff --git a/akka-core/src/main/scala/config/SupervisionConfig.scala b/akka-core/src/main/scala/config/SupervisionConfig.scala index cb0829704d..071fdf0c12 100644 --- a/akka-core/src/main/scala/config/SupervisionConfig.scala +++ b/akka-core/src/main/scala/config/SupervisionConfig.scala @@ -4,7 +4,7 @@ package se.scalablesolutions.akka.config -import se.scalablesolutions.akka.actor.{Actor, ActorRef} +import se.scalablesolutions.akka.actor.{ActorRef, UntypedActorRef} import se.scalablesolutions.akka.dispatch.MessageDispatcher sealed abstract class FaultHandlingStrategy @@ -25,11 +25,15 @@ object ScalaConfig { case class SupervisorConfig(restartStrategy: RestartStrategy, worker: List[Server]) extends Server class Supervise(val actorRef: ActorRef, val lifeCycle: LifeCycle, _remoteAddress: RemoteAddress) extends Server { + def this(actorRef: UntypedActorRef, lifeCycle: LifeCycle, _remoteAddress: RemoteAddress) = + this(actorRef.actorRef, lifeCycle, _remoteAddress) val remoteAddress: Option[RemoteAddress] = if (_remoteAddress eq null) None else Some(_remoteAddress) } object Supervise { def apply(actorRef: ActorRef, lifeCycle: LifeCycle, remoteAddress: RemoteAddress) = new Supervise(actorRef, lifeCycle, remoteAddress) def apply(actorRef: ActorRef, lifeCycle: LifeCycle) = new Supervise(actorRef, lifeCycle, null) + def apply(actorRef: UntypedActorRef, lifeCycle: LifeCycle, remoteAddress: RemoteAddress) = new Supervise(actorRef, lifeCycle, remoteAddress) + def apply(actorRef: UntypedActorRef, lifeCycle: LifeCycle) = new Supervise(actorRef, lifeCycle, null) def unapply(supervise: Supervise) = Some((supervise.actorRef, supervise.lifeCycle, supervise.remoteAddress)) } @@ -215,6 +219,9 @@ object JavaConfig { def newSupervised(actorRef: ActorRef) = se.scalablesolutions.akka.config.ScalaConfig.Supervise(actorRef, lifeCycle.transform) + + def newSupervised(actorRef: UntypedActorRef) = + se.scalablesolutions.akka.config.ScalaConfig.Supervise(actorRef, lifeCycle.transform) } } diff --git a/akka-core/src/main/scala/remote/MessageSerializer.scala b/akka-core/src/main/scala/remote/MessageSerializer.scala index 24269c7f8e..8ef6f5d590 100644 --- a/akka-core/src/main/scala/remote/MessageSerializer.scala +++ b/akka-core/src/main/scala/remote/MessageSerializer.scala @@ -25,7 +25,6 @@ object MessageSerializer extends Logging { } def deserialize(messageProtocol: MessageProtocol): Any = { - log.debug("scheme = " + messageProtocol.getSerializationScheme) messageProtocol.getSerializationScheme match { case SerializationSchemeType.JAVA => unbox(SERIALIZER_JAVA.fromBinary(messageProtocol.getMessage.toByteArray, None)) diff --git a/akka-core/src/main/scala/remote/RemoteClient.scala b/akka-core/src/main/scala/remote/RemoteClient.scala index c1bd574c3e..fefb1521ed 100644 --- a/akka-core/src/main/scala/remote/RemoteClient.scala +++ b/akka-core/src/main/scala/remote/RemoteClient.scala @@ -44,9 +44,16 @@ object RemoteRequestProtocolIdFactory { * Life-cycle events for RemoteClient. 
*/ sealed trait RemoteClientLifeCycleEvent -case class RemoteClientError(@BeanProperty val cause: Throwable, @BeanProperty val host: String, @BeanProperty val port: Int) extends RemoteClientLifeCycleEvent -case class RemoteClientDisconnected(@BeanProperty val host: String, @BeanProperty val port: Int) extends RemoteClientLifeCycleEvent -case class RemoteClientConnected(@BeanProperty val host: String, @BeanProperty val port: Int) extends RemoteClientLifeCycleEvent +case class RemoteClientError( + @BeanProperty val cause: Throwable, + @BeanProperty val host: String, + @BeanProperty val port: Int) extends RemoteClientLifeCycleEvent +case class RemoteClientDisconnected( + @BeanProperty val host: String, + @BeanProperty val port: Int) extends RemoteClientLifeCycleEvent +case class RemoteClientConnected( + @BeanProperty val host: String, + @BeanProperty val port: Int) extends RemoteClientLifeCycleEvent class RemoteClientException private[akka](message: String) extends RuntimeException(message) @@ -259,23 +266,23 @@ class RemoteClientPipelineFactory( remoteAddress: SocketAddress, timer: HashedWheelTimer, client: RemoteClient) extends ChannelPipelineFactory { - def getPipeline: ChannelPipeline = { - def join(ch: ChannelHandler*) = Array[ChannelHandler](ch:_*) + def getPipeline: ChannelPipeline = { + def join(ch: ChannelHandler*) = Array[ChannelHandler](ch: _*) val engine = RemoteServerSslContext.client.createSSLEngine() engine.setEnabledCipherSuites(engine.getSupportedCipherSuites) //TODO is this sensible? engine.setUseClientMode(true) - val ssl = if(RemoteServer.SECURE) join(new SslHandler(engine)) else join() + val ssl = if (RemoteServer.SECURE) join(new SslHandler(engine)) else join() val timeout = new ReadTimeoutHandler(timer, RemoteClient.READ_TIMEOUT.toMillis.toInt) val lenDec = new LengthFieldBasedFrameDecoder(1048576, 0, 4, 0, 4) val lenPrep = new LengthFieldPrepender(4) val protobufDec = new ProtobufDecoder(RemoteReplyProtocol.getDefaultInstance) val protobufEnc = new ProtobufEncoder - val(enc,dec) = RemoteServer.COMPRESSION_SCHEME match { - case "zlib" => (join(new ZlibEncoder(RemoteServer.ZLIB_COMPRESSION_LEVEL)),join(new ZlibDecoder)) - case _ => (join(),join()) + val (enc, dec) = RemoteServer.COMPRESSION_SCHEME match { + case "zlib" => (join(new ZlibEncoder(RemoteServer.ZLIB_COMPRESSION_LEVEL)), join(new ZlibDecoder)) + case _ => (join(), join()) } val remoteClient = new RemoteClientHandler(name, futures, supervisors, bootstrap, remoteAddress, timer, client) diff --git a/akka-core/src/main/scala/remote/RemoteServer.scala b/akka-core/src/main/scala/remote/RemoteServer.scala index 89c0a6e437..ec33bd8600 100644 --- a/akka-core/src/main/scala/remote/RemoteServer.scala +++ b/akka-core/src/main/scala/remote/RemoteServer.scala @@ -309,10 +309,10 @@ object RemoteServerSslContext { //val algorithm = Option(Security.getProperty("ssl.KeyManagerFactory.algorithm")).getOrElse("SunX509") //val store = KeyStore.getInstance("JKS") val s = SSLContext.getInstance(protocol) - s.init(null,null,null) + s.init(null, null, null) val c = SSLContext.getInstance(protocol) - c.init(null,null,null) - (c,s) + c.init(null, null, null) + (c, s) } } @@ -429,25 +429,29 @@ class RemoteServerHandler( if (request.hasSender) Some(RemoteActorSerialization.fromProtobufToRemoteActorRef(request.getSender, applicationLoader)) else None - if (request.getIsOneWay) actorRef.!(message)(sender) - else { - try { - val resultOrNone = (actorRef.!!(message)(sender)).as[AnyRef] - val result = if (resultOrNone.isDefined) 
resultOrNone.get else null + message match { // first match on system messages + case RemoteActorSystemMessage.Stop => actorRef.stop + case _ => // then match on user defined messages + if (request.getIsOneWay) actorRef.!(message)(sender) + else { + try { + val resultOrNone = (actorRef.!!(message)(sender)).as[AnyRef] + val result = if (resultOrNone.isDefined) resultOrNone.get else null - log.debug("Returning result from actor invocation [%s]", result) - val replyBuilder = RemoteReplyProtocol.newBuilder - .setId(request.getId) - .setMessage(MessageSerializer.serialize(result)) - .setIsSuccessful(true) - .setIsActor(true) + log.debug("Returning result from actor invocation [%s]", result) + val replyBuilder = RemoteReplyProtocol.newBuilder + .setId(request.getId) + .setMessage(MessageSerializer.serialize(result)) + .setIsSuccessful(true) + .setIsActor(true) - if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid) - channel.write(replyBuilder.build) + if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid) + channel.write(replyBuilder.build) - } catch { - case e: Throwable => channel.write(createErrorReplyMessage(e, request, true)) - } + } catch { + case e: Throwable => channel.write(createErrorReplyMessage(e, request, true)) + } + } } } diff --git a/akka-core/src/main/scala/serialization/Serializer.scala b/akka-core/src/main/scala/serialization/Serializer.scala index 79be0bec8b..1365a7d4c1 100644 --- a/akka-core/src/main/scala/serialization/Serializer.scala +++ b/akka-core/src/main/scala/serialization/Serializer.scala @@ -39,8 +39,7 @@ class SerializerFactory { * @author Jonas Bonér */ object Serializer { - val EMPTY_CLASS_ARRAY = Array[Class[_]]() - val EMPTY_ANY_REF_ARRAY = Array[AnyRef]() + val ARRAY_OF_BYTE_ARRAY = Array[Class[_]](classOf[Array[Byte]]) object NOOP extends NOOP class NOOP extends Serializer { @@ -85,10 +84,7 @@ object Serializer { def fromBinary(bytes: Array[Byte], clazz: Option[Class[_]]): AnyRef = { if (!clazz.isDefined) throw new IllegalArgumentException( "Need a protobuf message class to be able to serialize bytes using protobuf") - // TODO: should we cache this method lookup? 
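[Note, not part of the patch] A minimal sketch of what the remote-stop change above buys: RemoteActorRef.stop now posts RemoteActorSystemMessage.Stop, and the RemoteServerHandler matches system messages before user messages, so stopping a remote reference also stops the actor on its home node. The method name and message below are illustrative only.

import se.scalablesolutions.akka.actor.ActorRef

// Illustrative sketch (assumes a RemoteActorRef obtained through the remote client API)
def shutDownRemote(remoteActor: ActorRef) {
  remoteActor ! "last message" // ordinary user message, dispatched exactly as before
  remoteActor.stop             // additionally posts RemoteActorSystemMessage.Stop, which the
                               // server-side handler turns into actorRef.stop on the home node
}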
- val message = clazz.get.getDeclaredMethod( - "getDefaultInstance", EMPTY_CLASS_ARRAY: _*).invoke(null, EMPTY_ANY_REF_ARRAY: _*).asInstanceOf[Message] - message.toBuilder().mergeFrom(bytes).build + clazz.get.getDeclaredMethod("parseFrom", ARRAY_OF_BYTE_ARRAY: _*).invoke(null, bytes).asInstanceOf[Message] } def fromBinary(bytes: Array[Byte], clazz: Class[_]): AnyRef = { diff --git a/akka-core/src/main/scala/stm/Ref.scala b/akka-core/src/main/scala/stm/Ref.scala index 7d99c673a6..d660de1377 100644 --- a/akka-core/src/main/scala/stm/Ref.scala +++ b/akka-core/src/main/scala/stm/Ref.scala @@ -42,13 +42,12 @@ class Ref[T](initialOpt: Option[T] = None) def swap(elem: T) = set(elem) def alter(f: T => T): T = { - ensureNotNull val value = f(this.get) set(value) value } - def getOption: Option[T] = Option(this.get) + def opt: Option[T] = Option(this.get) def getOrWait: T = getOrAwait @@ -94,7 +93,4 @@ class Ref[T](initialOpt: Option[T] = None) def toLeft[X](right: => X) = if (isEmpty) Right(right) else Left(this.get) - - private def ensureNotNull = - if (isNull) throw new RuntimeException("Cannot alter Ref's value when it is null") } diff --git a/akka-core/src/test/scala/stm/RefSpec.scala b/akka-core/src/test/scala/stm/RefSpec.scala index f04c1a7c44..2a8d39a065 100644 --- a/akka-core/src/test/scala/stm/RefSpec.scala +++ b/akka-core/src/test/scala/stm/RefSpec.scala @@ -11,7 +11,7 @@ class RefSpec extends WordSpec with MustMatchers { "optionally accept an initial value" in { val emptyRef = Ref[Int] - val empty = atomic { emptyRef.getOption } + val empty = atomic { emptyRef.opt } empty must be(None) @@ -74,16 +74,6 @@ class RefSpec extends WordSpec with MustMatchers { value must be (3) } - "not be changeable using alter if no value has been set" in { - val ref = Ref[Int] - - def increment = atomic { - ref alter (_ + 1) - } - - evaluating { increment } must produce [RuntimeException] - } - "be able to be mapped" in { val ref1 = Ref(1) @@ -147,13 +137,13 @@ class RefSpec extends WordSpec with MustMatchers { for (value <- ref1 if value < 2) yield value } - val optLess2 = atomic { refLess2.getOption } + val optLess2 = atomic { refLess2.opt } val refGreater2 = atomic { for (value <- ref1 if value > 2) yield value } - val optGreater2 = atomic { refGreater2.getOption } + val optGreater2 = atomic { refGreater2.opt } optLess2 must be (Some(1)) optGreater2 must be (None) diff --git a/akka-persistence/akka-persistence-common/src/main/scala/Storage.scala b/akka-persistence/akka-persistence-common/src/main/scala/Storage.scala index a734d4d815..4519fc7ede 100644 --- a/akka-persistence/akka-persistence-common/src/main/scala/Storage.scala +++ b/akka-persistence/akka-persistence-common/src/main/scala/Storage.scala @@ -292,7 +292,7 @@ trait PersistentRef[T] extends Transactional with Committable with Abortable { ref.swap(elem) } - def get: Option[T] = if (ref.isDefined) ref.getOption else storage.getRefStorageFor(uuid) + def get: Option[T] = if (ref.isDefined) ref.opt else storage.getRefStorageFor(uuid) def isDefined: Boolean = ref.isDefined || storage.getRefStorageFor(uuid).isDefined diff --git a/akka-samples/akka-sample-ants/src/main/scala/Ants.scala b/akka-samples/akka-sample-ants/src/main/scala/Ants.scala index b1e5cee0b8..1fb0dea693 100644 --- a/akka-samples/akka-sample-ants/src/main/scala/Ants.scala +++ b/akka-samples/akka-sample-ants/src/main/scala/Ants.scala @@ -70,7 +70,7 @@ object World { private val snapshotFactory = TransactionFactory(readonly = true, familyName = "snapshot", hooks = false) - def snapshot = 
atomic(snapshotFactory) { Array.tabulate(Dim, Dim)(place(_, _).getOption) } + def snapshot = atomic(snapshotFactory) { Array.tabulate(Dim, Dim)(place(_, _).opt) } def place(loc: (Int, Int)) = places(loc._1)(loc._2) diff --git a/akka-spring/akka-spring-test-java/pom.xml b/akka-spring/akka-spring-test-java/pom.xml deleted file mode 100644 index 2d03b53032..0000000000 --- a/akka-spring/akka-spring-test-java/pom.xml +++ /dev/null @@ -1,339 +0,0 @@ - - 4.0.0 - - Akka Spring Tests in Java - akka-spring-test-java - se.scalablesolutions.akka - 0.9 - jar - - - 2.8.0.Beta1 - 0.5.2 - 1.1.5 - 1.9.18-i - - - - - akka - Akka Repo - http://www.scalablesolutions.se/akka/repository/ - - - project.embedded.module - Project Embedded Repository - file://${env.AKKA_HOME}/embedded-repo - - - repo1.maven - Maven Main Repository - http://repo1.maven.org/maven2 - - - scala-tools-snapshots - Scala-Tools Maven2 Snapshot Repository - http://scala-tools.org/repo-snapshots - - - scala-tools - Scala-Tools Maven2 Repository - http://scala-tools.org/repo-releases - - - lag - Configgy's' Repository - http://www.lag.net/repo - - - multiverse-releases - http://multiverse.googlecode.com/svn/maven-repository/releases - - false - - - - multiverse-snaphosts - http://multiverse.googlecode.com/svn/maven-repository/snapshots - - - maven2-repository.dev.java.net - Java.net Repository for Maven - http://download.java.net/maven/2 - - - java.net - Java.net Legacy Repository for Maven - http://download.java.net/maven/1 - legacy - - - guiceyfruit.release - GuiceyFruit Release Repository - http://guiceyfruit.googlecode.com/svn/repo/releases/ - - false - - - true - - - - guiceyfruit.snapshot - GuiceyFruit Snapshot Repository - http://guiceyfruit.googlecode.com/svn/repo/snapshots/ - - true - - - false - - - - guice-maven - guice maven - http://guice-maven.googlecode.com/svn/trunk - - - google-maven-repository - Google Maven Repository - http://google-maven-repository.googlecode.com/svn/repository/ - - - repository.codehaus.org - Codehaus Maven Repository - http://repository.codehaus.org - - true - - - - repository.jboss.org - JBoss Repository for Maven - http://repository.jboss.org/maven2 - - false - - - - nexus.griddynamics.net - Grid Dynamics Maven Repository - https://nexus.griddynamics.net/nexus/content/groups/public - - false - - - - databinder.net/repo/ - dbDispatch Repository for Maven - http://databinder.net/repo - - false - - - - - - - - org.scala-lang - scala-compiler - 2.8.0.Beta1 - - - org.scala-lang - scala-library - 2.8.0.Beta1 - - - - - se.scalablesolutions.akka - akka-core_2.8.0.RC3 - 0.9.1 - - - se.scalablesolutions.akka - akka-util_2.8.0.Beta1 - 0.8.1 - - - se.scalablesolutions.akka - akka-util-java_2.8.0.Beta1 - 0.8.1 - - - se.scalablesolutions.akka - akka-spring_2.8.0.RC3 - 0.9.1 - - - org.springframework - spring - - - - - - - org.springframework - spring-beans - 3.0.1.RELEASE - - - org.springframework - spring-context - 3.0.1.RELEASE - - - net.lag - configgy - 2.8.0.Beta1-1.5-SNAPSHOT - - - org.codehaus.aspectwerkz - aspectwerkz-nodeps-jdk5 - 2.1 - - - org.codehaus.aspectwerkz - aspectwerkz-jdk5 - 2.1 - - - org.guiceyfruit - guice-core - 2.0-beta-4 - - - com.google.protobuf - protobuf-java - 2.2.0 - - - com.google.protobuf - protobuf-java - 2.2.0 - - - org.multiverse - multiverse-alpha - 0.4-SNAPSHOT - - - commons-io - commons-io - 1.4 - - - org.jboss.netty - netty - 3.2.0.BETA1 - - - net.databinder - dispatch-json_2.8.0.Beta1 - 0.6.6 - - - net.databinder - dispatch-http_2.8.0.Beta1 - 0.6.6 - - - sjson.json - sjson - 
0.5-SNAPSHOT-2.8.Beta1 - - - - sbinary - sbinary - 2.8.0.Beta1-2.8.0.Beta1-0.3.1-SNAPSHOT - - - org.codehaus.jackson - jackson-mapper-asl - 1.2.1 - - - org.codehaus.jackson - jackson-core-asl - 1.2.1 - - - voldemort.store.compress - h2-lzf - 1.0 - - - - jsr166x - jsr166x - 1.0 - - - org.apache.geronimo.specs - geronimo-jta_1.1_spec - 1.1.1 - - - - - junit - junit - 4.5 - test - - - - - src/main/java - src/test/java - - - - org.apache.maven.plugins - maven-compiler-plugin - - 1.5 - 1.5 - - **/* - - - - - org.apache.maven.plugins - maven-surefire-plugin - - - **/*Persistent* - - - - - - - false - src/test/resources - - - false - src/main/resources - - - false - src/test/java - - ** - - - **/*.java - - - - - diff --git a/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/DispatcherConfigurationTest.java b/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/DispatcherConfigurationTest.java deleted file mode 100644 index 862d781802..0000000000 --- a/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/DispatcherConfigurationTest.java +++ /dev/null @@ -1,143 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package se.scalablesolutions.akka.spring; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.util.concurrent.ArrayBlockingQueue; -import java.util.concurrent.SynchronousQueue; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.context.ApplicationContext; -import org.springframework.context.support.ClassPathXmlApplicationContext; - -import se.scalablesolutions.akka.dispatch.ExecutorBasedEventDrivenDispatcher; -import se.scalablesolutions.akka.dispatch.ReactorBasedThreadPoolEventDrivenDispatcher; -import se.scalablesolutions.akka.dispatch.ReactorBasedSingleThreadEventDrivenDispatcher; -import se.scalablesolutions.akka.dispatch.MessageDispatcher; -import se.scalablesolutions.akka.dispatch.ThreadPoolBuilder; - -import se.scalablesolutions.akka.spring.foo.MyPojo; - -/** - * Tests for spring configuration of dispatcher configuration. 
- * @author michaelkober - */ -public class DispatcherConfigurationTest { - - private ApplicationContext context = null; - - @Before - public void setUp() { - context = new ClassPathXmlApplicationContext("se/scalablesolutions/akka/spring/foo/dispatcher-config.xml"); - } - - /** - * test for executor-event-driven-dispatcher with array-blocking-queue - */ - @Test - public void testDispatcher() { - MessageDispatcher dispatcher = (MessageDispatcher) context.getBean("executor-event-driven-dispatcher-1"); - ThreadPoolExecutor executor = getThreadPoolExecutorAndAssert(dispatcher); - assertEquals("wrong core pool size", 1, executor.getCorePoolSize()); - assertEquals("wrong max pool size", 20, executor.getMaximumPoolSize()); - assertEquals("wrong keep alive", 3000, executor.getKeepAliveTime(TimeUnit.MILLISECONDS)); - assertTrue("wrong queue type",executor.getQueue() instanceof ArrayBlockingQueue); - assertEquals("wrong capacity", 100, executor.getQueue().remainingCapacity()); - } - - /** - * test for dispatcher via ref - */ - @Test - public void testDispatcherRef() { - MyPojo pojo = (MyPojo) context.getBean("typed-actor-with-dispatcher-ref"); - assertNotNull(pojo); - } - - /** - * test for executor-event-driven-dispatcher with bounded-linked-blocking-queue with unbounded capacity - */ - @Test - public void testDispatcherWithBoundedLinkedBlockingQueueWithUnboundedCapacity() { - MessageDispatcher dispatcher = (MessageDispatcher) context.getBean("executor-event-driven-dispatcher-2"); - ThreadPoolExecutor executor = getThreadPoolExecutorAndAssert(dispatcher); - assertTrue("wrong queue type", executor.getQueue() instanceof LinkedBlockingQueue); - assertEquals("wrong capacity", Integer.MAX_VALUE, executor.getQueue().remainingCapacity()); - } - - /** - * test for executor-event-driven-dispatcher with unbounded-linked-blocking-queue with bounded capacity - */ - @Test - public void testDispatcherWithLinkedBlockingQueueWithBoundedCapacity() { - MessageDispatcher dispatcher = (MessageDispatcher) context.getBean("executor-event-driven-dispatcher-4"); - ThreadPoolExecutor executor = getThreadPoolExecutorAndAssert(dispatcher); - assertTrue("wrong queue type", executor.getQueue() instanceof LinkedBlockingQueue); - assertEquals("wrong capacity", 55, executor.getQueue().remainingCapacity()); - } - - /** - * test for executor-event-driven-dispatcher with unbounded-linked-blocking-queue with unbounded capacity - */ - @Test - public void testDispatcherWithLinkedBlockingQueueWithUnboundedCapacity() { - MessageDispatcher dispatcher = (MessageDispatcher) context.getBean("executor-event-driven-dispatcher-5"); - ThreadPoolExecutor executor = getThreadPoolExecutorAndAssert(dispatcher); - assertTrue("wrong queue type", executor.getQueue() instanceof LinkedBlockingQueue); - assertEquals("wrong capacity", Integer.MAX_VALUE, executor.getQueue().remainingCapacity()); - } - - /** - * test for executor-event-driven-dispatcher with synchronous-queue - */ - @Test - public void testDispatcherWithSynchronousQueue() { - MessageDispatcher dispatcher = (MessageDispatcher) context.getBean("executor-event-driven-dispatcher-6"); - ThreadPoolExecutor executor = getThreadPoolExecutorAndAssert(dispatcher); - assertTrue("wrong queue type", executor.getQueue() instanceof SynchronousQueue); - } - - /** - * test for reactor-based-thread-pool-event-driven-dispatcher with synchronous-queue - */ - @Test - public void testReactorBasedThreadPoolDispatcherWithSynchronousQueue() { - MessageDispatcher dispatcher = (MessageDispatcher) 
context.getBean("reactor-based-thread-pool-event-driven-dispatcher"); - assertTrue(dispatcher instanceof ReactorBasedThreadPoolEventDrivenDispatcher); - assertTrue(dispatcher instanceof ThreadPoolBuilder); - ThreadPoolBuilder pool = (ThreadPoolBuilder) dispatcher; - ThreadPoolExecutor executor = pool.se$scalablesolutions$akka$dispatch$ThreadPoolBuilder$$threadPoolBuilder(); - assertNotNull(executor); - assertTrue("wrong queue type", executor.getQueue() instanceof SynchronousQueue); - } - - /** - * test for reactor-based-single-thread-event-driven-dispatcher with synchronous-queue - */ - @Test - public void testReactorBasedSingleThreadDispatcherWithSynchronousQueue() { - MessageDispatcher dispatcher = (MessageDispatcher) context.getBean("reactor-based-single-thread-event-driven-dispatcher"); - assertTrue(dispatcher instanceof ReactorBasedSingleThreadEventDrivenDispatcher); - } - - /** - * Assert that dispatcher is correct type and get executor. - */ - private ThreadPoolExecutor getThreadPoolExecutorAndAssert(MessageDispatcher dispatcher) { - assertTrue(dispatcher instanceof ExecutorBasedEventDrivenDispatcher); - assertTrue(dispatcher instanceof ThreadPoolBuilder); - ThreadPoolBuilder pool = (ThreadPoolBuilder) dispatcher; - ThreadPoolExecutor executor = pool.se$scalablesolutions$akka$dispatch$ThreadPoolBuilder$$threadPoolBuilder(); - assertNotNull(executor); - return executor; - } - -} diff --git a/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/SupervisorConfigurationTest.java b/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/SupervisorConfigurationTest.java deleted file mode 100644 index c90fd56b72..0000000000 --- a/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/SupervisorConfigurationTest.java +++ /dev/null @@ -1,135 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package se.scalablesolutions.akka.spring; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import net.lag.configgy.Config; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.context.ApplicationContext; -import org.springframework.context.support.ClassPathXmlApplicationContext; - -import se.scalablesolutions.akka.actor.TypedActor; -import se.scalablesolutions.akka.config.TypedActorConfigurator; -import se.scalablesolutions.akka.config.JavaConfig.AllForOne; -import se.scalablesolutions.akka.config.JavaConfig.Component; -import se.scalablesolutions.akka.config.JavaConfig.LifeCycle; -import se.scalablesolutions.akka.config.JavaConfig.Permanent; -import se.scalablesolutions.akka.config.JavaConfig.RemoteAddress; -import se.scalablesolutions.akka.config.JavaConfig.RestartStrategy; -import se.scalablesolutions.akka.remote.RemoteNode; -import se.scalablesolutions.akka.spring.foo.Foo; -import se.scalablesolutions.akka.spring.foo.IBar; -import se.scalablesolutions.akka.spring.foo.MyPojo; -import se.scalablesolutions.akka.spring.foo.StatefulPojo; - -/** - * Testclass for supervisor configuration. 
- * - * @author michaelkober - * - */ -public class SupervisorConfigurationTest { - - private ApplicationContext context = null; - - @Before - public void setUp() { - context = new ClassPathXmlApplicationContext( - "se/scalablesolutions/akka/spring/foo/supervisor-config.xml"); - } - - @Test - public void testSupervision() { - // get TypedActorConfigurator bean from spring context - TypedActorConfigurator myConfigurator = (TypedActorConfigurator) context - .getBean("supervision1"); - // get TypedActors - Foo foo = myConfigurator.getInstance(Foo.class); - assertNotNull(foo); - IBar bar = myConfigurator.getInstance(IBar.class); - assertNotNull(bar); - MyPojo pojo = myConfigurator.getInstance(MyPojo.class); - assertNotNull(pojo); - } - - @Test - public void testTransactionalState() { - TypedActorConfigurator conf = (TypedActorConfigurator) context - .getBean("supervision2"); - StatefulPojo stateful = conf.getInstance(StatefulPojo.class); - stateful.setMapState("testTransactionalState", "some map state"); - stateful.setVectorState("some vector state"); - stateful.setRefState("some ref state"); - assertEquals("some map state", stateful - .getMapState("testTransactionalState")); - assertEquals("some vector state", stateful.getVectorState()); - assertEquals("some ref state", stateful.getRefState()); - } - - @Test - public void testInitTransactionalState() { - StatefulPojo stateful = TypedActor.newInstance(StatefulPojo.class, - 1000, true); - assertTrue("should be inititalized", stateful.isInitialized()); - } - - @Test - public void testSupervisionWithDispatcher() { - TypedActorConfigurator myConfigurator = (TypedActorConfigurator) context - .getBean("supervision-with-dispatcher"); - // get TypedActors - Foo foo = myConfigurator.getInstance(Foo.class); - assertNotNull(foo); - // TODO how to check dispatcher? 
- } - - @Test - public void testRemoteTypedActor() { - new Thread(new Runnable() { - public void run() { - RemoteNode.start(); - } - }).start(); - try { - Thread.currentThread().sleep(1000); - } catch (Exception e) { - } - Foo instance = TypedActor.newRemoteInstance(Foo.class, 2000, "localhost", 9999); - System.out.println(instance.foo()); - } - - - @Test - public void testSupervisedRemoteTypedActor() { - new Thread(new Runnable() { - public void run() { - RemoteNode.start(); - } - }).start(); - try { - Thread.currentThread().sleep(1000); - } catch (Exception e) { - } - - TypedActorConfigurator conf = new TypedActorConfigurator(); - conf.configure( - new RestartStrategy(new AllForOne(), 3, 10000, new Class[] { Exception.class }), - new Component[] { - new Component( - Foo.class, - new LifeCycle(new Permanent()), - 10000, - new RemoteAddress("localhost", 9999)) - }).supervise(); - - Foo instance = conf.getInstance(Foo.class); - assertEquals("foo", instance.foo()); - } - - -} diff --git a/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/TypedActorConfigurationTest.java b/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/TypedActorConfigurationTest.java deleted file mode 100644 index e8931fd1a2..0000000000 --- a/akka-spring/akka-spring-test-java/src/test/java/se/scalablesolutions/akka/spring/TypedActorConfigurationTest.java +++ /dev/null @@ -1,99 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package se.scalablesolutions.akka.spring; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import org.junit.Before; -import org.junit.Test; -import org.springframework.beans.factory.support.DefaultListableBeanFactory; -import org.springframework.beans.factory.xml.XmlBeanDefinitionReader; -import org.springframework.context.ApplicationContext; -import org.springframework.context.support.ClassPathXmlApplicationContext; -import org.springframework.core.io.ClassPathResource; -import org.springframework.core.io.Resource; - -import se.scalablesolutions.akka.config.Config; -import se.scalablesolutions.akka.dispatch.FutureTimeoutException; -import se.scalablesolutions.akka.remote.RemoteNode; -import se.scalablesolutions.akka.spring.foo.MyPojo; - -/** - * Tests for spring configuration of typed actors and supervisor configuration. - * @author michaelkober - */ -public class TypedActorConfigurationTest { - - private ApplicationContext context = null; - - @Before - public void setUp() { - context = new ClassPathXmlApplicationContext("se/scalablesolutions/akka/spring/foo/test-config.xml"); - } - - /** - * Tests that the <akka:typed-actor/> and <akka:supervision/> and <akka:dispatcher/> element - * can be used as a top level element. 
- */ - @Test - public void testParse() throws Exception { - final Resource CONTEXT = new ClassPathResource("se/scalablesolutions/akka/spring/foo/test-config.xml"); - DefaultListableBeanFactory beanFactory = new DefaultListableBeanFactory(); - XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(beanFactory); - reader.loadBeanDefinitions(CONTEXT); - assertTrue(beanFactory.containsBeanDefinition("simple-typed-actor")); - assertTrue(beanFactory.containsBeanDefinition("remote-typed-actor")); - assertTrue(beanFactory.containsBeanDefinition("supervision1")); - assertTrue(beanFactory.containsBeanDefinition("dispatcher1")); - } - - @Test - public void testSimpleTypedActor() { - MyPojo myPojo = (MyPojo) context.getBean("simple-typed-actor"); - String msg = myPojo.getFoo(); - msg += myPojo.getBar(); - assertEquals("wrong invocation order", "foobar", msg); - } - - @Test(expected = FutureTimeoutException.class) - public void testSimpleTypedActor_Timeout() { - MyPojo myPojo = (MyPojo) context.getBean("simple-typed-actor"); - myPojo.longRunning(); - } - - @Test - public void testSimpleTypedActor_NoTimeout() { - MyPojo myPojo = (MyPojo) context.getBean("simple-typed-actor-long-timeout"); - String msg = myPojo.longRunning(); - assertEquals("this took long", msg); - } - - @Test - public void testTransactionalTypedActor() { - MyPojo myPojo = (MyPojo) context.getBean("transactional-typed-actor"); - String msg = myPojo.getFoo(); - msg += myPojo.getBar(); - assertEquals("wrong invocation order", "foobar", msg); - } - - @Test - public void testRemoteTypedActor() { - new Thread(new Runnable() { - public void run() { - RemoteNode.start(); - } - }).start(); - try { - Thread.currentThread().sleep(1000); - } catch (Exception e) { - } - Config.config(); - - MyPojo myPojo = (MyPojo) context.getBean("remote-typed-actor"); - assertEquals("foo", myPojo.getFoo()); - } - - -} diff --git a/akka-spring/src/main/resources/se/scalablesolutions/akka/spring/akka-0.10.xsd b/akka-spring/src/main/resources/se/scalablesolutions/akka/spring/akka-0.10.xsd index 6dd0ee7681..2a42ec0900 100644 --- a/akka-spring/src/main/resources/se/scalablesolutions/akka/spring/akka-0.10.xsd +++ b/akka-spring/src/main/resources/se/scalablesolutions/akka/spring/akka-0.10.xsd @@ -38,9 +38,11 @@ + + @@ -74,6 +76,7 @@ + @@ -158,6 +161,52 @@ + + + + + + + + + + + + + Name of the implementation class. + + + + + + + The default timeout for '!!' invocations. + + + + + + + Set this to true if messages should have REQUIRES_NEW semantics. + + + + + + + Defines the lifecycle, can be either 'permanent' or 'temporary'. + + + + + + + Supported scopes are 'singleton' and 'prototype'. 
+ + + + + @@ -172,6 +221,13 @@ + + + + + + + @@ -205,6 +261,7 @@ + @@ -224,6 +281,9 @@ + + + diff --git a/akka-spring/src/main/scala/TypedActorFactoryBean.scala b/akka-spring/src/main/scala/ActorFactoryBean.scala similarity index 64% rename from akka-spring/src/main/scala/TypedActorFactoryBean.scala rename to akka-spring/src/main/scala/ActorFactoryBean.scala index 0cb70e5ae3..4bb3e88241 100644 --- a/akka-spring/src/main/scala/TypedActorFactoryBean.scala +++ b/akka-spring/src/main/scala/ActorFactoryBean.scala @@ -20,7 +20,7 @@ import org.springframework.context.{ApplicationContext,ApplicationContextAware} import org.springframework.util.ReflectionUtils import org.springframework.util.StringUtils -import se.scalablesolutions.akka.actor.{AspectInitRegistry, TypedActorConfiguration, TypedActor} +import se.scalablesolutions.akka.actor.{ActorRef, AspectInitRegistry, TypedActorConfiguration, TypedActor, UntypedActor, UntypedActorRef} import se.scalablesolutions.akka.dispatch.MessageDispatcher import se.scalablesolutions.akka.util.{Logging, Duration} @@ -34,17 +34,18 @@ class AkkaBeansException(message: String, cause:Throwable) extends BeansExceptio } /** - * Factory bean for typed actors. + * Factory bean for typed and untyped actors. * * @author michaelkober * @author Johan Rask * @author Martin Krasser * @author Jonas Bonér */ -class TypedActorFactoryBean extends AbstractFactoryBean[AnyRef] with Logging with ApplicationContextAware { +class ActorFactoryBean extends AbstractFactoryBean[AnyRef] with Logging with ApplicationContextAware { import StringReflect._ import AkkaSpringConfigurationTags._ + @BeanProperty var typed: String = "" @BeanProperty var interface: String = "" @BeanProperty var implementation: String = "" @BeanProperty var timeout: Long = _ @@ -78,19 +79,61 @@ class TypedActorFactoryBean extends AbstractFactoryBean[AnyRef] with Logging wit * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() */ def createInstance: AnyRef = { - var argumentList = "" - if (isRemote) argumentList += "r" - if (hasInterface) argumentList += "i" - if (hasDispatcher) argumentList += "d" - val ref = create(argumentList) - setProperties(AspectInitRegistry.initFor(ref).targetInstance) + val ref = typed match { + case TYPED_ACTOR_TAG => val typedActor = createTypedInstance() + setProperties(AspectInitRegistry.initFor(typedActor).targetInstance) + typedActor + case UNTYPED_ACTOR_TAG => createUntypedInstance() + case _ => throw new IllegalArgumentException("Unknown actor type") + } ref } + private[akka] def createTypedInstance() : AnyRef = { + if (interface == null || interface == "") throw new AkkaBeansException( + "The 'interface' part of the 'akka:actor' element in the Spring config file can't be null or empty string") + if (implementation == null || implementation == "") throw new AkkaBeansException( + "The 'implementation' part of the 'akka:typed-actor' element in the Spring config file can't be null or empty string") + + TypedActor.newInstance(interface.toClass, implementation.toClass, createConfig) + } + + /** + * Create an UntypedActor. 
+ */ + private[akka] def createUntypedInstance() : UntypedActorRef = { + if (implementation == null || implementation == "") throw new AkkaBeansException( + "The 'implementation' part of the 'akka:untyped-actor' element in the Spring config file can't be null or empty string") + val untypedActorRef = UntypedActor.actorOf(implementation.toClass) + if (timeout > 0) { + untypedActorRef.setTimeout(timeout) + } + if (transactional) { + untypedActorRef.makeTransactionRequired + } + if (isRemote) { + untypedActorRef.makeRemote(host, port) + } + if (hasDispatcher) { + if (dispatcher.dispatcherType != THREAD_BASED){ + untypedActorRef.setDispatcher(dispatcherInstance()) + } else { + val actorRef = untypedActorRef.actorRef + untypedActorRef.setDispatcher(dispatcherInstance(Some(actorRef))) + } + } + untypedActorRef + } + /** * Stop the typed actor if it is a singleton. */ - override def destroyInstance(instance: AnyRef) = TypedActor.stop(instance) + override def destroyInstance(instance: AnyRef) { + typed match { + case TYPED_ACTOR_TAG => TypedActor.stop(instance) + case UNTYPED_ACTOR_TAG => instance.asInstanceOf[UntypedActorRef].stop + } + } private def setProperties(ref: AnyRef): AnyRef = { if (hasSetDependecies) return ref @@ -114,40 +157,39 @@ class TypedActorFactoryBean extends AbstractFactoryBean[AnyRef] with Logging wit ref } - private[akka] def create(argList: String): AnyRef = { - if (interface == null || interface == "") throw new AkkaBeansException( - "The 'interface' part of the 'akka:actor' element in the Spring config file can't be null or empty string") - if (implementation == null || implementation == "") throw new AkkaBeansException( - "The 'implementation' part of the 'akka:typed-actor' element in the Spring config file can't be null or empty string") - argList match { - case "ri" => TypedActor.newInstance(interface.toClass, implementation.toClass, createConfig.makeRemote(host, port)) - case "i" => TypedActor.newInstance(interface.toClass, implementation.toClass, createConfig) - case "id" => TypedActor.newInstance(interface.toClass, implementation.toClass, createConfig.dispatcher(dispatcherInstance)) - case "rid" => TypedActor.newInstance(interface.toClass, implementation.toClass, createConfig.makeRemote(host, port).dispatcher(dispatcherInstance)) - case _ => TypedActor.newInstance(interface.toClass, implementation.toClass, createConfig) - // case "rd" => TypedActor.newInstance(implementation.toClass, createConfig.makeRemote(host, port).dispatcher(dispatcherInstance)) - // case "r" => TypedActor.newInstance(implementation.toClass, createConfig.makeRemote(host, port)) - // case "d" => TypedActor.newInstance(implementation.toClass, createConfig.dispatcher(dispatcherInstance)) - } - } private[akka] def createConfig: TypedActorConfiguration = { val config = new TypedActorConfiguration().timeout(Duration(timeout, "millis")) if (transactional) config.makeTransactionRequired + if (isRemote) config.makeRemote(host, port) + if (hasDispatcher) { + if (dispatcher.dispatcherType != THREAD_BASED) { + config.dispatcher(dispatcherInstance()) + } else { + config.threadBasedDispatcher() + } + } config } private[akka] def isRemote = (host != null) && (!host.isEmpty) - private[akka] def hasInterface = (interface != null) && (!interface.isEmpty) - private[akka] def hasDispatcher = (dispatcher != null) && (dispatcher.dispatcherType != null) && (!dispatcher.dispatcherType.isEmpty) - private[akka] def dispatcherInstance: MessageDispatcher = { + /** + * Create dispatcher instance with dispatcher 
properties. + * @param actorRef actorRef for thread based dispatcher + * @return new dispatcher instance + */ + private[akka] def dispatcherInstance(actorRef: Option[ActorRef] = None) : MessageDispatcher = { import DispatcherFactoryBean._ - createNewInstance(dispatcher) + if (dispatcher.dispatcherType != THREAD_BASED) { + createNewInstance(dispatcher) + } else { + createNewInstance(dispatcher, actorRef) + } } } diff --git a/akka-spring/src/main/scala/TypedActorParser.scala b/akka-spring/src/main/scala/ActorParser.scala similarity index 88% rename from akka-spring/src/main/scala/TypedActorParser.scala rename to akka-spring/src/main/scala/ActorParser.scala index 5f4d68f297..69073bd52f 100644 --- a/akka-spring/src/main/scala/TypedActorParser.scala +++ b/akka-spring/src/main/scala/ActorParser.scala @@ -15,7 +15,7 @@ import se.scalablesolutions.akka.actor.IllegalActorStateException * @author Johan Rask * @author Martin Krasser */ -trait TypedActorParser extends BeanParser with DispatcherParser { +trait ActorParser extends BeanParser with DispatcherParser { import AkkaSpringConfigurationTags._ /** @@ -23,8 +23,8 @@ trait TypedActorParser extends BeanParser with DispatcherParser { * @param element dom element to parse * @return configuration for the typed actor */ - def parseTypedActor(element: Element): TypedActorProperties = { - val objectProperties = new TypedActorProperties() + def parseActor(element: Element): ActorProperties = { + val objectProperties = new ActorProperties() val remoteElement = DomUtils.getChildElementByTagName(element, REMOTE_TAG); val dispatcherElement = DomUtils.getChildElementByTagName(element, DISPATCHER_TAG) val propertyEntries = DomUtils.getChildElementsByTagName(element,PROPERTYENTRY_TAG) @@ -48,7 +48,8 @@ trait TypedActorParser extends BeanParser with DispatcherParser { } try { - objectProperties.timeout = mandatory(element, TIMEOUT).toLong + val timeout = element.getAttribute(TIMEOUT) + objectProperties.timeout = if ((timeout != null) && (!timeout.isEmpty)) timeout.toLong else -1L } catch { case nfe: NumberFormatException => log.error(nfe, "could not parse timeout %s", element.getAttribute(TIMEOUT)) diff --git a/akka-spring/src/main/scala/TypedActorProperties.scala b/akka-spring/src/main/scala/ActorProperties.scala similarity index 93% rename from akka-spring/src/main/scala/TypedActorProperties.scala rename to akka-spring/src/main/scala/ActorProperties.scala index 46c9cd35aa..15c7e61fe0 100644 --- a/akka-spring/src/main/scala/TypedActorProperties.scala +++ b/akka-spring/src/main/scala/ActorProperties.scala @@ -12,7 +12,8 @@ import AkkaSpringConfigurationTags._ * @author michaelkober * @author Martin Krasser */ -class TypedActorProperties { +class ActorProperties { + var typed: String = "" var target: String = "" var timeout: Long = _ var interface: String = "" @@ -30,6 +31,7 @@ class TypedActorProperties { * @param builder bean definition builder */ def setAsProperties(builder: BeanDefinitionBuilder) { + builder.addPropertyValue("typed", typed) builder.addPropertyValue(HOST, host) builder.addPropertyValue(PORT, port) builder.addPropertyValue(TIMEOUT, timeout) diff --git a/akka-spring/src/main/scala/AkkaNamespaceHandler.scala b/akka-spring/src/main/scala/AkkaNamespaceHandler.scala index 694daa90d4..a478b7b262 100644 --- a/akka-spring/src/main/scala/AkkaNamespaceHandler.scala +++ b/akka-spring/src/main/scala/AkkaNamespaceHandler.scala @@ -13,6 +13,7 @@ import AkkaSpringConfigurationTags._ class AkkaNamespaceHandler extends NamespaceHandlerSupport { def init = { 
registerBeanDefinitionParser(TYPED_ACTOR_TAG, new TypedActorBeanDefinitionParser()); + registerBeanDefinitionParser(UNTYPED_ACTOR_TAG, new UntypedActorBeanDefinitionParser()); registerBeanDefinitionParser(SUPERVISION_TAG, new SupervisionBeanDefinitionParser()); registerBeanDefinitionParser(DISPATCHER_TAG, new DispatcherBeanDefinitionParser()); registerBeanDefinitionParser(CAMEL_SERVICE_TAG, new CamelServiceBeanDefinitionParser); diff --git a/akka-spring/src/main/scala/AkkaSpringConfigurationTags.scala b/akka-spring/src/main/scala/AkkaSpringConfigurationTags.scala index 857d20fa55..518727bd4c 100644 --- a/akka-spring/src/main/scala/AkkaSpringConfigurationTags.scala +++ b/akka-spring/src/main/scala/AkkaSpringConfigurationTags.scala @@ -14,16 +14,18 @@ object AkkaSpringConfigurationTags { // // top level tags val TYPED_ACTOR_TAG = "typed-actor" + val UNTYPED_ACTOR_TAG = "untyped-actor" val SUPERVISION_TAG = "supervision" val DISPATCHER_TAG = "dispatcher" val PROPERTYENTRY_TAG = "property" val CAMEL_SERVICE_TAG = "camel-service" - // typed-actor sub tags + // actor sub tags val REMOTE_TAG = "remote" // superivision sub tags val TYPED_ACTORS_TAG = "typed-actors" + val UNTYPED_ACTORS_TAG = "untyped-actors" val STRATEGY_TAG = "restart-strategy" val TRAP_EXISTS_TAG = "trap-exits" val TRAP_EXIT_TAG = "trap-exit" @@ -36,7 +38,7 @@ object AkkaSpringConfigurationTags { // --- ATTRIBUTES // - // typed actor attributes + // actor attributes val TIMEOUT = "timeout" val IMPLEMENTATION = "implementation" val INTERFACE = "interface" @@ -55,6 +57,7 @@ object AkkaSpringConfigurationTags { val NAME = "name" val REF = "ref" val TYPE = "type" + val AGGREGATE = "aggregate" // HawtDispatcher // thread pool attributes val QUEUE = "queue" @@ -93,8 +96,10 @@ object AkkaSpringConfigurationTags { // dispatcher types val EXECUTOR_BASED_EVENT_DRIVEN = "executor-based-event-driven" + val EXECUTOR_BASED_EVENT_DRIVEN_WORK_STEALING = "executor-based-event-driven-work-stealing" val REACTOR_BASED_THREAD_POOL_EVENT_DRIVEN = "reactor-based-thread-pool-event-driven" val REACTOR_BASED_SINGLE_THREAD_EVENT_DRIVEN = "reactor-based-single-thread-event-driven" val THREAD_BASED = "thread-based" + val HAWT = "hawt" } diff --git a/akka-spring/src/main/scala/DispatcherBeanDefinitionParser.scala b/akka-spring/src/main/scala/DispatcherBeanDefinitionParser.scala index 9d4a16ff9b..90c56b0b5b 100644 --- a/akka-spring/src/main/scala/DispatcherBeanDefinitionParser.scala +++ b/akka-spring/src/main/scala/DispatcherBeanDefinitionParser.scala @@ -12,7 +12,7 @@ import org.springframework.beans.factory.xml.{ParserContext, AbstractSingleBeanD * Parser for custom namespace configuration. 
* @author michaelkober */ -class DispatcherBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with TypedActorParser with DispatcherParser { +class DispatcherBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with ActorParser with DispatcherParser { /* * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) */ diff --git a/akka-spring/src/main/scala/DispatcherFactoryBean.scala b/akka-spring/src/main/scala/DispatcherFactoryBean.scala index ac4172a696..06c9994c7f 100644 --- a/akka-spring/src/main/scala/DispatcherFactoryBean.scala +++ b/akka-spring/src/main/scala/DispatcherFactoryBean.scala @@ -7,6 +7,7 @@ import org.springframework.beans.factory.config.AbstractFactoryBean import se.scalablesolutions.akka.config.JavaConfig._ import AkkaSpringConfigurationTags._ import reflect.BeanProperty +import se.scalablesolutions.akka.actor.ActorRef import se.scalablesolutions.akka.dispatch.{ThreadPoolBuilder, Dispatchers, MessageDispatcher} import java.util.concurrent.RejectedExecutionHandler import java.util.concurrent.ThreadPoolExecutor.{DiscardPolicy, DiscardOldestPolicy, CallerRunsPolicy, AbortPolicy} @@ -15,17 +16,30 @@ import java.util.concurrent.ThreadPoolExecutor.{DiscardPolicy, DiscardOldestPoli * Reusable factory method for dispatchers. */ object DispatcherFactoryBean { - def createNewInstance(properties: DispatcherProperties): MessageDispatcher = { + + /** + * factory method for dispatchers + * @param properties dispatcher properties + * @param actorRef actorRef needed for thread based dispatcher + */ + def createNewInstance(properties: DispatcherProperties, actorRef: Option[ActorRef] = None): MessageDispatcher = { var dispatcher = properties.dispatcherType match { case EXECUTOR_BASED_EVENT_DRIVEN => Dispatchers.newExecutorBasedEventDrivenDispatcher(properties.name) + case EXECUTOR_BASED_EVENT_DRIVEN_WORK_STEALING => Dispatchers.newExecutorBasedEventDrivenWorkStealingDispatcher(properties.name) case REACTOR_BASED_THREAD_POOL_EVENT_DRIVEN => Dispatchers.newReactorBasedThreadPoolEventDrivenDispatcher(properties.name) case REACTOR_BASED_SINGLE_THREAD_EVENT_DRIVEN => Dispatchers.newReactorBasedSingleThreadEventDrivenDispatcher(properties.name) - case THREAD_BASED => throw new IllegalArgumentException("not implemented yet") //FIXME + case THREAD_BASED => if (!actorRef.isDefined) { + throw new IllegalArgumentException("Need an ActorRef to create a thread based dispatcher.") + } else { + Dispatchers.newThreadBasedDispatcher(actorRef.get) + } + case HAWT => Dispatchers.newHawtDispatcher(properties.aggregate) case _ => throw new IllegalArgumentException("unknown dispatcher type") } - if ((properties.threadPool != null) && (properties.threadPool.queue != null)) { - var threadPoolBuilder = dispatcher.asInstanceOf[ThreadPoolBuilder] - threadPoolBuilder = properties.threadPool.queue match { + // build threadpool + if ((properties.threadPool != null) && (properties.threadPool.queue != null)) { + var threadPoolBuilder = dispatcher.asInstanceOf[ThreadPoolBuilder] + threadPoolBuilder = properties.threadPool.queue match { case VAL_BOUNDED_ARRAY_BLOCKING_QUEUE => threadPoolBuilder.withNewThreadPoolWithArrayBlockingQueueWithCapacityAndFairness(properties.threadPool.capacity, properties.threadPool.fairness) case VAL_UNBOUNDED_LINKED_BLOCKING_QUEUE => if (properties.threadPool.capacity > -1) 
threadPoolBuilder.withNewThreadPoolWithLinkedBlockingQueueWithCapacity(properties.threadPool.capacity) diff --git a/akka-spring/src/main/scala/DispatcherParser.scala b/akka-spring/src/main/scala/DispatcherParser.scala index fb9855102e..c4257230f7 100644 --- a/akka-spring/src/main/scala/DispatcherParser.scala +++ b/akka-spring/src/main/scala/DispatcherParser.scala @@ -28,13 +28,24 @@ trait DispatcherParser extends BeanParser { throw new IllegalArgumentException("Referenced dispatcher not found: '" + ref + "'") } } - properties.name = mandatory(dispatcherElement, NAME) + properties.dispatcherType = mandatory(dispatcherElement, TYPE) if (properties.dispatcherType == THREAD_BASED) { - if (dispatcherElement.getParentNode.getNodeName != "typed-actor") { - throw new IllegalArgumentException("Thread based dispatcher must be nested in typed-actor element!") + val allowedParentNodes = "akka:typed-actor" :: "akka:untyped-actor" :: "typed-actor" :: "untyped-actor" :: Nil + if (!allowedParentNodes.contains(dispatcherElement.getParentNode.getNodeName)) { + throw new IllegalArgumentException("Thread based dispatcher must be nested in 'typed-actor' or 'untyped-actor' element!") } } + + if (properties.dispatcherType == HAWT) { // no name for HawtDispatcher + properties.name = dispatcherElement.getAttribute(NAME) + if (dispatcherElement.hasAttribute(AGGREGATE)) { + properties.aggregate = dispatcherElement.getAttribute(AGGREGATE).toBoolean + } + } else { + properties.name = mandatory(dispatcherElement, NAME) + } + val threadPoolElement = DomUtils.getChildElementByTagName(dispatcherElement, THREAD_POOL_TAG); if (threadPoolElement != null) { if (properties.dispatcherType == REACTOR_BASED_SINGLE_THREAD_EVENT_DRIVEN || @@ -45,7 +56,7 @@ trait DispatcherParser extends BeanParser { properties.threadPool = threadPoolProperties } properties -} + } /** * Parses the given element and returns a ThreadPoolProperties. diff --git a/akka-spring/src/main/scala/DispatcherProperties.scala b/akka-spring/src/main/scala/DispatcherProperties.scala index e35bb62d27..183b3825bb 100644 --- a/akka-spring/src/main/scala/DispatcherProperties.scala +++ b/akka-spring/src/main/scala/DispatcherProperties.scala @@ -14,6 +14,7 @@ class DispatcherProperties { var dispatcherType: String = "" var name: String = "" var threadPool: ThreadPoolProperties = _ + var aggregate = true /** * Sets the properties to the given builder. 
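[Note, not part of the patch] A sketch of the new DispatcherFactoryBean.createNewInstance contract shown above: thread-based dispatchers are bound to a single actor, so the factory now takes an optional ActorRef and throws IllegalArgumentException when it is missing for the thread-based type. The Worker actor and dispatcher name below are hypothetical.

import se.scalablesolutions.akka.actor.Actor
import se.scalablesolutions.akka.spring.{AkkaSpringConfigurationTags, DispatcherFactoryBean, DispatcherProperties}

object ThreadBasedDispatcherSketch {
  class Worker extends Actor {               // hypothetical actor, only needed for its ActorRef
    def receive = { case _ => () }
  }

  def main(args: Array[String]) {
    val props = new DispatcherProperties
    props.dispatcherType = AkkaSpringConfigurationTags.THREAD_BASED
    props.name = "worker-dispatcher"

    val workerRef = Actor.actorOf(new Worker)
    // The ActorRef is mandatory for THREAD_BASED; for HAWT the new 'aggregate' property is used instead
    workerRef.dispatcher = DispatcherFactoryBean.createNewInstance(props, Some(workerRef))
    workerRef.start
  }
}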
diff --git a/akka-spring/src/main/scala/PropertyEntries.scala b/akka-spring/src/main/scala/PropertyEntries.scala index aa2843064c..bf1898a805 100644 --- a/akka-spring/src/main/scala/PropertyEntries.scala +++ b/akka-spring/src/main/scala/PropertyEntries.scala @@ -1,3 +1,6 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ package se.scalablesolutions.akka.spring import org.springframework.beans.factory.support.BeanDefinitionBuilder @@ -5,14 +8,13 @@ import org.springframework.beans.factory.support.BeanDefinitionBuilder import scala.collection.mutable._ /** -* Simple container for Properties -* @author Johan Rask -*/ + * Simple container for Properties + * @author Johan Rask + */ class PropertyEntries { + var entryList: ListBuffer[PropertyEntry] = ListBuffer[PropertyEntry]() - var entryList:ListBuffer[PropertyEntry] = ListBuffer[PropertyEntry]() - - def add(entry:PropertyEntry) = { - entryList.append(entry) + def add(entry: PropertyEntry) = { + entryList.append(entry) } } diff --git a/akka-spring/src/main/scala/PropertyEntry.scala b/akka-spring/src/main/scala/PropertyEntry.scala index 4d1aaa1a44..9fe6357fc0 100644 --- a/akka-spring/src/main/scala/PropertyEntry.scala +++ b/akka-spring/src/main/scala/PropertyEntry.scala @@ -1,17 +1,19 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ package se.scalablesolutions.akka.spring /** -* Represents a property element -* @author Johan Rask -*/ + * Represents a property element + * @author Johan Rask + */ class PropertyEntry { - - var name:String = _ - var value:String = null - var ref:String = null + var name: String = _ + var value: String = null + var ref: String = null - override def toString(): String = { - format("name = %s,value = %s, ref = %s", name,value,ref) - } + override def toString(): String = { + format("name = %s,value = %s, ref = %s", name, value, ref) + } } diff --git a/akka-spring/src/main/scala/SupervisionBeanDefinitionParser.scala b/akka-spring/src/main/scala/SupervisionBeanDefinitionParser.scala index 5d430c9450..cc88e39f91 100644 --- a/akka-spring/src/main/scala/SupervisionBeanDefinitionParser.scala +++ b/akka-spring/src/main/scala/SupervisionBeanDefinitionParser.scala @@ -18,7 +18,7 @@ import org.springframework.util.xml.DomUtils * Parser for custom namespace for Akka declarative supervisor configuration. 
* @author michaelkober */ -class SupervisionBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with TypedActorParser { +class SupervisionBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with ActorParser { /* (non-Javadoc) * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) */ @@ -30,10 +30,20 @@ class SupervisionBeanDefinitionParser extends AbstractSingleBeanDefinitionParser * made accessible for testing */ private[akka] def parseSupervisor(element: Element, builder: BeanDefinitionBuilder) { - val strategyElement = mandatoryElement(element, STRATEGY_TAG); - val typedActorsElement = mandatoryElement(element, TYPED_ACTORS_TAG); + val strategyElement = mandatoryElement(element, STRATEGY_TAG) + val typedActorsElement = DomUtils.getChildElementByTagName(element, TYPED_ACTORS_TAG) + val untypedActorsElement = DomUtils.getChildElementByTagName(element, UNTYPED_ACTORS_TAG) + if ((typedActorsElement == null) && (untypedActorsElement == null)) { + throw new IllegalArgumentException("One of 'akka:typed-actors' or 'akka:untyped-actors' needed.") + } parseRestartStrategy(strategyElement, builder) - parseTypedActorList(typedActorsElement, builder) + if (typedActorsElement != null) { + builder.addPropertyValue("typed", AkkaSpringConfigurationTags.TYPED_ACTOR_TAG) + parseTypedActorList(typedActorsElement, builder) + } else { + builder.addPropertyValue("typed", AkkaSpringConfigurationTags.UNTYPED_ACTOR_TAG) + parseUntypedActorList(untypedActorsElement, builder) + } } private[akka] def parseRestartStrategy(element: Element, builder: BeanDefinitionBuilder) { @@ -48,8 +58,14 @@ class SupervisionBeanDefinitionParser extends AbstractSingleBeanDefinitionParser private[akka] def parseTypedActorList(element: Element, builder: BeanDefinitionBuilder) { val typedActors = DomUtils.getChildElementsByTagName(element, TYPED_ACTOR_TAG).toArray.toList.asInstanceOf[List[Element]] - val typedActorProperties = typedActors.map(parseTypedActor(_)) - builder.addPropertyValue("supervised", typedActorProperties) + val actorProperties = typedActors.map(parseActor(_)) + builder.addPropertyValue("supervised", actorProperties) + } + + private[akka] def parseUntypedActorList(element: Element, builder: BeanDefinitionBuilder) { + val untypedActors = DomUtils.getChildElementsByTagName(element, UNTYPED_ACTOR_TAG).toArray.toList.asInstanceOf[List[Element]] + val actorProperties = untypedActors.map(parseActor(_)) + builder.addPropertyValue("supervised", actorProperties) } private def parseTrapExits(element: Element): Array[Class[_ <: Throwable]] = { diff --git a/akka-spring/src/main/scala/SupervisionFactoryBean.scala b/akka-spring/src/main/scala/SupervisionFactoryBean.scala index 80a1f8a5fa..8ff62ba4af 100644 --- a/akka-spring/src/main/scala/SupervisionFactoryBean.scala +++ b/akka-spring/src/main/scala/SupervisionFactoryBean.scala @@ -6,6 +6,8 @@ package se.scalablesolutions.akka.spring import org.springframework.beans.factory.config.AbstractFactoryBean import se.scalablesolutions.akka.config.TypedActorConfigurator import se.scalablesolutions.akka.config.JavaConfig._ +import se.scalablesolutions.akka.config.ScalaConfig.{Supervise, Server, SupervisorConfig, RemoteAddress => SRemoteAddress} +import se.scalablesolutions.akka.actor.{Supervisor, SupervisorFactory, UntypedActor} import AkkaSpringConfigurationTags._ import reflect.BeanProperty @@ 
-14,31 +16,45 @@ import reflect.BeanProperty * Factory bean for supervisor configuration. * @author michaelkober */ -class SupervisionFactoryBean extends AbstractFactoryBean[TypedActorConfigurator] { +class SupervisionFactoryBean extends AbstractFactoryBean[AnyRef] { @BeanProperty var restartStrategy: RestartStrategy = _ - @BeanProperty var supervised: List[TypedActorProperties] = _ + @BeanProperty var supervised: List[ActorProperties] = _ + @BeanProperty var typed: String = "" /* * @see org.springframework.beans.factory.FactoryBean#getObjectType() */ - def getObjectType: Class[TypedActorConfigurator] = classOf[TypedActorConfigurator] + def getObjectType: Class[AnyRef] = classOf[AnyRef] /* * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() */ - def createInstance: TypedActorConfigurator = { - val configurator = new TypedActorConfigurator() + def createInstance: AnyRef = typed match { + case AkkaSpringConfigurationTags.TYPED_ACTOR_TAG => createInstanceForTypedActors + case AkkaSpringConfigurationTags.UNTYPED_ACTOR_TAG => createInstanceForUntypedActors + } + private def createInstanceForTypedActors() : TypedActorConfigurator = { + val configurator = new TypedActorConfigurator() configurator.configure( restartStrategy, supervised.map(createComponent(_)).toArray ).supervise + + } + + private def createInstanceForUntypedActors() : Supervisor = { + val factory = new SupervisorFactory( + new SupervisorConfig( + restartStrategy.transform, + supervised.map(createSupervise(_)))) + factory.newInstance } /** * Create configuration for TypedActor */ - private[akka] def createComponent(props: TypedActorProperties): Component = { + private[akka] def createComponent(props: ActorProperties): Component = { import StringReflect._ val lifeCycle = if (!props.lifecycle.isEmpty && props.lifecycle.equalsIgnoreCase(VAL_LIFECYCYLE_TEMPORARY)) new LifeCycle(new Temporary()) else new LifeCycle(new Permanent()) val isRemote = (props.host != null) && (!props.host.isEmpty) @@ -58,4 +74,28 @@ class SupervisionFactoryBean extends AbstractFactoryBean[TypedActorConfigurator] } } } + + /** + * Create configuration for UntypedActor + */ + private[akka] def createSupervise(props: ActorProperties): Server = { + import StringReflect._ + val lifeCycle = if (!props.lifecycle.isEmpty && props.lifecycle.equalsIgnoreCase(VAL_LIFECYCYLE_TEMPORARY)) new LifeCycle(new Temporary()) else new LifeCycle(new Permanent()) + val isRemote = (props.host != null) && (!props.host.isEmpty) + val untypedActorRef = UntypedActor.actorOf(props.target.toClass) + if (props.timeout > 0) { + untypedActorRef.setTimeout(props.timeout) + } + if (props.transactional) { + untypedActorRef.makeTransactionRequired + } + + val supervise = if (isRemote) { + val remote = new SRemoteAddress(props.host, props.port) + Supervise(untypedActorRef.actorRef, lifeCycle.transform, remote) + } else { + Supervise(untypedActorRef.actorRef, lifeCycle.transform) + } + supervise + } } diff --git a/akka-spring/src/main/scala/TypedActorBeanDefinitionParser.scala b/akka-spring/src/main/scala/TypedActorBeanDefinitionParser.scala index ec987aacc0..e8e0cef7d4 100644 --- a/akka-spring/src/main/scala/TypedActorBeanDefinitionParser.scala +++ b/akka-spring/src/main/scala/TypedActorBeanDefinitionParser.scala @@ -6,6 +6,7 @@ package se.scalablesolutions.akka.spring import org.springframework.beans.factory.support.BeanDefinitionBuilder import org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser import 
org.springframework.beans.factory.xml.ParserContext +import AkkaSpringConfigurationTags._ import org.w3c.dom.Element @@ -13,17 +14,18 @@ import org.w3c.dom.Element * Parser for custom namespace configuration. * @author michaelkober */ -class TypedActorBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with TypedActorParser { +class TypedActorBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with ActorParser { /* * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) */ override def doParse(element: Element, parserContext: ParserContext, builder: BeanDefinitionBuilder) { - val typedActorConf = parseTypedActor(element) + val typedActorConf = parseActor(element) + typedActorConf.typed = TYPED_ACTOR_TAG typedActorConf.setAsProperties(builder) } /* * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element) */ - override def getBeanClass(element: Element): Class[_] = classOf[TypedActorFactoryBean] + override def getBeanClass(element: Element): Class[_] = classOf[ActorFactoryBean] } diff --git a/akka-spring/src/main/scala/UntypedActorBeanDefinitionParser.scala b/akka-spring/src/main/scala/UntypedActorBeanDefinitionParser.scala new file mode 100644 index 0000000000..752e18559f --- /dev/null +++ b/akka-spring/src/main/scala/UntypedActorBeanDefinitionParser.scala @@ -0,0 +1,31 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ +package se.scalablesolutions.akka.spring + +import org.springframework.beans.factory.support.BeanDefinitionBuilder +import org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser +import org.springframework.beans.factory.xml.ParserContext +import AkkaSpringConfigurationTags._ +import org.w3c.dom.Element + + +/** + * Parser for custom namespace configuration. 
+ * @author michaelkober + */ +class UntypedActorBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with ActorParser { + /* + * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) + */ + override def doParse(element: Element, parserContext: ParserContext, builder: BeanDefinitionBuilder) { + val untypedActorConf = parseActor(element) + untypedActorConf.typed = UNTYPED_ACTOR_TAG + untypedActorConf.setAsProperties(builder) + } + + /* + * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element) + */ + override def getBeanClass(element: Element): Class[_] = classOf[ActorFactoryBean] +} diff --git a/akka-spring/akka-spring-test-java/src/main/java/se/scalablesolutions/akka/spring/foo/Bar.java b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/Bar.java similarity index 76% rename from akka-spring/akka-spring-test-java/src/main/java/se/scalablesolutions/akka/spring/foo/Bar.java rename to akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/Bar.java index 1b9e67e09c..7a0f5c439d 100644 --- a/akka-spring/akka-spring-test-java/src/main/java/se/scalablesolutions/akka/spring/foo/Bar.java +++ b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/Bar.java @@ -1,8 +1,9 @@ package se.scalablesolutions.akka.spring.foo; import java.io.IOException; +import se.scalablesolutions.akka.actor.*; -public class Bar implements IBar { +public class Bar extends TypedActor implements IBar { @Override public String getBar() { diff --git a/akka-spring/akka-spring-test-java/src/main/java/se/scalablesolutions/akka/spring/foo/Foo.java b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/Foo.java similarity index 50% rename from akka-spring/akka-spring-test-java/src/main/java/se/scalablesolutions/akka/spring/foo/Foo.java rename to akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/Foo.java index 36536cdb5d..00e4b0df2e 100644 --- a/akka-spring/akka-spring-test-java/src/main/java/se/scalablesolutions/akka/spring/foo/Foo.java +++ b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/Foo.java @@ -1,6 +1,8 @@ package se.scalablesolutions.akka.spring.foo; -public class Foo { +import se.scalablesolutions.akka.actor.*; + +public class Foo extends TypedActor implements IFoo{ public String foo() { return "foo"; diff --git a/akka-spring/akka-spring-test-java/src/main/java/se/scalablesolutions/akka/spring/foo/IBar.java b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/IBar.java similarity index 100% rename from akka-spring/akka-spring-test-java/src/main/java/se/scalablesolutions/akka/spring/foo/IBar.java rename to akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/IBar.java diff --git a/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/IFoo.java b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/IFoo.java new file mode 100644 index 0000000000..b7e6b622d5 --- /dev/null +++ b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/IFoo.java @@ -0,0 +1,12 @@ +package se.scalablesolutions.akka.spring.foo; + +/** + * Created by IntelliJ IDEA. + * User: michaelkober + * Date: Aug 11, 2010 + * Time: 12:49:58 PM + * To change this template use File | Settings | File Templates. 
+ */ +public interface IFoo { + public String foo(); +} diff --git a/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/IMyPojo.java b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/IMyPojo.java new file mode 100644 index 0000000000..f2c5e24884 --- /dev/null +++ b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/IMyPojo.java @@ -0,0 +1,21 @@ +package se.scalablesolutions.akka.spring.foo; + +/** + * Created by IntelliJ IDEA. + * User: michaelkober + * Date: Aug 11, 2010 + * Time: 12:01:00 PM + * To change this template use File | Settings | File Templates. + */ +public interface IMyPojo { + public String getFoo(); + + public String getBar(); + + public void preRestart(); + + public void postRestart(); + + public String longRunning(); + +} diff --git a/akka-spring/akka-spring-test-java/src/main/java/se/scalablesolutions/akka/spring/foo/MyPojo.java b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/MyPojo.java similarity index 88% rename from akka-spring/akka-spring-test-java/src/main/java/se/scalablesolutions/akka/spring/foo/MyPojo.java rename to akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/MyPojo.java index 1269f43f62..0ead5901cc 100644 --- a/akka-spring/akka-spring-test-java/src/main/java/se/scalablesolutions/akka/spring/foo/MyPojo.java +++ b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/MyPojo.java @@ -1,6 +1,8 @@ package se.scalablesolutions.akka.spring.foo; -public class MyPojo { +import se.scalablesolutions.akka.actor.*; + +public class MyPojo extends TypedActor implements IMyPojo{ private String foo; private String bar; diff --git a/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/PingActor.java b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/PingActor.java new file mode 100644 index 0000000000..c624d63ecd --- /dev/null +++ b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/PingActor.java @@ -0,0 +1,33 @@ +package se.scalablesolutions.akka.spring.foo; + +import se.scalablesolutions.akka.actor.UntypedActor; +import se.scalablesolutions.akka.actor.UntypedActorRef; + +/** + * test class + */ +public class PingActor extends UntypedActor { + + private String longRunning() { + try { + Thread.sleep(6000); + } catch (InterruptedException e) { + } + return "this took long"; + } + + public void onReceive(Object message) throws Exception { + if (message instanceof String) { + System.out.println("Ping received String message: " + message); + if (message.equals("longRunning")) { + System.out.println("### starting pong"); + UntypedActorRef pongActor = UntypedActor.actorOf(PongActor.class).start(); + pongActor.sendRequestReply("longRunning", getContext()); + } + } else { + throw new IllegalArgumentException("Unknown message: " + message); + } + } + + +} diff --git a/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/PongActor.java b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/PongActor.java new file mode 100644 index 0000000000..b67c0809fb --- /dev/null +++ b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/PongActor.java @@ -0,0 +1,18 @@ +package se.scalablesolutions.akka.spring.foo; + +import se.scalablesolutions.akka.actor.UntypedActor; + +/** + * test class + */ +public class PongActor extends UntypedActor { + + public void onReceive(Object message) throws Exception { + if (message instanceof String) { + System.out.println("Pong received String message: " + message); + getContext().replyUnsafe(message + " from " +
getContext().getUuid()); + } else { + throw new IllegalArgumentException("Unknown message: " + message); + } + } +} diff --git a/akka-spring/akka-spring-test-java/src/main/java/se/scalablesolutions/akka/spring/foo/StatefulPojo.java b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/StatefulPojo.java similarity index 88% rename from akka-spring/akka-spring-test-java/src/main/java/se/scalablesolutions/akka/spring/foo/StatefulPojo.java rename to akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/StatefulPojo.java index 17332c696a..3b4e05453b 100644 --- a/akka-spring/akka-spring-test-java/src/main/java/se/scalablesolutions/akka/spring/foo/StatefulPojo.java +++ b/akka-spring/src/test/java/se/scalablesolutions/akka/spring/foo/StatefulPojo.java @@ -1,18 +1,19 @@ package se.scalablesolutions.akka.spring.foo; -import se.scalablesolutions.akka.actor.annotation.inittransactionalstate; + import se.scalablesolutions.akka.stm.TransactionalMap; import se.scalablesolutions.akka.stm.TransactionalVector; import se.scalablesolutions.akka.stm.Ref; +import se.scalablesolutions.akka.actor.*; -public class StatefulPojo { +public class StatefulPojo extends TypedActor { private TransactionalMap mapState; private TransactionalVector vectorState; private Ref refState; private boolean isInitialized = false; - @inittransactionalstate - public void init() { + @Override + public void initTransactionalState() { if (!isInitialized) { mapState = new TransactionalMap(); vectorState = new TransactionalVector(); @@ -21,7 +22,7 @@ public class StatefulPojo { } } - + /* public String getMapState(String key) { return (String)mapState.get(key).get(); } @@ -49,5 +50,6 @@ public class StatefulPojo { public boolean isInitialized() { return isInitialized; } + */ } diff --git a/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/dispatcher-config.xml b/akka-spring/src/test/resources/dispatcher-config.xml similarity index 56% rename from akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/dispatcher-config.xml rename to akka-spring/src/test/resources/dispatcher-config.xml index 20879832d0..c02c5b4d14 100644 --- a/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/dispatcher-config.xml +++ b/akka-spring/src/test/resources/dispatcher-config.xml @@ -10,19 +10,23 @@ http://www.akkasource.org/schema/akka http://scalablesolutions.se/akka/akka-0.10.xsd"> - + - + - + - + - + - + - + @@ -62,6 +66,28 @@ http://scalablesolutions.se/akka/akka-0.10.xsd"> + + + + + + + + + + + + + + + + + @@ -70,11 +96,18 @@ http://scalablesolutions.se/akka/akka-0.10.xsd"> - + - - + implementation="se.scalablesolutions.akka.spring.foo.Bar" + lifecycle="permanent" + timeout="1000"/> + diff --git a/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/supervisor-config.xml b/akka-spring/src/test/resources/supervisor-config.xml similarity index 62% rename from akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/supervisor-config.xml rename to akka-spring/src/test/resources/supervisor-config.xml index d96fdb1c93..698581d903 100644 --- a/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/supervisor-config.xml +++ b/akka-spring/src/test/resources/supervisor-config.xml @@ -17,15 +17,40 @@ http://scalablesolutions.se/akka/akka-0.10.xsd"> - + - - + implementation="se.scalablesolutions.akka.spring.foo.Bar" + 
lifecycle="permanent" + timeout="1000"/> + + + + + java.io.IOException + java.lang.NullPointerException + + + + + + + + + - + - - + - + \ No newline at end of file diff --git a/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/test-config.xml b/akka-spring/src/test/resources/typed-actor-config.xml similarity index 67% rename from akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/test-config.xml rename to akka-spring/src/test/resources/typed-actor-config.xml index 23d2476995..dfa40f99ad 100644 --- a/akka-spring/akka-spring-test-java/src/main/resources/se/scalablesolutions/akka/spring/foo/test-config.xml +++ b/akka-spring/src/test/resources/typed-actor-config.xml @@ -9,42 +9,41 @@ http://www.springframework.org/schema/beans/spring-beans-2.0.xsd http://www.akkasource.org/schema/akka http://scalablesolutions.se/akka/akka-0.10.xsd"> - - - - - - - - + @@ -65,11 +64,18 @@ http://scalablesolutions.se/akka/akka-0.10.xsd"> - + - - + implementation="se.scalablesolutions.akka.spring.foo.Bar" + lifecycle="permanent" + timeout="1000"/> + diff --git a/akka-spring/src/test/resources/untyped-actor-config.xml b/akka-spring/src/test/resources/untyped-actor-config.xml new file mode 100644 index 0000000000..aea5e86d44 --- /dev/null +++ b/akka-spring/src/test/resources/untyped-actor-config.xml @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/akka-spring/src/test/scala/TypedActorFactoryBeanTest.scala b/akka-spring/src/test/scala/ActorFactoryBeanTest.scala similarity index 89% rename from akka-spring/src/test/scala/TypedActorFactoryBeanTest.scala rename to akka-spring/src/test/scala/ActorFactoryBeanTest.scala index 4278cf14ac..112f34f0c7 100644 --- a/akka-spring/src/test/scala/TypedActorFactoryBeanTest.scala +++ b/akka-spring/src/test/scala/ActorFactoryBeanTest.scala @@ -16,12 +16,11 @@ import org.scalatest.matchers.ShouldMatchers * @author michaelkober */ @RunWith(classOf[JUnitRunner]) -class TypedActorFactoryBeanTest extends Spec with ShouldMatchers with BeforeAndAfterAll { - +class ActorFactoryBeanTest extends Spec with ShouldMatchers with BeforeAndAfterAll { override protected def afterAll = ActorRegistry.shutdownAll - describe("A TypedActorFactoryBean") { - val bean = new TypedActorFactoryBean + describe("A ActorFactoryBean") { + val bean = new ActorFactoryBean it("should have java getters and setters for all properties") { bean.setImplementation("java.lang.String") assert(bean.getImplementation == "java.lang.String") @@ -34,11 +33,6 @@ class TypedActorFactoryBeanTest extends Spec with ShouldMatchers with BeforeAndA assert(bean.isRemote) } - it("should create object that implements the given interface") { - bean.setInterface("com.biz.IPojo"); - assert(bean.hasInterface) - } - it("should create an typed actor with dispatcher if dispatcher is set") { val props = new DispatcherProperties() props.dispatcherType = "executor-based-event-driven" @@ -52,10 +46,11 @@ class TypedActorFactoryBeanTest extends Spec with ShouldMatchers with BeforeAndA } it("should create a proxy of type PojoInf") { - val bean = new TypedActorFactoryBean() + val bean = new ActorFactoryBean() bean.setInterface("se.scalablesolutions.akka.spring.PojoInf") bean.setImplementation("se.scalablesolutions.akka.spring.Pojo") bean.timeout = 1000 + bean.typed = AkkaSpringConfigurationTags.TYPED_ACTOR_TAG val entries = new PropertyEntries() val entry = new PropertyEntry() entry.name = "stringFromVal" diff --git 
a/akka-spring/src/test/scala/DispatcherBeanDefinitionParserTest.scala b/akka-spring/src/test/scala/DispatcherBeanDefinitionParserTest.scala index 64e9ea2425..83c179e29a 100644 --- a/akka-spring/src/test/scala/DispatcherBeanDefinitionParserTest.scala +++ b/akka-spring/src/test/scala/DispatcherBeanDefinitionParserTest.scala @@ -15,28 +15,35 @@ import ScalaDom._ */ @RunWith(classOf[JUnitRunner]) class DispatcherBeanDefinitionParserTest extends Spec with ShouldMatchers { - describe("A DispatcherBeanDefinitionParser") { val parser = new DispatcherBeanDefinitionParser() it("should be able to parse the dispatcher configuration") { + // executor-based-event-driven val xml = - val props = parser.parseDispatcher(dom(xml).getDocumentElement); + type="executor-based-event-driven" + name="myDispatcher"/> + var props = parser.parseDispatcher(dom(xml).getDocumentElement); assert(props != null) - assert(props.dispatcherType == "executor-based-event-driven") - assert(props.name == "myDispatcher") + assert(props.dispatcherType === "executor-based-event-driven") + assert(props.name === "myDispatcher") + + // executor-based-event-driven-work-stealing + val xml2 = + props = parser.parseDispatcher(dom(xml2).getDocumentElement); + assert(props.dispatcherType === "executor-based-event-driven-work-stealing") } it("should be able to parse the thread pool configuration") { val xml = + capacity="100" + fairness="true" + max-pool-size="40" + core-pool-size="6" + keep-alive="2000" + rejection-policy="caller-runs-policy"/> val props = parser.parseThreadPool(dom(xml).getDocumentElement); assert(props != null) assert(props.queue == "bounded-array-blocking-queue") @@ -50,14 +57,14 @@ class DispatcherBeanDefinitionParserTest extends Spec with ShouldMatchers { it("should be able to parse the dispatcher with a thread pool configuration") { val xml = - - + type="reactor-based-thread-pool-event-driven" + name="myDispatcher"> + + val props = parser.parseDispatcher(dom(xml).getDocumentElement); assert(props != null) assert(props.dispatcherType == "reactor-based-thread-pool-event-driven") @@ -69,29 +76,40 @@ class DispatcherBeanDefinitionParserTest extends Spec with ShouldMatchers { } it("should throw IllegalArgumentException on not existing reference") { - val xml = - evaluating { parser.parseDispatcher(dom(xml).getDocumentElement) } should produce [IllegalArgumentException] + val xml = + evaluating {parser.parseDispatcher(dom(xml).getDocumentElement)} should produce[IllegalArgumentException] } it("should throw IllegalArgumentException on missing mandatory attributes") { val xml = - evaluating { parser.parseDispatcher(dom(xml).getDocumentElement) } should produce [IllegalArgumentException] + name="myDispatcher"/> + evaluating {parser.parseDispatcher(dom(xml).getDocumentElement)} should produce[IllegalArgumentException] } it("should throw IllegalArgumentException when configuring a single thread dispatcher with a thread pool") { val xml = - - - evaluating { parser.parseDispatcher(dom(xml).getDocumentElement) } should produce [IllegalArgumentException] + type="reactor-based-single-thread-event-driven" + name="myDispatcher"> + + + evaluating {parser.parseDispatcher(dom(xml).getDocumentElement)} should produce[IllegalArgumentException] } - it("should throw IllegalArgumentException when configuring a thread based dispatcher without TypedActor") { - val xml = - evaluating { parser.parseDispatcher(dom(xml).getDocumentElement) } should produce [IllegalArgumentException] + it("should throw IllegalArgumentException when configuring a 
thread based dispatcher without TypedActor or UntypedActor") { + val xml = + evaluating {parser.parseDispatcher(dom(xml).getDocumentElement)} should produce[IllegalArgumentException] + } + + it("should be able to parse the hawt dispatcher configuration") { + // hawt + val xml = + var props = parser.parseDispatcher(dom(xml).getDocumentElement); + assert(props != null) + assert(props.dispatcherType === "hawt") + assert(props.aggregate === false) } } } diff --git a/akka-spring/src/test/scala/DispatcherSpringFeatureTest.scala b/akka-spring/src/test/scala/DispatcherSpringFeatureTest.scala new file mode 100644 index 0000000000..18dea2abb0 --- /dev/null +++ b/akka-spring/src/test/scala/DispatcherSpringFeatureTest.scala @@ -0,0 +1,144 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ +package se.scalablesolutions.akka.spring + + +import foo.{IMyPojo, MyPojo, PingActor} +import se.scalablesolutions.akka.dispatch._ +import se.scalablesolutions.akka.actor.UntypedActorRef + +import org.scalatest.FeatureSpec +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith +import org.springframework.beans.factory.support.DefaultListableBeanFactory +import org.springframework.beans.factory.xml.XmlBeanDefinitionReader +import org.springframework.context.ApplicationContext +import org.springframework.context.support.ClassPathXmlApplicationContext +import org.springframework.core.io.{ClassPathResource, Resource} +import java.util.concurrent._ + +/** + * Tests for spring configuration of typed actors. + * @author michaelkober + */ +@RunWith(classOf[JUnitRunner]) +class DispatcherSpringFeatureTest extends FeatureSpec with ShouldMatchers { + val EVENT_DRIVEN_PREFIX = "akka:event-driven:dispatcher:" + + feature("Spring configuration") { + + scenario("get a executor-event-driven-dispatcher with array-blocking-queue from context") { + val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") + val dispatcher = context.getBean("executor-event-driven-dispatcher-1").asInstanceOf[ExecutorBasedEventDrivenDispatcher] + assert(dispatcher.name === EVENT_DRIVEN_PREFIX + "dispatcher-1") + val executor = getThreadPoolExecutorAndAssert(dispatcher) + assert(executor.getCorePoolSize() === 1) + assert(executor.getMaximumPoolSize() === 20) + assert(executor.getKeepAliveTime(TimeUnit.MILLISECONDS) === 3000) + assert(executor.getQueue().isInstanceOf[ArrayBlockingQueue[Runnable]]); + assert(executor.getQueue().remainingCapacity() === 100) + } + + scenario("get a dispatcher via ref from context") { + val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") + val pojo = context.getBean("typed-actor-with-dispatcher-ref").asInstanceOf[IMyPojo] + assert(pojo != null) + } + + scenario("get a executor-event-driven-dispatcher with bounded-linked-blocking-queue with unbounded capacity from context") { + val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") + val dispatcher = context.getBean("executor-event-driven-dispatcher-2").asInstanceOf[ExecutorBasedEventDrivenDispatcher] + val executor = getThreadPoolExecutorAndAssert(dispatcher) + assert(executor.getQueue().isInstanceOf[LinkedBlockingQueue[Runnable]]) + assert(executor.getQueue().remainingCapacity() === Integer.MAX_VALUE) + assert(dispatcher.name === EVENT_DRIVEN_PREFIX + "dispatcher-2") + } + + scenario("get a executor-event-driven-dispatcher with unbounded-linked-blocking-queue with bounded capacity from context") { + val context = new 
ClassPathXmlApplicationContext("/dispatcher-config.xml") + val dispatcher = context.getBean("executor-event-driven-dispatcher-4").asInstanceOf[ExecutorBasedEventDrivenDispatcher] + assert(dispatcher.name === EVENT_DRIVEN_PREFIX + "dispatcher-4") + val executor = getThreadPoolExecutorAndAssert(dispatcher) + assert(executor.getQueue().isInstanceOf[LinkedBlockingQueue[Runnable]]) + assert(executor.getQueue().remainingCapacity() === 55) + } + + scenario("get a executor-event-driven-dispatcher with unbounded-linked-blocking-queue with unbounded capacity from context") { + val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") + val dispatcher = context.getBean("executor-event-driven-dispatcher-5").asInstanceOf[ExecutorBasedEventDrivenDispatcher] + assert(dispatcher.name === EVENT_DRIVEN_PREFIX + "dispatcher-5") + val executor = getThreadPoolExecutorAndAssert(dispatcher) + assert(executor.getQueue().isInstanceOf[LinkedBlockingQueue[Runnable]]) + assert(executor.getQueue().remainingCapacity() === Integer.MAX_VALUE) + } + + scenario("get a executor-event-driven-dispatcher with synchronous-queue from context") { + val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") + val dispatcher = context.getBean("executor-event-driven-dispatcher-6").asInstanceOf[ExecutorBasedEventDrivenDispatcher] + assert(dispatcher.name === EVENT_DRIVEN_PREFIX + "dispatcher-6") + val executor = getThreadPoolExecutorAndAssert(dispatcher) + assert(executor.getQueue().isInstanceOf[SynchronousQueue[Runnable]]) + } + + scenario("get a reactor-based-thread-pool-event-driven-dispatcher with synchronous-queue from context") { + val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") + val dispatcher = context.getBean("reactor-based-thread-pool-event-driven-dispatcher").asInstanceOf[ReactorBasedThreadPoolEventDrivenDispatcher] + val executor = getThreadPoolExecutorAndAssert(dispatcher) + assert(executor.getQueue().isInstanceOf[SynchronousQueue[Runnable]]) + } + + scenario("get a reactor-based-single-thread-event-driven-dispatcher with synchronous-queue from context") { + val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") + val dispatcher = context.getBean("reactor-based-single-thread-event-driven-dispatcher").asInstanceOf[ReactorBasedSingleThreadEventDrivenDispatcher] + assert(dispatcher != null) + } + + scenario("get a executor-based-event-driven-work-stealing-dispatcher from context") { + val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") + val dispatcher = context.getBean("executor-based-event-driven-work-stealing-dispatcher").asInstanceOf[ExecutorBasedEventDrivenWorkStealingDispatcher] + assert(dispatcher != null) + assert(dispatcher.name === "akka:event-driven-work-stealing:dispatcher:workStealingDispatcher") + val executor = getThreadPoolExecutorAndAssert(dispatcher) + assert(executor.getQueue().isInstanceOf[BlockingQueue[Runnable]]) + } + + scenario("get a hawt-dispatcher from context") { + val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") + val dispatcher = context.getBean("hawt-dispatcher").asInstanceOf[HawtDispatcher] + assert(dispatcher != null) + assert(dispatcher.toString === "HawtDispatchEventDrivenDispatcher") + assert(dispatcher.aggregate === false) + } + + scenario("get a thread-based-dispatcher for typed actor from context") { + val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") + val pojo = 
context.getBean("typed-actor-with-thread-based-dispatcher").asInstanceOf[IMyPojo] + assert(pojo != null) + } + + scenario("get a thread-based-dispatcher for untyped from context") { + val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") + val actorRef = context.getBean("untyped-actor-with-thread-based-dispatcher").asInstanceOf[UntypedActorRef] + assert(actorRef.getActorClassName() === "se.scalablesolutions.akka.spring.foo.PingActor") + actorRef.start() + actorRef.sendOneWay("Hello") + assert(actorRef.getDispatcher.isInstanceOf[ThreadBasedDispatcher]) + } + } + + /** + * get ThreadPoolExecutor via reflection and assert that dispatcher is correct type + */ + private def getThreadPoolExecutorAndAssert(dispatcher: MessageDispatcher): ThreadPoolExecutor = { + assert(dispatcher.isInstanceOf[ThreadPoolBuilder]) + val pool = dispatcher.asInstanceOf[ThreadPoolBuilder] + val field = pool.getClass.getDeclaredField("se$scalablesolutions$akka$dispatch$ThreadPoolBuilder$$threadPoolBuilder") + field.setAccessible(true) + val executor = field.get(pool).asInstanceOf[ThreadPoolExecutor] + assert(executor != null) + executor; + } + +} \ No newline at end of file diff --git a/akka-spring/src/test/scala/SupervisionBeanDefinitionParserTest.scala b/akka-spring/src/test/scala/SupervisionBeanDefinitionParserTest.scala index a5fc44007f..fd9ad3e3bd 100644 --- a/akka-spring/src/test/scala/SupervisionBeanDefinitionParserTest.scala +++ b/akka-spring/src/test/scala/SupervisionBeanDefinitionParserTest.scala @@ -27,7 +27,7 @@ class SupervisionBeanDefinitionParserTest extends Spec with ShouldMatchers { val builder = BeanDefinitionBuilder.genericBeanDefinition("foo.bar.Foo") it("should be able to parse typed actor configuration") { - val props = parser.parseTypedActor(createTypedActorElement); + val props = parser.parseActor(createTypedActorElement); assert(props != null) assert(props.timeout == 1000) assert(props.target == "foo.bar.MyPojo") @@ -47,7 +47,7 @@ class SupervisionBeanDefinitionParserTest extends Spec with ShouldMatchers { it("should parse the supervised typed actors") { parser.parseSupervisor(createSupervisorElement, builder); - val supervised = builder.getBeanDefinition.getPropertyValues.getPropertyValue("supervised").getValue.asInstanceOf[List[TypedActorProperties]] + val supervised = builder.getBeanDefinition.getPropertyValues.getPropertyValue("supervised").getValue.asInstanceOf[List[ActorProperties]] assert(supervised != null) expect(4) { supervised.length } val iterator = supervised.iterator diff --git a/akka-spring/src/test/scala/SupervisionFactoryBeanTest.scala b/akka-spring/src/test/scala/SupervisionFactoryBeanTest.scala index 44d1cbd079..2d7baf2b3e 100644 --- a/akka-spring/src/test/scala/SupervisionFactoryBeanTest.scala +++ b/akka-spring/src/test/scala/SupervisionFactoryBeanTest.scala @@ -18,8 +18,8 @@ class SupervisionFactoryBeanTest extends Spec with ShouldMatchers { val restartStrategy = new RestartStrategy(new AllForOne(), 3, 1000, Array(classOf[Throwable])) val typedActors = List(createTypedActorProperties("se.scalablesolutions.akka.spring.Foo", 1000L)) - def createTypedActorProperties(target: String, timeout: Long) : TypedActorProperties = { - val properties = new TypedActorProperties() + def createTypedActorProperties(target: String, timeout: Long) : ActorProperties = { + val properties = new ActorProperties() properties.target = target properties.timeout = timeout properties @@ -34,8 +34,8 @@ class SupervisionFactoryBeanTest extends Spec with ShouldMatchers { 
assert(bean.getSupervised == typedActors) } - it("should return the object type TypedActorConfigurator") { - assert(bean.getObjectType == classOf[TypedActorConfigurator]) + it("should return the object type AnyRef") { + assert(bean.getObjectType == classOf[AnyRef]) } } } diff --git a/akka-spring/src/test/scala/SupervisorSpringFeatureTest.scala b/akka-spring/src/test/scala/SupervisorSpringFeatureTest.scala new file mode 100644 index 0000000000..30c3710b79 --- /dev/null +++ b/akka-spring/src/test/scala/SupervisorSpringFeatureTest.scala @@ -0,0 +1,57 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ +package se.scalablesolutions.akka.spring + + +import se.scalablesolutions.akka.spring.foo.{IMyPojo, MyPojo, IFoo, IBar} +import se.scalablesolutions.akka.dispatch._ +import se.scalablesolutions.akka.config.TypedActorConfigurator +import se.scalablesolutions.akka.actor.Supervisor + +import org.scalatest.FeatureSpec +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith +import org.springframework.beans.factory.support.DefaultListableBeanFactory +import org.springframework.beans.factory.xml.XmlBeanDefinitionReader +import org.springframework.context.ApplicationContext +import org.springframework.context.support.ClassPathXmlApplicationContext +import org.springframework.core.io.{ClassPathResource, Resource} +import java.util.concurrent._ + +/** + * Tests for spring configuration of supervisor hierarchies. + * @author michaelkober + */ +@RunWith(classOf[JUnitRunner]) +class SupervisorSpringFeatureTest extends FeatureSpec with ShouldMatchers { + + feature("Spring configuration") { + + scenario("get a supervisor for typed actors from context") { + val context = new ClassPathXmlApplicationContext("/supervisor-config.xml") + val myConfigurator = context.getBean("supervision1").asInstanceOf[TypedActorConfigurator] + // get TypedActors + val foo = myConfigurator.getInstance(classOf[IFoo]) + assert(foo != null) + val bar = myConfigurator.getInstance(classOf[IBar]) + assert(bar != null) + val pojo = myConfigurator.getInstance(classOf[IMyPojo]) + assert(pojo != null) + } + + scenario("get a supervisor for untyped actors from context") { + val context = new ClassPathXmlApplicationContext("/supervisor-config.xml") + val supervisor = context.getBean("supervision-untyped-actors").asInstanceOf[Supervisor] + supervisor.children + } + + scenario("get a supervisor and dispatcher from context") { + val context = new ClassPathXmlApplicationContext("/supervisor-config.xml") + val myConfigurator = context.getBean("supervision-with-dispatcher").asInstanceOf[TypedActorConfigurator] + val foo = myConfigurator.getInstance(classOf[IFoo]) + assert(foo != null) + } + } +} \ No newline at end of file diff --git a/akka-spring/src/test/scala/TypedActorBeanDefinitionParserTest.scala b/akka-spring/src/test/scala/TypedActorBeanDefinitionParserTest.scala index 7c0dd30f37..27a42f3d6c 100644 --- a/akka-spring/src/test/scala/TypedActorBeanDefinitionParserTest.scala +++ b/akka-spring/src/test/scala/TypedActorBeanDefinitionParserTest.scala @@ -17,7 +17,7 @@ import org.w3c.dom.Element */ @RunWith(classOf[JUnitRunner]) class TypedActorBeanDefinitionParserTest extends Spec with ShouldMatchers { - private class Parser extends TypedActorParser + private class Parser extends ActorParser describe("An TypedActorParser") { val parser = new Parser() @@ -30,7 +30,7 @@ class TypedActorBeanDefinitionParserTest extends Spec with ShouldMatchers { - val props = 
parser.parseTypedActor(dom(xml).getDocumentElement); + val props = parser.parseActor(dom(xml).getDocumentElement); assert(props != null) assert(props.timeout === 1000) assert(props.target === "foo.bar.MyPojo") @@ -44,7 +44,7 @@ class TypedActorBeanDefinitionParserTest extends Spec with ShouldMatchers { timeout="1000" transactional="true"/> - evaluating { parser.parseTypedActor(dom(xml).getDocumentElement) } should produce [IllegalArgumentException] + evaluating { parser.parseActor(dom(xml).getDocumentElement) } should produce [IllegalArgumentException] } it("should parse TypedActors configuration with dispatcher") { @@ -52,7 +52,7 @@ class TypedActorBeanDefinitionParserTest extends Spec with ShouldMatchers { timeout="1000"> - val props = parser.parseTypedActor(dom(xml).getDocumentElement); + val props = parser.parseActor(dom(xml).getDocumentElement); assert(props != null) assert(props.dispatcher.dispatcherType === "thread-based") } @@ -62,7 +62,7 @@ class TypedActorBeanDefinitionParserTest extends Spec with ShouldMatchers { timeout="1000"> - val props = parser.parseTypedActor(dom(xml).getDocumentElement); + val props = parser.parseActor(dom(xml).getDocumentElement); assert(props != null) assert(props.host === "com.some.host") assert(props.port === 9999) diff --git a/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala b/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala new file mode 100644 index 0000000000..8767b2e75a --- /dev/null +++ b/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala @@ -0,0 +1,78 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ +package se.scalablesolutions.akka.spring + + +import foo.{IMyPojo, MyPojo} +import se.scalablesolutions.akka.dispatch.FutureTimeoutException +import se.scalablesolutions.akka.remote.RemoteNode +import org.scalatest.FeatureSpec +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith +import org.springframework.beans.factory.support.DefaultListableBeanFactory +import org.springframework.beans.factory.xml.XmlBeanDefinitionReader +import org.springframework.context.ApplicationContext +import org.springframework.context.support.ClassPathXmlApplicationContext +import org.springframework.core.io.{ClassPathResource, Resource} + +/** + * Tests for spring configuration of typed actors. 
+ * @author michaelkober + */ +@RunWith(classOf[JUnitRunner]) +class TypedActorSpringFeatureTest extends FeatureSpec with ShouldMatchers { + feature("parse Spring application context") { + + scenario("akka:typed-actor and akka:supervision and akka:dispatcher can be used as top level elements") { + val context = new ClassPathResource("/typed-actor-config.xml") + val beanFactory = new DefaultListableBeanFactory() + val reader = new XmlBeanDefinitionReader(beanFactory) + reader.loadBeanDefinitions(context) + assert(beanFactory.containsBeanDefinition("simple-typed-actor")) + assert(beanFactory.containsBeanDefinition("remote-typed-actor")) + assert(beanFactory.containsBeanDefinition("supervision1")) + assert(beanFactory.containsBeanDefinition("dispatcher1")) + } + + scenario("get a typed actor") { + val context = new ClassPathXmlApplicationContext("/typed-actor-config.xml") + val myPojo = context.getBean("simple-typed-actor").asInstanceOf[IMyPojo] + var msg = myPojo.getFoo() + msg += myPojo.getBar() + assert(msg === "foobar") + } + + scenario("FutureTimeoutException when timed out") { + val context = new ClassPathXmlApplicationContext("/typed-actor-config.xml") + val myPojo = context.getBean("simple-typed-actor").asInstanceOf[IMyPojo] + evaluating {myPojo.longRunning()} should produce[FutureTimeoutException] + + } + + scenario("typed-actor with timeout") { + val context = new ClassPathXmlApplicationContext("/typed-actor-config.xml") + val myPojo = context.getBean("simple-typed-actor-long-timeout").asInstanceOf[IMyPojo] + assert(myPojo.longRunning() === "this took long"); + } + + scenario("transactional typed-actor") { + val context = new ClassPathXmlApplicationContext("/typed-actor-config.xml") + val myPojo = context.getBean("transactional-typed-actor").asInstanceOf[IMyPojo] + var msg = myPojo.getFoo() + msg += myPojo.getBar() + assert(msg === "foobar") + } + + scenario("get a remote typed-actor") { + RemoteNode.start + Thread.sleep(1000) + val context = new ClassPathXmlApplicationContext("/typed-actor-config.xml") + val myPojo = context.getBean("remote-typed-actor").asInstanceOf[IMyPojo] + assert(myPojo.getFoo === "foo") + } + } + +} + diff --git a/akka-spring/src/test/scala/UntypedActorSpringFeatureTest.scala b/akka-spring/src/test/scala/UntypedActorSpringFeatureTest.scala new file mode 100644 index 0000000000..677a671d53 --- /dev/null +++ b/akka-spring/src/test/scala/UntypedActorSpringFeatureTest.scala @@ -0,0 +1,79 @@ +/** + * Copyright (C) 2009-2010 Scalable Solutions AB + */ +package se.scalablesolutions.akka.spring + + +import foo.PingActor +import se.scalablesolutions.akka.dispatch.ExecutorBasedEventDrivenWorkStealingDispatcher +import se.scalablesolutions.akka.remote.RemoteNode +import se.scalablesolutions.akka.actor.UntypedActorRef +import org.scalatest.FeatureSpec +import org.scalatest.matchers.ShouldMatchers +import org.scalatest.junit.JUnitRunner +import org.junit.runner.RunWith +import org.springframework.context.ApplicationContext +import org.springframework.context.support.ClassPathXmlApplicationContext + + +/** + * Tests for spring configuration of typed actors. 
+ * @author michaelkober + */ +@RunWith(classOf[JUnitRunner]) +class UntypedActorSpringFeatureTest extends FeatureSpec with ShouldMatchers { + feature("parse Spring application context") { + + scenario("get a untyped actor") { + val context = new ClassPathXmlApplicationContext("/untyped-actor-config.xml") + val myactor = context.getBean("simple-untyped-actor").asInstanceOf[UntypedActorRef] + assert(myactor.getActorClassName() === "se.scalablesolutions.akka.spring.foo.PingActor") + myactor.start() + myactor.sendOneWay("Hello") + assert(myactor.actorRef.isDefinedAt("some string message")) + } + + scenario("untyped-actor with timeout") { + val context = new ClassPathXmlApplicationContext("/untyped-actor-config.xml") + val myactor = context.getBean("simple-untyped-actor-long-timeout").asInstanceOf[UntypedActorRef] + assert(myactor.getActorClassName() === "se.scalablesolutions.akka.spring.foo.PingActor") + myactor.start() + myactor.sendOneWay("Hello") + assert(myactor.getTimeout() === 10000) + } + + scenario("transactional untyped-actor") { + val context = new ClassPathXmlApplicationContext("/untyped-actor-config.xml") + val myactor = context.getBean("transactional-untyped-actor").asInstanceOf[UntypedActorRef] + assert(myactor.getActorClassName() === "se.scalablesolutions.akka.spring.foo.PingActor") + myactor.start() + myactor.sendOneWay("Hello") + assert(myactor.actorRef.isDefinedAt("some string message")) + } + + scenario("get a remote typed-actor") { + RemoteNode.start + Thread.sleep(1000) + val context = new ClassPathXmlApplicationContext("/untyped-actor-config.xml") + val myactor = context.getBean("remote-untyped-actor").asInstanceOf[UntypedActorRef] + assert(myactor.getActorClassName() === "se.scalablesolutions.akka.spring.foo.PingActor") + myactor.start() + myactor.sendOneWay("Hello") + assert(myactor.actorRef.isDefinedAt("some string message")) + assert(myactor.getRemoteAddress().isDefined) + assert(myactor.getRemoteAddress().get.getHostName() === "localhost") + assert(myactor.getRemoteAddress().get.getPort() === 9999) + } + + scenario("untyped-actor with custom dispatcher") { + val context = new ClassPathXmlApplicationContext("/untyped-actor-config.xml") + val myactor = context.getBean("untyped-actor-with-dispatcher").asInstanceOf[UntypedActorRef] + assert(myactor.getActorClassName() === "se.scalablesolutions.akka.spring.foo.PingActor") + myactor.start() + myactor.sendOneWay("Hello") + assert(myactor.getTimeout() === 1000) + assert(myactor.getDispatcher.isInstanceOf[ExecutorBasedEventDrivenWorkStealingDispatcher]) + } + } +} + diff --git a/config/logback.xml b/config/logback.xml index 40faeefb3c..5ab49da1c3 100755 --- a/config/logback.xml +++ b/config/logback.xml @@ -27,7 +27,7 @@ ./logs/akka.log.%d{yyyy-MM-dd-HH} - + diff --git a/project/build/AkkaProject.scala b/project/build/AkkaProject.scala index e0874b9e9c..e546ff0f80 100644 --- a/project/build/AkkaProject.scala +++ b/project/build/AkkaProject.scala @@ -374,6 +374,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { class AkkaAMQPProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with CodeFellowPlugin { val commons_io = Dependencies.commons_io val rabbit = Dependencies.rabbit + val protobuf = Dependencies.protobuf // testing val junit = Dependencies.junit
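For reference only (not part of the patch): a minimal sketch of how the reworked dispatcher factory shown above could be driven directly. It assumes the string literals "thread-based" and "hawt" match the AkkaSpringConfigurationTags constants used in `DispatcherFactoryBean.createNewInstance`, as asserted by the parser tests in this changeset; `PingActor` is the test actor added by the patch, and `SketchMain` is a hypothetical entry point.

```scala
import se.scalablesolutions.akka.actor.{ActorRef, UntypedActor}
import se.scalablesolutions.akka.dispatch.MessageDispatcher
import se.scalablesolutions.akka.spring.{DispatcherFactoryBean, DispatcherProperties}
import se.scalablesolutions.akka.spring.foo.PingActor

object SketchMain {
  def main(args: Array[String]) {
    // Thread-based dispatchers are now created against a concrete actor,
    // so the factory is called with Some(actorRef); calling it without an
    // ActorRef throws IllegalArgumentException (see DispatcherFactoryBean above).
    val threadBasedProps = new DispatcherProperties()
    threadBasedProps.dispatcherType = "thread-based"
    val pingRef: ActorRef = UntypedActor.actorOf(classOf[PingActor]).actorRef
    val threadBased: MessageDispatcher =
      DispatcherFactoryBean.createNewInstance(threadBasedProps, Some(pingRef))

    // The new HawtDispatcher type only honours the 'aggregate' flag;
    // the name attribute is optional for this dispatcher type.
    val hawtProps = new DispatcherProperties()
    hawtProps.dispatcherType = "hawt"
    hawtProps.aggregate = false
    val hawtDispatcher: MessageDispatcher =
      DispatcherFactoryBean.createNewInstance(hawtProps)
  }
}
```

In Spring configuration the same factory is reached through the dispatcher element nested in a typed-actor or untyped-actor element, which is why the parser now accepts both parent nodes for the thread-based type.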