diff --git a/README b/README deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/akka-amqp/src/main/java/akka/amqp/ExampleSessionJava.java b/akka-amqp/src/main/java/akka/amqp/ExampleSessionJava.java deleted file mode 100644 index 5764877c3c..0000000000 --- a/akka-amqp/src/main/java/akka/amqp/ExampleSessionJava.java +++ /dev/null @@ -1,267 +0,0 @@ -package akka.amqp; - -import org.multiverse.api.latches.StandardLatch; -import scala.Option; -import akka.actor.ActorRef; -import akka.actor.ActorRegistry; -import akka.actor.UntypedActor; -import akka.actor.UntypedActorFactory; - -import akka.amqp.rpc.RPC; -import akka.remote.protocol.RemoteProtocol; - -import akka.japi.Function; -import akka.japi.Procedure; - -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; - -@SuppressWarnings({"unchecked"}) -public class ExampleSessionJava { - - public static void main(String... args) { - new ExampleSessionJava(); - } - - public ExampleSessionJava() { - printTopic("DIRECT"); - direct(); - - printTopic("CALLBACK"); - callback(); - - printTopic("EASY STRING PRODUCER AND CONSUMER"); - easyStringProducerConsumer(); - - printTopic("EASY PROTOBUF PRODUCER AND CONSUMER"); - easyProtobufProducerConsumer(); - - printTopic("EASY STRING RPC"); - easyStringRpc(); - - printTopic("EASY PROTOBUF RPC"); - easyProtobufRpc(); - - // postStop everything the amqp tree except the main AMQP supervisor - // all connections/consumers/producers will be stopped - AMQP.shutdownAll(); - - ActorRegistry.shutdownAll(); - - printTopic("Happy hAkking :-)"); - - System.exit(0); - } - - private void printTopic(String topic) { - - System.out.println(""); - System.out.println("==== " + topic + " ==="); - System.out.println(""); - try { - TimeUnit.SECONDS.sleep(2); - } catch (InterruptedException ignore) { - } - } - - private void direct() { - // defaults to amqp://guest:guest@localhost:5672/ - ActorRef connection = AMQP.newConnection(); - - AMQP.ExchangeParameters exchangeParameters = new AMQP.ExchangeParameters("my_direct_exchange", Direct.getInstance()); - - ActorRef deliveryHandler = UntypedActor.actorOf(DirectDeliveryHandlerActor.class); - - AMQP.ConsumerParameters consumerParameters = new AMQP.ConsumerParameters("some.routing", deliveryHandler, exchangeParameters); - ActorRef consumer = AMQP.newConsumer(connection, consumerParameters); - - - ActorRef producer = AMQP.newProducer(connection, new AMQP.ProducerParameters(exchangeParameters)); - producer.sendOneWay(new Message("@jonas_boner: You sucked!!".getBytes(), "some.routing")); - } - - private void callback() { - - final CountDownLatch channelCountdown = new CountDownLatch(2); - - ActorRef connectionCallback = UntypedActor.actorOf(ConnectionCallbackActor.class); - connectionCallback.start(); - - AMQP.ConnectionParameters connectionParameters = new AMQP.ConnectionParameters(connectionCallback); - ActorRef connection = AMQP.newConnection(connectionParameters); - - ActorRef channelCallback = UntypedActor.actorOf(new UntypedActorFactory() { - public UntypedActor create() { - return new ChannelCallbackActor(channelCountdown); - } - }); - channelCallback.start(); - - AMQP.ExchangeParameters exchangeParameters = new AMQP.ExchangeParameters("my_callback_exchange", Direct.getInstance()); - AMQP.ChannelParameters channelParameters = new AMQP.ChannelParameters(channelCallback); - - ActorRef dummyHandler = UntypedActor.actorOf(DummyActor.class); - AMQP.ConsumerParameters consumerParameters = new AMQP.ConsumerParameters("callback.routing", 
dummyHandler, exchangeParameters, channelParameters); - - ActorRef consumer = AMQP.newConsumer(connection, consumerParameters); - - ActorRef producer = AMQP.newProducer(connection, new AMQP.ProducerParameters(exchangeParameters, channelParameters)); - - // Wait until both channels (producer & consumer) are started before stopping the connection - try { - channelCountdown.await(2, TimeUnit.SECONDS); - } catch (InterruptedException ignore) { - } - connection.stop(); - } - - public void easyStringProducerConsumer() { - ActorRef connection = AMQP.newConnection(); - - String exchangeName = "easy.string"; - - // listen by default to: - // exchange = optional exchangeName - // routingKey = provided routingKey or .request - // queueName = .in - Procedure procedure = new Procedure() { - public void apply(String message) { - System.out.println("### >> Received message: " + message); - } - }; - AMQP.newStringConsumer(connection, procedure, exchangeName); - - // send by default to: - // exchange = exchangeName - // routingKey = .request - AMQP.ProducerClient producer = AMQP.newStringProducer(connection, exchangeName); - - producer.send("This shit is easy!"); - } - - public void easyProtobufProducerConsumer() { - - ActorRef connection = AMQP.newConnection(); - - String exchangeName = "easy.protobuf"; - - Procedure procedure = new Procedure() { - public void apply(RemoteProtocol.AddressProtocol message) { - System.out.println("### >> Received message: " + message); - } - }; - - AMQP.newProtobufConsumer(connection, procedure, exchangeName, RemoteProtocol.AddressProtocol.class); - - AMQP.ProducerClient producerClient = AMQP.newProtobufProducer(connection, exchangeName); - - producerClient.send(RemoteProtocol.AddressProtocol.newBuilder().setHostname("akkarocks.com").setPort(1234).build()); - } - - public void easyStringRpc() { - - ActorRef connection = AMQP.newConnection(); - - String exchangeName = "easy.stringrpc"; - - // listen by default to: - // exchange = exchangeName - // routingKey = .request - // queueName = .in - RPC.newStringRpcServer(connection, exchangeName, new Function() { - public String apply(String request) { - System.out.println("### >> Got request: " + request); - return "Response to: '" + request + "'"; - } - }); - - // send by default to: - // exchange = exchangeName - // routingKey = .request - RPC.RpcClient stringRpcClient = RPC.newStringRpcClient(connection, exchangeName); - - Option response = stringRpcClient.call("AMQP Rocks!"); - System.out.println("### >> Got response: " + response); - - final StandardLatch standardLatch = new StandardLatch(); - stringRpcClient.callAsync("AMQP is dead easy", new Procedure() { - public void apply(String request) { - System.out.println("### >> This is handled async: " + request); - standardLatch.open(); - } - }); - try { - standardLatch.tryAwait(2, TimeUnit.SECONDS); - } catch (InterruptedException ignore) { - } - } - - - public void easyProtobufRpc() { - - ActorRef connection = AMQP.newConnection(); - - String exchangeName = "easy.protobuf.rpc"; - - RPC.newProtobufRpcServer(connection, exchangeName, new Function() { - public RemoteProtocol.AddressProtocol apply(RemoteProtocol.AddressProtocol request) { - return RemoteProtocol.AddressProtocol.newBuilder().setHostname(request.getHostname()).setPort(request.getPort()).build(); - } - }, RemoteProtocol.AddressProtocol.class); - - RPC.RpcClient protobufRpcClient = - RPC.newProtobufRpcClient(connection, exchangeName, RemoteProtocol.AddressProtocol.class); - - scala.Option response = - 
protobufRpcClient.call(RemoteProtocol.AddressProtocol.newBuilder().setHostname("localhost").setPort(4321).build()); - - System.out.println("### >> Got response: " + response); - } -} - -class DummyActor extends UntypedActor { - public void onReceive(Object message) throws Exception { - // not used - } -} - -class ChannelCallbackActor extends UntypedActor { - - private final CountDownLatch channelCountdown; - - public ChannelCallbackActor(CountDownLatch channelCountdown) { - this.channelCountdown = channelCountdown; - } - - public void onReceive(Object message) throws Exception { - if (Started.getInstance().getClass().isAssignableFrom(message.getClass())) { - System.out.println("### >> Channel callback: Started"); - channelCountdown.countDown(); - } else if (Restarting.getInstance().getClass().isAssignableFrom(message.getClass())) { - } else if (Stopped.getInstance().getClass().isAssignableFrom(message.getClass())) { - System.out.println("### >> Channel callback: Stopped"); - } else throw new IllegalArgumentException("Unknown message: " + message); - } -} - -class ConnectionCallbackActor extends UntypedActor { - - public void onReceive(Object message) throws Exception { - if (Connected.getInstance().getClass().isAssignableFrom(message.getClass())) { - System.out.println("### >> Connection callback: Connected!"); - } else if (Reconnecting.getInstance().getClass().isAssignableFrom(message.getClass())) { - } else if (Disconnected.getInstance().getClass().isAssignableFrom(message.getClass())) { - System.out.println("### >> Connection callback: Disconnected!"); - } else throw new IllegalArgumentException("Unknown message: " + message); - } -} - -class DirectDeliveryHandlerActor extends UntypedActor { - - public void onReceive(Object message) throws Exception { - if (Delivery.class.isAssignableFrom(message.getClass())) { - Delivery delivery = (Delivery) message; - System.out.println("### >> @george_bush received message from: " + new String(delivery.payload())); - } else throw new IllegalArgumentException("Unknown message: " + message); - } -} diff --git a/akka-amqp/src/main/scala/akka/amqp/AMQP.scala b/akka-amqp/src/main/scala/akka/amqp/AMQP.scala deleted file mode 100644 index ec029bc1cd..0000000000 --- a/akka-amqp/src/main/scala/akka/amqp/AMQP.scala +++ /dev/null @@ -1,489 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.amqp - -import akka.actor.{Actor, ActorRef} -import akka.actor.Actor._ -import akka.config.Supervision.OneForOneStrategy -import com.rabbitmq.client.{ReturnListener, ShutdownListener, ConnectionFactory} -import ConnectionFactory._ -import com.rabbitmq.client.AMQP.BasicProperties -import java.lang.{String, IllegalArgumentException} -import reflect.Manifest -import akka.japi.Procedure -import akka.dispatch.Dispatchers - -/** - * AMQP Actor API. Implements Connection, Producer and Consumer materialized as Actors. - * - * @see akka.amqp.ExampleSession - * - * @author Irmo Manie - */ -object AMQP { - - lazy val consumerDispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher("amqp-consumers").build - lazy val producerDispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher("amqp-producers").build - /** - * Parameters used to make the connection to the amqp broker. Uses the rabbitmq defaults. 
- */ - case class ConnectionParameters( - host: String = DEFAULT_HOST, - port: Int = DEFAULT_AMQP_PORT, - username: String = DEFAULT_USER, - password: String = DEFAULT_PASS, - virtualHost: String = DEFAULT_VHOST, - initReconnectDelay: Long = 5000, - connectionCallback: Option[ActorRef] = None) { - - // Needed for Java API usage - def this() = this (DEFAULT_HOST, DEFAULT_AMQP_PORT, DEFAULT_USER, DEFAULT_PASS, DEFAULT_VHOST, 5000, None) - - // Needed for Java API usage - def this(host: String, port: Int, username: String, password: String, virtualHost: String) = - this (host, port, username, password, virtualHost, 5000, None) - - // Needed for Java API usage - def this(host: String, port: Int, username: String, password: String, virtualHost: String, initReconnectDelay: Long, connectionCallback: ActorRef) = - this (host, port, username, password, virtualHost, initReconnectDelay, Some(connectionCallback)) - - // Needed for Java API usage - def this(connectionCallback: ActorRef) = - this (DEFAULT_HOST, DEFAULT_AMQP_PORT, DEFAULT_USER, DEFAULT_PASS, DEFAULT_VHOST, 5000, Some(connectionCallback)) - - } - - /** - * Additional parameters for the channel - */ - case class ChannelParameters( - shutdownListener: Option[ShutdownListener] = None, - channelCallback: Option[ActorRef] = None, - prefetchSize: Int = 0) { - - // Needed for Java API usage - def this() = this (None, None) - - // Needed for Java API usage - def this(channelCallback: ActorRef) = this (None, Some(channelCallback)) - - // Needed for Java API usage - def this(shutdownListener: ShutdownListener, channelCallback: ActorRef) = - this (Some(shutdownListener), Some(channelCallback)) - } - - /** - * Declaration type used for either exchange or queue declaration - */ - sealed trait Declaration - case object NoActionDeclaration extends Declaration { - def getInstance() = this // Needed for Java API usage - } - case object PassiveDeclaration extends Declaration { - def getInstance() = this // Needed for Java API usage - } - case class ActiveDeclaration(durable: Boolean = false, autoDelete: Boolean = true, exclusive: Boolean = false) extends Declaration { - - // Needed for Java API usage - def this() = this (false, true, false) - - // Needed for Java API usage - def this(durable: Boolean, autoDelete: Boolean) = this (durable, autoDelete, false) - } - - /** - * Exchange specific parameters - */ - case class ExchangeParameters( - exchangeName: String, - exchangeType: ExchangeType = Topic, - exchangeDeclaration: Declaration = ActiveDeclaration(), - configurationArguments: Map[String, AnyRef] = Map.empty) { - - // Needed for Java API usage - def this(exchangeName: String) = - this (exchangeName, Topic, ActiveDeclaration(), Map.empty) - - // Needed for Java API usage - def this(exchangeName: String, exchangeType: ExchangeType) = - this (exchangeName, exchangeType, ActiveDeclaration(), Map.empty) - - // Needed for Java API usage - def this(exchangeName: String, exchangeType: ExchangeType, exchangeDeclaration: Declaration) = - this (exchangeName, exchangeType, exchangeDeclaration, Map.empty) - } - - /** - * Producer specific parameters - */ - case class ProducerParameters( - exchangeParameters: Option[ExchangeParameters] = None, - producerId: Option[String] = None, - returnListener: Option[ReturnListener] = None, - channelParameters: Option[ChannelParameters] = None) { - def this() = this (None, None, None, None) - - // Needed for Java API usage - def this(exchangeParameters: ExchangeParameters) = this (Some(exchangeParameters), None, None, None) - - 
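// The case-class defaults above make the Scala API terse: only non-default fields need naming.
// A connection against a non-default broker, as a minimal sketch (host and credentials hypothetical):
//
//   val connection = AMQP.newConnection(ConnectionParameters(
//     host = "rabbit.example.com",
//     username = "app", password = "secret",
//     initReconnectDelay = 1000))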
// Needed for Java API usage - def this(exchangeParameters: ExchangeParameters, producerId: String) = - this (Some(exchangeParameters), Some(producerId), None, None) - - // Needed for Java API usage - def this(exchangeParameters: ExchangeParameters, returnListener: ReturnListener) = - this (Some(exchangeParameters), None, Some(returnListener), None) - - // Needed for Java API usage - def this(exchangeParameters: ExchangeParameters, channelParameters: ChannelParameters) = - this (Some(exchangeParameters), None, None, Some(channelParameters)) - - // Needed for Java API usage - def this(exchangeParameters: ExchangeParameters, producerId: String, returnListener: ReturnListener, channelParameters: ChannelParameters) = - this (Some(exchangeParameters), Some(producerId), Some(returnListener), Some(channelParameters)) - } - - /** - * Consumer specific parameters - */ - case class ConsumerParameters( - routingKey: String, - deliveryHandler: ActorRef, - queueName: Option[String] = None, - exchangeParameters: Option[ExchangeParameters] = None, - queueDeclaration: Declaration = ActiveDeclaration(), - selfAcknowledging: Boolean = true, - channelParameters: Option[ChannelParameters] = None) { - if (queueName.isEmpty) { - queueDeclaration match { - case ActiveDeclaration(true, _, _) => - throw new IllegalArgumentException("A queue name is required when requesting a durable queue.") - case PassiveDeclaration => - throw new IllegalArgumentException("A queue name is required when requesting passive declaration.") - case _ => () // ignore - } - } - - // Needed for Java API usage - def this(routingKey: String, deliveryHandler: ActorRef) = - this (routingKey, deliveryHandler, None, None, ActiveDeclaration(), true, None) - - // Needed for Java API usage - def this(routingKey: String, deliveryHandler: ActorRef, channelParameters: ChannelParameters) = - this (routingKey, deliveryHandler, None, None, ActiveDeclaration(), true, Some(channelParameters)) - - // Needed for Java API usage - def this(routingKey: String, deliveryHandler: ActorRef, selfAcknowledging: Boolean) = - this (routingKey, deliveryHandler, None, None, ActiveDeclaration(), selfAcknowledging, None) - - // Needed for Java API usage - def this(routingKey: String, deliveryHandler: ActorRef, selfAcknowledging: Boolean, channelParameters: ChannelParameters) = - this (routingKey, deliveryHandler, None, None, ActiveDeclaration(), selfAcknowledging, Some(channelParameters)) - - // Needed for Java API usage - def this(routingKey: String, deliveryHandler: ActorRef, queueName: String) = - this (routingKey, deliveryHandler, Some(queueName), None, ActiveDeclaration(), true, None) - - // Needed for Java API usage - def this(routingKey: String, deliveryHandler: ActorRef, queueName: String, queueDeclaration: Declaration, selfAcknowledging: Boolean, channelParameters: ChannelParameters) = - this (routingKey, deliveryHandler, Some(queueName), None, queueDeclaration, selfAcknowledging, Some(channelParameters)) - - // Needed for Java API usage - def this(routingKey: String, deliveryHandler: ActorRef, exchangeParameters: ExchangeParameters) = - this (routingKey, deliveryHandler, None, Some(exchangeParameters), ActiveDeclaration(), true, None) - - // Needed for Java API usage - def this(routingKey: String, deliveryHandler: ActorRef, exchangeParameters: ExchangeParameters, channelParameters: ChannelParameters) = - this (routingKey, deliveryHandler, None, Some(exchangeParameters), ActiveDeclaration(), true, Some(channelParameters)) - - // Needed for Java API usage - def 
this(routingKey: String, deliveryHandler: ActorRef, exchangeParameters: ExchangeParameters, selfAcknowledging: Boolean) = - this (routingKey, deliveryHandler, None, Some(exchangeParameters), ActiveDeclaration(), selfAcknowledging, None) - - // Needed for Java API usage - def this(routingKey: String, deliveryHandler: ActorRef, queueName: String, exchangeParameters: ExchangeParameters) = - this (routingKey, deliveryHandler, Some(queueName), Some(exchangeParameters), ActiveDeclaration(), true, None) - - // Needed for Java API usage - def this(routingKey: String, deliveryHandler: ActorRef, queueName: String, exchangeParameters: ExchangeParameters, queueDeclaration: Declaration) = - this (routingKey, deliveryHandler, Some(queueName), Some(exchangeParameters), queueDeclaration, true, None) - - // Needed for Java API usage - def this(routingKey: String, deliveryHandler: ActorRef, queueName: String, exchangeParameters: ExchangeParameters, queueDeclaration: Declaration, selfAcknowledging: Boolean) = - this (routingKey, deliveryHandler, Some(queueName), Some(exchangeParameters), queueDeclaration, selfAcknowledging, None) - - // Needed for Java API usage - def this(routingKey: String, deliveryHandler: ActorRef, queueName: String, exchangeParameters: ExchangeParameters, queueDeclaration: Declaration, selfAcknowledging: Boolean, channelParameters: ChannelParameters) = - this (routingKey, deliveryHandler, Some(queueName), Some(exchangeParameters), queueDeclaration, selfAcknowledging, Some(channelParameters)) - - // How about that for some overloading... huh? :P (yes, I know, there are still possibilities left...sue me!) - // Who said java is easy :( - } - - def newConnection(connectionParameters: ConnectionParameters = new ConnectionParameters()): ActorRef = { - val connection = actorOf(new FaultTolerantConnectionActor(connectionParameters)) - supervisor.startLink(connection) - connection ! Connect - connection - } - - // Needed for Java API usage - def newConnection(): ActorRef = { - newConnection(new ConnectionParameters()) - } - - def newProducer(connection: ActorRef, producerParameters: ProducerParameters): ActorRef = { - val producer: ActorRef = Actor.actorOf(new ProducerActor(producerParameters)) - producer.dispatcher = producerDispatcher - connection.startLink(producer) - producer ! Start - producer - } - - def newConsumer(connection: ActorRef, consumerParameters: ConsumerParameters): ActorRef = { - val consumer: ActorRef = actorOf(new ConsumerActor(consumerParameters)) - consumer.dispatcher = consumerDispatcher - val handler = consumerParameters.deliveryHandler - if (handler.isUnstarted) handler.dispatcher = consumerDispatcher - if (handler.supervisor.isEmpty) consumer.startLink(handler) - connection.startLink(consumer) - consumer ! Start - consumer - } - - /** - * Convenience - */ - class ProducerClient[O](client: ActorRef, routingKey: String, toBinary: ToBinary[O]) { - // Needed for Java API usage - def send(request: O): Unit = { - send(request, None) - } - // Needed for Java API usage - def send(request: O, replyTo: String): Unit = { - send(request, Some(replyTo)) - } - - def send(request: O, replyTo: Option[String] = None) = { - val basicProperties = new BasicProperties - basicProperties.setReplyTo(replyTo.getOrElse(null)) - client ! 
Message(toBinary.toBinary(request), routingKey, false, false, Some(basicProperties)) - } - - def stop() = client.stop - } - - // Needed for Java API usage - def newStringProducer(connection: ActorRef, - exchangeName: String): ProducerClient[String] = { - newStringProducer(connection, Some(exchangeName)) - } - - // Needed for Java API usage - def newStringProducer(connection: ActorRef, - exchangeName: String, - routingKey: String): ProducerClient[String] = { - newStringProducer(connection, Some(exchangeName), Some(routingKey)) - } - - // Needed for Java API usage - def newStringProducer(connection: ActorRef, - exchangeName: String, - routingKey: String, - producerId: String): ProducerClient[String] = { - newStringProducer(connection, Some(exchangeName), Some(routingKey), Some(producerId)) - } - - def newStringProducer(connection: ActorRef, - exchangeName: Option[String], - routingKey: Option[String] = None, - producerId: Option[String] = None): ProducerClient[String] = { - - if (exchangeName.isEmpty && routingKey.isEmpty) { - throw new IllegalArgumentException("Either exchange name or routing key is mandatory") - } - val exchangeParameters = exchangeName.flatMap(name => Some(ExchangeParameters(name))) - val rKey = routingKey.getOrElse("%s.request".format(exchangeName.get)) - - val producerRef = newProducer(connection, ProducerParameters(exchangeParameters, producerId)) - val toBinary = new ToBinary[String] { - def toBinary(t: String) = t.getBytes - } - new ProducerClient(producerRef, rKey, toBinary) - } - - // Needed for Java API usage - def newStringConsumer(connection: ActorRef, - handler: Procedure[String], - exchangeName: String): ActorRef = { - newStringConsumer(connection, handler.apply _, Some(exchangeName)) - } - - // Needed for Java API usage - def newStringConsumer(connection: ActorRef, - handler: Procedure[String], - exchangeName: String, - routingKey: String): ActorRef = { - newStringConsumer(connection, handler.apply _, Some(exchangeName), Some(routingKey)) - } - - // Needed for Java API usage - def newStringConsumer(connection: ActorRef, - handler: Procedure[String], - exchangeName: String, - routingKey: String, - queueName: String): ActorRef = { - newStringConsumer(connection, handler.apply _, Some(exchangeName), Some(routingKey), Some(queueName)) - } - - def newStringConsumer(connection: ActorRef, - handler: String => Unit, - exchangeName: Option[String], - routingKey: Option[String] = None, - queueName: Option[String] = None): ActorRef = { - - if (exchangeName.isEmpty && routingKey.isEmpty) { - throw new IllegalArgumentException("Either exchange name or routing key is mandatory") - } - - val deliveryHandler = actorOf( new Actor { - def receive = { case Delivery(payload, _, _, _, _, _) => handler.apply(new String(payload)) } - } ).start - - val exchangeParameters = exchangeName.flatMap(name => Some(ExchangeParameters(name))) - val rKey = routingKey.getOrElse("%s.request".format(exchangeName.get)) - val qName = queueName.getOrElse("%s.in".format(rKey)) - - newConsumer(connection, ConsumerParameters(rKey, deliveryHandler, Some(qName), exchangeParameters)) - } - - - // Needed for Java API usage - def newProtobufProducer[O <: com.google.protobuf.Message](connection: ActorRef, - exchangeName: String): ProducerClient[O] = { - newProtobufProducer(connection, Some(exchangeName)) - } - - // Needed for Java API usage - def newProtobufProducer[O <: com.google.protobuf.Message](connection: ActorRef, - exchangeName: String, - routingKey: String): ProducerClient[O] = { - 
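// The convenience factories above share one naming convention: when only an exchange name is
// given, routingKey defaults to "<exchangeName>.request" and the consumer queue to
// "<routingKey>.in". Sketch (exchange name "orders" hypothetical):
//
//   val producer = AMQP.newStringProducer(connection, Some("orders"))
//   // publishes to exchange "orders" with routing key "orders.request";
//   // newStringConsumer(connection, handler, Some("orders")) binds queue "orders.request.in"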
newProtobufProducer(connection, Some(exchangeName), Some(routingKey)) - } - - // Needed for Java API usage - def newProtobufProducer[O <: com.google.protobuf.Message](connection: ActorRef, - exchangeName: String, - routingKey: String, - producerId: String): ProducerClient[O] = { - newProtobufProducer(connection, Some(exchangeName), Some(routingKey), Some(producerId)) - } - - def newProtobufProducer[O <: com.google.protobuf.Message](connection: ActorRef, - exchangeName: Option[String], - routingKey: Option[String] = None, - producerId: Option[String] = None): ProducerClient[O] = { - - if (exchangeName.isEmpty && routingKey.isEmpty) { - throw new IllegalArgumentException("Either exchange name or routing key is mandatory") - } - val exchangeParameters = exchangeName.flatMap(name => Some(ExchangeParameters(name))) - val rKey = routingKey.getOrElse("%s.request".format(exchangeName.get)) - - val producerRef = newProducer(connection, ProducerParameters(exchangeParameters, producerId)) - new ProducerClient(producerRef, rKey, new ToBinary[O] { - def toBinary(t: O) = t.toByteArray - }) - } - - // Needed for Java API usage - def newProtobufConsumer[I <: com.google.protobuf.Message](connection: ActorRef, - handler: Procedure[I], - exchangeName: String, - clazz: Class[I]): ActorRef = { - implicit val manifest = Manifest.classType[I](clazz) - newProtobufConsumer[I](connection, handler.apply _, Some(exchangeName)) - } - - // Needed for Java API usage - def newProtobufConsumer[I <: com.google.protobuf.Message](connection: ActorRef, - handler: Procedure[I], - exchangeName: String, - routingKey: String, - clazz: Class[I]): ActorRef = { - implicit val manifest = Manifest.classType[I](clazz) - newProtobufConsumer[I](connection, handler.apply _, Some(exchangeName), Some(routingKey)) - } - - // Needed for Java API usage - def newProtobufConsumer[I <: com.google.protobuf.Message](connection: ActorRef, - handler: Procedure[I], - exchangeName: String, - routingKey: String, - queueName: String, - clazz: Class[I]): ActorRef = { - implicit val manifest = Manifest.classType[I](clazz) - newProtobufConsumer[I](connection, handler.apply _, Some(exchangeName), Some(routingKey), Some(queueName)) - } - - def newProtobufConsumer[I <: com.google.protobuf.Message](connection: ActorRef, - handler: I => Unit, - exchangeName: Option[String], - routingKey: Option[String] = None, - queueName: Option[String] = None)(implicit manifest: Manifest[I]): ActorRef = { - - if (exchangeName.isEmpty && routingKey.isEmpty) { - throw new IllegalArgumentException("Either exchange name or routing key is mandatory") - } - - val deliveryHandler = actorOf(new Actor { - def receive = { case Delivery(payload, _, _, _, _, _) => handler.apply(createProtobufFromBytes[I](payload)) } - }).start - - val exchangeParameters = exchangeName.flatMap(name => Some(ExchangeParameters(name))) - val rKey = routingKey.getOrElse("%s.request".format(exchangeName.get)) - val qName = queueName.getOrElse("%s.in".format(rKey)) - - newConsumer(connection, ConsumerParameters(rKey, deliveryHandler, Some(qName), exchangeParameters)) - } - - - /** - * Main supervisor - */ - class AMQPSupervisorActor extends Actor { - import self._ - - faultHandler = OneForOneStrategy(List(classOf[Throwable])) - - def receive = { - case _ => {} // ignore all messages - } - } - - private val supervisor = actorOf(new AMQPSupervisorActor).start - - def shutdownAll() = { - supervisor.shutdownLinkedActors - } - - /** - * Serialization stuff - */ - - trait FromBinary[T] { - def fromBinary(bytes: 
Array[Byte]): T - } - - trait ToBinary[T] { - def toBinary(t: T): Array[Byte] - } - - private val ARRAY_OF_BYTE_ARRAY = Array[Class[_]](classOf[Array[Byte]]) - - private[amqp] def createProtobufFromBytes[I <: com.google.protobuf.Message](bytes: Array[Byte])(implicit manifest: Manifest[I]): I = { - manifest.erasure.getDeclaredMethod("parseFrom", ARRAY_OF_BYTE_ARRAY: _*).invoke(null, bytes).asInstanceOf[I] - } -} diff --git a/akka-amqp/src/main/scala/akka/amqp/AMQPMessage.scala b/akka-amqp/src/main/scala/akka/amqp/AMQPMessage.scala deleted file mode 100644 index af9fb2cbb6..0000000000 --- a/akka-amqp/src/main/scala/akka/amqp/AMQPMessage.scala +++ /dev/null @@ -1,92 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.amqp - -import akka.actor.ActorRef -import com.rabbitmq.client.AMQP.BasicProperties -import com.rabbitmq.client.ShutdownSignalException - -sealed trait AMQPMessage -sealed trait InternalAMQPMessage extends AMQPMessage - -case class Message( - payload: Array[Byte], - routingKey: String, - mandatory: Boolean = false, - immediate: Boolean = false, - properties: Option[BasicProperties] = None) extends AMQPMessage { - - // Needed for Java API usage - def this(payload: Array[Byte], routingKey: String) = this(payload, routingKey, false, false, None) - - // Needed for Java API usage - def this(payload: Array[Byte], routingKey: String, mandatory: Boolean, immediate: Boolean) = - this(payload, routingKey, mandatory, immediate, None) - - // Needed for Java API usage - def this(payload: Array[Byte], routingKey: String, properties: BasicProperties) = - this(payload, routingKey, false, false, Some(properties)) - - // Needed for Java API usage - def this(payload: Array[Byte], routingKey: String, mandatory: Boolean, immediate: Boolean, properties: BasicProperties) = - this(payload, routingKey, mandatory, immediate, Some(properties)) -} - -case class Delivery( - payload: Array[Byte], - routingKey: String, - deliveryTag: Long, - isRedeliver: Boolean, - properties: BasicProperties, - sender: Option[ActorRef]) extends AMQPMessage - -// connection messages -case object Connect extends AMQPMessage - -case object Connected extends AMQPMessage { - def getInstance() = this // Needed for Java API usage -} -case object Reconnecting extends AMQPMessage { - def getInstance() = this // Needed for Java API usage -} -case object Disconnected extends AMQPMessage { - def getInstance() = this // Needed for Java API usage -} - -case object ChannelRequest extends InternalAMQPMessage - -// channel messages -case object Start extends AMQPMessage - -case object Started extends AMQPMessage { - def getInstance() = this // Needed for Java API usage -} -case object Restarting extends AMQPMessage { - def getInstance() = this // Needed for Java API usage -} -case object Stopped extends AMQPMessage { - def getInstance() = this // Needed for Java API usage -} - -// delivery messages -case class Acknowledge(deliveryTag: Long) extends AMQPMessage -case class Acknowledged(deliveryTag: Long) extends AMQPMessage -case class Reject(deliveryTag: Long) extends AMQPMessage -case class Rejected(deliveryTag: Long) extends AMQPMessage -class RejectionException(deliveryTag: Long) extends RuntimeException - -// internal messages -private[akka] case class Failure(cause: Throwable) extends InternalAMQPMessage -case class ConnectionShutdown(cause: ShutdownSignalException) extends InternalAMQPMessage -case class ChannelShutdown(cause: ShutdownSignalException) extends InternalAMQPMessage - -private[akka] class 
MessageNotDeliveredException( - val message: String, - val replyCode: Int, - val replyText: String, - val exchange: String, - val routingKey: String, - val properties: BasicProperties, - val body: Array[Byte]) extends RuntimeException(message) diff --git a/akka-amqp/src/main/scala/akka/amqp/ConsumerActor.scala b/akka-amqp/src/main/scala/akka/amqp/ConsumerActor.scala deleted file mode 100644 index d0324f05c6..0000000000 --- a/akka-amqp/src/main/scala/akka/amqp/ConsumerActor.scala +++ /dev/null @@ -1,143 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.amqp - -import collection.JavaConversions - -import akka.util.Logging - -import com.rabbitmq.client.AMQP.BasicProperties -import com.rabbitmq.client.{Channel, Envelope, DefaultConsumer} -import akka.amqp.AMQP._ - -private[amqp] class ConsumerActor(consumerParameters: ConsumerParameters) - extends FaultTolerantChannelActor( - consumerParameters.exchangeParameters, consumerParameters.channelParameters) { - import consumerParameters._ - - var listenerTag: Option[String] = None - - def specificMessageHandler = { - case Acknowledge(deliveryTag) => acknowledgeDeliveryTag(deliveryTag, true) - case Reject(deliveryTag) => rejectDeliveryTag(deliveryTag, true) - case message: Message => - handleIllegalMessage("%s can't be used to send messages, ignoring message [%s]".format(this, message)) - case unknown => - handleIllegalMessage("Unknown message [%s] to %s".format(unknown, this)) - } - - protected def setupChannel(ch: Channel) = { - - channelParameters.foreach(params => ch.basicQos(params.prefetchSize)) - - val exchangeName = exchangeParameters.flatMap(params => Some(params.exchangeName)) - val consumingQueue = exchangeName match { - case Some(exchange) => - val queueDeclare: com.rabbitmq.client.AMQP.Queue.DeclareOk = { - queueName match { - case Some(name) => - declareQueue(ch, name, queueDeclaration) - case None => - log.debug("Declaring new generated queue for %s", toString) - ch.queueDeclare - } - } - log.debug("Binding new queue [%s] with [%s] for %s", queueDeclare.getQueue, routingKey, toString) - ch.queueBind(queueDeclare.getQueue, exchange, routingKey) - queueDeclare.getQueue - case None => - // no exchange, use routing key as queuename - log.debug("No exchange specified, creating queue using routingkey as name (%s)", routingKey) - declareQueue(ch, routingKey, queueDeclaration) - routingKey - } - - - val tag = ch.basicConsume(consumingQueue, false, new DefaultConsumer(ch) with Logging { - override def handleDelivery(tag: String, envelope: Envelope, properties: BasicProperties, payload: Array[Byte]) { - try { - val deliveryTag = envelope.getDeliveryTag - log.debug("Passing a message on to %s", toString) - import envelope._ - deliveryHandler ! Delivery(payload, getRoutingKey, getDeliveryTag, isRedeliver, properties, someSelf) - - if (selfAcknowledging) { - log.debug("Self acking...") - acknowledgeDeliveryTag(deliveryTag, false) - } - } catch { - case cause => - log.error(cause, "Delivery of message to %s failed", toString) - self ! 
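// When selfAcknowledging is false, the delivery handler has to ack explicitly through the
// consumer ref passed along as Delivery.sender. A minimal handler sketch (process() is a
// hypothetical application callback):
//
//   class AckingHandler extends Actor {
//     def receive = {
//       case Delivery(payload, _, deliveryTag, _, _, Some(consumer)) =>
//         process(payload)
//         consumer ! Acknowledge(deliveryTag)
//       case Acknowledged(deliveryTag) => () // confirmation sent back by the consumer
//     }
//   }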
Failure(cause) // pass on and re-throw exception in consumer actor to trigger restart and connect - } - } - }) - listenerTag = Some(tag) - log.info("Initialized %s", toString) - } - - private def declareQueue(ch: Channel, queueName: String, queueDeclaration: Declaration): com.rabbitmq.client.AMQP.Queue.DeclareOk = { - queueDeclaration match { - case PassiveDeclaration => - log.debug("Passively declaring new queue [%s] for %s", queueName, toString) - ch.queueDeclarePassive(queueName) - case ActiveDeclaration(durable, autoDelete, exclusive) => - log.debug("Actively declaring new queue [%s] for %s", queueName, toString) - val configurationArguments = exchangeParameters match { - case Some(params) => params.configurationArguments - case _ => Map.empty - } - ch.queueDeclare(queueName, durable, exclusive, autoDelete, JavaConversions.asJavaMap(configurationArguments.toMap)) - case NoActionDeclaration => new com.rabbitmq.client.impl.AMQImpl.Queue.DeclareOk(queueName, 0, 0) // do nothing here - } - } - - private def acknowledgeDeliveryTag(deliveryTag: Long, remoteAcknowledgement: Boolean) = { - log.debug("Acking message with delivery tag [%s]", deliveryTag) - channel.foreach { - ch => - ch.basicAck(deliveryTag, false) - if (remoteAcknowledgement) { - deliveryHandler ! Acknowledged(deliveryTag) - } - } - } - - private def rejectDeliveryTag(deliveryTag: Long, remoteAcknowledgement: Boolean) = { - log.debug("Rejecting message with delivery tag [%s]", deliveryTag) - // FIXME: when rabbitmq 1.9 arrives, basicReject should be available on the API and implemented instead of this - log.warning("Consumer is rejecting delivery with tag [%s] - " + - "for now this means we have to self terminate and kill the channel - see you in a second.", deliveryTag) - channel.foreach { - ch => - if (remoteAcknowledgement) { - deliveryHandler ! 
Rejected(deliveryTag) - } - } - throw new RejectionException(deliveryTag) - } - - private def handleIllegalMessage(errorMessage: String) = { - log.error(errorMessage) - throw new IllegalArgumentException(errorMessage) - } - - override def preRestart(reason: Throwable) = { - listenerTag = None - super.preRestart(reason) - } - - override def postStop = { - listenerTag.foreach(tag => channel.foreach(_.basicCancel(tag))) - self.shutdownLinkedActors - super.postStop - } - - override def toString = - "AMQP.Consumer[id= " + self.id + - ", exchangeParameters=" + exchangeParameters + - ", queueDeclaration=" + queueDeclaration + "]" -} - diff --git a/akka-amqp/src/main/scala/akka/amqp/ExampleSession.scala b/akka-amqp/src/main/scala/akka/amqp/ExampleSession.scala deleted file mode 100644 index 0e14f1b3bb..0000000000 --- a/akka-amqp/src/main/scala/akka/amqp/ExampleSession.scala +++ /dev/null @@ -1,268 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.amqp - -import rpc.RPC -import rpc.RPC.{RpcClientSerializer, RpcServerSerializer} -import akka.actor.{Actor, ActorRegistry} -import Actor._ -import java.util.concurrent.{CountDownLatch, TimeUnit} -import java.lang.String -import akka.amqp.AMQP._ -import akka.remote.protocol.RemoteProtocol.AddressProtocol - -object ExampleSession { - - def main(args: Array[String]) = { - - printTopic("DIRECT") - direct - - printTopic("FANOUT") - fanout - - printTopic("TOPIC") - topic - - printTopic("CALLBACK") - callback - - printTopic("EASY STRING PRODUCER AND CONSUMER") - easyStringProducerConsumer - - printTopic("EASY PROTOBUF PRODUCER AND CONSUMER") - easyProtobufProducerConsumer - - printTopic("RPC") - rpc - - printTopic("EASY STRING RPC") - easyStringRpc - - printTopic("EASY PROTOBUF RPC") - easyProtobufRpc - - printTopic("Happy hAkking :-)") - - // postStop everything the amqp tree except the main AMQP supervisor - // all connections/consumers/producers will be stopped - AMQP.shutdownAll - - ActorRegistry.shutdownAll - System.exit(0) - } - - def printTopic(topic: String) { - - println("") - println("==== " + topic + " ===") - println("") - TimeUnit.SECONDS.sleep(2) - } - - def direct = { - - // defaults to amqp://guest:guest@localhost:5672/ - val connection = AMQP.newConnection() - - val exchangeParameters = ExchangeParameters("my_direct_exchange", Direct) - - val consumer = AMQP.newConsumer(connection, ConsumerParameters("some.routing", actorOf(new Actor { def receive = { - case Delivery(payload, _, _, _, _, _) => log.info("@george_bush received message from: %s", new String(payload)) - }}), None, Some(exchangeParameters))) - - val producer = AMQP.newProducer(connection, ProducerParameters(Some(exchangeParameters))) - producer ! 
Message("@jonas_boner: You sucked!!".getBytes, "some.routing") - } - - def fanout = { - - // defaults to amqp://guest:guest@localhost:5672/ - val connection = AMQP.newConnection() - - val exchangeParameters = ExchangeParameters("my_fanout_exchange", Fanout) - - val bushConsumer = AMQP.newConsumer(connection, ConsumerParameters("@george_bush", actorOf(new Actor { def receive = { - case Delivery(payload, _, _, _, _, _) => log.info("@george_bush received message from: %s", new String(payload)) - }}), None, Some(exchangeParameters))) - - val obamaConsumer = AMQP.newConsumer(connection, ConsumerParameters("@barack_obama", actorOf(new Actor { def receive = { - case Delivery(payload, _, _, _, _, _) => log.info("@barack_obama received message from: %s", new String(payload)) - }}), None, Some(exchangeParameters))) - - val producer = AMQP.newProducer(connection, ProducerParameters(Some(exchangeParameters))) - producer ! Message("@jonas_boner: I'm going surfing".getBytes, "") - } - - def topic = { - - // defaults to amqp://guest:guest@localhost:5672/ - val connection = AMQP.newConnection() - - val exchangeParameters = ExchangeParameters("my_topic_exchange", Topic) - - val bushConsumer = AMQP.newConsumer(connection, ConsumerParameters("@george_bush", actorOf(new Actor { def receive = { - case Delivery(payload, _, _, _, _, _) => log.info("@george_bush received message from: %s", new String(payload)) - }}), None, Some(exchangeParameters))) - - val obamaConsumer = AMQP.newConsumer(connection, ConsumerParameters("@barack_obama", actorOf(new Actor { def receive = { - case Delivery(payload, _, _, _, _, _) => log.info("@barack_obama received message from: %s", new String(payload)) - }}), None, Some(exchangeParameters))) - - val producer = AMQP.newProducer(connection, ProducerParameters(Some(exchangeParameters))) - producer ! Message("@jonas_boner: You still suck!!".getBytes, "@george_bush") - producer ! 
Message("@jonas_boner: Yes I can!".getBytes, "@barack_obama") - } - - def callback = { - - val channelCountdown = new CountDownLatch(2) - - val connectionCallback = actorOf(new Actor { def receive = { - case Connected => log.info("Connection callback: Connected!") - case Reconnecting => () // not used, sent when connection fails and initiates a reconnect - case Disconnected => log.info("Connection callback: Disconnected!") - }}) - val connection = AMQP.newConnection(new ConnectionParameters(connectionCallback = Some(connectionCallback))) - - val channelCallback = actorOf(new Actor { def receive = { - case Started => { - log.info("Channel callback: Started") - channelCountdown.countDown - } - case Restarting => // not used, sent when channel or connection fails and initiates a restart - case Stopped => log.info("Channel callback: Stopped") - }}) - val exchangeParameters = ExchangeParameters("my_callback_exchange", Direct) - val channelParameters = ChannelParameters(channelCallback = Some(channelCallback)) - - val consumer = AMQP.newConsumer(connection, ConsumerParameters("callback.routing", actorOf(new Actor { def receive = { - case _ => () // not used - }}), None, Some(exchangeParameters), channelParameters = Some(channelParameters))) - - val producer = AMQP.newProducer(connection, ProducerParameters(Some(exchangeParameters))) - - // Wait until both channels (producer & consumer) are started before stopping the connection - channelCountdown.await(2, TimeUnit.SECONDS) - connection.stop - } - - def easyStringProducerConsumer = { - val connection = AMQP.newConnection() - - val exchangeName = "easy.string" - - // listen by default to: - // exchange = optional exchangeName - // routingKey = provided routingKey or .request - // queueName = .in - AMQP.newStringConsumer(connection, message => println("Received message: "+message), Some(exchangeName)) - - // send by default to: - // exchange = exchangeName - // routingKey = .request - val producer = AMQP.newStringProducer(connection, Some(exchangeName)) - - producer.send("This shit is easy!") - } - - def easyProtobufProducerConsumer = { - val connection = AMQP.newConnection() - - val exchangeName = "easy.protobuf" - - def protobufMessageHandler(message: AddressProtocol) = { - log.info("Received "+message) - } - - AMQP.newProtobufConsumer(connection, protobufMessageHandler _, Some(exchangeName)) - - val producerClient = AMQP.newProtobufProducer[AddressProtocol](connection, Some(exchangeName)) - producerClient.send(AddressProtocol.newBuilder.setHostname("akkarocks.com").setPort(1234).build) - } - - def rpc = { - - val connection = AMQP.newConnection() - - val exchangeName = "my_rpc_exchange" - - /** Server */ - val serverFromBinary = new FromBinary[String] { - def fromBinary(bytes: Array[Byte]) = new String(bytes) - } - val serverToBinary = new ToBinary[Int] { - def toBinary(t: Int) = Array(t.toByte) - } - val rpcServerSerializer = new RpcServerSerializer[String, Int](serverFromBinary, serverToBinary) - - def requestHandler(request: String) = 3 - - val rpcServer = RPC.newRpcServer(connection, exchangeName, rpcServerSerializer, requestHandler _, - routingKey = Some("rpc.in.key"), queueName = Some("rpc.in.key.queue")) - - - /** Client */ - val clientToBinary = new ToBinary[String] { - def toBinary(t: String) = t.getBytes - } - val clientFromBinary = new FromBinary[Int] { - def fromBinary(bytes: Array[Byte]) = bytes.head.toInt - } - val rpcClientSerializer = new RpcClientSerializer[String, Int](clientToBinary, clientFromBinary) - - val rpcClient = 
RPC.newRpcClient(connection, exchangeName, rpcClientSerializer, Some("rpc.in.key")) - - val response = rpcClient.call("rpc_request") - log.info("Response: " + response) - } - - def easyStringRpc = { - - val connection = AMQP.newConnection() - - val exchangeName = "easy.stringrpc" - - // listen by default to: - // exchange = exchangeName - // routingKey = .request - // queueName = .in - RPC.newStringRpcServer(connection, exchangeName, request => { - log.info("Got request: "+request) - "Response to: '"+request+"'" - }) - - // send by default to: - // exchange = exchangeName - // routingKey = .request - val stringRpcClient = RPC.newStringRpcClient(connection, exchangeName) - - val response = stringRpcClient.call("AMQP Rocks!") - log.info("Got response: "+response) - - stringRpcClient.callAsync("AMQP is dead easy") { - case response => log.info("This is handled async: "+response) - } - } - - def easyProtobufRpc = { - - val connection = AMQP.newConnection() - - val exchangeName = "easy.protobuf.rpc" - - def protobufRequestHandler(request: AddressProtocol): AddressProtocol = { - AddressProtocol.newBuilder.setHostname(request.getHostname.reverse).setPort(request.getPort).build - } - - RPC.newProtobufRpcServer(connection, exchangeName, protobufRequestHandler) - - val protobufRpcClient = RPC.newProtobufRpcClient[AddressProtocol, AddressProtocol](connection, exchangeName) - - val response = protobufRpcClient.call(AddressProtocol.newBuilder.setHostname("localhost").setPort(4321).build) - - log.info("Got response: "+response) - } -} diff --git a/akka-amqp/src/main/scala/akka/amqp/ExchangeType.scala b/akka-amqp/src/main/scala/akka/amqp/ExchangeType.scala deleted file mode 100644 index b29e1e7170..0000000000 --- a/akka-amqp/src/main/scala/akka/amqp/ExchangeType.scala +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.amqp - -sealed trait ExchangeType -case object Direct extends ExchangeType { - def getInstance() = this // Needed for Java API usage - override def toString = "direct" -} -case object Topic extends ExchangeType { - def getInstance() = this // Needed for Java API usage - override def toString = "topic" -} -case object Fanout extends ExchangeType { - def getInstance() = this // Needed for Java API usage - override def toString = "fanout" -} -case object Match extends ExchangeType { - def getInstance() = this // Needed for Java API usage - override def toString = "match" -} diff --git a/akka-amqp/src/main/scala/akka/amqp/FaultTolerantChannelActor.scala b/akka-amqp/src/main/scala/akka/amqp/FaultTolerantChannelActor.scala deleted file mode 100644 index 057ceab257..0000000000 --- a/akka-amqp/src/main/scala/akka/amqp/FaultTolerantChannelActor.scala +++ /dev/null @@ -1,108 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.amqp - -import collection.JavaConversions -import java.lang.Throwable -import akka.actor.Actor -import Actor._ -import com.rabbitmq.client.{ShutdownSignalException, Channel, ShutdownListener} -import scala.PartialFunction -import akka.amqp.AMQP._ - -abstract private[amqp] class FaultTolerantChannelActor( - exchangeParameters: Option[ExchangeParameters], channelParameters: Option[ChannelParameters]) extends Actor { - protected[amqp] var channel: Option[Channel] = None - log.info("%s is started", toString) - - override def receive = channelMessageHandler orElse specificMessageHandler - - // to be defined in subclassing actor - def specificMessageHandler: PartialFunction[Any, Unit] - - private 
def channelMessageHandler: PartialFunction[Any, Unit] = { - case Start => - // ask the connection for a new channel - self.supervisor.foreach { - sup => - log.info("%s is requesting new channel from supervising connection", toString) - val newChannel: Option[Option[Channel]] = (sup !! ChannelRequest).as[Option[Channel]] - newChannel.foreach(ch => ch.foreach(c => setupChannelInternal(c))) - } - case ch: Channel => { - setupChannelInternal(ch) - } - case ChannelShutdown(cause) => { - closeChannel - if (cause.isHardError) { - // connection error - if (cause.isInitiatedByApplication) { - log.info("%s got normal shutdown", toString) - } else { - log.error(cause, "%s got hard error", toString) - } - } else { - // channel error - log.error(cause, "%s self restarting because of channel shutdown", toString) - notifyCallback(Restarting) - self ! Start - } - } - case Failure(cause) => - log.error(cause, "%s self restarting because of channel failure", toString) - closeChannel - notifyCallback(Restarting) - self ! Start - } - - // to be defined in subclassing actor - protected def setupChannel(ch: Channel) - - private def setupChannelInternal(ch: Channel) = if (channel.isEmpty) { - - exchangeParameters.foreach { - params => - import params._ - exchangeDeclaration match { - case PassiveDeclaration => ch.exchangeDeclarePassive(exchangeName) - case ActiveDeclaration(durable, autoDelete, _) => - ch.exchangeDeclare(exchangeName, exchangeType.toString, durable, autoDelete, JavaConversions.asJavaMap(configurationArguments)) - case NoActionDeclaration => // ignore - } - } - ch.addShutdownListener(new ShutdownListener { - def shutdownCompleted(cause: ShutdownSignalException) = { - self ! ChannelShutdown(cause) - } - }) - channelParameters.foreach(_.shutdownListener.foreach(sdl => ch.getConnection.addShutdownListener(sdl))) - - setupChannel(ch) - channel = Some(ch) - notifyCallback(Started) - log.info("Channel setup for %s", toString) - } - - private def closeChannel = { - channel.foreach { - ch => - if (ch.isOpen) ch.close - notifyCallback(Stopped) - log.info("%s channel closed", toString) - } - channel = None - } - - private def notifyCallback(message: AMQPMessage) = { - channelParameters.foreach(_.channelCallback.foreach(cb => if (cb.isRunning) cb ! 
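// Channel callbacks receive the lifecycle messages published by notifyCallback below. A
// minimal callback actor, as a sketch:
//
//   val channelCallback = actorOf(new Actor {
//     def receive = {
//       case Started    => log.info("channel up")
//       case Restarting => () // channel/connection failed, restart in progress
//       case Stopped    => log.info("channel closed")
//     }
//   }).start
//   // wired in via ChannelParameters(channelCallback = Some(channelCallback))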
message)) - } - - override def preRestart(reason: Throwable) = { - notifyCallback(Restarting) - closeChannel - } - - override def postStop = closeChannel -} diff --git a/akka-amqp/src/main/scala/akka/amqp/FaultTolerantConnectionActor.scala b/akka-amqp/src/main/scala/akka/amqp/FaultTolerantConnectionActor.scala deleted file mode 100644 index b202e84b18..0000000000 --- a/akka-amqp/src/main/scala/akka/amqp/FaultTolerantConnectionActor.scala +++ /dev/null @@ -1,118 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.amqp - -import java.util.{TimerTask, Timer} -import java.io.IOException -import com.rabbitmq.client._ -import akka.amqp.AMQP.ConnectionParameters -import akka.config.Supervision.{ Permanent, OneForOneStrategy } -import akka.actor.{Exit, Actor} - -private[amqp] class FaultTolerantConnectionActor(connectionParameters: ConnectionParameters) extends Actor { - import connectionParameters._ - - self.id = "amqp-connection-%s".format(host) - self.lifeCycle = Permanent - self.faultHandler = OneForOneStrategy(List(classOf[Throwable])) - - val reconnectionTimer = new Timer("%s-timer".format(self.id)) - - val connectionFactory: ConnectionFactory = new ConnectionFactory() - connectionFactory.setHost(host) - connectionFactory.setPort(port) - connectionFactory.setUsername(username) - connectionFactory.setPassword(password) - connectionFactory.setVirtualHost(virtualHost) - - var connection: Option[Connection] = None - - protected def receive = { - case Connect => connect - case ChannelRequest => { - connection match { - case Some(conn) => { - val channel: Channel = conn.createChannel - self.reply(Some(channel)) - } - case None => { - log.warning("Unable to create new channel - no connection") - self.reply(None) - } - } - } - case ConnectionShutdown(cause) => { - if (cause.isHardError) { - // connection error - if (cause.isInitiatedByApplication) { - log.info("ConnectionShutdown by application [%s]", self.id) - } else { - log.error(cause, "ConnectionShutdown is hard error - self terminating") - self ! new Exit(self, cause) - } - } - } - } - - private def connect = if (connection.isEmpty || !connection.get.isOpen) { - try { - connection = Some(connectionFactory.newConnection) - connection.foreach { - conn => - conn.addShutdownListener(new ShutdownListener { - def shutdownCompleted(cause: ShutdownSignalException) = { - self ! ConnectionShutdown(cause) - } - }) - log.info("Successfully (re)connected to AMQP Server %s:%s [%s]", host, port, self.id) - log.debug("Sending new channel to %d already linked actors", self.linkedActors.size) - import scala.collection.JavaConversions._ - self.linkedActors.values.iterator.foreach(_ ! conn.createChannel) - notifyCallback(Connected) - } - } catch { - case e: Exception => - connection = None - log.info("Trying to connect to AMQP server in %d milliseconds [%s]" - , connectionParameters.initReconnectDelay, self.id) - reconnectionTimer.schedule(new TimerTask() { - override def run = { - notifyCallback(Reconnecting) - self ! Connect - } - }, connectionParameters.initReconnectDelay) - } - } - - private def disconnect = { - try { - connection.foreach(_.close) - log.debug("Disconnected AMQP connection at %s:%s [%s]", host, port, self.id) - notifyCallback(Disconnected) - } catch { - case e: IOException => log.error("Could not close AMQP connection %s:%s [%s]", host, port, self.id) - case _ => () - } - connection = None - } - - private def notifyCallback(message: AMQPMessage) = { - connectionCallback.foreach(cb => if (cb.isRunning) cb ! 
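// Reconnects are driven by the timer above: every failed attempt schedules a new Connect after
// initReconnectDelay milliseconds and notifies the callback with Reconnecting. Sketch (delay
// value and callback ref hypothetical):
//
//   val connection = AMQP.newConnection(ConnectionParameters(
//     initReconnectDelay = 1000,
//     connectionCallback = Some(connectionCallbackRef)))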
message) - } - - override def postStop = { - reconnectionTimer.cancel - // make sure postStop is called on all linked actors so they can do channel cleanup before connection is killed - self.shutdownLinkedActors - disconnect - } - - override def preRestart(reason: Throwable) = disconnect - - override def postRestart(reason: Throwable) = { - notifyCallback(Reconnecting) - connect - } -} diff --git a/akka-amqp/src/main/scala/akka/amqp/ProducerActor.scala b/akka-amqp/src/main/scala/akka/amqp/ProducerActor.scala deleted file mode 100644 index 8aabd26649..0000000000 --- a/akka-amqp/src/main/scala/akka/amqp/ProducerActor.scala +++ /dev/null @@ -1,60 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.amqp - -import com.rabbitmq.client._ - -import akka.amqp.AMQP.ProducerParameters - -private[amqp] class ProducerActor(producerParameters: ProducerParameters) - extends FaultTolerantChannelActor( - producerParameters.exchangeParameters, producerParameters.channelParameters) { - - import producerParameters._ - - val exchangeName = exchangeParameters.flatMap(params => Some(params.exchangeName)) - - producerId.foreach(id => self.id = id) - - def specificMessageHandler = { - - case message@Message(payload, routingKey, mandatory, immediate, properties) if channel.isDefined => { - log.debug("Sending message [%s]", message) - channel.foreach(_.basicPublish(exchangeName.getOrElse(""), routingKey, mandatory, immediate, properties.getOrElse(null), payload)) - } - case message@Message(payload, routingKey, mandatory, immediate, properties) => { - log.warning("Unable to send message [%s]", message) - // FIXME: If channel is not available, messages should be queued back into the actor mailbox and actor should only react on 'Start' - } - } - - protected def setupChannel(ch: Channel) { - returnListener match { - case Some(listener) => ch.setReturnListener(listener) - case None => ch.setReturnListener(new ReturnListener() { - def handleBasicReturn( - replyCode: Int, - replyText: String, - exchange: String, - routingKey: String, - properties: com.rabbitmq.client.AMQP.BasicProperties, - body: Array[Byte]) = { - throw new MessageNotDeliveredException( - "Could not deliver message [" + body + - "] with reply code [" + replyCode + - "] with reply text [" + replyText + - "] and routing key [" + routingKey + - "] to exchange [" + exchange + "]", - replyCode, replyText, exchange, routingKey, properties, body) - } - }) - } - } - - override def toString = - "AMQP.Producer[id= " + self.id + - ", exchangeParameters=" + exchangeParameters + "]" -} - diff --git a/akka-amqp/src/main/scala/akka/amqp/rpc/RPC.scala b/akka-amqp/src/main/scala/akka/amqp/rpc/RPC.scala deleted file mode 100644 index 8ce746735a..0000000000 --- a/akka-amqp/src/main/scala/akka/amqp/rpc/RPC.scala +++ /dev/null @@ -1,324 +0,0 @@ -package akka.amqp.rpc - -import akka.amqp.AMQP._ -import com.google.protobuf.Message -import akka.actor.{Actor, ActorRef} -import Actor._ -import akka.amqp._ -import reflect.Manifest -import akka.japi - -object RPC { - - // Needed for Java API usage - def newRpcClient[O, I](connection: ActorRef, - exchangeName: String, - routingKey: String, - serializer: RpcClientSerializer[O, I]): RpcClient[O,I] = { - newRpcClient(connection, exchangeName, serializer, Some(routingKey), None) - } - - // Needed for Java API usage - def newRpcClient[O, I](connection: ActorRef, - exchangeName: String, - routingKey: String, - serializer: RpcClientSerializer[O, I], - channelParameters: ChannelParameters): RpcClient[O,I] = { - 
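// An RpcClientSerializer pairs ToBinary[O] for the request with FromBinary[I] for the reply;
// RpcServerSerializer mirrors it on the server side. A String-to-Int pair, as a sketch:
//
//   val clientSerializer = RpcClientSerializer[String, Int](
//     new ToBinary[String] { def toBinary(t: String) = t.getBytes },
//     new FromBinary[Int] { def fromBinary(bytes: Array[Byte]) = bytes.head.toInt })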
newRpcClient(connection, exchangeName, serializer, Some(routingKey), Some(channelParameters)) - } - - def newRpcClient[O, I](connection: ActorRef, - exchangeName: String, - serializer: RpcClientSerializer[O, I], - routingKey: Option[String] = None, - channelParameters: Option[ChannelParameters] = None): RpcClient[O,I] = { - - val rKey = routingKey.getOrElse("%s.request".format(exchangeName)) - - val rpcActor: ActorRef = actorOf(new RpcClientActor[O, I]( - ExchangeParameters(exchangeName, exchangeDeclaration = PassiveDeclaration), rKey, serializer, channelParameters)) - connection.startLink(rpcActor) - rpcActor ! Start - rpcActor - new RpcClient(rpcActor) - } - - // Needed for Java API usage - def newRpcServer[I, O](connection: ActorRef, - exchangeName: String, - serializer: RpcServerSerializer[I, O], - requestHandler: japi.Function[I,O], - routingKey: String): RpcServerHandle = { - newRpcServer(connection, exchangeName, serializer, requestHandler.apply _, Some(routingKey)) - } - - // Needed for Java API usage - def newRpcServer[I, O](connection: ActorRef, - exchangeName: String, - serializer: RpcServerSerializer[I, O], - requestHandler: Function[I,O], - routingKey: String, - queueName: String): RpcServerHandle = { - newRpcServer(connection, exchangeName, serializer, requestHandler.apply _, Some(routingKey), Some(queueName)) - } - - // Needed for Java API usage - def newRpcServer[I, O](connection: ActorRef, - exchangeName: String, - serializer: RpcServerSerializer[I, O], - requestHandler: japi.Function[I,O], - routingKey: String, - channelParameters: ChannelParameters): RpcServerHandle = { - newRpcServer(connection, exchangeName, serializer, requestHandler.apply _, Some(routingKey), None, Some(channelParameters)) - } - - // Needed for Java API usage - def newRpcServer[I, O](connection: ActorRef, - exchangeName: String, - serializer: RpcServerSerializer[I, O], - requestHandler: japi.Function[I,O], - routingKey: String, - queueName: String, - channelParameters: ChannelParameters): RpcServerHandle = { - newRpcServer(connection, exchangeName, serializer, requestHandler.apply _, Some(routingKey), Some(queueName), Some(channelParameters)) - } - - def newRpcServer[I, O](connection: ActorRef, - exchangeName: String, - serializer: RpcServerSerializer[I, O], - requestHandler: I => O, - routingKey: Option[String] = None, - queueName: Option[String] = None, - channelParameters: Option[ChannelParameters] = None, - poolSize: Int = 1): RpcServerHandle = { - - val rKey = routingKey.getOrElse("%s.request".format(exchangeName)) - val qName = queueName.getOrElse("%s.in".format(rKey)) - - val producer = newProducer(connection, ProducerParameters(channelParameters = channelParameters)) - - val consumers = (1 to poolSize).map { - num => - val rpcServer = actorOf(new RpcServerActor[I, O](producer, serializer, requestHandler)) - newConsumer(connection, ConsumerParameters(rKey, rpcServer, - exchangeParameters = Some(ExchangeParameters(exchangeName)), channelParameters = channelParameters, - selfAcknowledging = false, queueName = Some(qName))) - } - RpcServerHandle(producer, consumers) - } - - case class RpcServerHandle(producer: ActorRef, consumers: Seq[ActorRef]) { - def stop = { - consumers.foreach(_.stop) - producer.stop - } - } - - case class RpcClientSerializer[O, I](toBinary: ToBinary[O], fromBinary: FromBinary[I]) - - case class RpcServerSerializer[I, O](fromBinary: FromBinary[I], toBinary: ToBinary[O]) - - - /** - * RPC convenience - */ - class RpcClient[O, I](client: ActorRef){ - - // Needed for Java API 
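With the defaults above (routing key "&lt;exchangeName&gt;.request", queue "&lt;routingKey&gt;.in"), a server/client pair needs only an exchange name and two serializers. A sketch of a string echo service under those defaults; all names are illustrative and a reachable broker is assumed:

    import akka.amqp._
    import akka.amqp.AMQP._
    import akka.amqp.rpc.RPC
    import akka.amqp.rpc.RPC.{RpcClientSerializer, RpcServerSerializer}

    object EchoRpcSketch {
      def main(args: Array[String]) {
        val connection = AMQP.newConnection()

        val serverSerializer = new RpcServerSerializer[String, String](
          new FromBinary[String] { def fromBinary(bytes: Array[Byte]) = new String(bytes) },
          new ToBinary[String] { def toBinary(t: String) = t.getBytes })
        // Serves queue "echo.request.in", bound with routing key "echo.request"
        val server = RPC.newRpcServer(connection, "echo", serverSerializer, (s: String) => s)

        val clientSerializer = new RpcClientSerializer[String, String](
          new ToBinary[String] { def toBinary(t: String) = t.getBytes },
          new FromBinary[String] { def fromBinary(bytes: Array[Byte]) = new String(bytes) })
        val client = RPC.newRpcClient(connection, "echo", clientSerializer)

        println(client.call("ping")) // Some("ping") on success, None on timeout
        server.stop
        client.stop
        AMQP.shutdownAll
      }
    }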
usage - def call(request: O): Option[I] = { - call(request, 5000) - } - - def call(request: O, timeout: Long = 5000): Option[I] = { - (client.!!(request, timeout)).as[I] - } - - // Needed for Java API usage - def callAsync(request: O, responseHandler: japi.Procedure[I]): Unit = { - callAsync(request, 5000, responseHandler) - } - - // Needed for Java API usage - def callAsync(request: O, timeout: Long, responseHandler: japi.Procedure[I]): Unit = { - callAsync(request, timeout){ - case Some(response) => responseHandler.apply(response) - } - } - - def callAsync(request: O, timeout: Long = 5000)(responseHandler: PartialFunction[Option[I],Unit]) = { - spawn { - val result = call(request, timeout) - responseHandler.apply(result) - } - } - def stop = client.stop - } - - - // Needed for Java API usage - def newProtobufRpcServer[I <: Message, O <: Message]( - connection: ActorRef, - exchangeName: String, - requestHandler: japi.Function[I,O], - resultClazz: Class[I]): RpcServerHandle = { - - implicit val manifest = Manifest.classType[I](resultClazz) - newProtobufRpcServer(connection, exchangeName, requestHandler.apply _) - } - - // Needed for Java API usage - def newProtobufRpcServer[I <: Message, O <: Message]( - connection: ActorRef, - exchangeName: String, - requestHandler: japi.Function[I,O], - routingKey: String, - resultClazz: Class[I]): RpcServerHandle = { - - implicit val manifest = Manifest.classType[I](resultClazz) - newProtobufRpcServer(connection, exchangeName, requestHandler.apply _, Some(routingKey)) - } - - // Needed for Java API usage - def newProtobufRpcServer[I <: Message, O <: Message]( - connection: ActorRef, - exchangeName: String, - requestHandler: japi.Function[I,O], - routingKey: String, - queueName: String, - resultClazz: Class[I]): RpcServerHandle = { - - implicit val manifest = Manifest.classType[I](resultClazz) - newProtobufRpcServer(connection, exchangeName, requestHandler.apply _, Some(routingKey), Some(queueName)) - } - - def newProtobufRpcServer[I <: Message, O <: Message]( - connection: ActorRef, - exchangeName: String, - requestHandler: I => O, - routingKey: Option[String] = None, - queueName: Option[String] = None)(implicit manifest: Manifest[I]): RpcServerHandle = { - - val serializer = new RpcServerSerializer[I, O]( - new FromBinary[I] { - def fromBinary(bytes: Array[Byte]): I = { - createProtobufFromBytes[I](bytes) - } - }, new ToBinary[O] { - def toBinary(t: O) = t.toByteArray - }) - - newRpcServer(connection, exchangeName, serializer, requestHandler, routingKey, queueName) - } - - // Needed for Java API usage - def newProtobufRpcClient[O <: Message, I <: Message]( - connection: ActorRef, - exchangeName: String, - resultClazz: Class[I]): RpcClient[O, I] = { - - implicit val manifest = Manifest.classType[I](resultClazz) - newProtobufRpcClient(connection, exchangeName, None) - } - - // Needed for Java API usage - def newProtobufRpcClient[O <: Message, I <: Message]( - connection: ActorRef, - exchangeName: String, - routingKey: String, - resultClazz: Class[I]): RpcClient[O, I] = { - - implicit val manifest = Manifest.classType[I](resultClazz) - newProtobufRpcClient(connection, exchangeName, Some(routingKey)) - } - - def newProtobufRpcClient[O <: Message, I <: Message]( - connection: ActorRef, - exchangeName: String, - routingKey: Option[String] = None)(implicit manifest: Manifest[I]): RpcClient[O, I] = { - - - val serializer = new RpcClientSerializer[O, I]( - new ToBinary[O] { - def toBinary(t: O) = t.toByteArray - }, new FromBinary[I] { - def fromBinary(bytes: 
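The RpcClient wrapper above offers a blocking call, which yields None on timeout, and callAsync, which spawns the blocking call off the calling thread and hands the Option result to a partial function. A usage sketch; the exchange name is illustrative:

    import akka.amqp.AMQP
    import akka.amqp.rpc.RPC

    object RpcCallSketch {
      def main(args: Array[String]) {
        val connection = AMQP.newConnection()
        val client = RPC.newStringRpcClient(connection, "echo")

        // Blocks for up to 3 seconds, then gives up with None
        val reply: Option[String] = client.call("ping", 3000)

        // Non-blocking: the handler runs on a spawned thread
        client.callAsync("ping", 3000) {
          case Some(response) => println("got " + response)
          case None => println("timed out")
        }
      }
    }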
Array[Byte]): I = { - createProtobufFromBytes[I](bytes) - } - }) - - newRpcClient(connection, exchangeName, serializer, routingKey) - } - - // Needed for Java API usage - def newStringRpcServer(connection: ActorRef, - exchangeName: String, - requestHandler: japi.Function[String,String]): RpcServerHandle = { - newStringRpcServer(connection, exchangeName, requestHandler.apply _) - } - - // Needed for Java API usage - def newStringRpcServer(connection: ActorRef, - exchangeName: String, - requestHandler: japi.Function[String,String], - routingKey: String): RpcServerHandle = { - newStringRpcServer(connection, exchangeName, requestHandler.apply _, Some(routingKey)) - } - - // Needed for Java API usage - def newStringRpcServer(connection: ActorRef, - exchangeName: String, - requestHandler: japi.Function[String,String], - routingKey: String, - queueName: String): RpcServerHandle = { - newStringRpcServer(connection, exchangeName, requestHandler.apply _, Some(routingKey), Some(queueName)) - } - - def newStringRpcServer(connection: ActorRef, - exchangeName: String, - requestHandler: String => String, - routingKey: Option[String] = None, - queueName: Option[String] = None): RpcServerHandle = { - - val serializer = new RpcServerSerializer[String, String]( - new FromBinary[String] { - def fromBinary(bytes: Array[Byte]): String = { - new String(bytes) - } - }, new ToBinary[String] { - def toBinary(t: String) = t.getBytes - }) - - newRpcServer(connection, exchangeName, serializer, requestHandler, routingKey, queueName) - } - - // Needed for Java API usage - def newStringRpcClient(connection: ActorRef, - exchange: String): RpcClient[String, String] = { - newStringRpcClient(connection, exchange, None) - } - - // Needed for Java API usage - def newStringRpcClient(connection: ActorRef, - exchange: String, - routingKey: String): RpcClient[String, String] = { - newStringRpcClient(connection, exchange, Some(routingKey)) - } - - def newStringRpcClient(connection: ActorRef, - exchange: String, - routingKey: Option[String] = None): RpcClient[String, String] = { - - - val serializer = new RpcClientSerializer[String, String]( - new ToBinary[String] { - def toBinary(t: String) = t.getBytes - }, new FromBinary[String] { - def fromBinary(bytes: Array[Byte]): String = { - new String(bytes) - } - }) - - newRpcClient(connection, exchange, serializer, routingKey) - } -} - diff --git a/akka-amqp/src/main/scala/akka/amqp/rpc/RpcClientActor.scala b/akka-amqp/src/main/scala/akka/amqp/rpc/RpcClientActor.scala deleted file mode 100644 index cae8587fb1..0000000000 --- a/akka-amqp/src/main/scala/akka/amqp/rpc/RpcClientActor.scala +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.amqp - -import com.rabbitmq.client.{Channel, RpcClient} -import rpc.RPC.RpcClientSerializer -import akka.amqp.AMQP.{ChannelParameters, ExchangeParameters} - -class RpcClientActor[I,O]( - exchangeParameters: ExchangeParameters, - routingKey: String, - serializer: RpcClientSerializer[I,O], - channelParameters: Option[ChannelParameters] = None) - extends FaultTolerantChannelActor(Some(exchangeParameters), channelParameters) { - - import exchangeParameters._ - - var rpcClient: Option[RpcClient] = None - - log.info("%s started", this) - - def specificMessageHandler = { - case payload: I => { - rpcClient match { - case Some(client) => - val response: Array[Byte] = client.primitiveCall(serializer.toBinary.toBinary(payload)) - self.reply(serializer.fromBinary.fromBinary(response)) - case None => error("%s has 
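The protobuf variants generate the same wiring from the message type: the request class travels as a Manifest (or an explicit Class in the Java overloads), requests are parsed with createProtobufFromBytes and responses serialized with toByteArray. A sketch mirroring the protobuf integration tests later in this diff; AddressProtocol is the akka-remote message type those tests use, and the exchange name is illustrative:

    import akka.amqp.AMQP
    import akka.amqp.rpc.RPC
    import akka.remote.protocol.RemoteProtocol.AddressProtocol

    object ProtobufRpcSketch {
      // Echoes the request back unchanged
      def echo(request: AddressProtocol): AddressProtocol = request

      def main(args: Array[String]) {
        val connection = AMQP.newConnection()
        RPC.newProtobufRpcServer(connection, "address.service", echo)

        val client = RPC.newProtobufRpcClient[AddressProtocol, AddressProtocol](
          connection, "address.service")
        val request = AddressProtocol.newBuilder.setHostname("somehost").setPort(2552).build
        client.call(request).foreach(response => println(response.getHostname))
        AMQP.shutdownAll
      }
    }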
no client to send messages with".format(this)) - } - } - } - - protected def setupChannel(ch: Channel) = rpcClient = Some(new RpcClient(ch, exchangeName, routingKey)) - - override def preRestart(reason: Throwable) = { - rpcClient = None - super.preRestart(reason) - } - - - override def postStop = { - rpcClient.foreach(rpc => rpc.close) - super.postStop - } - - override def toString = "AMQP.RpcClient[exchange=" +exchangeName + ", routingKey=" + routingKey+ "]" -} diff --git a/akka-amqp/src/main/scala/akka/amqp/rpc/RpcServerActor.scala b/akka-amqp/src/main/scala/akka/amqp/rpc/RpcServerActor.scala deleted file mode 100644 index 2459cde290..0000000000 --- a/akka-amqp/src/main/scala/akka/amqp/rpc/RpcServerActor.scala +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.amqp - -import rpc.RPC.RpcServerSerializer -import akka.actor.{ActorRef, Actor} -import com.rabbitmq.client.AMQP.BasicProperties - -class RpcServerActor[I,O]( - producer: ActorRef, - serializer: RpcServerSerializer[I,O], - requestHandler: I => O) extends Actor { - - log.info("%s started", this) - - protected def receive = { - case Delivery(payload, _, tag, _, props, sender) => { - - log.debug("%s handling delivery with tag %d", this, tag) - val request = serializer.fromBinary.fromBinary(payload) - val response: Array[Byte] = serializer.toBinary.toBinary(requestHandler(request)) - - log.debug("%s sending reply to %s", this, props.getReplyTo) - val replyProps = new BasicProperties - replyProps.setCorrelationId(props.getCorrelationId) - producer ! new Message(response, props.getReplyTo, properties = Some(replyProps)) - - sender.foreach(_ ! Acknowledge(tag)) - } - case Acknowledged(tag) => log.debug("%s acknowledged delivery with tag %d", this, tag) - } - - override def toString = "AMQP.RpcServer[]" -} diff --git a/akka-amqp/src/test/scala/akka/amqp/test/AMQPConnectionRecoveryTestIntegration.scala b/akka-amqp/src/test/scala/akka/amqp/test/AMQPConnectionRecoveryTestIntegration.scala deleted file mode 100644 index 64f6c99040..0000000000 --- a/akka-amqp/src/test/scala/akka/amqp/test/AMQPConnectionRecoveryTestIntegration.scala +++ /dev/null @@ -1,54 +0,0 @@ -package akka.amqp.test - -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -import java.util.concurrent.TimeUnit -import akka.actor.{Actor, ActorRef} -import org.multiverse.api.latches.StandardLatch -import com.rabbitmq.client.ShutdownSignalException -import akka.amqp._ -import akka.amqp.AMQP.ConnectionParameters -import org.scalatest.matchers.MustMatchers -import org.scalatest.junit.JUnitSuite -import org.junit.Test - -class AMQPConnectionRecoveryTestIntegration extends JUnitSuite with MustMatchers { - - @Test - def connectionAndRecovery = AMQPTest.withCleanEndState { - - val connectedLatch = new StandardLatch - val reconnectingLatch = new StandardLatch - val reconnectedLatch = new StandardLatch - val disconnectedLatch = new StandardLatch - - val connectionCallback: ActorRef = Actor.actorOf( new Actor { - def receive = { - case Connected => - if (!connectedLatch.isOpen) { - connectedLatch.open - } else { - reconnectedLatch.open - } - case Reconnecting => reconnectingLatch.open - case Disconnected => disconnectedLatch.open - } - }).start - - val connection = AMQP.newConnection(ConnectionParameters(initReconnectDelay = 50, connectionCallback = Some(connectionCallback))) - try { - connectedLatch.tryAwait(2, TimeUnit.SECONDS) must be(true) - - connection ! 
new ConnectionShutdown(new ShutdownSignalException(true, false, "TestException", "TestRef")) - reconnectingLatch.tryAwait(2, TimeUnit.SECONDS) must be(true) - reconnectedLatch.tryAwait(2, TimeUnit.SECONDS) must be(true) - - } finally { - AMQP.shutdownAll - disconnectedLatch.tryAwait(2, TimeUnit.SECONDS) must be(true) - } - } - -} diff --git a/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerChannelRecoveryTestIntegration.scala b/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerChannelRecoveryTestIntegration.scala deleted file mode 100644 index eace56114f..0000000000 --- a/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerChannelRecoveryTestIntegration.scala +++ /dev/null @@ -1,65 +0,0 @@ -package akka.amqp.test - -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -import org.multiverse.api.latches.StandardLatch -import com.rabbitmq.client.ShutdownSignalException -import akka.amqp._ -import org.scalatest.matchers.MustMatchers -import java.util.concurrent.TimeUnit -import org.junit.Test -import akka.amqp.AMQP._ -import org.scalatest.junit.JUnitSuite -import akka.actor.Actor._ -import akka.actor.{Actor, ActorRef} - -class AMQPConsumerChannelRecoveryTestIntegration extends JUnitSuite with MustMatchers { - - @Test - def consumerChannelRecovery = AMQPTest.withCleanEndState { - - val connection = AMQP.newConnection(ConnectionParameters(initReconnectDelay = 50)) - try { - val producer = AMQP.newProducer(connection, ProducerParameters( - Some(ExchangeParameters("text_exchange")))) - - val consumerStartedLatch = new StandardLatch - val consumerRestartedLatch = new StandardLatch - val consumerChannelCallback: ActorRef = actorOf( new Actor { - def receive = { - case Started => { - if (!consumerStartedLatch.isOpen) { - consumerStartedLatch.open - } else { - consumerRestartedLatch.open - } - } - case Restarting => () - case Stopped => () - } - }).start - - val payloadLatch = new StandardLatch - val consumerExchangeParameters = ExchangeParameters("text_exchange") - val consumerChannelParameters = ChannelParameters(channelCallback = Some(consumerChannelCallback)) - val consumer = AMQP.newConsumer(connection, ConsumerParameters("non.interesting.routing.key", actorOf( new Actor { - def receive = { case Delivery(payload, _, _, _, _, _) => payloadLatch.open } - }), - exchangeParameters = Some(consumerExchangeParameters), channelParameters = Some(consumerChannelParameters))) - consumerStartedLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - - val listenerLatch = new StandardLatch - - consumer ! new ChannelShutdown(new ShutdownSignalException(false, false, "TestException", "TestRef")) - - consumerRestartedLatch.tryAwait(4, TimeUnit.SECONDS) must be (true) - - producer ! 
Message("some_payload".getBytes, "non.interesting.routing.key") - payloadLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - } finally { - connection.stop - } - } -} diff --git a/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerConnectionRecoveryTestIntegration.scala b/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerConnectionRecoveryTestIntegration.scala deleted file mode 100644 index 52769db007..0000000000 --- a/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerConnectionRecoveryTestIntegration.scala +++ /dev/null @@ -1,86 +0,0 @@ -package akka.amqp.test - -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -import org.multiverse.api.latches.StandardLatch -import com.rabbitmq.client.ShutdownSignalException -import akka.amqp._ -import org.scalatest.matchers.MustMatchers -import java.util.concurrent.TimeUnit -import org.junit.Test -import akka.amqp.AMQP._ -import org.scalatest.junit.JUnitSuite -import akka.actor.{Actor, ActorRef} -import Actor._ - -class AMQPConsumerConnectionRecoveryTestIntegration extends JUnitSuite with MustMatchers { - - @Test - def consumerConnectionRecovery = AMQPTest.withCleanEndState { - - val connection = AMQP.newConnection(ConnectionParameters(initReconnectDelay = 50)) - try { - val producerStartedLatch = new StandardLatch - val producerRestartedLatch = new StandardLatch - val producerChannelCallback: ActorRef = actorOf( new Actor { - def receive = { - case Started => { - if (!producerStartedLatch.isOpen) { - producerStartedLatch.open - } else { - producerRestartedLatch.open - } - } - case Restarting => () - case Stopped => () - } - }).start - - val channelParameters = ChannelParameters(channelCallback = Some(producerChannelCallback)) - val producer = AMQP.newProducer(connection, ProducerParameters( - Some(ExchangeParameters("text_exchange")), channelParameters = Some(channelParameters))) - producerStartedLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - - - val consumerStartedLatch = new StandardLatch - val consumerRestartedLatch = new StandardLatch - val consumerChannelCallback: ActorRef = actorOf( new Actor { - def receive = { - case Started => { - if (!consumerStartedLatch.isOpen) { - consumerStartedLatch.open - } else { - consumerRestartedLatch.open - } - } - case Restarting => () - case Stopped => () - } - }).start - - - val payloadLatch = new StandardLatch - val consumerExchangeParameters = ExchangeParameters("text_exchange") - val consumerChannelParameters = ChannelParameters(channelCallback = Some(consumerChannelCallback)) - val consumer = AMQP.newConsumer(connection, ConsumerParameters("non.interesting.routing.key", actorOf( new Actor { - def receive = { case Delivery(payload, _, _, _, _, _) => payloadLatch.open } - }), exchangeParameters = Some(consumerExchangeParameters), channelParameters = Some(consumerChannelParameters))) - - consumerStartedLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - - val listenerLatch = new StandardLatch - - connection ! new ConnectionShutdown(new ShutdownSignalException(true, false, "TestException", "TestRef")) - - producerRestartedLatch.tryAwait(4, TimeUnit.SECONDS) must be (true) - consumerRestartedLatch.tryAwait(4, TimeUnit.SECONDS) must be (true) - - producer ! 
Message("some_payload".getBytes, "non.interesting.routing.key") - payloadLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - } finally { - connection.stop - } - } -} diff --git a/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerManualAcknowledgeTestIntegration.scala b/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerManualAcknowledgeTestIntegration.scala deleted file mode 100644 index 3f3bf0539b..0000000000 --- a/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerManualAcknowledgeTestIntegration.scala +++ /dev/null @@ -1,65 +0,0 @@ -package akka.amqp.test - -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -import akka.actor.Actor._ -import org.scalatest.matchers.MustMatchers -import akka.amqp._ -import org.junit.Test -import java.util.concurrent.{CountDownLatch, TimeUnit} -import org.multiverse.api.latches.StandardLatch -import org.scalatest.junit.JUnitSuite -import akka.amqp.AMQP._ -import akka.actor.{Actor, ActorRef} - -class AMQPConsumerManualAcknowledgeTestIntegration extends JUnitSuite with MustMatchers { - - @Test - def consumerMessageManualAcknowledge = AMQPTest.withCleanEndState { - val connection = AMQP.newConnection() - try { - val countDown = new CountDownLatch(2) - val channelCallback = actorOf( new Actor { - def receive = { - case Started => countDown.countDown - case Restarting => () - case Stopped => () - } - }).start - val exchangeParameters = ExchangeParameters("text_exchange") - val channelParameters = ChannelParameters(channelCallback = Some(channelCallback)) - - val failLatch = new StandardLatch - val acknowledgeLatch = new StandardLatch - var deliveryTagCheck: Long = -1 - val consumer:ActorRef = AMQP.newConsumer(connection, ConsumerParameters("manual.ack.this", actorOf( new Actor { - def receive = { - case Delivery(payload, _, deliveryTag, _, _, sender) => { - if (!failLatch.isOpen) { - failLatch.open - error("Make it fail!") - } else { - deliveryTagCheck = deliveryTag - sender.foreach(_ ! Acknowledge(deliveryTag)) - } - } - case Acknowledged(deliveryTag) => if (deliveryTagCheck == deliveryTag) acknowledgeLatch.open - } - }), queueName = Some("self.ack.queue"), exchangeParameters = Some(exchangeParameters), - selfAcknowledging = false, channelParameters = Some(channelParameters), - queueDeclaration = ActiveDeclaration(autoDelete = false))) - - val producer = AMQP.newProducer(connection, - ProducerParameters(Some(exchangeParameters), channelParameters = Some(channelParameters))) - - countDown.await(2, TimeUnit.SECONDS) must be (true) - producer ! 
Message("some_payload".getBytes, "manual.ack.this") - - acknowledgeLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - } finally { - connection.stop - } - } -} diff --git a/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerManualRejectTestIntegration.scala b/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerManualRejectTestIntegration.scala deleted file mode 100644 index 4ba4c27971..0000000000 --- a/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerManualRejectTestIntegration.scala +++ /dev/null @@ -1,56 +0,0 @@ -package akka.amqp.test - -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -import akka.actor.Actor._ -import org.scalatest.matchers.MustMatchers -import akka.amqp._ -import org.junit.Test -import java.util.concurrent.{CountDownLatch, TimeUnit} -import akka.amqp.AMQP.{ExchangeParameters, ConsumerParameters, ChannelParameters, ProducerParameters} -import org.multiverse.api.latches.StandardLatch -import org.scalatest.junit.JUnitSuite -import akka.actor.{Actor, ActorRef} - -class AMQPConsumerManualRejectTestIntegration extends JUnitSuite with MustMatchers { - - @Test - def consumerMessageManualAcknowledge = AMQPTest.withCleanEndState { - val connection = AMQP.newConnection() - try { - val countDown = new CountDownLatch(2) - val restartingLatch = new StandardLatch - val channelCallback = actorOf(new Actor { - def receive = { - case Started => countDown.countDown - case Restarting => restartingLatch.open - case Stopped => () - } - }).start - val exchangeParameters = ExchangeParameters("text_exchange") - val channelParameters = ChannelParameters(channelCallback = Some(channelCallback)) - - val rejectedLatch = new StandardLatch - val consumer:ActorRef = AMQP.newConsumer(connection, ConsumerParameters("manual.reject.this", actorOf( new Actor { - def receive = { - case Delivery(payload, _, deliveryTag, _, _, sender) => sender.foreach(_ ! Reject(deliveryTag)) - case Rejected(deliveryTag) => rejectedLatch.open - } - }), queueName = Some("self.reject.queue"), exchangeParameters = Some(exchangeParameters), - selfAcknowledging = false, channelParameters = Some(channelParameters))) - - val producer = AMQP.newProducer(connection, - ProducerParameters(Some(exchangeParameters), channelParameters = Some(channelParameters))) - - countDown.await(2, TimeUnit.SECONDS) must be (true) - producer ! 
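With selfAcknowledging = false, as in the acknowledge and reject tests here, the delivery handler owns the acknowledgement decision: it replies Acknowledge(deliveryTag) or Reject(deliveryTag) to the sender reference carried in each Delivery, and receives Acknowledged/Rejected confirmations back. A sketch of such a handler; the ack-or-reject policy shown is purely illustrative:

    import akka.actor.Actor
    import akka.amqp._

    class ManualAckHandler extends Actor {
      def receive = {
        case Delivery(payload, routingKey, deliveryTag, _, _, sender) =>
          // Decide per message; here: ack non-empty payloads, reject the rest
          if (payload.nonEmpty) sender.foreach(_ ! Acknowledge(deliveryTag))
          else sender.foreach(_ ! Reject(deliveryTag))
        case Acknowledged(tag) => log.debug("delivery %d acknowledged", tag)
        case Rejected(tag) => log.debug("delivery %d rejected", tag)
      }
    }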
Message("some_payload".getBytes, "manual.reject.this") - - rejectedLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - restartingLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - } finally { - connection.stop - } - } -} diff --git a/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerMessageTestIntegration.scala b/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerMessageTestIntegration.scala deleted file mode 100644 index 0a9613d21f..0000000000 --- a/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerMessageTestIntegration.scala +++ /dev/null @@ -1,46 +0,0 @@ -package akka.amqp.test - -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -import akka.amqp._ -import org.multiverse.api.latches.StandardLatch -import akka.actor.Actor._ -import org.scalatest.matchers.MustMatchers -import java.util.concurrent.{CountDownLatch, TimeUnit} -import akka.amqp.AMQP.{ExchangeParameters, ConsumerParameters, ChannelParameters, ProducerParameters} -import org.scalatest.junit.JUnitSuite -import org.junit.Test -import akka.actor.Actor - -class AMQPConsumerMessageTestIntegration extends JUnitSuite with MustMatchers { - - @Test - def consumerMessage = AMQPTest.withCleanEndState { - val connection = AMQP.newConnection() - val countDown = new CountDownLatch(2) - val channelCallback = actorOf(new Actor { - def receive = { - case Started => countDown.countDown - case Restarting => () - case Stopped => () - } - }).start - - val exchangeParameters = ExchangeParameters("text_exchange") - val channelParameters = ChannelParameters(channelCallback = Some(channelCallback)) - - val payloadLatch = new StandardLatch - val consumer = AMQP.newConsumer(connection, ConsumerParameters("non.interesting.routing.key", actorOf(new Actor { - def receive = { case Delivery(payload, _, _, _, _, _) => payloadLatch.open } - }), exchangeParameters = Some(exchangeParameters), channelParameters = Some(channelParameters))) - - val producer = AMQP.newProducer(connection, - ProducerParameters(Some(exchangeParameters), channelParameters = Some(channelParameters))) - - countDown.await(2, TimeUnit.SECONDS) must be (true) - producer ! 
Message("some_payload".getBytes, "non.interesting.routing.key") - payloadLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - } -} diff --git a/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerPrivateQueueTestIntegration.scala b/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerPrivateQueueTestIntegration.scala deleted file mode 100644 index 6b03b6ded8..0000000000 --- a/akka-amqp/src/test/scala/akka/amqp/test/AMQPConsumerPrivateQueueTestIntegration.scala +++ /dev/null @@ -1,45 +0,0 @@ -package akka.amqp.test - -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -import akka.amqp._ -import org.multiverse.api.latches.StandardLatch -import akka.actor.Actor._ -import org.scalatest.matchers.MustMatchers -import java.util.concurrent.{CountDownLatch, TimeUnit} -import akka.amqp.AMQP.{ConsumerParameters, ChannelParameters, ProducerParameters} -import org.scalatest.junit.JUnitSuite -import org.junit.Test -import akka.actor.Actor - -class AMQPConsumerPrivateQueueTestIntegration extends JUnitSuite with MustMatchers { - - @Test - def consumerMessage = AMQPTest.withCleanEndState { - val connection = AMQP.newConnection() - val countDown = new CountDownLatch(2) - val channelCallback = actorOf(new Actor { - def receive = { - case Started => countDown.countDown - case Restarting => () - case Stopped => () - } - }).start - - val channelParameters = ChannelParameters(channelCallback = Some(channelCallback)) - - val payloadLatch = new StandardLatch - val consumer = AMQP.newConsumer(connection, ConsumerParameters("my.private.routing.key", actorOf(new Actor { - def receive = { case Delivery(payload, _, _, _, _, _) => payloadLatch.open } - }), channelParameters = Some(channelParameters))) - - val producer = AMQP.newProducer(connection, - ProducerParameters(channelParameters = Some(channelParameters))) - - countDown.await(2, TimeUnit.SECONDS) must be (true) - producer ! 
Message("some_payload".getBytes, "my.private.routing.key") - payloadLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - } -} \ No newline at end of file diff --git a/akka-amqp/src/test/scala/akka/amqp/test/AMQPProducerChannelRecoveryTestIntegration.scala b/akka-amqp/src/test/scala/akka/amqp/test/AMQPProducerChannelRecoveryTestIntegration.scala deleted file mode 100644 index 4b64f946e0..0000000000 --- a/akka-amqp/src/test/scala/akka/amqp/test/AMQPProducerChannelRecoveryTestIntegration.scala +++ /dev/null @@ -1,57 +0,0 @@ -package akka.amqp.test - -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -import java.util.concurrent.TimeUnit -import akka.actor.{Actor, ActorRef} -import org.multiverse.api.latches.StandardLatch -import com.rabbitmq.client.ShutdownSignalException -import akka.amqp._ -import org.scalatest.matchers.MustMatchers -import akka.amqp.AMQP.{ExchangeParameters, ChannelParameters, ProducerParameters, ConnectionParameters} -import org.scalatest.junit.JUnitSuite -import org.junit.Test - -class AMQPProducerChannelRecoveryTestIntegration extends JUnitSuite with MustMatchers { - - @Test - def producerChannelRecovery = AMQPTest.withCleanEndState { - - val connection = AMQP.newConnection(ConnectionParameters(initReconnectDelay = 50)) - - try { - val startedLatch = new StandardLatch - val restartingLatch = new StandardLatch - val restartedLatch = new StandardLatch - - val producerCallback: ActorRef = Actor.actorOf( new Actor { - def receive = { - case Started => { - if (!startedLatch.isOpen) { - startedLatch.open - } else { - restartedLatch.open - } - } - case Restarting => restartingLatch.open - case Stopped => () - } - }).start - - val channelParameters = ChannelParameters(channelCallback = Some(producerCallback)) - val producerParameters = ProducerParameters( - Some(ExchangeParameters("text_exchange")), channelParameters = Some(channelParameters)) - - val producer = AMQP.newProducer(connection, producerParameters) - startedLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - - producer ! 
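The two consumer tests above exercise both consumer flavours: with ExchangeParameters the queue is bound to the named exchange by routing key, and without them the consumer reads from a private, server-named queue. A condensed sketch of both, with illustrative names:

    import akka.actor.Actor
    import akka.actor.Actor._
    import akka.amqp._
    import akka.amqp.AMQP.{ConsumerParameters, ExchangeParameters}

    object ConsumerSketch {
      def main(args: Array[String]) {
        val connection = AMQP.newConnection()
        val handler = actorOf(new Actor {
          def receive = { case Delivery(payload, _, _, _, _, _) => println(new String(payload)) }
        })

        // Queue bound to a named exchange via a routing key
        AMQP.newConsumer(connection, ConsumerParameters("some.routing.key", handler,
          exchangeParameters = Some(ExchangeParameters("text_exchange"))))

        // No exchange parameters: consumes from a private, server-named queue
        AMQP.newConsumer(connection, ConsumerParameters("my.private.routing.key", handler))
      }
    }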
new ChannelShutdown(new ShutdownSignalException(false, false, "TestException", "TestRef")) - restartingLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - restartedLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - } finally { - connection.stop - } - } -} diff --git a/akka-amqp/src/test/scala/akka/amqp/test/AMQPProducerConnectionRecoveryTestIntegration.scala b/akka-amqp/src/test/scala/akka/amqp/test/AMQPProducerConnectionRecoveryTestIntegration.scala deleted file mode 100644 index 00ac02e1c4..0000000000 --- a/akka-amqp/src/test/scala/akka/amqp/test/AMQPProducerConnectionRecoveryTestIntegration.scala +++ /dev/null @@ -1,56 +0,0 @@ -package akka.amqp.test - -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -import java.util.concurrent.TimeUnit -import akka.actor.{Actor, ActorRef} -import org.multiverse.api.latches.StandardLatch -import com.rabbitmq.client.ShutdownSignalException -import akka.amqp._ -import org.scalatest.matchers.MustMatchers -import akka.amqp.AMQP.{ExchangeParameters, ChannelParameters, ProducerParameters, ConnectionParameters} -import org.scalatest.junit.JUnitSuite -import org.junit.Test - -class AMQPProducerConnectionRecoveryTestIntegration extends JUnitSuite with MustMatchers { - - @Test - def producerConnectionRecovery = AMQPTest.withCleanEndState { - - val connection = AMQP.newConnection(ConnectionParameters(initReconnectDelay = 50)) - try { - val startedLatch = new StandardLatch - val restartingLatch = new StandardLatch - val restartedLatch = new StandardLatch - - val producerCallback: ActorRef = Actor.actorOf(new Actor{ - def receive = { - case Started => { - if (!startedLatch.isOpen) { - startedLatch.open - } else { - restartedLatch.open - } - } - case Restarting => restartingLatch.open - case Stopped => () - } - }).start - - val channelParameters = ChannelParameters(channelCallback = Some(producerCallback)) - val producerParameters = ProducerParameters( - Some(ExchangeParameters("text_exchange")), channelParameters = Some(channelParameters)) - - val producer = AMQP.newProducer(connection, producerParameters) - startedLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - - connection ! 
new ConnectionShutdown(new ShutdownSignalException(true, false, "TestException", "TestRef")) - restartingLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - restartedLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - } finally { - connection.stop - } - } -} diff --git a/akka-amqp/src/test/scala/akka/amqp/test/AMQPProducerMessageTestIntegration.scala b/akka-amqp/src/test/scala/akka/amqp/test/AMQPProducerMessageTestIntegration.scala deleted file mode 100644 index 037af3e179..0000000000 --- a/akka-amqp/src/test/scala/akka/amqp/test/AMQPProducerMessageTestIntegration.scala +++ /dev/null @@ -1,43 +0,0 @@ -package akka.amqp.test - -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -import java.util.concurrent.TimeUnit -import akka.actor.ActorRef -import org.multiverse.api.latches.StandardLatch -import akka.amqp._ -import com.rabbitmq.client.ReturnListener -import com.rabbitmq.client.AMQP.BasicProperties -import java.lang.String -import org.scalatest.matchers.MustMatchers -import akka.amqp.AMQP.{ExchangeParameters, ProducerParameters} -import org.scalatest.junit.JUnitSuite -import org.junit.Test - -class AMQPProducerMessageTestIntegration extends JUnitSuite with MustMatchers { - - @Test - def producerMessage = AMQPTest.withCleanEndState { - - val connection: ActorRef = AMQP.newConnection() - try { - val returnLatch = new StandardLatch - val returnListener = new ReturnListener { - def handleBasicReturn(replyCode: Int, replyText: String, exchange: String, routingKey: String, properties: BasicProperties, body: Array[Byte]) = { - returnLatch.open - } - } - val producerParameters = ProducerParameters( - Some(ExchangeParameters("text_exchange")), returnListener = Some(returnListener)) - - val producer = AMQP.newProducer(connection, producerParameters) - - producer ! 
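All four recovery tests share one fault-injection idiom: rather than restarting a real broker, they send the connection (or channel) actor a ShutdownSignalException wrapped in ConnectionShutdown or ChannelShutdown and then assert that the channel callback observes Started a second time. The idiom, extracted into a helper:

    import akka.actor.ActorRef
    import akka.amqp._
    import com.rabbitmq.client.ShutdownSignalException

    object FaultInjection {
      // First argument 'true' marks a hard (connection-level) error; the
      // connection actor reconnects and restarts its channels, so channel
      // callbacks observe a second Started event.
      def simulateBrokerLoss(connection: ActorRef) =
        connection ! new ConnectionShutdown(
          new ShutdownSignalException(true, false, "simulated failure", "test"))
    }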
new Message("some_payload".getBytes, "non.interesting.routing.key", mandatory = true) - returnLatch.tryAwait(2, TimeUnit.SECONDS) must be(true) - } finally { - connection.stop - } - } -} diff --git a/akka-amqp/src/test/scala/akka/amqp/test/AMQPProtobufProducerConsumerTestIntegration.scala b/akka-amqp/src/test/scala/akka/amqp/test/AMQPProtobufProducerConsumerTestIntegration.scala deleted file mode 100644 index 668db09c78..0000000000 --- a/akka-amqp/src/test/scala/akka/amqp/test/AMQPProtobufProducerConsumerTestIntegration.scala +++ /dev/null @@ -1,43 +0,0 @@ -package akka.amqp.test - -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -import org.scalatest.matchers.MustMatchers -import org.scalatest.junit.JUnitSuite -import akka.amqp.AMQP -import org.junit.Test -import org.multiverse.api.latches.StandardLatch -import java.util.concurrent.TimeUnit -import akka.amqp.rpc.RPC -import akka.remote.protocol.RemoteProtocol.AddressProtocol - -class AMQPProtobufProducerConsumerTestIntegration extends JUnitSuite with MustMatchers { - - @Test - def consumerMessage = AMQPTest.withCleanEndState { - - val connection = AMQP.newConnection() - - val responseLatch = new StandardLatch - - RPC.newProtobufRpcServer(connection, "protoexchange", requestHandler) - - val request = AddressProtocol.newBuilder.setHostname("testhost").setPort(4321).build - - def responseHandler(response: AddressProtocol) = { - assert(response.getHostname == request.getHostname.reverse) - responseLatch.open - } - AMQP.newProtobufConsumer(connection, responseHandler _, None, Some("proto.reply.key")) - - val producer = AMQP.newProtobufProducer[AddressProtocol](connection, Some("protoexchange")) - producer.send(request, Some("proto.reply.key")) - - responseLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - } - - def requestHandler(request: AddressProtocol): AddressProtocol = { - AddressProtocol.newBuilder.setHostname(request.getHostname.reverse).setPort(request.getPort).build - } -} diff --git a/akka-amqp/src/test/scala/akka/amqp/test/AMQPRpcClientServerTestIntegration.scala b/akka-amqp/src/test/scala/akka/amqp/test/AMQPRpcClientServerTestIntegration.scala deleted file mode 100644 index 8ada12c423..0000000000 --- a/akka-amqp/src/test/scala/akka/amqp/test/AMQPRpcClientServerTestIntegration.scala +++ /dev/null @@ -1,64 +0,0 @@ -package akka.amqp.test - -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -import akka.amqp._ -import rpc.RPC -import rpc.RPC.{RpcClientSerializer, RpcServerSerializer} -import akka.actor.Actor._ -import org.scalatest.matchers.MustMatchers -import java.util.concurrent.{CountDownLatch, TimeUnit} -import akka.amqp.AMQP._ -import org.scalatest.junit.JUnitSuite -import org.junit.Test -import akka.actor.Actor - -class AMQPRpcClientServerTestIntegration extends JUnitSuite with MustMatchers { - - @Test - def consumerMessage = AMQPTest.withCleanEndState { - - val connection = AMQP.newConnection() - - val countDown = new CountDownLatch(3) - val channelCallback = actorOf( new Actor { - def receive = { - case Started => countDown.countDown - case Restarting => () - case Stopped => () - } - }).start - - val exchangeName = "text_topic_exchange" - val channelParameters = ChannelParameters(channelCallback - = Some(channelCallback)) - - val rpcServerSerializer = new RpcServerSerializer[String, Int]( - new FromBinary[String] { - def fromBinary(bytes: Array[Byte]) = new String(bytes) - }, new ToBinary[Int] { - def toBinary(t: Int) = Array(t.toByte) - }) - - def requestHandler(request: String) = 3 - - val rpcServer = 
RPC.newRpcServer[String, Int](connection, exchangeName, rpcServerSerializer, - requestHandler _, Some("rpc.routing"), channelParameters = Some(channelParameters)) - - val rpcClientSerializer = new RpcClientSerializer[String, Int]( - new ToBinary[String] { - def toBinary(t: String) = t.getBytes - }, new FromBinary[Int] { - def fromBinary(bytes: Array[Byte]) = bytes.head.toInt - }) - - val rpcClient = RPC.newRpcClient[String, Int](connection, exchangeName, rpcClientSerializer, Some("rpc.routing"), - channelParameters = Some(channelParameters)) - - countDown.await(2, TimeUnit.SECONDS) must be(true) - val response = rpcClient.call("some_payload") - response must be(Some(3)) - } -} diff --git a/akka-amqp/src/test/scala/akka/amqp/test/AMQPRpcProtobufTestIntegration.scala b/akka-amqp/src/test/scala/akka/amqp/test/AMQPRpcProtobufTestIntegration.scala deleted file mode 100644 index dddc2f8432..0000000000 --- a/akka-amqp/src/test/scala/akka/amqp/test/AMQPRpcProtobufTestIntegration.scala +++ /dev/null @@ -1,49 +0,0 @@ -package akka.amqp.test - -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -import org.scalatest.matchers.MustMatchers -import org.scalatest.junit.JUnitSuite -import akka.amqp.AMQP -import akka.remote.protocol.RemoteProtocol.AddressProtocol -import org.junit.Test -import akka.amqp.rpc.RPC -import org.multiverse.api.latches.StandardLatch -import java.util.concurrent.TimeUnit - -class AMQPRpcProtobufTestIntegration extends JUnitSuite with MustMatchers { - - @Test - def consumerMessage = AMQPTest.withCleanEndState { - - val connection = AMQP.newConnection() - - RPC.newProtobufRpcServer(connection, "protoservice", requestHandler) - - val protobufClient = RPC.newProtobufRpcClient[AddressProtocol, AddressProtocol](connection, "protoservice") - - val request = AddressProtocol.newBuilder.setHostname("testhost").setPort(4321).build - - protobufClient.call(request) match { - case Some(response) => assert(response.getHostname == request.getHostname.reverse) - case None => fail("no response") - } - - val aSyncLatch = new StandardLatch - protobufClient.callAsync(request) { - case Some(response) => { - assert(response.getHostname == request.getHostname.reverse) - aSyncLatch.open - } - case None => fail("no response") - } - - aSyncLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - - } - - def requestHandler(request: AddressProtocol): AddressProtocol = { - AddressProtocol.newBuilder.setHostname(request.getHostname.reverse).setPort(request.getPort).build - } -} diff --git a/akka-amqp/src/test/scala/akka/amqp/test/AMQPRpcStringTestIntegration.scala b/akka-amqp/src/test/scala/akka/amqp/test/AMQPRpcStringTestIntegration.scala deleted file mode 100644 index 1610757ab5..0000000000 --- a/akka-amqp/src/test/scala/akka/amqp/test/AMQPRpcStringTestIntegration.scala +++ /dev/null @@ -1,47 +0,0 @@ -package akka.amqp.test - -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -import org.scalatest.matchers.MustMatchers -import org.scalatest.junit.JUnitSuite -import akka.amqp.AMQP -import org.junit.Test -import akka.amqp.rpc.RPC -import org.multiverse.api.latches.StandardLatch -import java.util.concurrent.TimeUnit - -class AMQPRpcStringTestIntegration extends JUnitSuite with MustMatchers { - - @Test - def consumerMessage = AMQPTest.withCleanEndState { - - val connection = AMQP.newConnection() - - RPC.newStringRpcServer(connection, "stringservice", requestHandler _) - - val protobufClient = RPC.newStringRpcClient(connection, "stringservice") - - val request = "teststring" - - 
protobufClient.call(request) match { - case Some(response) => assert(response == request.reverse) - case None => fail("no response") - } - - val aSyncLatch = new StandardLatch - protobufClient.callAsync(request) { - case Some(response) => { - assert(response == request.reverse) - aSyncLatch.open - } - case None => fail("no response") - } - - aSyncLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - } - - def requestHandler(request: String): String= { - request.reverse - } -} diff --git a/akka-amqp/src/test/scala/akka/amqp/test/AMQPStringProducerConsumerTestIntegration.scala b/akka-amqp/src/test/scala/akka/amqp/test/AMQPStringProducerConsumerTestIntegration.scala deleted file mode 100644 index 972fd0917d..0000000000 --- a/akka-amqp/src/test/scala/akka/amqp/test/AMQPStringProducerConsumerTestIntegration.scala +++ /dev/null @@ -1,44 +0,0 @@ -package akka.amqp.test - -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -import org.scalatest.matchers.MustMatchers -import org.scalatest.junit.JUnitSuite -import akka.amqp.AMQP -import org.junit.Test -import org.multiverse.api.latches.StandardLatch -import java.util.concurrent.TimeUnit -import akka.amqp.rpc.RPC - -class AMQPStringProducerConsumerTestIntegration extends JUnitSuite with MustMatchers { - - @Test - def consumerMessage = AMQPTest.withCleanEndState { - - val connection = AMQP.newConnection() - - val responseLatch = new StandardLatch - - RPC.newStringRpcServer(connection, "stringexchange", requestHandler _) - - val request = "somemessage" - - def responseHandler(response: String) = { - - assert(response == request.reverse) - responseLatch.open - } - AMQP.newStringConsumer(connection, responseHandler _, None, Some("string.reply.key")) - - val producer = AMQP.newStringProducer(connection, Some("stringexchange")) - producer.send(request, Some("string.reply.key")) - - responseLatch.tryAwait(2, TimeUnit.SECONDS) must be (true) - } - - def requestHandler(request: String): String= { - println("###### Reverse") - request.reverse - } -} diff --git a/akka-amqp/src/test/scala/akka/amqp/test/AMQPTest.scala b/akka-amqp/src/test/scala/akka/amqp/test/AMQPTest.scala deleted file mode 100644 index b9415c929a..0000000000 --- a/akka-amqp/src/test/scala/akka/amqp/test/AMQPTest.scala +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.amqp.test - -import akka.amqp.AMQP - -object AMQPTest { - - def withCleanEndState(action: => Unit) { - try { - try { - action - } finally { - AMQP.shutdownAll - } - } catch { - case e => println(e) - } - } -} diff --git a/akka-camel/src/main/java/akka/camel/consume.java b/akka-camel/src/main/java/akka/camel/consume.java deleted file mode 100644 index ebcc2efd29..0000000000 --- a/akka-camel/src/main/java/akka/camel/consume.java +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.camel; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * Annotation used by implementations of {@link akka.actor.TypedActor} - * (on method-level) to define consumer endpoints. - * - * @author Martin Krasser - */ -@Retention(RetentionPolicy.RUNTIME) -@Target({ElementType.METHOD}) -public @interface consume { - - /** - * Consumer endpoint URI - */ - public abstract String value(); - - /** - * Route definition handler class for customizing route to annotated method. 
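For plain-text messaging, the string convenience layer used in the test above hides queues, serializers and handler actors entirely. A trimmed-down sketch; the exchange name is illustrative, and the parameter layout follows the calls made in that test:

    import akka.amqp.AMQP

    object StringPubSubSketch {
      def main(args: Array[String]) {
        val connection = AMQP.newConnection()

        def handler(message: String) = println("received: " + message)
        // None for the routing key selects the "chat.request" default
        AMQP.newStringConsumer(connection, handler _, Some("chat"), None)

        val producer = AMQP.newStringProducer(connection, Some("chat"))
        producer.send("hello")
        AMQP.shutdownAll
      }
    }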
- * The handler class must have a default constructor. - */ - public abstract Class routeDefinitionHandler() - default RouteDefinitionIdentity.class; - -} diff --git a/akka-camel/src/main/resources/META-INF/services/org/apache/camel/component/actor b/akka-camel/src/main/resources/META-INF/services/org/apache/camel/component/actor deleted file mode 100644 index 386928c5a8..0000000000 --- a/akka-camel/src/main/resources/META-INF/services/org/apache/camel/component/actor +++ /dev/null @@ -1 +0,0 @@ -class=akka.camel.component.ActorComponent \ No newline at end of file diff --git a/akka-camel/src/main/resources/META-INF/services/org/apache/camel/component/typed-actor b/akka-camel/src/main/resources/META-INF/services/org/apache/camel/component/typed-actor deleted file mode 100644 index 02efe457e6..0000000000 --- a/akka-camel/src/main/resources/META-INF/services/org/apache/camel/component/typed-actor +++ /dev/null @@ -1 +0,0 @@ -class=akka.camel.component.TypedActorComponent \ No newline at end of file diff --git a/akka-camel/src/main/scala/akka/CamelContextLifecycle.scala b/akka-camel/src/main/scala/akka/CamelContextLifecycle.scala deleted file mode 100644 index 93375131d2..0000000000 --- a/akka-camel/src/main/scala/akka/CamelContextLifecycle.scala +++ /dev/null @@ -1,202 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.camel - -import java.util.Map - -import org.apache.camel.{ProducerTemplate, CamelContext} -import org.apache.camel.impl.DefaultCamelContext - -import akka.camel.component.TypedActorComponent -import akka.japi.{Option => JOption} -import akka.util.Logging - -/** - * Manages the lifecycle of a CamelContext. Allowed transitions are - * init -> start -> stop -> init -> ... etc. - * - * @author Martin Krasser - */ -trait CamelContextLifecycle extends Logging { - // TODO: enforce correct state transitions - // valid: init -> start -> stop -> init ... - - private var _context: Option[CamelContext] = None - private var _template: Option[ProducerTemplate] = None - - private var _initialized = false - private var _started = false - - /** - * Camel component for accessing typed actors. - */ - private[camel] var typedActorComponent: TypedActorComponent = _ - - /** - * Registry in which typed actors are TEMPORARILY registered during - * creation of Camel routes to these actors. - */ - private[camel] var typedActorRegistry: Map[String, AnyRef] = _ - - /** - * Returns Some(CamelContext) (containing the current CamelContext) - * if CamelContextLifecycle has been initialized, otherwise None. - */ - def context: Option[CamelContext] = _context - - /** - * Returns Some(ProducerTemplate) (containing the current ProducerTemplate) - * if CamelContextLifecycle has been initialized, otherwise None. - */ - def template: Option[ProducerTemplate] = _template - - /** - * Returns Some(CamelContext) (containing the current CamelContext) - * if CamelContextLifecycle has been initialized, otherwise None. - *
- * Java API. - */ - def getContext: JOption[CamelContext] = context - - /** - * Returns Some(ProducerTemplate) (containing the current ProducerTemplate) - * if CamelContextLifecycle has been initialized, otherwise None. - *
- * Java API. - */ - def getTemplate: JOption[ProducerTemplate] = template - - /** - * Returns the current CamelContext if this CamelContextLifecycle - * has been initialized, otherwise throws an IllegalStateException. - */ - def mandatoryContext = - if (context.isDefined) context.get - else throw new IllegalStateException("no current CamelContext") - - /** - * Returns the current ProducerTemplate if this CamelContextLifecycle - * has been initialized, otherwise throws an IllegalStateException. - */ - def mandatoryTemplate = - if (template.isDefined) template.get - else throw new IllegalStateException("no current ProducerTemplate") - - /** - * Returns the current CamelContext if this CamelContextLifecycle - * has been initialized, otherwise throws an IllegalStateException. - *
- * Java API. - */ - def getMandatoryContext = mandatoryContext - - /** - * Returns the current ProducerTemplate if this CamelContextLifecycle - * has been initialized, otherwise throws an IllegalStateException. - *
- * Java API. - */ - def getMandatoryTemplate = mandatoryTemplate - - def initialized = _initialized - def started = _started - - /** - * Starts the CamelContext and an associated ProducerTemplate. - */ - def start = { - for { - c <- context - t <- template - } { - c.start - t.start - _started = true - log.info("Camel context started") - } - } - - /** - * Stops the CamelContext and the associated ProducerTemplate. - */ - def stop = { - for { - t <- template - c <- context - } { - t.stop - c.stop - _started = false - _initialized = false - log.info("Camel context stopped") - } - } - - /** - * Initializes this lifecycle object with a DefaultCamelContext. - */ - def init(): Unit = init(new DefaultCamelContext) - - /** - * Initializes this lifecycle object with the given CamelContext. For the passed - * CamelContext, stream-caching is enabled. If applications want to disable stream- - * caching they can do so after this method has returned and prior to calling start. - * This method also registers a new TypedActorComponent at the passed CamelContext - * under a name defined by TypedActorComponent.InternalSchema. - */ - def init(context: CamelContext) { - this.typedActorComponent = new TypedActorComponent - this.typedActorRegistry = typedActorComponent.typedActorRegistry - - context.setStreamCaching(true) - context.addComponent(TypedActorComponent.InternalSchema, typedActorComponent) - - this._context = Some(context) - this._template = Some(context.createProducerTemplate) - - _initialized = true - log.info("Camel context initialized") - } -} - -/** - * Manages a global CamelContext and an associated ProducerTemplate. - */ -object CamelContextManager extends CamelContextLifecycle { - - // ----------------------------------------------------- - // The inherited getters aren't statically accessible - // from Java. Therefore, they are redefined here. - // TODO: investigate if this is a Scala bug. - // ----------------------------------------------------- - - /** - * see CamelContextLifecycle.getContext - *
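The intended lifecycle above is init -> start -> stop, usually driven through the global CamelContextManager defined just below. A usage sketch; the mock endpoint is illustrative and needs no routes:

    import akka.camel.CamelContextManager

    object CamelLifecycleSketch {
      def main(args: Array[String]) {
        CamelContextManager.init() // wraps a DefaultCamelContext, stream-caching enabled
        CamelContextManager.start

        // Option-based access...
        for (template <- CamelContextManager.template)
          template.sendBody("mock:result", "hello")
        // ...or fail-fast access that throws if init was skipped
        println(CamelContextManager.mandatoryContext.getName)

        CamelContextManager.stop
      }
    }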
- * Java API. - */ - override def getContext: JOption[CamelContext] = super.getContext - - /** - * see CamelContextLifecycle.getTemplate - *
- * Java API. - */ - override def getTemplate: JOption[ProducerTemplate] = super.getTemplate - - /** - * see CamelContextLifecycle.getMandatoryContext - *
- * Java API. - */ - override def getMandatoryContext = super.getMandatoryContext - - /** - * see CamelContextLifecycle.getMandatoryTemplate - *
- * Java API. - */ - override def getMandatoryTemplate = super.getMandatoryTemplate -} diff --git a/akka-camel/src/main/scala/akka/CamelService.scala b/akka-camel/src/main/scala/akka/CamelService.scala deleted file mode 100644 index da71701ae1..0000000000 --- a/akka-camel/src/main/scala/akka/CamelService.scala +++ /dev/null @@ -1,275 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.camel - -import java.util.concurrent.CountDownLatch -import java.util.concurrent.TimeUnit - -import org.apache.camel.CamelContext - -import akka.actor.Actor._ -import akka.actor.{AspectInitRegistry, ActorRegistry} -import akka.config.Config._ -import akka.japi.{SideEffect, Option => JOption} -import akka.util.{Logging, Bootable} - -/** - * Publishes (untyped) consumer actors and typed consumer actors via Camel endpoints. Actors - * are published (asynchronously) when they are started and unpublished (asynchronously) when - * they are stopped. The CamelService is notified about actor start- and stop-events by - * registering listeners at ActorRegistry and AspectInitRegistry. - * - * @author Martin Krasser - */ -trait CamelService extends Bootable with Logging { - private[camel] val consumerPublisher = actorOf[ConsumerPublisher] - private[camel] val publishRequestor = actorOf[PublishRequestor] - - private val serviceEnabled = config.getList("akka.enabled-modules").exists(_ == "camel") - - /** - * Starts this CamelService if the 'camel' module is listed in akka.enabled-modules. - */ - abstract override def onLoad = { - if (serviceEnabled) registerPublishRequestor - super.onLoad - if (serviceEnabled) start - } - - /** - * Stops this CamelService if the 'camel' module is listed in akka.enabled-modules. - */ - abstract override def onUnload = { - if (serviceEnabled) stop - super.onUnload - } - - @deprecated("use start() instead") - def load = start - - @deprecated("use stop() instead") - def unload = stop - - /** - * Starts this CamelService. Any started actor that is a consumer actor will be (asynchronously) - * published as a Camel endpoint. Consumer actors that are started after this method has returned will - * be published as well. Actor publishing is done asynchronously. A started (loaded) CamelService - * also publishes @consume annotated methods of typed actors that have been created - * with TypedActor.newInstance(..) (and TypedActor.newRemoteInstance(..) - * on a remote node). - */ - def start: CamelService = { - if (!publishRequestorRegistered) registerPublishRequestor - - // Only init and start if not already done by application - if (!CamelContextManager.initialized) CamelContextManager.init - if (!CamelContextManager.started) CamelContextManager.start - - // start actor that exposes consumer actors and typed actors via Camel endpoints - consumerPublisher.start - - // init publishRequestor so that buffered and future events are delivered to consumerPublisher - publishRequestor ! PublishRequestorInit(consumerPublisher) - - // Register this instance as current CamelService and return it - CamelServiceManager.register(this) - CamelServiceManager.mandatoryService - } - - /** - * Stops this CamelService. All published consumer actors and typed consumer actor methods will be - * unpublished asynchronously. 
- */ - def stop = { - // Unregister this instance as current CamelService - CamelServiceManager.unregister(this) - - // Remove related listeners from registry - unregisterPublishRequestor - - // Stop related services - consumerPublisher.stop - CamelContextManager.stop - } - - /** - * Waits for an expected number (count) of endpoints to be activated - * during execution of f. The wait-timeout is by default 10 seconds. - * Other timeout values can be set via the timeout and timeUnit - * parameters. - */ - def awaitEndpointActivation(count: Int, timeout: Long = 10, timeUnit: TimeUnit = TimeUnit.SECONDS)(f: => Unit): Boolean = { - val activation = expectEndpointActivationCount(count) - f; activation.await(timeout, timeUnit) - } - - /** - * Waits for an expected number (count) of endpoints to be de-activated - * during execution of f. The wait-timeout is by default 10 seconds. - * Other timeout values can be set via the timeout and timeUnit - * parameters. - */ - def awaitEndpointDeactivation(count: Int, timeout: Long = 10, timeUnit: TimeUnit = TimeUnit.SECONDS)(f: => Unit): Boolean = { - val activation = expectEndpointDeactivationCount(count) - f; activation.await(timeout, timeUnit) - } - - /** - * Waits for an expected number (count) of endpoints to be activated - * during execution of p. The wait timeout is 10 seconds. - *
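Because endpoint publishing is asynchronous, tests block on the await helpers above until the endpoints actually exist. A sketch of that idiom; EchoConsumer is hypothetical, and its Consumer/endpointUri contract and the Message.bodyAs conversion are assumed from this era's akka-camel API rather than shown in this diff:

    import akka.actor.Actor
    import akka.actor.Actor._
    import akka.camel.{CamelServiceManager, Consumer, Message}

    // Hypothetical consumer actor published by the CamelService
    class EchoConsumer extends Actor with Consumer {
      def endpointUri = "direct:echo"
      def receive = { case msg: Message => self.reply(msg.bodyAs[String]) }
    }

    object ActivationSketch {
      def main(args: Array[String]) {
        val service = CamelServiceManager.startCamelService
        val activated = service.awaitEndpointActivation(1) {
          actorOf[EchoConsumer].start
        }
        assert(activated, "endpoint not activated within 10 seconds")
        CamelServiceManager.stopCamelService
      }
    }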
- * Java API - */ - def awaitEndpointActivation(count: Int, p: SideEffect): Boolean = { - awaitEndpointActivation(count, 10, TimeUnit.SECONDS, p) - } - - /** - * Waits for an expected number (count) of endpoints to be activated - * during execution of p. Timeout values can be set via the - * timeout and timeUnit parameters. - *

- * Java API - */ - def awaitEndpointActivation(count: Int, timeout: Long, timeUnit: TimeUnit, p: SideEffect): Boolean = { - awaitEndpointActivation(count, timeout, timeUnit) { p.apply } - } - - /** - * Waits for an expected number (count) of endpoints to be deactivated - * during execution of p. The wait timeout is 10 seconds. - *

- * Java API - */ - def awaitEndpointDeactivation(count: Int, p: SideEffect): Boolean = { - awaitEndpointDeactivation(count, 10, TimeUnit.SECONDS, p) - } - - /** - * Waits for an expected number (count) of endpoints to be deactivated - * during execution of p. Timeout values can be set via the - * timeout and timeUnit parameters. - *

- * Java API - */ - def awaitEndpointDeactivation(count: Int, timeout: Long, timeUnit: TimeUnit, p: SideEffect): Boolean = { - awaitEndpointDeactivation(count, timeout, timeUnit) { p.apply } - } - - /** - * Sets an expectation on the number of upcoming endpoint activations and returns - * a CountDownLatch that can be used to wait for the activations to occur. Endpoint - * activations that occurred in the past are not considered. - */ - private def expectEndpointActivationCount(count: Int): CountDownLatch = - (consumerPublisher !! SetExpectedRegistrationCount(count)).as[CountDownLatch].get - - /** - * Sets an expectation on the number of upcoming endpoint deactivations and returns - * a CountDownLatch that can be used to wait for the deactivations to occur. Endpoint - * deactivations that occurred in the past are not considered. - */ - private def expectEndpointDeactivationCount(count: Int): CountDownLatch = - (consumerPublisher !! SetExpectedUnregistrationCount(count)).as[CountDownLatch].get - - private[camel] def publishRequestorRegistered: Boolean = { - ActorRegistry.hasListener(publishRequestor) || - AspectInitRegistry.hasListener(publishRequestor) - } - - private[camel] def registerPublishRequestor: Unit = { - ActorRegistry.addListener(publishRequestor) - AspectInitRegistry.addListener(publishRequestor) - } - - private[camel] def unregisterPublishRequestor: Unit = { - ActorRegistry.removeListener(publishRequestor) - AspectInitRegistry.removeListener(publishRequestor) - } -} - -/** - * Manages a global CamelService (the 'current' CamelService). - * - * @author Martin Krasser - */ -object CamelServiceManager { - - /** - * The current (optional) CamelService. Is defined when a CamelService has been started. - */ - private var _current: Option[CamelService] = None - - /** - * Starts a new CamelService and makes it the current CamelService. - * - * @see CamelService#start - * @see CamelService#onLoad - */ - def startCamelService = CamelServiceFactory.createCamelService.start - - /** - * Stops the current CamelService. - * - * @see CamelService#stop - * @see CamelService#onUnload - */ - def stopCamelService = for (s <- service) s.stop - - /** - * Returns Some(CamelService) if a CamelService - * has been started, None otherwise. - */ - def service = _current - - /** - * Returns Some(CamelService) (containing the current CamelService) - * if a CamelService has been started, None otherwise. - *

- * Java API - */ - def getService: JOption[CamelService] = CamelServiceManager.service - - /** - * Returns the current CamelService if a CamelService - * has been started, otherwise throws an IllegalStateException. - */ - def mandatoryService = - if (_current.isDefined) _current.get - else throw new IllegalStateException("no current CamelService") - - /** - * Returns the current CamelService if a CamelService - * has been started, otherwise throws an IllegalStateException. - *

- * Java API - */ - def getMandatoryService = mandatoryService - - private[camel] def register(service: CamelService) = - if (_current.isDefined) throw new IllegalStateException("current CamelService already registered") - else _current = Some(service) - - private[camel] def unregister(service: CamelService) = - if (_current == Some(service)) _current = None - else throw new IllegalStateException("only current CamelService can be unregistered") -} - -/** - * @author Martin Krasser - */ -object CamelServiceFactory { - /** - * Creates a new CamelService instance. - */ - def createCamelService: CamelService = new CamelService { } - - /** - * Creates a new CamelService instance and initializes it with the given CamelContext. - */ - def createCamelService(camelContext: CamelContext): CamelService = { - CamelContextManager.init(camelContext) - createCamelService - } -}
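
Taken together, CamelServiceManager and CamelServiceFactory support two boot styles. A brief sketch (supplying a custom CamelContext is optional; names are illustrative):

    import org.apache.camel.impl.DefaultCamelContext
    import akka.camel.{CamelServiceFactory, CamelServiceManager}

    // Variant 1: default CamelContext, managed as the 'current' service.
    CamelServiceManager.startCamelService

    // Variant 2: supply your own CamelContext before starting.
    val service = CamelServiceFactory.createCamelService(new DefaultCamelContext)
    service.start
    // ... later
    service.stop

 diff --git a/akka-camel/src/main/scala/akka/Consumer.scala b/akka-camel/src/main/scala/akka/Consumer.scala deleted file mode 100644 index a6323c3bae..0000000000 --- a/akka-camel/src/main/scala/akka/Consumer.scala +++ /dev/null @@ -1,145 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.camel - -import java.net.InetSocketAddress - -import org.apache.camel.{Exchange, Processor} -import org.apache.camel.model.{RouteDefinition, ProcessorDefinition} - -import akka.actor._ -import akka.japi.{Function => JFunction} - -/** - * Mixed in by Actor implementations that consume messages from Camel endpoints. - * - * @author Martin Krasser - */ -trait Consumer { self: Actor => - import RouteDefinitionHandler._ - - /** - * The default route definition handler is the identity function - */ - private[camel] var routeDefinitionHandler: RouteDefinitionHandler = identity - - /** - * Returns the Camel endpoint URI to consume messages from. - */ - def endpointUri: String - - /** - * Determines whether two-way communications between an endpoint and this consumer actor - * should be done in blocking or non-blocking mode (default is non-blocking). This method - * doesn't have any effect on one-way communications (they'll never block). - */ - def blocking = false - - /** - * Sets the route definition handler for creating a custom route to this consumer instance. - */ - def onRouteDefinition(h: RouteDefinition => ProcessorDefinition[_]): Unit = onRouteDefinition(from(h)) - - /** - * Sets the route definition handler for creating a custom route to this consumer instance. - *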

- * Java API. - */ - def onRouteDefinition(h: RouteDefinitionHandler): Unit = routeDefinitionHandler = h -} - -/** - * Java-friendly Consumer. - * - * @see UntypedConsumerActor - * @see RemoteUntypedConsumerActor - * - * @author Martin Krasser - */ -trait UntypedConsumer extends Consumer { self: UntypedActor => - final override def endpointUri = getEndpointUri - final override def blocking = isBlocking - - /** - * Returns the Camel endpoint URI to consume messages from. - */ - def getEndpointUri(): String - - /** - * Determines whether two-way communications between an endpoint and this consumer actor - * should be done in blocking or non-blocking mode (default is non-blocking). This method - * doesn't have any effect on one-way communications (they'll never block). - */ - def isBlocking() = super.blocking -} - -/** - * Subclass this abstract class to create an MDB-style untyped consumer actor. This - * class is meant to be used from Java. - */ -abstract class UntypedConsumerActor extends UntypedActor with UntypedConsumer - -/** - * Subclass this abstract class to create an MDB-style remote untyped consumer - * actor. This class is meant to be used from Java. - */ -abstract class RemoteUntypedConsumerActor(address: InetSocketAddress) extends RemoteUntypedActor(address) with UntypedConsumer { - def this(host: String, port: Int) = this(new InetSocketAddress(host, port)) -} - -/** - * A callback handler for route definitions to consumer actors. - * - * @author Martin Krasser - */ -trait RouteDefinitionHandler { - def onRouteDefinition(rd: RouteDefinition): ProcessorDefinition[_] -} - -/** - * The identity route definition handler. - * - * @author Martin Krasser - * - */ -class RouteDefinitionIdentity extends RouteDefinitionHandler { - def onRouteDefinition(rd: RouteDefinition) = rd -} - -/** - * @author Martin Krasser - */ -object RouteDefinitionHandler { - /** - * Returns the identity route definition handler - */ - val identity = new RouteDefinitionIdentity - - /** - * Creates a route definition handler from the given function. - */ - def from(f: RouteDefinition => ProcessorDefinition[_]) = new RouteDefinitionHandler { - def onRouteDefinition(rd: RouteDefinition) = f(rd) - } -}
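
A sketch of how a consumer typically uses onRouteDefinition, modeled loosely on the error-handling tests near the end of this diff; the endpoint URI is illustrative:

    import akka.actor.Actor
    import akka.camel.{Consumer, Message}
    import org.apache.camel.builder.Builder

    class ErrorHandlingConsumer extends Actor with Consumer {
      def endpointUri = "direct:error-handler-test" // illustrative

      // Wrap the generated route: mark exceptions as handled and turn
      // the exception message into the response body.
      onRouteDefinition { rd =>
        rd.onException(classOf[Exception]).handled(true).transform(Builder.exceptionMessage).end
      }

      protected def receive = {
        case msg: Message => throw new Exception("error: %s" format msg.bodyAs[String])
      }
    }

 - -/** - * @author Martin Krasser - */ -private[camel] object Consumer { - /** - * Applies a function f to actorRef if actorRef - * references a consumer actor. A valid reference to a consumer actor is a local actor - * reference with a target actor that implements the Consumer trait. The - * target Consumer object is passed as argument to f. This - * method returns None if actorRef is not a valid reference - * to a consumer actor, otherwise Some containing the result of f. 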
- */ - def forConsumer[T](actorRef: ActorRef)(f: Consumer => T): Option[T] = { - if (!actorRef.actor.isInstanceOf[Consumer]) None - else if (actorRef.remoteAddress.isDefined) None - else Some(f(actorRef.actor.asInstanceOf[Consumer])) - } -} diff --git a/akka-camel/src/main/scala/akka/ConsumerPublisher.scala b/akka-camel/src/main/scala/akka/ConsumerPublisher.scala deleted file mode 100644 index 39c4e0bb2f..0000000000 --- a/akka-camel/src/main/scala/akka/ConsumerPublisher.scala +++ /dev/null @@ -1,351 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.camel - -import collection.mutable.ListBuffer - -import java.io.InputStream -import java.lang.reflect.Method -import java.util.concurrent.CountDownLatch - -import org.apache.camel.builder.RouteBuilder -import org.apache.camel.model.{ProcessorDefinition, RouteDefinition} - -import akka.actor._ -import akka.camel.component.TypedActorComponent -import akka.util.Logging - -/** - * @author Martin Krasser - */ -private[camel] object ConsumerPublisher extends Logging { - /** - * Creates a route to the registered consumer actor. - */ - def handleConsumerActorRegistered(event: ConsumerActorRegistered) { - CamelContextManager.mandatoryContext.addRoutes(new ConsumerActorRouteBuilder(event)) - log.info("published actor %s at endpoint %s" format (event.actorRef, event.endpointUri)) - } - - /** - * Stops the route to the already unregistered consumer actor. - */ - def handleConsumerActorUnregistered(event: ConsumerActorUnregistered) { - CamelContextManager.mandatoryContext.stopRoute(event.uuid) - log.info("unpublished actor %s from endpoint %s" format (event.actorRef, event.endpointUri)) - } - - /** - * Creates a route to a typed actor method. - */ - def handleConsumerMethodRegistered(event: ConsumerMethodRegistered) { - CamelContextManager.typedActorRegistry.put(event.methodUuid, event.typedActor) - CamelContextManager.mandatoryContext.addRoutes(new ConsumerMethodRouteBuilder(event)) - log.info("published method %s of %s at endpoint %s" format (event.methodName, event.typedActor, event.endpointUri)) - } - - /** - * Stops the route to the already unregistered consumer actor method. - */ - def handleConsumerMethodUnregistered(event: ConsumerMethodUnregistered) { - CamelContextManager.typedActorRegistry.remove(event.methodUuid) - CamelContextManager.mandatoryContext.stopRoute(event.methodUuid) - log.info("unpublished method %s of %s from endpoint %s" format (event.methodName, event.typedActor, event.endpointUri)) - } -} - -/** - * Actor that publishes consumer actors and typed actor methods at Camel endpoints. - * The Camel context used for publishing is obtained via CamelContextManager.context. - * This actor accepts messages of type - * akka.camel.ConsumerActorRegistered, - * akka.camel.ConsumerActorUnregistered, - * akka.camel.ConsumerMethodRegistered and - * akka.camel.ConsumerMethodUnregistered. 
- * - * @author Martin Krasser - */ -private[camel] class ConsumerPublisher extends Actor { - import ConsumerPublisher._ - - @volatile private var registrationLatch = new CountDownLatch(0) - @volatile private var unregistrationLatch = new CountDownLatch(0) - - protected def receive = { - case r: ConsumerActorRegistered => { - handleConsumerActorRegistered(r) - registrationLatch.countDown - } - case u: ConsumerActorUnregistered => { - handleConsumerActorUnregistered(u) - unregistrationLatch.countDown - } - case mr: ConsumerMethodRegistered => { - handleConsumerMethodRegistered(mr) - registrationLatch.countDown - } - case mu: ConsumerMethodUnregistered => { - handleConsumerMethodUnregistered(mu) - unregistrationLatch.countDown - } - case SetExpectedRegistrationCount(num) => { - registrationLatch = new CountDownLatch(num) - self.reply(registrationLatch) - } - case SetExpectedUnregistrationCount(num) => { - unregistrationLatch = new CountDownLatch(num) - self.reply(unregistrationLatch) - } - case _ => { /* ignore */} - } -} - -private[camel] case class SetExpectedRegistrationCount(num: Int) -private[camel] case class SetExpectedUnregistrationCount(num: Int) - -/** - * Abstract route to a target which is either an actor or a typed actor method. - * - * @param endpointUri endpoint URI of the consumer actor or typed actor method. - * @param id actor identifier or typed actor identifier (registry key). - * - * @author Martin Krasser - */ -private[camel] abstract class ConsumerRouteBuilder(endpointUri: String, id: String) extends RouteBuilder { - // TODO: make conversions configurable - private val bodyConversions = Map( - "file" -> classOf[InputStream] - ) - - def configure = { - val schema = endpointUri take endpointUri.indexOf(":") // e.g. "http" from "http://whatever/..." - val cnvopt = bodyConversions.get(schema) - - onRouteDefinition(startRouteDefinition(cnvopt)).to(targetUri) - } - - protected def routeDefinitionHandler: RouteDefinitionHandler - protected def targetUri: String - - private def onRouteDefinition(rd: RouteDefinition) = routeDefinitionHandler.onRouteDefinition(rd) - private def startRouteDefinition(bodyConversion: Option[Class[_]]): RouteDefinition = bodyConversion match { - case Some(clazz) => from(endpointUri).routeId(id).convertBodyTo(clazz) - case None => from(endpointUri).routeId(id) - } -} - -/** - * Defines the route to an (untyped) consumer actor. - * - * @author Martin Krasser - */ -private[camel] class ConsumerActorRouteBuilder(event: ConsumerActorRegistered) extends ConsumerRouteBuilder(event.endpointUri, event.uuid) { - protected def routeDefinitionHandler: RouteDefinitionHandler = event.routeDefinitionHandler - protected def targetUri = "actor:uuid:%s?blocking=%s" format (event.uuid, event.blocking) -} - -/** - * Defines the route to a typed actor method. - * - * @author Martin Krasser - */ -private[camel] class ConsumerMethodRouteBuilder(event: ConsumerMethodRegistered) extends ConsumerRouteBuilder(event.endpointUri, event.methodUuid) { - protected def routeDefinitionHandler: RouteDefinitionHandler = event.routeDefinitionHandler - protected def targetUri = "%s:%s?method=%s" format (TypedActorComponent.InternalSchema, event.methodUuid, event.methodName) -} - -/** - * A registration listener that triggers publication of consumer actors and typed actor - * methods as well as un-publication of consumer actors and typed actor methods. This actor - * needs to be initialized with a PublishRequestorInit command message for - * obtaining a reference to a publisher actor. 
Before initialization it buffers - * all outbound messages and delivers them to the publisher when receiving a - * PublishRequestorInit message. After initialization, outbound messages are - * delivered directly without buffering. - * - * @see PublishRequestorInit - * - * @author Martin Krasser - */ -private[camel] class PublishRequestor extends Actor { - private val events = ListBuffer[ConsumerEvent]() - private var publisher: Option[ActorRef] = None - - protected def receive = { - case ActorRegistered(actor) => - for (event <- ConsumerActorRegistered.forConsumer(actor)) deliverCurrentEvent(event) - case ActorUnregistered(actor) => - for (event <- ConsumerActorUnregistered.forConsumer(actor)) deliverCurrentEvent(event) - case AspectInitRegistered(proxy, init) => - for (event <- ConsumerMethodRegistered.forConsumer(proxy, init)) deliverCurrentEvent(event) - case AspectInitUnregistered(proxy, init) => - for (event <- ConsumerMethodUnregistered.forConsumer(proxy, init)) deliverCurrentEvent(event) - case PublishRequestorInit(pub) => { - publisher = Some(pub) - deliverBufferedEvents - } - case _ => { /* ignore */ } - } - - private def deliverCurrentEvent(event: ConsumerEvent) = { - publisher match { - case Some(pub) => pub ! event - case None => events += event - } - } - - private def deliverBufferedEvents = { - for (event <- events) deliverCurrentEvent(event) - events.clear - } -} - -/** - * Command message to initialize a PublishRequestor to use consumerPublisher - * for publishing actors or typed actor methods. - */ -private[camel] case class PublishRequestorInit(consumerPublisher: ActorRef) - -/** - * A consumer (un)registration event. - */ -private[camel] sealed trait ConsumerEvent - -/** - * A consumer actor (un)registration event. - */ -private[camel] trait ConsumerActorEvent extends ConsumerEvent { - val actorRef: ActorRef - val actor: Consumer - - val uuid = actorRef.uuid.toString - val endpointUri = actor.endpointUri - val blocking = actor.blocking - val routeDefinitionHandler = actor.routeDefinitionHandler -} - -/** - * A consumer method (un)registration event. - */ -private[camel] trait ConsumerMethodEvent extends ConsumerEvent { - val typedActor: AnyRef - val init: AspectInit - val method: Method - - val uuid = init.actorRef.uuid.toString - val methodName = method.getName - val methodUuid = "%s_%s" format (uuid, methodName) - - lazy val routeDefinitionHandler = consumeAnnotation.routeDefinitionHandler.newInstance - lazy val consumeAnnotation = method.getAnnotation(classOf[consume]) - lazy val endpointUri = consumeAnnotation.value -} - -/** - * Event indicating that a consumer actor has been registered at the actor registry. - */ -private[camel] case class ConsumerActorRegistered(actorRef: ActorRef, actor: Consumer) extends ConsumerActorEvent - -/** - * Event indicating that a consumer actor has been unregistered from the actor registry. - */ -private[camel] case class ConsumerActorUnregistered(actorRef: ActorRef, actor: Consumer) extends ConsumerActorEvent - -/** - * Event indicating that a typed actor proxy has been created for a typed actor. For each @consume - * annotated typed actor method a separate instance of this class is created. - */ -private[camel] case class ConsumerMethodRegistered(typedActor: AnyRef, init: AspectInit, method: Method) extends ConsumerMethodEvent - -/** - * Event indicating that a typed actor has been stopped. For each @consume - * annotated typed actor method a separate instance of this class is created. 
- */ -private[camel] case class ConsumerMethodUnregistered(typedActor: AnyRef, init: AspectInit, method: Method) extends ConsumerMethodEvent - -/** - * @author Martin Krasser - */ -private[camel] object ConsumerActorRegistered { - /** - * Creates a ConsumerActorRegistered event message for a consumer actor or None if - * actorRef is not a consumer actor. - */ - def forConsumer(actorRef: ActorRef): Option[ConsumerActorRegistered] = { - Consumer.forConsumer[ConsumerActorRegistered](actorRef) { - actor => ConsumerActorRegistered(actorRef, actor) - } - } -} - -/** - * @author Martin Krasser - */ -private[camel] object ConsumerActorUnregistered { - /** - * Creates a ConsumerActorUnregistered event message for a consumer actor or None if - * actorRef is not a consumer actor. - */ - def forConsumer(actorRef: ActorRef): Option[ConsumerActorUnregistered] = { - Consumer.forConsumer[ConsumerActorUnregistered](actorRef) { - actor => ConsumerActorUnregistered(actorRef, actor) - } - } -} - -/** - * @author Martin Krasser - */ -private[camel] object ConsumerMethod { - /** - * Applies a function f to each consumer method of typedActor and - * returns the function results as a list. A consumer method is one that is annotated with - * @consume. If typedActor is a proxy for a remote typed actor - * f is never called and Nil is returned. - */ - def forConsumer[T](typedActor: AnyRef, init: AspectInit)(f: Method => T): List[T] = { - if (init.remoteAddress.isDefined) Nil // let remote node publish typed actor methods on endpoints - else { - // TODO: support consumer annotation inheritance - // - visit overridden methods in superclasses - // - visit implemented method declarations in interfaces - val intfClass = typedActor.getClass - val implClass = init.targetInstance.getClass - (for (m <- intfClass.getMethods.toList; if (m.isAnnotationPresent(classOf[consume]))) yield f(m)) ++ - (for (m <- implClass.getMethods.toList; if (m.isAnnotationPresent(classOf[consume]))) yield f(m)) - } - } -} - -/** - * @author Martin Krasser - */ -private[camel] object ConsumerMethodRegistered { - /** - * Creates a list of ConsumerMethodRegistered event messages for a typed actor or an empty - * list if the typed actor is a proxy for a remote typed actor or the typed actor doesn't - * have any @consume annotated methods. - */ - def forConsumer(typedActor: AnyRef, init: AspectInit): List[ConsumerMethodRegistered] = { - ConsumerMethod.forConsumer(typedActor, init) { - m => ConsumerMethodRegistered(typedActor, init, m) - } - } -} - -/** - * @author Martin Krasser - */ -private[camel] object ConsumerMethodUnregistered { - /** - * Creates a list of ConsumerMethodUnregistered event messages for a typed actor or an empty - * list if the typed actor is a proxy for a remote typed actor or the typed actor doesn't - * have any @consume annotated methods. 
- */ - def forConsumer(typedActor: AnyRef, init: AspectInit): List[ConsumerMethodUnregistered] = { - ConsumerMethod.forConsumer(typedActor, init) { - m => ConsumerMethodUnregistered(typedActor, init, m) - } - } -} diff --git a/akka-camel/src/main/scala/akka/Message.scala b/akka-camel/src/main/scala/akka/Message.scala deleted file mode 100644 index aa1fcbd083..0000000000 --- a/akka-camel/src/main/scala/akka/Message.scala +++ /dev/null @@ -1,380 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.camel - -import java.util.{Map => JMap, Set => JSet} - -import scala.collection.JavaConversions._ - -import org.apache.camel.{Exchange, Message => CamelMessage} -import org.apache.camel.util.ExchangeHelper - -import akka.japi.{Function => JFunction} - -/** - * An immutable representation of a Camel message. - * - * @author Martin Krasser - */ -case class Message(val body: Any, val headers: Map[String, Any] = Map.empty) { - - /** - * Creates a Message with given body and empty headers map. - */ - def this(body: Any) = this(body, Map.empty[String, Any]) - - /** - * Creates a Message with given body and headers map. A copy of the headers map is made. - *

- * Java API - */ - def this(body: Any, headers: JMap[String, Any]) = this(body, headers.toMap) - - /** - * Returns the body of the message converted to the type T. Conversion is done - * using Camel's type converter. The type converter is obtained from the CamelContext managed - * by CamelContextManager. Applications have to ensure proper initialization of - * CamelContextManager. - * - * @see CamelContextManager. - */ - def bodyAs[T](implicit m: Manifest[T]): T = getBodyAs(m.erasure.asInstanceOf[Class[T]]) - - /** - * Returns the body of the message converted to the type as given by the clazz - * parameter. Conversion is done using Camel's type converter. The type converter is obtained - * from the CamelContext managed by CamelContextManager. Applications have to ensure proper - * initialization of CamelContextManager. - *

- * Java API - * - * @see CamelContextManager. - */ - def getBodyAs[T](clazz: Class[T]): T = - CamelContextManager.mandatoryContext.getTypeConverter.mandatoryConvertTo[T](clazz, body) - - /** - * Returns those headers from this message whose name is contained in names. - */ - def headers(names: Set[String]): Map[String, Any] = headers.filter(names contains _._1) - - /** - * Returns those headers from this message whose name is contained in names. - * The returned headers map is backed by an immutable headers map. Any attempt to modify - * the returned map will throw an exception. - *

- * Java API - */ - def getHeaders(names: JSet[String]): JMap[String, Any] = headers.filter(names contains _._1) - - /** - * Returns all headers from this message. The returned headers map is backed by this - * message's immutable headers map. Any attempt to modify the returned map will throw an - * exception. - *

- * Java API - */ - def getHeaders: JMap[String, Any] = headers - - /** - * Returns the header with given name. Throws NoSuchElementException - * if the header doesn't exist. - */ - def header(name: String): Any = headers(name) - - /** - * Returns the header with given name. Throws NoSuchElementException - * if the header doesn't exist. - *

- * Java API - */ - def getHeader(name: String): Any = header(name) - - /** - * Returns the header with given name converted to type T. Throws - * NoSuchElementException if the header doesn't exist. - */ - def headerAs[T](name: String)(implicit m: Manifest[T]): T = - getHeaderAs(name, m.erasure.asInstanceOf[Class[T]]) - - /** - * Returns the header with given name converted to type as given by the clazz - * parameter. Throws NoSuchElementException if the header doesn't exist. - *

- * Java API - */ - def getHeaderAs[T](name: String, clazz: Class[T]): T = - CamelContextManager.mandatoryContext.getTypeConverter.mandatoryConvertTo[T](clazz, header(name)) - - /** - * Creates a Message with a transformed body using a transformer function. - */ - def transformBody[A](transformer: A => Any): Message = setBody(transformer(body.asInstanceOf[A])) - - /** - * Creates a Message with a transformed body using a transformer function. - *

- * Java API - */ - def transformBody[A](transformer: JFunction[A, Any]): Message = setBody(transformer(body.asInstanceOf[A])) - - /** - * Creates a Message with current body converted to type T. - */ - def setBodyAs[T](implicit m: Manifest[T]): Message = setBodyAs(m.erasure.asInstanceOf[Class[T]]) - - /** - * Creates a Message with current body converted to type clazz. - *

- * Java API - */ - def setBodyAs[T](clazz: Class[T]): Message = setBody(getBodyAs(clazz)) - - /** - * Creates a Message with a given body. - */ - def setBody(body: Any) = new Message(body, this.headers) - - /** - * Creates a new Message with given headers. - */ - def setHeaders(headers: Map[String, Any]): Message = copy(this.body, headers) - - /** - * Creates a new Message with given headers. A copy of the headers map is made. - *

- * Java API - */ - def setHeaders(headers: JMap[String, Any]): Message = setHeaders(headers.toMap) - - /** - * Creates a new Message with given headers added to the current headers. - */ - def addHeaders(headers: Map[String, Any]): Message = copy(this.body, this.headers ++ headers) - - /** - * Creates a new Message with given headers added to the current headers. - * A copy of the headers map is made. - *

- * Java API - */ - def addHeaders(headers: JMap[String, Any]): Message = addHeaders(headers.toMap) - - /** - * Creates a new Message with the given header added to the current headers. - */ - def addHeader(header: (String, Any)): Message = copy(this.body, this.headers + header) - - /** - * Creates a new Message with the given header, represented by name and - * value added to the existing headers. - *

- * Java API - */ - def addHeader(name: String, value: Any): Message = addHeader((name, value)) - - /** - * Creates a new Message where the header with given headerName is removed from - * the existing headers. - */ - def removeHeader(headerName: String) = copy(this.body, this.headers - headerName) -} - -/** - * Companion object of Message class. - * - * @author Martin Krasser - */ -object Message { - - /** - * Message header to correlate request with response messages. Applications that send - * messages to a Producer actor may want to set this header on the request message - * so that it can be correlated with an asynchronous response. Messages sent to Consumer - * actors have this header already set. - */ - val MessageExchangeId = "MessageExchangeId".intern - - /** - * Creates a new Message with body as message body and an empty header map. - */ - //def apply(body: Any) = new Message(body) - - /** - * Creates a canonical form of the given message msg. If msg is of type - * Message then msg is returned, otherwise msg is set as body of a - * newly created Message object. - */ - def canonicalize(msg: Any) = msg match { - case mobj: Message => mobj - case body => new Message(body) - } -}
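
A quick sketch of the Message API above; it assumes CamelContextManager.init() has been called so Camel's type converter is available, and all values are illustrative:

    import akka.camel.{CamelContextManager, Message}

    CamelContextManager.init()

    val msg = Message(1.4, Map("A" -> "1"))
    msg.bodyAs[String]                // "1.4", via Camel's type converter
    msg.setBodyAs[String]             // Message("1.4", Map("A" -> "1"))
    msg.addHeader("B" -> "2").headers // Map("A" -> "1", "B" -> "2")
    Message.canonicalize("raw")       // wraps non-Message values: Message("raw")

 - -/** - * An immutable representation of a failed Camel exchange. It contains the failure cause - * obtained from Exchange.getException and the headers from either the Exchange.getIn - * message or Exchange.getOut message, depending on the exchange pattern. - * - * @author Martin Krasser - */ -case class Failure(val cause: Exception, val headers: Map[String, Any] = Map.empty) { - - /** - * Creates a Failure with the given cause and an empty headers map. - */ - def this(cause: Exception) = this(cause, Map.empty[String, Any]) - - /** - * Creates a Failure with given cause and headers map. A copy of the headers map is made. - *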

- * Java API - */ - def this(cause: Exception, headers: JMap[String, Any]) = this(cause, headers.toMap) - - /** - * Returns the cause of this Failure. - *

- * Java API. - */ - def getCause = cause - - /** - * Returns all headers from this failure message. The returned headers map is backed by - * this message's immutable headers map. Any attempt to modify the returned map will throw - * an exception. - *

- * Java API - */ - def getHeaders: JMap[String, Any] = headers -} - -/** - * Adapter for converting an org.apache.camel.Exchange to and from Message and Failure objects. - * - * @author Martin Krasser - */ -class CamelExchangeAdapter(exchange: Exchange) { - import CamelMessageConversion.toMessageAdapter - - /** - * Sets Exchange.getIn from the given Message object. - */ - def fromRequestMessage(msg: Message): Exchange = { requestMessage.fromMessage(msg); exchange } - - /** - * Depending on the exchange pattern, sets Exchange.getIn or Exchange.getOut from the given - * Message object. If the exchange is out-capable then the Exchange.getOut is set, otherwise - * Exchange.getIn. - */ - def fromResponseMessage(msg: Message): Exchange = { responseMessage.fromMessage(msg); exchange } - - /** - * Sets Exchange.getException from the given Failure message. Headers of the Failure message - * are ignored. - */ - def fromFailureMessage(msg: Failure): Exchange = { exchange.setException(msg.cause); exchange } - - /** - * Creates a Message object from Exchange.getIn. - */ - def toRequestMessage: Message = toRequestMessage(Map.empty) - - /** - * Depending on the exchange pattern, creates a Message object from Exchange.getIn or Exchange.getOut. - * If the exchange is out-capable then the message is created from Exchange.getOut, otherwise from - * Exchange.getIn. - */ - def toResponseMessage: Message = toResponseMessage(Map.empty) - - /** - * Creates a Failure object from the adapted Exchange. - * - * @see Failure - */ - def toFailureMessage: Failure = toFailureMessage(Map.empty) - - /** - * Creates a Message object from Exchange.getIn. - * - * @param headers additional headers to set on the created Message in addition to those - * in the Camel message. - */ - def toRequestMessage(headers: Map[String, Any]): Message = requestMessage.toMessage(headers) - - /** - * Depending on the exchange pattern, creates a Message object from Exchange.getIn or Exchange.getOut. - * If the exchange is out-capable then the message is created from Exchange.getOut, otherwise from - * Exchange.getIn. - * - * @param headers additional headers to set on the created Message in addition to those - * in the Camel message. - */ - def toResponseMessage(headers: Map[String, Any]): Message = responseMessage.toMessage(headers) - - /** - * Creates a Failure object from the adapted Exchange. - * - * @param headers additional headers to set on the created Message in addition to those - * in the Camel message. - * - * @see Failure - */ - def toFailureMessage(headers: Map[String, Any]): Failure = - Failure(exchange.getException, headers ++ responseMessage.toMessage.headers) - - private def requestMessage = exchange.getIn - - private def responseMessage = ExchangeHelper.getResultMessage(exchange) - -} - -/** - * Adapter for converting an org.apache.camel.Message to and from Message objects. - * - * @author Martin Krasser - */ -class CamelMessageAdapter(val cm: CamelMessage) { - /** - * Sets the adapted Camel message from the given Message object. - */ - def fromMessage(m: Message): CamelMessage = { - cm.setBody(m.body) - for (h <- m.headers) cm.getHeaders.put(h._1, h._2.asInstanceOf[AnyRef]) - cm - } - - /** - * Creates a new Message object from the adapted Camel message. - */ - def toMessage: Message = toMessage(Map.empty) - - /** - * Creates a new Message object from the adapted Camel message. - * - * @param headers additional headers to set on the created Message in addition to those - * in the Camel message. 
- */ - def toMessage(headers: Map[String, Any]): Message = Message(cm.getBody, cmHeaders(headers, cm)) - - private def cmHeaders(headers: Map[String, Any], cm: CamelMessage) = headers ++ cm.getHeaders -} - -/** - * Defines conversion methods to CamelExchangeAdapter and CamelMessageAdapter. - * Imported by applications that implicitly want to use conversion methods of - * CamelExchangeAdapter and CamelMessageAdapter. - */ -object CamelMessageConversion { - - /** - * Creates a CamelExchangeAdapter for the given Camel exchange. - */ - implicit def toExchangeAdapter(ce: Exchange): CamelExchangeAdapter = - new CamelExchangeAdapter(ce) - - /** - * Creates a CamelMessageAdapter for the given Camel message. - */ - implicit def toMessageAdapter(cm: CamelMessage): CamelMessageAdapter = - new CamelMessageAdapter(cm) -} diff --git a/akka-camel/src/main/scala/akka/Producer.scala b/akka-camel/src/main/scala/akka/Producer.scala deleted file mode 100644 index ae23ae8c4e..0000000000 --- a/akka-camel/src/main/scala/akka/Producer.scala +++ /dev/null @@ -1,256 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.camel - -import CamelMessageConversion.toExchangeAdapter - -import org.apache.camel._ -import org.apache.camel.processor.SendProcessor - -import akka.actor.{Actor, ActorRef, UntypedActor} - -/** - * Support trait for producing messages to Camel endpoints. - * - * @author Martin Krasser - */ -trait ProducerSupport { this: Actor => - - /** - * Message headers to copy by default from request message to response message. - */ - private val headersToCopyDefault = Set(Message.MessageExchangeId) - - /** - * Endpoint object resolved from the current CamelContext with - * endpointUri. - */ - private lazy val endpoint = CamelContextManager.mandatoryContext.getEndpoint(endpointUri) - - /** - * SendProcessor for producing messages to endpoint. - */ - private lazy val processor = createSendProcessor - - /** - * If set to false (default), this producer expects a response message from the Camel endpoint. - * If set to true, this producer initiates an in-only message exchange with the Camel endpoint - * (fire and forget). - */ - def oneway: Boolean = false - - /** - * Returns the Camel endpoint URI to produce messages to. - */ - def endpointUri: String - - /** - * Returns the names of message headers to copy from a request message to a response message. - * By default only the Message.MessageExchangeId is copied. Applications may override this to - * define an application-specific set of message headers to copy. - */ - def headersToCopy: Set[String] = headersToCopyDefault - - /** - * Default implementation of Actor.postStop for freeing resources needed - * to actually send messages to endpointUri. - */ - override def postStop { - processor.stop - } - - /** - * Initiates a message exchange of given pattern with the endpoint specified by - * endpointUri. The in-message of the initiated exchange is the canonical form - * of msg. After sending the in-message, the processing result (response) is passed - * as argument to receiveAfterProduce. If the response is received synchronously from - * the endpoint then receiveAfterProduce is called synchronously as well. If the - * response is received asynchronously, the receiveAfterProduce is called - * asynchronously. This is done by wrapping the response, adding it to this producer's - * mailbox, unwrapping it and calling receiveAfterProduce. The original - * sender and senderFuture are thereby preserved. 
- * - * @see Message#canonicalize(Any) - * - * @param msg message to produce - * @param pattern exchange pattern - */ - protected def produce(msg: Any, pattern: ExchangePattern): Unit = { - val cmsg = Message.canonicalize(msg) - val exchange = createExchange(pattern).fromRequestMessage(cmsg) - processor.process(exchange, new AsyncCallback { - val producer = self - // Need copies of sender and senderFuture references here - // since the callback could be done later by another thread. - val sender = self.sender - val senderFuture = self.senderFuture - - def done(doneSync: Boolean): Unit = { - (doneSync, exchange.isFailed) match { - case (true, true) => dispatchSync(exchange.toFailureMessage(cmsg.headers(headersToCopy))) - case (true, false) => dispatchSync(exchange.toResponseMessage(cmsg.headers(headersToCopy))) - case (false, true) => dispatchAsync(FailureResult(exchange.toFailureMessage(cmsg.headers(headersToCopy)))) - case (false, false) => dispatchAsync(MessageResult(exchange.toResponseMessage(cmsg.headers(headersToCopy)))) - } - } - - private def dispatchSync(result: Any) = - receiveAfterProduce(result) - - private def dispatchAsync(result: Any) = { - if (senderFuture.isDefined) - producer.postMessageToMailboxAndCreateFutureResultWithTimeout(result, producer.timeout, sender, senderFuture) - else - producer.postMessageToMailbox(result, sender) - } - }) - } - - /** - * Produces msg to the endpoint specified by endpointUri. Before the message is - * actually sent it is pre-processed by calling receiveBeforeProduce. If oneway - * is true, an in-only message exchange is initiated, otherwise an in-out message exchange. - * - * @see Producer#produce(Any, ExchangePattern) - */ - protected def produce: Receive = { - case res: MessageResult => receiveAfterProduce(res.message) - case res: FailureResult => receiveAfterProduce(res.failure) - case msg => { - if (oneway) - produce(receiveBeforeProduce(msg), ExchangePattern.InOnly) - else - produce(receiveBeforeProduce(msg), ExchangePattern.InOut) - } - } - - /** - * Called before the message is sent to the endpoint specified by endpointUri. The original - * message is passed as argument. By default, this method simply returns the argument but may be overridden - * by subtraits or subclasses. - */ - protected def receiveBeforeProduce: PartialFunction[Any, Any] = { - case msg => msg - } - - /** - * Called after a response was received from the endpoint specified by endpointUri. The - * response is passed as argument. By default, this method sends the response back to the original sender - * if oneway is false. If oneway is true, nothing is - * done. This method may be overridden by subtraits or subclasses (e.g. to forward responses to another - * actor). - */ - protected def receiveAfterProduce: Receive = { - case msg => if (!oneway) self.reply(msg) - } - - /** - * Creates a new Exchange of given pattern from the endpoint specified by - * endpointUri. - */ - private def createExchange(pattern: ExchangePattern): Exchange = endpoint.createExchange(pattern) - - /** - * Creates a new SendProcessor for endpoint. - */ - private def createSendProcessor = { - val sendProcessor = new SendProcessor(endpoint) - sendProcessor.start - sendProcessor - } -}
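
A minimal sketch of the producer side, using the Producer and Oneway traits defined just below; endpoint URIs are illustrative and the request-reply variant assumes the camel-http component is available:

    import akka.actor.Actor
    import akka.camel.{Message, Oneway, Producer}

    // Request-reply: the endpoint's response is sent back to the original sender.
    class HttpProducer extends Actor with Producer {
      def endpointUri = "http://localhost:8877/camel/default" // illustrative
    }

    // Fire-and-forget: Oneway switches produce to in-only exchanges.
    class QueueProducer extends Actor with Oneway {
      def endpointUri = "jms:queue:test" // illustrative, needs a configured JMS component
    }

    val producer = Actor.actorOf[HttpProducer].start
    val response = producer !! Message("hello", Map()) // Option with the response message

 - -/** - * Mixed in by Actor implementations to produce messages to Camel endpoints. - */ -trait Producer extends ProducerSupport { this: Actor => - - /** - * Default implementation of Actor.receive. Any messages received by this actor - * will be produced to the endpoint specified by endpointUri. 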
- */ - protected def receive = produce -} - -/** - * Java-friendly ProducerSupport. - * - * @see UntypedProducerActor - * - * @author Martin Krasser - */ -trait UntypedProducer extends ProducerSupport { this: UntypedActor => - final override def endpointUri = getEndpointUri - final override def oneway = isOneway - - final override def receiveBeforeProduce = { - case msg => onReceiveBeforeProduce(msg) - } - - final override def receiveAfterProduce = { - case msg => onReceiveAfterProduce(msg) - } - - /** - * Default implementation of UntypedActor.onReceive - */ - def onReceive(message: Any) = produce(message) - - /** - * Returns the Camel endpoint URI to produce messages to. - */ - def getEndpointUri(): String - - /** - * If set to false (default), this producer expects a response message from the Camel endpoint. - * If set to true, this producer communicates with the Camel endpoint with an in-only message - * exchange pattern (fire and forget). - */ - def isOneway() = super.oneway - - /** - * Called before the message is sent to the endpoint specified by getEndpointUri. The original - * message is passed as argument. By default, this method simply returns the argument but may be overridden - * by subclasses. - */ - @throws(classOf[Exception]) - def onReceiveBeforeProduce(message: Any): Any = super.receiveBeforeProduce(message) - - /** - * Called after a response was received from the endpoint specified by endpointUri. The - * response is passed as argument. By default, this method sends the response back to the original sender - * if oneway is false. If oneway is true, nothing is - * done. This method may be overridden by subclasses (e.g. to forward responses to another actor). - */ - @throws(classOf[Exception]) - def onReceiveAfterProduce(message: Any): Unit = super.receiveAfterProduce(message) -} - -/** - * Subclass this abstract class to create an untyped producer actor. This class is meant to be used from Java. - * - * @author Martin Krasser - */ -abstract class UntypedProducerActor extends UntypedActor with UntypedProducer - -/** - * @author Martin Krasser - */ -private[camel] case class MessageResult(message: Message) - -/** - * @author Martin Krasser - */ -private[camel] case class FailureResult(failure: Failure) - -/** - * A one-way producer. - * - * @author Martin Krasser - */ -trait Oneway extends Producer { this: Actor => - override def oneway = true -} - diff --git a/akka-camel/src/main/scala/akka/component/ActorComponent.scala b/akka-camel/src/main/scala/akka/component/ActorComponent.scala deleted file mode 100644 index e84a894ee3..0000000000 --- a/akka-camel/src/main/scala/akka/component/ActorComponent.scala +++ /dev/null @@ -1,305 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.camel.component - -import java.net.InetSocketAddress -import java.util.{Map => JMap} -import java.util.concurrent.TimeoutException -import java.util.concurrent.atomic.AtomicReference - -import org.apache.camel._ -import org.apache.camel.impl.{DefaultProducer, DefaultEndpoint, DefaultComponent} - -import akka.actor._ -import akka.camel.{Failure, Message} -import akka.camel.CamelMessageConversion.toExchangeAdapter -import akka.dispatch.{CompletableFuture, MessageInvocation, MessageDispatcher} - -import scala.reflect.BeanProperty - -/** - * @author Martin Krasser - */ -object ActorComponent { - /** - * Name of the message header containing the actor id or uuid. 
- */ - val ActorIdentifier = "CamelActorIdentifier" -} - -/** - * Camel component for sending messages to and receiving replies from (untyped) actors. - * - * @see akka.camel.component.ActorEndpoint - * @see akka.camel.component.ActorProducer - * - * @author Martin Krasser - */ -class ActorComponent extends DefaultComponent { - def createEndpoint(uri: String, remaining: String, parameters: JMap[String, Object]): ActorEndpoint = { - val (idType, idValue) = parsePath(remaining) - new ActorEndpoint(uri, this, idType, idValue) - } - - private def parsePath(remaining: String): Tuple2[String, Option[String]] = remaining match { - case null | "" => throw new IllegalArgumentException("invalid path: [%s] - should be <actorid> or id:<actorid> or uuid:<actoruuid>" format remaining) - case id if id startsWith "id:" => ("id", parseIdentifier(id substring 3)) - case uuid if uuid startsWith "uuid:" => ("uuid", parseIdentifier(uuid substring 5)) - case id => ("id", parseIdentifier(id)) - } - - private def parseIdentifier(identifier: String): Option[String] = - if (identifier.length > 0) Some(identifier) else None -} - -/** - * Camel endpoint for sending messages to and receiving replies from (untyped) actors. Actors - * are referenced using actor endpoint URIs of the following format: - * actor:<actorid>, - * actor:id:[<actorid>] and - * actor:uuid:[<actoruuid>], - * where <actorid> refers to ActorRef.id and - * <actoruuid> refers to the String-representation of ActorRef.uuid. In URIs that contain - * id: or uuid:, an actor identifier (id or uuid) is optional. In this - * case, the in-message of an exchange produced to this endpoint must contain a message header - * with name CamelActorIdentifier and a value that is the target actor's identifier. - * If the URI contains an actor identifier, a message with a CamelActorIdentifier - * header overrides the identifier in the endpoint URI. - * - * @see akka.camel.component.ActorComponent - * @see akka.camel.component.ActorProducer - * - * @author Martin Krasser - */ -class ActorEndpoint(uri: String, - comp: ActorComponent, - val idType: String, - val idValue: Option[String]) extends DefaultEndpoint(uri, comp) { - - /** - * Whether to block caller thread during two-way message exchanges with (untyped) actors. This is - * set via the blocking=true|false endpoint URI parameter. Default value is - * false. - */ - @BeanProperty var blocking: Boolean = false - - /** - * @throws UnsupportedOperationException - */ - def createConsumer(processor: Processor): Consumer = - throw new UnsupportedOperationException("actor consumer not supported yet") - - /** - * Creates a new ActorProducer instance initialized with this endpoint. - */ - def createProducer: ActorProducer = new ActorProducer(this) - - /** - * Returns true. - */ - def isSingleton: Boolean = true -}
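
A sketch of wiring a Camel route to an actor through this component, using the URI format documented above; the port, path and echo actor are illustrative:

    import akka.actor.Actor
    import akka.camel.{CamelContextManager, Message}
    import org.apache.camel.builder.RouteBuilder

    val echo = Actor.actorOf(new Actor {
      protected def receive = {
        case msg: Message => self.reply("received %s" format msg.bodyAs[String])
      }
    }).start

    CamelContextManager.mandatoryContext.addRoutes(new RouteBuilder {
      def configure = {
        from("jetty:http://localhost:8877/echo").to("actor:uuid:%s" format echo.uuid)
      }
    })

 - -/** - * Sends the in-message of an exchange to an (untyped) actor, identified by an - * actor endpoint URI or by a CamelActorIdentifier message header. - *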

- * - If the exchange pattern is out-capable and blocking is set to
- *   true then the producer waits for a reply, using the !! operator.
- * - If the exchange pattern is out-capable and blocking is set to
- *   false then the producer sends the message using the ! operator, together
- *   with a callback handler. The callback handler is an ActorRef that can be
- *   used by the receiving actor to asynchronously reply to the route that is sending the
- *   message.
- * - If the exchange pattern is in-only then the producer sends the message using the
- *   ! operator.
- * - * @see akka.camel.component.ActorComponent - * @see akka.camel.component.ActorEndpoint - * - * @author Martin Krasser - */ -class ActorProducer(val ep: ActorEndpoint) extends DefaultProducer(ep) with AsyncProcessor { - import ActorProducer._ - - private lazy val uuid = uuidFrom(ep.idValue.getOrElse(throw new ActorIdentifierNotSetException)) - - def process(exchange: Exchange) = - if (exchange.getPattern.isOutCapable) sendSync(exchange) else sendAsync(exchange) - - def process(exchange: Exchange, callback: AsyncCallback): Boolean = { - (exchange.getPattern.isOutCapable, ep.blocking) match { - case (true, true) => { - sendSync(exchange) - callback.done(true) - true - } - case (true, false) => { - sendAsync(exchange, Some(AsyncCallbackAdapter(exchange, callback))) - false - } - case (false, _) => { - sendAsync(exchange) - callback.done(true) - true - } - } - } - - private def sendSync(exchange: Exchange) = { - val actor = target(exchange) - val result: Any = actor !! requestFor(exchange) - - result match { - case Some(msg: Failure) => exchange.fromFailureMessage(msg) - case Some(msg) => exchange.fromResponseMessage(Message.canonicalize(msg)) - case None => throw new TimeoutException("timeout (%d ms) while waiting for a response from %s" - format (actor.timeout, ep.getEndpointUri)) - } - } - - private def sendAsync(exchange: Exchange, sender: Option[ActorRef] = None) = - target(exchange).!(requestFor(exchange))(sender) - - private def target(exchange: Exchange) = - targetOption(exchange) getOrElse (throw new ActorNotRegisteredException(ep.getEndpointUri)) - - private def targetOption(exchange: Exchange): Option[ActorRef] = ep.idType match { - case "id" => targetById(targetId(exchange)) - case "uuid" => targetByUuid(targetUuid(exchange)) - } - - private def targetId(exchange: Exchange) = exchange.getIn.getHeader(ActorComponent.ActorIdentifier) match { - case id: String => id - case null => ep.idValue.getOrElse(throw new ActorIdentifierNotSetException) - } - - private def targetUuid(exchange: Exchange) = exchange.getIn.getHeader(ActorComponent.ActorIdentifier) match { - case uuid: Uuid => uuid - case uuid: String => uuidFrom(uuid) - case null => uuid - } - - private def targetById(id: String) = ActorRegistry.actorsFor(id) match { - case actors if actors.length == 0 => None - case actors => Some(actors(0)) - } - - private def targetByUuid(uuid: Uuid) = ActorRegistry.actorFor(uuid) -} - -/** - * @author Martin Krasser - */ -private[camel] object ActorProducer { - def requestFor(exchange: Exchange) = - exchange.toRequestMessage(Map(Message.MessageExchangeId -> exchange.getExchangeId)) -} - -/** - * Thrown to indicate that an actor referenced by an endpoint URI cannot be - * found in the ActorRegistry. - * - * @author Martin Krasser - */ -class ActorNotRegisteredException(uri: String) extends RuntimeException { - override def getMessage = "%s not registered" format uri -} - -/** - * Thrown to indicate that no actor identifier has been set. - * - * @author Martin Krasser - */ -class ActorIdentifierNotSetException extends RuntimeException { - override def getMessage = "actor identifier not set" -} - -/** - * @author Martin Krasser - */ -private[akka] object AsyncCallbackAdapter { - /** - * Creates and starts an AsyncCallbackAdapter. - * - * @param exchange message exchange to write results to. - * @param callback callback object to generate completion notifications. 
- */ - def apply(exchange: Exchange, callback: AsyncCallback) = - new AsyncCallbackAdapter(exchange, callback).start -} - -/** - * Adapts an ActorRef to a Camel AsyncCallback. Used by receiving actors to reply - * asynchronously to Camel routes with ActorRef.reply. - *

- * Please note that this adapter can only be used locally at the moment, which should not - * be a problem in most situations since Camel endpoints are only activated for local actor references, - * never for remote references. - * - * @author Martin Krasser - */ -private[akka] class AsyncCallbackAdapter(exchange: Exchange, callback: AsyncCallback) extends ActorRef with ScalaActorRef { - - def start = { - _status = ActorRefInternals.RUNNING - this - } - - def stop() = { - _status = ActorRefInternals.SHUTDOWN - } - - /** - * Populates the initial exchange with the reply message and uses the - * callback handler to notify Camel about the asynchronous completion of the message - * exchange. - * - * @param message reply message - * @param sender ignored - */ - protected[akka] def postMessageToMailbox(message: Any, senderOption: Option[ActorRef]) = { - message match { - case msg: Failure => exchange.fromFailureMessage(msg) - case msg => exchange.fromResponseMessage(Message.canonicalize(msg)) - } - callback.done(false) - } - - def actorClass: Class[_ <: Actor] = unsupported - def actorClassName = unsupported - def dispatcher_=(md: MessageDispatcher): Unit = unsupported - def dispatcher: MessageDispatcher = unsupported - def makeRemote(hostname: String, port: Int): Unit = unsupported - def makeRemote(address: InetSocketAddress): Unit = unsupported - def homeAddress_=(address: InetSocketAddress): Unit = unsupported - def remoteAddress: Option[InetSocketAddress] = unsupported - def link(actorRef: ActorRef): Unit = unsupported - def unlink(actorRef: ActorRef): Unit = unsupported - def startLink(actorRef: ActorRef): Unit = unsupported - def startLinkRemote(actorRef: ActorRef, hostname: String, port: Int): Unit = unsupported - def spawn(clazz: Class[_ <: Actor]): ActorRef = unsupported - def spawnRemote(clazz: Class[_ <: Actor], hostname: String, port: Int): ActorRef = unsupported - def spawnLink(clazz: Class[_ <: Actor]): ActorRef = unsupported - def spawnLinkRemote(clazz: Class[_ <: Actor], hostname: String, port: Int): ActorRef = unsupported - def shutdownLinkedActors: Unit = unsupported - def supervisor: Option[ActorRef] = unsupported - protected[akka] def postMessageToMailboxAndCreateFutureResultWithTimeout[T](message: Any, timeout: Long, senderOption: Option[ActorRef], senderFuture: Option[CompletableFuture[T]]) = unsupported - protected[akka] def mailbox: AnyRef = unsupported - protected[akka] def mailbox_=(msg: AnyRef):AnyRef = unsupported - protected[akka] def restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit = unsupported - protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Unit = unsupported - protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable): Unit = unsupported - protected[akka] def linkedActors: JMap[Uuid, ActorRef] = unsupported - protected[akka] def linkedActorsAsList: List[ActorRef] = unsupported - protected[akka] def invoke(messageHandle: MessageInvocation): Unit = unsupported - protected[akka] def remoteAddress_=(addr: Option[InetSocketAddress]): Unit = unsupported - protected[akka] def registerSupervisorAsRemoteActor = unsupported - protected[akka] def supervisor_=(sup: Option[ActorRef]): Unit = unsupported - protected[akka] def actorInstance: AtomicReference[Actor] = unsupported - - private def unsupported = throw new UnsupportedOperationException("Not supported for %s" format classOf[AsyncCallbackAdapter].getName) -}
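
From the user's perspective the adapter stays invisible: a consumer actor simply replies, and for non-blocking exchanges the reply target is an AsyncCallbackAdapter rather than a regular actor. A sketch with an illustrative endpoint:

    import akka.actor.Actor
    import akka.camel.{Consumer, Message}

    class EchoConsumer extends Actor with Consumer {
      def endpointUri = "jetty:http://localhost:8877/echo" // illustrative

      // self.reply completes the Camel exchange; with blocking=false the
      // reply goes through an AsyncCallbackAdapter acting as the sender.
      protected def receive = {
        case msg: Message => self.reply("echo: %s" format msg.bodyAs[String])
      }
    }

 diff --git 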
a/akka-camel/src/main/scala/akka/component/TypedActorComponent.scala b/akka-camel/src/main/scala/akka/component/TypedActorComponent.scala deleted file mode 100644 index f4a7f1b099..0000000000 --- a/akka-camel/src/main/scala/akka/component/TypedActorComponent.scala +++ /dev/null @@ -1,111 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.camel.component - -import java.util.Map -import java.util.concurrent.ConcurrentHashMap -import org.apache.camel.CamelContext -import org.apache.camel.component.bean._ - -/** - * @author Martin Krasser - */ -object TypedActorComponent { - /** - * Default schema name for typed actor endpoint URIs. - */ - val InternalSchema = "typed-actor-internal" -} - -/** - * Camel component for exchanging messages with typed actors. This component - * tries to obtain the typed actor from its typedActorRegistry - * first. If it's not there it tries to obtain it from the CamelContext's registry. - * - * @see org.apache.camel.component.bean.BeanComponent - * - * @author Martin Krasser - */ -class TypedActorComponent extends BeanComponent { - val typedActorRegistry = new ConcurrentHashMap[String, AnyRef] - - /** - * Creates an org.apache.camel.component.bean.BeanEndpoint with a custom - * bean holder that uses typedActorRegistry for getting access to typed - * actors (beans). - * - * @see akka.camel.component.TypedActorHolder - */ - override def createEndpoint(uri: String, remaining: String, parameters: Map[String, AnyRef]) = { - val endpoint = new BeanEndpoint(uri, this) - endpoint.setBeanName(remaining) - endpoint.setBeanHolder(createBeanHolder(remaining)) - setProperties(endpoint.getProcessor, parameters) - endpoint - } - - private def createBeanHolder(beanName: String) = - new TypedActorHolder(typedActorRegistry, getCamelContext, beanName).createCacheHolder -} - -/** - * org.apache.camel.component.bean.BeanHolder implementation that uses a custom - * registry for getting access to typed actors. - * - * @author Martin Krasser - */ -class TypedActorHolder(typedActorRegistry: Map[String, AnyRef], context: CamelContext, name: String) - extends RegistryBean(context, name) { - - /** - * Returns an akka.camel.component.TypedActorInfo instance. - */ - override def getBeanInfo: BeanInfo = - new TypedActorInfo(getContext, getBean.getClass, getParameterMappingStrategy) - - /** - * Obtains a typed actor from typedActorRegistry. If the typed actor cannot - * be found then this method tries to obtain the actor from the CamelContext's registry. - * - * @return a typed actor or null. - */ - override def getBean: AnyRef = { - val bean = typedActorRegistry.get(getName) - if (bean eq null) super.getBean else bean - } -} - -/** - * Typed actor meta information. - * - * @author Martin Krasser - */ -class TypedActorInfo(context: CamelContext, clazz: Class[_], strategy: ParameterMappingStrategy) - extends BeanInfo(context, clazz, strategy) { - - /** - * Introspects AspectWerkz proxy classes. - * - * @param clazz AspectWerkz proxy class. - */ - protected override def introspect(clazz: Class[_]): Unit = { - - // TODO: fix target class detection in BeanInfo.introspect(Class) - // Camel assumes that classes containing a '$$' in the class name - // are classes generated with CGLIB. This conflicts with proxies - // created from interfaces with AspectWerkz. Once the fix is in - // place this method can be removed. 
- - for (method <- clazz.getDeclaredMethods) { - if (isValidMethod(clazz, method)) { - introspect(clazz, method) - } - } - val superclass = clazz.getSuperclass - if ((superclass ne null) && !superclass.equals(classOf[AnyRef])) { - introspect(superclass) - } - } -} diff --git a/akka-camel/src/test/java/akka/camel/ConsumerJavaTestBase.java b/akka-camel/src/test/java/akka/camel/ConsumerJavaTestBase.java deleted file mode 100644 index c34ce0cc2e..0000000000 --- a/akka-camel/src/test/java/akka/camel/ConsumerJavaTestBase.java +++ /dev/null @@ -1,59 +0,0 @@ -package akka.camel; - -import akka.actor.ActorRegistry; -import akka.actor.TypedActor; -import akka.actor.UntypedActor; -import akka.japi.SideEffect; - -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; - -import static akka.camel.CamelContextManager.*; -import static akka.camel.CamelServiceManager.*; - -import static org.junit.Assert.*; - -/** - * @author Martin Krasser - */ -public class ConsumerJavaTestBase { - - private SampleErrorHandlingTypedConsumer consumer; - - @BeforeClass - public static void setUpBeforeClass() { - startCamelService(); - } - - @AfterClass - public static void tearDownAfterClass() { - stopCamelService(); - ActorRegistry.shutdownAll(); - } - - @Test - public void shouldHandleExceptionThrownByActorAndGenerateCustomResponse() { - getMandatoryService().awaitEndpointActivation(1, new SideEffect() { - public void apply() { - UntypedActor.actorOf(SampleErrorHandlingConsumer.class).start(); - } - }); - String result = getMandatoryTemplate().requestBody("direct:error-handler-test-java", "hello", String.class); - assertEquals("error: hello", result); - } - - @Test - public void shouldHandleExceptionThrownByTypedActorAndGenerateCustomResponse() { - getMandatoryService().awaitEndpointActivation(1, new SideEffect() { - public void apply() { - consumer = TypedActor.newInstance( - SampleErrorHandlingTypedConsumer.class, - SampleErrorHandlingTypedConsumerImpl.class); - } - }); - String result = getMandatoryTemplate().requestBody("direct:error-handler-test-java-typed", "hello", String.class); - assertEquals("error: hello", result); - } - -} diff --git a/akka-camel/src/test/java/akka/camel/MessageJavaTestBase.java b/akka-camel/src/test/java/akka/camel/MessageJavaTestBase.java deleted file mode 100644 index 38e0b95692..0000000000 --- a/akka-camel/src/test/java/akka/camel/MessageJavaTestBase.java +++ /dev/null @@ -1,129 +0,0 @@ -package akka.camel; - -import org.apache.camel.NoTypeConversionAvailableException; -import org.junit.BeforeClass; -import org.junit.Test; - -import akka.camel.CamelContextManager; -import akka.camel.Message; -import akka.japi.Function; - -import java.io.InputStream; -import java.util.*; - -import static org.junit.Assert.*; - -/** - * @author Martin Krasser - */ -public class MessageJavaTestBase { - - @BeforeClass - public static void setUpBeforeClass() { - CamelContextManager.init(); - } - - @Test public void shouldConvertDoubleBodyToString() { - assertEquals("1.4", new Message("1.4").getBodyAs(String.class)); - } - - @Test(expected=NoTypeConversionAvailableException.class) - public void shouldThrowExceptionWhenConvertingDoubleBodyToInputStream() { - new Message(1.4).getBodyAs(InputStream.class); - } - - @Test public void shouldReturnDoubleHeader() { - Message message = new Message("test" , createMap("test", 1.4)); - assertEquals(1.4, message.getHeader("test")); - } - - @Test public void shouldConvertDoubleHeaderToString() { - Message message = new Message("test" , 
createMap("test", 1.4)); - assertEquals("1.4", message.getHeaderAs("test", String.class)); - } - - @Test public void shouldReturnSubsetOfHeaders() { - Message message = new Message("test" , createMap("A", "1", "B", "2")); - assertEquals(createMap("B", "2"), message.getHeaders(createSet("B"))); - } - - @Test(expected=UnsupportedOperationException.class) - public void shouldReturnSubsetOfHeadersUnmodifiable() { - Message message = new Message("test" , createMap("A", "1", "B", "2")); - message.getHeaders(createSet("B")).put("x", "y"); - } - - @Test public void shouldReturnAllHeaders() { - Message message = new Message("test" , createMap("A", "1", "B", "2")); - assertEquals(createMap("A", "1", "B", "2"), message.getHeaders()); - } - - @Test(expected=UnsupportedOperationException.class) - public void shouldReturnAllHeadersUnmodifiable() { - Message message = new Message("test" , createMap("A", "1", "B", "2")); - message.getHeaders().put("x", "y"); - } - - @Test public void shouldTransformBodyAndPreserveHeaders() { - assertEquals( - new Message("ab", createMap("A", "1")), - new Message("a" , createMap("A", "1")).transformBody((Function)new TestTransformer())); - } - - @Test public void shouldConvertBodyAndPreserveHeaders() { - assertEquals( - new Message("1.4", createMap("A", "1")), - new Message(1.4 , createMap("A", "1")).setBodyAs(String.class)); - } - - @Test public void shouldSetBodyAndPreserveHeaders() { - assertEquals( - new Message("test2" , createMap("A", "1")), - new Message("test1" , createMap("A", "1")).setBody("test2")); - } - - @Test public void shouldSetHeadersAndPreserveBody() { - assertEquals( - new Message("test1" , createMap("C", "3")), - new Message("test1" , createMap("A", "1")).setHeaders(createMap("C", "3"))); - } - - @Test public void shouldAddHeaderAndPreserveBodyAndHeaders() { - assertEquals( - new Message("test1" , createMap("A", "1", "B", "2")), - new Message("test1" , createMap("A", "1")).addHeader("B", "2")); - } - - @Test public void shouldAddHeadersAndPreserveBodyAndHeaders() { - assertEquals( - new Message("test1" , createMap("A", "1", "B", "2")), - new Message("test1" , createMap("A", "1")).addHeaders(createMap("B", "2"))); - } - - @Test public void shouldRemoveHeadersAndPreserveBodyAndRemainingHeaders() { - assertEquals( - new Message("test1" , createMap("A", "1")), - new Message("test1" , createMap("A", "1", "B", "2")).removeHeader("B")); - } - - private static Set createSet(String... entries) { - HashSet set = new HashSet(); - set.addAll(Arrays.asList(entries)); - return set; - } - - private static Map createMap(Object... 
pairs) { - HashMap map = new HashMap(); - for (int i = 0; i < pairs.length; i += 2) { - map.put((String)pairs[i], pairs[i+1]); - } - return map; - } - - private static class TestTransformer implements Function { - public String apply(String param) { - return param + "b"; - } - } - -} diff --git a/akka-camel/src/test/java/akka/camel/SampleErrorHandlingConsumer.java b/akka-camel/src/test/java/akka/camel/SampleErrorHandlingConsumer.java deleted file mode 100644 index 4e35d4e6ab..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleErrorHandlingConsumer.java +++ /dev/null @@ -1,34 +0,0 @@ -package akka.camel; - -import org.apache.camel.builder.Builder; -import org.apache.camel.model.ProcessorDefinition; -import org.apache.camel.model.RouteDefinition; - -/** - * @author Martin Krasser - */ -public class SampleErrorHandlingConsumer extends UntypedConsumerActor { - - public String getEndpointUri() { - return "direct:error-handler-test-java"; - } - - public boolean isBlocking() { - return true; - } - - public void preStart() { - onRouteDefinition(new RouteDefinitionHandler() { - public ProcessorDefinition onRouteDefinition(RouteDefinition rd) { - return rd.onException(Exception.class).handled(true).transform(Builder.exceptionMessage()).end(); - } - }); - } - - public void onReceive(Object message) throws Exception { - Message msg = (Message)message; - String body = msg.getBodyAs(String.class); - throw new Exception(String.format("error: %s", body)); - } - -} diff --git a/akka-camel/src/test/java/akka/camel/SampleErrorHandlingTypedConsumer.java b/akka-camel/src/test/java/akka/camel/SampleErrorHandlingTypedConsumer.java deleted file mode 100644 index d8a8c79440..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleErrorHandlingTypedConsumer.java +++ /dev/null @@ -1,11 +0,0 @@ -package akka.camel; - -/** - * @author Martin Krasser - */ -public interface SampleErrorHandlingTypedConsumer { - - @consume(value="direct:error-handler-test-java-typed", routeDefinitionHandler=SampleRouteDefinitionHandler.class) - String willFail(String s); - -} diff --git a/akka-camel/src/test/java/akka/camel/SampleErrorHandlingTypedConsumerImpl.java b/akka-camel/src/test/java/akka/camel/SampleErrorHandlingTypedConsumerImpl.java deleted file mode 100644 index cfa42a7521..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleErrorHandlingTypedConsumerImpl.java +++ /dev/null @@ -1,14 +0,0 @@ -package akka.camel; - -import akka.actor.TypedActor; - -/** - * @author Martin Krasser - */ -public class SampleErrorHandlingTypedConsumerImpl extends TypedActor implements SampleErrorHandlingTypedConsumer { - - public String willFail(String s) { - throw new RuntimeException(String.format("error: %s", s)); - } - -} diff --git a/akka-camel/src/test/java/akka/camel/SampleRemoteTypedConsumer.java b/akka-camel/src/test/java/akka/camel/SampleRemoteTypedConsumer.java deleted file mode 100644 index 41a3c3f057..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleRemoteTypedConsumer.java +++ /dev/null @@ -1,12 +0,0 @@ -package akka.camel; - -import akka.camel.consume; - -/** - * @author Martin Krasser - */ -public interface SampleRemoteTypedConsumer { - - @consume("direct:remote-typed-consumer") - public String foo(String s); -} diff --git a/akka-camel/src/test/java/akka/camel/SampleRemoteTypedConsumerImpl.java b/akka-camel/src/test/java/akka/camel/SampleRemoteTypedConsumerImpl.java deleted file mode 100644 index d7fb463b44..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleRemoteTypedConsumerImpl.java +++ 
/dev/null @@ -1,14 +0,0 @@ -package akka.camel; - -import akka.actor.TypedActor; - -/** - * @author Martin Krasser - */ -public class SampleRemoteTypedConsumerImpl extends TypedActor implements SampleRemoteTypedConsumer { - - public String foo(String s) { - return String.format("remote typed actor: %s", s); - } - -} diff --git a/akka-camel/src/test/java/akka/camel/SampleRemoteUntypedConsumer.java b/akka-camel/src/test/java/akka/camel/SampleRemoteUntypedConsumer.java deleted file mode 100644 index 85ccb2638b..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleRemoteUntypedConsumer.java +++ /dev/null @@ -1,29 +0,0 @@ -package akka.camel; - -import akka.camel.RemoteUntypedConsumerActor; - -/** - * @author Martin Krasser - */ -public class SampleRemoteUntypedConsumer extends RemoteUntypedConsumerActor { - - public SampleRemoteUntypedConsumer() { - this("localhost", 7774); - } - - public SampleRemoteUntypedConsumer(String host, int port) { - super(host, port); - } - - public String getEndpointUri() { - return "direct:remote-untyped-consumer"; - } - - public void onReceive(Object message) { - Message msg = (Message)message; - String body = msg.getBodyAs(String.class); - String header = msg.getHeaderAs("test", String.class); - getContext().replySafe(String.format("%s %s", body, header)); - } - -} diff --git a/akka-camel/src/test/java/akka/camel/SampleRouteDefinitionHandler.java b/akka-camel/src/test/java/akka/camel/SampleRouteDefinitionHandler.java deleted file mode 100644 index f1a99aa7d4..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleRouteDefinitionHandler.java +++ /dev/null @@ -1,14 +0,0 @@ -package akka.camel; - -import org.apache.camel.builder.Builder; -import org.apache.camel.model.ProcessorDefinition; -import org.apache.camel.model.RouteDefinition; - -/** - * @author Martin Krasser - */ -public class SampleRouteDefinitionHandler implements RouteDefinitionHandler { - public ProcessorDefinition onRouteDefinition(RouteDefinition rd) { - return rd.onException(Exception.class).handled(true).transform(Builder.exceptionMessage()).end(); - } -} diff --git a/akka-camel/src/test/java/akka/camel/SampleTypedActor.java b/akka-camel/src/test/java/akka/camel/SampleTypedActor.java deleted file mode 100644 index 798d07a66c..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleTypedActor.java +++ /dev/null @@ -1,9 +0,0 @@ -package akka.camel; - -/** - * @author Martin Krasser - */ -public interface SampleTypedActor { - - public String foo(String s); -} diff --git a/akka-camel/src/test/java/akka/camel/SampleTypedActorImpl.java b/akka-camel/src/test/java/akka/camel/SampleTypedActorImpl.java deleted file mode 100644 index 773e3ec3ec..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleTypedActorImpl.java +++ /dev/null @@ -1,14 +0,0 @@ -package akka.camel; - -import akka.actor.TypedActor; - -/** - * @author Martin Krasser - */ -public class SampleTypedActorImpl extends TypedActor implements SampleTypedActor { - - public String foo(String s) { - return String.format("foo: %s", s); - } - -} diff --git a/akka-camel/src/test/java/akka/camel/SampleTypedConsumer.java b/akka-camel/src/test/java/akka/camel/SampleTypedConsumer.java deleted file mode 100644 index 26283d8e61..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleTypedConsumer.java +++ /dev/null @@ -1,20 +0,0 @@ -package akka.camel; - -import org.apache.camel.Body; -import org.apache.camel.Header; - -import akka.camel.consume; - -/** - * @author Martin Krasser - */ -public interface SampleTypedConsumer { - - public 
String m1(String b, String h); - public String m2(@Body String b, @Header("test") String h); - public String m3(@Body String b, @Header("test") String h); - - @consume("direct:m4") - public String m4(@Body String b, @Header("test") String h); - public void m5(@Body String b, @Header("test") String h); -} diff --git a/akka-camel/src/test/java/akka/camel/SampleTypedConsumerImpl.java b/akka-camel/src/test/java/akka/camel/SampleTypedConsumerImpl.java deleted file mode 100644 index 3bbe7a9442..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleTypedConsumerImpl.java +++ /dev/null @@ -1,30 +0,0 @@ -package akka.camel; - -import akka.actor.TypedActor; - -/** - * @author Martin Krasser - */ -public class SampleTypedConsumerImpl extends TypedActor implements SampleTypedConsumer { - - public String m1(String b, String h) { - return "m1: " + b + " " + h; - } - - @consume("direct:m2") - public String m2(String b, String h) { - return "m2: " + b + " " + h; - } - - @consume("direct:m3") - public String m3(String b, String h) { - return "m3: " + b + " " + h; - } - - public String m4(String b, String h) { - return "m4: " + b + " " + h; - } - - public void m5(String b, String h) { - } -} diff --git a/akka-camel/src/test/java/akka/camel/SampleTypedSingleConsumer.java b/akka-camel/src/test/java/akka/camel/SampleTypedSingleConsumer.java deleted file mode 100644 index ff0b7bc715..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleTypedSingleConsumer.java +++ /dev/null @@ -1,13 +0,0 @@ -package akka.camel; - -import akka.camel.consume; - -/** - * @author Martin Krasser - */ -public interface SampleTypedSingleConsumer { - - @consume("direct:foo") - public void foo(String b); - -} diff --git a/akka-camel/src/test/java/akka/camel/SampleTypedSingleConsumerImpl.java b/akka-camel/src/test/java/akka/camel/SampleTypedSingleConsumerImpl.java deleted file mode 100644 index 27fbfdaa0d..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleTypedSingleConsumerImpl.java +++ /dev/null @@ -1,13 +0,0 @@ -package akka.camel; - -import akka.actor.TypedActor; - -/** - * @author Martin Krasser - */ -public class SampleTypedSingleConsumerImpl extends TypedActor implements SampleTypedSingleConsumer { - - public void foo(String b) { - } - -} diff --git a/akka-camel/src/test/java/akka/camel/SampleUntypedActor.java b/akka-camel/src/test/java/akka/camel/SampleUntypedActor.java deleted file mode 100644 index 56614a6b80..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleUntypedActor.java +++ /dev/null @@ -1,12 +0,0 @@ -package akka.camel; - -import akka.actor.UntypedActor; - -/** - * @author Martin Krasser - */ -public class SampleUntypedActor extends UntypedActor { - public void onReceive(Object message) { - logger().debug("Yay! 
I haz a message!"); - } -} diff --git a/akka-camel/src/test/java/akka/camel/SampleUntypedConsumer.java b/akka-camel/src/test/java/akka/camel/SampleUntypedConsumer.java deleted file mode 100644 index 99300836c1..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleUntypedConsumer.java +++ /dev/null @@ -1,21 +0,0 @@ -package akka.camel; - -import akka.camel.UntypedConsumerActor; - -/** - * @author Martin Krasser - */ -public class SampleUntypedConsumer extends UntypedConsumerActor { - - public String getEndpointUri() { - return "direct:test-untyped-consumer"; - } - - public void onReceive(Object message) { - Message msg = (Message)message; - String body = msg.getBodyAs(String.class); - String header = msg.getHeaderAs("test", String.class); - getContext().replySafe(String.format("%s %s", body, header)); - } - -} diff --git a/akka-camel/src/test/java/akka/camel/SampleUntypedConsumerBlocking.java b/akka-camel/src/test/java/akka/camel/SampleUntypedConsumerBlocking.java deleted file mode 100644 index b5b22a04ae..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleUntypedConsumerBlocking.java +++ /dev/null @@ -1,23 +0,0 @@ -package akka.camel; - -/** - * @author Martin Krasser - */ -public class SampleUntypedConsumerBlocking extends UntypedConsumerActor { - - public String getEndpointUri() { - return "direct:test-untyped-consumer-blocking"; - } - - public boolean isBlocking() { - return true; - } - - public void onReceive(Object message) { - Message msg = (Message)message; - String body = msg.getBodyAs(String.class); - String header = msg.getHeaderAs("test", String.class); - getContext().replySafe(String.format("%s %s", body, header)); - } - -} diff --git a/akka-camel/src/test/java/akka/camel/SampleUntypedForwardingProducer.java b/akka-camel/src/test/java/akka/camel/SampleUntypedForwardingProducer.java deleted file mode 100644 index 3161c0f2d8..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleUntypedForwardingProducer.java +++ /dev/null @@ -1,18 +0,0 @@ -package akka.camel; - -/** - * @author Martin Krasser - */ -public class SampleUntypedForwardingProducer extends UntypedProducerActor { - - public String getEndpointUri() { - return "direct:producer-test-1"; - } - - @Override - public void onReceiveAfterProduce(Object message) { - Message msg = (Message)message; - String body = msg.getBodyAs(String.class); - CamelContextManager.getMandatoryTemplate().sendBody("direct:forward-test-1", body); - } -} diff --git a/akka-camel/src/test/java/akka/camel/SampleUntypedReplyingProducer.java b/akka-camel/src/test/java/akka/camel/SampleUntypedReplyingProducer.java deleted file mode 100644 index 09b7b86502..0000000000 --- a/akka-camel/src/test/java/akka/camel/SampleUntypedReplyingProducer.java +++ /dev/null @@ -1,12 +0,0 @@ -package akka.camel; - -/** - * @author Martin Krasser - */ -public class SampleUntypedReplyingProducer extends UntypedProducerActor { - - public String getEndpointUri() { - return "direct:producer-test-1"; - } - -} diff --git a/akka-camel/src/test/scala/akka/CamelContextLifecycleTest.scala b/akka-camel/src/test/scala/akka/CamelContextLifecycleTest.scala deleted file mode 100644 index 910373738f..0000000000 --- a/akka-camel/src/test/scala/akka/CamelContextLifecycleTest.scala +++ /dev/null @@ -1,36 +0,0 @@ -package akka.camel - -import org.apache.camel.impl.{DefaultProducerTemplate, DefaultCamelContext} -import org.junit.Test -import org.scalatest.junit.JUnitSuite - -class CamelContextLifecycleTest extends JUnitSuite with CamelContextLifecycle { - @Test def 
shouldManageCustomCamelContext { - assert(context === None) - assert(template === None) - - intercept[IllegalStateException] { mandatoryContext } - intercept[IllegalStateException] { mandatoryTemplate } - - val ctx = new TestCamelContext - assert(ctx.isStreamCaching === false) - - init(ctx) - - assert(mandatoryContext.isStreamCaching === true) - assert(!mandatoryContext.asInstanceOf[TestCamelContext].isStarted) - assert(mandatoryTemplate.asInstanceOf[DefaultProducerTemplate].isStarted) - - start - - assert(mandatoryContext.asInstanceOf[TestCamelContext].isStarted) - assert(mandatoryTemplate.asInstanceOf[DefaultProducerTemplate].isStarted) - - stop - - assert(!mandatoryContext.asInstanceOf[TestCamelContext].isStarted) - assert(!mandatoryTemplate.asInstanceOf[DefaultProducerTemplate].isStarted) - } - - class TestCamelContext extends DefaultCamelContext -} diff --git a/akka-camel/src/test/scala/akka/CamelExchangeAdapterTest.scala b/akka-camel/src/test/scala/akka/CamelExchangeAdapterTest.scala deleted file mode 100644 index 3996179b5b..0000000000 --- a/akka-camel/src/test/scala/akka/CamelExchangeAdapterTest.scala +++ /dev/null @@ -1,109 +0,0 @@ -package akka.camel - -import org.apache.camel.impl.{DefaultCamelContext, DefaultExchange} -import org.apache.camel.ExchangePattern -import org.junit.Test -import org.scalatest.junit.JUnitSuite - -class CamelExchangeAdapterTest extends JUnitSuite { - import CamelMessageConversion.toExchangeAdapter - - @Test def shouldSetInMessageFromRequestMessage = { - val e1 = sampleInOnly.fromRequestMessage(Message("x")) - assert(e1.getIn.getBody === "x") - val e2 = sampleInOut.fromRequestMessage(Message("y")) - assert(e2.getIn.getBody === "y") - } - - @Test def shouldSetOutMessageFromResponseMessage = { - val e1 = sampleInOut.fromResponseMessage(Message("y")) - assert(e1.getOut.getBody === "y") - } - - @Test def shouldSetInMessageFromResponseMessage = { - val e1 = sampleInOnly.fromResponseMessage(Message("x")) - assert(e1.getIn.getBody === "x") - } - - @Test def shouldSetExceptionFromFailureMessage = { - val e1 = sampleInOnly.fromFailureMessage(Failure(new Exception("test1"))) - assert(e1.getException.getMessage === "test1") - val e2 = sampleInOut.fromFailureMessage(Failure(new Exception("test2"))) - assert(e2.getException.getMessage === "test2") - } - - @Test def shouldCreateRequestMessageFromInMessage = { - val m = sampleInOnly.toRequestMessage - assert(m === Message("test-in", Map("key-in" -> "val-in"))) - } - - @Test def shouldCreateResponseMessageFromInMessage = { - val m = sampleInOnly.toResponseMessage - assert(m === Message("test-in", Map("key-in" -> "val-in"))) - } - - @Test def shouldCreateResponseMessageFromOutMessage = { - val m = sampleInOut.toResponseMessage - assert(m === Message("test-out", Map("key-out" -> "val-out"))) - } - - @Test def shouldCreateFailureMessageFromExceptionAndInMessage = { - val e1 = sampleInOnly - e1.setException(new Exception("test1")) - assert(e1.toFailureMessage.cause.getMessage === "test1") - assert(e1.toFailureMessage.headers("key-in") === "val-in") - } - - @Test def shouldCreateFailureMessageFromExceptionAndOutMessage = { - val e1 = sampleInOut - e1.setException(new Exception("test2")) - assert(e1.toFailureMessage.cause.getMessage === "test2") - assert(e1.toFailureMessage.headers("key-out") === "val-out") - } - - @Test def shouldCreateRequestMessageFromInMessageWithAdditionalHeader = { - val m = sampleInOnly.toRequestMessage(Map("x" -> "y")) - assert(m === Message("test-in", Map("key-in" -> "val-in", "x" -> "y"))) - } - - 
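// Editor's aside (not part of the deleted file): the *WithAdditionalHeader
// tests around this point pin down the header-merge rule of the
// to*Message(Map) variants: headers already present on the Camel message are
// kept, and the entries of the supplied map are layered on top. Using the
// imports at the top of this suite, the rule can be exercised standalone:
val ex = new DefaultExchange(new DefaultCamelContext)
ex.getIn.setBody("ping")
ex.getIn.setHeader("k", "v")
val merged = ex.toRequestMessage(Map("x" -> "y"))
// merged == Message("ping", Map("k" -> "v", "x" -> "y")), per the tests here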
@Test def shouldCreateResponseMessageFromInMessageWithAdditionalHeader = { - val m = sampleInOnly.toResponseMessage(Map("x" -> "y")) - assert(m === Message("test-in", Map("key-in" -> "val-in", "x" -> "y"))) - } - - @Test def shouldCreateResponseMessageFromOutMessageWithAdditionalHeader = { - val m = sampleInOut.toResponseMessage(Map("x" -> "y")) - assert(m === Message("test-out", Map("key-out" -> "val-out", "x" -> "y"))) - } - - @Test def shouldCreateFailureMessageFromExceptionAndInMessageWithAdditionalHeader = { - val e1 = sampleInOnly - e1.setException(new Exception("test1")) - assert(e1.toFailureMessage.cause.getMessage === "test1") - val headers = e1.toFailureMessage(Map("x" -> "y")).headers - assert(headers("key-in") === "val-in") - assert(headers("x") === "y") - } - - @Test def shouldCreateFailureMessageFromExceptionAndOutMessageWithAdditionalHeader = { - val e1 = sampleInOut - e1.setException(new Exception("test2")) - assert(e1.toFailureMessage.cause.getMessage === "test2") - val headers = e1.toFailureMessage(Map("x" -> "y")).headers - assert(headers("key-out") === "val-out") - assert(headers("x") === "y") - } - - private def sampleInOnly = sampleExchange(ExchangePattern.InOnly) - private def sampleInOut = sampleExchange(ExchangePattern.InOut) - - private def sampleExchange(pattern: ExchangePattern) = { - val exchange = new DefaultExchange(new DefaultCamelContext) - exchange.getIn.setBody("test-in") - exchange.getOut.setBody("test-out") - exchange.getIn.setHeader("key-in", "val-in") - exchange.getOut.setHeader("key-out", "val-out") - exchange.setPattern(pattern) - exchange - } -} diff --git a/akka-camel/src/test/scala/akka/CamelMessageAdapterTest.scala b/akka-camel/src/test/scala/akka/CamelMessageAdapterTest.scala deleted file mode 100644 index 0c20ae1c29..0000000000 --- a/akka-camel/src/test/scala/akka/CamelMessageAdapterTest.scala +++ /dev/null @@ -1,38 +0,0 @@ -package akka.camel - -import org.apache.camel.impl.DefaultMessage -import org.junit.Test -import org.scalatest.junit.JUnitSuite - -class CamelMessageAdapterTest extends JUnitSuite { - import CamelMessageConversion.toMessageAdapter - - @Test def shouldOverwriteBodyAndAddHeader = { - val cm = sampleMessage.fromMessage(Message("blah", Map("key" -> "baz"))) - assert(cm.getBody === "blah") - assert(cm.getHeader("foo") === "bar") - assert(cm.getHeader("key") === "baz") - } - - @Test def shouldCreateMessageWithBodyAndHeader = { - val m = sampleMessage.toMessage - assert(m.body === "test") - assert(m.headers("foo") === "bar") - } - - @Test def shouldCreateMessageWithBodyAndHeaderAndCustomHeader = { - val m = sampleMessage.toMessage(Map("key" -> "baz")) - assert(m.body === "test") - assert(m.headers("foo") === "bar") - assert(m.headers("key") === "baz") - } - - private[camel] def sampleMessage = { - val message = new DefaultMessage - message.setBody("test") - message.setHeader("foo", "bar") - message - } - - -} diff --git a/akka-camel/src/test/scala/akka/CamelServiceManagerTest.scala b/akka-camel/src/test/scala/akka/CamelServiceManagerTest.scala deleted file mode 100644 index 48ab29c6b8..0000000000 --- a/akka-camel/src/test/scala/akka/CamelServiceManagerTest.scala +++ /dev/null @@ -1,62 +0,0 @@ -package akka.camel - -import org.scalatest.{BeforeAndAfterAll, WordSpec} -import org.scalatest.matchers.MustMatchers - -import akka.actor.ActorRegistry - -/** - * @author Martin Krasser - */ -class CamelServiceManagerTest extends WordSpec with BeforeAndAfterAll with MustMatchers { - - override def afterAll = { - 
CamelServiceManager.stopCamelService - ActorRegistry.shutdownAll - } - - "A CamelServiceManager" when { - "the startCamelService method has been called" must { - "have registered the started CamelService instance" in { - val service = CamelServiceManager.startCamelService - CamelServiceManager.mandatoryService must be theSameInstanceAs (service) - } - } - "the stopCamelService method has been called" must { - "have unregistered the current CamelService instance" in { - val service = CamelServiceManager.stopCamelService - CamelServiceManager.service must be (None) - } - } - } - - "A CamelServiceManager" when { - val service = CamelServiceFactory.createCamelService - "a CamelService instance has been started externally" must { - "have registered the started CamelService instance" in { - service.start - CamelServiceManager.mandatoryService must be theSameInstanceAs (service) - } - } - "the current CamelService instance has been stopped externally" must { - "have unregistered the current CamelService instance" in { - service.stop - CamelServiceManager.service must be (None) - } - } - } - - "A CamelServiceManager" when { - "a CamelService has been started" must { - "not allow further CamelService instances to be started" in { - CamelServiceManager.startCamelService - intercept[IllegalStateException] { CamelServiceManager.startCamelService } - } - } - "a CamelService has been stopped" must { - "only allow the current CamelService instance to be stopped" in { - intercept[IllegalStateException] { CamelServiceFactory.createCamelService.stop } - } - } - } -} diff --git a/akka-camel/src/test/scala/akka/ConsumerJavaTest.scala b/akka-camel/src/test/scala/akka/ConsumerJavaTest.scala deleted file mode 100644 index 48741dda96..0000000000 --- a/akka-camel/src/test/scala/akka/ConsumerJavaTest.scala +++ /dev/null @@ -1,5 +0,0 @@ -package akka.camel - -import org.scalatest.junit.JUnitSuite - -class ConsumerJavaTest extends ConsumerJavaTestBase with JUnitSuite \ No newline at end of file diff --git a/akka-camel/src/test/scala/akka/ConsumerRegisteredTest.scala b/akka-camel/src/test/scala/akka/ConsumerRegisteredTest.scala deleted file mode 100644 index e85c5f905c..0000000000 --- a/akka-camel/src/test/scala/akka/ConsumerRegisteredTest.scala +++ /dev/null @@ -1,63 +0,0 @@ -package akka.camel - -import org.junit.Test -import org.scalatest.junit.JUnitSuite -import akka.actor.{ActorRef, Actor, UntypedActor} - -class ConsumerRegisteredTest extends JUnitSuite { - import ConsumerRegisteredTest._ - - @Test def shouldCreateSomeNonBlockingPublishRequestFromConsumer = { - val c = Actor.actorOf[ConsumerActor1] - val event = ConsumerActorRegistered.forConsumer(c) - assert(event === Some(ConsumerActorRegistered(c, consumerOf(c)))) - } - - @Test def shouldCreateSomeBlockingPublishRequestFromConsumer = { - val c = Actor.actorOf[ConsumerActor2] - val event = ConsumerActorRegistered.forConsumer(c) - assert(event === Some(ConsumerActorRegistered(c, consumerOf(c)))) - } - - @Test def shouldCreateNoneFromConsumer = { - val event = ConsumerActorRegistered.forConsumer(Actor.actorOf[PlainActor]) - assert(event === None) - } - - @Test def shouldCreateSomeNonBlockingPublishRequestFromUntypedConsumer = { - val uc = UntypedActor.actorOf(classOf[SampleUntypedConsumer]) - val event = ConsumerActorRegistered.forConsumer(uc) - assert(event === Some(ConsumerActorRegistered(uc, consumerOf(uc)))) - } - - @Test def shouldCreateSomeBlockingPublishRequestFromUntypedConsumer = { - val uc =
UntypedActor.actorOf(classOf[SampleUntypedConsumerBlocking]) - val event = ConsumerActorRegistered.forConsumer(uc) - assert(event === Some(ConsumerActorRegistered(uc, consumerOf(uc)))) - } - - @Test def shouldCreateNoneFromUntypedConsumer = { - val a = UntypedActor.actorOf(classOf[SampleUntypedActor]) - val event = ConsumerActorRegistered.forConsumer(a) - assert(event === None) - } - - private def consumerOf(ref: ActorRef) = ref.actor.asInstanceOf[Consumer] -} - -object ConsumerRegisteredTest { - class ConsumerActor1 extends Actor with Consumer { - def endpointUri = "mock:test1" - protected def receive = null - } - - class ConsumerActor2 extends Actor with Consumer { - def endpointUri = "mock:test2" - override def blocking = true - protected def receive = null - } - - class PlainActor extends Actor { - protected def receive = null - } -} diff --git a/akka-camel/src/test/scala/akka/ConsumerScalaTest.scala b/akka-camel/src/test/scala/akka/ConsumerScalaTest.scala deleted file mode 100644 index ddbe757a3f..0000000000 --- a/akka-camel/src/test/scala/akka/ConsumerScalaTest.scala +++ /dev/null @@ -1,271 +0,0 @@ -package akka.camel - -import java.util.concurrent.{TimeoutException, CountDownLatch, TimeUnit} - -import org.apache.camel.CamelExecutionException -import org.apache.camel.builder.Builder -import org.apache.camel.model.RouteDefinition -import org.scalatest.{BeforeAndAfterAll, WordSpec} -import org.scalatest.matchers.MustMatchers - -import akka.actor.Actor._ -import akka.actor._ - -/** - * @author Martin Krasser - */ -class ConsumerScalaTest extends WordSpec with BeforeAndAfterAll with MustMatchers { - import CamelContextManager.mandatoryTemplate - import ConsumerScalaTest._ - - var service: CamelService = _ - - override protected def beforeAll = { - ActorRegistry.shutdownAll - // create new CamelService instance - service = CamelServiceFactory.createCamelService - // Register publish requestor as listener - service.registerPublishRequestor - // register test consumer before starting the CamelService - actorOf(new TestConsumer("direct:publish-test-1")).start - // start consumer publisher, otherwise we cannot set message - // count expectations in the next step (needed for testing only). 
- service.consumerPublisher.start - service.awaitEndpointActivation(1) { - service.start - } must be (true) - } - - override protected def afterAll = { - service.stop - ActorRegistry.shutdownAll - } - - "A responding consumer" when { - val consumer = actorOf(new TestConsumer("direct:publish-test-2")) - "started before starting the CamelService" must { - "support an in-out message exchange via its endpoint" in { - mandatoryTemplate.requestBody("direct:publish-test-1", "msg1") must equal ("received msg1") - } - } - "not started" must { - "not have an associated endpoint in the CamelContext" in { - CamelContextManager.mandatoryContext.hasEndpoint("direct:publish-test-2") must be (null) - } - } - "started" must { - "support an in-out message exchange via its endpoint" in { - service.awaitEndpointActivation(1) { - consumer.start - } must be (true) - mandatoryTemplate.requestBody("direct:publish-test-2", "msg2") must equal ("received msg2") - } - "have an associated endpoint in the CamelContext" in { - CamelContextManager.mandatoryContext.hasEndpoint("direct:publish-test-2") must not be (null) - } - } - "stopped" must { - "not support an in-out message exchange via its endpoint" in { - service.awaitEndpointDeactivation(1) { - consumer.stop - } must be (true) - intercept[CamelExecutionException] { - mandatoryTemplate.requestBody("direct:publish-test-2", "msg2") - } - } - } - } - - "A responding, typed consumer" when { - var actor: SampleTypedConsumer = null - "started" must { - "support in-out message exchanges via its endpoints" in { - service.awaitEndpointActivation(3) { - actor = TypedActor.newInstance(classOf[SampleTypedConsumer], classOf[SampleTypedConsumerImpl]) - } must be (true) - mandatoryTemplate.requestBodyAndHeader("direct:m2", "x", "test", "y") must equal ("m2: x y") - mandatoryTemplate.requestBodyAndHeader("direct:m3", "x", "test", "y") must equal ("m3: x y") - mandatoryTemplate.requestBodyAndHeader("direct:m4", "x", "test", "y") must equal ("m4: x y") - } - } - "stopped" must { - "not support in-out message exchanges via its endpoints" in { - service.awaitEndpointDeactivation(3) { - TypedActor.stop(actor) - } must be (true) - intercept[CamelExecutionException] { - mandatoryTemplate.requestBodyAndHeader("direct:m2", "x", "test", "y") - } - intercept[CamelExecutionException] { - mandatoryTemplate.requestBodyAndHeader("direct:m3", "x", "test", "y") - } - intercept[CamelExecutionException] { - mandatoryTemplate.requestBodyAndHeader("direct:m4", "x", "test", "y") - } - } - } - } - - "A responding, typed consumer (Scala)" when { - var actor: TestTypedConsumer = null - "started" must { - "support in-out message exchanges via its endpoints" in { - service.awaitEndpointActivation(2) { - actor = TypedActor.newInstance(classOf[TestTypedConsumer], classOf[TestTypedConsumerImpl]) - } must be (true) - mandatoryTemplate.requestBody("direct:publish-test-3", "x") must equal ("foo: x") - mandatoryTemplate.requestBody("direct:publish-test-4", "x") must equal ("bar: x") - } - } - "stopped" must { - "not support in-out message exchanges via its endpoints" in { - service.awaitEndpointDeactivation(2) { - TypedActor.stop(actor) - } must be (true) - intercept[CamelExecutionException] { - mandatoryTemplate.requestBody("direct:publish-test-3", "x") - } - intercept[CamelExecutionException] { - mandatoryTemplate.requestBody("direct:publish-test-4", "x") - } - } - } - } - - "A responding, untyped consumer" when { - val consumer = UntypedActor.actorOf(classOf[SampleUntypedConsumer]) - "started" must { - "support 
an in-out message exchange via its endpoint" in { - service.awaitEndpointActivation(1) { - consumer.start - } must be (true) - mandatoryTemplate.requestBodyAndHeader("direct:test-untyped-consumer", "x", "test", "y") must equal ("x y") - } - } - "stopped" must { - "not support an in-out message exchange via its endpoint" in { - service.awaitEndpointDeactivation(1) { - consumer.stop - } must be (true) - intercept[CamelExecutionException] { - mandatoryTemplate.sendBodyAndHeader("direct:test-untyped-consumer", "blah", "test", "blub") - } - } - } - } - - "A non-responding, blocking consumer" when { - "receiving an in-out message exchange" must { - "lead to a TimeoutException" in { - service.awaitEndpointActivation(1) { - actorOf(new TestBlocker("direct:publish-test-5")).start - } must be (true) - - try { - mandatoryTemplate.requestBody("direct:publish-test-5", "msg3") - fail("expected TimeoutException not thrown") - } catch { - case e => { - assert(e.getCause.isInstanceOf[TimeoutException]) - } - } - } - } - } - - "A responding, blocking consumer" when { - "activated with a custom error handler" must { - "handle thrown exceptions by generating a custom response" in { - service.awaitEndpointActivation(1) { - actorOf[ErrorHandlingConsumer].start - } must be (true) - mandatoryTemplate.requestBody("direct:error-handler-test", "hello") must equal ("error: hello") - - } - } - "activated with a custom redelivery handler" must { - "handle thrown exceptions by redelivering the initial message" in { - service.awaitEndpointActivation(1) { - actorOf[RedeliveringConsumer].start - } must be (true) - mandatoryTemplate.requestBody("direct:redelivery-test", "hello") must equal ("accepted: hello") - - } - } - } -} - -object ConsumerScalaTest { - trait BlockingConsumer extends Consumer { self: Actor => - override def blocking = true - } - - class TestConsumer(uri: String) extends Actor with Consumer { - def endpointUri = uri - protected def receive = { - case msg: Message => self.reply("received %s" format msg.body) - } - } - - class TestBlocker(uri: String) extends Actor with BlockingConsumer { - self.timeout = 1000 - def endpointUri = uri - protected def receive = { - case msg: Message => { /* do not reply */ } - } - } - - class ErrorHandlingConsumer extends Actor with BlockingConsumer { - def endpointUri = "direct:error-handler-test" - - onRouteDefinition {rd: RouteDefinition => - rd.onException(classOf[Exception]).handled(true).transform(Builder.exceptionMessage).end - } - - protected def receive = { - case msg: Message => throw new Exception("error: %s" format msg.body) - } - } - - class RedeliveringConsumer extends Actor with BlockingConsumer { - def endpointUri = "direct:redelivery-test" - - onRouteDefinition {rd: RouteDefinition => - rd.onException(classOf[Exception]).maximumRedeliveries(1).end - } - - // - // first message to this actor is not valid and will be rejected - // - - var valid = false - - protected def receive = { - case msg: Message => try { - respondTo(msg) - } finally { - valid = true - } - } - - private def respondTo(msg: Message) = - if (valid) self.reply("accepted: %s" format msg.body) - else throw new Exception("rejected: %s" format msg.body) - - } - - trait TestTypedConsumer { - @consume("direct:publish-test-3") - def foo(s: String): String - def bar(s: String): String - } - - class TestTypedConsumerImpl extends TypedActor with TestTypedConsumer { - def foo(s: String) = "foo: %s" format s - @consume("direct:publish-test-4") - def bar(s: String) = "bar: %s" format s - } - - - -}
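Editor's recap (not part of the deleted sources): the suite above exercises the typical consumer lifecycle, i.e. start the CamelService, block until the endpoint is activated, exchange messages, then deactivate. A compact, hedged sketch of that flow follows; EchoConsumer, EchoExample and the endpoint URI direct:echo-example are made up for illustration.

import akka.actor.Actor
import akka.actor.Actor._
import akka.camel.{CamelContextManager, CamelServiceManager, Consumer, Message}

// Made-up consumer for illustration; mirrors TestConsumer above.
class EchoConsumer extends Actor with Consumer {
  def endpointUri = "direct:echo-example" // hypothetical endpoint URI
  protected def receive = {
    case msg: Message => self.reply("received %s" format msg.body)
  }
}

object EchoExample {
  def main(args: Array[String]): Unit = {
    // Start the CamelService and block until the consumer endpoint is active,
    // exactly as the tests above do with awaitEndpointActivation.
    CamelServiceManager.startCamelService
    CamelServiceManager.mandatoryService.awaitEndpointActivation(1) {
      actorOf[EchoConsumer].start
    }
    // In-out exchange via the consumer's endpoint; prints "received ping".
    println(CamelContextManager.mandatoryTemplate.requestBody("direct:echo-example", "ping"))
    CamelServiceManager.stopCamelService
  }
}

diff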
--git a/akka-camel/src/test/scala/akka/MessageJavaTest.scala b/akka-camel/src/test/scala/akka/MessageJavaTest.scala deleted file mode 100644 index 3c95887eb4..0000000000 --- a/akka-camel/src/test/scala/akka/MessageJavaTest.scala +++ /dev/null @@ -1,5 +0,0 @@ -package akka.camel - -import org.scalatest.junit.JUnitSuite - -class MessageJavaTest extends MessageJavaTestBase with JUnitSuite diff --git a/akka-camel/src/test/scala/akka/MessageScalaTest.scala b/akka-camel/src/test/scala/akka/MessageScalaTest.scala deleted file mode 100644 index 5f43db596b..0000000000 --- a/akka-camel/src/test/scala/akka/MessageScalaTest.scala +++ /dev/null @@ -1,83 +0,0 @@ -package akka.camel - -import java.io.InputStream - -import org.apache.camel.NoTypeConversionAvailableException -import org.junit.Assert._ -import org.junit.Test - -import org.scalatest.BeforeAndAfterAll -import org.scalatest.junit.JUnitSuite - - -class MessageScalaTest extends JUnitSuite with BeforeAndAfterAll { - override protected def beforeAll = CamelContextManager.init - - @Test def shouldConvertDoubleBodyToString = { - assertEquals("1.4", Message(1.4).bodyAs[String]) - } - - @Test def shouldThrowExceptionWhenConvertingDoubleBodyToInputStream { - intercept[NoTypeConversionAvailableException] { - Message(1.4).bodyAs[InputStream] - } - } - - @Test def shouldReturnDoubleHeader = { - val message = Message("test" , Map("test" -> 1.4)) - assertEquals(1.4, message.header("test")) - } - - @Test def shouldConvertDoubleHeaderToString = { - val message = Message("test" , Map("test" -> 1.4)) - assertEquals("1.4", message.headerAs[String]("test")) - } - - @Test def shouldReturnSubsetOfHeaders = { - val message = Message("test" , Map("A" -> "1", "B" -> "2")) - assertEquals(Map("B" -> "2"), message.headers(Set("B"))) - } - - @Test def shouldTransformBodyAndPreserveHeaders = { - assertEquals( - Message("ab", Map("A" -> "1")), - Message("a" , Map("A" -> "1")).transformBody((body: String) => body + "b")) - } - - @Test def shouldConvertBodyAndPreserveHeaders = { - assertEquals( - Message("1.4", Map("A" -> "1")), - Message(1.4 , Map("A" -> "1")).setBodyAs[String]) - } - - @Test def shouldSetBodyAndPreserveHeaders = { - assertEquals( - Message("test2" , Map("A" -> "1")), - Message("test1" , Map("A" -> "1")).setBody("test2")) - } - - @Test def shouldSetHeadersAndPreserveBody = { - assertEquals( - Message("test1" , Map("C" -> "3")), - Message("test1" , Map("A" -> "1")).setHeaders(Map("C" -> "3"))) - - } - - @Test def shouldAddHeaderAndPreserveBodyAndHeaders = { - assertEquals( - Message("test1" , Map("A" -> "1", "B" -> "2")), - Message("test1" , Map("A" -> "1")).addHeader("B" -> "2")) - } - - @Test def shouldAddHeadersAndPreserveBodyAndHeaders = { - assertEquals( - Message("test1" , Map("A" -> "1", "B" -> "2")), - Message("test1" , Map("A" -> "1")).addHeaders(Map("B" -> "2"))) - } - - @Test def shouldRemoveHeadersAndPreserveBodyAndRemainingHeaders = { - assertEquals( - Message("test1" , Map("A" -> "1")), - Message("test1" , Map("A" -> "1", "B" -> "2")).removeHeader("B")) - } -} diff --git a/akka-camel/src/test/scala/akka/ProducerFeatureTest.scala b/akka-camel/src/test/scala/akka/ProducerFeatureTest.scala deleted file mode 100644 index 66bcbe9186..0000000000 --- a/akka-camel/src/test/scala/akka/ProducerFeatureTest.scala +++ /dev/null @@ -1,301 +0,0 @@ -package akka.camel - -import org.apache.camel.{Exchange, Processor} -import org.apache.camel.builder.RouteBuilder -import org.apache.camel.component.mock.MockEndpoint -import org.scalatest.{GivenWhenThen, 
BeforeAndAfterEach, BeforeAndAfterAll, FeatureSpec} - -import akka.actor.Actor._ -import akka.actor.{ActorRef, Actor, ActorRegistry} - -class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with BeforeAndAfterEach with GivenWhenThen { - import ProducerFeatureTest._ - - override protected def beforeAll = { - ActorRegistry.shutdownAll - CamelContextManager.init - CamelContextManager.mandatoryContext.addRoutes(new TestRoute) - CamelContextManager.start - } - - override protected def afterAll = { - CamelContextManager.stop - ActorRegistry.shutdownAll - } - - override protected def afterEach = { - mockEndpoint.reset - } - - feature("Produce a message to a sync Camel route") { - - scenario("produce message and receive normal response") { - given("a registered two-way producer") - val producer = actorOf(new TestProducer("direct:producer-test-2", true)) - producer.start - - when("a test message is sent to the producer with !!") - val message = Message("test", Map(Message.MessageExchangeId -> "123")) - val result = producer !! message - - then("a normal response should have been returned by the producer") - val expected = Message("received TEST", Map(Message.MessageExchangeId -> "123")) - assert(result === Some(expected)) - } - - scenario("produce message and receive failure response") { - given("a registered two-way producer") - val producer = actorOf(new TestProducer("direct:producer-test-2")) - producer.start - - when("a test message causing an exception is sent to the producer with !!") - val message = Message("fail", Map(Message.MessageExchangeId -> "123")) - val result = (producer !! message).as[Failure] - - then("a failure response should have been returned by the producer") - val expectedFailureText = result.get.cause.getMessage - val expectedHeaders = result.get.headers - assert(expectedFailureText === "failure") - assert(expectedHeaders === Map(Message.MessageExchangeId -> "123")) - } - - scenario("produce message oneway") { - given("a registered one-way producer") - val producer = actorOf(new TestProducer("direct:producer-test-1", true) with Oneway) - producer.start - - when("a test message is sent to the producer with !") - mockEndpoint.expectedBodiesReceived("TEST") - producer ! Message("test") - - then("the test message should have been sent to mock:mock") - mockEndpoint.assertIsSatisfied - } - - scenario("produce message twoway without sender reference") { - given("a registered two-way producer") - val producer = actorOf(new TestProducer("direct:producer-test-1")) - producer.start - - when("a test message is sent to the producer with !") - mockEndpoint.expectedBodiesReceived("test") - producer ! Message("test") - - then("there should be only a warning that there's no sender reference") - mockEndpoint.assertIsSatisfied - } - } - - feature("Produce a message to an async Camel route") { - - scenario("produce message and receive normal response") { - given("a registered two-way producer") - val producer = actorOf(new TestProducer("direct:producer-test-3")) - producer.start - - when("a test message is sent to the producer with !!") - val message = Message("test", Map(Message.MessageExchangeId -> "123")) - val result = producer !! 
message - - then("a normal response should have been returned by the producer") - val expected = Message("received test", Map(Message.MessageExchangeId -> "123")) - assert(result === Some(expected)) - } - - scenario("produce message and receive failure response") { - given("a registered two-way producer") - val producer = actorOf(new TestProducer("direct:producer-test-3")) - producer.start - - when("a test message causing an exception is sent to the producer with !!") - val message = Message("fail", Map(Message.MessageExchangeId -> "123")) - val result = (producer !! message).as[Failure] - - then("a failure response should have been returned by the producer") - val expectedFailureText = result.get.cause.getMessage - val expectedHeaders = result.get.headers - assert(expectedFailureText === "failure") - assert(expectedHeaders === Map(Message.MessageExchangeId -> "123")) - } - } - - feature("Produce a message to a sync Camel route and then forward the response") { - - scenario("produce message, forward normal response to a replying target actor and receive response") { - given("a registered two-way producer configured with a forward target") - val target = actorOf[ReplyingForwardTarget].start - val producer = actorOf(new TestForwarder("direct:producer-test-2", target)).start - - when("a test message is sent to the producer with !!") - val message = Message("test", Map(Message.MessageExchangeId -> "123")) - val result = producer !! message - - then("a normal response should have been returned by the forward target") - val expected = Message("received test", Map(Message.MessageExchangeId -> "123", "test" -> "result")) - assert(result === Some(expected)) - } - - scenario("produce message, forward failure response to a replying target actor and receive response") { - given("a registered two-way producer configured with a forward target") - val target = actorOf[ReplyingForwardTarget].start - val producer = actorOf(new TestForwarder("direct:producer-test-2", target)).start - - when("a test message causing an exception is sent to the producer with !!") - val message = Message("fail", Map(Message.MessageExchangeId -> "123")) - val result = (producer !! 
message).as[Failure] - - then("a failure response should have been returned by the forward target") - val expectedFailureText = result.get.cause.getMessage - val expectedHeaders = result.get.headers - assert(expectedFailureText === "failure") - assert(expectedHeaders === Map(Message.MessageExchangeId -> "123", "test" -> "failure")) - } - - scenario("produce message, forward normal response to a producing target actor and produce response to direct:forward-test-1") { - given("a registered one-way producer configured with a forward target") - val target = actorOf[ProducingForwardTarget].start - val producer = actorOf(new TestForwarder("direct:producer-test-2", target)).start - - when("a test message is sent to the producer with !") - mockEndpoint.expectedBodiesReceived("received test") - val result = producer.!(Message("test"))(Some(producer)) - - then("a normal response should have been produced by the forward target") - mockEndpoint.assertIsSatisfied - } - - scenario("produce message, forward failure response to a producing target actor and produce response to direct:forward-test-1") { - given("a registered one-way producer configured with a forward target") - val target = actorOf[ProducingForwardTarget].start - val producer = actorOf(new TestForwarder("direct:producer-test-2", target)).start - - when("a test message causing an exception is sent to the producer with !") - mockEndpoint.expectedMessageCount(1) - mockEndpoint.message(0).body().isInstanceOf(classOf[Failure]) - val result = producer.!(Message("fail"))(Some(producer)) - - then("a failure response should have been produced by the forward target") - mockEndpoint.assertIsSatisfied - } - } - - feature("Produce a message to an async Camel route and then forward the response") { - - scenario("produce message, forward normal response to a replying target actor and receive response") { - given("a registered two-way producer configured with a forward target") - val target = actorOf[ReplyingForwardTarget].start - val producer = actorOf(new TestForwarder("direct:producer-test-3", target)).start - - when("a test message is sent to the producer with !!") - val message = Message("test", Map(Message.MessageExchangeId -> "123")) - val result = producer !! message - - then("a normal response should have been returned by the forward target") - val expected = Message("received test", Map(Message.MessageExchangeId -> "123", "test" -> "result")) - assert(result === Some(expected)) - } - - scenario("produce message, forward failure response to a replying target actor and receive response") { - given("a registered two-way producer configured with a forward target") - val target = actorOf[ReplyingForwardTarget].start - val producer = actorOf(new TestForwarder("direct:producer-test-3", target)).start - - when("a test message causing an exception is sent to the producer with !!") - val message = Message("fail", Map(Message.MessageExchangeId -> "123")) - val result = (producer !! 
message).as[Failure] - - then("a failure response should have been returned by the forward target") - val expectedFailureText = result.get.cause.getMessage - val expectedHeaders = result.get.headers - assert(expectedFailureText === "failure") - assert(expectedHeaders === Map(Message.MessageExchangeId -> "123", "test" -> "failure")) - } - - scenario("produce message, forward normal response to a producing target actor and produce response to direct:forward-test-1") { - given("a registered one-way producer configured with a forward target") - val target = actorOf[ProducingForwardTarget].start - val producer = actorOf(new TestForwarder("direct:producer-test-3", target)).start - - when("a test message is sent to the producer with !") - mockEndpoint.expectedBodiesReceived("received test") - val result = producer.!(Message("test"))(Some(producer)) - - then("a normal response should have been produced by the forward target") - mockEndpoint.assertIsSatisfied - } - - scenario("produce message, forward failure response to a producing target actor and produce response to direct:forward-test-1") { - given("a registered one-way producer configured with a forward target") - val target = actorOf[ProducingForwardTarget].start - val producer = actorOf(new TestForwarder("direct:producer-test-3", target)).start - - when("a test message causing an exception is sent to the producer with !") - mockEndpoint.expectedMessageCount(1) - mockEndpoint.message(0).body().isInstanceOf(classOf[Failure]) - val result = producer.!(Message("fail"))(Some(producer)) - - then("a failure response should have been produced by the forward target") - mockEndpoint.assertIsSatisfied - } - } - - private def mockEndpoint = CamelContextManager.mandatoryContext.getEndpoint("mock:mock", classOf[MockEndpoint]) -} - -object ProducerFeatureTest { - class TestProducer(uri: String, upper: Boolean = false) extends Actor with Producer { - def endpointUri = uri - override protected def receiveBeforeProduce = { - case msg: Message => if (upper) msg.transformBody { body: String => body.toUpperCase } else msg - } - } - - class TestForwarder(uri: String, target: ActorRef) extends Actor with Producer { - def endpointUri = uri - override protected def receiveAfterProduce = { - case msg => target forward msg - } - } - - class TestResponder extends Actor { - protected def receive = { - case msg: Message => msg.body match { - case "fail" => self.reply(Failure(new Exception("failure"), msg.headers)) - case _ => self.reply(msg.transformBody { body: String => "received %s" format body }) - } - } - } - - class ReplyingForwardTarget extends Actor { - protected def receive = { - case msg: Message => - self.reply(msg.addHeader("test" -> "result")) - case msg: Failure => - self.reply(Failure(msg.cause, msg.headers + ("test" -> "failure"))) - } - } - - class ProducingForwardTarget extends Actor with Producer with Oneway { - def endpointUri = "direct:forward-test-1" - } - - class TestRoute extends RouteBuilder { - val responder = actorOf[TestResponder].start - def configure { - from("direct:forward-test-1").to("mock:mock") - // for one-way messaging tests - from("direct:producer-test-1").to("mock:mock") - // for two-way messaging tests (async) - from("direct:producer-test-3").to("actor:uuid:%s" format responder.uuid) - // for two-way messaging tests (sync) - from("direct:producer-test-2").process(new Processor() { - def process(exchange: Exchange) = { - exchange.getIn.getBody match { - case "fail" => throw new Exception("failure") - case body => 
exchange.getOut.setBody("received %s" format body) - } - } - }) - } - } -} diff --git a/akka-camel/src/test/scala/akka/PublishRequestorTest.scala b/akka-camel/src/test/scala/akka/PublishRequestorTest.scala deleted file mode 100644 index 8578abef60..0000000000 --- a/akka-camel/src/test/scala/akka/PublishRequestorTest.scala +++ /dev/null @@ -1,103 +0,0 @@ -package akka.camel - -import java.util.concurrent.{CountDownLatch, TimeUnit} - -import org.junit.{Before, After, Test} -import org.scalatest.junit.JUnitSuite - -import akka.actor._ -import akka.actor.Actor._ -import akka.camel.support.{SetExpectedMessageCount => SetExpectedTestMessageCount, _} - -class PublishRequestorTest extends JUnitSuite { - import PublishRequestorTest._ - - var publisher: ActorRef = _ - var requestor: ActorRef = _ - var consumer: ActorRef = _ - - val ascendingMethodName = (r1: ConsumerMethodRegistered, r2: ConsumerMethodRegistered) => - r1.method.getName < r2.method.getName - - @Before def setUp: Unit = { - publisher = actorOf[PublisherMock].start - requestor = actorOf[PublishRequestor].start - requestor ! PublishRequestorInit(publisher) - consumer = actorOf(new Actor with Consumer { - def endpointUri = "mock:test" - protected def receive = null - }).start - } - - @After def tearDown = { - AspectInitRegistry.removeListener(requestor); - ActorRegistry.shutdownAll - } - - @Test def shouldReceiveOneConsumerMethodRegisteredEvent = { - AspectInitRegistry.addListener(requestor) - val latch = (publisher !! SetExpectedTestMessageCount(1)).as[CountDownLatch].get - val obj = TypedActor.newInstance(classOf[SampleTypedSingleConsumer], classOf[SampleTypedSingleConsumerImpl]) - assert(latch.await(5000, TimeUnit.MILLISECONDS)) - val event = (publisher !! GetRetainedMessage).as[ConsumerMethodRegistered].get - assert(event.endpointUri === "direct:foo") - assert(event.typedActor === obj) - assert(event.methodName === "foo") - } - - @Test def shouldReceiveOneConsumerMethodUnregisteredEvent = { - val obj = TypedActor.newInstance(classOf[SampleTypedSingleConsumer], classOf[SampleTypedSingleConsumerImpl]) - val latch = (publisher !! SetExpectedTestMessageCount(1)).as[CountDownLatch].get - AspectInitRegistry.addListener(requestor) - TypedActor.stop(obj) - assert(latch.await(5000, TimeUnit.MILLISECONDS)) - val event = (publisher !! GetRetainedMessage).as[ConsumerMethodUnregistered].get - assert(event.endpointUri === "direct:foo") - assert(event.typedActor === obj) - assert(event.methodName === "foo") - } - - @Test def shouldReceiveThreeConsumerMethodRegisteredEvents = { - AspectInitRegistry.addListener(requestor) - val latch = (publisher !! SetExpectedTestMessageCount(3)).as[CountDownLatch].get - val obj = TypedActor.newInstance(classOf[SampleTypedConsumer], classOf[SampleTypedConsumerImpl]) - assert(latch.await(5000, TimeUnit.MILLISECONDS)) - val request = GetRetainedMessages(_.isInstanceOf[ConsumerMethodRegistered]) - val events = (publisher !! request).as[List[ConsumerMethodRegistered]].get - assert(events.map(_.method.getName).sortWith(_ < _) === List("m2", "m3", "m4")) - } - - @Test def shouldReceiveThreeConsumerMethodUnregisteredEvents = { - val obj = TypedActor.newInstance(classOf[SampleTypedConsumer], classOf[SampleTypedConsumerImpl]) - val latch = (publisher !! 
SetExpectedTestMessageCount(3)).as[CountDownLatch].get - AspectInitRegistry.addListener(requestor) - TypedActor.stop(obj) - assert(latch.await(5000, TimeUnit.MILLISECONDS)) - val request = GetRetainedMessages(_.isInstanceOf[ConsumerMethodUnregistered]) - val events = (publisher !! request).as[List[ConsumerMethodUnregistered]].get - assert(events.map(_.method.getName).sortWith(_ < _) === List("m2", "m3", "m4")) - } - - @Test def shouldReceiveOneConsumerRegisteredEvent = { - val latch = (publisher !! SetExpectedTestMessageCount(1)).as[CountDownLatch].get - requestor ! ActorRegistered(consumer) - assert(latch.await(5000, TimeUnit.MILLISECONDS)) - assert((publisher !! GetRetainedMessage) === - Some(ConsumerActorRegistered(consumer, consumer.actor.asInstanceOf[Consumer]))) - } - - @Test def shouldReceiveOneConsumerUnregisteredEvent = { - val latch = (publisher !! SetExpectedTestMessageCount(1)).as[CountDownLatch].get - requestor ! ActorUnregistered(consumer) - assert(latch.await(5000, TimeUnit.MILLISECONDS)) - assert((publisher !! GetRetainedMessage) === - Some(ConsumerActorUnregistered(consumer, consumer.actor.asInstanceOf[Consumer]))) - } -} - -object PublishRequestorTest { - class PublisherMock extends TestActor with Retain with Countdown { - def handler = retain andThen countdown - } -} - diff --git a/akka-camel/src/test/scala/akka/RemoteConsumerTest.scala b/akka-camel/src/test/scala/akka/RemoteConsumerTest.scala deleted file mode 100644 index 957080c2ec..0000000000 --- a/akka-camel/src/test/scala/akka/RemoteConsumerTest.scala +++ /dev/null @@ -1,101 +0,0 @@ -package akka.camel - -import java.util.concurrent.{CountDownLatch, TimeUnit} - -import org.scalatest.{GivenWhenThen, BeforeAndAfterAll, FeatureSpec} - -import akka.actor._ -import akka.actor.Actor._ -import akka.remote.{RemoteClient, RemoteServer} - -/** - * @author Martin Krasser - */ -class RemoteConsumerTest extends FeatureSpec with BeforeAndAfterAll with GivenWhenThen { - import CamelServiceManager._ - import RemoteConsumerTest._ - - var server: RemoteServer = _ - - override protected def beforeAll = { - ActorRegistry.shutdownAll - - startCamelService - - server = new RemoteServer() - server.start(host, port) - - Thread.sleep(1000) - } - - override protected def afterAll = { - server.shutdown - - stopCamelService - - RemoteClient.shutdownAll - ActorRegistry.shutdownAll - - Thread.sleep(1000) - } - - feature("Publish consumer on remote node") { - scenario("access published remote consumer") { - given("a client-initiated remote consumer") - val consumer = actorOf[RemoteConsumer].start - - when("remote consumer publication is triggered") - assert(mandatoryService.awaitEndpointActivation(1) { - consumer !! 
"init" - }) - - then("the published consumer is accessible via its endpoint URI") - val response = CamelContextManager.mandatoryTemplate.requestBody("direct:remote-consumer", "test") - assert(response === "remote actor: test") - } - } - - feature("Publish typed consumer on remote node") { - scenario("access published remote consumer method") { - given("a client-initiated remote typed consumer") - val consumer = TypedActor.newRemoteInstance(classOf[SampleRemoteTypedConsumer], classOf[SampleRemoteTypedConsumerImpl], host, port) - - when("remote typed consumer publication is triggered") - assert(mandatoryService.awaitEndpointActivation(1) { - consumer.foo("init") - }) - then("the published method is accessible via its endpoint URI") - val response = CamelContextManager.mandatoryTemplate.requestBody("direct:remote-typed-consumer", "test") - assert(response === "remote typed actor: test") - } - } - - feature("Publish untyped consumer on remote node") { - scenario("access published remote untyped consumer") { - given("a client-initiated remote untyped consumer") - val consumer = UntypedActor.actorOf(classOf[SampleRemoteUntypedConsumer]).start - - when("remote untyped consumer publication is triggered") - assert(mandatoryService.awaitEndpointActivation(1) { - consumer.sendRequestReply(Message("init", Map("test" -> "init"))) - }) - then("the published untyped consumer is accessible via its endpoint URI") - val response = CamelContextManager.mandatoryTemplate.requestBodyAndHeader("direct:remote-untyped-consumer", "a", "test", "b") - assert(response === "a b") - } - } -} - -object RemoteConsumerTest { - val host = "localhost" - val port = 7774 - - class RemoteConsumer extends RemoteActor(host, port) with Consumer { - def endpointUri = "direct:remote-consumer" - - protected def receive = { - case "init" => self.reply("done") - case m: Message => self.reply("remote actor: %s" format m.body) - } - } -} diff --git a/akka-camel/src/test/scala/akka/UntypedProducerFeatureTest.scala b/akka-camel/src/test/scala/akka/UntypedProducerFeatureTest.scala deleted file mode 100644 index 18930f9ab4..0000000000 --- a/akka-camel/src/test/scala/akka/UntypedProducerFeatureTest.scala +++ /dev/null @@ -1,98 +0,0 @@ -package akka.camel - -import org.apache.camel.{Exchange, Processor} -import org.apache.camel.builder.RouteBuilder -import org.apache.camel.component.mock.MockEndpoint -import org.scalatest.{GivenWhenThen, BeforeAndAfterEach, BeforeAndAfterAll, FeatureSpec} - -import akka.actor.UntypedActor._ -import akka.actor.ActorRegistry - -class UntypedProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with BeforeAndAfterEach with GivenWhenThen { - import UntypedProducerFeatureTest._ - - override protected def beforeAll = { - ActorRegistry.shutdownAll - CamelContextManager.init - CamelContextManager.mandatoryContext.addRoutes(new TestRoute) - CamelContextManager.start - } - - override protected def afterAll = { - CamelContextManager.stop - ActorRegistry.shutdownAll - } - - override protected def afterEach = { - mockEndpoint.reset - } - - feature("Produce a message to a sync Camel route") { - - scenario("produce message and receive normal response") { - given("a registered two-way producer") - val producer = actorOf(classOf[SampleUntypedReplyingProducer]) - producer.start - - when("a test message is sent to the producer with !!") - val message = Message("test", Map(Message.MessageExchangeId -> "123")) - val result = producer.sendRequestReply(message) - - then("a normal response should have been returned by the 
producer") - val expected = Message("received test", Map(Message.MessageExchangeId -> "123")) - assert(result === expected) - } - - scenario("produce message and receive failure response") { - given("a registered two-way producer") - val producer = actorOf(classOf[SampleUntypedReplyingProducer]) - producer.start - - when("a test message causing an exception is sent to the producer with !!") - val message = Message("fail", Map(Message.MessageExchangeId -> "123")) - val result = producer.sendRequestReply(message).asInstanceOf[Failure] - - then("a failure response should have been returned by the producer") - val expectedFailureText = result.cause.getMessage - val expectedHeaders = result.headers - assert(expectedFailureText === "failure") - assert(expectedHeaders === Map(Message.MessageExchangeId -> "123")) - } - - } - - feature("Produce a message to a sync Camel route and then forward the response") { - - scenario("produce message and send normal response to direct:forward-test-1") { - given("a registered one-way producer configured with a forward target") - val producer = actorOf(classOf[SampleUntypedForwardingProducer]) - producer.start - - when("a test message is sent to the producer with !") - mockEndpoint.expectedBodiesReceived("received test") - val result = producer.sendOneWay(Message("test"), producer) - - then("a normal response should have been sent") - mockEndpoint.assertIsSatisfied - } - - } - - private def mockEndpoint = CamelContextManager.mandatoryContext.getEndpoint("mock:mock", classOf[MockEndpoint]) -} - -object UntypedProducerFeatureTest { - class TestRoute extends RouteBuilder { - def configure { - from("direct:forward-test-1").to("mock:mock") - from("direct:producer-test-1").process(new Processor() { - def process(exchange: Exchange) = { - exchange.getIn.getBody match { - case "fail" => throw new Exception("failure") - case body => exchange.getOut.setBody("received %s" format body) - } - } - }) - } - } -} diff --git a/akka-camel/src/test/scala/akka/component/ActorComponentFeatureTest.scala b/akka-camel/src/test/scala/akka/component/ActorComponentFeatureTest.scala deleted file mode 100644 index 993fe72096..0000000000 --- a/akka-camel/src/test/scala/akka/component/ActorComponentFeatureTest.scala +++ /dev/null @@ -1,130 +0,0 @@ -package akka.camel.component - -import java.util.concurrent.{TimeUnit, CountDownLatch} - -import org.apache.camel.RuntimeCamelException -import org.apache.camel.builder.RouteBuilder -import org.apache.camel.component.mock.MockEndpoint -import org.scalatest.{BeforeAndAfterEach, BeforeAndAfterAll, FeatureSpec} - -import akka.actor.Actor._ -import akka.actor.{ActorRegistry, Actor} -import akka.camel.{Failure, Message, CamelContextManager} -import akka.camel.support._ - -class ActorComponentFeatureTest extends FeatureSpec with BeforeAndAfterAll with BeforeAndAfterEach { - import ActorComponentFeatureTest._ - - override protected def beforeAll = { - ActorRegistry.shutdownAll - CamelContextManager.init - CamelContextManager.mandatoryContext.addRoutes(new TestRoute) - CamelContextManager.start - } - - override protected def afterAll = CamelContextManager.stop - - override protected def afterEach = { - ActorRegistry.shutdownAll - mockEndpoint.reset - } - - feature("Communicate with an actor via an actor:uuid endpoint") { - import CamelContextManager.mandatoryTemplate - - scenario("one-way communication") { - val actor = actorOf[Tester1].start - val latch = (actor !! 
SetExpectedMessageCount(1)).as[CountDownLatch].get - mandatoryTemplate.sendBody("actor:uuid:%s" format actor.uuid, "Martin") - assert(latch.await(5000, TimeUnit.MILLISECONDS)) - val reply = (actor !! GetRetainedMessage).get.asInstanceOf[Message] - assert(reply.body === "Martin") - } - - scenario("two-way communication") { - val actor = actorOf[Tester2].start - assert(mandatoryTemplate.requestBody("actor:uuid:%s" format actor.uuid, "Martin") === "Hello Martin") - } - - scenario("two-way communication with timeout") { - val actor = actorOf[Tester3].start - intercept[RuntimeCamelException] { - mandatoryTemplate.requestBody("actor:uuid:%s?blocking=true" format actor.uuid, "Martin") - } - } - - scenario("two-way communication via a custom route with failure response") { - mockEndpoint.expectedBodiesReceived("whatever") - mandatoryTemplate.requestBody("direct:failure-test-1", "whatever") - mockEndpoint.assertIsSatisfied - } - - scenario("two-way communication via a custom route with exception") { - mockEndpoint.expectedBodiesReceived("whatever") - mandatoryTemplate.requestBody("direct:failure-test-2", "whatever") - mockEndpoint.assertIsSatisfied - } - } - - feature("Communicate with an actor via an actor:id endpoint") { - import CamelContextManager.mandatoryTemplate - - scenario("one-way communication") { - val actor = actorOf[Tester1].start - val latch = (actor !! SetExpectedMessageCount(1)).as[CountDownLatch].get - mandatoryTemplate.sendBody("actor:%s" format actor.id, "Martin") - assert(latch.await(5000, TimeUnit.MILLISECONDS)) - val reply = (actor !! GetRetainedMessage).get.asInstanceOf[Message] - assert(reply.body === "Martin") - } - - scenario("two-way communication") { - val actor = actorOf[Tester2].start - assert(mandatoryTemplate.requestBody("actor:%s" format actor.id, "Martin") === "Hello Martin") - } - - scenario("two-way communication via a custom route") { - val actor = actorOf[CustomIdActor].start - assert(mandatoryTemplate.requestBody("direct:custom-id-test-1", "Martin") === "Received Martin") - assert(mandatoryTemplate.requestBody("direct:custom-id-test-2", "Martin") === "Received Martin") - } - } - - private def mockEndpoint = CamelContextManager.mandatoryContext.getEndpoint("mock:mock", classOf[MockEndpoint]) -} - -object ActorComponentFeatureTest { - class CustomIdActor extends Actor { - self.id = "custom-id" - protected def receive = { - case msg: Message => self.reply("Received %s" format msg.body) - } - } - - class FailWithMessage extends Actor { - protected def receive = { - case msg: Message => self.reply(Failure(new Exception("test"))) - } - } - - class FailWithException extends Actor { - protected def receive = { - case msg: Message => throw new Exception("test") - } - } - - class TestRoute extends RouteBuilder { - val failWithMessage = actorOf[FailWithMessage].start - val failWithException = actorOf[FailWithException].start - def configure { - from("direct:custom-id-test-1").to("actor:custom-id") - from("direct:custom-id-test-2").to("actor:id:custom-id") - from("direct:failure-test-1") - .onException(classOf[Exception]).to("mock:mock").handled(true).end - .to("actor:uuid:%s" format failWithMessage.uuid) - from("direct:failure-test-2") - .onException(classOf[Exception]).to("mock:mock").handled(true).end - .to("actor:uuid:%s?blocking=true" format failWithException.uuid) - } - } -} diff --git a/akka-camel/src/test/scala/akka/component/ActorComponentTest.scala b/akka-camel/src/test/scala/akka/component/ActorComponentTest.scala deleted file mode 100644 index 
0af9f00213..0000000000 --- a/akka-camel/src/test/scala/akka/component/ActorComponentTest.scala +++ /dev/null @@ -1,79 +0,0 @@ -package akka.camel.component - -import org.apache.camel.{Endpoint, AsyncProcessor} -import org.apache.camel.impl.DefaultCamelContext -import org.junit._ -import org.scalatest.junit.JUnitSuite - -import akka.actor.uuidFrom - -class ActorComponentTest extends JUnitSuite { - val component: ActorComponent = ActorComponentTest.actorComponent - - def testUUID = "93da8c80-c3fd-11df-abed-60334b120057" - - @Test def shouldCreateEndpointWithIdDefined = { - val ep1: ActorEndpoint = component.createEndpoint("actor:abc").asInstanceOf[ActorEndpoint] - val ep2: ActorEndpoint = component.createEndpoint("actor:id:abc").asInstanceOf[ActorEndpoint] - assert(ep1.idValue === Some("abc")) - assert(ep2.idValue === Some("abc")) - assert(ep1.idType === "id") - assert(ep2.idType === "id") - assert(!ep1.blocking) - assert(!ep2.blocking) - } - - @Test def shouldCreateEndpointWithIdTemplate = { - val ep: ActorEndpoint = component.createEndpoint("actor:id:").asInstanceOf[ActorEndpoint] - assert(ep.idValue === None) - assert(ep.idType === "id") - assert(!ep.blocking) - } - - @Test def shouldCreateEndpointWithIdTemplateAndBlockingSet = { - val ep: ActorEndpoint = component.createEndpoint("actor:id:?blocking=true").asInstanceOf[ActorEndpoint] - assert(ep.idValue === None) - assert(ep.idType === "id") - assert(ep.blocking) - } - - @Test def shouldCreateEndpointWithUuidDefined = { - val ep: ActorEndpoint = component.createEndpoint("actor:uuid:%s" format testUUID).asInstanceOf[ActorEndpoint] - assert(ep.idValue === Some(testUUID)) - assert(ep.idType === "uuid") - assert(!ep.blocking) - } - - @Test def shouldCreateEndpointWithUuidTemplate = { - val ep: ActorEndpoint = component.createEndpoint("actor:uuid:").asInstanceOf[ActorEndpoint] - assert(ep.idValue === None) - assert(ep.idType === "uuid") - assert(!ep.blocking) - } - - @Test def shouldCreateEndpointWithUuidTemplateandBlockingSet = { - val ep: ActorEndpoint = component.createEndpoint("actor:uuid:?blocking=true").asInstanceOf[ActorEndpoint] - assert(ep.idValue === None) - assert(ep.idType === "uuid") - assert(ep.blocking) - } - - @Test def shouldCreateEndpointWithBlockingSet = { - val ep: ActorEndpoint = component.createEndpoint("actor:uuid:%s?blocking=true" format testUUID).asInstanceOf[ActorEndpoint] - assert(ep.idValue === Some(testUUID)) - assert(ep.idType === "uuid") - assert(ep.blocking) - } -} - -object ActorComponentTest { - def actorComponent = { - val component = new ActorComponent - component.setCamelContext(new DefaultCamelContext) - component - } - - def actorEndpoint(uri:String) = actorComponent.createEndpoint(uri) - def actorProducer(endpoint: Endpoint) = endpoint.createProducer - def actorAsyncProducer(endpoint: Endpoint) = endpoint.createProducer.asInstanceOf[AsyncProcessor] -} diff --git a/akka-camel/src/test/scala/akka/component/ActorProducerTest.scala b/akka-camel/src/test/scala/akka/component/ActorProducerTest.scala deleted file mode 100644 index 62ccf9cb0e..0000000000 --- a/akka-camel/src/test/scala/akka/component/ActorProducerTest.scala +++ /dev/null @@ -1,230 +0,0 @@ -package akka.camel.component - -import ActorComponentTest._ - -import java.util.concurrent.{CountDownLatch, TimeoutException, TimeUnit} - -import org.apache.camel.{AsyncCallback, ExchangePattern} - -import org.junit.{After, Test} -import org.scalatest.junit.JUnitSuite -import org.scalatest.BeforeAndAfterAll - -import akka.actor.Actor._ -import 
akka.actor.ActorRegistry -import akka.camel.{Failure, Message} -import akka.camel.support._ - -class ActorProducerTest extends JUnitSuite with BeforeAndAfterAll { - import ActorProducerTest._ - - @After def tearDown = ActorRegistry.shutdownAll - - @Test def shouldSendMessageToActorWithSyncProcessor = { - val actor = actorOf[Tester1].start - val latch = (actor !! SetExpectedMessageCount(1)).as[CountDownLatch].get - val endpoint = actorEndpoint("actor:uuid:%s" format actor.uuid) - val exchange = endpoint.createExchange(ExchangePattern.InOnly) - exchange.getIn.setBody("Martin") - exchange.getIn.setHeader("k1", "v1") - actorProducer(endpoint).process(exchange) - assert(latch.await(5000, TimeUnit.MILLISECONDS)) - val reply = (actor !! GetRetainedMessage).get.asInstanceOf[Message] - assert(reply.body === "Martin") - assert(reply.headers === Map(Message.MessageExchangeId -> exchange.getExchangeId, "k1" -> "v1")) - } - - @Test def shouldSendMessageToActorWithAsyncProcessor = { - val actor = actorOf[Tester1].start - val latch = (actor !! SetExpectedMessageCount(1)).as[CountDownLatch].get - val endpoint = actorEndpoint("actor:uuid:%s" format actor.uuid) - val exchange = endpoint.createExchange(ExchangePattern.InOnly) - exchange.getIn.setBody("Martin") - exchange.getIn.setHeader("k1", "v1") - actorAsyncProducer(endpoint).process(exchange, expectSyncCompletion) - assert(latch.await(5000, TimeUnit.MILLISECONDS)) - val reply = (actor !! GetRetainedMessage).get.asInstanceOf[Message] - assert(reply.body === "Martin") - assert(reply.headers === Map(Message.MessageExchangeId -> exchange.getExchangeId, "k1" -> "v1")) - } - - @Test def shouldSendMessageToActorAndReceiveResponseWithSyncProcessor = { - val actor = actorOf(new Tester2 { - override def response(msg: Message) = Message(super.response(msg), Map("k2" -> "v2")) - }).start - val endpoint = actorEndpoint("actor:uuid:%s" format actor.uuid) - val exchange = endpoint.createExchange(ExchangePattern.InOut) - exchange.getIn.setBody("Martin") - exchange.getIn.setHeader("k1", "v1") - actorProducer(endpoint).process(exchange) - assert(exchange.getOut.getBody === "Hello Martin") - assert(exchange.getOut.getHeader("k2") === "v2") - } - - @Test def shouldSendMessageToActorAndReceiveResponseWithAsyncProcessor = { - val actor = actorOf(new Tester2 { - override def response(msg: Message) = Message(super.response(msg), Map("k2" -> "v2")) - }).start - val completion = expectAsyncCompletion - val endpoint = actorEndpoint("actor:uuid:%s" format actor.uuid) - val exchange = endpoint.createExchange(ExchangePattern.InOut) - exchange.getIn.setBody("Martin") - exchange.getIn.setHeader("k1", "v1") - actorAsyncProducer(endpoint).process(exchange, completion) - assert(completion.latch.await(5000, TimeUnit.MILLISECONDS)) - assert(exchange.getOut.getBody === "Hello Martin") - assert(exchange.getOut.getHeader("k2") === "v2") - } - - @Test def shouldSendMessageToActorAndReceiveFailureWithAsyncProcessor = { - val actor = actorOf(new Tester2 { - override def response(msg: Message) = Failure(new Exception("testmsg"), Map("k3" -> "v3")) - }).start - val completion = expectAsyncCompletion - val endpoint = actorEndpoint("actor:uuid:%s" format actor.uuid) - val exchange = endpoint.createExchange(ExchangePattern.InOut) - exchange.getIn.setBody("Martin") - exchange.getIn.setHeader("k1", "v1") - actorAsyncProducer(endpoint).process(exchange, completion) - assert(completion.latch.await(5000, TimeUnit.MILLISECONDS)) - assert(exchange.getException.getMessage === "testmsg") - 
assert(exchange.getOut.getBody === null) - assert(exchange.getOut.getHeader("k3") === null) // headers from failure message are currently ignored - } - - @Test def shouldDynamicallyRouteMessageToActorWithDefaultId = { - val actor1 = actorOf[Tester1] - val actor2 = actorOf[Tester1] - actor1.id = "x" - actor2.id = "y" - actor1.start - actor2.start - val latch1 = (actor1 !! SetExpectedMessageCount(1)).as[CountDownLatch].get - val latch2 = (actor2 !! SetExpectedMessageCount(1)).as[CountDownLatch].get - val endpoint = actorEndpoint("actor:id:%s" format actor1.id) - val exchange1 = endpoint.createExchange(ExchangePattern.InOnly) - val exchange2 = endpoint.createExchange(ExchangePattern.InOnly) - exchange1.getIn.setBody("Test1") - exchange2.getIn.setBody("Test2") - exchange2.getIn.setHeader(ActorComponent.ActorIdentifier, actor2.id) - actorProducer(endpoint).process(exchange1) - actorProducer(endpoint).process(exchange2) - assert(latch1.await(5, TimeUnit.SECONDS)) - assert(latch2.await(5, TimeUnit.SECONDS)) - val reply1 = (actor1 !! GetRetainedMessage).get.asInstanceOf[Message] - val reply2 = (actor2 !! GetRetainedMessage).get.asInstanceOf[Message] - assert(reply1.body === "Test1") - assert(reply2.body === "Test2") - } - - @Test def shouldDynamicallyRouteMessageToActorWithoutDefaultId = { - val actor1 = actorOf[Tester1] - val actor2 = actorOf[Tester1] - actor1.id = "x" - actor2.id = "y" - actor1.start - actor2.start - val latch1 = (actor1 !! SetExpectedMessageCount(1)).as[CountDownLatch].get - val latch2 = (actor2 !! SetExpectedMessageCount(1)).as[CountDownLatch].get - val endpoint = actorEndpoint("actor:id:") - val exchange1 = endpoint.createExchange(ExchangePattern.InOnly) - val exchange2 = endpoint.createExchange(ExchangePattern.InOnly) - exchange1.getIn.setBody("Test1") - exchange2.getIn.setBody("Test2") - exchange1.getIn.setHeader(ActorComponent.ActorIdentifier, actor1.id) - exchange2.getIn.setHeader(ActorComponent.ActorIdentifier, actor2.id) - actorProducer(endpoint).process(exchange1) - actorProducer(endpoint).process(exchange2) - assert(latch1.await(5, TimeUnit.SECONDS)) - assert(latch2.await(5, TimeUnit.SECONDS)) - val reply1 = (actor1 !! GetRetainedMessage).get.asInstanceOf[Message] - val reply2 = (actor2 !! GetRetainedMessage).get.asInstanceOf[Message] - assert(reply1.body === "Test1") - assert(reply2.body === "Test2") - } - - @Test def shouldDynamicallyRouteMessageToActorWithDefaultUuid = { - val actor1 = actorOf[Tester1].start - val actor2 = actorOf[Tester1].start - val latch1 = (actor1 !! SetExpectedMessageCount(1)).as[CountDownLatch].get - val latch2 = (actor2 !! SetExpectedMessageCount(1)).as[CountDownLatch].get - val endpoint = actorEndpoint("actor:uuid:%s" format actor1.uuid) - val exchange1 = endpoint.createExchange(ExchangePattern.InOnly) - val exchange2 = endpoint.createExchange(ExchangePattern.InOnly) - exchange1.getIn.setBody("Test1") - exchange2.getIn.setBody("Test2") - exchange2.getIn.setHeader(ActorComponent.ActorIdentifier, actor2.uuid.toString) - actorProducer(endpoint).process(exchange1) - actorProducer(endpoint).process(exchange2) - assert(latch1.await(5, TimeUnit.SECONDS)) - assert(latch2.await(5, TimeUnit.SECONDS)) - val reply1 = (actor1 !! GetRetainedMessage).get.asInstanceOf[Message] - val reply2 = (actor2 !! 
GetRetainedMessage).get.asInstanceOf[Message] - assert(reply1.body === "Test1") - assert(reply2.body === "Test2") - } - - @Test def shouldDynamicallyRouteMessageToActorWithoutDefaultUuid = { - val actor1 = actorOf[Tester1].start - val actor2 = actorOf[Tester1].start - val latch1 = (actor1 !! SetExpectedMessageCount(1)).as[CountDownLatch].get - val latch2 = (actor2 !! SetExpectedMessageCount(1)).as[CountDownLatch].get - val endpoint = actorEndpoint("actor:uuid:") - val exchange1 = endpoint.createExchange(ExchangePattern.InOnly) - val exchange2 = endpoint.createExchange(ExchangePattern.InOnly) - exchange1.getIn.setBody("Test1") - exchange2.getIn.setBody("Test2") - exchange1.getIn.setHeader(ActorComponent.ActorIdentifier, actor1.uuid) - exchange2.getIn.setHeader(ActorComponent.ActorIdentifier, actor2.uuid.toString) - actorProducer(endpoint).process(exchange1) - actorProducer(endpoint).process(exchange2) - assert(latch1.await(5, TimeUnit.SECONDS)) - assert(latch2.await(5, TimeUnit.SECONDS)) - val reply1 = (actor1 !! GetRetainedMessage).get.asInstanceOf[Message] - val reply2 = (actor2 !! GetRetainedMessage).get.asInstanceOf[Message] - assert(reply1.body === "Test1") - assert(reply2.body === "Test2") - } - - @Test def shouldThrowExceptionWhenIdNotSet: Unit = { - val actor = actorOf[Tester1].start - val latch = (actor !! SetExpectedMessageCount(1)).as[CountDownLatch].get - val endpoint = actorEndpoint("actor:id:") - intercept[ActorIdentifierNotSetException] { - actorProducer(endpoint).process(endpoint.createExchange(ExchangePattern.InOnly)) - } - } - - @Test def shouldThrowExceptionWhenUuidNotSet: Unit = { - val actor = actorOf[Tester1].start - val latch = (actor !! SetExpectedMessageCount(1)).as[CountDownLatch].get - val endpoint = actorEndpoint("actor:uuid:") - intercept[ActorIdentifierNotSetException] { - actorProducer(endpoint).process(endpoint.createExchange(ExchangePattern.InOnly)) - } - } - - @Test def shouldSendMessageToActorAndTimeout(): Unit = { - val actor = actorOf[Tester3].start - val endpoint = actorEndpoint("actor:uuid:%s" format actor.uuid) - val exchange = endpoint.createExchange(ExchangePattern.InOut) - exchange.getIn.setBody("Martin") - intercept[TimeoutException] { - endpoint.createProducer.process(exchange) - } - } -} - -object ActorProducerTest { - def expectSyncCompletion = new AsyncCallback { - def done(doneSync: Boolean) = assert(doneSync) - } - - def expectAsyncCompletion = new AsyncCallback { - val latch = new CountDownLatch(1); - def done(doneSync: Boolean) = { - assert(!doneSync) - latch.countDown - } - } -} diff --git a/akka-camel/src/test/scala/akka/component/TypedActorComponentFeatureTest.scala b/akka-camel/src/test/scala/akka/component/TypedActorComponentFeatureTest.scala deleted file mode 100644 index e6e1293a54..0000000000 --- a/akka-camel/src/test/scala/akka/component/TypedActorComponentFeatureTest.scala +++ /dev/null @@ -1,108 +0,0 @@ -package akka.camel.component - -import org.apache.camel._ -import org.apache.camel.builder.RouteBuilder -import org.apache.camel.impl.{DefaultCamelContext, SimpleRegistry} -import org.scalatest.{BeforeAndAfterEach, BeforeAndAfterAll, FeatureSpec} - -import akka.actor.{ActorRegistry, TypedActor} -import akka.camel._ - -/** - * @author Martin Krasser - */ -class TypedActorComponentFeatureTest extends FeatureSpec with BeforeAndAfterAll with BeforeAndAfterEach { - import TypedActorComponentFeatureTest._ - import CamelContextManager.mandatoryTemplate - - override protected def beforeAll = { - val typedActor = 
TypedActor.newInstance(classOf[SampleTypedActor], classOf[SampleTypedActorImpl]) // not a consumer - val typedConsumer = TypedActor.newInstance(classOf[SampleTypedConsumer], classOf[SampleTypedConsumerImpl]) - - val registry = new SimpleRegistry - // external registration - registry.put("ta", typedActor) - - CamelContextManager.init(new DefaultCamelContext(registry)) - CamelContextManager.mandatoryContext.addRoutes(new CustomRouteBuilder) - CamelContextManager.start - - // Internal registration - CamelContextManager.typedActorRegistry.put("tc", typedConsumer) - } - - override protected def afterAll = { - CamelContextManager.stop - ActorRegistry.shutdownAll - } - - feature("Communicate with an internally-registered typed actor using typed-actor-internal endpoint URIs") { - import TypedActorComponent.InternalSchema - import ExchangePattern._ - - scenario("two-way communication with method returning String") { - val result1 = mandatoryTemplate.requestBodyAndHeader("%s:tc?method=m2" format InternalSchema, "x", "test", "y") - val result2 = mandatoryTemplate.requestBodyAndHeader("%s:tc?method=m4" format InternalSchema, "x", "test", "y") - assert(result1 === "m2: x y") - assert(result2 === "m4: x y") - } - - scenario("two-way communication with method returning void") { - val result = mandatoryTemplate.requestBodyAndHeader("%s:tc?method=m5" format InternalSchema, "x", "test", "y") - assert(result === "x") // returns initial body - } - - scenario("one-way communication with method returning String") { - val result = mandatoryTemplate.send("%s:tc?method=m2" format InternalSchema, InOnly, new Processor { - def process(exchange: Exchange) = { - exchange.getIn.setBody("x") - exchange.getIn.setHeader("test", "y") - } - }); - assert(result.getPattern === InOnly) - assert(result.getIn.getBody === "m2: x y") - assert(result.getOut.getBody === null) - } - - scenario("one-way communication with method returning void") { - val result = mandatoryTemplate.send("%s:tc?method=m5" format InternalSchema, InOnly, new Processor { - def process(exchange: Exchange) = { - exchange.getIn.setBody("x") - exchange.getIn.setHeader("test", "y") - } - }); - assert(result.getPattern === InOnly) - assert(result.getIn.getBody === "x") - assert(result.getOut.getBody === null) - } - - } - - feature("Communicate with an internally-registered typed actor using typed-actor endpoint URIs") { - scenario("communication not possible") { - intercept[ResolveEndpointFailedException] { - mandatoryTemplate.requestBodyAndHeader("typed-actor:tc?method=m2", "x", "test", "y") - } - } - } - - feature("Communicate with an externally-registered typed actor using typed-actor endpoint URIs") { - scenario("two-way communication with method returning String") { - val result = mandatoryTemplate.requestBody("typed-actor:ta?method=foo", "test") - assert(result === "foo: test") - } - - scenario("two-way communication with method returning String via custom route") { - val result = mandatoryTemplate.requestBody("direct:test", "test") - assert(result === "foo: test") - } - } -} - -object TypedActorComponentFeatureTest { - class CustomRouteBuilder extends RouteBuilder { - def configure = { - from("direct:test").to("typed-actor:ta?method=foo") - } - } -} diff --git a/akka-camel/src/test/scala/akka/support/TestSupport.scala b/akka-camel/src/test/scala/akka/support/TestSupport.scala deleted file mode 100644 index 4744d774f5..0000000000 --- a/akka-camel/src/test/scala/akka/support/TestSupport.scala +++ /dev/null @@ -1,81 +0,0 @@ -package akka.camel.support - 
-import java.util.concurrent.{TimeUnit, CountDownLatch} - -import collection.mutable.Buffer - -import akka.camel.Message -import akka.actor.Actor - -import TestSupport._ - -object TestSupport { - type Handler = PartialFunction[Any, Any] -} - -trait TestActor extends Actor { - def receive = { - case msg => { - handler(msg) - } - } - - def handler: Handler -} - -class Tester1 extends TestActor with Retain with Countdown { - def handler = retain andThen countdown -} - -class Tester2 extends TestActor with Respond { - def handler = respond -} - -class Tester3 extends TestActor with Noop { - self.timeout = 1 - def handler = noop -} - -trait Countdown { this: Actor => - var latch: CountDownLatch = new CountDownLatch(0) - def countdown: Handler = { - case SetExpectedMessageCount(num) => { - latch = new CountDownLatch(num) - self.reply(latch) - } - case msg => latch.countDown - } -} - -trait Respond { this: Actor => - def respond: Handler = { - case msg: Message => self.reply(response(msg)) - } - - def response(msg: Message): Any = "Hello %s" format msg.body -} - -trait Retain { this: Actor => - val messages = Buffer[Any]() - - def retain: Handler = { - case GetRetainedMessage => self.reply(messages.last) - case GetRetainedMessages(p) => self.reply(messages.toList.filter(p)) - case msg => { - messages += msg - msg - } - } -} - -trait Noop { this: Actor => - def noop: Handler = { - case msg => msg - } -} - -case class SetExpectedMessageCount(num: Int) -case class GetRetainedMessage() -case class GetRetainedMessages(p: Any => Boolean) { - def this() = this(_ => true) -} diff --git a/akka-http/src/main/scala/DefaultAkkaLoader.scala b/akka-http/src/main/scala/DefaultAkkaLoader.scala index b0bc3ad020..0e032f184d 100644 --- a/akka-http/src/main/scala/DefaultAkkaLoader.scala +++ b/akka-http/src/main/scala/DefaultAkkaLoader.scala @@ -6,16 +6,12 @@ package akka.http import akka.config.Config import akka.util.{Logging, Bootable} -import akka.camel.CamelService import akka.remote.BootableRemoteActorService import akka.actor.BootableActorLoaderService import akka.servlet.AkkaLoader class DefaultAkkaLoader extends AkkaLoader { - def boot(): Unit = boot(true, - new EmbeddedAppServer with BootableActorLoaderService - with BootableRemoteActorService - with CamelService) + def boot(): Unit = boot(true, new EmbeddedAppServer with BootableActorLoaderService with BootableRemoteActorService) } diff --git a/akka-http/src/main/scala/Initializer.scala b/akka-http/src/main/scala/Initializer.scala index a470949821..c9eb1f8f67 100644 --- a/akka-http/src/main/scala/Initializer.scala +++ b/akka-http/src/main/scala/Initializer.scala @@ -6,7 +6,6 @@ package akka.servlet import akka.remote.BootableRemoteActorService import akka.actor.BootableActorLoaderService -import akka.camel.CamelService import akka.config.Config import akka.util.{Logging, Bootable} @@ -30,5 +29,5 @@ class Initializer extends ServletContextListener { loader.shutdown def contextInitialized(e: ServletContextEvent): Unit = - loader.boot(true, new BootableActorLoaderService with BootableRemoteActorService with CamelService) + loader.boot(true, new BootableActorLoaderService with BootableRemoteActorService) } diff --git a/akka-jta/src/main/scala/akka/AtomikosTransactionService.scala b/akka-jta/src/main/scala/akka/AtomikosTransactionService.scala deleted file mode 100644 index f2af9c01bf..0000000000 --- a/akka-jta/src/main/scala/akka/AtomikosTransactionService.scala +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - 
-package akka.jta - -import javax.transaction.{TransactionManager, SystemException} - -import com.atomikos.icatch.jta.{J2eeTransactionManager, J2eeUserTransaction} -import com.atomikos.icatch.config.{TSInitInfo, UserTransactionService, UserTransactionServiceImp} - -import akka.config.Config._ -import akka.util.Duration - -object AtomikosTransactionService extends AtomikosTransactionService - -/** - * Atomikos implementation of the transaction service trait. - * - * @author Jonas Bonér - */ -class AtomikosTransactionService extends TransactionService with TransactionProtocol { - val JTA_TRANSACTION_TIMEOUT = Duration(config.getInt("akka.jta.timeout", 60), TIME_UNIT) - - private val txService: UserTransactionService = new UserTransactionServiceImp - private val info: TSInitInfo = txService.createTSInitInfo - - val transactionContainer: TransactionContainer = TransactionContainer(Right(Some( - try { - txService.init(info) - val tm: TransactionManager = new J2eeTransactionManager - tm.setTransactionTimeout(JTA_TRANSACTION_TIMEOUT.toSeconds.toInt) - tm - } catch { - case e => throw new SystemException( - "Could not create a new Atomikos J2EE Transaction Manager, due to: " + e.toString) - } - ))) - // TODO: gracefully postStop of the TM - //txService.postStop(false) -} diff --git a/akka-jta/src/main/scala/akka/JTA.scala b/akka-jta/src/main/scala/akka/JTA.scala deleted file mode 100644 index 1f637fc17b..0000000000 --- a/akka-jta/src/main/scala/akka/JTA.scala +++ /dev/null @@ -1,223 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.jta - -import javax.transaction.{TransactionManager, UserTransaction, - Transaction => JtaTransaction, SystemException, - Status, Synchronization, TransactionSynchronizationRegistry} -import javax.naming.{InitialContext, Context, NamingException} - -import akka.config.Config._ -import akka.util.Logging -import akka.stm.Transaction -import akka.AkkaException - -class JtaConfigurationException(message: String) extends AkkaException(message) - -/** - * Detects if there is a UserTransaction or TransactionManager available in the JNDI. - * - * @author Jonas Bonér - */ -object TransactionContainer extends Logging { - val AKKA_JTA_TRANSACTION_SERVICE_CLASS = "akka.jta.AtomikosTransactionService" - val DEFAULT_USER_TRANSACTION_NAME = "java:comp/UserTransaction" - val FALLBACK_TRANSACTION_MANAGER_NAMES = "java:comp/TransactionManager" :: - "java:appserver/TransactionManager" :: - "java:pm/TransactionManager" :: - "java:/TransactionManager" :: Nil - val DEFAULT_TRANSACTION_SYNCHRONIZATION_REGISTRY_NAME = "java:comp/TransactionSynchronizationRegistry" - - val JTA_PROVIDER = config.getString("akka.jta.provider", "from-jndi") - - private var synchronizationRegistry: Option[TransactionSynchronizationRegistry] = None - - def apply(tm: Either[Option[UserTransaction], Option[TransactionManager]]) = new TransactionContainer(tm) - - def apply(): TransactionContainer = - JTA_PROVIDER match { - case "from-jndi" => - new TransactionContainer(findUserTransaction match { - case None => Right(findTransactionManager) - case tm => Left(tm) - }) - case "atomikos" => - try { - Class.forName(AKKA_JTA_TRANSACTION_SERVICE_CLASS) - .newInstance.asInstanceOf[TransactionService] - .transactionContainer - } catch { - case e: ClassNotFoundException => - throw new JtaConfigurationException( - "JTA provider defined as 'atomikos', but the AtomikosTransactionService classes can not be found." 
+ - "\n\tPlease make sure you have 'akka-jta' JAR and its dependencies on your classpath.") - } - case _ => - throw new JtaConfigurationException( - "No UserTransaction or TransactionManager could be found in scope." + - "\n\tEither add 'akka-jta' to the classpath or make sure there is a" + - "\n\tTransactionManager or UserTransaction defined in the JNDI.") - - } - - def findUserTransaction: Option[UserTransaction] = { - val located = createInitialContext.lookup(DEFAULT_USER_TRANSACTION_NAME) - if (located eq null) None - else { - log.info("JTA UserTransaction detected [%s]", located) - Some(located.asInstanceOf[UserTransaction]) - } - } - - def findSynchronizationRegistry: Option[TransactionSynchronizationRegistry] = synchronized { - if (synchronizationRegistry.isDefined) synchronizationRegistry - else { - val located = createInitialContext.lookup(DEFAULT_TRANSACTION_SYNCHRONIZATION_REGISTRY_NAME) - if (located eq null) None - else { - log.info("JTA TransactionSynchronizationRegistry detected [%s]", located) - synchronizationRegistry = Some(located.asInstanceOf[TransactionSynchronizationRegistry]) - synchronizationRegistry - } - } - } - - def findTransactionManager: Option[TransactionManager] = { - val context = createInitialContext - val tms = for { - name <- FALLBACK_TRANSACTION_MANAGER_NAMES - tm = context.lookup(name) - if tm ne null - } yield tm - tms match { - case Nil => None - case tm :: _ => - log.info("JTA TransactionManager detected [%s]", tm) - Some(tm.asInstanceOf[TransactionManager]) - } - } - - private def createInitialContext = new InitialContext(new java.util.Hashtable) -} - -/** - * JTA transaction container holding either a UserTransaction or a TransactionManager. - *

- * The TransactionContainer is created using the factory val container = TransactionContainer() - * - * @author Jonas Bonér - */ -class TransactionContainer private ( - val tm: Either[Option[UserTransaction], Option[TransactionManager]]) extends Logging { - - def registerSynchronization(sync: Synchronization) = { - TransactionContainer.findSynchronizationRegistry match { // try to use SynchronizationRegistry in JNDI - case Some(registry) => - registry.asInstanceOf[TransactionSynchronizationRegistry].registerInterposedSynchronization(sync) - case None => - tm match { - case Right(Some(txMan)) => // try to use TransactionManager - txMan.getTransaction.registerSynchronization(sync) - case _ => - log.warning("Cannot find TransactionSynchronizationRegistry in JNDI, can't register STM synchronization") - } - } - } - - def beginWithStmSynchronization(transaction: Transaction) = { - begin - registerSynchronization(new StmSynchronization(this, transaction)) - } - - def begin = { - TransactionContainer.log.trace("Starting JTA transaction") - tm match { - case Left(Some(userTx)) => userTx.begin - case Right(Some(txMan)) => txMan.begin - case _ => throw new JtaConfigurationException("Does not have a UserTransaction or TransactionManager in scope") - } - } - - def commit = { - TransactionContainer.log.trace("Committing JTA transaction") - tm match { - case Left(Some(userTx)) => userTx.commit - case Right(Some(txMan)) => txMan.commit - case _ => throw new JtaConfigurationException("Does not have a UserTransaction or TransactionManager in scope") - } - } - - def rollback = { - TransactionContainer.log.trace("Aborting JTA transaction") - tm match { - case Left(Some(userTx)) => userTx.rollback - case Right(Some(txMan)) => txMan.rollback - case _ => throw new JtaConfigurationException("Does not have a UserTransaction or TransactionManager in scope") - } - } - - def getStatus = tm match { - case Left(Some(userTx)) => userTx.getStatus - case Right(Some(txMan)) => txMan.getStatus - case _ => throw new JtaConfigurationException("Does not have a UserTransaction or TransactionManager in scope") - } - - def isInExistingTransaction = tm match { - case Left(Some(userTx)) => userTx.getStatus == Status.STATUS_ACTIVE - case Right(Some(txMan)) => txMan.getStatus == Status.STATUS_ACTIVE - case _ => throw new JtaConfigurationException("Does not have a UserTransaction or TransactionManager in scope") - } - - def isRollbackOnly = tm match { - case Left(Some(userTx)) => userTx.getStatus == Status.STATUS_MARKED_ROLLBACK - case Right(Some(txMan)) => txMan.getStatus == Status.STATUS_MARKED_ROLLBACK - case _ => throw new JtaConfigurationException("Does not have a UserTransaction or TransactionManager in scope") - } - - def setRollbackOnly = tm match { - case Left(Some(userTx)) => userTx.setRollbackOnly - case Right(Some(txMan)) => txMan.setRollbackOnly - case _ => throw new JtaConfigurationException("Does not have a UserTransaction or TransactionManager in scope") - } - - def suspend = tm match { - case Right(Some(txMan)) => txMan.suspend - case _ => throw new JtaConfigurationException("Does not have a TransactionManager in scope") - } - - def resume(tx: JtaTransaction) = tm match { - case Right(Some(txMan)) => txMan.resume(tx) - case _ => throw new JtaConfigurationException("Does not have a TransactionManager in scope") - } -} - -/** - * STM Synchronization class for synchronizing with the JTA TransactionManager. 
- * - * @author Jonas Bonér - */ -class StmSynchronization(tc: TransactionContainer, tx: Transaction) extends Synchronization with Logging { - def beforeCompletion = { - val status = tc.getStatus - if (status != Status.STATUS_ROLLEDBACK && - status != Status.STATUS_ROLLING_BACK && - status != Status.STATUS_MARKED_ROLLBACK) { - log.debug("JTA transaction has failed, abort STM transaction") - tx.transaction.foreach(_.abort) // abort multiverse tx - } - } - - def afterCompletion(status: Int) = {} -} - -/** - * JTA Transaction service. - * - * @author Jonas Bonér - */ -trait TransactionService { - def transactionContainer: TransactionContainer -} - diff --git a/akka-jta/src/main/scala/akka/TransactionContext.scala b/akka-jta/src/main/scala/akka/TransactionContext.scala deleted file mode 100644 index ca92e5aa75..0000000000 --- a/akka-jta/src/main/scala/akka/TransactionContext.scala +++ /dev/null @@ -1,238 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.jta - -import javax.transaction.{Transaction, Status, TransactionManager, Synchronization} - -import akka.util.Logging -import akka.config.Config._ - -/** - * The TransactionContext object manages the transactions. - * Can be used as higher-order functional 'atomic blocks' or monadic. - * - * Manages a thread-local stack of TransactionContexts. - *

- * Example usage 1:
- *
- * import TransactionContext._
- *
- * withTxRequired {
- *   ... // transactional stuff
- * }
- * // or
- * withTxRequiresNew {
- *   ... // transactional stuff
- * }
- *
- * Example usage 2:
- *
- * for {
- *   ctx <- TransactionContext.Required
- *   entity <- updatedEntities
- *   if !ctx.isRollbackOnly
- * } {
- *   // transactional stuff
- *   ...
- * }
- *
- * Example usage 3:
- *
- * val users = for {
- *   ctx <- TransactionContext.Required
- *   name <- userNames
- * } yield {
- *   // transactional stuff
- *   ...
- * }
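Because Required, RequiresNew and the other context objects implement map, flatMap and foreach, the Scala compiler desugars these for-comprehensions into plain method calls. As a rough sketch (userNames and the yielded body are placeholders carried over from Example usage 3 above), the third example expands to:

```scala
// Approximate desugaring of Example usage 3: flatMap on
// TransactionContext.Required runs the entire nested computation
// inside a single REQUIRED transaction.
val users = TransactionContext.Required.flatMap { ctx =>
  userNames.map { name =>
    // transactional stuff
    ...
  }
}
```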
- * - * @author Jonas Bonér - */ -object TransactionContext extends TransactionProtocol with Logging { - implicit val tc = TransactionContainer() - - private[TransactionContext] val stack = new scala.util.DynamicVariable(new TransactionContext(tc)) - - /** - * This method can be used to register a Synchronization instance for participating in the JTA transaction. - * Here is an example of how to add a JPA EntityManager integration. - *
-   *   TransactionContext.registerSynchronization(new javax.transaction.Synchronization() {
-   *     def beforeCompletion = {
-   *       try {
-   *         val status = tm.getStatus
-   *         if (status != Status.STATUS_ROLLEDBACK &&
-   *             status != Status.STATUS_ROLLING_BACK &&
-   *             status != Status.STATUS_MARKED_ROLLBACK) {
-   *           log.debug("Flushing EntityManager...")
-   *           em.flush // flush EntityManager on success
-   *         }
-   *       } catch {
-   *         case e: javax.transaction.SystemException => throw new RuntimeException(e)
-   *       }
-   *     }
-   *
-   *     def afterCompletion(status: Int) = {
-   *       val status = tm.getStatus
-   *       if (closeAtTxCompletion) em.close
-   *       if (status == Status.STATUS_ROLLEDBACK ||
-   *           status == Status.STATUS_ROLLING_BACK ||
-   *           status == Status.STATUS_MARKED_ROLLBACK) {
-   *         em.close
-   *       }
-   *     }
-   *   })
-   * 
- * You should also override the 'joinTransaction' and 'handleException' methods. - * See ScalaDoc for these methods in the 'TransactionProtocol' for details. - */ - def registerSynchronization(sync: Synchronization) = synchronization.add(sync) - - /** - * Registers a join transaction function. - *

- * Here is an example of how to integrate with the JPA EntityManager. - * - *

-   * TransactionContext.registerJoinTransactionFun(() => {
-   *   val em: EntityManager = ... // get the EntityManager
-   *   em.joinTransaction // join JTA transaction
-   * })
-   * 
- */ - def registerJoinTransactionFun(fn: () => Unit) = joinTransactionFuns.add(fn) - - /** - * Handles an exception. Can be overridden by a concrete transaction service implementation. - *

- * Here is an example of how to handle JPA exceptions. - * - *

-   * TransactionContext.registerExceptionNotToRollbackOn(classOf[NoResultException])
-   * TransactionContext.registerExceptionNotToRollbackOn(classOf[NonUniqueResultException])
-   * 
- */ - def registerExceptionNotToRollbackOn(e: Class[_ <: Exception]) = exceptionsNotToRollbackOn.add(e) - - object Required extends TransactionMonad { - def map[T](f: TransactionMonad => T): T = withTxRequired { f(this) } - def flatMap[T](f: TransactionMonad => T): T = withTxRequired { f(this) } - def foreach(f: TransactionMonad => Unit): Unit = withTxRequired { f(this) } - } - - object RequiresNew extends TransactionMonad { - def map[T](f: TransactionMonad => T): T = withTxRequiresNew { f(this) } - def flatMap[T](f: TransactionMonad => T): T = withTxRequiresNew { f(this) } - def foreach(f: TransactionMonad => Unit): Unit = withTxRequiresNew { f(this) } - } - - object Supports extends TransactionMonad { - def map[T](f: TransactionMonad => T): T = withTxSupports { f(this) } - def flatMap[T](f: TransactionMonad => T): T = withTxSupports { f(this) } - def foreach(f: TransactionMonad => Unit): Unit = withTxSupports { f(this) } - } - - object Mandatory extends TransactionMonad { - def map[T](f: TransactionMonad => T): T = withTxMandatory { f(this) } - def flatMap[T](f: TransactionMonad => T): T = withTxMandatory { f(this) } - def foreach(f: TransactionMonad => Unit): Unit = withTxMandatory { f(this) } - } - - object Never extends TransactionMonad { - def map[T](f: TransactionMonad => T): T = withTxNever { f(this) } - def flatMap[T](f: TransactionMonad => T): T = withTxNever { f(this) } - def foreach(f: TransactionMonad => Unit): Unit = withTxNever { f(this) } - } - - object NoOpTransactionMonad extends TransactionMonad { - def map[T](f: TransactionMonad => T): T = f(this) - def flatMap[T](f: TransactionMonad => T): T = f(this) - def foreach(f: TransactionMonad => Unit): Unit = f(this) - override def filter(f: TransactionMonad => Boolean): TransactionMonad = this - } - - private[jta] def setRollbackOnly = current.setRollbackOnly - - private[jta] def isRollbackOnly = current.isRollbackOnly - - private[jta] def getTransactionContainer: TransactionContainer = current.getTransactionContainer - - private[this] def current = stack.value - - /** - * Continues with the invocation defined in 'body' with the brand new context define in 'newCtx', the old - * one is put on the stack and will automatically come back in scope when the method exits. - *

- * Suspends and resumes the current JTA transaction. - */ - private[jta] def withNewContext[T](body: => T): T = { - val suspendedTx: Option[Transaction] = - if (getTransactionContainer.isInExistingTransaction) { - log.debug("Suspending TX") - Some(getTransactionContainer.suspend) - } else None - val result = stack.withValue(new TransactionContext(tc)) { body } - if (suspendedTx.isDefined) { - log.debug("Resuming TX") - getTransactionContainer.resume(suspendedTx.get) - } - result - } -} - -/** - * Base monad for the transaction monad implementations. - * - * @author Jonas Bonér - */ -trait TransactionMonad { - - // ----------------------------- - // Monadic definitions - // ----------------------------- - - def map[T](f: TransactionMonad => T): T - def flatMap[T](f: TransactionMonad => T): T - def foreach(f: TransactionMonad => Unit): Unit - def filter(f: TransactionMonad => Boolean): TransactionMonad = - if (f(this)) this else TransactionContext.NoOpTransactionMonad - - // ----------------------------- - // JTA Transaction definitions - // ----------------------------- - - /** - * Marks the current transaction as doomed. - */ - def setRollbackOnly = TransactionContext.setRollbackOnly - - /** - * Marks the current transaction as doomed. - */ - def doom = TransactionContext.setRollbackOnly - - /** - * Checks if the current transaction is doomed. - */ - def isRollbackOnly = TransactionContext.isRollbackOnly - - /** - * Checks that the current transaction is NOT doomed. - */ - def isNotDoomed = !TransactionContext.isRollbackOnly -} - -/** - * Transaction context, holds the EntityManager and the TransactionManager. - * - * @author Jonas Bonér - */ -class TransactionContext(val tc: TransactionContainer) { - def registerSynchronization(sync: Synchronization) = TransactionContext.registerSynchronization(sync) - def setRollbackOnly = tc.setRollbackOnly - def isRollbackOnly: Boolean = tc.getStatus == Status.STATUS_MARKED_ROLLBACK - def getTransactionContainer: TransactionContainer = tc -} diff --git a/akka-jta/src/main/scala/akka/TransactionProtocol.scala b/akka-jta/src/main/scala/akka/TransactionProtocol.scala deleted file mode 100644 index 11965df9e6..0000000000 --- a/akka-jta/src/main/scala/akka/TransactionProtocol.scala +++ /dev/null @@ -1,227 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.jta - -import akka.util.Logging - -import java.util.{List => JList} -import java.util.concurrent.CopyOnWriteArrayList - -import javax.naming.{NamingException, Context, InitialContext} -import javax.transaction.{ - Transaction, - UserTransaction, - TransactionManager, - Status, - RollbackException, - SystemException, - Synchronization, - TransactionRequiredException -} - -/** - *

- * Trait that implements a JTA transaction service that obeys the transaction semantics defined
- * in the transaction attribute types for the transacted methods, according to the EJB 3 draft specification.
- * The aspect handles UserTransaction and TransactionManager instance variable injection through @javax.ejb.Inject
- * (name subject to change as per the EJB 3 spec) and method transaction levels through @javax.ejb.TransactionAttribute.
- *
- * This trait should be inherited to implement the getTransactionManager() method, which should return a concrete
- * javax.transaction.TransactionManager implementation (from JNDI lookup etc.).
- *
- * Transaction attribute semantics
- * (From http://www.kevinboone.com/ejb-transactions.html)
- *
- * Required:
- * 'Required' is probably the best choice (at least initially) for an EJB method that will need to be transactional. In this case, if the method's caller is already part of a transaction, then the EJB method does not create a new transaction, but continues in the same transaction as its caller. If the caller is not in a transaction, then a new transaction is created for the EJB method. If something happens in the EJB that means that a rollback is required, then the extent of the rollback will include everything done in the EJB method, whatever the condition of the caller. If the caller was in a transaction, then everything done by the caller will be rolled back as well. Thus the 'Required' attribute ensures that any work done by the EJB will be rolled back if necessary, and if the caller requires a rollback that too will be rolled back.
- *
- * RequiresNew:
- * 'RequiresNew' will be appropriate if you want to ensure that the EJB method is rolled back if necessary, but you don't want the rollback to propagate back to the caller. This attribute results in the creation of a new transaction for the method, regardless of the transactional state of the caller. If the caller was operating in a transaction, then its transaction is suspended until the EJB method completes. Because a new transaction is always created, there may be a slight performance penalty if this attribute is over-used.
- *
- * Mandatory:
- * With the 'Mandatory' attribute, the EJB method will not even start unless its caller is in a transaction; it will throw a TransactionRequiredException instead. If the method does start, then it will become part of the transaction of the caller. So if the EJB method signals a failure, the caller will be rolled back as well as the EJB.
- *
- * Supports:
- * With this attribute, the EJB method does not care about the transactional context of its caller. If the caller is part of a transaction, then the EJB method will be part of the same transaction. If the EJB method fails, the transaction will roll back. If the caller is not part of a transaction, then the EJB method will still operate, but a failure will not cause anything to roll back. 'Supports' is probably the attribute that leads to the fastest method call (as there is no transactional overhead), but it can lead to unpredictable results. If you want a method to be isolated from transactions, that is, to have no effect on the transaction of its caller, then use 'NotSupported' instead.
- *
- * NotSupported:
- * With the 'NotSupported' attribute, the EJB method will never take part in a transaction. If the caller is part of a transaction, then the caller's transaction is suspended. If the EJB method fails, there will be no effect on the caller's transaction, and no rollback will occur. Use this attribute if you want to ensure that the EJB method will not cause a rollback in its caller. This is appropriate if, for example, the method does something non-essential, such as logging a message. It would not be helpful if the failure of this operation caused a transaction rollback.
- *
- * Never:
- * The 'Never' attribute will ensure that the EJB method is never called by a transactional caller. Any attempt to do so will result in a RemoteException being thrown. This attribute is probably less useful than 'NotSupported', in that 'NotSupported' will assure that the caller's transaction is never affected by the EJB method (just as 'Never' does), but will allow a call from a transactional caller if necessary.
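To make these semantics concrete, here is a minimal sketch composing the withTxRequired and withTxRequiresNew combinators defined in this trait; debit, credit and writeAuditLog are hypothetical helpers used only for illustration:

```scala
// A minimal sketch, assuming hypothetical debit/credit/writeAuditLog helpers:
// withTxRequired joins the caller's transaction (or starts one), while
// withTxRequiresNew suspends it and runs the audit write in its own
// transaction, so the audit record is kept even if the transfer rolls back.
def transferWithAudit(): Unit = withTxRequired {
  debit()                 // part of the surrounding REQUIRED transaction
  withTxRequiresNew {
    writeAuditLog()       // commits independently of the outer transaction
  }
  credit()                // back in the original transaction
}
```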
- * - * @author Jonas Bonér - */ -trait TransactionProtocol extends Logging { - - protected val synchronization: JList[Synchronization] = new CopyOnWriteArrayList[Synchronization] - protected val joinTransactionFuns: JList[() => Unit] = new CopyOnWriteArrayList[() => Unit] - protected val exceptionsNotToRollbackOn: JList[Class[_ <: Exception]] = new CopyOnWriteArrayList[Class[_ <: Exception]] - - def joinTransaction(): Unit = { - val it = joinTransactionFuns.iterator - while (it.hasNext) { - val fn = it.next - fn() - } - } - - def handleException(tm: TransactionContainer, e: Exception) = { - var rollback = true - val it = exceptionsNotToRollbackOn.iterator // check the registered exception classes - while (it.hasNext) { - val exceptionClass = it.next - if (exceptionClass.isAssignableFrom(e.getClass)) - rollback = false - } - if (rollback) tm.setRollbackOnly - throw e - } - - /** - * Wraps body in a transaction with REQUIRED semantics. - *

- * Creates a new transaction if no transaction is active in scope, else joins the outer transaction. - */ - def withTxRequired[T](body: => T): T = { - val tm = TransactionContext.getTransactionContainer - if (!isInExistingTransaction(tm)) { - tm.begin - registerSynchronization - try { - joinTransaction - body - } catch { - case e: Exception => handleException(tm, e) - } finally { - commitOrRollBack(tm) - } - } else body - } - - /** - * Wraps body in a transaction with REQUIRES_NEW semantics. - *

- * Suspends the existing transaction, starts a new transaction, invokes body, - * commits or rolls back the new transaction, and finally resumes the previous transaction. - */ - def withTxRequiresNew[T](body: => T): T = TransactionContext.withNewContext { - val tm = TransactionContext.getTransactionContainer - tm.begin - registerSynchronization - try { - joinTransaction - body - } catch { - case e: Exception => handleException(tm, e) - } finally { - commitOrRollBack(tm) - } - } - - /** - * Wraps body in a transaction with NOT_SUPPORTED semantics. - *

- * Suspends existing transaction, invokes body, resumes transaction. - */ - def withTxNotSupported[T](body: => T): T = TransactionContext.withNewContext { - body - } - - /** - * Wraps body in a transaction with SUPPORTS semantics. - *

- * Basically a no-op: body simply runs in whatever transaction is currently active, if any. - */ - def withTxSupports[T](body: => T): T = { - // attach to current if exists else skip -> do nothing - body - } - - /** - * Wraps body in a transaction with MANDATORY semantics. - *

- * Throws a TransactionRequiredException if there is no transaction active in scope. - */ - def withTxMandatory[T](body: => T): T = { - if (!isInExistingTransaction(TransactionContext.getTransactionContainer)) - throw new TransactionRequiredException("No active TX at method with TX type set to MANDATORY") - body - } - - /** - * Wraps body in a transaction with NEVER semantics. - *

- * Throws a SystemException in case of an existing transaction in scope. - */ - def withTxNever[T](body: => T): T = { - if (isInExistingTransaction(TransactionContext.getTransactionContainer)) - throw new SystemException("Detected active TX at method with TX type set to NEVER") - body - } - - protected def commitOrRollBack(tm: TransactionContainer) = { - if (isInExistingTransaction(tm)) { - if (isRollbackOnly(tm)) { - log.debug("Rolling back TX marked as ROLLBACK_ONLY") - tm.rollback - } else { - log.debug("Committing TX") - tm.commit - } - } - } - - // --------------------------- - // Helper methods - // --------------------------- - - protected def registerSynchronization = { - val it = synchronization.iterator - while (it.hasNext) TransactionContext.getTransactionContainer.registerSynchronization(it.next) - } - /** - * Checks if a transaction is an existing transaction. - * - * @param tm the transaction manager - * @return boolean - */ - protected def isInExistingTransaction(tm: TransactionContainer): Boolean = - tm.getStatus != Status.STATUS_NO_TRANSACTION - - /** - * Checks if current transaction is set to rollback only. - * - * @param tm the transaction manager - * @return boolean - */ - protected def isRollbackOnly(tm: TransactionContainer): Boolean = - tm.getStatus == Status.STATUS_MARKED_ROLLBACK - - /** - * A ThreadLocal variable where to store suspended TX and enable pay as you go - * before advice - after advice data sharing in a specific case of requiresNew TX - */ - private val suspendedTx = new ThreadLocal[Transaction] { - override def initialValue = null - } - - private def storeInThreadLocal(tx: Transaction) = suspendedTx.set(tx) - - private def fetchFromThreadLocal: Option[Transaction] = { - if ((suspendedTx ne null) && (suspendedTx.get() ne null)) Some(suspendedTx.get.asInstanceOf[Transaction]) - else None - } -} diff --git a/akka-jta/src/test/scala/ReflectiveAccessSpec.scala b/akka-jta/src/test/scala/ReflectiveAccessSpec.scala deleted file mode 100644 index 76bd83e15c..0000000000 --- a/akka-jta/src/test/scala/ReflectiveAccessSpec.scala +++ /dev/null @@ -1,16 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.jta - -import org.scalatest.junit.JUnitSuite -import org.junit.Test -import akka.stm.ReflectiveJtaModule - -class ReflectiveAccessSpec extends JUnitSuite { - @Test def ensureReflectiveAccessCanLoadTransactionContainer { - ReflectiveJtaModule.ensureJtaEnabled - assert(ReflectiveJtaModule.transactionContainerObjectInstance.isDefined) - } -} diff --git a/akka-karaf/akka-features/src/main/resources/features.xml b/akka-karaf/akka-features/src/main/resources/features.xml deleted file mode 100644 index ad96d7bf05..0000000000 --- a/akka-karaf/akka-features/src/main/resources/features.xml +++ /dev/null @@ -1,22 +0,0 @@ - - - - - mvn:com.weiglewilczek.scala-lang-osgi/scala-library/2.8.0.RC2 - mvn:org.eclipse.scalamodules/scalamodules-core/2.0-M2 - - - - - mvn:akka.akka-wrap/dispatch-json_2.8.0.RC3_osgi/0.7.4 - mvn:org.objenesis/objenesis/1.2 - mvn:sjson.json/sjson/0.6-SNAPSHOT - - - - sjson - mvn:akka.akka-wrap/jgroups-wrapper_2.8.0.RC3_osgi/2.9.0.GA - mvn:org.jboss.netty/netty/3.2.0.CR1 - mvn:akka/akka-remote_2.8.0.RC3_osgi/0.9 - - diff --git a/akka-kernel/src/main/scala/akka/Kernel.scala b/akka-kernel/src/main/scala/akka/Kernel.scala deleted file mode 100644 index 342078913b..0000000000 --- a/akka-kernel/src/main/scala/akka/Kernel.scala +++ /dev/null @@ -1,25 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package 
akka.kernel - -import akka.http.{ EmbeddedAppServer, DefaultAkkaLoader } -import akka.remote.BootableRemoteActorService - -object Main { - def main(args: Array[String]) = Kernel.boot -} - -/** - * The Akka Kernel is used to start and stop Akka in standalone/kernel mode. - * - * @author Jonas Bonér - */ -object Kernel extends DefaultAkkaLoader { - //For testing purposes only - def startRemoteService(): Unit = bundles.foreach( _ match { - case x: BootableRemoteActorService => x.startRemoteService - case _ => - }) -} diff --git a/akka-persistence/akka-persistence-cassandra/src/main/scala/akka/CassandraSession.scala b/akka-persistence/akka-persistence-cassandra/src/main/scala/akka/CassandraSession.scala deleted file mode 100644 index b8474812ab..0000000000 --- a/akka-persistence/akka-persistence-cassandra/src/main/scala/akka/CassandraSession.scala +++ /dev/null @@ -1,199 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.cassandra - -import java.io.{Flushable, Closeable} - -import akka.persistence.common._ -import akka.util.Logging -import akka.util.Helpers._ -import akka.serialization.Serializer -import akka.config.Config.config - -import scala.collection.mutable.Map - -import org.apache.cassandra.db.ColumnFamily -import org.apache.cassandra.thrift._ - -import org.apache.thrift.transport._ -import org.apache.thrift.protocol._ - -/** - * @author Jonas Bonér - */ -trait CassandraSession extends Closeable with Flushable { - import scala.collection.JavaConversions._ - import java.util.{Map => JMap, List => JList} - - val client: Cassandra.Client - val keyspace: String - - val obtainedAt: Long - val consistencyLevel: ConsistencyLevel - val schema: JMap[String, JMap[String, String]] - - /** - * Count is always the max number of results to return. - - So it means, starting with `start`, or the first one if start is - empty, go until you hit `finish` or `count`, whichever comes first. - Empty is not a legal column name so if finish is empty it is ignored - and only count is used. - - We don't offer a numeric offset since that can't be supported - efficiently with a log-structured merge disk format.
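-
- A worked example (editor's note, not part of the original comment): with
- columns [a, b, c, d] stored under one key, a slice with start = "b",
- finish = "" and count = 2 returns [b, c]: finish is empty, so it is
- ignored and count caps the result. The operator methods defined below
- map onto the Thrift calls as follows: / = get_slice, | = get,
- |# = get_count, ++| = insert/batch_insert, -- = remove.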
- */ - - // ==================================== - // ====== Scala-style API names - // ==================================== - - def /(key: String, columnParent: ColumnParent, start: Array[Byte], end: Array[Byte], ascending: Boolean, count: Int): List[ColumnOrSuperColumn] = - /(key, columnParent, start, end, ascending, count, consistencyLevel) - - def /(key: String, columnParent: ColumnParent, start: Array[Byte], end: Array[Byte], ascending: Boolean, count: Int, consistencyLevel: ConsistencyLevel): List[ColumnOrSuperColumn] = { - val slicePredicate = new SlicePredicate - slicePredicate.setSlice_range(new SliceRange(start, end, ascending, count)) - client.get_slice(keyspace, key, columnParent, slicePredicate, consistencyLevel).toList - } - - def /(key: String, columnParent: ColumnParent, slicePredicate: SlicePredicate): List[ColumnOrSuperColumn] = - client.get_slice(keyspace, key, columnParent, slicePredicate, consistencyLevel).toList - - def /(key: String, columnParent: ColumnParent, slicePredicate: SlicePredicate, consistencyLevel: ConsistencyLevel): List[ColumnOrSuperColumn] = - client.get_slice(keyspace, key, columnParent, slicePredicate, consistencyLevel).toList - - def |(key: String, colPath: ColumnPath): Option[ColumnOrSuperColumn] = - |(key, colPath, consistencyLevel) - - def |(key: String, colPath: ColumnPath, consistencyLevel: ConsistencyLevel): Option[ColumnOrSuperColumn] = - client.get(keyspace, key, colPath, consistencyLevel) - - def |#(key: String, columnParent: ColumnParent): Int = - |#(key, columnParent, consistencyLevel) - - def |#(key: String, columnParent: ColumnParent, consistencyLevel: ConsistencyLevel): Int = - client.get_count(keyspace, key, columnParent, consistencyLevel) - - def ++|(key: String, colPath: ColumnPath, value: Array[Byte]): Unit = - ++|(key, colPath, value, obtainedAt, consistencyLevel) - - def ++|(key: String, colPath: ColumnPath, value: Array[Byte], consistencyLevel: ConsistencyLevel): Unit = - ++|(key, colPath, value, obtainedAt, consistencyLevel) - - def ++|(key: String, colPath: ColumnPath, value: Array[Byte], timestamp: Long): Unit = - ++|(key, colPath, value, timestamp, consistencyLevel) - - def ++|(key: String, colPath: ColumnPath, value: Array[Byte], timestamp: Long, consistencyLevel: ConsistencyLevel) = - client.insert(keyspace, key, colPath, value, timestamp, consistencyLevel) - - def ++|(key: String, batch: Map[String, List[ColumnOrSuperColumn]]): Unit = - ++|(key, batch, consistencyLevel) - - def ++|(key: String, batch: Map[String, List[ColumnOrSuperColumn]], consistencyLevel: ConsistencyLevel): Unit = { - val jmap = new java.util.HashMap[String, JList[ColumnOrSuperColumn]] - for (entry <- batch; (key, value) = entry) jmap.put(key, new java.util.ArrayList(value)) - client.batch_insert(keyspace, key, jmap, consistencyLevel) - } - - def --(key: String, columnPath: ColumnPath, timestamp: Long): Unit = - --(key, columnPath, timestamp, consistencyLevel) - - def --(key: String, columnPath: ColumnPath, timestamp: Long, consistencyLevel: ConsistencyLevel): Unit = - client.remove(keyspace, key, columnPath, timestamp, consistencyLevel) - - // ==================================== - // ====== Java-style API names - // ==================================== - - def getSlice(key: String, columnParent: ColumnParent, start: Array[Byte], end: Array[Byte], ascending: Boolean, count: Int) = / (key, columnParent, start, end, ascending, count, consistencyLevel) - - def getSlice(key: String, columnParent: ColumnParent, start: Array[Byte], end: Array[Byte], 
ascending: Boolean, count: Int, consistencyLevel: ConsistencyLevel) = / (key, columnParent, start, end, ascending, count, consistencyLevel) - - def getSlice(key: String, columnParent: ColumnParent, slicePredicate: SlicePredicate) = / (key, columnParent, slicePredicate) - - def getSlice(key: String, columnParent: ColumnParent, slicePredicate: SlicePredicate, consistencyLevel: ConsistencyLevel) = / (key, columnParent, slicePredicate, consistencyLevel) - - - def get(key: String, colPath: ColumnPath) = |(key, colPath) - - def get(key: String, colPath: ColumnPath, consistencyLevel: ConsistencyLevel) = |(key, colPath, consistencyLevel) - - def getCount(key: String, columnParent: ColumnParent)= |#(key, columnParent) - - def getCount(key: String, columnParent: ColumnParent, consistencyLevel: ConsistencyLevel) = |#(key, columnParent, consistencyLevel) - - - def insert(key: String, colPath: ColumnPath, value: Array[Byte]): Unit = ++|(key, colPath, value) - - def insert(key: String, colPath: ColumnPath, value: Array[Byte], consistencyLevel: ConsistencyLevel): Unit = ++|(key, colPath, value, consistencyLevel) - - def insert(key: String, colPath: ColumnPath, value: Array[Byte], timestamp: Long): Unit = ++|(key, colPath, value, timestamp) - - def insert(key: String, colPath: ColumnPath, value: Array[Byte], timestamp: Long, consistencyLevel: ConsistencyLevel) = ++|(key, colPath, value, timestamp, consistencyLevel) - - def insert(key: String, batch: Map[String, List[ColumnOrSuperColumn]]): Unit = ++|(key, batch) - - def insert(key: String, batch: Map[String, List[ColumnOrSuperColumn]], consistencyLevel: ConsistencyLevel): Unit = ++|(key, batch, consistencyLevel) - - def remove(key: String, columnPath: ColumnPath, timestamp: Long): Unit = --(key, columnPath, timestamp) - - def remove(key: String, columnPath: ColumnPath, timestamp: Long, consistencyLevel: ConsistencyLevel): Unit = --(key, columnPath, timestamp, consistencyLevel) - -} - -class CassandraSessionPool[T <: TTransport]( - space: String, - transportPool: Pool[T], - inputProtocol: Protocol, - outputProtocol: Protocol, - consistency: ConsistencyLevel) extends Closeable with Logging { - - def this(space: String, transportPool: Pool[T], ioProtocol: Protocol, consistency: ConsistencyLevel) = - this (space, transportPool, ioProtocol, ioProtocol, consistency) - - def newSession: CassandraSession = newSession(consistency) - - def newSession(consistencyLevel: ConsistencyLevel): CassandraSession = { - val socket = transportPool.borrowObject - val cassandraClient = new Cassandra.Client(inputProtocol(socket), outputProtocol(socket)) - val cassandraSchema = cassandraClient.describe_keyspace(space) - new CassandraSession { - val keyspace = space - val client = cassandraClient - val obtainedAt = System.currentTimeMillis - val consistencyLevel = consistency - val schema = cassandraSchema - log.debug("Creating %s", toString) - - def flush = socket.flush - def close = transportPool.returnObject(socket) - override def toString = "[CassandraSession]\n\tkeyspace = " + keyspace + "\n\tschema = " + schema - } - } - - def withSession[T](body: CassandraSession => T) = { - val session = newSession(consistency) - try { - val result = body(session) - session.flush - result - } finally { - session.close - } - } - - def close = transportPool.close -} - -sealed abstract class Protocol(val factory: TProtocolFactory) { - def apply(transport: TTransport) = factory.getProtocol(transport) -} - -object Protocol { - object Binary extends Protocol(new TBinaryProtocol.Factory) - object 
SimpleJSON extends Protocol(new TSimpleJSONProtocol.Factory) - object JSON extends Protocol(new TJSONProtocol.Factory) -} diff --git a/akka-persistence/akka-persistence-cassandra/src/main/scala/akka/CassandraStorage.scala b/akka-persistence/akka-persistence-cassandra/src/main/scala/akka/CassandraStorage.scala deleted file mode 100644 index 166ccbe676..0000000000 --- a/akka-persistence/akka-persistence-cassandra/src/main/scala/akka/CassandraStorage.scala +++ /dev/null @@ -1,59 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.cassandra - -import akka.stm._ -import akka.persistence.common._ -import akka.actor.{newUuid} - -object CassandraStorage extends Storage { - type ElementType = Array[Byte] - - def newMap: PersistentMap[ElementType, ElementType] = newMap(newUuid.toString) - def newVector: PersistentVector[ElementType] = newVector(newUuid.toString) - def newRef: PersistentRef[ElementType] = newRef(newUuid.toString) - override def newQueue: PersistentQueue[ElementType] = newQueue(newUuid.toString) - - def getMap(id: String): PersistentMap[ElementType, ElementType] = newMap(id) - def getVector(id: String): PersistentVector[ElementType] = newVector(id) - def getRef(id: String): PersistentRef[ElementType] = newRef(id) - override def getQueue(id: String): PersistentQueue[ElementType] = newQueue(id) - - def newMap(id: String): PersistentMap[ElementType, ElementType] = new CassandraPersistentMap(id) - def newVector(id: String): PersistentVector[ElementType] = new CassandraPersistentVector(id) - def newRef(id: String): PersistentRef[ElementType] = new CassandraPersistentRef(id) - override def newQueue(id: String): PersistentQueue[ElementType] = new CassandraPersistentQueue(id) -} - -/** - * Implements a persistent transactional map based on the Cassandra distributed P2P key-value storage. - * - * @author Jonas Bonér - */ -class CassandraPersistentMap(id: String) extends PersistentMapBinary { - val uuid = id - val storage = CassandraStorageBackend -} - -/** - * Implements a persistent transactional vector based on the Cassandra - * distributed P2P key-value storage. 
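- *
- * Usage sketch (editor's example, mirroring the test spec further below; not
- * part of the original file): the vector is used inside an STM transaction,
- * e.g.
- *
- *   val vector = CassandraStorage.newVector
- *   atomic { vector.add("element".getBytes("UTF-8")) }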
- * - * @author Jonas Bonér - */ -class CassandraPersistentVector(id: String) extends PersistentVector[Array[Byte]] { - val uuid = id - val storage = CassandraStorageBackend -} - -class CassandraPersistentRef(id: String) extends PersistentRef[Array[Byte]] { - val uuid = id - val storage = CassandraStorageBackend -} - -class CassandraPersistentQueue(id: String) extends PersistentQueue[Array[Byte]] { - val uuid = id - val storage = CassandraStorageBackend -} diff --git a/akka-persistence/akka-persistence-cassandra/src/main/scala/akka/CassandraStorageBackend.scala b/akka-persistence/akka-persistence-cassandra/src/main/scala/akka/CassandraStorageBackend.scala deleted file mode 100644 index a835866713..0000000000 --- a/akka-persistence/akka-persistence-cassandra/src/main/scala/akka/CassandraStorageBackend.scala +++ /dev/null @@ -1,161 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.cassandra - -import akka.stm._ -import akka.persistence.common._ -import akka.util.Logging -import akka.util.Helpers._ -import akka.config.Config.config - -import org.apache.cassandra.thrift._ -import java.lang.String -import collection.JavaConversions -import collection.immutable.{TreeMap, Iterable} -import java.util.{Map => JMap, HashMap => JHMap, List => JList, ArrayList => JAList} - -/** - * @author Jonas Bonér - */ - -private[akka] object CassandraStorageBackend extends CommonStorageBackend { - - import CommonStorageBackend._ - - type ElementType = Array[Byte] - - val KEYSPACE = "akka" - val MAP_COLUMN_PARENT = new ColumnParent("map") - val VECTOR_COLUMN_PARENT = new ColumnParent("vector") - val REF_COLUMN_PARENT = new ColumnParent("ref") - val QUEUE_COLUMN_PARENT = new ColumnParent("queue") - val REF_KEY = "item".getBytes("UTF-8") - val EMPTY_BYTE_ARRAY = new Array[Byte](0) - - val CASSANDRA_SERVER_HOSTNAME = config.getString("akka.persistence.cassandra.hostname", "127.0.0.1") - val CASSANDRA_SERVER_PORT = config.getInt("akka.persistence.cassandra.port", 9160) - val CONSISTENCY_LEVEL = { - config.getString("akka.persistence.cassandra.consistency-level", "QUORUM") match { - case "ZERO" => ConsistencyLevel.ZERO - case "ONE" => ConsistencyLevel.ONE - case "QUORUM" => ConsistencyLevel.QUORUM - case "DCQUORUM" => ConsistencyLevel.DCQUORUM - case "DCQUORUMSYNC" => ConsistencyLevel.DCQUORUMSYNC - case "ALL" => ConsistencyLevel.ALL - case "ANY" => ConsistencyLevel.ANY - case unknown => throw new IllegalArgumentException( - "Cassandra consistency level [" + unknown + "] is not supported." 
+ - "\n\tExpected one of [ZERO, ONE, QUORUM, DCQUORUM, DCQUORUMSYNC, ALL, ANY] in the akka.conf configuration file.") - } - } - val IS_ASCENDING = true - - @volatile private[this] var isRunning = false - private[this] val protocol: Protocol = Protocol.Binary - - private[this] val sessions = new CassandraSessionPool( - KEYSPACE, - StackPool(SocketProvider(CASSANDRA_SERVER_HOSTNAME, CASSANDRA_SERVER_PORT)), - protocol, - CONSISTENCY_LEVEL) - - - class CassandraAccess(parent: ColumnParent) extends CommonStorageBackendAccess { - - def path(key: Array[Byte]): ColumnPath = { - new ColumnPath(parent.getColumn_family).setColumn(key) - } - - def delete(owner: String, key: Array[Byte]) = { - sessions.withSession{ - session => { - session -- (owner, path(key), System.currentTimeMillis, CONSISTENCY_LEVEL) - } - } - } - - override def getAll(owner: String, keys: Iterable[Array[Byte]]): Map[Array[Byte], Array[Byte]] = { - sessions.withSession{ - session => { - var predicate = new SlicePredicate().setColumn_names(JavaConversions.asJavaList(keys.toList)) - val cols = session / (owner, parent, predicate, CONSISTENCY_LEVEL) - var map = new TreeMap[Array[Byte], Array[Byte]]()(ordering) - cols.foreach{ - cosc => map += cosc.getColumn.getName -> cosc.getColumn.getValue - } - map - } - } - } - - - def get(owner: String, key: Array[Byte], default: Array[Byte]) = { - sessions.withSession{ - session => { - try - { - session | (owner, path(key), CONSISTENCY_LEVEL) match { - case Some(cosc) => cosc.getColumn.getValue - case None => default - } - } catch { - case e: NotFoundException => default - } - } - } - } - - def put(owner: String, key: Array[Byte], value: Array[Byte]) = { - sessions.withSession{ - session => { - session ++| (owner, path(key), value, System.currentTimeMillis, CONSISTENCY_LEVEL) - } - } - } - - - def drop() = { - sessions.withSession{ - session => { - val slices = session.client.get_range_slices(session.keyspace, parent, - new SlicePredicate().setSlice_range(new SliceRange().setStart(Array.empty[Byte]).setFinish(Array.empty[Byte])), - new KeyRange().setStart_key("").setEnd_key(""), CONSISTENCY_LEVEL) - - val mutations = new JHMap[String, JMap[String, JList[Mutation]]] - JavaConversions.asScalaIterable(slices).foreach{ - keySlice: KeySlice => { - val key = keySlice.getKey - val keyMutations = JavaConversions.asScalaMap(mutations).getOrElse(key, { - val km = new JHMap[String, JList[Mutation]] - mutations.put(key, km) - km - }) - val amutation = new JAList[Mutation] - val cols = new JAList[Array[Byte]] - keyMutations.put(parent.getColumn_family, amutation) - JavaConversions.asScalaIterable(keySlice.getColumns) foreach { - cosc: ColumnOrSuperColumn => { - cols.add(cosc.getColumn.getName) - } - } - amutation.add(new Mutation().setDeletion(new Deletion(System.currentTimeMillis).setPredicate(new SlicePredicate().setColumn_names(cols)))) - - } - } - session.client.batch_mutate(session.keyspace, mutations, CONSISTENCY_LEVEL) - } - } - } - - } - - def queueAccess = new CassandraAccess(QUEUE_COLUMN_PARENT) - - def mapAccess = new CassandraAccess(MAP_COLUMN_PARENT) - - def vectorAccess = new CassandraAccess(VECTOR_COLUMN_PARENT) - - def refAccess = new CassandraAccess(REF_COLUMN_PARENT) -} diff --git a/akka-persistence/akka-persistence-cassandra/src/test/resources/log4j.properties b/akka-persistence/akka-persistence-cassandra/src/test/resources/log4j.properties deleted file mode 100644 index 3c8738fdc3..0000000000 --- a/akka-persistence/akka-persistence-cassandra/src/test/resources/log4j.properties +++ 
/dev/null @@ -1,25 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -log4j.rootLogger=DEBUG,R - -# rolling log file ("system.log -log4j.appender.R=org.apache.log4j.DailyRollingFileAppender -log4j.appender.R.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.R.layout=org.apache.log4j.PatternLayout -log4j.appender.R.layout.ConversionPattern=%5p [%t] %d{ISO8601} %F (line %L) %m%n -log4j.appender.R.File=target/logs/system.log diff --git a/akka-persistence/akka-persistence-cassandra/src/test/resources/storage-conf.xml b/akka-persistence/akka-persistence-cassandra/src/test/resources/storage-conf.xml deleted file mode 100644 index b01e547665..0000000000 --- a/akka-persistence/akka-persistence-cassandra/src/test/resources/storage-conf.xml +++ /dev/null @@ -1,337 +0,0 @@ - - - - - - - - akka - - - false - - - - - - 0.01 - - - - - - - - - - - - org.apache.cassandra.dht.RandomPartitioner - - - - - - org.apache.cassandra.locator.EndPointSnitch - - - org.apache.cassandra.locator.RackUnawareStrategy - - - 1 - - - target/cassandra/commitlog - - target/cassandra/data - - target/cassandra/callouts - target/cassandra/staging - - - - - 127.0.0.1 - - - - - - - 5000 - - 128 - - - - - - localhost - - 7000 - - 7001 - - - localhost - - 9160 - - false - - - - - - - - 64 - - - 32 - 8 - - - 64 - - - 64 - - 0.1 - - 60 - - - 8 - 32 - - - periodic - - 10000 - - - - - 864000 - - - 256 - - diff --git a/akka-persistence/akka-persistence-cassandra/src/test/scala/CassandraPersistentActorSpec.scala b/akka-persistence/akka-persistence-cassandra/src/test/scala/CassandraPersistentActorSpec.scala deleted file mode 100644 index f0284c84d2..0000000000 --- a/akka-persistence/akka-persistence-cassandra/src/test/scala/CassandraPersistentActorSpec.scala +++ /dev/null @@ -1,167 +0,0 @@ -package akka.persistence.cassandra - -import akka.actor.{Actor, ActorRef} -import Actor._ -import akka.stm._ - -import org.junit.Test -import org.junit.Assert._ -import org.junit.Before -import org.scalatest.junit.JUnitSuite - -case class GetMapState(key: String) -case object GetVectorState -case object GetVectorSize -case object GetRefState - -case class SetMapState(key: String, value: String) -case class SetVectorState(key: String) -case class SetRefState(key: String) -case class Success(key: String, value: String) -case class Failure(key: String, value: String) - -case class SetMapStateOneWay(key: String, value: String) -case class SetVectorStateOneWay(key: String) -case class SetRefStateOneWay(key: String) -case class SuccessOneWay(key: String, value: String) -case class FailureOneWay(key: String, value: String) - -class CassandraPersistentActor extends Actor { - self.timeout = 100000 - - private val mapState = CassandraStorage.newMap - private val vectorState = CassandraStorage.newVector - private val refState = 
CassandraStorage.newRef - - def receive = { case message => atomic { atomicReceive(message) } } - - def atomicReceive: Receive = { - case GetMapState(key) => - self.reply(mapState.get(key.getBytes("UTF-8")).get) - case GetVectorSize => - self.reply(vectorState.length.asInstanceOf[AnyRef]) - case GetRefState => - self.reply(refState.get.get) - case SetMapState(key, msg) => - mapState.put(key.getBytes("UTF-8"), msg.getBytes("UTF-8")) - self.reply(msg) - case SetVectorState(msg) => - vectorState.add(msg.getBytes("UTF-8")) - self.reply(msg) - case SetRefState(msg) => - refState.swap(msg.getBytes("UTF-8")) - self.reply(msg) - case Success(key, msg) => - mapState.put(key.getBytes("UTF-8"), msg.getBytes("UTF-8")) - vectorState.add(msg.getBytes("UTF-8")) - refState.swap(msg.getBytes("UTF-8")) - self.reply(msg) - case Failure(key, msg) => - mapState.put(key.getBytes("UTF-8"), msg.getBytes("UTF-8")) - vectorState.add(msg.getBytes("UTF-8")) - refState.swap(msg.getBytes("UTF-8")) - fail - self.reply(msg) - } - - def fail = throw new RuntimeException("Expected exception; to test fault-tolerance") -} - -class CassandraPersistentActorSpec extends JUnitSuite { - - // @Before - // def startCassandra = EmbeddedCassandraService.start - - @Test - def testMapShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { - val stateful = actorOf[CassandraPersistentActor] - stateful.start - stateful !! SetMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init") // set init state - stateful !! Success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired - val result = (stateful !! GetMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess")).as[Array[Byte]].get - assertEquals("new state", new String(result, 0, result.length, "UTF-8")) - } - - @Test - def testMapShouldRollbackStateForStatefulServerInCaseOfFailure = { - val stateful = actorOf[CassandraPersistentActor] - stateful.start - stateful !! SetMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state - try { - stateful !! Failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state") // call failing transactionrequired method - fail("should have thrown an exception") - } catch {case e: RuntimeException => {}} - val result = (stateful !! GetMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure")).as[Array[Byte]].get - assertEquals("init", new String(result, 0, result.length, "UTF-8")) // check that state is == init state - } - - @Test - def testVectorShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { - val stateful = actorOf[CassandraPersistentActor] - stateful.start - stateful !! SetVectorState("init") // set init state - stateful !! Success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired - assertEquals(2, (stateful !! GetVectorSize).get.asInstanceOf[java.lang.Integer].intValue) - } - - @Test - def testVectorShouldRollbackStateForStatefulServerInCaseOfFailure = { - val stateful = actorOf[CassandraPersistentActor] - stateful.start - stateful !! SetVectorState("init") // set init state - try { - stateful !! Failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state") // call failing transactionrequired method - fail("should have thrown an exception") - } catch {case e: RuntimeException => {}} - assertEquals(1, (stateful !! 
GetVectorSize).get.asInstanceOf[java.lang.Integer].intValue) - } - - @Test - def testRefShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { - val stateful = actorOf[CassandraPersistentActor] - stateful.start - stateful !! SetRefState("init") // set init state - stateful !! Success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired - val result = (stateful !! GetRefState).as[Array[Byte]].get - assertEquals("new state", new String(result, 0, result.length, "UTF-8")) - } - - @Test - def testRefShouldRollbackStateForStatefulServerInCaseOfFailure = { - val stateful = actorOf[CassandraPersistentActor] - stateful.start - stateful !! SetRefState("init") // set init state - try { - stateful !! Failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state") // call failing transactionrequired method - fail("should have thrown an exception") - } catch {case e: RuntimeException => {}} - val result = (stateful !! GetRefState).as[Array[Byte]].get - assertEquals("init", new String(result, 0, result.length, "UTF-8")) // check that state is == init state - } - -} - -/* -object EmbeddedCassandraService { - import org.apache.cassandra.thrift.CassandraDaemon - - System.setProperty("storage-config", "src/test/resources"); - - val cassandra = new Runnable { - - val cassandraDaemon = new CassandraDaemon - cassandraDaemon.init(null) - - def run = cassandraDaemon.start - - } - - // spawn cassandra in a new thread - val t = new Thread(cassandra) - t.setDaemon(true) - t.start - - def start(): Unit = {} - -} -*/ diff --git a/akka-persistence/akka-persistence-cassandra/src/test/scala/CassandraStorageBackendCompatibilityTest.scala b/akka-persistence/akka-persistence-cassandra/src/test/scala/CassandraStorageBackendCompatibilityTest.scala deleted file mode 100644 index 500dfc8977..0000000000 --- a/akka-persistence/akka-persistence-cassandra/src/test/scala/CassandraStorageBackendCompatibilityTest.scala +++ /dev/null @@ -1,51 +0,0 @@ -package akka.persistence.cassandra - - -import org.junit.runner.RunWith -import org.scalatest.junit.JUnitRunner -import akka.persistence.common.{QueueStorageBackendTest, VectorStorageBackendTest, MapStorageBackendTest, RefStorageBackendTest} - -@RunWith(classOf[JUnitRunner]) -class CassandraRefStorageBackendTestIntegration extends RefStorageBackendTest { - def dropRefs = { - CassandraStorageBackend.refAccess.drop - } - - - def storage = CassandraStorageBackend -} - -@RunWith(classOf[JUnitRunner]) -class CassandraMapStorageBackendTestIntegration extends MapStorageBackendTest { - def dropMaps = { - CassandraStorageBackend.mapAccess.drop - } - - - def storage = CassandraStorageBackend -} - -@RunWith(classOf[JUnitRunner]) -class CassandraVectorStorageBackendTestIntegration extends VectorStorageBackendTest { - def dropVectors = { - CassandraStorageBackend.vectorAccess.drop - } - - - def storage = CassandraStorageBackend -} - -@RunWith(classOf[JUnitRunner]) -class CassandraQueueStorageBackendTestIntegration extends QueueStorageBackendTest { - def dropQueues = { - CassandraStorageBackend.queueAccess.drop - } - - - def storage = CassandraStorageBackend -} - - - - - diff --git a/akka-persistence/akka-persistence-cassandra/src/test/scala/CassandraTicket343TestIntegration.scala b/akka-persistence/akka-persistence-cassandra/src/test/scala/CassandraTicket343TestIntegration.scala deleted file mode 100644 index 7a062f9c31..0000000000 --- a/akka-persistence/akka-persistence-cassandra/src/test/scala/CassandraTicket343TestIntegration.scala 
+++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.cassandra - - -import org.junit.runner.RunWith -import org.scalatest.junit.JUnitRunner -import akka.persistence.common._ - -@RunWith(classOf[JUnitRunner]) -class CassandraTicket343TestIntegration extends Ticket343Test { - def dropMapsAndVectors: Unit = { - CassandraStorageBackend.vectorAccess.drop - CassandraStorageBackend.mapAccess.drop - } - - def getVector: (String) => PersistentVector[Array[Byte]] = CassandraStorage.getVector - - def getMap: (String) => PersistentMap[Array[Byte], Array[Byte]] = CassandraStorage.getMap - -} diff --git a/akka-persistence/akka-persistence-common/src/main/scala/akka/CommonStorageBackend.scala b/akka-persistence/akka-persistence-common/src/main/scala/akka/CommonStorageBackend.scala deleted file mode 100644 index 18020ff180..0000000000 --- a/akka-persistence/akka-persistence-common/src/main/scala/akka/CommonStorageBackend.scala +++ /dev/null @@ -1,741 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.common - -import akka.util.Logging -import java.lang.String -import java.nio.ByteBuffer -import collection.Map -import java.util.{Map => JMap} -import akka.persistence.common.PersistentMapBinary.COrdering._ -import collection.immutable._ -import collection.mutable.ArrayBuffer - - -private[akka] trait CommonStorageBackendAccess { - - import CommonStorageBackend._ - - /*abstract*/ - - def get(owner: String, key: Array[Byte], default: Array[Byte]): Array[Byte] - - def getAll(owner: String, keys: Iterable[Array[Byte]]): Map[Array[Byte], Array[Byte]] = { - keys.foldLeft(new HashMap[Array[Byte], Array[Byte]]) { - (map, key) => { - Option(get(owner, key)) match { - case Some(value) => map + (key -> value) - case None => map - } - } - } - } - - def put(owner: String, key: Array[Byte], value: Array[Byte]): Unit - - def putAll(owner: String, keyValues: Iterable[(Array[Byte], Array[Byte])]): Unit = { - keyValues.foreach{ - kv => kv match { - case (key, value) => put(owner, key, value) - } - } - } - - def delete(owner: String, key: Array[Byte]): Unit - - def deleteAll(owner: String, keys: Iterable[Array[Byte]]): Unit = { - keys.foreach(delete(owner, _)) - } - - def drop(): Unit - - /*concrete*/ - - def decodeMapKey(owner: String, key: Array[Byte]): Array[Byte] = key - - def encodeMapKey(owner: String, key: Array[Byte]): Array[Byte] = key - - def decodeIndexedKey(owner: String, key: Array[Byte]): Int = IntSerializer.fromBytes(key) - - def encodeIndexedKey(owner: String, keyint: Int): Array[Byte] = IntSerializer.toBytes(keyint) - - def deleteIndexed(owner: String, index: Int): Unit = delete(owner, encodeIndexedKey(owner, index)) - - def getIndexed(owner: String, index: Int): Array[Byte] = get(owner, encodeIndexedKey(owner, index)) - - def get(owner: String, key: Array[Byte]): Array[Byte] = get(owner, key, null) - - def putIndexed(owner: String, index: Int, value: Array[Byte]): Unit = put(owner, encodeIndexedKey(owner, index), value) - - def putAllIndexed(owner: String, values: Iterable[(Int, Array[Byte])]): Unit = { - putAll(owner, values.map{ - iv => { - iv match { - case (i, value) => (encodeIndexedKey(owner, i) -> value) - } - } - }) - } - - def getAllIndexed(owner: String, keys: Iterable[Int]): Map[Int, Array[Byte]] = { - val byteKeys = keys.map(encodeIndexedKey(owner, _)) - getAll(owner, byteKeys).map{ - kv => kv match { - case (key, value) => (decodeIndexedKey(owner, key) -> value) - } - } - } - - def 
deleteAllIndexed(owner: String, keys: Iterable[Int]): Unit = { - val byteKeys = keys.map(encodeIndexedKey(owner, _)) - deleteAll(owner, byteKeys) - } -} - -private[akka] trait KVStorageBackendAccess extends CommonStorageBackendAccess with Logging { - - import CommonStorageBackend._ - import KVStorageBackend._ - - def put(key: Array[Byte], value: Array[Byte]): Unit - - def get(key: Array[Byte]): Array[Byte] - - def get(key: Array[Byte], default: Array[Byte]): Array[Byte] - - def getAll(keys: Iterable[Array[Byte]]): Map[Array[Byte], Array[Byte]] - - def delete(key: Array[Byte]): Unit - - override def decodeMapKey(owner: String, key: Array[Byte]): Array[Byte] = { - val mapKeyLength = key.length - IntSerializer.bytesPerInt - owner.getBytes("UTF-8").length - val mapkey = new Array[Byte](mapKeyLength) - System.arraycopy(key, key.length - mapKeyLength, mapkey, 0, mapKeyLength) - mapkey - } - - - override def decodeIndexedKey(owner: String, key: Array[Byte]): Int = { - IntSerializer.fromBytes(decodeMapKey(owner,key)) - } - - override def put(owner: String, key: Array[Byte], value: Array[Byte]): Unit = { - put(getKey(owner, key), value) - } - - override def putIndexed(owner: String, index: Int, value: Array[Byte]): Unit = { - put(getIndexedKey(owner, index), value) - } - - - override def get(owner: String, key: Array[Byte]): Array[Byte] = { - get(getKey(owner, key)) - } - - override def getIndexed(owner: String, index: Int): Array[Byte] = { - get(getIndexedKey(owner, index)) - } - - - override def get(owner: String, key: Array[Byte], default: Array[Byte]): Array[Byte] = { - get(getKey(owner, key), default) - } - - - override def getAll(owner: String, keys: Iterable[Array[Byte]]): Map[Array[Byte], Array[Byte]] = { - getAll(keys.map{ - getKey(owner, _) - }) - } - - override def deleteIndexed(owner: String, index: Int): Unit = { - delete(getIndexedKey(owner, index)) - } - - override def delete(owner: String, key: Array[Byte]): Unit = { - delete(getKey(owner, key)) - } -} - -private[akka] object CommonStorageBackendAccess { - implicit def stringToByteArray(st: String): Array[Byte] = { - st.getBytes("UTF-8") - } -} - -private[akka] object CommonStorageBackend { - val nullMapValueHeader = 0x00.byteValue - val nullMapValue: Array[Byte] = Array(nullMapValueHeader) - val notNullMapValueHeader: Byte = 0xff.byteValue - val mapKeySetKeyHeader = 0x00.byteValue - val mapKeyHeader = 0xff.byteValue - val mapKeysIndex: Array[Byte] = new Array[Byte](1).padTo(1, mapKeySetKeyHeader) - val mapKeysWrapperPad: Array[Byte] = new Array[Byte](1).padTo(1, mapKeyHeader) - - /** - * wrapMapKey prepends mapKeysWrapperPad (1 byte) to map keys so that we can - * use a separate 1-byte key to store the map keyset.
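- *
- * Byte-level example (editor's note, not part of the original comment): with
- * mapKeyHeader = 0xff and mapKeySetKeyHeader = 0x00, a user key [0x12, 0x34]
- * is stored under [0xff, 0x12, 0x34], while the map's serialized keyset lives
- * under the single-byte key [0x00] (mapKeysIndex), so the two can never collide.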
- * - * This basically creates the map key used in underlying storage - */ - - def wrapMapKey(key: Array[Byte]): Array[Byte] = { - val wrapped = new Array[Byte](key.length + mapKeysWrapperPad.length) - System.arraycopy(mapKeysWrapperPad, 0, wrapped, 0, mapKeysWrapperPad.length) - System.arraycopy(key, 0, wrapped, mapKeysWrapperPad.length, key.length) - wrapped - } - - /** - * unwrapMapKey removes the mapKeysWrapperPad; this translates the map key used - * in underlying storage back to a key that is understandable by the frontend - */ - - def unwrapMapKey(key: Array[Byte]): Array[Byte] = { - val unwrapped = new Array[Byte](key.length - mapKeysWrapperPad.length) - System.arraycopy(key, mapKeysWrapperPad.length, unwrapped, 0, unwrapped.length) - unwrapped - } - - def getStoredMapValue(value: Array[Byte]): Array[Byte] = { - value match { - case null => nullMapValue - case value => { - val stored = new Array[Byte](value.length + 1) - stored(0) = notNullMapValueHeader - System.arraycopy(value, 0, stored, 1, value.length) - stored - } - } - } - - def getMapValueFromStored(value: Array[Byte]): Array[Byte] = { - - if (value(0) == nullMapValueHeader) { - null - } else if (value(0) == notNullMapValueHeader) { - val returned = new Array[Byte](value.length - 1) - System.arraycopy(value, 1, returned, 0, value.length - 1) - returned - } else { - throw new StorageException("unknown header byte on map value:" + value(0)) - } - } - - object IntSerializer { - val bytesPerInt = java.lang.Integer.SIZE / java.lang.Byte.SIZE - - def toBytes(i: Int) = ByteBuffer.wrap(new Array[Byte](bytesPerInt)).putInt(i).array() - - def fromBytes(bytes: Array[Byte]) = ByteBuffer.wrap(bytes).getInt() - - def toString(obj: Int) = obj.toString - - def fromString(str: String) = str.toInt - } - - object SortedSetSerializer { - def toBytes(set: SortedSet[Array[Byte]]): Array[Byte] = { - val length = set.foldLeft(0) { - (total, bytes) => { - total + bytes.length + IntSerializer.bytesPerInt - } - } - val allBytes = new Array[Byte](length) - val written = set.foldLeft(0) { - (total, bytes) => { - val sizeBytes = IntSerializer.toBytes(bytes.length) - System.arraycopy(sizeBytes, 0, allBytes, total, sizeBytes.length) - System.arraycopy(bytes, 0, allBytes, total + sizeBytes.length, bytes.length) - total + sizeBytes.length + bytes.length - } - } - require(length == written, "Bytes Written Did not equal Calculated Length, written %d, length %d".format(written, length)) - allBytes - } - - def fromBytes(bytes: Array[Byte]): SortedSet[Array[Byte]] = { - var set = new TreeSet[Array[Byte]] - if (bytes.length > IntSerializer.bytesPerInt) { - var pos = 0 - while (pos < bytes.length) { - val lengthBytes = new Array[Byte](IntSerializer.bytesPerInt) - System.arraycopy(bytes, pos, lengthBytes, 0, IntSerializer.bytesPerInt) - pos += IntSerializer.bytesPerInt - val length = IntSerializer.fromBytes(lengthBytes) - val item = new Array[Byte](length) - System.arraycopy(bytes, pos, item, 0, length) - set = set + item - pos += length - } - } - set - } - - } - -} - -private[akka] object KVStorageBackend { - - import CommonStorageBackend._ - - /** - * Concatenate owner length + owner + key so owned data will be colocated - * Store the length of owner as the first bytes to work around the rare case - * where ownerbytes1 + keybytes1 == ownerbytes2 + keybytes2 but ownerbytes1 != ownerbytes2 - */ - - def getKey(owner: String, key: Array[Byte]): Array[Byte] = { - val ownerBytes: Array[Byte] = owner.getBytes("UTF-8") - val ownerLenghtBytes: Array[Byte] =
IntSerializer.toBytes(owner.length) - val theKey = new Array[Byte](ownerLenghtBytes.length + ownerBytes.length + key.length) - System.arraycopy(ownerLenghtBytes, 0, theKey, 0, ownerLenghtBytes.length) - System.arraycopy(ownerBytes, 0, theKey, ownerLenghtBytes.length, ownerBytes.length) - System.arraycopy(key, 0, theKey, ownerLenghtBytes.length + ownerBytes.length, key.length) - theKey - } - - def getIndexedKey(owner: String, index: Int): Array[Byte] = { - getKey(owner, IntSerializer.toBytes(index)) - } - -} - -private[akka] trait CommonStorageBackend extends MapStorageBackend[Array[Byte], Array[Byte]] with VectorStorageBackend[Array[Byte]] with RefStorageBackend[Array[Byte]] with QueueStorageBackend[Array[Byte]] with Logging { - - import CommonStorageBackend._ - - val vectorHeadIndex = -1 - val vectorTailIndex = -2 - val queueHeadIndex = -1 - val queueTailIndex = -2 - val zero = IntSerializer.toBytes(0) - val refItem = "refItem".getBytes("UTF-8") - - implicit val ordering = ArrayOrdering - - - def refAccess: CommonStorageBackendAccess - - def vectorAccess: CommonStorageBackendAccess - - def mapAccess: CommonStorageBackendAccess - - def queueAccess: CommonStorageBackendAccess - - - def getRefStorageFor(name: String): Option[Array[Byte]] = { - val result: Array[Byte] = refAccess.get(name, refItem) - Option(result) - } - - def insertRefStorageFor(name: String, element: Array[Byte]) = { - element match { - case null => refAccess.delete(name, refItem) - case _ => refAccess.put(name, refItem, element) - } - } - - - def getMapStorageRangeFor(name: String, start: Option[Array[Byte]], finish: Option[Array[Byte]], count: Int): List[(Array[Byte], Array[Byte])] = { - val allkeys: SortedSet[Array[Byte]] = getMapKeys(name) - val range = allkeys.rangeImpl(start, finish).take(count) - getKeyValues(name, range) - } - - def getMapStorageFor(name: String): List[(Array[Byte], Array[Byte])] = { - val keys = getMapKeys(name) - getKeyValues(name, keys) - } - - private def getKeyValues(name: String, keys: SortedSet[Array[Byte]]): List[(Array[Byte], Array[Byte])] = { - val all: Map[Array[Byte], Array[Byte]] = - mapAccess.getAll(name, keys) - - var returned = new TreeMap[Array[Byte], Array[Byte]]()(ordering) - all.foreach{ - (entry) => { - entry match { - case (namePlusKey: Array[Byte], value: Array[Byte]) => { - //need to fix here - returned += mapAccess.decodeMapKey(name, unwrapMapKey(namePlusKey)) -> getMapValueFromStored(value) - } - } - } - } - returned.toList - } - - def getMapStorageSizeFor(name: String): Int = { - val keys = getMapKeys(name) - keys.size - } - - def getMapStorageEntryFor(name: String, key: Array[Byte]): Option[Array[Byte]] = { - val result: Array[Byte] = mapAccess.get(name, wrapMapKey(key)) - result match { - case null => None - case _ => Some(getMapValueFromStored(result)) - } - } - - def removeMapStorageFor(name: String, key: Array[Byte]) = { - val wrapped = wrapMapKey(key) - var keys = getMapKeys(name) - keys -= wrapped - putMapKeys(name, keys) - mapAccess.delete(name, wrapped) - } - - def removeMapStorageFor(name: String) = { - val keys = getMapKeys(name) - keys.foreach{ - key => - mapAccess.delete(name, key) - } - mapAccess.delete(name, mapKeysIndex) - } - - def insertMapStorageEntryFor(name: String, key: Array[Byte], value: Array[Byte]) = { - val wrapped = wrapMapKey(key) - mapAccess.put(name, wrapped, getStoredMapValue(value)) - var keys = getMapKeys(name) - keys += wrapped - putMapKeys(name, keys) - } - - def insertMapStorageEntriesFor(name: String, entries: List[(Array[Byte], 
Array[Byte])]) = { - val toInsert = entries.map{ - kv => kv match { - case (key, value) => (wrapMapKey(key) -> getStoredMapValue(value)) - } - } - mapAccess.putAll(name, toInsert) - val newKeys = toInsert.map{ - case (key, value) => { - key - } - } - var keys = getMapKeys(name) - keys ++= newKeys - putMapKeys(name, keys) - } - - def putMapKeys(name: String, keys: SortedSet[Array[Byte]]) = { - mapAccess.put(name, mapKeysIndex, SortedSetSerializer.toBytes(keys)) - } - - def getMapKeys(name: String): SortedSet[Array[Byte]] = { - SortedSetSerializer.fromBytes(mapAccess.get(name, mapKeysIndex, Array.empty[Byte])) - } - - def getVectorStorageSizeFor(name: String): Int = { - getVectorMetadata(name).size - } - - def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int): List[Array[Byte]] = { - val mdata = getVectorMetadata(name) - - val st = start.getOrElse(0) - var cnt = - if (finish.isDefined) { - val f = finish.get - if (f >= st) (f - st) else count - } else { - count - } - if (cnt > (mdata.size - st)) { - cnt = mdata.size - st - } - - val indexes = mdata.getRangeIndexes(st, cnt) - val result = vectorAccess.getAllIndexed(name, indexes) - indexes.map(result.get(_).get).toList - - } - - def getVectorStorageEntryFor(name: String, index: Int): Array[Byte] = { - val mdata = getVectorMetadata(name) - if (mdata.size > 0 && index < mdata.size) { - vectorAccess.getIndexed(name, mdata.getRangeIndexes(index, 1)(0)) - } else { - throw new StorageException("In Vector:" + name + " No such Index:" + index) - } - } - - def updateVectorStorageEntryFor(name: String, index: Int, elem: Array[Byte]) = { - val mdata = getVectorMetadata(name) - if (mdata.size > 0 && index < mdata.size) { - elem match { - case null => vectorAccess.deleteIndexed(name, mdata.getRangeIndexes(index, 1)(0)) - case _ => vectorAccess.putIndexed(name, mdata.getRangeIndexes(index, 1)(0), elem) - } - } else { - throw new StorageException("In Vector:" + name + " No such Index:" + index) - } - } - - def insertVectorStorageEntriesFor(name: String, elements: List[Array[Byte]]) = { - var mdata = getVectorMetadata(name) - var deletes: List[Int] = Nil - var puts: List[(Int, Array[Byte])] = Nil - elements.foreach{ - element => { - if (mdata.canInsert) { - element match { - case null => deletes = mdata.head :: deletes - case _ => puts = (mdata.head -> element) :: puts - } - mdata = mdata.copy(head = mdata.nextInsert) - } else { - throw new IllegalStateException("The vector doesn't have enough capacity to insert these entries") - } - } - } - - vectorAccess.deleteAllIndexed(name, deletes) - vectorAccess.putAllIndexed(name, puts) - vectorAccess.putIndexed(name, vectorHeadIndex, IntSerializer.toBytes(mdata.head)) - - } - - def insertVectorStorageEntryFor(name: String, element: Array[Byte]) = { - val mdata = getVectorMetadata(name) - if (mdata.canInsert) { - element match { - case null => vectorAccess.deleteIndexed(name, mdata.head) - case _ => vectorAccess.putIndexed(name, mdata.head, element) - } - vectorAccess.putIndexed(name, vectorHeadIndex, IntSerializer.toBytes(mdata.nextInsert)) - } else { - throw new IllegalStateException("The vector %s is full".format(name)) - } - - } - - - override def removeVectorStorageEntryFor(name: String) = { - val mdata = getVectorMetadata(name) - if (mdata.canRemove) { - vectorAccess.putIndexed(name, vectorTailIndex, IntSerializer.toBytes(mdata.nextRemove)) - try - { - vectorAccess.deleteIndexed(name, mdata.tail) - } catch { - case e: Exception => log.warn("Exception while trying to
clean up a popped element from the vector, this is acceptable") - } - - } else { - //blow up or not? - } - } - - def getVectorMetadata(name: String): VectorMetadata = { - val result = vectorAccess.getAllIndexed(name, List(vectorHeadIndex, vectorTailIndex)) - val head = result.getOrElse(vectorHeadIndex, zero) - val tail = result.getOrElse(vectorTailIndex, zero) - val mdata = VectorMetadata(IntSerializer.fromBytes(head), IntSerializer.fromBytes(tail)) - mdata - } - - def getOrDefaultToZero(map: Map[Array[Byte], Array[Byte]], key: Array[Byte]): Int = { - map.get(key) match { - case Some(value) => IntSerializer.fromBytes(value) - case None => 0 - } - } - - - def remove(name: String): Boolean = { - val mdata = getQueueMetadata(name) - mdata.getActiveIndexes foreach { - index => - queueAccess.deleteIndexed(name, index) - } - queueAccess.deleteIndexed(name, queueHeadIndex) - queueAccess.deleteIndexed(name, queueTailIndex) - true - } - - def peek(name: String, start: Int, count: Int): List[Array[Byte]] = { - val mdata = getQueueMetadata(name) - val indexes = mdata.getPeekIndexes(start, count) - val result = queueAccess.getAllIndexed(name, indexes) - indexes.map(result.get(_).get).toList - } - - def size(name: String): Int = { - getQueueMetadata(name).size - } - - def dequeue(name: String): Option[Array[Byte]] = { - val mdata = getQueueMetadata(name) - if (mdata.canDequeue) { - try - { - val dequeued = queueAccess.getIndexed(name, mdata.head) - queueAccess.putIndexed(name, queueHeadIndex, IntSerializer.toBytes(mdata.nextDequeue)) - Some(dequeued) - } finally { - try - { - queueAccess.deleteIndexed(name, mdata.head) - } catch { - //a failure to delete is ok, just leaves a K-V in the underlying store that will be overwritten if the queue ever wraps around - case e: Exception => log.warn(e, "caught an exception while deleting a dequeued element, however this will not cause any inconsistency in the queue") - } - } - } else { - None - } - } - - def enqueue(name: String, item: Array[Byte]): Option[Int] = { - val mdata = getQueueMetadata(name) - if (mdata.canEnqueue) { - item match { - case null => queueAccess.deleteIndexed(name, mdata.tail) - case _ => queueAccess.putIndexed(name, mdata.tail, item) - } - queueAccess.putIndexed(name, queueTailIndex, IntSerializer.toBytes(mdata.nextEnqueue)) - Some(mdata.size + 1) - } else { - None - } - } - - def getQueueMetadata(name: String): QueueMetadata = { - val result = queueAccess.getAllIndexed(name, List(queueHeadIndex, queueTailIndex)) - val head = result.get(queueHeadIndex).getOrElse(zero) - val tail = result.get(queueTailIndex).getOrElse(zero) - QueueMetadata(IntSerializer.fromBytes(head), IntSerializer.fromBytes(tail)) - } - - - //wrapper for null - - - case class QueueMetadata(head: Int, tail: Int) { - //queue is a sequence with indexes from 0 to Integer.MAX_VALUE - //wraps around when one pointer gets to max value - //head has an element in it. - //tail is the next slot to write to.
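- //Worked example (editor's note, not part of the original comments): after a
- //wrap-around with head = Integer.MAX_VALUE - 1 and tail = 1, size gives
- //(Integer.MAX_VALUE - head) + (tail + 1) = 1 + 2 = 3, matching the live slots
- //at indexes Integer.MAX_VALUE - 1, Integer.MAX_VALUE and 0.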
- - def size = { - if (tail >= head) { - tail - head - } else { - //queue has wrapped - (Integer.MAX_VALUE - head) + (tail + 1) - } - } - - def canEnqueue = { - //the -1 stops the tail from catching the head on a wrap around - size < Integer.MAX_VALUE - 1 - } - - def canDequeue = { - size > 0 - } - - def getActiveIndexes(): IndexedSeq[Int] = { - if (tail >= head) { - Range(head, tail) - } else { - //queue has wrapped - val headRange = Range.inclusive(head, Integer.MAX_VALUE) - (if (tail > 0) { - headRange ++ Range(0, tail) - } else { - headRange - }) - } - } - - def getPeekIndexes(start: Int, count: Int): IndexedSeq[Int] = { - val indexes = getActiveIndexes - if (indexes.size < start) { - IndexedSeq.empty[Int] - } else { - indexes.drop(start).take(count) - } - } - - def nextEnqueue = { - tail match { - case Integer.MAX_VALUE => 0 - case _ => tail + 1 - } - } - - def nextDequeue = { - head match { - case Integer.MAX_VALUE => 0 - case _ => head + 1 - } - } - } - - case class VectorMetadata(head: Int, tail: Int) { - def size = { - if (head >= tail) { - head - tail - } else { - //queue has wrapped - (Integer.MAX_VALUE - tail) + (head + 1) - } - } - - def canInsert = { - //the -1 stops the tail from catching the head on a wrap around - size < Integer.MAX_VALUE - 1 - } - - def canRemove = { - size > 0 - } - - def getActiveIndexes(): IndexedSeq[Int] = { - if (head >= tail) { - Range(tail, head) - } else { - //queue has wrapped - val headRange = Range.inclusive(tail, Integer.MAX_VALUE) - (if (head > 0) { - headRange ++ Range(0, head) - } else { - headRange - }) - } - } - - def getRangeIndexes(start: Int, count: Int): IndexedSeq[Int] = { - val indexes = getActiveIndexes.reverse - if (indexes.size < start) { - IndexedSeq.empty[Int] - } else { - indexes.drop(start).take(count) - } - } - - def nextInsert = { - head match { - case Integer.MAX_VALUE => 0 - case _ => head + 1 - } - } - - def nextRemove = { - tail match { - case Integer.MAX_VALUE => 0 - case _ => tail + 1 - } - } - } - - -} diff --git a/akka-persistence/akka-persistence-common/src/main/scala/akka/Pool.scala b/akka-persistence/akka-persistence-common/src/main/scala/akka/Pool.scala deleted file mode 100644 index 3e205dcfe9..0000000000 --- a/akka-persistence/akka-persistence-common/src/main/scala/akka/Pool.scala +++ /dev/null @@ -1,91 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.common - -import org.apache.commons.pool._ -import org.apache.commons.pool.impl._ - -import org.apache.thrift.transport._ - -trait Pool[T] extends java.io.Closeable { - def borrowObject: T - def returnObject(t: T): Unit - def invalidateObject(t: T): Unit - def addObject(): Unit - def getNumIdle: Int - def getNumActive: Int - def clear(): Unit - def setFactory(factory: PoolItemFactory[T]): Unit -} - -trait PoolFactory[T] { - def createPool: Pool[T] -} - -trait PoolItemFactory[T] { - def makeObject: T - def destroyObject(t: T): Unit - def validateObject(t: T): Boolean - def activateObject(t: T): Unit - def passivateObject(t: T): Unit -} - -trait PoolBridge[T, OP <: ObjectPool] extends Pool[T] { - val impl: OP - override def borrowObject: T = impl.borrowObject.asInstanceOf[T] - override def returnObject(t: T) = impl.returnObject(t) - override def invalidateObject(t: T) = impl.invalidateObject(t) - override def addObject = impl.addObject - override def getNumIdle: Int = impl.getNumIdle - override def getNumActive: Int = impl.getNumActive - override def clear(): Unit = impl.clear() - override def close(): Unit = 
impl.close() - override def setFactory(factory: PoolItemFactory[T]) = impl.setFactory(toPoolableObjectFactory(factory)) - - def toPoolableObjectFactory[T](pif: PoolItemFactory[T]) = new PoolableObjectFactory { - def makeObject: Object = pif.makeObject.asInstanceOf[Object] - def destroyObject(o: Object): Unit = pif.destroyObject(o.asInstanceOf[T]) - def validateObject(o: Object): Boolean = pif.validateObject(o.asInstanceOf[T]) - def activateObject(o: Object): Unit = pif.activateObject(o.asInstanceOf[T]) - def passivateObject(o: Object): Unit = pif.passivateObject(o.asInstanceOf[T]) - } -} - -object StackPool { - def apply[T](factory: PoolItemFactory[T]) = new PoolBridge[T,StackObjectPool] { - val impl = new StackObjectPool(toPoolableObjectFactory(factory)) - } - - def apply[T](factory: PoolItemFactory[T], maxIdle: Int) = new PoolBridge[T,StackObjectPool] { - val impl = new StackObjectPool(toPoolableObjectFactory(factory),maxIdle) - } - - def apply[T](factory: PoolItemFactory[T], maxIdle: Int, initIdleCapacity: Int) = new PoolBridge[T,StackObjectPool] { - val impl = new StackObjectPool(toPoolableObjectFactory(factory),maxIdle,initIdleCapacity) - } -} - -object SoftRefPool { - def apply[T](factory: PoolItemFactory[T]) = new PoolBridge[T,SoftReferenceObjectPool] { - val impl = new SoftReferenceObjectPool(toPoolableObjectFactory(factory)) - } -} - -trait TransportFactory[T <: TTransport] extends PoolItemFactory[T] { - def createTransport: T - def makeObject: T = createTransport - def destroyObject(transport: T): Unit = transport.close - def validateObject(transport: T) = transport.isOpen - def activateObject(transport: T): Unit = if( !transport.isOpen ) transport.open else () - def passivateObject(transport: T): Unit = transport.flush -} - -case class SocketProvider(val host: String, val port: Int) extends TransportFactory[TSocket] { - def createTransport = { - val t = new TSocket(host, port) - t.open - t - } -} diff --git a/akka-persistence/akka-persistence-common/src/main/scala/akka/Storage.scala b/akka-persistence/akka-persistence-common/src/main/scala/akka/Storage.scala deleted file mode 100644 index de5106d610..0000000000 --- a/akka-persistence/akka-persistence-common/src/main/scala/akka/Storage.scala +++ /dev/null @@ -1,876 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.common - -import akka.stm._ -import akka.stm.TransactionManagement.transaction -import akka.util.Logging -import akka.japi.{Option => JOption} -import collection.mutable.ArraySeq - -// FIXME move to 'stm' package + add message with more info -class NoTransactionInScopeException extends RuntimeException - -class StorageException(message: String) extends RuntimeException(message) - -/** - * Example Scala usage. - *

- * New map with generated id. - * <pre> - * val myMap = CassandraStorage.newMap - * </pre> - * - * New map with user-defined id. - * <pre> - * val myMap = MongoStorage.newMap(id) - * </pre> - * - * Get map by user-defined id. - * <pre> - * val myMap = CassandraStorage.getMap(id) - * </pre> - * - * Example Java usage: - * <pre> - * PersistentMap myMap = MongoStorage.newMap(); - * </pre> - * Or: - * <pre> - * MongoPersistentMap myMap = MongoStorage.getMap(id); - * </pre> - * 
- * - * @author Jonas Bonér - * @author Debasish Ghosh - */ -trait Storage { - type ElementType - - def newMap: PersistentMap[ElementType, ElementType] - - def newVector: PersistentVector[ElementType] - - def newRef: PersistentRef[ElementType] - - def newQueue: PersistentQueue[ElementType] = // only implemented for redis - throw new UnsupportedOperationException - - def newSortedSet: PersistentSortedSet[ElementType] = // only implemented for redis - throw new UnsupportedOperationException - - def getMap(id: String): PersistentMap[ElementType, ElementType] - - def getVector(id: String): PersistentVector[ElementType] - - def getRef(id: String): PersistentRef[ElementType] - - def getQueue(id: String): PersistentQueue[ElementType] = // only implemented for redis - throw new UnsupportedOperationException - - def getSortedSet(id: String): PersistentSortedSet[ElementType] = // only implemented for redis - throw new UnsupportedOperationException - - def newMap(id: String): PersistentMap[ElementType, ElementType] - - def newVector(id: String): PersistentVector[ElementType] - - def newRef(id: String): PersistentRef[ElementType] - - def newQueue(id: String): PersistentQueue[ElementType] = // only implemented for redis - throw new UnsupportedOperationException - - def newSortedSet(id: String): PersistentSortedSet[ElementType] = // only implemented for redis - throw new UnsupportedOperationException -} - -private[akka] object PersistentMap { - // operations on the Map - sealed trait Op - case object PUT extends Op - case object REM extends Op - case object UPD extends Op - case object CLR extends Op -} - -/** - * Implementation of PersistentMap for every concrete - * storage will have the same workflow. This abstracts the workflow. - * - * Subclasses just need to provide the actual concrete instance for the - * abstract val storage. - * - * @author Jonas Bonér - */ -trait PersistentMap[K, V] extends scala.collection.mutable.Map[K, V] - with Transactional with Committable with Abortable with Logging { - - import scalaj.collection.Imports._ - def asJavaMap() : java.util.Map[K, V] = this.asJava - - //Import Ops - import PersistentMap._ - - // append only log: records all mutating operations - protected val appendOnlyTxLog = TransactionalVector[LogEntry]() - - case class LogEntry(key: Option[K], value: Option[V], op: Op) - - // need to override in subclasses e.g. 
"sameElements" for Array[Byte] - def equal(k1: K, k2: K): Boolean = k1 == k2 - - // Seqable type that's required for maintaining the log of distinct keys affected in current transaction - type T <: Equals - - // converts key K to the Seqable type Equals - def toEquals(k: K): T - - // keys affected in the current transaction - protected val keysInCurrentTx = TransactionalMap[T, K]() - - protected def addToListOfKeysInTx(key: K): Unit = - keysInCurrentTx += (toEquals(key), key) - - protected def clearDistinctKeys = keysInCurrentTx.clear - - protected def filterTxLogByKey(key: K): IndexedSeq[LogEntry] = - appendOnlyTxLog filter (e => e.key.map(equal(_, key)).getOrElse(true)) - - // need to get current value considering the underlying storage as well as the transaction log - protected def getCurrentValue(key: K): Option[V] = { - - // get all mutating entries for this key for this tx - val txEntries = filterTxLogByKey(key) - - // get the snapshot from the underlying store for this key - val underlying = try { - storage.getMapStorageEntryFor(uuid, key) - } catch {case e: Exception => None} - - if (txEntries.isEmpty) underlying - else txEntries.last match { - case LogEntry(_, _, CLR) => None - case _ => replay(txEntries, key, underlying) - } - } - - // replay all tx entries for key k with seed = initial - private def replay(txEntries: IndexedSeq[LogEntry], key: K, initial: Option[V]): Option[V] = { - import scala.collection.mutable._ - - val m = initial match { - case None => Map.empty[K, V] - case Some(v) => Map((key, v)) - } - txEntries.foreach { - case LogEntry(k, v, o) => o match { - case PUT => m.put(k.get, v.get) - case REM => m -= k.get - case UPD => m.update(k.get, v.get) - case CLR => Map.empty[K, V] - } - } - m get key - } - - // to be concretized in subclasses - val storage: MapStorageBackend[K, V] - - def commit = { - appendOnlyTxLog.foreach { - case LogEntry(k, v, o) => o match { - case PUT => storage.insertMapStorageEntryFor(uuid, k.get, v.get) - case UPD => storage.insertMapStorageEntryFor(uuid, k.get, v.get) - case REM => storage.removeMapStorageFor(uuid, k.get) - case CLR => storage.removeMapStorageFor(uuid) - } - } - - appendOnlyTxLog.clear - clearDistinctKeys - } - - def abort = { - appendOnlyTxLog.clear - clearDistinctKeys - } - - def -=(key: K) = { - remove(key) - this - } - - override def +=(kv: (K, V)) = { - put(kv._1, kv._2) - this - } - - def +=(key: K, value: V) = { - put(key, value) - this - } - - override def put(key: K, value: V): Option[V] = { - register - val curr = getCurrentValue(key) - appendOnlyTxLog add LogEntry(Some(key), Some(value), PUT) - addToListOfKeysInTx(key) - curr - } - - override def update(key: K, value: V) { - register - val curr = getCurrentValue(key) - appendOnlyTxLog add LogEntry(Some(key), Some(value), UPD) - addToListOfKeysInTx(key) - curr - } - - override def remove(key: K) : Option[V] = { - register - val curr = getCurrentValue(key) - appendOnlyTxLog add LogEntry(Some(key), None, REM) - addToListOfKeysInTx(key) - curr - } - - def slice(start: Option[K], count: Int): List[(K, V)] = - slice(start, None, count) - - def slice(start: Option[K], finish: Option[K], count: Int): List[(K, V)] - - override def clear = { - register - appendOnlyTxLog add LogEntry(None, None, CLR) - clearDistinctKeys - } - - override def contains(key: K): Boolean = try { - filterTxLogByKey(key) match { - case Seq() => // current tx doesn't use this - storage.getMapStorageEntryFor(uuid, key).isDefined // check storage - case txs => // present in log - val lastOp = 
txs.last.op - lastOp != REM && lastOp != CLR // last entry cannot be a REM - } - } catch {case e: Exception => false} - - protected def existsInStorage(key: K): Option[V] = try { - storage.getMapStorageEntryFor(uuid, key) - } catch { - case e: Exception => None - } - - override def size: Int = try { - // partition key set affected in current tx into those which r added & which r deleted - val (keysAdded, keysRemoved) = keysInCurrentTx.map { - case (kseq, k) => ((kseq, k), getCurrentValue(k)) - }.partition(_._2.isDefined) - - // keys which existed in storage but removed in current tx - val inStorageRemovedInTx = - keysRemoved.keySet - .map(_._2) - .filter(k => existsInStorage(k).isDefined) - .size - - // all keys in storage - val keysInStorage = - storage.getMapStorageFor(uuid) - .map {case (k, v) => toEquals(k)} - .toSet - - // (keys that existed UNION keys added ) - (keys removed) - (keysInStorage union keysAdded.keySet.map(_._1)).size - inStorageRemovedInTx - } catch { - case e: Exception => 0 - } - - // get must consider underlying storage & current uncommitted tx log - override def get(key: K): Option[V] = getCurrentValue(key) - - def iterator: Iterator[Tuple2[K, V]] - - protected def register = { - if (transaction.get.isEmpty) throw new NoTransactionInScopeException - transaction.get.get.register("Map:" + uuid, this) - } -} - -object PersistentMapBinary { - object COrdering { - //frontend - implicit object ArraySeqOrdering extends Ordering[ArraySeq[Byte]] { - def compare(o1: ArraySeq[Byte], o2: ArraySeq[Byte]) = - ArrayOrdering.compare(o1.toArray, o2.toArray) - } - //backend - - implicit object ArrayOrdering extends Ordering[Array[Byte]] { - import java.lang.{Math=>M} - def compare(o1: Array[Byte], o2: Array[Byte]): Int = { - if (o1.size == o2.size) { - for (i <- 0 until o1.size) { - var a = o1(i) - var b = o2(i) - if (a != b) { - return (a - b) / (M.abs(a - b)) - } - } - 0 - } else { - (o1.length - o2.length) / (M.max(1, M.abs(o1.length - o2.length))) - } - } - - } - - } -} - -trait PersistentMapBinary extends PersistentMap[Array[Byte], Array[Byte]] { - import scala.collection.mutable.ArraySeq - - type T = ArraySeq[Byte] - - def toEquals(k: Array[Byte]) = ArraySeq(k: _*) - - override def equal(k1: Array[Byte], k2: Array[Byte]): Boolean = k1 sameElements k2 - - - - import scala.collection.immutable.{TreeMap, SortedMap} - private def replayAllKeys: SortedMap[ArraySeq[Byte], Array[Byte]] = { - import PersistentMapBinary.COrdering._ - - // need ArraySeq for ordering - val fromStorage = - TreeMap(storage.getMapStorageFor(uuid).map {case (k, v) => (ArraySeq(k: _*), v)}: _*) - - val (keysAdded, keysRemoved) = keysInCurrentTx.map { - case (_, k) => (k, getCurrentValue(k)) - }.partition(_._2.isDefined) - - val inStorageRemovedInTx = - keysRemoved.keySet - .filter(k => existsInStorage(k).isDefined) - .map(k => ArraySeq(k: _*)) - - (fromStorage -- inStorageRemovedInTx) ++ keysAdded.map {case (k, v) => (ArraySeq(k: _*), v.get)} - } - - override def slice(start: Option[Array[Byte]], finish: Option[Array[Byte]], count: Int): List[(Array[Byte], Array[Byte])] = try { - val newMap = replayAllKeys - - if (newMap isEmpty) List[(Array[Byte], Array[Byte])]() - - val startKey = - start match { - case Some(bytes) => Some(ArraySeq(bytes: _*)) - case None => None - } - - val endKey = - finish match { - case Some(bytes) => Some(ArraySeq(bytes: _*)) - case None => None - } - - ((startKey, endKey, count): @unchecked) match { - case ((Some(s), Some(e), _)) => - newMap.range(s, e) - .toList - .map(e => 
(e._1.toArray, e._2)) - .toList - case ((Some(s), None, c)) if c > 0 => - newMap.from(s) - .iterator - .take(count) - .map(e => (e._1.toArray, e._2)) - .toList - case ((Some(s), None, _)) => - newMap.from(s) - .toList - .map(e => (e._1.toArray, e._2)) - .toList - case ((None, Some(e), _)) => - newMap.until(e) - .toList - .map(e => (e._1.toArray, e._2)) - .toList - } - } catch {case e: Exception => Nil} - - override def iterator: Iterator[(Array[Byte], Array[Byte])] = { - new Iterator[(Array[Byte], Array[Byte])] { - private var elements = replayAllKeys - - override def next: (Array[Byte], Array[Byte]) = synchronized { - val (k, v) = elements.head - elements = elements.tail - (k.toArray, v) - } - - override def hasNext: Boolean = synchronized {!elements.isEmpty} - } - } - - /** - * Java API. - */ - def javaIterator: java.util.Iterator[java.util.Map.Entry[Array[Byte],Array[Byte]]] = { - new java.util.Iterator[java.util.Map.Entry[Array[Byte],Array[Byte]]] { - private var elements = replayAllKeys - override def next: java.util.Map.Entry[Array[Byte], Array[Byte]] = synchronized { - val (k, v) = elements.head - elements = elements.tail - val entry = new java.util.Map.Entry[Array[Byte], Array[Byte]] { - override def getKey = k.toArray - override def getValue = v - override def setValue(v: Array[Byte]) = throw new UnsupportedOperationException("Use put or update methods to change a map entry.") - } - entry - } - override def hasNext: Boolean = synchronized { !elements.isEmpty } - override def remove: Unit = throw new UnsupportedOperationException("Use remove method to remove a map entry.") - } - } -} - -private[akka] object PersistentVector { - // operations on the Vector - sealed trait Op - case object ADD extends Op - case object UPD extends Op - case object POP extends Op -} - -/** - * Implements a template for a concrete persistent transactional vector based storage. - * - * @author Jonas Bonér - */ -trait PersistentVector[T] extends IndexedSeq[T] with Transactional with Committable with Abortable { - //Import Ops - import PersistentVector._ - - import scalaj.collection.Imports._ - def asJavaList() : java.util.List[T] = this.asJava - - // append only log: records all mutating operations - protected val appendOnlyTxLog = TransactionalVector[LogEntry]() - - case class LogEntry(index: Option[Int], value: Option[T], op: Op) - - // need to override in subclasses e.g. 
"sameElements" for Array[Byte] - // def equal(v1: T, v2: T): Boolean = v1 == v2 - - val storage: VectorStorageBackend[T] - - def commit = { - for (entry <- appendOnlyTxLog) { - (entry: @unchecked) match { - case LogEntry(_, Some(v), ADD) => storage.insertVectorStorageEntryFor(uuid, v) - case LogEntry(Some(i), Some(v), UPD) => storage.updateVectorStorageEntryFor(uuid, i, v) - case LogEntry(_, _, POP) => storage.removeVectorStorageEntryFor(uuid) - } - } - appendOnlyTxLog.clear - } - - def abort = { - appendOnlyTxLog.clear - } - - private def replay: List[T] = { - import scala.collection.mutable.ArrayBuffer - var elemsStorage = ArrayBuffer(storage.getVectorStorageRangeFor(uuid, None, None, storage.getVectorStorageSizeFor(uuid)).reverse: _*) - - for (entry <- appendOnlyTxLog) { - (entry: @unchecked) match { - case LogEntry(_, Some(v), ADD) => elemsStorage += v - case LogEntry(Some(i), Some(v), UPD) => elemsStorage.update(i, v) - case LogEntry(_, _, POP) => elemsStorage = elemsStorage.drop(1) - } - } - elemsStorage.toList.reverse - } - - def +(elem: T) = add(elem) - - def add(elem: T) = { - register - appendOnlyTxLog + LogEntry(None, Some(elem), ADD) - } - - def apply(index: Int): T = get(index) - - def get(index: Int): T = { - if (appendOnlyTxLog.isEmpty) { - storage.getVectorStorageEntryFor(uuid, index) - } else { - val curr = replay - curr(index) - } - } - - override def slice(start: Int, finish: Int): IndexedSeq[T] = slice(Some(start), Some(finish)) - - def slice(start: Option[Int], finish: Option[Int], count: Int = 0): IndexedSeq[T] = { - val curr = replay - val s = if (start.isDefined) start.get else 0 - val cnt = - if (finish.isDefined) { - val f = finish.get - if (f >= s) (f - s) else count - } - else count - if (s == 0 && cnt == 0) List().toIndexedSeq - else curr.slice(s, s + cnt).toIndexedSeq - } - - /** - * Removes the tail element of this vector. - */ - def pop: T = { - register - val curr = replay - appendOnlyTxLog + LogEntry(None, None, POP) - curr.last - } - - def update(index: Int, newElem: T) = { - register - appendOnlyTxLog + LogEntry(Some(index), Some(newElem), UPD) - } - - override def first: T = get(0) - - override def last: T = replay.last - - def length: Int = replay.length - - protected def register = { - if (transaction.get.isEmpty) throw new NoTransactionInScopeException - transaction.get.get.register("Vector" + uuid, this) - } -} - -/** - * Implements a persistent reference with abstract storage. 
- * - * @author Jonas Bonér - */ -trait PersistentRef[T] extends Transactional with Committable with Abortable { - protected val ref = Ref[T]() - - val storage: RefStorageBackend[T] - - def commit = if (ref.isDefined) { - storage.insertRefStorageFor(uuid, ref.get) - ref.swap(null.asInstanceOf[T]) - } - - def abort = ref.swap(null.asInstanceOf[T]) - - def swap(elem: T) = { - register - ref.swap(elem) - } - - def get: Option[T] = if (ref.isDefined) ref.opt else storage.getRefStorageFor(uuid) - - def isDefined: Boolean = ref.isDefined || storage.getRefStorageFor(uuid).isDefined - - def getOrElse(default: => T): T = { - val current = get - if (current.isDefined) current.get - else default - } - - protected def register = { - if (transaction.get.isEmpty) throw new NoTransactionInScopeException - transaction.get.get.register("Ref" + uuid, this) - } -} - -private[akka] object PersistentQueue { - //Operations for PersistentQueue - sealed trait QueueOp - case object ENQ extends QueueOp - case object DEQ extends QueueOp -} - -/** - * Implementation of PersistentQueue for every concrete - * storage will have the same workflow. This abstracts the workflow. - *

- * Enqueue is simpler: we just have to record the operation in a local - transactional store for playback during commit. This store, - enqueueNDequeuedEntries, stores the entire history of enqueue - and dequeue that will be played at commit on the underlying store. - *

- * The main challenge with dequeue is that we need to return the element - * that has been dequeued. Hence in addition to the above store, we need to - * have another local queue that actually does the enqueue dequeue operations - * that take place only during this transaction. This gives us the - * element that will be dequeued next from the set of elements enqueued - * during this transaction. - *

- * The third item that we need is an index to the underlying storage element - that may also have to be dequeued as part of the current transaction. This - is modeled using a ref to an Int that points to elements in the underlying store. - *
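To make the workflow concrete, a hedged sketch of a transactional enqueue/dequeue pair against the trait below (editorial; it assumes a queue obtained from the Redis-backed Storage module):

    atomic {
      val q = RedisStorage.getQueue("jobs")    // assumes the Redis-backed Storage module
      q.enqueue("a".getBytes, "b".getBytes)    // two ENQ entries recorded in the tx log
      new String(q.dequeue)                    // replays the log over storage => "a", records DEQ
    }                                          // commit plays ENQ, ENQ, DEQ on the backend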

- * Subclasses just need to provide the actual concrete instance for the - * abstract val storage. - * - * @author Debasish Ghosh - */ -trait PersistentQueue[A] extends scala.collection.mutable.Queue[A] - with Transactional with Committable with Abortable with Logging { - - //Import Ops - import PersistentQueue._ - - case class LogEntry(value: Option[A], op: QueueOp) - - // current trail that will be played on commit to the underlying store - protected val appendOnlyTxLog = TransactionalVector[LogEntry]() - - // to be concretized in subclasses - val storage: QueueStorageBackend[A] - - def commit = synchronized { - for (entry <- appendOnlyTxLog) { - (entry: @unchecked) match { - case LogEntry(Some(v), ENQ) => storage.enqueue(uuid, v) - case LogEntry(_, DEQ) => storage.dequeue(uuid) - } - } - appendOnlyTxLog.clear - } - - def abort = synchronized { - appendOnlyTxLog.clear - } - - override def toList = replay - - override def enqueue(elems: A*) = synchronized { - register - elems.foreach(e => appendOnlyTxLog.add(LogEntry(Some(e), ENQ))) - } - - private def replay: List[A] = synchronized { - import scala.collection.mutable.ListBuffer - var elemsStorage = ListBuffer(storage.peek(uuid, 0, storage.size(uuid)): _*) - - for (entry <- appendOnlyTxLog) { - (entry: @unchecked) match { - case LogEntry(Some(v), ENQ) => elemsStorage += v - case LogEntry(_, DEQ) => elemsStorage = elemsStorage.drop(1) - } - } - elemsStorage.toList - } - - override def dequeue: A = synchronized { - register - val l = replay - if (l.isEmpty) throw new NoSuchElementException("trying to dequeue from empty queue") - appendOnlyTxLog.add(LogEntry(None, DEQ)) - l.head - } - - override def clear = synchronized { - register - appendOnlyTxLog.clear - } - - override def size: Int = try { - replay.size - } catch {case e: Exception => 0} - - override def isEmpty: Boolean = size == 0 - - override def +=(elem: A) = { - enqueue(elem) - this - } - - def ++=(elems: Iterator[A]) = { - enqueue(elems.toList: _*) - this - } - - def ++=(elems: Iterable[A]): Unit = this ++= elems.iterator - - override def dequeueFirst(p: A => Boolean): Option[A] = - throw new UnsupportedOperationException("dequeueFirst not supported") - - override def dequeueAll(p: A => Boolean): scala.collection.mutable.Seq[A] = - throw new UnsupportedOperationException("dequeueAll not supported") - - protected def register = { - if (transaction.get.isEmpty) throw new NoTransactionInScopeException - transaction.get.get.register("Queue:" + uuid, this) - } -} - -private[akka] object PersistentSortedSet { - // operations on the SortedSet - sealed trait Op - case object ADD extends Op - case object REM extends Op -} - -/** - * Implements a template for a concrete persistent transactional sorted set based storage. - *

- * Sorting is done based on a zscore. But the computation of zscore has been kept - outside the abstraction. - * - * zscore can be implemented in a variety of ways by the calling class: - * - * <pre> - * trait ZScorable { - *   def toZScore: Float - * } - * - * class Foo extends ZScorable { - *   //.. implementation - * } - * </pre> - * - * Or we can also use views: - * - * <pre> - * class Foo { - *   //.. - * } - * - * implicit def Foo2Scorable(foo: Foo): ZScorable = new ZScorable { - *   def toZScore = { - *     //.. - *   } - * } - * </pre>
- * - * and use foo.toZScore to compute the zscore and pass to the APIs. - * - * @author - */ -trait PersistentSortedSet[A] extends Transactional with Committable with Abortable { - //Import Ops - import PersistentSortedSet._ - - // append only log: records all mutating operations - protected val appendOnlyTxLog = TransactionalVector[LogEntry]() - - // need to override in subclasses e.g. "sameElements" for Array[Byte] - def equal(v1: A, v2: A): Boolean = v1 == v2 - - case class LogEntry(value: A, score: Option[Float], op: Op) - - val storage: SortedSetStorageBackend[A] - - def commit = { - for (entry <- appendOnlyTxLog) { - (entry: @unchecked) match { - case LogEntry(e, Some(s), ADD) => storage.zadd(uuid, String.valueOf(s), e) - case LogEntry(e, _, REM) => storage.zrem(uuid, e) - } - } - appendOnlyTxLog.clear - } - - def abort = { - appendOnlyTxLog.clear - } - - def +(elem: A, score: Float) = add(elem, score) - - def add(elem: A, score: Float) = { - register - appendOnlyTxLog.add(LogEntry(elem, Some(score), ADD)) - } - - def -(elem: A) = remove(elem) - - def remove(elem: A) = { - register - appendOnlyTxLog.add(LogEntry(elem, None, REM)) - } - - protected def replay: List[(A, Float)] = { - val es = collection.mutable.Map() ++ storage.zrangeWithScore(uuid, 0, -1) - - for (entry <- appendOnlyTxLog) { - (entry: @unchecked) match { - case LogEntry(v, Some(s), ADD) => es += ((v, s)) - case LogEntry(v, _, REM) => es -= v - } - } - es.toList - } - - def contains(elem: A): Boolean = replay.map(_._1).contains(elem) - - def size: Int = replay size - - def zscore(elem: A): Float = replay.filter { case (e, s) => equal(e, elem) }.map(_._2).head - - def zrange(start: Int, end: Int): List[(A, Float)] = { - import PersistentSortedSet._ - - // easier would have been to use a TreeSet - // problem is the treeset has to be ordered on the score - // but we cannot kick out elements with duplicate score - // But we need to treat the value (A) as set, i.e. 
replace duplicates with - the latest one, on par with the behavior of redis zrange - val es = replay - - // a multimap with key as A and value as Set of scores - val m = new collection.mutable.HashMap[A, collection.mutable.Set[Float]] - with collection.mutable.MultiMap[A, Float] - for(e <- es) m.addBinding(e._1, e._2) - - // another list for unique values - val as = es.map(_._1).distinct - - // iterate the list of unique values and for each pick the head element - // from the score map - val ts = as.map(a => (a, m(a).head)).sortWith((a, b) => a._2 < b._2) - val l = ts.size - - // -1 means the last element, -2 means the second last - val s = if (start < 0) start + l else start - val e = - if (end < 0) end + l - else if (end >= l) (l - 1) - else end - // slice is open at the end, we need a closed end range - ts.iterator.slice(s, e + 1).toList - } - - protected def register = { - if (transaction.get.isEmpty) throw new NoTransactionInScopeException - transaction.get.get.register("SortedSet:" + uuid, this) - } -} - -trait PersistentSortedSetBinary extends PersistentSortedSet[Array[Byte]] { - import PersistentSortedSet._ - - override def equal(k1: Array[Byte], k2: Array[Byte]): Boolean = k1 sameElements k2 - - override protected def replay: List[(Array[Byte], Float)] = { - val es = collection.mutable.Map() ++ storage.zrangeWithScore(uuid, 0, -1).map { case (k, v) => (ArraySeq(k: _*), v) } - - for (entry <- appendOnlyTxLog) { - (entry: @unchecked) match { - case LogEntry(v, Some(s), ADD) => es += ((ArraySeq(v: _*), s)) - case LogEntry(v, _, REM) => es -= ArraySeq(v: _*) - } - } - es.toList.map { case (k, v) => (k.toArray, v) } - } -} diff --git a/akka-persistence/akka-persistence-common/src/main/scala/akka/StorageBackend.scala b/akka-persistence/akka-persistence-common/src/main/scala/akka/StorageBackend.scala deleted file mode 100644 index 44833f3535..0000000000 --- a/akka-persistence/akka-persistence-common/src/main/scala/akka/StorageBackend.scala +++ /dev/null @@ -1,79 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.common - -// abstracts persistence storage -trait StorageBackend - -// for Maps -trait MapStorageBackend[K, V] extends StorageBackend { - def insertMapStorageEntriesFor(name: String, entries: List[Tuple2[K, V]]) - def insertMapStorageEntryFor(name: String, key: K, value: V) - def removeMapStorageFor(name: String) - def removeMapStorageFor(name: String, key: K) - def getMapStorageEntryFor(name: String, key: K): Option[V] - def getMapStorageSizeFor(name: String): Int - def getMapStorageFor(name: String): List[Tuple2[K, V]] - def getMapStorageRangeFor(name: String, start: Option[K], finish: Option[K], count: Int): List[Tuple2[K, V]] -} - -// for Vectors -trait VectorStorageBackend[T] extends StorageBackend { - def insertVectorStorageEntryFor(name: String, element: T) - def insertVectorStorageEntriesFor(name: String, elements: List[T]) - def updateVectorStorageEntryFor(name: String, index: Int, elem: T) - def getVectorStorageEntryFor(name: String, index: Int): T - def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int): List[T] - def getVectorStorageSizeFor(name: String): Int - def removeVectorStorageEntryFor(name:String):Unit = { - //should remove the "tail" if supported - throw new UnsupportedOperationException("VectorStorageBackend.removeVectorStorageEntry is not supported") - } -} - -// for Ref -trait RefStorageBackend[T] extends StorageBackend { - def insertRefStorageFor(name: String,
element: T) - def getRefStorageFor(name: String): Option[T] -} - -// for Queue -trait QueueStorageBackend[T] extends StorageBackend { - // add to the end of the queue - def enqueue(name: String, item: T): Option[Int] - - // pop from the front of the queue - def dequeue(name: String): Option[T] - - // get the size of the queue - def size(name: String): Int - - // return an array of items currently stored in the queue - // start is the item to begin, count is how many items to return - def peek(name: String, start: Int, count: Int): List[T] - - // completely delete the queue - def remove(name: String): Boolean -} - -trait SortedSetStorageBackend[T] extends StorageBackend { - // add item to sorted set identified by name - def zadd(name: String, zscore: String, item: T): Boolean - - // remove item from sorted set identified by name - def zrem(name: String, item: T): Boolean - - // cardinality of the set identified by name - def zcard(name: String): Int - - // zscore of the item from sorted set identified by name - def zscore(name: String, item: T): Option[Float] - - // zrange from the sorted set identified by name - def zrange(name: String, start: Int, end: Int): List[T] - - // zrange with score from the sorted set identified by name - def zrangeWithScore(name: String, start: Int, end: Int): List[(T, Float)] -} diff --git a/akka-persistence/akka-persistence-common/src/test/scala/MapStorageBackendTest.scala b/akka-persistence/akka-persistence-common/src/test/scala/MapStorageBackendTest.scala deleted file mode 100644 index 4900ea7695..0000000000 --- a/akka-persistence/akka-persistence-common/src/test/scala/MapStorageBackendTest.scala +++ /dev/null @@ -1,177 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.common - -import org.scalatest.matchers.ShouldMatchers -import akka.util.Logging -import org.scalatest.{BeforeAndAfterEach, Spec} -import scala.util.Random -import collection.immutable.{TreeMap, HashMap, HashSet} -import akka.persistence.common.PersistentMapBinary.COrdering._ - - -/** - * Implementation Compatibility test for PersistentMap backend implementations. 
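A concrete backend opts into the shared spec below by supplying its two abstract members (an editorial sketch; MyStorageBackend and its dropAll helper are hypothetical):

    class MyMapStorageBackendTest extends MapStorageBackendTest {
      def storage = MyStorageBackend            // the MapStorageBackend[Array[Byte], Array[Byte]] under test
      def dropMaps = MyStorageBackend.dropAll() // hypothetical helper that wipes state between examples
    }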
- */ - -trait MapStorageBackendTest extends Spec with ShouldMatchers with BeforeAndAfterEach with Logging { - def storage: MapStorageBackend[Array[Byte], Array[Byte]] - - def dropMaps: Unit - - override def beforeEach = { - log.info("beforeEach: dropping maps") - dropMaps - } - - override def afterEach = { - log.info("afterEach: dropping maps") - dropMaps - } - - - describe("A Properly functioning MapStorageBackend") { - it("should remove map storage properly") { - val mapName = "removeTest" - val mkey = "removeTestKey".getBytes - val value = "removeTestValue".getBytes - - storage.insertMapStorageEntryFor(mapName, mkey, value) - storage.getMapStorageEntryFor(mapName, mkey).isDefined should be(true) - storage.removeMapStorageFor(mapName, mkey) - storage.getMapStorageEntryFor(mapName, mkey) should be(None) - - storage.insertMapStorageEntryFor(mapName, mkey, value) - storage.getMapStorageEntryFor(mapName, mkey).isDefined should be(true) - storage.removeMapStorageFor(mapName) - storage.getMapStorageEntryFor(mapName, mkey) should be(None) - } - - it("should insert a single map storage element properly") { - val mapName = "insertSingleTest" - val mkey = "insertSingleTestKey".getBytes - val value = "insertSingleTestValue".getBytes - - storage.insertMapStorageEntryFor(mapName, mkey, value) - storage.getMapStorageEntryFor(mapName, mkey).get should be(value) - storage.removeMapStorageFor(mapName, mkey) - storage.getMapStorageEntryFor(mapName, mkey) should be(None) - - storage.insertMapStorageEntryFor(mapName, mkey, value) - storage.getMapStorageEntryFor(mapName, mkey).get should be(value) - storage.removeMapStorageFor(mapName) - storage.getMapStorageEntryFor(mapName, mkey) should be(None) - } - - - it("should insert multiple map storage elements properly") { - val mapName = "insertMultipleTest" - val rand = new Random(3).nextInt(100) - val entries = (1 to rand).toList.map{ - index => - (("insertMultipleTestKey" + index).getBytes -> ("insertMultipleTestValue" + index).getBytes) - } - - storage.insertMapStorageEntriesFor(mapName, entries) - entries foreach { - _ match { - case (mkey, value) => { - storage.getMapStorageEntryFor(mapName, mkey).isDefined should be(true) - storage.getMapStorageEntryFor(mapName, mkey).get should be(value) - } - } - } - storage.removeMapStorageFor(mapName) - entries foreach { - _ match { - case (mkey, value) => { - storage.getMapStorageEntryFor(mapName, mkey) should be(None) - } - } - } - } - - - it("should accurately track the number of key value pairs in a map") { - val mapName = "sizeTest" - val rand = new Random(3).nextInt(100) - val entries = (1 to rand).toList.map{ - index => - (("sizeTestKey" + index).getBytes -> ("sizeTestValue" + index).getBytes) - } - - storage.insertMapStorageEntriesFor(mapName, entries) - storage.getMapStorageSizeFor(mapName) should be(rand) - } - - - - it("should return all the key value pairs in the map in the correct order when getMapStorageFor(name) is called") { - val mapName = "allTest" - val rand = new Random(3).nextInt(100) - var entries = new TreeMap[Array[Byte], Array[Byte]]()(ArrayOrdering) - (1 to rand).foreach{ - index => - entries += (("allTestKey" + index).getBytes -> ("allTestValue" + index).getBytes) - } - - storage.insertMapStorageEntriesFor(mapName, entries.toList) - val retrieved = storage.getMapStorageFor(mapName) - retrieved.size should be(rand) - entries.size should be(rand) - - - - val entryMap = new HashMap[String, String] ++ entries.map{ - _ match { - case (k, v) => (new String(k), new String(v)) - } - } - val
retrievedMap = new HashMap[String, String] ++ retrieved.map{ - _ match { - case (k, v) => (new String(k), new String(v)) - } - } - - entryMap should equal(retrievedMap) - - (0 until rand).foreach{ - i: Int => { - new String(entries.toList(i)._1) should be(new String(retrieved(i)._1)) - } - } - - } - - it("should return all the key->value pairs that exist in the map that are between start and end, up to count pairs when getMapStorageRangeFor is called") { - //implement if this method will be used - } - - - it("should return Some(null), not None, for a key that has had the value null set and None for a key with no value set") { - val mapName = "nullTest" - val key = "key".getBytes - storage.insertMapStorageEntryFor(mapName, key, null) - storage.getMapStorageEntryFor(mapName, key).get should be(null) - storage.removeMapStorageFor(mapName, key) - storage.getMapStorageEntryFor(mapName, key) should be(None) - } - - it("should not throw an exception when size is called on a non-existent map?") { - storage.getMapStorageSizeFor("nonExistent") should be(0) - } - - it("should not stomp on the map keyset when a map key of 0xff is used") { - val mapName = "keySetStomp" - val key = CommonStorageBackend.mapKeysIndex - storage.insertMapStorageEntryFor(mapName, key, key) - storage.getMapStorageSizeFor(mapName) should be(1) - storage.getMapStorageEntryFor(mapName,key).get should be (key) - } - - - } - -} diff --git a/akka-persistence/akka-persistence-common/src/test/scala/QueueStorageBackendTest.scala b/akka-persistence/akka-persistence-common/src/test/scala/QueueStorageBackendTest.scala deleted file mode 100644 index 767f29bc7e..0000000000 --- a/akka-persistence/akka-persistence-common/src/test/scala/QueueStorageBackendTest.scala +++ /dev/null @@ -1,123 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.common - -import org.scalatest.matchers.ShouldMatchers -import akka.util.Logging -import org.scalatest.{BeforeAndAfterEach, Spec} -import scala.util.Random - -/** - * Implementation Compatibility test for PersistentQueue backend implementations.
- */ - -trait QueueStorageBackendTest extends Spec with ShouldMatchers with BeforeAndAfterEach with Logging { - def storage: QueueStorageBackend[Array[Byte]] - - def dropQueues: Unit - - override def beforeEach = { - log.info("beforeEach: dropping queues") - dropQueues - } - - override def afterEach = { - log.info("afterEach: dropping queues") - dropQueues - } - - - - describe("A Properly functioning QueueStorage Backend") { - it("should enqueue properly when there is capacity in the queue") { - val queue = "enqueueTest" - val value = "enqueueTestValue".getBytes - storage.size(queue) should be(0) - storage.enqueue(queue, value).get should be(1) - storage.size(queue) should be(1) - } - - it("should return None when enqueue is called on a full queue?") { - - } - - it("should dequeue properly when the queue is not empty") { - val queue = "dequeueTest" - val value = "dequeueTestValue".getBytes - storage.size(queue) should be(0) - storage.enqueue(queue, value) - storage.size(queue) should be(1) - storage.dequeue(queue).get should be(value) - } - - it("should return None when dequeue is called on an empty queue") { - val queue = "dequeueTest2" - val value = "dequeueTestValue2".getBytes - storage.size(queue) should be(0) - storage.dequeue(queue) should be(None) - } - - it("should accurately reflect the size of the queue") { - val queue = "sizeTest" - val rand = new Random(3).nextInt(100) - val values = (1 to rand).toList.map {i: Int => ("sizeTestValue" + i).getBytes} - values.foreach {storage.enqueue(queue, _)} - storage.size(queue) should be(rand) - val drand = new Random(3).nextInt(rand) - (1 to drand).foreach { - i: Int => { - storage.dequeue(queue).isDefined should be(true) - storage.size(queue) should be(rand - i) - } - } - } - - it("should support peek properly") { - val queue = "sizeTest" - val rand = new Random(3).nextInt(100) - val values = (1 to rand).toList.map {i: Int => ("peekTestValue" + i)} - storage.remove(queue) - values.foreach {s: String => storage.enqueue(queue, s.getBytes)} - (1 to rand).foreach { - index => { - val peek = storage.peek(queue, 0, index).map {new String(_)} - peek.size should be(index) - values.dropRight(values.size - index).equals(peek) should be(true) - } - } - (0 until rand).foreach { - index => { - val peek = storage.peek(queue, index, rand - index).map {new String(_)} - peek.size should be(rand - index) - values.drop(index).equals(peek) should be(true) - } - } - - //Should we test counts greater than queue size? or greater than queue size - count??? 
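        // A hedged sketch of the bounds check questioned above (editorial, left
        // commented out because truncation rather than an exception is an
        // assumption about backend behavior, not a documented contract):
        //   storage.peek(queue, 0, rand + 10).size should be(rand)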
- } - - it("should not throw an exception when remove is called on a non-existent queue") { - storage.remove("exceptionTest") - } - - it("should remove queue storage properly") { - val queue = "removeTest" - val rand = new Random(3).nextInt(100) - val values = (1 to rand).toList.map {i: Int => ("removeValue" + i).getBytes} - values.foreach {storage.enqueue(queue, _)} - storage.size(queue) should be(rand) - storage.remove(queue) - storage.size(queue) should be(0) - } - - it("should accept null as a value to enqueue and return Some(null) when that value is dequeued") { - val queue = "nullTest" - storage.enqueue(queue, null).get should be(1) - storage.dequeue(queue).get should be(null) - storage.dequeue(queue) should be(None) - } - } - -} diff --git a/akka-persistence/akka-persistence-common/src/test/scala/RefStorageBackendTest.scala b/akka-persistence/akka-persistence-common/src/test/scala/RefStorageBackendTest.scala deleted file mode 100644 index de9f38321d..0000000000 --- a/akka-persistence/akka-persistence-common/src/test/scala/RefStorageBackendTest.scala +++ /dev/null @@ -1,52 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.common - -import org.scalatest.matchers.ShouldMatchers -import akka.util.Logging -import org.scalatest.{BeforeAndAfterEach, Spec} - -/** - * Implementation Compatibility test for PersistentRef backend implementations. - */ - -trait RefStorageBackendTest extends Spec with ShouldMatchers with BeforeAndAfterEach with Logging { - def storage: RefStorageBackend[Array[Byte]] - - def dropRefs: Unit - - override def beforeEach = { - log.info("beforeEach: dropping refs") - dropRefs - } - - override def afterEach = { - log.info("afterEach: dropping refs") - dropRefs - } - - - describe("A Properly functioning RefStorageBackend") { - it("should successfully insert ref storage") { - val name = "RefStorageTest #1" - val value = name.getBytes - storage.insertRefStorageFor(name, value) - storage.getRefStorageFor(name).get should be(value) - } - - it("should return None when getRefStorage is called when no value has been inserted") { - val name = "RefStorageTest #2" - val value = name.getBytes - storage.getRefStorageFor(name) should be(None) - } - - it("Should return None, not Some(null) when getRefStorageFor is called when null has been set") { - val name = "RefStorageTest #3" - storage.insertRefStorageFor(name, null) - storage.getRefStorageFor(name) should be(None) - } - } - -} diff --git a/akka-persistence/akka-persistence-common/src/test/scala/SortedSetStorageBackendTest.scala b/akka-persistence/akka-persistence-common/src/test/scala/SortedSetStorageBackendTest.scala deleted file mode 100644 index 9c0eb7a50c..0000000000 --- a/akka-persistence/akka-persistence-common/src/test/scala/SortedSetStorageBackendTest.scala +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.common - -import org.scalatest.matchers.ShouldMatchers -import akka.util.Logging -import org.scalatest.{BeforeAndAfterEach, Spec} - -/** - * Implementation Compatibility test for PersistentSortedSet backend implementations. 
- */ - -trait SortedSetStorageBackendTest extends Spec with ShouldMatchers with BeforeAndAfterEach with Logging { - def storage: SortedSetStorageBackend[Array[Byte]] - - def dropSortedSets: Unit - - override def beforeEach = { - log.info("beforeEach: dropping sorted sets") - dropSortedSets - } - - override def afterEach = { - log.info("afterEach: dropping sorted sets") - dropSortedSets - } - - - describe("A Properly functioning SortedSetStorageBackend Backend") { - - } - -} diff --git a/akka-persistence/akka-persistence-common/src/test/scala/Ticket343Test.scala b/akka-persistence/akka-persistence-common/src/test/scala/Ticket343Test.scala deleted file mode 100644 index c2d564b7a1..0000000000 --- a/akka-persistence/akka-persistence-common/src/test/scala/Ticket343Test.scala +++ /dev/null @@ -1,368 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.common - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach} -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith - -import akka.actor.{Actor, ActorRef} -import akka.config.Supervision.{OneForOneStrategy, Permanent} -import Actor._ -import akka.stm._ -import akka.util.Logging -import StorageObj._ - - -case class GET(k: String) -case class SET(k: String, v: String) -case class REM(k: String) -case class CONTAINS(k: String) -case object MAP_SIZE -case class MSET(kvs: List[(String, String)]) -case class REMOVE_AFTER_PUT(kvsToAdd: List[(String, String)], ksToRem: List[String]) -case class CLEAR_AFTER_PUT(kvsToAdd: List[(String, String)]) -case class PUT_WITH_SLICE(kvsToAdd: List[(String, String)], start: String, cnt: Int) -case class PUT_REM_WITH_SLICE(kvsToAdd: List[(String, String)], ksToRem: List[String], start: String, cnt: Int) - -case class VADD(v: String) -case class VUPD(i: Int, v: String) -case class VUPD_AND_ABORT(i: Int, v: String) -case class VGET(i: Int) -case object VSIZE -case object VPOP -case class VGET_AFTER_VADD(vsToAdd: List[String], isToFetch: List[Int]) -case class VADD_WITH_SLICE(vsToAdd: List[String], start: Int, cnt: Int) - - -object StorageObj { - var getMap: String => PersistentMap[Array[Byte], Array[Byte]] = _ - var getVector: String => PersistentVector[Array[Byte]] = _ - - class SampleMapStorage extends Actor { - self.lifeCycle = Permanent - val FOO_MAP = "akka.sample.map" - - private var fooMap = atomic {StorageObj.getMap(FOO_MAP)} - - def receive = { - case SET(k, v) => - atomic { - fooMap += (k.getBytes, v.getBytes) - } - self.reply((k, v)) - - case GET(k) => - val v = atomic { - fooMap.get(k.getBytes).map(new String(_)).getOrElse(k + " Not found") - } - self.reply(v) - - case REM(k) => - val v = atomic { - fooMap -= k.getBytes - } - self.reply(k) - - case CONTAINS(k) => - val v = atomic { - fooMap contains k.getBytes - } - self.reply(v) - - case MAP_SIZE => - val v = atomic { - fooMap.size - } - self.reply(v) - - case MSET(kvs) => atomic { - kvs.foreach {kv => fooMap += (kv._1.getBytes, kv._2.getBytes)} - } - self.reply(kvs.size) - - case REMOVE_AFTER_PUT(kvs2add, ks2rem) => atomic { - kvs2add.foreach { - kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - - ks2rem.foreach { - k => - fooMap -= k.getBytes - } - } - self.reply(fooMap.size) - - case CLEAR_AFTER_PUT(kvs2add) => atomic { - kvs2add.foreach { - kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - fooMap.clear - } - self.reply(true) - - case PUT_WITH_SLICE(kvs2add, from, cnt) => - val v = atomic { - 
kvs2add.foreach { - kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - fooMap.slice(Some(from.getBytes), cnt) - } - self.reply(v: List[(Array[Byte], Array[Byte])]) - - case PUT_REM_WITH_SLICE(kvs2add, ks2rem, from, cnt) => - val v = atomic { - kvs2add.foreach { - kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - ks2rem.foreach { - k => - fooMap -= k.getBytes - } - fooMap.slice(Some(from.getBytes), cnt) - } - self.reply(v: List[(Array[Byte], Array[Byte])]) - } - } - - class SampleVectorStorage extends Actor { - self.lifeCycle = Permanent - val FOO_VECTOR = "akka.sample.vector" - - private var fooVector = atomic {StorageObj.getVector(FOO_VECTOR)} - - def receive = { - case VADD(v) => - val size = - atomic { - fooVector + v.getBytes - fooVector length - } - self.reply(size) - - case VGET(index) => - val ind = - atomic { - fooVector get index - } - self.reply(ind) - - case VGET_AFTER_VADD(vs, is) => - val els = - atomic { - vs.foreach(fooVector + _.getBytes) - (is.foldRight(List[Array[Byte]]())(fooVector.get(_) :: _)).map(new String(_)) - } - self.reply(els) - - case VUPD_AND_ABORT(index, value) => - val l = - atomic { - fooVector.update(index, value.getBytes) - // force fail - fooVector get 100 - } - self.reply(index) - - case VADD_WITH_SLICE(vs, s, c) => - val l = - atomic { - vs.foreach(fooVector + _.getBytes) - fooVector.slice(Some(s), None, c) - } - self.reply(l.map(new String(_))) - - case VPOP => - val p = atomic{fooVector.pop} - self.reply(p) - } - } -} - - - -trait Ticket343Test extends -Spec with - ShouldMatchers with - BeforeAndAfterEach { - def getMap: String => PersistentMap[Array[Byte], Array[Byte]] - - def getVector: String => PersistentVector[Array[Byte]] - - def dropMapsAndVectors: Unit - - override def beforeEach { - StorageObj.getMap = getMap - StorageObj.getVector = getVector - dropMapsAndVectors - println("** dropMapsAndVectors") - } - - override def afterEach { - dropMapsAndVectors - println("** dropMapsAndVectors") - } - - describe("Ticket 343 Issue #1") { - it("remove after put should work within the same transaction") { - val proc = actorOf[SampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) - - (proc !! GET("dg")).getOrElse("Get failed") should equal("1") - (proc !! GET("mc")).getOrElse("Get failed") should equal("2") - (proc !! GET("nd")).getOrElse("Get failed") should equal("3") - - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) - - val add = List(("a", "1"), ("b", "2"), ("c", "3")) - val rem = List("a", "debasish") - (proc !! REMOVE_AFTER_PUT(add, rem)).getOrElse("REMOVE_AFTER_PUT failed") should equal(5) - - (proc !! GET("debasish")).getOrElse("debasish not found") should equal("debasish Not found") - (proc !! GET("a")).getOrElse("a not found") should equal("a Not found") - - (proc !! GET("b")).getOrElse("b not found") should equal("2") - - (proc !! CONTAINS("b")).getOrElse("b not found") should equal(true) - (proc !! CONTAINS("debasish")).getOrElse("debasish not found") should equal(false) - (proc !! 
MAP_SIZE).getOrElse("Size failed") should equal(5) - proc.stop - } - } - - describe("Ticket 343 Issue #2") { - it("clear after put should work within the same transaction") { - val proc = actorOf[SampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - val add = List(("a", "1"), ("b", "2"), ("c", "3")) - (proc !! CLEAR_AFTER_PUT(add)).getOrElse("CLEAR_AFTER_PUT failed") should equal(true) - - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(0) - proc.stop - } - } - - describe("Ticket 343 Issue #3") { - it("map size should change after the transaction") { - val proc = actorOf[SampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) - - (proc !! GET("dg")).getOrElse("Get failed") should equal("1") - (proc !! GET("mc")).getOrElse("Get failed") should equal("2") - (proc !! GET("nd")).getOrElse("Get failed") should equal("3") - proc.stop - } - } - - describe("slice test") { - it("should pass") { - val proc = actorOf[SampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - // (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) - - (proc !! PUT_WITH_SLICE(List(("ec", "1"), ("tb", "2"), ("mc", "10")), "dg", 3)).get.asInstanceOf[List[(Array[Byte], Array[Byte])]].map {case (k, v) => (new String(k), new String(v))} should equal(List(("dg", "1"), ("ec", "1"), ("mc", "10"))) - - (proc !! PUT_REM_WITH_SLICE(List(("fc", "1"), ("gb", "2"), ("xy", "10")), List("tb", "fc"), "dg", 5)).get.asInstanceOf[List[(Array[Byte], Array[Byte])]].map {case (k, v) => (new String(k), new String(v))} should equal(List(("dg", "1"), ("ec", "1"), ("gb", "2"), ("mc", "10"), ("nd", "3"))) - proc.stop - } - } - - describe("Ticket 343 Issue #4") { - it("vector get should not ignore elements that were in vector before transaction") { - - val proc = actorOf[SampleVectorStorage] - proc.start - - // add 4 elements in separate transactions //also test add + pop of a 5th element - - (proc !! VADD("ticktock")).getOrElse("VADD failed") should equal(1) - (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(2) - (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(3) - (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(4) - (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(5) - (proc !! VPOP).getOrElse("VPOP failed") should equal("ticktock".getBytes) - - new String((proc !! VGET(0)).get.asInstanceOf[Array[Byte]]) should equal("nilanjan") - new String((proc !! VGET(1)).get.asInstanceOf[Array[Byte]]) should equal("ramanendu") - new String((proc !! VGET(2)).get.asInstanceOf[Array[Byte]]) should equal("maulindu") - new String((proc !! 
VGET(3)).get.asInstanceOf[Array[Byte]]) should equal("debasish") - - // now add 3 more and do gets in the same transaction - (proc !! VGET_AFTER_VADD(List("a", "b", "c"), List(0, 2, 4))).get.asInstanceOf[List[String]] should equal(List("c", "a", "ramanendu")) - proc.stop - } - } - - describe("Ticket 343 Issue #6") { - it("vector update should not ignore transaction") { - val proc = actorOf[SampleVectorStorage] - proc.start - - // add 4 elements in separate transactions - (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) - (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) - (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) - (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) - - evaluating { - (proc !! VUPD_AND_ABORT(0, "virat")).getOrElse("VUPD_AND_ABORT failed") - } should produce[Exception] - - // update aborts and hence values will remain unchanged - new String((proc !! VGET(0)).get.asInstanceOf[Array[Byte]]) should equal("nilanjan") - proc.stop - } - } - - describe("Ticket 343 Issue #5") { - it("vector slice() should not ignore elements added in current transaction") { - val proc = actorOf[SampleVectorStorage] - proc.start - - // add 4 elements in separate transactions - (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) - (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) - (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) - (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) - - // slice with no new elements added in current transaction - (proc !! VADD_WITH_SLICE(List(), 2, 2)).getOrElse("VADD_WITH_SLICE failed") should equal(Vector("maulindu", "debasish")) - - // slice with new elements added in current transaction - (proc !! VADD_WITH_SLICE(List("a", "b", "c", "d"), 2, 2)).getOrElse("VADD_WITH_SLICE failed") should equal(Vector("b", "a")) - proc.stop - } - } -} diff --git a/akka-persistence/akka-persistence-common/src/test/scala/VectorStorageBackendTest.scala b/akka-persistence/akka-persistence-common/src/test/scala/VectorStorageBackendTest.scala deleted file mode 100644 index 02c50689ff..0000000000 --- a/akka-persistence/akka-persistence-common/src/test/scala/VectorStorageBackendTest.scala +++ /dev/null @@ -1,183 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.common - -import org.scalatest.matchers.ShouldMatchers -import akka.util.Logging -import org.scalatest.{BeforeAndAfterEach, Spec} -import scala.util.Random - -/** - * Implementation Compatibility test for PersistentVector backend implementations. 
- */ - -trait VectorStorageBackendTest extends Spec with ShouldMatchers with BeforeAndAfterEach with Logging { - def storage: VectorStorageBackend[Array[Byte]] - - def dropVectors: Unit - - override def beforeEach = { - log.info("beforeEach: dropping vectors") - dropVectors - } - - override def afterEach = { - log.info("afterEach: dropping vectors") - dropVectors - } - - - describe("A Properly functioning VectorStorageBackend") { - it("should insertVectorStorageEntry as a logical prepend operation to the existing list") { - val vector = "insertSingleTest" - val rand = new Random(3).nextInt(100) - val values = (0 to rand).toList.map{ - i: Int => vector + "value" + i - } - storage.getVectorStorageSizeFor(vector) should be(0) - values.foreach{ - s: String => storage.insertVectorStorageEntryFor(vector, s.getBytes) - } - val shouldRetrieve = values.reverse - (0 to rand).foreach{ - i: Int => { - shouldRetrieve(i) should be(new String(storage.getVectorStorageEntryFor(vector, i))) - } - } - } - - it("should insertVectorStorageEntries as a logical prepend operation to the existing list") { - val vector = "insertMultiTest" - val rand = new Random(3).nextInt(100) - val values = (0 to rand).toList.map{ - i: Int => vector + "value" + i - } - storage.getVectorStorageSizeFor(vector) should be(0) - storage.insertVectorStorageEntriesFor(vector, values.map{ - s: String => s.getBytes - }) - val shouldRetrieve = values.reverse - (0 to rand).foreach{ - i: Int => { - shouldRetrieve(i) should be(new String(storage.getVectorStorageEntryFor(vector, i))) - } - } - } - - it("should successfully update entries") { - val vector = "updateTest" - val rand = new Random(3).nextInt(100) - val values = (0 to rand).toList.map{ - i: Int => vector + "value" + i - } - val urand = new Random(3).nextInt(rand) - storage.insertVectorStorageEntriesFor(vector, values.map{ - s: String => s.getBytes - }) - val toUpdate = "updated" + values.reverse(urand) - storage.updateVectorStorageEntryFor(vector, urand, toUpdate.getBytes) - toUpdate should be(new String(storage.getVectorStorageEntryFor(vector, urand))) - } - - it("should return the correct value from getVectorStorageFor") { - val vector = "getTest" - val rand = new Random(3).nextInt(100) - val values = (0 to rand).toList.map{ - i: Int => vector + "value" + i - } - val urand = new Random(3).nextInt(rand) - storage.insertVectorStorageEntriesFor(vector, values.map{ - s: String => s.getBytes - }) - values.reverse(urand) should be(new String(storage.getVectorStorageEntryFor(vector, urand))) - } - - it("should return the correct values from getVectorStorageRangeFor") { - val vector = "getTest" - val rand = new Random(3).nextInt(100) - val drand = new Random(3).nextInt(rand) - val values = (0 to rand).toList.map{ - i: Int => vector + "value" + i - } - storage.insertVectorStorageEntriesFor(vector, values.map{ - s: String => s.getBytes - }) - values.reverse should be(storage.getVectorStorageRangeFor(vector, None, None, rand + 1).map{ - b: Array[Byte] => new String(b) - }) - (0 to drand).foreach{ - i: Int => { - val value: String = vector + "value" + (rand - i) - log.debug(value) - List(value) should be(storage.getVectorStorageRangeFor(vector, Some(i), None, 1).map{ - b: Array[Byte] => new String(b) - }) - } - } - } - - - it("should support remove properly") { - val vector = "removeTest" - val rand = new Random(3).nextInt(100) - val values = (0 to rand).toList.map{ - i: Int => vector + "value" + i - } - storage.insertVectorStorageEntriesFor(vector, values.map{ - s: String => s.getBytes - }) - 
storage.getVectorStorageSizeFor(vector) should be(values.size) - (1 to rand).foreach{ - i: Int => { - storage.removeVectorStorageEntryFor(vector) - values.reverse.dropRight(i) should be(storage.getVectorStorageRangeFor(vector, None, None, rand + 1 - i).map{ - b: Array[Byte] => new String(b) - }) - } - - } - storage.removeVectorStorageEntryFor(vector) - storage.getVectorStorageSizeFor(vector) should be(0) - storage.insertVectorStorageEntriesFor(vector, values.map{ - s: String => s.getBytes - }) - storage.getVectorStorageSizeFor(vector) should be(values.size) - values.foreach{ - s => storage.removeVectorStorageEntryFor(vector) - } - storage.getVectorStorageSizeFor(vector) should be(0) - - } - - it("should behave properly when the range used in getVectorStorageRangeFor has indexes outside the current size of the vector") { - //what is proper? - } - - it("should return null when getVectorStorageEntryFor is called on a null entry") { - //What is proper? - val vector = "nullTest" - storage.insertVectorStorageEntryFor(vector, null) - storage.getVectorStorageEntryFor(vector, 0) should be(null) - } - - it("should throw a StorageException when there is an attempt to retrieve an index larger than the size of the Vector") { - val vector = "tooLargeRetrieve" - storage.insertVectorStorageEntryFor(vector, null) - evaluating{ - storage.getVectorStorageEntryFor(vector, 9) - } should produce[StorageException] - } - - it("should throw a StorageException when there is an attempt to update an index larger than the size of the Vector") { - val vector = "tooLargeUpdate" - storage.insertVectorStorageEntryFor(vector, null) - evaluating{ - storage.updateVectorStorageEntryFor(vector, 9, null) - } should produce[StorageException] - } - - } - -} diff --git a/akka-persistence/akka-persistence-couchdb/src/main/scala/akka/CouchDBStorage.scala b/akka-persistence/akka-persistence-couchdb/src/main/scala/akka/CouchDBStorage.scala deleted file mode 100644 index 83180a9b4a..0000000000 --- a/akka-persistence/akka-persistence-couchdb/src/main/scala/akka/CouchDBStorage.scala +++ /dev/null @@ -1,47 +0,0 @@ -package akka.persistence.couchdb - -import akka.actor.{newUuid} -import akka.stm._ -import akka.persistence.common._ - -object CouchDBStorage extends Storage { - type ElementType = Array[Byte] - - def newMap: PersistentMap[ElementType, ElementType] = newMap(newUuid.toString) - def newVector: PersistentVector[ElementType] = newVector(newUuid.toString) - def newRef: PersistentRef[ElementType] = newRef(newUuid.toString) - - def getMap(id: String): PersistentMap[ElementType, ElementType] = newMap(id) - def getVector(id: String): PersistentVector[ElementType] = newVector(id) - def getRef(id: String): PersistentRef[ElementType] = newRef(id) - - def newMap(id: String): PersistentMap[ElementType, ElementType] = new CouchDBPersistentMap(id) - def newVector(id: String): PersistentVector[ElementType] = new CouchDBPersistentVector(id) - def newRef(id: String): PersistentRef[ElementType] = new CouchDBPersistentRef(id) -} - -/** - * Implements a persistent transactional map based on the CouchDB storage. - * - * @author - */ -class CouchDBPersistentMap(id: String) extends PersistentMapBinary { - val uuid = id - val storage = CouchDBStorageBackend -} - -/** - * Implements a persistent transactional vector based on the CouchDB - * storage.
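- * New elements are prepended to the stored list, so index 0 always refers to the most recently inserted entry.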
- * - * @author - */ -class CouchDBPersistentVector(id: String) extends PersistentVector[Array[Byte]] { - val uuid = id - val storage = CouchDBStorageBackend -} - -class CouchDBPersistentRef(id: String) extends PersistentRef[Array[Byte]] { - val uuid = id - val storage = CouchDBStorageBackend -} diff --git a/akka-persistence/akka-persistence-couchdb/src/main/scala/akka/CouchDBStorageBackend.scala b/akka-persistence/akka-persistence-couchdb/src/main/scala/akka/CouchDBStorageBackend.scala deleted file mode 100644 index e86ad9bfd1..0000000000 --- a/akka-persistence/akka-persistence-couchdb/src/main/scala/akka/CouchDBStorageBackend.scala +++ /dev/null @@ -1,210 +0,0 @@ -package akka.persistence.couchdb - -import akka.stm._ -import akka.persistence.common._ -import akka.util.Logging -import akka.config.Config.config - - -import org.apache.commons.httpclient.methods.{GetMethod, PostMethod, PutMethod, DeleteMethod} -import org.apache.commons.httpclient.params.HttpMethodParams -import org.apache.commons.httpclient.methods._ -import org.apache.commons.httpclient.{DefaultHttpMethodRetryHandler, HttpClient} - -import scala.util.parsing.json._; -import sjson.json._ -import DefaultProtocol._ - - - -private [akka] object CouchDBStorageBackend extends - MapStorageBackend[Array[Byte], Array[Byte]] with - VectorStorageBackend[Array[Byte]] with - RefStorageBackend[Array[Byte]] with - Logging { - - - import dispatch.json._ - - implicit object widgetWrites extends Writes[Map[String,Any]] { - def writes(o: Map[String,Any]): JsValue = JsValue(o) - } - - lazy val URL = config. - getString("akka.persistence.couchdb.url"). - getOrElse(throw new IllegalArgumentException("'akka.persistence.couchdb.url' not found in config")) - - def drop() = { - val client = new HttpClient() - val delete = new DeleteMethod(URL) - client.executeMethod(delete) - } - - def create() = { - val client = new HttpClient() - val put = new PutMethod(URL) - put.setRequestEntity(new StringRequestEntity("", null, "utf-8")) - put.setRequestHeader("Content-Type", "application/json") - client.executeMethod(put) - put.getResponseBodyAsString - } - - private def storeMap(name: String, postfix: String, entries: List[(Array[Byte], Array[Byte])]) ={ - var m = entries.map(e=>(new String(e._1) -> new String(e._2))).toMap + ("_id" -> (name + postfix)) - val dataJson = JsonSerialization.tojson(m) - postData(URL, dataJson.toString) - } - - private def storeMap(name: String, postfix: String, entries: Map[String, Any]) ={ - postData(URL, JsonSerialization.tojson(entries + ("_id" -> (name + postfix))).toString) - } - - private def getResponseForNameAsMap(name: String, postfix: String): Option[Map[String, Any]] = { - getResponse(URL + name + postfix).flatMap(JSON.parseFull(_)).asInstanceOf[Option[Map[String, Any]]] - } - - - def insertMapStorageEntriesFor(name: String, entries: List[(Array[Byte], Array[Byte])]) ={ - val newDoc = getResponseForNameAsMap(name, "_map").getOrElse(Map[String, Any]()) ++ - entries.map(e => (new String(e._1) -> new String(e._2))).toMap - storeMap(name, "_map", newDoc) - } - - def insertMapStorageEntryFor(name: String, key: Array[Byte], value: Array[Byte])={ - insertMapStorageEntriesFor(name, List((key, value))) - } - - - def removeMapStorageFor(name: String) { - lazy val url = URL + name + "_map" - findDocRev(name + "_map").foreach(deleteData(url, _)) - } - - def removeMapStorageFor(name: String, key: Array[Byte]): Unit = { - lazy val sKey = new String(key) - // if we can't find the map for name, then we don't need to delete it. 
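- // CouchDB replaces documents wholesale rather than patching them in place, so removing a single key re-posts the whole map document minus that key (the _rev fetched along with the document is what lets CouchDB treat the POST as an update).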
- getResponseForNameAsMap(name, "_map").foreach(doc => storeMap(name, "_map", doc - sKey)) - } - - def getMapStorageEntryFor(name: String, key: Array[Byte]): Option[Array[Byte]] = { - lazy val sKey = new String(key) - getResponseForNameAsMap(name, "_map").flatMap(_.get(sKey)).asInstanceOf[Option[String]].map(_.getBytes) - } - - def getMapStorageSizeFor(name: String): Int = getMapStorageFor(name).size - - def getMapStorageFor(name: String): List[(Array[Byte], Array[Byte])] = { - val m = getResponseForNameAsMap(name, "_map").map(_ - ("_id", "_rev")).getOrElse(Map[String, Any]()) - m.toList.map(e => (e._1.getBytes, e._2.asInstanceOf[String].getBytes)) - } - - def getMapStorageRangeFor(name: String, start: Option[Array[Byte]], finish: Option[Array[Byte]], count: Int): List[(Array[Byte], Array[Byte])] = { - val m = getResponseForNameAsMap(name, "_map").map(_ - ("_id", "_rev")).getOrElse(Map[String, Any]()) - val keys = m.keys.toList.sortWith(_ < _) - - // if the supplied start is not defined, get the head of keys - val s = start.map(new String(_)).getOrElse(keys.head) - - // if the supplied finish is not defined, get the last element of keys - val f = finish.map(new String(_)).getOrElse(keys.last) - - val c = if (count == 0) Int.MaxValue else count - // slice from keys: both ends inclusive - val ks = keys.slice(keys.indexOf(s), scala.math.min(keys.indexOf(s) + c, keys.indexOf(f) + 1)) - ks.map(k => (k.getBytes, m(k).asInstanceOf[String].getBytes)) - } - - def insertVectorStorageEntryFor(name: String, element: Array[Byte]) = { - insertVectorStorageEntriesFor(name, List(element)) - } - - def insertVectorStorageEntriesFor(name: String, elements: List[Array[Byte]]) = { - val m = getResponseForNameAsMap(name, "_vector").getOrElse(Map[String, Any]()) - val v = elements.map(x =>new String(x)) ::: m.getOrElse("vector", List[String]()).asInstanceOf[List[String]] - storeMap(name, "_vector", m + ("vector" -> v)) - } - - def updateVectorStorageEntryFor(name: String, index: Int, elem: Array[Byte]) = { - val m = getResponseForNameAsMap(name, "_vector").getOrElse(Map[String, Any]()) - val v: List[String] = m.getOrElse("vector", List[String]()).asInstanceOf[List[String]] - if (v.indices.contains(index)) { - storeMap(name, "_vector", m + ("vector" -> v.updated(index, new String(elem)))) - } - } - - def getVectorStorageEntryFor(name: String, index: Int): Array[Byte] ={ - val v = getResponseForNameAsMap(name, "_vector").flatMap(_.get("vector")).getOrElse(List[String]()).asInstanceOf[List[String]] - if (v.indices.contains(index)) - v(index).getBytes - else - Array[Byte]() - } - - def getVectorStorageSizeFor(name: String): Int ={ - getResponseForNameAsMap(name, "_vector").flatMap(_.get("vector")).map(_.asInstanceOf[List[String]].size).getOrElse(0) - } - - def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int): List[Array[Byte]] = { - val v = getResponseForNameAsMap(name, "_vector").flatMap(_.get("vector")).asInstanceOf[Option[List[String]]].getOrElse(List[String]()) - val s = start.getOrElse(0) - val f = finish.getOrElse(v.length) - val c = if (count == 0) v.length else count - v.slice(s, scala.math.min(s + c, f)).map(_.getBytes) - } - - def insertRefStorageFor(name: String, element: Array[Byte]) ={ - val newDoc = getResponseForNameAsMap(name, "_ref").getOrElse(Map[String, Any]()) + ("ref" -> new String(element)) - storeMap(name, "_ref", newDoc) - } - - def getRefStorageFor(name: String): Option[Array[Byte]] ={ - getResponseForNameAsMap(name, 
"_ref").flatMap(_.get("ref")).map(_.asInstanceOf[String].getBytes) - } - - private def findDocRev(name: String) = { - getResponse(URL + name).flatMap(JSON.parseFull(_)).asInstanceOf[Option[Map[String, Any]]] - .flatMap(_.get("_rev")).asInstanceOf[Option[String]] - } - - private def deleteData(url:String, rev:String): Option[String] = { - val client = new HttpClient() - val delete = new DeleteMethod(url) - delete.setRequestHeader("If-Match", rev) - client.executeMethod(delete) - - val response = delete.getResponseBodyAsString() - if (response != null) - Some(response) - else - None - } - - private def postData(url: String, data: String): Option[String] = { - val client = new HttpClient() - val post = new PostMethod(url) - post.setRequestEntity(new StringRequestEntity(data, null, "utf-8")) - post.setRequestHeader("Content-Type", "application/json") - client.executeMethod(post) - val response = post.getResponseBodyAsString - if (response != null) - Some(response) - else - None - } - - private def getResponse(url: String): Option[String] = { - val client = new HttpClient() - val method = new GetMethod(url) - - method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, - new DefaultHttpMethodRetryHandler(3, false)) - - client.executeMethod(method) - val response = method.getResponseBodyAsString - if (method.getStatusCode == 200 && response != null) - Some(response) - else - None - } -} - diff --git a/akka-persistence/akka-persistence-couchdb/src/test/scala/CouchDBPersistentActorSpec.scala b/akka-persistence/akka-persistence-couchdb/src/test/scala/CouchDBPersistentActorSpec.scala deleted file mode 100644 index 2433f90e80..0000000000 --- a/akka-persistence/akka-persistence-couchdb/src/test/scala/CouchDBPersistentActorSpec.scala +++ /dev/null @@ -1,158 +0,0 @@ -package akka.persistence.couchdb - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.BeforeAndAfterEach -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith - -import akka.actor.{Actor, ActorRef} -import Actor._ -import akka.stm._ - - -case class Balance(accountNo: String) -case class Debit(accountNo: String, amount: Int) -case class MultiDebit(accountNo: String, amounts: List[Int]) -case class Credit(accountNo: String, amount: Int) -case class Log(start: Int, finish: Int) -case object LogSize - -class BankAccountActor extends Actor { - - private val accountState = CouchDBStorage.newMap - private val txnLog = CouchDBStorage.newVector - - import sjson.json.DefaultProtocol._ - import sjson.json.JsonSerialization._ - - def receive = { case message => atomic { atomicReceive(message) } } - - def atomicReceive: Receive = { - // check balance - case Balance(accountNo) => - txnLog.add(("Balance:" + accountNo).getBytes) - self.reply( - accountState.get(accountNo.getBytes) - .map(frombinary[Int](_)) - .getOrElse(0)) - - // debit amount: can fail - case Debit(accountNo, amount) => - txnLog.add(("Debit:" + accountNo + " " + amount).getBytes) - val m = accountState.get(accountNo.getBytes) - .map(frombinary[Int](_)) - .getOrElse(0) - - accountState.put(accountNo.getBytes, tobinary(m - amount)) - if (amount > m) fail - - self.reply(m - amount) - - // many debits: can fail - // demonstrates true rollback even if multiple puts have been done - case MultiDebit(accountNo, amounts) => - val sum = amounts.foldRight(0)(_ + _) - txnLog.add(("MultiDebit:" + accountNo + " " + sum).getBytes) - - val m = accountState.get(accountNo.getBytes) - .map(frombinary[Int](_)) - .getOrElse(0) - - var cbal = 
m - amounts.foreach { - amount => - accountState.put(accountNo.getBytes, tobinary(m - amount)) - cbal = cbal - amount - if (cbal < 0) fail - } - - self.reply(m - sum) - - // credit amount - case Credit(accountNo, amount) => - txnLog.add(("Credit:" + accountNo + " " + amount).getBytes) - val m = accountState.get(accountNo.getBytes) - .map(frombinary[Int](_)) - .getOrElse(0) - - accountState.put(accountNo.getBytes, tobinary(m + amount)) - - self.reply(m + amount) - - case LogSize => - self.reply(txnLog.length) - - case Log(start, finish) => - self.reply(txnLog.slice(start, finish).map(new String(_))) - } - - def fail = throw new RuntimeException("Expected exception; to test fault-tolerance") -} - -@RunWith(classOf[JUnitRunner]) -class CouchDBPersistentActor extends - Spec with - ShouldMatchers with - BeforeAndAfterEach { - - override def beforeEach { - CouchDBStorageBackend.create - } - - override def afterEach { - CouchDBStorageBackend.drop - } - - describe("successful debit") { - it("should debit successfully") { - Actor.log.info("Successful Debit starting") - val bactor = actorOf[BankAccountActor] - bactor.start - bactor !! Credit("a-123", 5000) - Actor.log.info("Credited") - bactor !! Debit("a-123", 3000) - Actor.log.info("Debited") - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(2000) - Actor.log.info("Balance matched") - bactor !! Credit("a-123", 7000) - Actor.log.info("Credited") - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(9000) - Actor.log.info("Balance matched") - bactor !! Debit("a-123", 8000) - Actor.log.info("Debited") - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(1000) - Actor.log.info("Balance matched") - (bactor !! LogSize).get.asInstanceOf[Int] should equal(7) - (bactor !! Log(0, 7)).get.asInstanceOf[Iterable[String]].size should equal(7) - } - } - - describe("unsuccessful debit") { - it("debit should fail") { - val bactor = actorOf[BankAccountActor] - bactor.start - bactor !! Credit("a-123", 5000) - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(5000) - evaluating { - bactor !! Debit("a-123", 7000) - } should produce [Exception] - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(5000) - (bactor !! LogSize).get.asInstanceOf[Int] should equal(3) - } - } - - describe("unsuccessful multidebit") { - it("multidebit should fail") { - val bactor = actorOf[BankAccountActor] - bactor.start - bactor !! Credit("a-123", 5000) - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(5000) - evaluating { - bactor !! MultiDebit("a-123", List(1000, 2000, 4000)) - } should produce [Exception] - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(5000) - (bactor !!
LogSize).get.asInstanceOf[Int] should equal(3) - } - } -} diff --git a/akka-persistence/akka-persistence-couchdb/src/test/scala/CouchDBStorageBackendSpec.scala b/akka-persistence/akka-persistence-couchdb/src/test/scala/CouchDBStorageBackendSpec.scala deleted file mode 100644 index c4a44666a5..0000000000 --- a/akka-persistence/akka-persistence-couchdb/src/test/scala/CouchDBStorageBackendSpec.scala +++ /dev/null @@ -1,195 +0,0 @@ -package akka.persistence.couchdb - -import org.specs._ -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith - -import akka.serialization.Serializable -import akka.serialization.Serializer._ - -import CouchDBStorageBackend._ -import sbinary._ -import sbinary.Operations._ -import sbinary.DefaultProtocol._ -import java.util.{Calendar, Date} - -@RunWith(classOf[JUnitRunner]) -class CouchDBStorageBackendSpec extends Specification { - doBeforeSpec { - CouchDBStorageBackend.create() - } - - doAfterSpec { - CouchDBStorageBackend.drop() - } - - "CouchDBStorageBackend store and query in map" should { - "enter 4 entries for transaction T-1" in { - insertMapStorageEntryFor("T-1", "debasish.company".getBytes, "anshinsoft".getBytes) - insertMapStorageEntryFor("T-1", "debasish.language".getBytes, "java".getBytes) - insertMapStorageEntryFor("T-1", "debasish.age".getBytes, "44".getBytes) - insertMapStorageEntryFor("T-1", "debasish.spouse".getBytes, "paramita".getBytes) - - getMapStorageSizeFor("T-1") mustEqual(4) - new String(getMapStorageEntryFor("T-1", "debasish.language".getBytes).get) mustEqual("java") - getMapStorageSizeFor("T-1") mustEqual(4) - } - - "enter key/values for another transaction T-2" in { - insertMapStorageEntryFor("T-2", "debasish.age".getBytes, "49".getBytes) - insertMapStorageEntryFor("T-2", "debasish.spouse".getBytes, "paramita".getBytes) - getMapStorageSizeFor("T-2") mustEqual(2) - } - - "remove map storage for T-99" in { - insertMapStorageEntryFor("T-99", "provider".getBytes, "googleapp".getBytes) - insertMapStorageEntryFor("T-99", "quota".getBytes, "100mb".getBytes) - getMapStorageSizeFor("T-99") mustEqual(2) - removeMapStorageFor("T-99", "quota".getBytes) - getMapStorageSizeFor("T-99") mustEqual(1) - getMapStorageEntryFor("T-99", "quota".getBytes) mustEqual(None) - } - - "remove map storage for T-1 and T2" in { - removeMapStorageFor("T-1") - removeMapStorageFor("T-2") - getMapStorageSizeFor("T-1") mustEqual(0) - getMapStorageSizeFor("T-2") mustEqual(0) - } - } - - "CouchDBStorageBackend store and query long value in map" should { - "enter 4 entries for transaction T-11" in { - val d = Calendar.getInstance.getTime.getTime - insertMapStorageEntryFor("T-11", "steve".getBytes, toByteArray[Long](d)) - insertMapStorageEntryFor("T-11", "john".getBytes, toByteArray[Long](d + 1)) - insertMapStorageEntryFor("T-11", "bill".getBytes, toByteArray[Long](d * 999)) - insertMapStorageEntryFor("T-11", "david".getBytes, toByteArray[Long](d / 2)) - - getMapStorageSizeFor("T-11") mustEqual(4) - fromByteArray[Long](getMapStorageEntryFor("T-11", "steve".getBytes).get) mustEqual(d) - } - } - - - "Range query in maps" should { - "enter 7 entries in couchdb for transaction T-5" in { - insertMapStorageEntryFor("T-5", "trade.refno".getBytes, "R-123".getBytes) - insertMapStorageEntryFor("T-5", "trade.instrument".getBytes, "IBM".getBytes) - insertMapStorageEntryFor("T-5", "trade.type".getBytes, "BUY".getBytes) - insertMapStorageEntryFor("T-5", "trade.account".getBytes, "A-123".getBytes) - insertMapStorageEntryFor("T-5", "trade.amount".getBytes, "1000000".getBytes) 
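- // the range assertions below rely on getMapStorageRangeFor sorting keys lexicographically, with count == 0 treated as "no limit"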
- insertMapStorageEntryFor("T-5", "trade.quantity".getBytes, "1000".getBytes) - insertMapStorageEntryFor("T-5", "trade.broker".getBytes, "Nomura".getBytes) - getMapStorageSizeFor("T-5") mustEqual(7) - - getMapStorageRangeFor("T-5", - Some("trade.account".getBytes), - None, 3).map(e => (new String(e._1), new String(e._2))).size mustEqual(3) - - getMapStorageRangeFor("T-5", - Some("trade.account".getBytes), - Some("trade.type".getBytes), 3).map(e => (new String(e._1), new String(e._2))).size mustEqual(3) - - getMapStorageRangeFor("T-5", - Some("trade.amount".getBytes), - Some("trade.type".getBytes), 0).map(e => (new String(e._1), new String(e._2))).size mustEqual(6) - - getMapStorageRangeFor("T-5", - Some("trade.account".getBytes), - None, 0).map(e => (new String(e._1), new String(e._2))).size mustEqual(7) - - removeMapStorageFor("T-5") - } - } - - "Store and query objects in maps" should { - import NameSerialization._ - "write a Name object and fetch it properly" in { - val dtb = Calendar.getInstance.getTime - val n = Name(100, "debasish ghosh", "kolkata", dtb, Some(dtb)) - - insertMapStorageEntryFor("T-31", "debasish".getBytes, toByteArray[Name](n)) - getMapStorageSizeFor("T-31") mustEqual(1) - fromByteArray[Name](getMapStorageEntryFor("T-31", "debasish".getBytes).getOrElse(Array[Byte]())) mustEqual(n) - removeMapStorageFor("T-31") - } - } - - "Store and query in vectors" should { - "write 4 entries in a vector for transaction T-3" in { - insertVectorStorageEntryFor("T-3", "debasish".getBytes) - insertVectorStorageEntryFor("T-3", "maulindu".getBytes) - insertVectorStorageEntryFor("T-3", "1200".getBytes) - - val dt = Calendar.getInstance.getTime.getTime - insertVectorStorageEntryFor("T-3", toByteArray[Long](dt)) - getVectorStorageSizeFor("T-3") mustEqual(4) - fromByteArray[Long](getVectorStorageEntryFor("T-3", 0)) mustEqual(dt) - getVectorStorageSizeFor("T-3") mustEqual(4) - } - } - - "Store and query objects in vectors" should { - import NameSerialization._ - "write a Name object and fetch it properly" in { - val dtb = Calendar.getInstance.getTime - val n = Name(100, "debasish ghosh", "kolkata", dtb, Some(dtb)) - - insertVectorStorageEntryFor("T-31", toByteArray[Name](n)) - getVectorStorageSizeFor("T-31") mustEqual(1) - fromByteArray[Name](getVectorStorageEntryFor("T-31", 0)) mustEqual(n) - } - } - - "Store and query in ref" should { - import NameSerialization._ - "overwrite the ref for transaction T-4 with successive entries" in { - insertRefStorageFor("T-4", "debasish".getBytes) - insertRefStorageFor("T-4", "maulindu".getBytes) - - insertRefStorageFor("T-4", "1200".getBytes) - new String(getRefStorageFor("T-4").get) mustEqual("1200") - } - - "write a Name object and fetch it properly" in { - val dtb = Calendar.getInstance.getTime - val n = Name(100, "debasish ghosh", "kolkata", dtb, Some(dtb)) - insertRefStorageFor("T-4", toByteArray[Name](n)) - fromByteArray[Name](getRefStorageFor("T-4").get) mustEqual(n) - } - } - - "Mix the 3 different types of storage with the same name" should { - "work independently without interfering with each other" in { - insertVectorStorageEntryFor("SameName", "v1".getBytes) - insertMapStorageEntryFor("SameName", "vector".getBytes, "map_value_v".getBytes) - insertVectorStorageEntryFor("SameName", "v2".getBytes) - insertMapStorageEntryFor("SameName", "ref".getBytes, "map_value_r".getBytes) - insertVectorStorageEntryFor("SameName", "v3".getBytes) - insertRefStorageFor("SameName", "I am a ref!".getBytes) - - getMapStorageSizeFor("SameName") mustEqual(2) - new
String(getMapStorageEntryFor("SameName", "vector".getBytes).get) mustEqual("map_value_v") - new String(getMapStorageEntryFor("SameName", "ref".getBytes).get) mustEqual("map_value_r") - getVectorStorageSizeFor("SameName") mustEqual(3) - new String(getRefStorageFor("SameName").get) mustEqual("I am a ref!") - } - } -} - -object NameSerialization { - implicit object DateFormat extends Format[Date] { - def reads(in : Input) = - new Date(read[Long](in)) - - def writes(out: Output, value: Date) = - write[Long](out, value.getTime) - } - - case class Name(id: Int, name: String, - address: String, dateOfBirth: Date, dateDied: Option[Date]) - - implicit val NameFormat: Format[Name] = - asProduct5(Name)(Name.unapply(_).get) -} diff --git a/akka-persistence/akka-persistence-hbase/src/main/resources/log4j.properties b/akka-persistence/akka-persistence-hbase/src/main/resources/log4j.properties deleted file mode 100644 index 5763ff8232..0000000000 --- a/akka-persistence/akka-persistence-hbase/src/main/resources/log4j.properties +++ /dev/null @@ -1,25 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -log4j.rootLogger=ERROR,R - -# rolling log file ("system.log") -log4j.appender.R=org.apache.log4j.DailyRollingFileAppender -log4j.appender.R.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.R.layout=org.apache.log4j.PatternLayout -log4j.appender.R.layout.ConversionPattern=%5p [%t] %d{ISO8601} %F (line %L) %m%n -log4j.appender.R.File=target/logs/system.log diff --git a/akka-persistence/akka-persistence-hbase/src/main/scala/akka/HbaseStorage.scala b/akka-persistence/akka-persistence-hbase/src/main/scala/akka/HbaseStorage.scala deleted file mode 100644 index 131361922c..0000000000 --- a/akka-persistence/akka-persistence-hbase/src/main/scala/akka/HbaseStorage.scala +++ /dev/null @@ -1,50 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.hbase - -import akka.actor.{Uuid,newUuid} -import akka.stm._ -import akka.persistence.common._ - -object HbaseStorage extends Storage { - type ElementType = Array[Byte] - - def newMap: PersistentMap[ElementType, ElementType] = newMap(newUuid.toString) - def newVector: PersistentVector[ElementType] = newVector(newUuid.toString) - def newRef: PersistentRef[ElementType] = newRef(newUuid.toString) - - def getMap(id: String): PersistentMap[ElementType, ElementType] = newMap(id) - def getVector(id: String): PersistentVector[ElementType] = newVector(id) - def getRef(id: String): PersistentRef[ElementType] = newRef(id) - - def newMap(id: String): PersistentMap[ElementType, ElementType] = new HbasePersistentMap(id) - def newVector(id: String): PersistentVector[ElementType] = new HbasePersistentVector(id) - def newRef(id: String): PersistentRef[ElementType] = new HbasePersistentRef(id) -} - -/** - * Implements a persistent transactional map based on Hbase. - * - * @author David Greco - */ -class HbasePersistentMap(id: String) extends PersistentMapBinary { - val uuid = id - val storage = HbaseStorageBackend -} - -/** - * Implements a persistent transactional vector based on Hbase.
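- * Entries are read newest-first: index 0 addresses the most recently inserted element.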
- * - * @author David Greco - */ -class HbasePersistentVector(id: String) extends PersistentVector[Array[Byte]] { - val uuid = id - val storage = HbaseStorageBackend -} - -class HbasePersistentRef(id: String) extends PersistentRef[Array[Byte]] { - val uuid = id - val storage = HbaseStorageBackend -} diff --git a/akka-persistence/akka-persistence-hbase/src/main/scala/akka/HbaseStorageBackend.scala b/akka-persistence/akka-persistence-hbase/src/main/scala/akka/HbaseStorageBackend.scala deleted file mode 100644 index 7e3d750803..0000000000 --- a/akka-persistence/akka-persistence-hbase/src/main/scala/akka/HbaseStorageBackend.scala +++ /dev/null @@ -1,254 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.hbase - -import scala.collection.mutable.ListBuffer -import akka.stm._ -import akka.persistence.common._ -import akka.util.Logging -import akka.util.Helpers._ -import akka.config.Config.config -import org.apache.hadoop.hbase.HBaseConfiguration -import org.apache.hadoop.hbase.HColumnDescriptor -import org.apache.hadoop.hbase.HTableDescriptor -import org.apache.hadoop.hbase.client.HBaseAdmin -import org.apache.hadoop.hbase.client.HTable -import org.apache.hadoop.hbase.client.Put -import org.apache.hadoop.hbase.client.Get -import org.apache.hadoop.hbase.client.Delete -import org.apache.hadoop.hbase.util.Bytes - -/** - * @author David Greco - */ -private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte], Array[Byte]] with VectorStorageBackend[Array[Byte]] with RefStorageBackend[Array[Byte]] with Logging { - - val HBASE_ZOOKEEPER_QUORUM = config.getString("akka.persistence.hbase.zookeeper-quorum", "localhost") - val CONFIGURATION = new HBaseConfiguration - val REF_TABLE_NAME = "__REF_TABLE" - val VECTOR_TABLE_NAME = "__VECTOR_TABLE" - val VECTOR_ELEMENT_COLUMN_FAMILY_NAME = "__VECTOR_ELEMENT" - val MAP_ELEMENT_COLUMN_FAMILY_NAME = "__MAP_ELEMENT" - val MAP_TABLE_NAME = "__MAP_TABLE" - var REF_TABLE: HTable = _ - var VECTOR_TABLE: HTable = _ - var MAP_TABLE: HTable = _ - - CONFIGURATION.set("hbase.zookeeper.quorum", HBASE_ZOOKEEPER_QUORUM) - - init - - def init { - val ADMIN = new HBaseAdmin(CONFIGURATION) - - if (!ADMIN.tableExists(REF_TABLE_NAME)) { - ADMIN.createTable(new HTableDescriptor(REF_TABLE_NAME)) - ADMIN.disableTable(REF_TABLE_NAME) - ADMIN.addColumn(REF_TABLE_NAME, new HColumnDescriptor("element")) - ADMIN.enableTable(REF_TABLE_NAME) - } - REF_TABLE = new HTable(CONFIGURATION, REF_TABLE_NAME); - - if (!ADMIN.tableExists(VECTOR_TABLE_NAME)) { - ADMIN.createTable(new HTableDescriptor(VECTOR_TABLE_NAME)) - ADMIN.disableTable(VECTOR_TABLE_NAME) - ADMIN.addColumn(VECTOR_TABLE_NAME, new HColumnDescriptor(VECTOR_ELEMENT_COLUMN_FAMILY_NAME)) - ADMIN.enableTable(VECTOR_TABLE_NAME); - } - VECTOR_TABLE = new HTable(CONFIGURATION, VECTOR_TABLE_NAME) - - if (!ADMIN.tableExists(MAP_TABLE_NAME)) { - ADMIN.createTable(new HTableDescriptor(MAP_TABLE_NAME)) - ADMIN.disableTable(MAP_TABLE_NAME) - ADMIN.addColumn(MAP_TABLE_NAME, new HColumnDescriptor(MAP_ELEMENT_COLUMN_FAMILY_NAME)) - ADMIN.enableTable(MAP_TABLE_NAME); - } - MAP_TABLE = new HTable(CONFIGURATION, MAP_TABLE_NAME) - } - - def drop { - val ADMIN = new HBaseAdmin(CONFIGURATION) - - if (ADMIN.tableExists(REF_TABLE_NAME)) { - ADMIN.disableTable(REF_TABLE_NAME) - ADMIN.deleteTable(REF_TABLE_NAME) - } - if (ADMIN.tableExists(VECTOR_TABLE_NAME)) { - ADMIN.disableTable(VECTOR_TABLE_NAME) - ADMIN.deleteTable(VECTOR_TABLE_NAME) - } - if (ADMIN.tableExists(MAP_TABLE_NAME)) { - 
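// HBase only allows dropping a table once it has been disabled, hence each disable/delete pair. -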
ADMIN.disableTable(MAP_TABLE_NAME) - ADMIN.deleteTable(MAP_TABLE_NAME) - } - init - } - - // =============================================================== - // For Ref - // =============================================================== - - def insertRefStorageFor(name: String, element: Array[Byte]) = { - val row = new Put(Bytes.toBytes(name)) - row.add(Bytes.toBytes("element"), Bytes.toBytes("element"), element) - REF_TABLE.put(row) - } - - def getRefStorageFor(name: String): Option[Array[Byte]] = { - val row = new Get(Bytes.toBytes(name)) - val result = REF_TABLE.get(row) - - if (result.isEmpty()) - None - else - Some(result.getValue(Bytes.toBytes("element"), Bytes.toBytes("element"))) - } - - // =============================================================== - // For Vector - // =============================================================== - - def insertVectorStorageEntryFor(name: String, element: Array[Byte]) = { - val row = new Put(Bytes.toBytes(name)) - val size = getVectorStorageSizeFor(name) - row.add(Bytes.toBytes(VECTOR_ELEMENT_COLUMN_FAMILY_NAME), Bytes.toBytes(size), element) - VECTOR_TABLE.put(row) - } - - def insertVectorStorageEntriesFor(name: String, elements: List[Array[Byte]]) = elements.reverse.foreach(insertVectorStorageEntryFor(name, _)) - - def updateVectorStorageEntryFor(name: String, index: Int, element: Array[Byte]) = { - val row = new Put(Bytes.toBytes(name)) - row.add(Bytes.toBytes(VECTOR_ELEMENT_COLUMN_FAMILY_NAME), Bytes.toBytes(index), element) - VECTOR_TABLE.put(row) - } - - def getVectorStorageEntryFor(name: String, index: Int): Array[Byte] = { - val row = new Get(Bytes.toBytes(name)) - val result = VECTOR_TABLE.get(row) - val size = result.size - val colnum = size - index - 1 - - result.getValue(Bytes.toBytes(VECTOR_ELEMENT_COLUMN_FAMILY_NAME),Bytes.toBytes(colnum)) - } - - /** - * if start and finish both are defined, ignore count and - * report the range [start, finish) - * if start is not defined, assume start = 0 - * if start == 0 and finish == 0, return an empty collection - */ - def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int): List[Array[Byte]] = { - - import scala.math._ - - val row = new Get(Bytes.toBytes(name)) - val result = VECTOR_TABLE.get(row) - val size = result.size - var listBuffer = new ListBuffer[Array[Byte]] - var b = 0 - var e = 0 - - if(start.isDefined && finish.isDefined) { - b = start.get - e = finish.get - 1 - } else { - b = start.getOrElse(0) - e = finish.getOrElse(min(b + count - 1, size - 1)) - } - for(i <- b to e) { - val colnum = size - i - 1 - listBuffer += result.getValue(Bytes.toBytes(VECTOR_ELEMENT_COLUMN_FAMILY_NAME),Bytes.toBytes(colnum)) - } - listBuffer.toList - } - - def getVectorStorageSizeFor(name: String): Int = { - val row = new Get(Bytes.toBytes(name)) - val result = VECTOR_TABLE.get(row) - - if (result.isEmpty) - 0 - else - result.size - } - - // =============================================================== - // For Map - // =============================================================== - - def insertMapStorageEntryFor(name: String, key: Array[Byte], element: Array[Byte]) = { - val row = new Put(Bytes.toBytes(name)) - row.add(Bytes.toBytes(MAP_ELEMENT_COLUMN_FAMILY_NAME), key, element) - MAP_TABLE.put(row) - } - - def insertMapStorageEntriesFor(name: String, entries: List[Tuple2[Array[Byte], Array[Byte]]]) = entries.foreach((x:Tuple2[Array[Byte], Array[Byte]]) => insertMapStorageEntryFor(name, x._1, x._2)) - - def getMapStorageEntryFor(name: String, key: 
Array[Byte]): Option[Array[Byte]] = { - val row = new Get(Bytes.toBytes(name)) - val result = MAP_TABLE.get(row) - - Option(result.getValue(Bytes.toBytes(MAP_ELEMENT_COLUMN_FAMILY_NAME), key)) - } - - def getMapStorageFor(name: String): List[Tuple2[Array[Byte], Array[Byte]]] = { - val row = new Get(Bytes.toBytes(name)) - val result = MAP_TABLE.get(row) - val raw = result.getFamilyMap(Bytes.toBytes(MAP_ELEMENT_COLUMN_FAMILY_NAME)).entrySet.toArray - val listBuffer = new ListBuffer[Tuple2[Array[Byte], Array[Byte]]] - - for(i <- Range(raw.size-1, -1, -1)) { - listBuffer += Tuple2(raw.apply(i).asInstanceOf[java.util.Map.Entry[Array[Byte], Array[Byte]]].getKey, raw.apply(i).asInstanceOf[java.util.Map.Entry[Array[Byte],Array[Byte]]].getValue) - } - listBuffer.toList - } - - def getMapStorageSizeFor(name: String): Int = { - val row = new Get(Bytes.toBytes(name)) - val result = MAP_TABLE.get(row) - - if (result.isEmpty) - 0 - else - result.size - } - - def removeMapStorageFor(name: String): Unit = { - val row = new Delete(Bytes.toBytes(name)) - MAP_TABLE.delete(row) - } - - def removeMapStorageFor(name: String, key: Array[Byte]): Unit = { - val row = new Delete(Bytes.toBytes(name)) - row.deleteColumns(Bytes.toBytes(MAP_ELEMENT_COLUMN_FAMILY_NAME), key) - MAP_TABLE.delete(row) - } - - def getMapStorageRangeFor(name: String, start: Option[Array[Byte]], finish: Option[Array[Byte]], count: Int): List[Tuple2[Array[Byte], Array[Byte]]] = { - val row = new Get(Bytes.toBytes(name)) - val result = MAP_TABLE.get(row) - val map = result.getFamilyMap(Bytes.toBytes(MAP_ELEMENT_COLUMN_FAMILY_NAME)) - - val startBytes = if (start.isDefined) start.get else map.firstEntry.getKey - val finishBytes = if (finish.isDefined) finish.get else map.lastEntry.getKey - val submap = map.subMap(startBytes, true, finishBytes, true) - - val iterator = submap.entrySet.iterator - val listBuffer = new ListBuffer[Tuple2[Array[Byte], Array[Byte]]] - val size = submap.size - - val cnt = if(count > size) size else count - var i: Int = 0 - while(iterator.hasNext && i < cnt) { - iterator.next match { - case entry: java.util.Map.Entry[Array[Byte], Array[Byte]] => listBuffer += ((entry.getKey,entry.getValue)) - case _ => - } - i = i+1 - } - listBuffer.toList - } -} diff --git a/akka-persistence/akka-persistence-hbase/src/test/resources/log4j.properties b/akka-persistence/akka-persistence-hbase/src/test/resources/log4j.properties deleted file mode 100644 index 3c8738fdc3..0000000000 --- a/akka-persistence/akka-persistence-hbase/src/test/resources/log4j.properties +++ /dev/null @@ -1,25 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -log4j.rootLogger=DEBUG,R - -# rolling log file ("system.log") -log4j.appender.R=org.apache.log4j.DailyRollingFileAppender -log4j.appender.R.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.R.layout=org.apache.log4j.PatternLayout -log4j.appender.R.layout.ConversionPattern=%5p [%t] %d{ISO8601} %F (line %L) %m%n -log4j.appender.R.File=target/logs/system.log diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTestIntegration.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTestIntegration.scala deleted file mode 100644 index aa638572ae..0000000000 --- a/akka-persistence/akka-persistence-hbase/src/test/scala/HbasePersistentActorSpecTestIntegration.scala +++ /dev/null @@ -1,165 +0,0 @@ -package akka.persistence.hbase - -import akka.actor.{ Actor, ActorRef } -import Actor._ -import akka.stm._ - -import org.junit.Test -import org.junit.Assert._ -import org.junit.BeforeClass -import org.junit.Before -import org.junit.AfterClass -import org.junit.After - -import org.scalatest.junit.JUnitSuite -import org.scalatest.BeforeAndAfterAll -import org.apache.hadoop.hbase.HBaseTestingUtility - -case class GetMapState(key: String) -case object GetVectorState -case object GetVectorSize -case object GetRefState - -case class SetMapState(key: String, value: String) -case class SetVectorState(key: String) -case class SetRefState(key: String) -case class Success(key: String, value: String) -case class Failure(key: String, value: String) - -case class SetMapStateOneWay(key: String, value: String) -case class SetVectorStateOneWay(key: String) -case class SetRefStateOneWay(key: String) -case class SuccessOneWay(key: String, value: String) -case class FailureOneWay(key: String, value: String) - -class HbasePersistentActor extends Actor { - self.timeout = 100000 - - private val mapState = HbaseStorage.newMap - private val vectorState = HbaseStorage.newVector - private val refState = HbaseStorage.newRef - - def receive = { case message => atomic { atomicReceive(message) } } - - def atomicReceive: Receive = { - case GetMapState(key) => - self.reply(mapState.get(key.getBytes("UTF-8")).get) - case GetVectorSize => - self.reply(vectorState.length.asInstanceOf[AnyRef]) - case GetRefState => - self.reply(refState.get.get) - case SetMapState(key, msg) => - mapState.put(key.getBytes("UTF-8"), msg.getBytes("UTF-8")) - self.reply(msg) - case SetVectorState(msg) => - vectorState.add(msg.getBytes("UTF-8")) - self.reply(msg) - case SetRefState(msg) => - refState.swap(msg.getBytes("UTF-8")) - self.reply(msg) - case Success(key, msg) => - mapState.put(key.getBytes("UTF-8"), msg.getBytes("UTF-8")) - vectorState.add(msg.getBytes("UTF-8")) - refState.swap(msg.getBytes("UTF-8")) - self.reply(msg) - case Failure(key, msg) => - mapState.put(key.getBytes("UTF-8"), msg.getBytes("UTF-8")) - vectorState.add(msg.getBytes("UTF-8")) - refState.swap(msg.getBytes("UTF-8")) - fail - self.reply(msg) - } - - def fail = throw new RuntimeException("Expected exception; to test fault-tolerance") -} - -class HbasePersistentActorSpecTestIntegration extends JUnitSuite with BeforeAndAfterAll { - - val testUtil = new HBaseTestingUtility - - override def beforeAll { - testUtil.startMiniCluster - } - - override def afterAll { - testUtil.shutdownMiniCluster - } - - @Before - def beforeEach { - HbaseStorageBackend.drop - } - - @After - def afterEach { - HbaseStorageBackend.drop - } - - @Test - def testMapShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { - val stateful =
actorOf[HbasePersistentActor] - stateful.start - stateful !! SetMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init") // set init state - stateful !! Success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired - val result = (stateful !! GetMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess")).as[Array[Byte]].get - assertEquals("new state", new String(result, 0, result.length, "UTF-8")) - } - - @Test - def testMapShouldRollbackStateForStatefulServerInCaseOfFailure = { - val stateful = actorOf[HbasePersistentActor] - stateful.start - stateful !! SetMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state - try { - stateful !! Failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state") // call failing transactionrequired method - fail("should have thrown an exception") - } catch { case e: RuntimeException => {} } - val result = (stateful !! GetMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure")).as[Array[Byte]].get - assertEquals("init", new String(result, 0, result.length, "UTF-8")) // check that state is == init state - } - - @Test - def testVectorShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { - val stateful = actorOf[HbasePersistentActor] - stateful.start - stateful !! SetVectorState("init") // set init state - stateful !! Success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired - assertEquals(2, (stateful !! GetVectorSize).get.asInstanceOf[java.lang.Integer].intValue) - } - - @Test - def testVectorShouldRollbackStateForStatefulServerInCaseOfFailure = { - val stateful = actorOf[HbasePersistentActor] - stateful.start - stateful !! SetVectorState("init") // set init state - try { - stateful !! Failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state") // call failing transactionrequired method - fail("should have thrown an exception") - } catch { case e: RuntimeException => {} } - assertEquals(1, (stateful !! GetVectorSize).get.asInstanceOf[java.lang.Integer].intValue) - } - - @Test - def testRefShouldNotRollbackStateForStatefulServerInCaseOfSuccess = { - val stateful = actorOf[HbasePersistentActor] - stateful.start - stateful !! SetRefState("init") // set init state - stateful !! Success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired - val result = (stateful !! GetRefState).as[Array[Byte]].get - assertEquals("new state", new String(result, 0, result.length, "UTF-8")) - } - - @Test - def testRefShouldRollbackStateForStatefulServerInCaseOfFailure = { - val stateful = actorOf[HbasePersistentActor] - stateful.start - stateful !! SetRefState("init") // set init state - try { - stateful !! Failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state") // call failing transactionrequired method - fail("should have thrown an exception") - } catch { case e: RuntimeException => {} } - val result = (stateful !! 
GetRefState).as[Array[Byte]].get - assertEquals("init", new String(result, 0, result.length, "UTF-8")) // check that state is == init state - } - -} diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTestIntegration.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTestIntegration.scala deleted file mode 100644 index cd154476eb..0000000000 --- a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseStorageSpecTestIntegration.scala +++ /dev/null @@ -1,177 +0,0 @@ -package akka.persistence.hbase - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.BeforeAndAfterAll -import org.scalatest.BeforeAndAfterEach - -class HbaseStorageSpecTestIntegration extends -Spec with -ShouldMatchers with -BeforeAndAfterAll with -BeforeAndAfterEach { - - import org.apache.hadoop.hbase.HBaseTestingUtility - - val testUtil = new HBaseTestingUtility - - override def beforeAll { - testUtil.startMiniCluster - } - - override def afterAll { - testUtil.shutdownMiniCluster - } - - override def beforeEach { - HbaseStorageBackend.drop - } - - override def afterEach { - HbaseStorageBackend.drop - } - - describe("persistent maps") { - it("should insert with single key and value") { - import HbaseStorageBackend._ - - insertMapStorageEntryFor("t1", "odersky".getBytes, "scala".getBytes) - insertMapStorageEntryFor("t1", "gosling".getBytes, "java".getBytes) - insertMapStorageEntryFor("t1", "stroustrup".getBytes, "c++".getBytes) - getMapStorageSizeFor("t1") should equal(3) - new String(getMapStorageEntryFor("t1", "odersky".getBytes).get) should equal("scala") - new String(getMapStorageEntryFor("t1", "gosling".getBytes).get) should equal("java") - new String(getMapStorageEntryFor("t1", "stroustrup".getBytes).get) should equal("c++") - getMapStorageEntryFor("t1", "torvalds".getBytes) should equal(None) - } - - it("should insert with multiple keys and values") { - import HbaseStorageBackend._ - - val l = List(("stroustrup", "c++"), ("odersky", "scala"), ("gosling", "java")) - insertMapStorageEntriesFor("t1", l.map { case (k, v) => (k.getBytes, v.getBytes) }) - getMapStorageSizeFor("t1") should equal(3) - new String(getMapStorageEntryFor("t1", "stroustrup".getBytes).get) should equal("c++") - new String(getMapStorageEntryFor("t1", "gosling".getBytes).get) should equal("java") - new String(getMapStorageEntryFor("t1", "odersky".getBytes).get) should equal("scala") - getMapStorageEntryFor("t1", "torvalds".getBytes) should equal(None) - - getMapStorageEntryFor("t2", "torvalds".getBytes) should equal(None) - - getMapStorageFor("t1").map { case (k, v) => (new String(k), new String(v)) } should equal (l) - - removeMapStorageFor("t1", "gosling".getBytes) - getMapStorageSizeFor("t1") should equal(2) - - removeMapStorageFor("t1") - getMapStorageSizeFor("t1") should equal(0) - } - - it("should do proper range queries") { - import HbaseStorageBackend._ - val l = List( - ("bjarne stroustrup", "c++"), - ("martin odersky", "scala"), - ("james gosling", "java"), - ("yukihiro matsumoto", "ruby"), - ("slava pestov", "factor"), - ("rich hickey", "clojure"), - ("ola bini", "ioke"), - ("dennis ritchie", "c"), - ("larry wall", "perl"), - ("guido van rossum", "python"), - ("james strachan", "groovy")) - val rl = List( - ("james gosling", "java"), - ("james strachan", "groovy"), - ("larry wall", "perl"), - ("martin odersky", "scala"), - ("ola bini", "ioke"), ("rich hickey", "clojure"), - ("slava pestov", "factor")) - 
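// rl is the expected inclusive slice from "james gosling" to "slava pestov" once the keys are sorted -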
insertMapStorageEntriesFor("t1", l.map { case (k, v) => (k.getBytes, v.getBytes) }) - getMapStorageSizeFor("t1") should equal(l.size) - getMapStorageRangeFor("t1", None, None, 100).map { case (k, v) => (new String(k), new String(v)) } should equal(l.sortWith(_._1 < _._1)) - getMapStorageRangeFor("t1", Option("james gosling".getBytes), Option("slava pestov".getBytes), 100).map { case (k, v) => (new String(k), new String(v)) } should equal(rl.sortWith(_._1 < _._1)) - getMapStorageRangeFor("t1", None, None, 5).map { case (k, v) => (new String(k), new String(v)) }.size should equal(5) - } - - } - - describe("persistent vectors") { - it("should insert a single value") { - import HbaseStorageBackend._ - - insertVectorStorageEntryFor("t1", "martin odersky".getBytes) - insertVectorStorageEntryFor("t1", "james gosling".getBytes) - new String(getVectorStorageEntryFor("t1", 0)) should equal("james gosling") - new String(getVectorStorageEntryFor("t1", 1)) should equal("martin odersky") - } - - it("should insert multiple values") { - import HbaseStorageBackend._ - - insertVectorStorageEntryFor("t1", "martin odersky".getBytes) - insertVectorStorageEntryFor("t1", "james gosling".getBytes) - insertVectorStorageEntriesFor("t1", List("ola bini".getBytes, "james strachan".getBytes, "dennis ritchie".getBytes)) - new String(getVectorStorageEntryFor("t1", 0)) should equal("ola bini") - new String(getVectorStorageEntryFor("t1", 1)) should equal("james strachan") - new String(getVectorStorageEntryFor("t1", 2)) should equal("dennis ritchie") - new String(getVectorStorageEntryFor("t1", 3)) should equal("james gosling") - new String(getVectorStorageEntryFor("t1", 4)) should equal("martin odersky") - } - - it("should fetch a range of values") { - import HbaseStorageBackend._ - - insertVectorStorageEntryFor("t1", "martin odersky".getBytes) - insertVectorStorageEntryFor("t1", "james gosling".getBytes) - getVectorStorageSizeFor("t1") should equal(2) - insertVectorStorageEntriesFor("t1", List("ola bini".getBytes, "james strachan".getBytes, "dennis ritchie".getBytes)) - getVectorStorageRangeFor("t1", None, None, 100).map(new String(_)) should equal(List("ola bini", "james strachan", "dennis ritchie", "james gosling", "martin odersky")) - getVectorStorageRangeFor("t1", Some(0), Some(5), 100).map(new String(_)) should equal(List("ola bini", "james strachan", "dennis ritchie", "james gosling", "martin odersky")) - getVectorStorageRangeFor("t1", Some(2), Some(5), 100).map(new String(_)) should equal(List("dennis ritchie", "james gosling", "martin odersky")) - getVectorStorageRangeFor("t1", Some(0), Some(0), 100).size should equal(0) - getVectorStorageSizeFor("t1") should equal(5) - } - - it("should insert and query complex structures") { - import HbaseStorageBackend._ - import sjson.json.DefaultProtocol._ - import sjson.json.JsonSerialization._ - - // a list[AnyRef] should be added successfully - val l = List("ola bini".getBytes, tobinary(List(100, 200, 300)), tobinary(List(1, 2, 3))) - - // for id = t1 - insertVectorStorageEntriesFor("t1", l) - new String(getVectorStorageEntryFor("t1", 0)) should equal("ola bini") - frombinary[List[Int]](getVectorStorageEntryFor("t1", 1)) should equal(List(100, 200, 300)) - frombinary[List[Int]](getVectorStorageEntryFor("t1", 2)) should equal(List(1, 2, 3)) - - getVectorStorageSizeFor("t1") should equal(3) - - // some more for id = t1 - val m = List(tobinary(Map(1 -> "dg", 2 -> "mc", 3 -> "nd")), tobinary(List("martin odersky", "james gosling"))) - insertVectorStorageEntriesFor("t1", m) - - 
// size should add up - getVectorStorageSizeFor("t1") should equal(5) - - // now for a diff id - insertVectorStorageEntriesFor("t2", l) - getVectorStorageSizeFor("t2") should equal(3) - } - } - - describe("persistent refs") { - it("should insert a ref") { - import HbaseStorageBackend._ - - insertRefStorageFor("t1", "martin odersky".getBytes) - new String(getRefStorageFor("t1").get) should equal("martin odersky") - insertRefStorageFor("t1", "james gosling".getBytes) - new String(getRefStorageFor("t1").get) should equal("james gosling") - getRefStorageFor("t2") should equal(None) - } - } -} diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTestIntegration.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTestIntegration.scala deleted file mode 100644 index 0403d07946..0000000000 --- a/akka-persistence/akka-persistence-hbase/src/test/scala/HbaseTicket343SpecTestIntegration.scala +++ /dev/null @@ -1,346 +0,0 @@ -package akka.persistence.hbase - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach} -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith - -import akka.actor.{Actor, ActorRef} -import akka.config.Supervision.{OneForOneStrategy,Permanent} -import Actor._ -import akka.stm._ -import akka.util.Logging - -import HbaseStorageBackend._ - -case class GET(k: String) -case class SET(k: String, v: String) -case class REM(k: String) -case class CONTAINS(k: String) -case object MAP_SIZE -case class MSET(kvs: List[(String, String)]) -case class REMOVE_AFTER_PUT(kvsToAdd: List[(String, String)], ksToRem: List[String]) -case class CLEAR_AFTER_PUT(kvsToAdd: List[(String, String)]) -case class PUT_WITH_SLICE(kvsToAdd: List[(String, String)], start: String, cnt: Int) -case class PUT_REM_WITH_SLICE(kvsToAdd: List[(String, String)], ksToRem: List[String], start: String, cnt: Int) - -case class VADD(v: String) -case class VUPD(i: Int, v: String) -case class VUPD_AND_ABORT(i: Int, v: String) -case class VGET(i: Int) -case object VSIZE -case class VGET_AFTER_VADD(vsToAdd: List[String], isToFetch: List[Int]) -case class VADD_WITH_SLICE(vsToAdd: List[String], start: Int, cnt: Int) - -object Storage { - class HbaseSampleMapStorage extends Actor { - self.lifeCycle = Permanent - val FOO_MAP = "akka.sample.map" - - private var fooMap = atomic { HbaseStorage.getMap(FOO_MAP) } - - def receive = { - case SET(k, v) => - atomic { - fooMap += (k.getBytes, v.getBytes) - } - self.reply((k, v)) - - case GET(k) => - val v = atomic { - fooMap.get(k.getBytes).map(new String(_)).getOrElse(k + " Not found") - } - self.reply(v) - - case REM(k) => - val v = atomic { - fooMap -= k.getBytes - } - self.reply(k) - - case CONTAINS(k) => - val v = atomic { - fooMap contains k.getBytes - } - self.reply(v) - - case MAP_SIZE => - val v = atomic { - fooMap.size - } - self.reply(v) - - case MSET(kvs) => atomic { - kvs.foreach {kv => fooMap += (kv._1.getBytes, kv._2.getBytes) } - } - self.reply(kvs.size) - - case REMOVE_AFTER_PUT(kvs2add, ks2rem) => atomic { - kvs2add.foreach {kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - - ks2rem.foreach {k => - fooMap -= k.getBytes - }} - self.reply(fooMap.size) - - case CLEAR_AFTER_PUT(kvs2add) => atomic { - kvs2add.foreach {kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - fooMap.clear - } - self.reply(true) - - case PUT_WITH_SLICE(kvs2add, from, cnt) => - val v = atomic { - kvs2add.foreach {kv => - fooMap += 
(kv._1.getBytes, kv._2.getBytes) - } - fooMap.slice(Some(from.getBytes), cnt) - } - self.reply(v: List[(Array[Byte], Array[Byte])]) - - case PUT_REM_WITH_SLICE(kvs2add, ks2rem, from, cnt) => - val v = atomic { - kvs2add.foreach {kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - ks2rem.foreach {k => - fooMap -= k.getBytes - } - fooMap.slice(Some(from.getBytes), cnt) - } - self.reply(v: List[(Array[Byte], Array[Byte])]) - } - } - - class HbaseSampleVectorStorage extends Actor { - self.lifeCycle = Permanent - val FOO_VECTOR = "akka.sample.vector" - - private var fooVector = atomic { HbaseStorage.getVector(FOO_VECTOR) } - - def receive = { - case VADD(v) => - val size = - atomic { - fooVector + v.getBytes - fooVector length - } - self.reply(size) - - case VGET(index) => - val ind = - atomic { - fooVector get index - } - self.reply(ind) - - case VGET_AFTER_VADD(vs, is) => - val els = - atomic { - vs.foreach(fooVector + _.getBytes) - (is.foldRight(List[Array[Byte]]())(fooVector.get(_) :: _)).map(new String(_)) - } - self.reply(els) - - case VUPD_AND_ABORT(index, value) => - val l = - atomic { - fooVector.update(index, value.getBytes) - // force fail - fooVector get 100 - } - self.reply(index) - - case VADD_WITH_SLICE(vs, s, c) => - val l = - atomic { - vs.foreach(fooVector + _.getBytes) - fooVector.slice(Some(s), None, c) - } - self.reply(l.map(new String(_))) - } - } -} - -import Storage._ - -@RunWith(classOf[JUnitRunner]) -class HbaseTicket343SpecTestIntegration extends Spec with ShouldMatchers with BeforeAndAfterAll with BeforeAndAfterEach { - - import org.apache.hadoop.hbase.HBaseTestingUtility - - val testUtil = new HBaseTestingUtility - - override def beforeAll { - testUtil.startMiniCluster - } - - override def afterAll { - testUtil.shutdownMiniCluster - } - - override def beforeEach { - HbaseStorageBackend.drop - } - - override def afterEach { - HbaseStorageBackend.drop - } - - describe("Ticket 343 Issue #1") { - it("remove after put should work within the same transaction") { - val proc = actorOf[HbaseSampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) - - (proc !! GET("dg")).getOrElse("Get failed") should equal("1") - (proc !! GET("mc")).getOrElse("Get failed") should equal("2") - (proc !! GET("nd")).getOrElse("Get failed") should equal("3") - - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) - - val add = List(("a", "1"), ("b", "2"), ("c", "3")) - val rem = List("a", "debasish") - (proc !! REMOVE_AFTER_PUT(add, rem)).getOrElse("REMOVE_AFTER_PUT failed") should equal(5) - - (proc !! GET("debasish")).getOrElse("debasish not found") should equal("debasish Not found") - (proc !! GET("a")).getOrElse("a not found") should equal("a Not found") - - (proc !! GET("b")).getOrElse("b not found") should equal("2") - - (proc !! CONTAINS("b")).getOrElse("b not found") should equal(true) - (proc !! CONTAINS("debasish")).getOrElse("debasish not found") should equal(false) - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(5) - proc.stop - } - } - - describe("Ticket 343 Issue #2") { - it("clear after put should work within the same transaction") { - val proc = actorOf[HbaseSampleMapStorage] - proc.start - - (proc !! 
SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - val add = List(("a", "1"), ("b", "2"), ("c", "3")) - (proc !! CLEAR_AFTER_PUT(add)).getOrElse("CLEAR_AFTER_PUT failed") should equal(true) - - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(0) - proc.stop - } - } - - describe("Ticket 343 Issue #3") { - it("map size should change after the transaction") { - val proc = actorOf[HbaseSampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) - - (proc !! GET("dg")).getOrElse("Get failed") should equal("1") - (proc !! GET("mc")).getOrElse("Get failed") should equal("2") - (proc !! GET("nd")).getOrElse("Get failed") should equal("3") - proc.stop - } - } - - describe("slice test") { - it("should pass") { - val proc = actorOf[HbaseSampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - // (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) - - (proc !! PUT_WITH_SLICE(List(("ec", "1"), ("tb", "2"), ("mc", "10")), "dg", 3)).get.asInstanceOf[List[(Array[Byte], Array[Byte])]].map { case (k, v) => (new String(k), new String(v)) } should equal(List(("dg", "1"), ("ec", "1"), ("mc", "10"))) - - (proc !! PUT_REM_WITH_SLICE(List(("fc", "1"), ("gb", "2"), ("xy", "10")), List("tb", "fc"), "dg", 5)).get.asInstanceOf[List[(Array[Byte], Array[Byte])]].map { case (k, v) => (new String(k), new String(v)) } should equal(List(("dg", "1"), ("ec", "1"), ("gb", "2"), ("mc", "10"), ("nd", "3"))) - proc.stop - } - } - - describe("Ticket 343 Issue #4") { - it("vector get should not ignore elements that were in vector before transaction") { - - val proc = actorOf[HbaseSampleVectorStorage] - proc.start - - // add 4 elements in separate transactions - (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) - (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) - (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) - (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) - - new String((proc !! VGET(0)).get.asInstanceOf[Array[Byte]] ) should equal("nilanjan") - new String((proc !! VGET(1)).get.asInstanceOf[Array[Byte]] ) should equal("ramanendu") - new String((proc !! VGET(2)).get.asInstanceOf[Array[Byte]] ) should equal("maulindu") - new String((proc !! VGET(3)).get.asInstanceOf[Array[Byte]] ) should equal("debasish") - - // now add 3 more and do gets in the same transaction - (proc !! 
VGET_AFTER_VADD(List("a", "b", "c"), List(0, 2, 4))).get.asInstanceOf[List[String]] should equal(List("c", "a", "ramanendu")) - proc.stop - } - } - - describe("Ticket 343 Issue #6") { - it("vector update should not ignore transaction") { - val proc = actorOf[HbaseSampleVectorStorage] - proc.start - - // add 4 elements in separate transactions - (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) - (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) - (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) - (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) - - evaluating { - (proc !! VUPD_AND_ABORT(0, "virat")).getOrElse("VUPD_AND_ABORT failed") - } should produce [Exception] - - // update aborts and hence values will remain unchanged - new String((proc !! VGET(0)).get.asInstanceOf[Array[Byte]] ) should equal("nilanjan") - proc.stop - } - } - - describe("Ticket 343 Issue #5") { - it("vector slice() should not ignore elements added in current transaction") { - val proc = actorOf[HbaseSampleVectorStorage] - proc.start - - // add 4 elements in separate transactions - (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) - (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) - (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) - (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) - - // slice with no new elements added in current transaction - (proc !! VADD_WITH_SLICE(List(), 2, 2)).getOrElse("VADD_WITH_SLICE failed") should equal(Vector("maulindu", "debasish")) - - // slice with new elements added in current transaction - (proc !! VADD_WITH_SLICE(List("a", "b", "c", "d"), 2, 2)).getOrElse("VADD_WITH_SLICE failed") should equal(Vector("b", "a")) - proc.stop - } - } -} diff --git a/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTestIntegration.scala b/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTestIntegration.scala deleted file mode 100644 index 5e949c8a28..0000000000 --- a/akka-persistence/akka-persistence-hbase/src/test/scala/SimpleHbaseSpecTestIntegration.scala +++ /dev/null @@ -1,62 +0,0 @@ -package akka.persistence.hbase - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.BeforeAndAfterAll -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith -import org.junit.Test - -import org.apache.hadoop.hbase.HBaseTestingUtility - -@RunWith(classOf[JUnitRunner]) -class SimpleHbaseSpecTestIntegration extends Spec with BeforeAndAfterAll with ShouldMatchers { - - import org.apache.hadoop.hbase.HBaseTestingUtility - - val testUtil = new HBaseTestingUtility - - override def beforeAll { - testUtil.startMiniCluster - } - - override def afterAll { - testUtil.shutdownMiniCluster - } - - describe("simple hbase persistence test") { - it("should create a table") { - import org.apache.hadoop.hbase.util.Bytes - import org.apache.hadoop.hbase.HTableDescriptor - import org.apache.hadoop.hbase.HColumnDescriptor - import org.apache.hadoop.hbase.client.HBaseAdmin - import org.apache.hadoop.hbase.client.HTable - - val descriptor = new HTableDescriptor(Bytes.toBytes("ATable")) - descriptor.addFamily(new HColumnDescriptor(Bytes.toBytes("Family1"))) - descriptor.addFamily(new HColumnDescriptor(Bytes.toBytes("Family2"))) - val admin = new HBaseAdmin(testUtil.getConfiguration) - admin.createTable(descriptor) - val table = new HTable(testUtil.getConfiguration, Bytes.toBytes("ATable")) - - 
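- // sketch (not part of the original assertions): the admin handle above can
- // also confirm the registration explicitly, as the quorum test below does:
- //   admin.tableExists("ATable") should equal(true)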
table should not equal (null) - } - - it("should use the quorum read from the akka configuration and access the table") { - import akka.config.Config.config - import org.apache.hadoop.hbase.HBaseConfiguration - import org.apache.hadoop.hbase.client.HBaseAdmin - import org.apache.hadoop.hbase.client.HTable - - val HBASE_ZOOKEEPER_QUORUM = config.getString("akka.persistence.hbase.zookeeper-quorum", "0") - HBASE_ZOOKEEPER_QUORUM should not equal ("0") - HBASE_ZOOKEEPER_QUORUM should equal("localhost") - - val configuration = new HBaseConfiguration - configuration.set("hbase.zookeeper.quorum", HBASE_ZOOKEEPER_QUORUM) - val admin = new HBaseAdmin(configuration) - admin.tableExists("ATable") should equal(true) - } - } - -} diff --git a/akka-persistence/akka-persistence-memcached/src/main/scala/akka/MemcachedStorage.scala b/akka-persistence/akka-persistence-memcached/src/main/scala/akka/MemcachedStorage.scala deleted file mode 100644 index 3289a33f12..0000000000 --- a/akka-persistence/akka-persistence-memcached/src/main/scala/akka/MemcachedStorage.scala +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.memcached - -import akka.actor.{newUuid} -import akka.stm._ -import akka.persistence.common._ - - -object MemcachedStorage extends Storage { - - type ElementType = Array[Byte] - def newMap: PersistentMap[ElementType, ElementType] = newMap(newUuid.toString) - def newVector: PersistentVector[ElementType] = newVector(newUuid.toString) - def newRef: PersistentRef[ElementType] = newRef(newUuid.toString) - override def newQueue: PersistentQueue[ElementType] = newQueue(newUuid.toString) - - def getMap(id: String): PersistentMap[ElementType, ElementType] = newMap(id) - def getVector(id: String): PersistentVector[ElementType] = newVector(id) - def getRef(id: String): PersistentRef[ElementType] = newRef(id) - override def getQueue(id: String): PersistentQueue[ElementType] = newQueue(id) - - def newMap(id: String): PersistentMap[ElementType, ElementType] = new MemcachedPersistentMap(id) - def newVector(id: String): PersistentVector[ElementType] = new MemcachedPersistentVector(id) - def newRef(id: String): PersistentRef[ElementType] = new MemcachedPersistentRef(id) - override def newQueue(id:String): PersistentQueue[ElementType] = new MemcachedPersistentQueue(id) -} - - -class MemcachedPersistentMap(id: String) extends PersistentMapBinary { - val uuid = id - val storage = MemcachedStorageBackend -} - - -class MemcachedPersistentVector(id: String) extends PersistentVector[Array[Byte]] { - val uuid = id - val storage = MemcachedStorageBackend -} - -class MemcachedPersistentRef(id: String) extends PersistentRef[Array[Byte]] { - val uuid = id - val storage = MemcachedStorageBackend -} - -class MemcachedPersistentQueue(id: String) extends PersistentQueue[Array[Byte]] { - val uuid = id - val storage = MemcachedStorageBackend -} diff --git a/akka-persistence/akka-persistence-memcached/src/main/scala/akka/MemcachedStorageBackend.scala b/akka-persistence/akka-persistence-memcached/src/main/scala/akka/MemcachedStorageBackend.scala deleted file mode 100644 index 21c4772fe1..0000000000 --- a/akka-persistence/akka-persistence-memcached/src/main/scala/akka/MemcachedStorageBackend.scala +++ /dev/null @@ -1,117 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.memcached - -import akka.persistence.common._ -import akka.config.Config.config -import net.spy.memcached._ -import net.spy.memcached.transcoders._ -import 
collection.JavaConversions -import java.lang.String -import collection.immutable.{TreeMap, Iterable} -import java.util.concurrent.{TimeoutException, Future, TimeUnit} - -private[akka] object MemcachedStorageBackend extends CommonStorageBackend { - - import CommonStorageBackendAccess._ - import CommonStorageBackend._ - import KVStorageBackend._ - import org.apache.commons.codec.binary.Base64 - - val clientAddresses = config.getString("akka.persistence.memcached.client.addresses", "localhost:11211") - val factory = new KetamaConnectionFactory - val client = new MemcachedClient(factory, AddrUtil.getAddresses(clientAddresses)) - val base64 = new Base64(76, Array.empty[Byte], true) - - def queueAccess = new MemcachedAccess("Q") - - def mapAccess = new MemcachedAccess("M") - - def vectorAccess = new MemcachedAccess("V") - - def refAccess = new MemcachedAccess("R") - - private[akka] class MemcachedAccess(val accessType: String) extends KVStorageBackendAccess { - - val typeBytes = stringToByteArray(accessType) - - private def encodeKey(key: Array[Byte]): Array[Byte] = { - val newkey = new Array[Byte](key.length + typeBytes.length) - System.arraycopy(key, 0, newkey, 0, key.length) - System.arraycopy(typeBytes, 0, newkey, key.length, typeBytes.length) - newkey - } - - private def keyStr(key: Array[Byte]): String = { - base64.encodeToString(key) - } - - override def decodeMapKey(owner: String, key: Array[Byte]) = { - val newkey = new Array[Byte](key.length - typeBytes.length) - System.arraycopy(key, 0, newkey, 0, newkey.length) - super.decodeMapKey(owner, newkey) - } - - def drop() = client.flush() - - def delete(key: Array[Byte]) = { - retry(5, (1L, TimeUnit.SECONDS), false) { - client.delete(keyStr(encodeKey(key))) - } - } - - def getAll(keys: Iterable[Array[Byte]]) = { - val jmap = client.getBulk(JavaConversions.asJavaList(keys.map{ - k: Array[Byte] => - keyStr(encodeKey(k)) - }.toList)) - JavaConversions.asScalaMap(jmap).map{ - kv => kv match { - case (key, value) => (base64.decode(key) -> value.asInstanceOf[Array[Byte]]) - } - } - } - - def get(key: Array[Byte], default: Array[Byte]) = { - Option(client.get(keyStr(encodeKey(key)))) match { - case Some(value) => value.asInstanceOf[Array[Byte]] - case None => default - } - } - - def get(key: Array[Byte]) = get(key, null) - - - def put(key: Array[Byte], value: Array[Byte]) = { - retry(5, (1L, TimeUnit.SECONDS), true) { - client.set(keyStr(encodeKey(key)), Integer.MAX_VALUE, value) - } - - } - - private def retry(tries: Int, waitFor: (Long, TimeUnit), tillTrue: Boolean)(action: => Future[java.lang.Boolean]): Unit = { - if (tries == 0) { - throw new TimeoutException("Exahusted all retries performing an operation on memcached") - } else { - val future = action - try - { - if (future.get(waitFor._1, waitFor._2).equals(false) && tillTrue) { - log.debug("memcached future returned false, operation failed. retrying") - retry(tries - 1, waitFor, tillTrue)(action) - } - } catch { - case te: TimeoutException => { - log.debug("memcached future timed out. 
retrying") - retry(tries - 1, waitFor, tillTrue)(action) - } - } - } - } - - } - - -} diff --git a/akka-persistence/akka-persistence-memcached/src/test/scala/MemcachedStorageBackendCompatibilityTest.scala b/akka-persistence/akka-persistence-memcached/src/test/scala/MemcachedStorageBackendCompatibilityTest.scala deleted file mode 100644 index 6881d25c20..0000000000 --- a/akka-persistence/akka-persistence-memcached/src/test/scala/MemcachedStorageBackendCompatibilityTest.scala +++ /dev/null @@ -1,49 +0,0 @@ -package akka.persistence.memcached - - -import org.junit.runner.RunWith -import org.scalatest.junit.JUnitRunner -import akka.persistence.common.{QueueStorageBackendTest, VectorStorageBackendTest, MapStorageBackendTest, RefStorageBackendTest} - -@RunWith(classOf[JUnitRunner]) -class MemcachedRefStorageBackendTestIntegration extends RefStorageBackendTest { - def dropRefs = { - MemcachedStorageBackend.refAccess.drop - } - - - def storage = MemcachedStorageBackend -} - -@RunWith(classOf[JUnitRunner]) -class MemcachedMapStorageBackendTestIntegration extends MapStorageBackendTest { - def dropMaps = { - MemcachedStorageBackend.mapAccess.drop - } - - - def storage = MemcachedStorageBackend -} - -@RunWith(classOf[JUnitRunner]) -class MemcachedVectorStorageBackendTestIntegration extends VectorStorageBackendTest { - def dropVectors = { - MemcachedStorageBackend.vectorAccess.drop - } - - - def storage = MemcachedStorageBackend -} - - -@RunWith(classOf[JUnitRunner]) -class MemcachedQueueStorageBackendTestIntegration extends QueueStorageBackendTest { - def dropQueues = { - MemcachedStorageBackend.queueAccess.drop - } - - - def storage = MemcachedStorageBackend -} - - diff --git a/akka-persistence/akka-persistence-memcached/src/test/scala/MemcachedTicket343TestIntegration.scala b/akka-persistence/akka-persistence-memcached/src/test/scala/MemcachedTicket343TestIntegration.scala deleted file mode 100644 index 3a5da2241f..0000000000 --- a/akka-persistence/akka-persistence-memcached/src/test/scala/MemcachedTicket343TestIntegration.scala +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.memcached - - -import org.junit.runner.RunWith -import org.scalatest.junit.JUnitRunner -import akka.persistence.common._ - -@RunWith(classOf[JUnitRunner]) -class MemcachedTicket343TestIntegration extends Ticket343Test { - def dropMapsAndVectors: Unit = { - MemcachedStorageBackend.vectorAccess.drop - MemcachedStorageBackend.mapAccess.drop - } - - def getVector: (String) => PersistentVector[Array[Byte]] = MemcachedStorage.getVector - - def getMap: (String) => PersistentMap[Array[Byte], Array[Byte]] = MemcachedStorage.getMap - -} diff --git a/akka-persistence/akka-persistence-mongo/src/main/scala/akka/MongoStorage.scala b/akka-persistence/akka-persistence-mongo/src/main/scala/akka/MongoStorage.scala deleted file mode 100644 index 07980a8e3c..0000000000 --- a/akka-persistence/akka-persistence-mongo/src/main/scala/akka/MongoStorage.scala +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.mongo - -import akka.stm._ -import akka.persistence.common._ -import akka.actor.{newUuid} - -object MongoStorage extends Storage { - type ElementType = Array[Byte] - - def newMap: PersistentMap[ElementType, ElementType] = newMap(newUuid.toString) - def newVector: PersistentVector[ElementType] = newVector(newUuid.toString) - def newRef: PersistentRef[ElementType] = newRef(newUuid.toString) - - def getMap(id: 
String): PersistentMap[ElementType, ElementType] = newMap(id) - def getVector(id: String): PersistentVector[ElementType] = newVector(id) - def getRef(id: String): PersistentRef[ElementType] = newRef(id) - - def newMap(id: String): PersistentMap[ElementType, ElementType] = new MongoPersistentMap(id) - def newVector(id: String): PersistentVector[ElementType] = new MongoPersistentVector(id) - def newRef(id: String): PersistentRef[ElementType] = new MongoPersistentRef(id) -} - -/** - * Implements a persistent transactional map based on the MongoDB document storage. - * - * @author Debasish Ghosh - */ -class MongoPersistentMap(id: String) extends PersistentMapBinary { - val uuid = id - val storage = MongoStorageBackend -} - -/** - * Implements a persistent transactional vector based on the MongoDB - * document storage. - * - * @author Debasish Ghosh - */ -class MongoPersistentVector(id: String) extends PersistentVector[Array[Byte]] { - val uuid = id - val storage = MongoStorageBackend -} - -class MongoPersistentRef(id: String) extends PersistentRef[Array[Byte]] { - val uuid = id - val storage = MongoStorageBackend -} diff --git a/akka-persistence/akka-persistence-mongo/src/main/scala/akka/MongoStorageBackend.scala b/akka-persistence/akka-persistence-mongo/src/main/scala/akka/MongoStorageBackend.scala deleted file mode 100644 index e1a2405222..0000000000 --- a/akka-persistence/akka-persistence-mongo/src/main/scala/akka/MongoStorageBackend.scala +++ /dev/null @@ -1,229 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.mongo - -import akka.stm._ -import akka.persistence.common._ -import akka.util.Logging -import akka.config.Config.config - -import com.novus.casbah.mongodb.Imports._ - -/** - * A module for supporting MongoDB based persistence. - *

- * The module offers functionality for:
- * <ul>
- * <li>Persistent Maps</li>
- * <li>Persistent Vectors</li>
- * <li>Persistent Refs</li>
- * </ul>
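- * <p/>
- * All structures stored under a given id share one document in the "akka_coll" collection, keyed by the reserved "__key" field (see COLLECTION and KEY below).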
    - * @author Debasish Ghosh - */ -private[akka] object MongoStorageBackend extends - MapStorageBackend[Array[Byte], Array[Byte]] with - VectorStorageBackend[Array[Byte]] with - RefStorageBackend[Array[Byte]] with - Logging { - - val KEY = "__key" - val REF = "__ref" - val COLLECTION = "akka_coll" - - val HOSTNAME = config.getString("akka.persistence.mongodb.hostname", "127.0.0.1") - val DBNAME = config.getString("akka.persistence.mongodb.dbname", "testdb") - val PORT = config.getInt("akka.persistence.mongodb.port", 27017) - - val db: MongoDB = MongoConnection(HOSTNAME, PORT)(DBNAME) - val coll: MongoCollection = db(COLLECTION) - - def drop() { db.dropDatabase() } - - def insertMapStorageEntryFor(name: String, key: Array[Byte], value: Array[Byte]) { - insertMapStorageEntriesFor(name, List((key, value))) - } - - def insertMapStorageEntriesFor(name: String, entries: List[(Array[Byte], Array[Byte])]) { - db.safely { db => - val q: DBObject = MongoDBObject(KEY -> name) - coll.findOne(q) match { - case Some(dbo) => - entries.foreach { case (k, v) => dbo += new String(k) -> v } - db.safely { db => coll.update(q, dbo, true, false) } - case None => - val builder = MongoDBObject.newBuilder - builder += KEY -> name - entries.foreach { case (k, v) => builder += new String(k) -> v } - coll += builder.result.asDBObject - } - } - } - - def removeMapStorageFor(name: String): Unit = { - val q: DBObject = MongoDBObject(KEY -> name) - db.safely { db => coll.remove(q) } - } - - - private def queryFor[T](name: String)(body: (MongoDBObject, Option[DBObject]) => T): T = { - val q = MongoDBObject(KEY -> name) - body(q, coll.findOne(q)) - } - - def removeMapStorageFor(name: String, key: Array[Byte]): Unit = queryFor(name) { (q, dbo) => - dbo.foreach { d => - d -= new String(key) - db.safely { db => coll.update(q, d, true, false) } - } - } - - def getMapStorageEntryFor(name: String, key: Array[Byte]): Option[Array[Byte]] = queryFor(name) { (q, dbo) => - dbo.map { d => - d.getAs[Array[Byte]](new String(key)) - }.getOrElse(None) - } - - def getMapStorageSizeFor(name: String): Int = queryFor(name) { (q, dbo) => - dbo.map { d => - d.size - 2 // need to exclude object id and our KEY - }.getOrElse(0) - } - - def getMapStorageFor(name: String): List[(Array[Byte], Array[Byte])] = queryFor(name) { (q, dbo) => - dbo.map { d => - for { - (k, v) <- d.toList - if k != "_id" && k != KEY - } yield (k.getBytes, v.asInstanceOf[Array[Byte]]) - }.getOrElse(List.empty[(Array[Byte], Array[Byte])]) - } - - def getMapStorageRangeFor(name: String, start: Option[Array[Byte]], - finish: Option[Array[Byte]], - count: Int): List[(Array[Byte], Array[Byte])] = queryFor(name) { (q, dbo) => - dbo.map { d => - // get all keys except the special ones - val keys = d.keys - .filter(k => k != "_id" && k != KEY) - .toList - .sortWith(_ < _) - - // if the supplied start is not defined, get the head of keys - val s = start.map(new String(_)).getOrElse(keys.head) - - // if the supplied finish is not defined, get the last element of keys - val f = finish.map(new String(_)).getOrElse(keys.last) - - // slice from keys: both ends inclusive - val ks = keys.slice(keys.indexOf(s), scala.math.min(count, keys.indexOf(f) + 1)) - ks.map(k => (k.getBytes, d.getAs[Array[Byte]](k).get)) - }.getOrElse(List.empty[(Array[Byte], Array[Byte])]) - } - - def insertVectorStorageEntryFor(name: String, element: Array[Byte]) = { - insertVectorStorageEntriesFor(name, List(element)) - } - - def insertVectorStorageEntriesFor(name: String, elements: List[Array[Byte]]) = { - // 
lookup with name - val q: DBObject = MongoDBObject(KEY -> name) - - db.safely { db => - coll.findOne(q) match { - // exists : need to update - case Some(dbo) => - dbo -= KEY - dbo -= "_id" - val listBuilder = MongoDBList.newBuilder - - // expensive! - listBuilder ++= (elements ++ dbo.toSeq.sortWith((e1, e2) => (e1._1.toInt < e2._1.toInt)).map(_._2)) - - val builder = MongoDBObject.newBuilder - builder += KEY -> name - builder ++= listBuilder.result - coll.update(q, builder.result.asDBObject, true, false) - - // new : just add - case None => - val listBuilder = MongoDBList.newBuilder - listBuilder ++= elements - - val builder = MongoDBObject.newBuilder - builder += KEY -> name - builder ++= listBuilder.result - coll += builder.result.asDBObject - } - } - } - - def updateVectorStorageEntryFor(name: String, index: Int, elem: Array[Byte]) = queryFor(name) { (q, dbo) => - dbo.foreach { d => - d += ((index.toString, elem)) - db.safely { db => coll.update(q, d, true, false) } - } - } - - def getVectorStorageEntryFor(name: String, index: Int): Array[Byte] = queryFor(name) { (q, dbo) => - dbo.map { d => - d(index.toString).asInstanceOf[Array[Byte]] - }.getOrElse(Array.empty[Byte]) - } - - /** - * if start and finish both are defined, ignore count and - * report the range [start, finish) - * if start is not defined, assume start = 0 - * if start == 0 and finish == 0, return an empty collection - */ - def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int): List[Array[Byte]] = queryFor(name) { (q, dbo) => - dbo.map { d => - val ls = d.filter { case (k, v) => k != KEY && k != "_id" } - .toSeq - .sortWith((e1, e2) => (e1._1.toInt < e2._1.toInt)) - .map(_._2) - - val st = start.getOrElse(0) - val cnt = - if (finish.isDefined) { - val f = finish.get - if (f >= st) (f - st) else count - } - else count - if (st == 0 && cnt == 0) List() - ls.slice(st, st + cnt).asInstanceOf[List[Array[Byte]]] - }.getOrElse(List.empty[Array[Byte]]) - } - - def getVectorStorageSizeFor(name: String): Int = queryFor(name) { (q, dbo) => - dbo.map { d => d.size - 2 }.getOrElse(0) - } - - def insertRefStorageFor(name: String, element: Array[Byte]) = { - // lookup with name - val q: DBObject = MongoDBObject(KEY -> name) - - db.safely { db => - coll.findOne(q) match { - // exists : need to update - case Some(dbo) => - dbo += ((REF, element)) - coll.update(q, dbo, true, false) - - // not found : make one - case None => - val builder = MongoDBObject.newBuilder - builder += KEY -> name - builder += REF -> element - coll += builder.result.asDBObject - } - } - } - - def getRefStorageFor(name: String): Option[Array[Byte]] = queryFor(name) { (q, dbo) => - dbo.map { d => - d.getAs[Array[Byte]](REF) - }.getOrElse(None) - } -} diff --git a/akka-persistence/akka-persistence-mongo/src/test/scala/MongoPersistentActorSpec.scala b/akka-persistence/akka-persistence-mongo/src/test/scala/MongoPersistentActorSpec.scala deleted file mode 100644 index 48929a1688..0000000000 --- a/akka-persistence/akka-persistence-mongo/src/test/scala/MongoPersistentActorSpec.scala +++ /dev/null @@ -1,153 +0,0 @@ -package akka.persistence.mongo - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.BeforeAndAfterEach -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith - -import akka.actor.{Actor, ActorRef} -import Actor._ -import akka.stm._ - - -case class Balance(accountNo: String) -case class Debit(accountNo: String, amount: Int) -case class MultiDebit(accountNo: 
String, amounts: List[Int]) -case class Credit(accountNo: String, amount: Int) -case class Log(start: Int, finish: Int) -case object LogSize - -class BankAccountActor extends Actor { - - private val accountState = MongoStorage.newMap - private val txnLog = MongoStorage.newVector - - import sjson.json.DefaultProtocol._ - import sjson.json.JsonSerialization._ - - def receive = { case message => atomic { atomicReceive(message) } } - - def atomicReceive: Receive = { - // check balance - case Balance(accountNo) => - txnLog.add(("Balance:" + accountNo).getBytes) - self.reply( - accountState.get(accountNo.getBytes) - .map(frombinary[Int](_)) - .getOrElse(0)) - - // debit amount: can fail - case Debit(accountNo, amount) => - txnLog.add(("Debit:" + accountNo + " " + amount).getBytes) - val m = accountState.get(accountNo.getBytes) - .map(frombinary[Int](_)) - .getOrElse(0) - - accountState.put(accountNo.getBytes, tobinary(m - amount)) - if (amount > m) fail - - self.reply(m - amount) - - // many debits: can fail - // demonstrates true rollback even if multiple puts have been done - case MultiDebit(accountNo, amounts) => - val sum = amounts.foldRight(0)(_ + _) - txnLog.add(("MultiDebit:" + accountNo + " " + sum).getBytes) - - val m = accountState.get(accountNo.getBytes) - .map(frombinary[Int](_)) - .getOrElse(0) - - var cbal = m - amounts.foreach { amount => - accountState.put(accountNo.getBytes, tobinary(m - amount)) - cbal = cbal - amount - if (cbal < 0) fail - } - - self.reply(m - sum) - - // credit amount - case Credit(accountNo, amount) => - txnLog.add(("Credit:" + accountNo + " " + amount).getBytes) - val m = accountState.get(accountNo.getBytes) - .map(frombinary[Int](_)) - .getOrElse(0) - - accountState.put(accountNo.getBytes, tobinary(m + amount)) - - self.reply(m + amount) - - case LogSize => - self.reply(txnLog.length) - - case Log(start, finish) => - self.reply(txnLog.slice(start, finish).map(new String(_))) - } - - def fail = throw new RuntimeException("Expected exception; to test fault-tolerance") -} - -@RunWith(classOf[JUnitRunner]) -class MongoPersistentActorSpec extends - Spec with - ShouldMatchers with - BeforeAndAfterEach { - - override def beforeEach { - MongoStorageBackend.drop - } - - override def afterEach { - MongoStorageBackend.drop - } - - describe("successful debit") { - it("should debit successfully") { - val bactor = actorOf[BankAccountActor] - bactor.start - bactor !! Credit("a-123", 5000) - bactor !! Debit("a-123", 3000) - - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(2000) - - bactor !! Credit("a-123", 7000) - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(9000) - - bactor !! Debit("a-123", 8000) - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(1000) - - (bactor !! LogSize).get.asInstanceOf[Int] should equal(7) - (bactor !! Log(0, 7)).get.asInstanceOf[Iterable[String]].size should equal(7) - } - } - - describe("unsuccessful debit") { - it("debit should fail") { - val bactor = actorOf[BankAccountActor] - bactor.start - bactor !! Credit("a-123", 5000) - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(5000) - evaluating { - bactor !! Debit("a-123", 7000) - } should produce [Exception] - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(5000) - (bactor !! LogSize).get.asInstanceOf[Int] should equal(3) - } - } - - describe("unsuccessful multidebit") { - it("multidebit should fail") { - val bactor = actorOf[BankAccountActor] - bactor.start - bactor !! Credit("a-123", 5000) - (bactor !! 
Balance("a-123")).get.asInstanceOf[Int] should equal(5000) - evaluating { - bactor !! MultiDebit("a-123", List(1000, 2000, 4000)) - } should produce [Exception] - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(5000) - (bactor !! LogSize).get.asInstanceOf[Int] should equal(3) - } - } -} diff --git a/akka-persistence/akka-persistence-mongo/src/test/scala/MongoStorageSpec.scala b/akka-persistence/akka-persistence-mongo/src/test/scala/MongoStorageSpec.scala deleted file mode 100644 index 482179455a..0000000000 --- a/akka-persistence/akka-persistence-mongo/src/test/scala/MongoStorageSpec.scala +++ /dev/null @@ -1,158 +0,0 @@ -package akka.persistence.mongo - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.BeforeAndAfterEach -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith -import java.util.NoSuchElementException - -@RunWith(classOf[JUnitRunner]) -class MongoStorageSpec extends - Spec with - ShouldMatchers with - BeforeAndAfterEach { - - override def beforeEach { - MongoStorageBackend.drop - } - - override def afterEach { - MongoStorageBackend.drop - } - - describe("persistent maps") { - it("should insert with single key and value") { - import MongoStorageBackend._ - - insertMapStorageEntryFor("t1", "odersky".getBytes, "scala".getBytes) - insertMapStorageEntryFor("t1", "gosling".getBytes, "java".getBytes) - insertMapStorageEntryFor("t1", "stroustrup".getBytes, "c++".getBytes) - getMapStorageSizeFor("t1") should equal(3) - new String(getMapStorageEntryFor("t1", "odersky".getBytes).get) should equal("scala") - new String(getMapStorageEntryFor("t1", "gosling".getBytes).get) should equal("java") - new String(getMapStorageEntryFor("t1", "stroustrup".getBytes).get) should equal("c++") - getMapStorageEntryFor("t1", "torvalds".getBytes) should equal(None) - } - - it("should insert with multiple keys and values") { - import MongoStorageBackend._ - - val l = List(("stroustrup", "c++"), ("odersky", "scala"), ("gosling", "java")) - insertMapStorageEntriesFor("t1", l.map { case (k, v) => (k.getBytes, v.getBytes) }) - getMapStorageSizeFor("t1") should equal(3) - new String(getMapStorageEntryFor("t1", "stroustrup".getBytes).get) should equal("c++") - new String(getMapStorageEntryFor("t1", "gosling".getBytes).get) should equal("java") - new String(getMapStorageEntryFor("t1", "odersky".getBytes).get) should equal("scala") - getMapStorageEntryFor("t1", "torvalds".getBytes) should equal(None) - - getMapStorageEntryFor("t2", "torvalds".getBytes) should equal(None) - - getMapStorageFor("t1").map { case (k, v) => (new String(k), new String(v)) } should equal (l) - - removeMapStorageFor("t1", "gosling".getBytes) - getMapStorageSizeFor("t1") should equal(2) - - removeMapStorageFor("t1") - getMapStorageSizeFor("t1") should equal(0) - } - - it("should do proper range queries") { - import MongoStorageBackend._ - val l = List( - ("bjarne stroustrup", "c++"), - ("martin odersky", "scala"), - ("james gosling", "java"), - ("yukihiro matsumoto", "ruby"), - ("slava pestov", "factor"), - ("rich hickey", "clojure"), - ("ola bini", "ioke"), - ("dennis ritchie", "c"), - ("larry wall", "perl"), - ("guido van rossum", "python"), - ("james strachan", "groovy")) - insertMapStorageEntriesFor("t1", l.map { case (k, v) => (k.getBytes, v.getBytes) }) - getMapStorageSizeFor("t1") should equal(l.size) - getMapStorageRangeFor("t1", None, None, 100).map { case (k, v) => (new String(k), new String(v)) } should equal(l.sortWith(_._1 < _._1)) - 
getMapStorageRangeFor("t1", None, None, 5).map { case (k, v) => (new String(k), new String(v)) }.size should equal(5) - } - } - - describe("persistent vectors") { - it("should insert a single value") { - import MongoStorageBackend._ - - insertVectorStorageEntryFor("t1", "martin odersky".getBytes) - insertVectorStorageEntryFor("t1", "james gosling".getBytes) - new String(getVectorStorageEntryFor("t1", 0)) should equal("james gosling") - new String(getVectorStorageEntryFor("t1", 1)) should equal("martin odersky") - } - - it("should insert multiple values") { - import MongoStorageBackend._ - - insertVectorStorageEntryFor("t1", "martin odersky".getBytes) - insertVectorStorageEntryFor("t1", "james gosling".getBytes) - insertVectorStorageEntriesFor("t1", List("ola bini".getBytes, "james strachan".getBytes, "dennis ritchie".getBytes)) - new String(getVectorStorageEntryFor("t1", 0)) should equal("ola bini") - new String(getVectorStorageEntryFor("t1", 1)) should equal("james strachan") - new String(getVectorStorageEntryFor("t1", 2)) should equal("dennis ritchie") - new String(getVectorStorageEntryFor("t1", 3)) should equal("james gosling") - new String(getVectorStorageEntryFor("t1", 4)) should equal("martin odersky") - } - - it("should fetch a range of values") { - import MongoStorageBackend._ - - insertVectorStorageEntryFor("t1", "martin odersky".getBytes) - insertVectorStorageEntryFor("t1", "james gosling".getBytes) - getVectorStorageSizeFor("t1") should equal(2) - insertVectorStorageEntriesFor("t1", List("ola bini".getBytes, "james strachan".getBytes, "dennis ritchie".getBytes)) - getVectorStorageRangeFor("t1", None, None, 100).map(new String(_)) should equal(List("ola bini", "james strachan", "dennis ritchie", "james gosling", "martin odersky")) - getVectorStorageRangeFor("t1", Some(0), Some(5), 100).map(new String(_)) should equal(List("ola bini", "james strachan", "dennis ritchie", "james gosling", "martin odersky")) - getVectorStorageRangeFor("t1", Some(2), Some(5), 100).map(new String(_)) should equal(List("dennis ritchie", "james gosling", "martin odersky")) - - getVectorStorageSizeFor("t1") should equal(5) - } - - it("should insert and query complex structures") { - import MongoStorageBackend._ - import sjson.json.DefaultProtocol._ - import sjson.json.JsonSerialization._ - - // a list[AnyRef] should be added successfully - val l = List("ola bini".getBytes, tobinary(List(100, 200, 300)), tobinary(List(1, 2, 3))) - - // for id = t1 - insertVectorStorageEntriesFor("t1", l) - new String(getVectorStorageEntryFor("t1", 0)) should equal("ola bini") - frombinary[List[Int]](getVectorStorageEntryFor("t1", 1)) should equal(List(100, 200, 300)) - frombinary[List[Int]](getVectorStorageEntryFor("t1", 2)) should equal(List(1, 2, 3)) - - getVectorStorageSizeFor("t1") should equal(3) - - // some more for id = t1 - val m = List(tobinary(Map(1 -> "dg", 2 -> "mc", 3 -> "nd")), tobinary(List("martin odersky", "james gosling"))) - insertVectorStorageEntriesFor("t1", m) - - // size should add up - getVectorStorageSizeFor("t1") should equal(5) - - // now for a diff id - insertVectorStorageEntriesFor("t2", l) - getVectorStorageSizeFor("t2") should equal(3) - } - } - - describe("persistent refs") { - it("should insert a ref") { - import MongoStorageBackend._ - - insertRefStorageFor("t1", "martin odersky".getBytes) - new String(getRefStorageFor("t1").get) should equal("martin odersky") - insertRefStorageFor("t1", "james gosling".getBytes) - new String(getRefStorageFor("t1").get) should equal("james gosling") - 
getRefStorageFor("t2") should equal(None) - } - } -} diff --git a/akka-persistence/akka-persistence-mongo/src/test/scala/MongoTicket343Spec.scala b/akka-persistence/akka-persistence-mongo/src/test/scala/MongoTicket343Spec.scala deleted file mode 100644 index 71eec80652..0000000000 --- a/akka-persistence/akka-persistence-mongo/src/test/scala/MongoTicket343Spec.scala +++ /dev/null @@ -1,346 +0,0 @@ -package akka.persistence.mongo - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach} -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith - -import akka.actor.{Actor, ActorRef} -import akka.config.Supervision.{OneForOneStrategy,Permanent} -import Actor._ -import akka.stm._ -import akka.util.Logging - -import MongoStorageBackend._ - -case class GET(k: String) -case class SET(k: String, v: String) -case class REM(k: String) -case class CONTAINS(k: String) -case object MAP_SIZE -case class MSET(kvs: List[(String, String)]) -case class REMOVE_AFTER_PUT(kvsToAdd: List[(String, String)], ksToRem: List[String]) -case class CLEAR_AFTER_PUT(kvsToAdd: List[(String, String)]) -case class PUT_WITH_SLICE(kvsToAdd: List[(String, String)], start: String, cnt: Int) -case class PUT_REM_WITH_SLICE(kvsToAdd: List[(String, String)], ksToRem: List[String], start: String, cnt: Int) - -case class VADD(v: String) -case class VUPD(i: Int, v: String) -case class VUPD_AND_ABORT(i: Int, v: String) -case class VGET(i: Int) -case object VSIZE -case class VGET_AFTER_VADD(vsToAdd: List[String], isToFetch: List[Int]) -case class VADD_WITH_SLICE(vsToAdd: List[String], start: Int, cnt: Int) - -object Storage { - class MongoSampleMapStorage extends Actor { - self.lifeCycle = Permanent - val FOO_MAP = "akka.sample.map" - - private var fooMap = atomic { MongoStorage.getMap(FOO_MAP) } - - def receive = { - case SET(k, v) => - atomic { - fooMap += (k.getBytes, v.getBytes) - } - self.reply((k, v)) - - case GET(k) => - val v = atomic { - fooMap.get(k.getBytes).map(new String(_)).getOrElse(k + " Not found") - } - self.reply(v) - - case REM(k) => - val v = atomic { - fooMap -= k.getBytes - } - self.reply(k) - - case CONTAINS(k) => - val v = atomic { - fooMap contains k.getBytes - } - self.reply(v) - - case MAP_SIZE => - val v = atomic { - fooMap.size - } - self.reply(v) - - case MSET(kvs) => atomic { - kvs.foreach {kv => fooMap += (kv._1.getBytes, kv._2.getBytes) } - } - self.reply(kvs.size) - - case REMOVE_AFTER_PUT(kvs2add, ks2rem) => atomic { - kvs2add.foreach {kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - - ks2rem.foreach {k => - fooMap -= k.getBytes - }} - self.reply(fooMap.size) - - case CLEAR_AFTER_PUT(kvs2add) => atomic { - kvs2add.foreach {kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - fooMap.clear - } - self.reply(true) - - case PUT_WITH_SLICE(kvs2add, from, cnt) => - val v = atomic { - kvs2add.foreach {kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - fooMap.slice(Some(from.getBytes), cnt) - } - self.reply(v: List[(Array[Byte], Array[Byte])]) - - case PUT_REM_WITH_SLICE(kvs2add, ks2rem, from, cnt) => - val v = atomic { - kvs2add.foreach {kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - ks2rem.foreach {k => - fooMap -= k.getBytes - } - fooMap.slice(Some(from.getBytes), cnt) - } - self.reply(v: List[(Array[Byte], Array[Byte])]) - } - } - - class MongoSampleVectorStorage extends Actor { - self.lifeCycle = Permanent - val FOO_VECTOR = "akka.sample.vector" - - private var fooVector = atomic { 
MongoStorage.getVector(FOO_VECTOR) } - - def receive = { - case VADD(v) => - val size = - atomic { - fooVector + v.getBytes - fooVector length - } - self.reply(size) - - case VGET(index) => - val ind = - atomic { - fooVector get index - } - self.reply(ind) - - case VGET_AFTER_VADD(vs, is) => - val els = - atomic { - vs.foreach(fooVector + _.getBytes) - (is.foldRight(List[Array[Byte]]())(fooVector.get(_) :: _)).map(new String(_)) - } - self.reply(els) - - case VUPD_AND_ABORT(index, value) => - val l = - atomic { - fooVector.update(index, value.getBytes) - // force fail - fooVector get 100 - } - self.reply(index) - - case VADD_WITH_SLICE(vs, s, c) => - val l = - atomic { - vs.foreach(fooVector + _.getBytes) - fooVector.slice(Some(s), None, c) - } - self.reply(l.map(new String(_))) - } - } -} - -import Storage._ - -@RunWith(classOf[JUnitRunner]) -class MongoTicket343Spec extends - Spec with - ShouldMatchers with - BeforeAndAfterAll with - BeforeAndAfterEach { - - - override def beforeAll { - MongoStorageBackend.drop - println("** destroyed database") - } - - override def beforeEach { - MongoStorageBackend.drop - println("** destroyed database") - } - - override def afterEach { - MongoStorageBackend.drop - println("** destroyed database") - } - - describe("Ticket 343 Issue #1") { - it("remove after put should work within the same transaction") { - val proc = actorOf[MongoSampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) - - (proc !! GET("dg")).getOrElse("Get failed") should equal("1") - (proc !! GET("mc")).getOrElse("Get failed") should equal("2") - (proc !! GET("nd")).getOrElse("Get failed") should equal("3") - - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) - - val add = List(("a", "1"), ("b", "2"), ("c", "3")) - val rem = List("a", "debasish") - (proc !! REMOVE_AFTER_PUT(add, rem)).getOrElse("REMOVE_AFTER_PUT failed") should equal(5) - - (proc !! GET("debasish")).getOrElse("debasish not found") should equal("debasish Not found") - (proc !! GET("a")).getOrElse("a not found") should equal("a Not found") - - (proc !! GET("b")).getOrElse("b not found") should equal("2") - - (proc !! CONTAINS("b")).getOrElse("b not found") should equal(true) - (proc !! CONTAINS("debasish")).getOrElse("debasish not found") should equal(false) - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(5) - proc.stop - } - } - - describe("Ticket 343 Issue #2") { - it("clear after put should work within the same transaction") { - val proc = actorOf[MongoSampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - val add = List(("a", "1"), ("b", "2"), ("c", "3")) - (proc !! CLEAR_AFTER_PUT(add)).getOrElse("CLEAR_AFTER_PUT failed") should equal(true) - - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(0) - proc.stop - } - } - - describe("Ticket 343 Issue #3") { - it("map size should change after the transaction") { - val proc = actorOf[MongoSampleMapStorage] - proc.start - - (proc !! 
SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) - - (proc !! GET("dg")).getOrElse("Get failed") should equal("1") - (proc !! GET("mc")).getOrElse("Get failed") should equal("2") - (proc !! GET("nd")).getOrElse("Get failed") should equal("3") - proc.stop - } - } - - describe("slice test") { - it("should pass") { - val proc = actorOf[MongoSampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - // (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) - - (proc !! PUT_WITH_SLICE(List(("ec", "1"), ("tb", "2"), ("mc", "10")), "dg", 3)).get.asInstanceOf[List[(Array[Byte], Array[Byte])]].map { case (k, v) => (new String(k), new String(v)) } should equal(List(("dg", "1"), ("ec", "1"), ("mc", "10"))) - - (proc !! PUT_REM_WITH_SLICE(List(("fc", "1"), ("gb", "2"), ("xy", "10")), List("tb", "fc"), "dg", 5)).get.asInstanceOf[List[(Array[Byte], Array[Byte])]].map { case (k, v) => (new String(k), new String(v)) } should equal(List(("dg", "1"), ("ec", "1"), ("gb", "2"), ("mc", "10"), ("nd", "3"))) - proc.stop - } - } - - describe("Ticket 343 Issue #4") { - it("vector get should not ignore elements that were in vector before transaction") { - - val proc = actorOf[MongoSampleVectorStorage] - proc.start - - // add 4 elements in separate transactions - (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) - (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) - (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) - (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) - - new String((proc !! VGET(0)).get.asInstanceOf[Array[Byte]] ) should equal("nilanjan") - new String((proc !! VGET(1)).get.asInstanceOf[Array[Byte]] ) should equal("ramanendu") - new String((proc !! VGET(2)).get.asInstanceOf[Array[Byte]] ) should equal("maulindu") - new String((proc !! VGET(3)).get.asInstanceOf[Array[Byte]] ) should equal("debasish") - - // now add 3 more and do gets in the same transaction - (proc !! VGET_AFTER_VADD(List("a", "b", "c"), List(0, 2, 4))).get.asInstanceOf[List[String]] should equal(List("c", "a", "ramanendu")) - proc.stop - } - } - - describe("Ticket 343 Issue #6") { - it("vector update should not ignore transaction") { - val proc = actorOf[MongoSampleVectorStorage] - proc.start - - // add 4 elements in separate transactions - (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) - (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) - (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) - (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) - - evaluating { - (proc !! VUPD_AND_ABORT(0, "virat")).getOrElse("VUPD_AND_ABORT failed") - } should produce [Exception] - - // update aborts and hence values will remain unchanged - new String((proc !! 
VGET(0)).get.asInstanceOf[Array[Byte]] ) should equal("nilanjan") - proc.stop - } - } - - describe("Ticket 343 Issue #5") { - it("vector slice() should not ignore elements added in current transaction") { - val proc = actorOf[MongoSampleVectorStorage] - proc.start - - // add 4 elements in separate transactions - (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) - (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) - (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) - (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) - - // slice with no new elements added in current transaction - (proc !! VADD_WITH_SLICE(List(), 2, 2)).getOrElse("VADD_WITH_SLICE failed") should equal(Vector("maulindu", "debasish")) - - // slice with new elements added in current transaction - (proc !! VADD_WITH_SLICE(List("a", "b", "c", "d"), 2, 2)).getOrElse("VADD_WITH_SLICE failed") should equal(Vector("b", "a")) - proc.stop - } - } -} diff --git a/akka-persistence/akka-persistence-redis/src/main/scala/akka/RedisPubSubServer.scala b/akka-persistence/akka-persistence-redis/src/main/scala/akka/RedisPubSubServer.scala deleted file mode 100644 index 375bc60bae..0000000000 --- a/akka-persistence/akka-persistence-redis/src/main/scala/akka/RedisPubSubServer.scala +++ /dev/null @@ -1,42 +0,0 @@ -package akka.persistence.redis - -import akka.actor.Actor -import com.redis._ - -sealed trait Msg -case class Subscribe(channels: Array[String]) extends Msg -case class Register(callback: PubSubMessage => Any) extends Msg -case class Unsubscribe(channels: Array[String]) extends Msg -case object UnsubscribeAll extends Msg -case class Publish(channel: String, msg: String) extends Msg - -class Subscriber(client: RedisClient) extends Actor { - var callback: PubSubMessage => Any = { m => } - - def receive = { - case Subscribe(channels) => - client.subscribe(channels.head, channels.tail: _*)(callback) - self.reply_?(true) - - case Register(cb) => - callback = cb - self.reply_?(true) - - case Unsubscribe(channels) => - client.unsubscribe(channels.head, channels.tail: _*) - self.reply_?(true) - - case UnsubscribeAll => - client.unsubscribe - self.reply_?(true) - } -} - -class Publisher(client: RedisClient) extends Actor { - def receive = { - case Publish(channel, message) => - client.publish(channel, message) - self.reply_?(true) - } -} - diff --git a/akka-persistence/akka-persistence-redis/src/main/scala/akka/RedisStorage.scala b/akka-persistence/akka-persistence-redis/src/main/scala/akka/RedisStorage.scala deleted file mode 100644 index 591d337af9..0000000000 --- a/akka-persistence/akka-persistence-redis/src/main/scala/akka/RedisStorage.scala +++ /dev/null @@ -1,80 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.redis - -import akka.actor.{newUuid} -import akka.stm._ -import akka.persistence.common._ - -object RedisStorage extends Storage { - type ElementType = Array[Byte] - - def newMap: PersistentMap[ElementType, ElementType] = newMap(newUuid.toString) - def newVector: PersistentVector[ElementType] = newVector(newUuid.toString) - def newRef: PersistentRef[ElementType] = newRef(newUuid.toString) - override def newQueue: PersistentQueue[ElementType] = newQueue(newUuid.toString) - override def newSortedSet: PersistentSortedSet[ElementType] = newSortedSet(newUuid.toString) - - def getMap(id: String): PersistentMap[ElementType, ElementType] = newMap(id) - def getVector(id: String): PersistentVector[ElementType] = newVector(id) - 
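- // note: the get* accessors delegate straight to new*: the id is the storage
- // key, so looking up and creating a structure are the same operation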
def getRef(id: String): PersistentRef[ElementType] = newRef(id) - override def getQueue(id: String): PersistentQueue[ElementType] = newQueue(id) - override def getSortedSet(id: String): PersistentSortedSet[ElementType] = newSortedSet(id) - - def newMap(id: String): PersistentMap[ElementType, ElementType] = new RedisPersistentMap(id) - def newVector(id: String): PersistentVector[ElementType] = new RedisPersistentVector(id) - def newRef(id: String): PersistentRef[ElementType] = new RedisPersistentRef(id) - override def newQueue(id: String): PersistentQueue[ElementType] = new RedisPersistentQueue(id) - override def newSortedSet(id: String): PersistentSortedSet[ElementType] = - new RedisPersistentSortedSet(id) -} - -/** - * Implements a persistent transactional map based on the Redis storage. - * - * @author Debasish Ghosh - */ -class RedisPersistentMap(id: String) extends PersistentMapBinary { - val uuid = id - val storage = RedisStorageBackend -} - -/** - * Implements a persistent transactional vector based on the Redis - * storage. - * - * @author Debasish Ghosh - */ -class RedisPersistentVector(id: String) extends PersistentVector[Array[Byte]] { - val uuid = id - val storage = RedisStorageBackend -} - -class RedisPersistentRef(id: String) extends PersistentRef[Array[Byte]] { - val uuid = id - val storage = RedisStorageBackend -} - -/** - * Implements a persistent transactional queue based on the Redis - * storage. - * - * @author Debasish Ghosh - */ -class RedisPersistentQueue(id: String) extends PersistentQueue[Array[Byte]] { - val uuid = id - val storage = RedisStorageBackend -} - -/** - * Implements a persistent transactional sorted set based on the Redis - * storage. - * - * @author Debasish Ghosh - */ -class RedisPersistentSortedSet(id: String) extends PersistentSortedSetBinary { - val uuid = id - val storage = RedisStorageBackend -} diff --git a/akka-persistence/akka-persistence-redis/src/main/scala/akka/RedisStorageBackend.scala b/akka-persistence/akka-persistence-redis/src/main/scala/akka/RedisStorageBackend.scala deleted file mode 100644 index d6a8cfb0a7..0000000000 --- a/akka-persistence/akka-persistence-redis/src/main/scala/akka/RedisStorageBackend.scala +++ /dev/null @@ -1,362 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.redis - -import akka.stm._ -import akka.persistence.common._ -import akka.util.Logging -import akka.config.Config.config - -import com.redis._ - -trait Base64StringEncoder { - def byteArrayToString(bytes: Array[Byte]): String - def stringToByteArray(str: String): Array[Byte] -} - -object CommonsCodec { - import org.apache.commons.codec.binary.Base64 - import org.apache.commons.codec.binary.Base64._ - - val b64 = new Base64(true) - - trait CommonsCodecBase64StringEncoder { - def byteArrayToString(bytes: Array[Byte]) = encodeBase64URLSafeString(bytes) - def stringToByteArray(str: String) = b64.decode(str) - } - - object Base64StringEncoder extends Base64StringEncoder with CommonsCodecBase64StringEncoder -} - -import CommonsCodec._ -import CommonsCodec.Base64StringEncoder._ - -/** - * A module for supporting Redis based persistence. - *

- * The module offers functionality for:
- * <ul>
- * <li>Persistent Maps</li>
- * <li>Persistent Vectors</li>
- * <li>Persistent Refs</li>
- * <li>Persistent Queues</li>
- * <li>Persistent Sorted Sets</li>
- * </ul>
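- * <p/>
- * Maps are flattened to plain redis keys of the form name:key; vectors and queues are stored as redis lists, refs as plain values, and sorted sets as redis sorted sets.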
    - * @author Debasish Ghosh - */ -private [akka] object RedisStorageBackend extends - MapStorageBackend[Array[Byte], Array[Byte]] with - VectorStorageBackend[Array[Byte]] with - RefStorageBackend[Array[Byte]] with - QueueStorageBackend[Array[Byte]] with - SortedSetStorageBackend[Array[Byte]] with - Logging { - - // need an explicit definition in akka-conf - val nodes = config.getList("akka.persistence.redis.cluster") - - def connect() = - nodes match { - case Seq() => - // no cluster defined - val REDIS_SERVER_HOSTNAME = config.getString("akka.persistence.redis.hostname", "127.0.0.1") - val REDIS_SERVER_PORT = config.getInt("akka.persistence.redis.port", 6379) - new RedisClient(REDIS_SERVER_HOSTNAME, REDIS_SERVER_PORT) - - case s => - // with cluster - import com.redis.cluster._ - log.info("Running on Redis cluster") - new RedisCluster(nodes: _*) { - val keyTag = Some(NoOpKeyTag) - } - } - - var db = connect() - - /** - * Map storage in Redis. - *

    - * Maps are stored as key/value pairs in redis. - */ - def insertMapStorageEntryFor(name: String, key: Array[Byte], value: Array[Byte]): Unit = withErrorHandling { - insertMapStorageEntriesFor(name, List((key, value))) - } - - def insertMapStorageEntriesFor(name: String, entries: List[Tuple2[Array[Byte], Array[Byte]]]): Unit = withErrorHandling { - mset(entries.map(e => - (makeRedisKey(name, e._1), byteArrayToString(e._2)))) - } - - /** - * Make a redis key from an Akka Map key. - *

- * The key is made as follows:
- * <ul>
- * <li>the redis key is composed of 2 parts: the transaction id and the map key, separated by :</li>
- * <li>: is chosen since it cannot appear in the base64 encoding charset</li>
- * <li>both parts of the key need to be base64 encoded, since there can be spaces within each of them</li>
- * </ul>
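- * <p/>
- * As implemented below, the two parts are joined verbatim: makeRedisKey("t1", "odersky".getBytes) produces the redis key "t1:odersky".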
  • - */ - private [this] def makeRedisKey(name: String, key: Array[Byte]): String = withErrorHandling { - "%s:%s".format(name, new String(key)) - } - - private [this] def makeKeyFromRedisKey(redisKey: String) = withErrorHandling { - val nk = redisKey.split(':') - (nk(0), nk(1).getBytes) - } - - private [this] def mset(entries: List[(String, String)]): Unit = withErrorHandling { - entries.foreach {e: (String, String) => - db.set(e._1, e._2) - } - } - - def removeMapStorageFor(name: String): Unit = withErrorHandling { - db.keys("%s:*".format(name)) match { - case None => - throw new NoSuchElementException(name + " not present") - case Some(keys) => - keys.foreach(k => db.del(k.get)) - } - } - - def removeMapStorageFor(name: String, key: Array[Byte]): Unit = withErrorHandling { - db.del(makeRedisKey(name, key)) - } - - def getMapStorageEntryFor(name: String, key: Array[Byte]): Option[Array[Byte]] = withErrorHandling { - db.get(makeRedisKey(name, key)) - .map(stringToByteArray(_)) - .orElse(throw new NoSuchElementException(new String(key) + " not present")) - } - - def getMapStorageSizeFor(name: String): Int = withErrorHandling { - db.keys("%s:*".format(name)).map(_.length).getOrElse(0) - } - - def getMapStorageFor(name: String): List[(Array[Byte], Array[Byte])] = withErrorHandling { - db.keys("%s:*".format(name)) - .map { keys => - keys.map(key => (makeKeyFromRedisKey(key.get)._2, stringToByteArray(db.get(key.get).get))).toList - }.getOrElse { - throw new NoSuchElementException(name + " not present") - } - } - - def getMapStorageRangeFor(name: String, start: Option[Array[Byte]], - finish: Option[Array[Byte]], - count: Int): List[(Array[Byte], Array[Byte])] = withErrorHandling { - - import scala.collection.immutable.TreeMap - val wholeSorted = - TreeMap(getMapStorageFor(name).map(e => (new String(e._1), e._2)): _*) - - if (wholeSorted isEmpty) List() - - val startKey = - start match { - case Some(bytes) => Some(new String(bytes)) - case None => None - } - - val endKey = - finish match { - case Some(bytes) => Some(new String(bytes)) - case None => None - } - - ((startKey, endKey, count): @unchecked) match { - case ((Some(s), Some(e), _)) => - wholeSorted.range(s, e) - .toList - .map(e => (e._1.getBytes, e._2)) - .toList - case ((Some(s), None, c)) if c > 0 => - wholeSorted.from(s) - .iterator - .take(count) - .map(e => (e._1.getBytes, e._2)) - .toList - case ((Some(s), None, _)) => - wholeSorted.from(s) - .toList - .map(e => (e._1.getBytes, e._2)) - .toList - case ((None, Some(e), _)) => - wholeSorted.until(e) - .toList - .map(e => (e._1.getBytes, e._2)) - .toList - } - } - - def insertVectorStorageEntryFor(name: String, element: Array[Byte]): Unit = withErrorHandling { - db.lpush(name, byteArrayToString(element)) - } - - def insertVectorStorageEntriesFor(name: String, elements: List[Array[Byte]]): Unit = withErrorHandling { - elements.foreach(insertVectorStorageEntryFor(name, _)) - } - - def updateVectorStorageEntryFor(name: String, index: Int, elem: Array[Byte]): Unit = withErrorHandling { - db.lset(name, index, byteArrayToString(elem)) - } - - def getVectorStorageEntryFor(name: String, index: Int): Array[Byte] = withErrorHandling { - db.lindex(name, index) - .map(stringToByteArray(_)) - .getOrElse { - throw new NoSuchElementException(name + " does not have element at " + index) - } - } - - /** - * if start and finish both are defined, ignore count and - * report the range [start, finish) - * if start is not defined, assume start = 0 - * if start == 0 and finish == 0, return an empty 
/** - * if start and finish both are defined, ignore count and - * report the range [start, finish) - * if start is not defined, assume start = 0 - * if start == 0 and finish == 0, return an empty collection - */ - def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int): List[Array[Byte]] = withErrorHandling { - val s = if (start.isDefined) start.get else 0 - val cnt = - if (finish.isDefined) { - val f = finish.get - if (f >= s) (f - s) else count - } - else count - if (s == 0 && cnt == 0) List() - else - db.lrange(name, s, s + cnt - 1) match { - case None => - throw new NoSuchElementException(name + " does not have elements in the range specified") - case Some(l) => - l map (e => stringToByteArray(e.get)) - } - } - - def getVectorStorageSizeFor(name: String): Int = withErrorHandling { - db.llen(name).getOrElse { throw new NoSuchElementException(name + " not present") } - } - - def insertRefStorageFor(name: String, element: Array[Byte]): Unit = withErrorHandling { - db.set(name, byteArrayToString(element)) - } - - def insertRefStorageFor(name: String, element: String): Unit = withErrorHandling { - db.set(name, element) - } - - def getRefStorageFor(name: String): Option[Array[Byte]] = withErrorHandling { - db.get(name) - .map(stringToByteArray(_)) - } - - // add to the end of the queue - def enqueue(name: String, item: Array[Byte]): Option[Int] = withErrorHandling { - db.rpush(name, byteArrayToString(item)) - } - - // pop from the front of the queue - def dequeue(name: String): Option[Array[Byte]] = withErrorHandling { - db.lpop(name) - .map(stringToByteArray(_)) - .orElse { - throw new NoSuchElementException(name + " not present") - } - } - - // get the size of the queue - def size(name: String): Int = withErrorHandling { - db.llen(name).getOrElse { throw new NoSuchElementException(name + " not present") } - } - - // return the items currently stored in the queue: - // start is the index to begin at, count is how many items to return - def peek(name: String, start: Int, count: Int): List[Array[Byte]] = withErrorHandling { - count match { - case 1 => - db.lindex(name, start) match { - case None => - throw new NoSuchElementException("No element at " + start) - case Some(s) => - List(stringToByteArray(s)) - } - case n => - db.lrange(name, start, start + n - 1) match { - case None => - throw new NoSuchElementException( - "No element found between " + start + " and " + (start + n - 1)) - case Some(es) => - es.map(e => stringToByteArray(e.get)) - } - } - } - - // completely delete the queue - def remove(name: String): Boolean = withErrorHandling { - db.del(name).map(_ == 1).getOrElse(false) - } - - // add item to sorted set identified by name - def zadd(name: String, zscore: String, item: Array[Byte]): Boolean = withErrorHandling { - db.zadd(name, zscore, byteArrayToString(item)) - .map(_ == 1) - .getOrElse(false) - } - - // remove item from sorted set identified by name - def zrem(name: String, item: Array[Byte]): Boolean = withErrorHandling { - db.zrem(name, byteArrayToString(item)) - .map(_ == 1) - .getOrElse(false) - } - - // cardinality of the set identified by name - def zcard(name: String): Int = withErrorHandling { - db.zcard(name).getOrElse { throw new NoSuchElementException(name + " not present") } - } - - def zscore(name: String, item: Array[Byte]): Option[Float] = withErrorHandling { - db.zscore(name, byteArrayToString(item)).map(_.toFloat) - } - - def zrange(name: String, start: Int, end: Int): List[Array[Byte]] = withErrorHandling { - db.zrange(name, start.toString, end.toString, RedisClient.ASC, false) -
.map(_.map(e => stringToByteArray(e.get))) - .getOrElse { - throw new NoSuchElementException(name + " not present") - } - } - - def zrangeWithScore(name: String, start: Int, end: Int): List[(Array[Byte], Float)] = withErrorHandling { - db.zrangeWithScore(name, start.toString, end.toString, RedisClient.ASC) - .map(_.map { case (elem, score) => (stringToByteArray(elem.get), score.get.toFloat) }) - .getOrElse { - throw new NoSuchElementException(name + " not present") - } - } - - def flushDB = withErrorHandling(db.flushdb) - - private def withErrorHandling[T](body: => T): T = { - try { - body - } catch { - case e: RedisConnectionException => { - db = connect() - body - } - case e: java.lang.NullPointerException => - throw new StorageException("Could not connect to Redis server") - case e => - throw new StorageException("Error in Redis: " + e.getMessage) - } - } -} diff --git a/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/ChatLog.java b/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/ChatLog.java deleted file mode 100644 index a802db45e3..0000000000 --- a/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/ChatLog.java +++ /dev/null @@ -1,24 +0,0 @@ -package akka.persistence.redis; - -import java.util.List; - -public class ChatLog extends Event { - - private static final long serialVersionUID = -7318212379604445117L; - private List log = null; - - public ChatLog(List log) { - this.log = log; - } - - public List getLog() { - return log; - } - - public String getLogString(String separator) { - String result = ""; - for (String logEntry : log) - result = result + separator + logEntry; - return result; - } -} diff --git a/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/ChatMessage.java b/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/ChatMessage.java deleted file mode 100644 index 54f715c91c..0000000000 --- a/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/ChatMessage.java +++ /dev/null @@ -1,21 +0,0 @@ -package akka.persistence.redis; - -public class ChatMessage extends Event { - - private static final long serialVersionUID = -764895205230020563L; - private String from = null; - private String message = null; - - public ChatMessage(String from, String message) { - this.from = from; - this.message = message; - } - - public String getFrom() { - return from; - } - - public String getMessage() { - return message; - } -} diff --git a/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/Event.java b/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/Event.java deleted file mode 100644 index d410aef110..0000000000 --- a/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/Event.java +++ /dev/null @@ -1,12 +0,0 @@ -package akka.persistence.redis; - -import java.io.Serializable; - -/** - * ChatServer's internal events. 
- */ -public abstract class Event implements Serializable { - - private static final long serialVersionUID = -1354942905395394545L; - -} diff --git a/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/GetChatLog.java b/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/GetChatLog.java deleted file mode 100644 index 7a34951e2d..0000000000 --- a/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/GetChatLog.java +++ /dev/null @@ -1,15 +0,0 @@ -package akka.persistence.redis; - -public class GetChatLog extends Event { - - private static final long serialVersionUID = -7000786115556740575L; - private String from = null; - - public GetChatLog(String from) { - this.from = from; - } - - public String getFrom() { - return from; - } -} diff --git a/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/RedisChatStorage.java b/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/RedisChatStorage.java deleted file mode 100644 index 2d2d42cf2b..0000000000 --- a/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/RedisChatStorage.java +++ /dev/null @@ -1,53 +0,0 @@ -package akka.persistence.redis; - -import java.io.UnsupportedEncodingException; -import java.util.ArrayList; -import java.util.List; - -import akka.transactor.UntypedTransactor; -import akka.persistence.common.PersistentVector; -import akka.persistence.redis.RedisStorage; -import akka.stm.*; - -public class RedisChatStorage extends UntypedTransactor { - private final String CHAT_LOG = "akka.chat.log"; - private PersistentVector chatLog = null; - - public RedisChatStorage() { - chatLog = RedisStorage.newVector(CHAT_LOG); - } - - public void atomically(final Object msg) throws Exception { - if (msg instanceof ChatMessage) { - new Atomic() { - public Object atomically() { - try { - return chatLog.add(((ChatMessage) msg).getMessage().getBytes("UTF-8")); - } catch (UnsupportedEncodingException e) { - e.printStackTrace(); - } - return null; - } - }.execute(); - } else if (msg instanceof GetChatLog) { - List messageList = new Atomic>() { - public List atomically() { - List messages = new ArrayList(); - - for (byte[] messageBytes : chatLog.asJavaList()) - try { - messages.add(new String(messageBytes, "UTF-8")); - } catch (UnsupportedEncodingException e) { - e.printStackTrace(); - } - return messages; - } - }.execute(); - getContext().replyUnsafe(new ChatLog(messageList)); - } - } - - public void postRestart(Throwable reason) { - chatLog = RedisStorage.getVector(CHAT_LOG); - } -} diff --git a/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/RedisStorageTests.java b/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/RedisStorageTests.java deleted file mode 100644 index 0da4679065..0000000000 --- a/akka-persistence/akka-persistence-redis/src/test/java/akka/persistence/redis/RedisStorageTests.java +++ /dev/null @@ -1,34 +0,0 @@ -package akka.persistence.redis; - -import static org.junit.Assert.*; -import org.junit.Test; -import org.junit.Before; - -import akka.actor.ActorRef; -import akka.actor.UntypedActor; -import akka.actor.UntypedActorFactory; - -public class RedisStorageTests { - - private ActorRef chat = null; - - @Before public void initialise() { - RedisStorageBackend.flushDB(); - chat = UntypedActor.actorOf(new UntypedActorFactory() { - public UntypedActor create() { - return new RedisChatStorage(); - } - }); - chat.start(); - } - - @Test public 
void doChat() { - chat.sendOneWay(new ChatMessage("debasish", "hi there")); - ChatLog cl = (ChatLog)chat.sendRequestReply(new GetChatLog("debasish")); - assertEquals(1, cl.getLog().size()); - chat.sendOneWay(new ChatMessage("debasish", "hi again")); - cl = (ChatLog)chat.sendRequestReply(new GetChatLog("debasish")); - assertEquals(2, cl.getLog().size()); - } -} - diff --git a/akka-persistence/akka-persistence-redis/src/test/scala/RedisInconsistentSizeBugTest.scala b/akka-persistence/akka-persistence-redis/src/test/scala/RedisInconsistentSizeBugTest.scala deleted file mode 100644 index 650eabfe4d..0000000000 --- a/akka-persistence/akka-persistence-redis/src/test/scala/RedisInconsistentSizeBugTest.scala +++ /dev/null @@ -1,73 +0,0 @@ -package akka.persistence.redis - -import sbinary._ -import sbinary.Operations._ -import sbinary.DefaultProtocol._ - -import akka.actor.{Actor, ActorRef} -import akka.config.Supervision.{OneForOneStrategy, Permanent} -import Actor._ -import akka.persistence.common.PersistentVector -import akka.stm._ -import akka.util.Logging - -import java.util.{Calendar, Date} - -object Serial { - implicit object DateFormat extends Format[Date] { - def reads(in : Input) = new Date(read[Long](in)) - def writes(out: Output, value: Date) = write[Long](out, value.getTime) - } - case class Name(id: Int, name: String, address: String, dateOfBirth: Date, dateDied: Option[Date]) - implicit val NameFormat: Format[Name] = asProduct5(Name)(Name.unapply(_).get) -} - -case class GETFOO(s: String) -case class SETFOO(s: String) - -object SampleStorage { - class RedisSampleStorage extends Actor { - self.lifeCycle = Permanent - val EVENT_MAP = "akka.sample.map" - - private var eventMap = atomic { RedisStorage.getMap(EVENT_MAP) } - - import sbinary._ - import DefaultProtocol._ - import Operations._ - import Serial._ - import java.util.Calendar - - val dtb = Calendar.getInstance.getTime - val n = Name(100, "debasish ghosh", "kolkata", dtb, Some(dtb)) - - def receive = { - case SETFOO(str) => - atomic { - eventMap += (str.getBytes, toByteArray[Name](n)) - } - self.reply(str) - - case GETFOO(str) => - val ev = atomic { - eventMap.keySet.size - } - println("************* " + ev) - self.reply(ev) - } - } -} - -import Serial._ -import SampleStorage._ - -object Runner { - def run { - val proc = actorOf[RedisSampleStorage] - proc.start - val i = (proc !! SETFOO("debasish")).as[String] - println("i = " + i) - val ev = (proc !! GETFOO("debasish")).as[Int] - println(ev) - } -} diff --git a/akka-persistence/akka-persistence-redis/src/test/scala/RedisPersistentActorSpec.scala b/akka-persistence/akka-persistence-redis/src/test/scala/RedisPersistentActorSpec.scala deleted file mode 100644 index ddc77a4bdf..0000000000 --- a/akka-persistence/akka-persistence-redis/src/test/scala/RedisPersistentActorSpec.scala +++ /dev/null @@ -1,141 +0,0 @@ -package akka.persistence.redis - -import org.scalatest.junit.JUnitSuite - -import org.junit.{Test, Before} -import org.junit.Assert._ - -import akka.actor.{Actor, ActorRef} -import akka.actor.Actor._ -import akka.stm._ - -/** - * A persistent actor based on Redis storage. - *
- * Demonstrates a bank account operation consisting of messages that:
- *   - checks balance: Balance
- *   - debits an amount: Debit
- *   - debits multiple amounts: MultiDebit
- *   - credits an amount: Credit
- *
    - * Needs a running Redis server. - * @author Debasish Ghosh - */ - -case class Balance(accountNo: String) -case class Debit(accountNo: String, amount: Int) -case class MultiDebit(accountNo: String, amounts: List[Int]) -case class Credit(accountNo: String, amount: Int) -case object LogSize - -class AccountActor extends Actor { - private val accountState = RedisStorage.newMap - private val txnLog = RedisStorage.newVector - self.timeout = 100000 - - def receive = { case message => atomic { atomicReceive(message) } } - - def atomicReceive: Receive = { - // check balance - case Balance(accountNo) => - txnLog.add("Balance:%s".format(accountNo).getBytes) - self.reply(new String(accountState.get(accountNo.getBytes).get).toInt) - - // debit amount: can fail - case Debit(accountNo, amount) => - txnLog.add("Debit:%s %s".format(accountNo, amount.toString).getBytes) - - val Some(m) = accountState.get(accountNo.getBytes).map(x => (new String(x)).toInt) orElse Some(0) - accountState.put(accountNo.getBytes, (m - amount).toString.getBytes) - if (amount > m) fail - - self.reply(m - amount) - - // many debits: can fail - // demonstrates true rollback even if multiple puts have been done - case MultiDebit(accountNo, amounts) => - txnLog.add("MultiDebit:%s %s".format(accountNo, amounts.map(_.intValue).foldLeft(0)(_ + _).toString).getBytes) - - val Some(m) = accountState.get(accountNo.getBytes).map(x => (new String(x)).toInt) orElse Some(0) - var bal = 0 - amounts.foreach {amount => - bal = bal + amount - accountState.put(accountNo.getBytes, (m - bal).toString.getBytes) - } - if (bal > m) fail - - self.reply(m - bal) - - // credit amount - case Credit(accountNo, amount) => - txnLog.add("Credit:%s %s".format(accountNo, amount.toString).getBytes) - - val Some(m) = accountState.get(accountNo.getBytes).map(x => (new String(x)).toInt) orElse Some(0) - accountState.put(accountNo.getBytes, (m + amount).toString.getBytes) - self.reply(m + amount) - - case LogSize => - self.reply(txnLog.length.asInstanceOf[AnyRef]) - } - - def fail = throw new RuntimeException("Expected exception; to test fault-tolerance") -} - -class RedisPersistentActorSpec extends JUnitSuite { - @Test - def testSuccessfulDebit = { - val bactor = actorOf(new AccountActor) - bactor.start - bactor !! Credit("a-123", 5000) - bactor !! Debit("a-123", 3000) - assertEquals(2000, (bactor !! Balance("a-123")).get) - - bactor !! Credit("a-123", 7000) - assertEquals(9000, (bactor !! Balance("a-123")).get) - - bactor !! Debit("a-123", 8000) - assertEquals(1000, (bactor !! Balance("a-123")).get) - - val c = (bactor !! LogSize).as[Int].get - assertTrue(7 == c) - } - - @Test - def testUnsuccessfulDebit = { - val bactor = actorOf[AccountActor] - bactor.start - bactor !! Credit("a-123", 5000) - assertEquals(5000, (bactor !! Balance("a-123")).get) - - try { - bactor !! Debit("a-123", 7000) - fail("should throw exception") - } catch { case e: RuntimeException => {}} - - assertEquals(5000, (bactor !! Balance("a-123")).get) - - // should not count the failed one - val c = (bactor !! LogSize).as[Int].get - assertTrue(3 == c) - } - - @Test - def testUnsuccessfulMultiDebit = { - val bactor = actorOf[AccountActor] - bactor.start - bactor !! Credit("a-123", 5000) - - assertEquals(5000, (bactor !! (Balance("a-123"), 5000)).get) - - try { - bactor !! MultiDebit("a-123", List(500, 2000, 1000, 3000)) - fail("should throw exception") - } catch { case e: RuntimeException => {}} - - assertEquals(5000, (bactor !! 
(Balance("a-123"), 5000)).get) - - // should not count the failed one - val c = (bactor !! LogSize).as[Int].get - assertTrue(3 == c) - } -} diff --git a/akka-persistence/akka-persistence-redis/src/test/scala/RedisPersistentQSpec.scala b/akka-persistence/akka-persistence-redis/src/test/scala/RedisPersistentQSpec.scala deleted file mode 100644 index 94c001f8e8..0000000000 --- a/akka-persistence/akka-persistence-redis/src/test/scala/RedisPersistentQSpec.scala +++ /dev/null @@ -1,144 +0,0 @@ -package akka.persistence.redis - -import org.junit.{Test, Before} -import org.junit.Assert._ - -import akka.actor.{Actor, ActorRef} -import Actor._ -import akka.stm._ - -/** - * A persistent actor based on Redis queue storage. - *
    - * Needs a running Redis server. - * @author Debasish Ghosh - */ - -case class NQ(accountNo: String) -case object DQ -case class MNDQ(accountNos: List[String], noOfDQs: Int) -case object SZ - -class QueueActor extends Actor { - private val accounts = RedisStorage.newQueue - - def receive = { case message => atomic { atomicReceive(message) } } - - def atomicReceive: Receive = { - // enqueue - case NQ(accountNo) => - accounts.enqueue(accountNo.getBytes) - self.reply(true) - - // dequeue - case DQ => - val d = new String(accounts.dequeue) - self.reply(d) - - // multiple NQ and DQ - case MNDQ(enqs, no) => - accounts.enqueue(enqs.map(_.getBytes): _*) - try { - (1 to no).foreach(e => accounts.dequeue) - } catch { - case e: Exception => fail - } - self.reply(true) - - // size - case SZ => - self.reply(accounts.size) - } - - def fail = throw new RuntimeException("Expected exception; to test fault-tolerance") -} - -import org.scalatest.junit.JUnitSuite -class RedisPersistentQSpec extends JUnitSuite { - @Test - def testSuccessfulNQ = { - val qa = actorOf(new QueueActor) - qa.start - qa !! NQ("a-123") - qa !! NQ("a-124") - qa !! NQ("a-125") - val t = (qa !! SZ).as[Int].get - assertTrue(3 == t) - } - - @Test - def testSuccessfulDQ = { - val qa = actorOf[QueueActor] - qa.start - qa !! NQ("a-123") - qa !! NQ("a-124") - qa !! NQ("a-125") - val s = (qa !! SZ).as[Int].get - assertTrue(3 == s) - assertEquals("a-123", (qa !! DQ).get) - assertEquals("a-124", (qa !! DQ).get) - assertEquals("a-125", (qa !! DQ).get) - val t = (qa !! SZ).as[Int].get - assertTrue(0 == t) - } - - @Test - def testSuccessfulMNDQ = { - val qa = actorOf[QueueActor] - qa.start - - qa !! NQ("a-123") - qa !! NQ("a-124") - qa !! NQ("a-125") - val t = (qa !! SZ).as[Int].get - assertTrue(3 == t) - assertEquals("a-123", (qa !! DQ).get) - val s = (qa !! SZ).as[Int].get - assertTrue(2 == s) - qa !! MNDQ(List("a-126", "a-127"), 2) - val u = (qa !! SZ).as[Int].get - assertTrue(2 == u) - } - - @Test - def testMixedMNDQ = { - val qa = actorOf[QueueActor] - qa.start - - // 3 enqueues - qa !! NQ("a-123") - qa !! NQ("a-124") - qa !! NQ("a-125") - - val t = (qa !! SZ).as[Int].get - assertTrue(3 == t) - - // dequeue 1 - assertEquals("a-123", (qa !! DQ).get) - - // size == 2 - val s = (qa !! SZ).as[Int].get - assertTrue(2 == s) - - // enqueue 2, dequeue 2 => size == 2 - qa !! MNDQ(List("a-126", "a-127"), 2) - val u = (qa !! SZ).as[Int].get - assertTrue(2 == u) - - // enqueue 2 => size == 4 - qa !! NQ("a-128") - qa !! NQ("a-129") - val v = (qa !! SZ).as[Int].get - assertTrue(4 == v) - - // enqueue 1 => size 5 - // dequeue 6 => fail transaction - // size should remain 4 - try { - qa !! MNDQ(List("a-130"), 6) - } catch { case e: Exception => {} } - - val w = (qa !! 
SZ).as[Int].get - assertTrue(4 == w) - } -} diff --git a/akka-persistence/akka-persistence-redis/src/test/scala/RedisPersistentSortedSetSpec.scala b/akka-persistence/akka-persistence-redis/src/test/scala/RedisPersistentSortedSetSpec.scala deleted file mode 100644 index 6215f00911..0000000000 --- a/akka-persistence/akka-persistence-redis/src/test/scala/RedisPersistentSortedSetSpec.scala +++ /dev/null @@ -1,271 +0,0 @@ -package akka.persistence.redis - -import org.scalatest.Spec -import org.scalatest.Assertions -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.BeforeAndAfterAll -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith - -import akka.actor.{Actor, ActorRef} -import Actor._ -import akka.stm._ - -/** - * A persistent actor based on Redis sortedset storage. - *
    - * Needs a running Redis server. - * @author Debasish Ghosh - */ - -trait ZScorable { - def zscore: Float -} - -case class Hacker(name: String, birth: String) extends ZScorable { - def zscore = birth.toFloat -} - -class SetThresholdViolationException extends RuntimeException - -// add hacker to the set -case class ADD(h: Hacker) - -// remove hacker from set -case class REMOVE(h: Hacker) - -// size of the set -case object SIZE - -// zscore of the hacker -case class SCORE(h: Hacker) - -// zrange -case class RANGE(start: Int, end: Int) - -// add and remove subject to the condition that there will be at least 3 hackers -case class MULTI(add: List[Hacker], rem: List[Hacker]) - -case class MULTIRANGE(add: List[Hacker]) - -class SortedSetActor extends Actor { - self.timeout = 100000 - private val hackers = RedisStorage.newSortedSet - - def receive = { case message => atomic { atomicReceive(message) } } - - def atomicReceive: Receive = { - case ADD(h) => - hackers.+(h.name.getBytes, h.zscore) - self.reply(true) - - case REMOVE(h) => - hackers.-(h.name.getBytes) - self.reply(true) - - case SIZE => - self.reply(hackers.size) - - case SCORE(h) => - self.reply(hackers.zscore(h.name.getBytes)) - - case RANGE(s, e) => - self.reply(hackers.zrange(s, e)) - - case MULTI(a, r) => - a.foreach{ h: Hacker => - hackers.+(h.name.getBytes, h.zscore) - } - try { - r.foreach { h => - if (hackers.size <= 3) - throw new SetThresholdViolationException - hackers.-(h.name.getBytes) - } - } catch { - case e: Exception => fail - } - self.reply((a.size, r.size)) - - case MULTIRANGE(hs) => - hs.foreach{ h: Hacker => - hackers.+(h.name.getBytes, h.zscore) - } - self.reply(hackers.zrange(0, -1)) - } - - def fail = throw new RuntimeException("Expected exception; to test fault-tolerance") -} - -import RedisStorageBackend._ - -@RunWith(classOf[JUnitRunner]) -class RedisPersistentSortedSetSpec extends - Spec with - ShouldMatchers with - BeforeAndAfterAll { - - override def beforeAll { - flushDB - println("** destroyed database") - } - - override def afterAll { - flushDB - println("** destroyed database") - } - - val h1 = Hacker("Alan kay", "1940") - val h2 = Hacker("Richard Stallman", "1953") - val h3 = Hacker("Yukihiro Matsumoto", "1965") - val h4 = Hacker("Claude Shannon", "1916") - val h5 = Hacker("Linus Torvalds", "1969") - val h6 = Hacker("Alan Turing", "1912") - - describe("Add and report cardinality of the set") { - val qa = actorOf[SortedSetActor] - qa.start - - it("should enter 6 hackers") { - qa !! ADD(h1) - qa !! ADD(h2) - qa !! ADD(h3) - qa !! ADD(h4) - qa !! ADD(h5) - qa !! ADD(h6) - (qa !! SIZE).get.asInstanceOf[Int] should equal(6) - } - - it("should fetch correct scores for hackers") { - (qa !! SCORE(h1)).get.asInstanceOf[Float] should equal(1940.0f) - (qa !! SCORE(h5)).get.asInstanceOf[Float] should equal(1969.0f) - (qa !! SCORE(h6)).get.asInstanceOf[Float] should equal(1912.0f) - } - - it("should fetch proper range") { - (qa !! RANGE(0, 4)).get.asInstanceOf[List[_]].size should equal(5) - (qa !! RANGE(0, 6)).get.asInstanceOf[List[_]].size should equal(6) - } - - it("should remove and throw exception for removing non-existent hackers") { - qa !! REMOVE(h2) - (qa !! SIZE).get.asInstanceOf[Int] should equal(5) - qa !! REMOVE(h3) - (qa !! SIZE).get.asInstanceOf[Int] should equal(4) - val h7 = Hacker("Paul Snively", "1952") - try { - qa !! 
REMOVE(h7) - } - catch { - case e: NoSuchElementException => - e.getMessage should endWith("not present") - } - } - - it("should change score for entering the same hacker name with diff score") { - (qa !! SIZE).get.asInstanceOf[Int] should equal(4) - - // same name as h6 - val h7 = Hacker("Alan Turing", "1992") - qa !! ADD(h7) - - // size remains same - (qa !! SIZE).get.asInstanceOf[Int] should equal(4) - - // score updated - (qa !! SCORE(h7)).get.asInstanceOf[Float] should equal(1992.0f) - } - } - - describe("Transaction semantics") { - it("should rollback on exception") { - val qa = actorOf[SortedSetActor] - qa.start - - (qa !! SIZE).get.asInstanceOf[Int] should equal(0) - val add = List(h1, h2, h3, h4) - val rem = List(h2) - (qa !! MULTI(add, rem)).get.asInstanceOf[Tuple2[Int, Int]] should equal((4,1)) - (qa !! SIZE).get.asInstanceOf[Int] should equal(3) - // size == 3 - - // add 2 more - val add1 = List(h5, h6) - - // remove 3 - val rem1 = List(h1, h3, h4, h5) - try { - qa !! MULTI(add1, rem1) - } catch { case e: RuntimeException => {} } - (qa !! SIZE).get.asInstanceOf[Int] should equal(3) - } - } - - describe("zrange") { - it ("should report proper range") { - val qa = actorOf[SortedSetActor] - qa.start - qa !! ADD(h1) - qa !! ADD(h2) - qa !! ADD(h3) - qa !! ADD(h4) - qa !! ADD(h5) - qa !! ADD(h6) - (qa !! SIZE).get.asInstanceOf[Int] should equal(6) - val l = (qa !! RANGE(0, 6)).get.asInstanceOf[List[(Array[Byte], Float)]] - l.map { case (e, s) => (new String(e), s) }.head should equal(("Alan Turing", 1912.0f)) - val h7 = Hacker("Alan Turing", "1992") - qa !! ADD(h7) - (qa !! SIZE).get.asInstanceOf[Int] should equal(6) - val m = (qa !! RANGE(0, 6)).get.asInstanceOf[List[(Array[Byte], Float)]] - m.map { case (e, s) => (new String(e), s) }.head should equal(("Claude Shannon", 1916.0f)) - } - - it ("should report proper range boundaries") { - val qa = actorOf[SortedSetActor] - qa.start - qa !! ADD(h1) - qa !! ADD(h2) - qa !! ADD(h3) - qa !! ADD(h4) - qa !! ADD(h5) - qa !! ADD(h6) - (qa !! SIZE).get.asInstanceOf[Int] should equal(6) - (qa !! RANGE(0, 5)).get.asInstanceOf[List[_]].size should equal(6) - (qa !! RANGE(0, 6)).get.asInstanceOf[List[_]].size should equal(6) - (qa !! RANGE(0, 3)).get.asInstanceOf[List[_]].size should equal(4) - (qa !! RANGE(0, 1)).get.asInstanceOf[List[_]].size should equal(2) - (qa !! RANGE(0, 0)).get.asInstanceOf[List[_]].size should equal(1) - (qa !! RANGE(3, 1)).get.asInstanceOf[List[_]].size should equal(0) - (qa !! RANGE(0, -1)).get.asInstanceOf[List[_]].size should equal(6) - (qa !! RANGE(0, -2)).get.asInstanceOf[List[_]].size should equal(5) - (qa !! RANGE(0, -4)).get.asInstanceOf[List[_]].size should equal(3) - (qa !! RANGE(-4, -1)).get.asInstanceOf[List[_]].size should equal(4) - } - } - - describe("zrange with equal values and equal score") { - it ("should report proper range") { - val qa = actorOf[SortedSetActor] - qa.start - - (qa !! SIZE).get.asInstanceOf[Int] should equal(0) - val add = List(h1, h2, h3, h4, h5, h6) - val rem = List(h2) - (qa !! MULTI(add, rem)).get.asInstanceOf[Tuple2[Int, Int]] should equal((6,1)) - (qa !! SIZE).get.asInstanceOf[Int] should equal(5) - - // has equal score as h6 - val h7 = Hacker("Debasish Ghosh", "1912") - - // has equal value as h6 - val h8 = Hacker("Alan Turing", "1992") - - val ret = (qa !!
MULTIRANGE(List(h7, h8))).get.asInstanceOf[List[(Array[Byte], Float)]] - ret.size should equal(6) - val m = collection.immutable.Map() ++ ret.map(e => (new String(e._1), e._2)) - m("Debasish Ghosh") should equal(1912f) - m("Alan Turing") should equal(1992f) - } - } -} diff --git a/akka-persistence/akka-persistence-redis/src/test/scala/RedisStorageBackendSpec.scala b/akka-persistence/akka-persistence-redis/src/test/scala/RedisStorageBackendSpec.scala deleted file mode 100644 index 699c4ea60e..0000000000 --- a/akka-persistence/akka-persistence-redis/src/test/scala/RedisStorageBackendSpec.scala +++ /dev/null @@ -1,246 +0,0 @@ -package akka.persistence.redis - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.BeforeAndAfterAll -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith - -import akka.serialization.Serializable -import akka.serialization.Serializer._ - -import sbinary._ -import sbinary.Operations._ -import sbinary.DefaultProtocol._ -import java.util.{Calendar, Date} - -import RedisStorageBackend._ - -@RunWith(classOf[JUnitRunner]) -class RedisStorageBackendSpec extends - Spec with - ShouldMatchers with - BeforeAndAfterAll { - - override def beforeAll { - flushDB - println("** destroyed database") - } - - override def afterAll { - flushDB - println("** destroyed database") - } - - describe("Store and query in maps") { - it("should enter 4 entries in redis for transaction T-1") { - insertMapStorageEntryFor("T-1", "debasish.company".getBytes, "anshinsoft".getBytes) - insertMapStorageEntryFor("T-1", "debasish.language".getBytes, "java".getBytes) - insertMapStorageEntryFor("T-1", "debasish.age".getBytes, "44".getBytes) - insertMapStorageEntryFor("T-1", "debasish.spouse".getBytes, "paramita".getBytes) - - getMapStorageSizeFor("T-1") should equal(4) - new String(getMapStorageEntryFor( - "T-1", "debasish.language".getBytes).get) should equal("java") - } - - it("should enter key/values for another transaction T-2") { - insertMapStorageEntryFor("T-2", "debasish.age".getBytes, "49".getBytes) - insertMapStorageEntryFor("T-2", "debasish.spouse".getBytes, "paramita".getBytes) - getMapStorageSizeFor("T-1") should equal(4) - getMapStorageSizeFor("T-2") should equal(2) - } - - it("should remove map storage for T-1 and T2") { - removeMapStorageFor("T-1") - removeMapStorageFor("T-2") - } - } - - describe("Store and query long value in maps") { - it("should enter 4 entries in redis for transaction T-1") { - val d = Calendar.getInstance.getTime.getTime - insertMapStorageEntryFor("T-11", "debasish".getBytes, - toByteArray[Long](d)) - - getMapStorageSizeFor("T-11") should equal(1) - fromByteArray[Long](getMapStorageEntryFor("T-11", "debasish".getBytes).get) should equal(d) - } - - it("should remove map storage for T-1 and T2") { - removeMapStorageFor("T-11") - } - } - - describe("Range query in maps") { - it("should enter 7 entries in redis for transaction T-5") { - insertMapStorageEntryFor("T-5", "trade.refno".getBytes, "R-123".getBytes) - insertMapStorageEntryFor("T-5", "trade.instrument".getBytes, "IBM".getBytes) - insertMapStorageEntryFor("T-5", "trade.type".getBytes, "BUY".getBytes) - insertMapStorageEntryFor("T-5", "trade.account".getBytes, "A-123".getBytes) - insertMapStorageEntryFor("T-5", "trade.amount".getBytes, "1000000".getBytes) - insertMapStorageEntryFor("T-5", "trade.quantity".getBytes, "1000".getBytes) - insertMapStorageEntryFor("T-5", "trade.broker".getBytes, "Nomura".getBytes) - getMapStorageSizeFor("T-5") should equal(7) - - 
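// The getMapStorageRangeFor calls below exercise its [start, finish)
// contract: with both bounds given, count is ignored and finish is
// exclusive; with only start given, a positive count caps the result and
// a non-positive count returns everything from start. A REPL-style sketch
// over the seven sorted T-5 keys (a plain TreeMap, values faked as 1,
// purely for illustration):
//   import scala.collection.immutable.TreeMap
//   val ks = TreeMap(Seq("trade.account", "trade.amount", "trade.broker",
//     "trade.instrument", "trade.quantity", "trade.refno",
//     "trade.type").map(_ -> 1): _*)
//   ks.range("trade.account", "trade.type").size  // 6: finish excluded
//   ks.from("trade.account").take(3).size         // 3: capped by count
//   ks.from("trade.account").size                 // 7: count == 0, take all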
getMapStorageRangeFor("T-5", - Some("trade.account".getBytes), - None, 3).map(e => (new String(e._1), new String(e._2))).size should equal(3) - - getMapStorageRangeFor("T-5", - Some("trade.account".getBytes), - Some("trade.type".getBytes), 3).map(e => (new String(e._1), new String(e._2))).size should equal(6) - - getMapStorageRangeFor("T-5", - Some("trade.account".getBytes), - Some("trade.type".getBytes), 0).map(e => (new String(e._1), new String(e._2))).size should equal(6) - - getMapStorageRangeFor("T-5", - Some("trade.account".getBytes), - None, 0).map(e => (new String(e._1), new String(e._2))).size should equal(7) - } - it("should remove map storage for T5") { - removeMapStorageFor("T-5") - } - } - - describe("Store and query objects in maps") { - import NameSerialization._ - it("should write a Name object and fetch it properly") { - val dtb = Calendar.getInstance.getTime - val n = Name(100, "debasish ghosh", "kolkata", dtb, Some(dtb)) - - insertMapStorageEntryFor("T-31", "debasish".getBytes, toByteArray[Name](n)) - getMapStorageSizeFor("T-31") should equal(1) - fromByteArray[Name](getMapStorageEntryFor("T-31", "debasish".getBytes).get) should equal(n) - } - it("should remove map storage for T31") { - removeMapStorageFor("T-31") - } - } - - describe("Store and query in vectors") { - it("should write 4 entries in a vector for transaction T-3") { - insertVectorStorageEntryFor("T-3", "debasish".getBytes) - insertVectorStorageEntryFor("T-3", "maulindu".getBytes) - insertVectorStorageEntryFor("T-3", "1200".getBytes) - - val dt = Calendar.getInstance.getTime.getTime - insertVectorStorageEntryFor("T-3", toByteArray[Long](dt)) - getVectorStorageSizeFor("T-3") should equal(4) - fromByteArray[Long](getVectorStorageEntryFor("T-3", 0)) should equal(dt) - getVectorStorageSizeFor("T-3") should equal(4) - } - } - - describe("Store and query objects in vectors") { - import NameSerialization._ - it("should write a Name object and fetch it properly") { - val dtb = Calendar.getInstance.getTime - val n = Name(100, "debasish ghosh", "kolkata", dtb, Some(dtb)) - - insertVectorStorageEntryFor("T-31", toByteArray[Name](n)) - getVectorStorageSizeFor("T-31") should equal(1) - fromByteArray[Name](getVectorStorageEntryFor("T-31", 0)) should equal(n) - } - } - - describe("Store and query in ref") { - import NameSerialization._ - it("should write 4 entries in 4 refs for transaction T-4") { - insertRefStorageFor("T-4", "debasish".getBytes) - insertRefStorageFor("T-4", "maulindu".getBytes) - - insertRefStorageFor("T-4", "1200".getBytes) - new String(getRefStorageFor("T-4").get) should equal("1200") - getRefStorageFor("T-44") should equal(None) - } - it("should write a Name object and fetch it properly") { - val dtb = Calendar.getInstance.getTime - val n = Name(100, "debasish ghosh", "kolkata", dtb, Some(dtb)) - insertRefStorageFor("T-4", toByteArray[Name](n)) - fromByteArray[Name](getRefStorageFor("T-4").get) should equal(n) - } - } - - describe("store and query in queue") { - it("should give proper queue semantics") { - enqueue("T-5", "alan kay".getBytes) - enqueue("T-5", "alan turing".getBytes) - enqueue("T-5", "richard stallman".getBytes) - enqueue("T-5", "yukihiro matsumoto".getBytes) - enqueue("T-5", "claude shannon".getBytes) - enqueue("T-5", "linus torvalds".getBytes) - - RedisStorageBackend.size("T-5") should equal(6) - - new String(dequeue("T-5").get) should equal("alan kay") - new String(dequeue("T-5").get) should equal("alan turing") - - RedisStorageBackend.size("T-5") should equal(4) - - val l = 
peek("T-5", 0, 3) - l.size should equal(3) - new String(l(0)) should equal("richard stallman") - new String(l(1)) should equal("yukihiro matsumoto") - new String(l(2)) should equal("claude shannon") - } - it("should write a Name object and fetch it properly") { - import NameSerialization._ - val dtb = Calendar.getInstance.getTime - val n = Name(100, "debasish ghosh", "kolkata", dtb, Some(dtb)) - enqueue("T-5-1", toByteArray[Name](n)) - fromByteArray[Name](peek("T-5-1", 0, 1).head) should equal(n) - fromByteArray[Name](dequeue("T-5-1").get) should equal(n) - } - } - - describe("store and query in sorted set") { - it("should give proper sorted set semantics") { - zadd("hackers", "1965", "yukihiro matsumoto".getBytes) - zadd("hackers", "1953", "richard stallman".getBytes) - zadd("hackers", "1916", "claude shannon".getBytes) - zadd("hackers", "1969", "linus torvalds".getBytes) - zadd("hackers", "1940", "alan kay".getBytes) - zadd("hackers", "1912", "alan turing".getBytes) - - zcard("hackers") should equal(6) - - zscore("hackers", "alan turing".getBytes).get should equal(1912.0f) - zscore("hackers", "richard stallman".getBytes).get should equal(1953.0f) - zscore("hackers", "claude shannon".getBytes).get should equal(1916.0f) - zscore("hackers", "linus torvalds".getBytes).get should equal(1969.0f) - - val s: List[Array[Byte]] = zrange("hackers", 0, 2) - s.size should equal(3) - s.map(new String(_)) should equal(List("alan turing", "claude shannon", "alan kay")) - - var sorted: List[String] = - List("alan turing", "claude shannon", "alan kay", "richard stallman", "yukihiro matsumoto", "linus torvalds") - - val t: List[Array[Byte]] = zrange("hackers", 0, -1) - t.size should equal(6) - t.map(new String(_)) should equal(sorted) - - val u: List[(Array[Byte], Float)] = zrangeWithScore("hackers", 0, -1) - u.size should equal(6) - u.map{ case (e, s) => new String(e) } should equal(sorted) - } - } -} - -object NameSerialization { - implicit object DateFormat extends Format[Date] { - def reads(in : Input) = - new Date(read[Long](in)) - - def writes(out: Output, value: Date) = - write[Long](out, value.getTime) - } - - case class Name(id: Int, name: String, - address: String, dateOfBirth: Date, dateDied: Option[Date]) - - implicit val NameFormat: Format[Name] = - asProduct5(Name)(Name.unapply(_).get) -} diff --git a/akka-persistence/akka-persistence-redis/src/test/scala/RedisStorageSpec.scala b/akka-persistence/akka-persistence-redis/src/test/scala/RedisStorageSpec.scala deleted file mode 100644 index 6ccce06ab3..0000000000 --- a/akka-persistence/akka-persistence-redis/src/test/scala/RedisStorageSpec.scala +++ /dev/null @@ -1,5 +0,0 @@ -package akka.persistence.redis - -import org.scalatest.junit.JUnitWrapperSuite - -class RedisStorageSpec extends JUnitWrapperSuite("akka.persistence.redis.RedisStorageTests", Thread.currentThread.getContextClassLoader) diff --git a/akka-persistence/akka-persistence-redis/src/test/scala/RedisTicket343Spec.scala b/akka-persistence/akka-persistence-redis/src/test/scala/RedisTicket343Spec.scala deleted file mode 100644 index 3af8c89a39..0000000000 --- a/akka-persistence/akka-persistence-redis/src/test/scala/RedisTicket343Spec.scala +++ /dev/null @@ -1,397 +0,0 @@ -package akka.persistence.redis - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach} -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith - -import akka.actor.{Actor} -import 
akka.config.Supervision.{OneForOneStrategy,Permanent} -import Actor._ -import akka.persistence.common.PersistentVector -import akka.stm._ -import akka.util.Logging - -import RedisStorageBackend._ - -case class GET(k: String) -case class SET(k: String, v: String) -case class REM(k: String) -case class CONTAINS(k: String) -case object MAP_SIZE -case class MSET(kvs: List[(String, String)]) -case class REMOVE_AFTER_PUT(kvsToAdd: List[(String, String)], ksToRem: List[String]) -case class CLEAR_AFTER_PUT(kvsToAdd: List[(String, String)]) -case class PUT_WITH_SLICE(kvsToAdd: List[(String, String)], start: String, cnt: Int) -case class PUT_REM_WITH_SLICE(kvsToAdd: List[(String, String)], ksToRem: List[String], start: String, cnt: Int) - -case class VADD(v: String) -case class VUPD(i: Int, v: String) -case class VUPD_AND_ABORT(i: Int, v: String) -case class VGET(i: Int) -case object VSIZE -case object VLAST -case object VFIRST -case class VLAST_AFTER_ADD(vsToAdd: List[String]) -case class VFIRST_AFTER_ADD(vsToAdd: List[String]) -case class VGET_AFTER_VADD(vsToAdd: List[String], isToFetch: List[Int]) -case class VADD_WITH_SLICE(vsToAdd: List[String], start: Int, cnt: Int) - -object Storage { - class RedisSampleMapStorage extends Actor { - self.lifeCycle = Permanent - val FOO_MAP = "akka.sample.map" - - private var fooMap = atomic { RedisStorage.getMap(FOO_MAP) } - - def receive = { - case SET(k, v) => - atomic { - fooMap += (k.getBytes, v.getBytes) - } - self.reply((k, v)) - - case GET(k) => - val v = atomic { - fooMap.get(k.getBytes) - } - self.reply(v.collect {case byte => new String(byte)}.getOrElse(k + " Not found")) - - case REM(k) => - val v = atomic { - fooMap -= k.getBytes - } - self.reply(k) - - case CONTAINS(k) => - val v = atomic { - fooMap contains k.getBytes - } - self.reply(v) - - case MAP_SIZE => - val v = atomic { - fooMap.size - } - self.reply(v) - - case MSET(kvs) => - atomic { - kvs.foreach {kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - } - self.reply(kvs.size) - - case REMOVE_AFTER_PUT(kvs2add, ks2rem) => - val v = - atomic { - kvs2add.foreach {kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - - ks2rem.foreach {k => - fooMap -= k.getBytes - } - fooMap.size - } - self.reply(v) - - case CLEAR_AFTER_PUT(kvs2add) => - atomic { - kvs2add.foreach {kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - fooMap.clear - } - self.reply(true) - - case PUT_WITH_SLICE(kvs2add, from, cnt) => - val v = - atomic { - kvs2add.foreach {kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - fooMap.slice(Some(from.getBytes), cnt) - } - self.reply(v: List[(Array[Byte], Array[Byte])]) - - case PUT_REM_WITH_SLICE(kvs2add, ks2rem, from, cnt) => - val v = - atomic { - kvs2add.foreach {kv => - fooMap += (kv._1.getBytes, kv._2.getBytes) - } - ks2rem.foreach {k => - fooMap -= k.getBytes - } - fooMap.slice(Some(from.getBytes), cnt) - } - self.reply(v: List[(Array[Byte], Array[Byte])]) - } - } - - class RedisSampleVectorStorage extends Actor { - self.lifeCycle = Permanent - val FOO_VECTOR = "akka.sample.vector" - - private var fooVector = atomic { RedisStorage.getVector(FOO_VECTOR) } - - def receive = { - case VADD(v) => - val size = - atomic { - fooVector + v.getBytes - fooVector length - } - self.reply(size) - - case VGET(index) => - val ind = - atomic { - fooVector get index - } - self.reply(ind) - - case VGET_AFTER_VADD(vs, is) => - val els = - atomic { - vs.foreach(fooVector + _.getBytes) - (is.foldRight(List[Array[Byte]]())(fooVector.get(_) :: _)).map(new String(_)) - } - 
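// Read-your-writes within one transaction is the point of ticket 343:
// the get() calls inside the atomic block above must observe the elements
// appended earlier in that same block. Because inserts prepend (Redis
// LPUSH), adding "a", "b", "c" to a vector that already holds four
// elements leaves index 0 -> "c", 1 -> "b", 2 -> "a", with the
// pre-existing elements shifted behind them; that ordering is exactly
// what the VGET_AFTER_VADD expectation in the spec below asserts.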
self.reply(els) - - case VUPD_AND_ABORT(index, value) => - val l = - atomic { - fooVector.update(index, value.getBytes) - // force fail - fooVector get 100 - } - self.reply(index) - - case VADD_WITH_SLICE(vs, s, c) => - val l = - atomic { - vs.foreach(fooVector + _.getBytes) - fooVector.slice(Some(s), None, c) - } - self.reply(l.map(new String(_))) - - case VLAST => - val l = atomic { fooVector last } - self.reply(l) - - case VFIRST => - val l = atomic { fooVector first } - self.reply(l) - - case VLAST_AFTER_ADD(vs) => - val l = - atomic { - vs.foreach(fooVector + _.getBytes) - fooVector last - } - self.reply(l) - - case VFIRST_AFTER_ADD(vs) => - val l = - atomic { - vs.foreach(fooVector + _.getBytes) - fooVector first - } - self.reply(l) - } - } -} - -import Storage._ - -@RunWith(classOf[JUnitRunner]) -class RedisTicket343Spec extends - Spec with - ShouldMatchers with - BeforeAndAfterAll with - BeforeAndAfterEach { - - override def beforeAll { - flushDB - println("** destroyed database") - } - - override def afterEach { - flushDB - println("** destroyed database") - } - - describe("Ticket 343 Issue #1") { - it("remove after put should work within the same transaction") { - val proc = actorOf[RedisSampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) - - (proc !! GET("dg")).getOrElse("Get failed") should equal("1") - (proc !! GET("mc")).getOrElse("Get failed") should equal("2") - (proc !! GET("nd")).getOrElse("Get failed") should equal("3") - - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) - - val add = List(("a", "1"), ("b", "2"), ("c", "3")) - val rem = List("a", "debasish") - (proc !! REMOVE_AFTER_PUT(add, rem)).getOrElse("REMOVE_AFTER_PUT failed") should equal(5) - - (proc !! GET("debasish")).getOrElse("debasish not found") should equal("debasish Not found") - (proc !! GET("a")).getOrElse("a not found") should equal("a Not found") - - (proc !! GET("b")).getOrElse("b not found") should equal("2") - - (proc !! CONTAINS("b")).getOrElse("b not found") should equal(true) - (proc !! CONTAINS("debasish")).getOrElse("debasish not found") should equal(false) - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(5) - proc.stop - } - } - - describe("Ticket 343 Issue #2") { - it("clear after put should work within the same transaction") { - val proc = actorOf[RedisSampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - val add = List(("a", "1"), ("b", "2"), ("c", "3")) - (proc !! CLEAR_AFTER_PUT(add)).getOrElse("CLEAR_AFTER_PUT failed") should equal(true) - - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(0) - proc.stop - } - } - - describe("Ticket 343 Issue #3") { - it("map size should change after the transaction") { - val proc = actorOf[RedisSampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - (proc !! 
MAP_SIZE).getOrElse("Size failed") should equal(1) - - (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) - - (proc !! GET("dg")).getOrElse("Get failed") should equal("1") - (proc !! GET("mc")).getOrElse("Get failed") should equal("2") - (proc !! GET("nd")).getOrElse("Get failed") should equal("3") - proc.stop - } - } - - describe("slice test") { - it("should pass") { - val proc = actorOf[RedisSampleMapStorage] - proc.start - - (proc !! SET("debasish", "anshinsoft")).getOrElse("Set failed") should equal(("debasish", "anshinsoft")) - (proc !! GET("debasish")).getOrElse("Get failed") should equal("anshinsoft") - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(1) - - (proc !! MSET(List(("dg", "1"), ("mc", "2"), ("nd", "3")))).getOrElse("Mset failed") should equal(3) - (proc !! MAP_SIZE).getOrElse("Size failed") should equal(4) - - (proc !! PUT_WITH_SLICE(List(("ec", "1"), ("tb", "2"), ("mc", "10")), "dg", 3)).get.asInstanceOf[List[(Array[Byte], Array[Byte])]].map { case (k, v) => (new String(k), new String(v)) } should equal(List(("dg", "1"), ("ec", "1"), ("mc", "10"))) - - (proc !! PUT_REM_WITH_SLICE(List(("fc", "1"), ("gb", "2"), ("xy", "10")), List("tb", "fc"), "dg", 5)).get.asInstanceOf[List[(Array[Byte], Array[Byte])]].map { case (k, v) => (new String(k), new String(v)) } should equal(List(("dg", "1"), ("ec", "1"), ("gb", "2"), ("mc", "10"), ("nd", "3"))) - proc.stop - } - } - - describe("Ticket 343 Issue #4") { - it("vector get should not ignore elements that were in vector before transaction") { - val proc = actorOf[RedisSampleVectorStorage] - proc.start - - // add 4 elements in separate transactions - (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) - (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) - (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) - (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) - - new String((proc !! VGET(0)).get.asInstanceOf[Array[Byte]] ) should equal("nilanjan") - new String((proc !! VGET(1)).get.asInstanceOf[Array[Byte]] ) should equal("ramanendu") - new String((proc !! VGET(2)).get.asInstanceOf[Array[Byte]] ) should equal("maulindu") - new String((proc !! VGET(3)).get.asInstanceOf[Array[Byte]] ) should equal("debasish") - - // now add 3 more and do gets in the same transaction - (proc !! VGET_AFTER_VADD(List("a", "b", "c"), List(0, 2, 4))).get.asInstanceOf[List[String]] should equal(List("c", "a", "ramanendu")) - proc.stop - } - } - - describe("Ticket 343 Issue #6") { - it("vector update should not ignore transaction") { - val proc = actorOf[RedisSampleVectorStorage] - proc.start - - // add 4 elements in separate transactions - (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) - (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) - (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) - (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) - - evaluating { - (proc !! VUPD_AND_ABORT(0, "virat")).getOrElse("VUPD_AND_ABORT failed") - } should produce [Exception] - - // update aborts and hence values will remain unchanged - new String((proc !! 
VGET(0)).get.asInstanceOf[Array[Byte]] ) should equal("nilanjan") - proc.stop - } - } - - describe("Ticket 343 Issue #5") { - it("vector slice() should not ignore elements added in current transaction") { - val proc = actorOf[RedisSampleVectorStorage] - proc.start - - // add 4 elements in separate transactions - (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) - (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) - (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) - (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) - - // slice with no new elements added in current transaction - (proc !! VADD_WITH_SLICE(List(), 2, 2)).getOrElse("VADD_WITH_SLICE failed") should equal(Vector("maulindu", "debasish")) - - // slice with new elements added in current transaction - (proc !! VADD_WITH_SLICE(List("a", "b", "c", "d"), 2, 4)).getOrElse("VADD_WITH_SLICE failed") should equal(Vector("b", "a", "nilanjan", "ramanendu")) - proc.stop - } - } - - describe("Miscellaneous vector ops") { - it("vector slice() should not ignore elements added in current transaction") { - val proc = actorOf[RedisSampleVectorStorage] - proc.start - - // add 4 elements in separate transactions - (proc !! VADD("debasish")).getOrElse("VADD failed") should equal(1) - (proc !! VADD("maulindu")).getOrElse("VADD failed") should equal(2) - (proc !! VADD("ramanendu")).getOrElse("VADD failed") should equal(3) - (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) - - new String((proc !! VLAST).getOrElse("VLAST failed").asInstanceOf[Array[Byte]]) should equal("debasish") - new String((proc !! VFIRST).getOrElse("VFIRST failed").asInstanceOf[Array[Byte]]) should equal("nilanjan") - new String((proc !! VLAST_AFTER_ADD(List("kausik", "tarun"))).getOrElse("VLAST_AFTER_ADD failed").asInstanceOf[Array[Byte]]) should equal("debasish") - new String((proc !! VFIRST_AFTER_ADD(List("kausik", "tarun"))).getOrElse("VFIRST_AFTER_ADD failed").asInstanceOf[Array[Byte]]) should equal("tarun") - proc.stop - } - } -} diff --git a/akka-persistence/akka-persistence-redis/src/test/scala/RedisTicket513Spec.scala b/akka-persistence/akka-persistence-redis/src/test/scala/RedisTicket513Spec.scala deleted file mode 100644 index a09fe54b16..0000000000 --- a/akka-persistence/akka-persistence-redis/src/test/scala/RedisTicket513Spec.scala +++ /dev/null @@ -1,70 +0,0 @@ -package akka.persistence.redis - -import org.scalatest.Spec -import org.scalatest.Assertions -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.BeforeAndAfterAll -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith - -import akka.actor.{Actor, ActorRef} -import Actor._ -import akka.stm._ - -/** - * A persistent actor based on Redis sortedset storage. - *
    - * Needs a running Redis server. - * @author Debasish Ghosh - */ - -case class AddEmail(email: String, value: String) -case class GetAll(email: String) - -class MySortedSet extends Actor { - - def receive = { case message => atomic { atomicReceive(message) } } - - def atomicReceive: Receive = { - case AddEmail(userEmail, value) => { - val registryId = "userValues:%s".format(userEmail) - val storageSet = RedisStorage.getSortedSet(registryId) - storageSet.add(value.getBytes, System.currentTimeMillis.toFloat) - self.reply(storageSet.size) - } - case GetAll(userEmail) => { - val registryId = "userValues:%s".format(userEmail) - val storageSet = RedisStorage.getSortedSet(registryId) - self.reply(storageSet.zrange(0, -1)) - } - } -} - -import RedisStorageBackend._ - -@RunWith(classOf[JUnitRunner]) -class RedisTicket513Spec extends - Spec with - ShouldMatchers with - BeforeAndAfterAll { - - override def beforeAll { - flushDB - println("** destroyed database") - } - - override def afterAll { - flushDB - println("** destroyed database") - } - - describe("insert into user specific set") { - val a = actorOf[MySortedSet] - a.start - it("should work with same score value") { - (a !! AddEmail("test.user@gmail.com", "foo")).get should equal(1) - (a !! AddEmail("test.user@gmail.com", "bar")).get should equal(2) - (a !! GetAll("test.user@gmail.com")).get.asInstanceOf[List[_]].size should equal(2) - } - } -} diff --git a/akka-persistence/akka-persistence-riak/src/main/scala/akka/RiakStorage.scala b/akka-persistence/akka-persistence-riak/src/main/scala/akka/RiakStorage.scala deleted file mode 100644 index a155144b30..0000000000 --- a/akka-persistence/akka-persistence-riak/src/main/scala/akka/RiakStorage.scala +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.riak - -import akka.actor.{newUuid} -import akka.stm._ -import akka.persistence.common._ - - -object RiakStorage extends Storage { - - type ElementType = Array[Byte] - def newMap: PersistentMap[ElementType, ElementType] = newMap(newUuid.toString) - def newVector: PersistentVector[ElementType] = newVector(newUuid.toString) - def newRef: PersistentRef[ElementType] = newRef(newUuid.toString) - override def newQueue: PersistentQueue[ElementType] = newQueue(newUuid.toString) - - def getMap(id: String): PersistentMap[ElementType, ElementType] = newMap(id) - def getVector(id: String): PersistentVector[ElementType] = newVector(id) - def getRef(id: String): PersistentRef[ElementType] = newRef(id) - override def getQueue(id: String): PersistentQueue[ElementType] = newQueue(id) - - def newMap(id: String): PersistentMap[ElementType, ElementType] = new RiakPersistentMap(id) - def newVector(id: String): PersistentVector[ElementType] = new RiakPersistentVector(id) - def newRef(id: String): PersistentRef[ElementType] = new RiakPersistentRef(id) - override def newQueue(id:String): PersistentQueue[ElementType] = new RiakPersistentQueue(id) -} - - -class RiakPersistentMap(id: String) extends PersistentMapBinary { - val uuid = id - val storage = RiakStorageBackend -} - - -class RiakPersistentVector(id: String) extends PersistentVector[Array[Byte]] { - val uuid = id - val storage = RiakStorageBackend -} - -class RiakPersistentRef(id: String) extends PersistentRef[Array[Byte]] { - val uuid = id - val storage = RiakStorageBackend -} - -class RiakPersistentQueue(id: String) extends PersistentQueue[Array[Byte]] { - val uuid = id - val storage = RiakStorageBackend -} diff --git 
a/akka-persistence/akka-persistence-riak/src/main/scala/akka/RiakStorageBackend.scala b/akka-persistence/akka-persistence-riak/src/main/scala/akka/RiakStorageBackend.scala deleted file mode 100644 index e18890e93e..0000000000 --- a/akka-persistence/akka-persistence-riak/src/main/scala/akka/RiakStorageBackend.scala +++ /dev/null @@ -1,123 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.riak - -import akka.persistence.common._ -import akka.config.Config.config - -import java.lang.String -import collection.JavaConversions -import collection.Map -import java.util.{Map => JMap} -import akka.persistence.common.PersistentMapBinary.COrdering._ -import collection.immutable._ -import com.google.protobuf.ByteString -import com.trifork.riak.{RequestMeta, RiakObject, RiakClient} - - -private[akka] object RiakStorageBackend extends CommonStorageBackend { - val refBucket = config.getString("akka.persistence.riak.bucket.ref", "Refs") - val mapBucket = config.getString("akka.persistence.riak.bucket.map", "Maps") - val vectorBucket = config.getString("akka.persistence.riak.bucket.vector", "Vectors") - val queueBucket = config.getString("akka.persistence.riak.bucket.queue", "Queues") - val clientHost = config.getString("akka.persistence.riak.client.host", "localhost") - val clientPort = config.getInt("akka.persistence.riak.client.port", 8087) - val riakClient: RiakClient = new RiakClient(clientHost, clientPort); - - import CommonStorageBackendAccess._ - import KVStorageBackend._ - import RiakAccess._ - - - val refs = new RiakAccess(refBucket) - val maps = new RiakAccess(mapBucket) - val vectors = new RiakAccess(vectorBucket) - val queues = new RiakAccess(queueBucket) - - def refAccess = refs - - def mapAccess = maps - - def vectorAccess = vectors - - def queueAccess = queues - - object RiakAccess { - implicit def byteArrayToByteString(ary: Array[Byte]): ByteString = { - ByteString.copyFrom(ary) - } - - implicit def byteStringToByteArray(bs: ByteString): Array[Byte] = { - bs.toByteArray - } - - implicit def stringToByteString(bucket: String): ByteString = { - ByteString.copyFromUtf8(bucket) - } - - } - - - class RiakAccess(val bucket: String) extends KVStorageBackendAccess { - //http://www.mail-archive.com/riak-users@lists.basho.com/msg01013.html - val quorum: Int = 0xfffffffd - val one: Int = 0xfffffffe - val all: Int = 0xfffffffc - val default: Int = 0xfffffffb - - def put(key: Array[Byte], value: Array[Byte]) = { - val objs: Array[RiakObject] = riakClient.fetch(bucket, key, quorum) - objs.size match { - case 0 => riakClient.store(new RiakObject(bucket, key, value), new RequestMeta().w(quorum).dw(quorum)) - case _ => riakClient.store(new RiakObject(objs(0).getVclock, bucket, key, value), new RequestMeta().w(quorum).dw(quorum)) - } - } - - def get(key: Array[Byte]): Array[Byte] = { - val objs = riakClient.fetch(bucket, key, quorum) - objs.size match { - case 0 => null; - case _ => objs(0).getValue.isEmpty match { - case true => null - case false => objs(0).getValue - } - } - } - - def get(key: Array[Byte], default: Array[Byte]): Array[Byte] = { - Option(get(key)) match { - case Some(value) => value - case None => default - } - } - - def getAll(keys: Iterable[Array[Byte]]): Map[Array[Byte], Array[Byte]] = { - var result = new HashMap[Array[Byte], Array[Byte]] - keys.foreach{ - key => - val value = get(key) - Option(value) match { - case Some(value) => result += key -> value - case None => () - } - } - result - } - - def delete(key: Array[Byte]) = { - 
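// The quorum passed here (and in put/get above) is not a replica count:
// Riak's protocol-buffers API encodes the symbolic settings as magic
// unsigned ints, per the mailing-list thread linked above
// (one = UINT32_MAX - 1, quorum = UINT32_MAX - 2, all = UINT32_MAX - 3,
// default = UINT32_MAX - 4).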
riakClient.delete(bucket, key, quorum) - } - - def drop() { - val keys = riakClient.listKeys(bucket) - JavaConversions.asScalaIterable(keys) foreach { - delete(_) - } - keys.close - } - } - - -} diff --git a/akka-persistence/akka-persistence-riak/src/test/scala/RiakStorageBackendCompatibilityTest.scala b/akka-persistence/akka-persistence-riak/src/test/scala/RiakStorageBackendCompatibilityTest.scala deleted file mode 100644 index 66cf126bb6..0000000000 --- a/akka-persistence/akka-persistence-riak/src/test/scala/RiakStorageBackendCompatibilityTest.scala +++ /dev/null @@ -1,49 +0,0 @@ -package akka.persistence.riak - - -import org.junit.runner.RunWith -import org.scalatest.junit.JUnitRunner -import akka.persistence.common.{QueueStorageBackendTest, VectorStorageBackendTest, MapStorageBackendTest, RefStorageBackendTest} - -@RunWith(classOf[JUnitRunner]) -class RiakRefStorageBackendTestIntegration extends RefStorageBackendTest { - def dropRefs = { - RiakStorageBackend.refAccess.drop - } - - - def storage = RiakStorageBackend -} - -@RunWith(classOf[JUnitRunner]) -class RiakMapStorageBackendTestIntegration extends MapStorageBackendTest { - def dropMaps = { - RiakStorageBackend.mapAccess.drop - } - - - def storage = RiakStorageBackend -} - -@RunWith(classOf[JUnitRunner]) -class RiakVectorStorageBackendTestIntegration extends VectorStorageBackendTest { - def dropVectors = { - RiakStorageBackend.vectorAccess.drop - } - - - def storage = RiakStorageBackend -} - - -@RunWith(classOf[JUnitRunner]) -class RiakQueueStorageBackendTestIntegration extends QueueStorageBackendTest { - def dropQueues = { - RiakStorageBackend.queueAccess.drop - } - - - def storage = RiakStorageBackend -} - - diff --git a/akka-persistence/akka-persistence-riak/src/test/scala/RiakTicket343TestIntegration.scala b/akka-persistence/akka-persistence-riak/src/test/scala/RiakTicket343TestIntegration.scala deleted file mode 100644 index ce9ead7f14..0000000000 --- a/akka-persistence/akka-persistence-riak/src/test/scala/RiakTicket343TestIntegration.scala +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.riak - - -import org.junit.runner.RunWith -import org.scalatest.junit.JUnitRunner -import akka.persistence.common._ - -@RunWith(classOf[JUnitRunner]) -class RiakTicket343TestIntegration extends Ticket343Test { - def dropMapsAndVectors: Unit = { - RiakStorageBackend.vectorAccess.drop - RiakStorageBackend.mapAccess.drop - } - - def getVector: (String) => PersistentVector[Array[Byte]] = RiakStorage.getVector - - def getMap: (String) => PersistentMap[Array[Byte], Array[Byte]] = RiakStorage.getMap - -} diff --git a/akka-persistence/akka-persistence-simpledb/src/main/scala/akka/SimpledbStorage.scala b/akka-persistence/akka-persistence-simpledb/src/main/scala/akka/SimpledbStorage.scala deleted file mode 100644 index c8ff43d633..0000000000 --- a/akka-persistence/akka-persistence-simpledb/src/main/scala/akka/SimpledbStorage.scala +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.simpledb - -import akka.actor.{newUuid} -import akka.stm._ -import akka.persistence.common._ - - -object SimpledbStorage extends Storage { - - type ElementType = Array[Byte] - def newMap: PersistentMap[ElementType, ElementType] = newMap(newUuid.toString) - def newVector: PersistentVector[ElementType] = newVector(newUuid.toString) - def newRef: PersistentRef[ElementType] = newRef(newUuid.toString) - override def newQueue: 
PersistentQueue[ElementType] = newQueue(newUuid.toString) - - def getMap(id: String): PersistentMap[ElementType, ElementType] = newMap(id) - def getVector(id: String): PersistentVector[ElementType] = newVector(id) - def getRef(id: String): PersistentRef[ElementType] = newRef(id) - override def getQueue(id: String): PersistentQueue[ElementType] = newQueue(id) - - def newMap(id: String): PersistentMap[ElementType, ElementType] = new SimpledbPersistentMap(id) - def newVector(id: String): PersistentVector[ElementType] = new SimpledbPersistentVector(id) - def newRef(id: String): PersistentRef[ElementType] = new SimpledbPersistentRef(id) - override def newQueue(id:String): PersistentQueue[ElementType] = new SimpledbPersistentQueue(id) -} - - -class SimpledbPersistentMap(id: String) extends PersistentMapBinary { - val uuid = id - val storage = SimpledbStorageBackend -} - - -class SimpledbPersistentVector(id: String) extends PersistentVector[Array[Byte]] { - val uuid = id - val storage = SimpledbStorageBackend -} - -class SimpledbPersistentRef(id: String) extends PersistentRef[Array[Byte]] { - val uuid = id - val storage = SimpledbStorageBackend -} - -class SimpledbPersistentQueue(id: String) extends PersistentQueue[Array[Byte]] { - val uuid = id - val storage = SimpledbStorageBackend -} diff --git a/akka-persistence/akka-persistence-simpledb/src/main/scala/akka/SimpledbStorageBackend.scala b/akka-persistence/akka-persistence-simpledb/src/main/scala/akka/SimpledbStorageBackend.scala deleted file mode 100644 index 3addda797f..0000000000 --- a/akka-persistence/akka-persistence-simpledb/src/main/scala/akka/SimpledbStorageBackend.scala +++ /dev/null @@ -1,295 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.simpledb - -import akka.persistence.common._ -import akka.config.Config.config -import java.lang.String -import java.util.{List => JList, ArrayList => JAList} - -import collection.immutable.{HashMap, Iterable} -import com.amazonaws.auth.BasicAWSCredentials -import com.amazonaws.services.simpledb.AmazonSimpleDBClient -import com.amazonaws.services.simpledb.model._ -import collection.{JavaConversions, Map} -import collection.mutable.{ArrayBuffer, HashMap => MMap} -import com.amazonaws.{Protocol, ClientConfiguration} - -private[akka] object SimpledbStorageBackend extends CommonStorageBackend { - - import org.apache.commons.codec.binary.Base64 - import KVStorageBackend._ - - val seperator = "\r\n" - val seperatorBytes = seperator.getBytes("UTF-8") - val sizeAtt = "size" - val ownerAtt = "owner" - val base64 = new Base64(1024, seperatorBytes, true) - val base64key = new Base64(1024, Array.empty[Byte], true) - val id = config.getString("akka.persistence.simpledb.account.id").getOrElse{ - val e = new IllegalStateException("You must provide an AWS id") - log.error(e, "You Must Provide an AWS id to use the SimpledbStorageBackend") - throw e - } - val secretKey = config.getString("akka.persistence.simpledb.account.secretKey").getOrElse{ - val e = new IllegalStateException("You must provide an AWS secretKey") - log.error(e, "You Must Provide an AWS secretKey to use the SimpledbStorageBackend") - throw e - } - val refDomain = config.getString("akka.persistence.simpledb.domain.ref", "ref") - val mapDomain = config.getString("akka.persistence.simpledb.domain.map", "map") - val queueDomain = config.getString("akka.persistence.simpledb.domain.queue", "queue") - val vectorDomain = config.getString("akka.persistence.simpledb.domain.vector", "vector") - val credentials = 
new BasicAWSCredentials(id, secretKey); - val clientConfig = new ClientConfiguration() - for (i <- config.getInt("akka.persistence.simpledb.client.timeout")) { - clientConfig.setConnectionTimeout(i) - } - for (i <- config.getInt("akka.persistence.simpledb.client.maxconnections")) { - clientConfig.setMaxConnections(i) - } - clientConfig.setMaxErrorRetry(config.getInt("akka.persistence.simpledb.client.maxretries", 10)) - - for (s <- config.getString("akka.persistence.simpledb.client.protocol")) { - clientConfig.setProtocol(Protocol.valueOf(s)) - } - for (i <- config.getInt("akka.persistence.simpledb.client.sockettimeout")) { - clientConfig.setSocketTimeout(i) - } - for {s <- config.getInt("akka.persistence.simpledb.client.sendbuffer") - r <- config.getInt("akka.persistence.simpledb.client.receivebuffer")} { - clientConfig.setSocketBufferSizeHints(s, r) - } - - for (s <- config.getString("akka.persistence.simpledb.client.useragent")) { - clientConfig.setUserAgent(s) - } - - val client = new AmazonSimpleDBClient(credentials, clientConfig) - - def queueAccess = queue - - def mapAccess = map - - def vectorAccess = vector - - def refAccess = ref - - val queue = new SimpledbAccess(queueDomain) - - val map = new SimpledbAccess(mapDomain) - - val vector = new SimpledbAccess(vectorDomain) - - val ref = new SimpledbAccess(refDomain) - - private[akka] class SimpledbAccess(val domainName: String) extends KVStorageBackendAccess { - var created = false - - def getClient(): AmazonSimpleDBClient = { - if (!created) { - client.createDomain(new CreateDomainRequest(domainName)) - created = true - } - client - } - - - def drop(): Unit = { - created = false - client.deleteDomain(new DeleteDomainRequest(domainName)) - } - - def delete(key: Array[Byte]): Unit = getClient.deleteAttributes(new DeleteAttributesRequest(domainName, encodeAndValidateKey(key))) - - override def getAll(keys: Iterable[Array[Byte]]): Map[Array[Byte], Array[Byte]] = { - - var map = new HashMap[Array[Byte], Array[Byte]] - - GetBatcher(domainName, 20).addItems(keys).getRequests foreach { - req => { - var res = getClient.select(req) - var continue = true - do { - JavaConversions.asScalaIterable(res.getItems) foreach { - item => map += (base64key.decode(item.getName) -> recomposeValue(item.getAttributes).get) - } - if (res.getNextToken ne null) { - res = getClient.select(req.withNextToken(res.getNextToken)) - } else { - continue = false - } - } while (continue == true) - } - } - map - } - - case class GetBatcher(domain: String, maxItems: Int) { - - val reqs = new ArrayBuffer[SelectRequest] - var currentItems = new ArrayBuffer[String] - var items = 0 - - def addItems(items: Iterable[Array[Byte]]): GetBatcher = { - items foreach(addItem(_)) - this - } - - def addItem(item: Array[Byte]) = { - if ((items + 1 > maxItems)) { - addReq - } - currentItems += (encodeAndValidateKey(item)) - items += 1 - } - - private def addReq() { - items = 0 - reqs += new SelectRequest(select, true) - currentItems = new ArrayBuffer[String] - } - - def getRequests() = { - if (items > 0) { - addReq - } - reqs - } - - - def select(): String = { - val in = currentItems.reduceLeft[String] { - (acc, key) => { - acc + "', '" + key - } - } - - "select * from " + domainName + " where itemName() in ('" + in + "')" - } - - } - - - def get(key: Array[Byte]) = get(key, null) - - def get(key: Array[Byte], default: Array[Byte]): Array[Byte] = { - val req = new GetAttributesRequest(domainName, encodeAndValidateKey(key)).withConsistentRead(true) - val resp = getClient.getAttributes(req) 
- recomposeValue(resp.getAttributes) match { - case Some(value) => value - case None => default - } - } - - - override def put(key: Array[Byte], value: Array[Byte]) = { - val req = new PutAttributesRequest(domainName, encodeAndValidateKey(key), decomposeValue(value)) - getClient.putAttributes(req) - } - - - override def putAll(owner: String, keyValues: Iterable[(Array[Byte], Array[Byte])]) = { - val items = keyValues.foldLeft(new ArrayBuffer[ReplaceableItem]()) { - (jal, kv) => kv match { - case (key, value) => { - jal += (new ReplaceableItem(encodeAndValidateKey(getKey(owner, key)), decomposeValue(value))) - } - } - } - - PutBatcher(domainName, 25, 1000).addItems(items).getRequests foreach (getClient.batchPutAttributes(_)) - - } - - - case class PutBatcher(domain: String, maxItems: Int, maxAttributes: Int) { - - val reqs = new ArrayBuffer[BatchPutAttributesRequest] - var currentItems = new JAList[ReplaceableItem]() - var items = 0 - var atts = 0 - - def addItems(items: Seq[ReplaceableItem]): PutBatcher = { - items foreach(addItem(_)) - this - } - - def addItem(item: ReplaceableItem) = { - if ((items + 1 > maxItems) || (atts + item.getAttributes.size > maxAttributes)) { - addReq - } - currentItems.add(item) - items += 1 - atts += item.getAttributes.size - } - - private def addReq() { - items = 0 - atts = 0 - reqs += new BatchPutAttributesRequest(domain, currentItems) - currentItems = new JAList[ReplaceableItem]() - } - - def getRequests() = { - if (items > 0) { - addReq - } - reqs - } - - } - - def encodeAndValidateKey(key: Array[Byte]): String = { - val keystr = base64key.encodeToString(key) - if (keystr.size > 1024) { - throw new IllegalArgumentException("encoded key was longer than 1024 bytes (or 768 bytes unencoded)") - } - keystr - } - - def decomposeValue(value: Array[Byte]): JList[ReplaceableAttribute] = { - val encoded = base64.encodeToString(value) - val strings = encoded.split(seperator) - if (strings.size > 255) { - throw new IllegalArgumentException("The decomposed value is larger than 255K (or 195840 bytes unencoded)") - } - - val list: JAList[ReplaceableAttribute] = strings.zipWithIndex.foldLeft(new JAList[ReplaceableAttribute]) { - (list, zip) => { - zip match { - case (encode, index) => { - list.add(new ReplaceableAttribute(index.toString, encode, true)) - list - } - } - } - } - list.add(new ReplaceableAttribute(sizeAtt, list.size.toString, true)) - list - } - - def recomposeValue(atts: JList[Attribute]): Option[Array[Byte]] = { - val itemSnapshot = JavaConversions.asScalaIterable(atts).foldLeft(new MMap[String, String]) { - (map, att) => { - map += (att.getName -> att.getValue) - } - } - itemSnapshot.get(sizeAtt) match { - case Some(strSize) => { - val size = Integer.parseInt(strSize) - val encoded = (0 until size).map(_.toString).map(itemSnapshot.get(_).get).reduceLeft[String] { - (acc, str) => acc + seperator + str - } - Some(base64.decode(encoded)) - } - case None => None - } - } - - } - - -}
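The two IllegalArgumentException limits above are SimpleDB's item constraints worked backwards: an item holds at most 256 attributes of at most 1024 bytes each. One attribute is reserved for the chunk count (sizeAtt), leaving 255 chunks of 1024 base64 characters, and 255 x 1024 x 3/4 = 195840 raw bytes; likewise a 768-byte key grows to exactly the 1024-character item-name ceiling under base64. A self-contained round-trip of the chunking idea, using java.util.Base64 rather than the Commons Codec instances above (names are illustrative):

// Round-trip of the chunking scheme: split the base64 text into
// attribute-sized pieces plus a "size" bookkeeping entry, like sizeAtt.
object ChunkedValueSketch {
  private val ChunkLen = 1024 // one SimpleDB attribute value per chunk

  def decompose(value: Array[Byte]): Map[String, String] = {
    val encoded = java.util.Base64.getEncoder.encodeToString(value)
    val chunks = encoded.grouped(ChunkLen).toVector
    require(chunks.size <= 255, "value too large for a single SimpleDB item")
    chunks.zipWithIndex.map { case (c, i) => i.toString -> c }.toMap +
      ("size" -> chunks.size.toString)
  }

  def recompose(attrs: Map[String, String]): Option[Array[Byte]] =
    attrs.get("size").map { s =>
      val encoded = (0 until s.toInt).map(i => attrs(i.toString)).mkString
      java.util.Base64.getDecoder.decode(encoded)
    }
}

diff --git a/akka-persistence/akka-persistence-simpledb/src/test/scala/SimpledbStorageBackendCompatibilityTest.scala b/akka-persistence/akka-persistence-simpledb/src/test/scala/SimpledbStorageBackendCompatibilityTest.scala deleted file mode 100644 index 3e2df27160..0000000000 --- a/akka-persistence/akka-persistence-simpledb/src/test/scala/SimpledbStorageBackendCompatibilityTest.scala +++ /dev/null @@ -1,49 +0,0 @@ -package akka.persistence.simpledb - - -import org.junit.runner.RunWith -import org.scalatest.junit.JUnitRunner -import akka.persistence.common.{QueueStorageBackendTest,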
VectorStorageBackendTest, MapStorageBackendTest, RefStorageBackendTest} - -@RunWith(classOf[JUnitRunner]) -class SimpledbRefStorageBackendTestIntegration extends RefStorageBackendTest { - def dropRefs = { - SimpledbStorageBackend.refAccess.drop - } - - - def storage = SimpledbStorageBackend -} - -@RunWith(classOf[JUnitRunner]) -class SimpledbMapStorageBackendTestIntegration extends MapStorageBackendTest { - def dropMaps = { - SimpledbStorageBackend.mapAccess.drop - } - - - def storage = SimpledbStorageBackend -} - -@RunWith(classOf[JUnitRunner]) -class SimpledbVectorStorageBackendTestIntegration extends VectorStorageBackendTest { - def dropVectors = { - SimpledbStorageBackend.vectorAccess.drop - } - - - def storage = SimpledbStorageBackend -} - - -@RunWith(classOf[JUnitRunner]) -class SimpledbQueueStorageBackendTestIntegration extends QueueStorageBackendTest { - def dropQueues = { - SimpledbStorageBackend.queueAccess.drop - } - - - def storage = SimpledbStorageBackend -} - - diff --git a/akka-persistence/akka-persistence-simpledb/src/test/scala/SimpledbTestIntegration.scala b/akka-persistence/akka-persistence-simpledb/src/test/scala/SimpledbTestIntegration.scala deleted file mode 100644 index 7dce059757..0000000000 --- a/akka-persistence/akka-persistence-simpledb/src/test/scala/SimpledbTestIntegration.scala +++ /dev/null @@ -1,52 +0,0 @@ -package akka.persistence.simpledb - - -import org.junit.runner.RunWith -import org.scalatest.junit.JUnitRunner -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.{BeforeAndAfterEach, Spec} - -@RunWith(classOf[JUnitRunner]) -class SimpledbTestIntegration extends Spec with ShouldMatchers with BeforeAndAfterEach { - import SimpledbStorageBackend._ - - - describe("the limitations of the simpledb storage backend") { - it("should store up to 255K per item base 64 encoded with a name+key length <= 1024 bytes base64 encoded") { - val name = "123456" - val keysize: Int = 758 - log.info("key:" + keysize) - val key = new Array[Byte](keysize) - val valsize: Int = 195840 - log.info("value:" + valsize) - - val value = new Array[Byte](valsize) - mapAccess.put(name, key, value) - val result = mapAccess.get(name, key, Array.empty[Byte]) - result.size should be(value.size) - result should be(value) - } - - it("should not accept a name+key longer that 1024 bytes base64 encoded") { - val name = "fail" - val key = new Array[Byte](2048) - val value = new Array[Byte](1) - evaluating { - mapAccess.put(name, key, value) - } should produce[IllegalArgumentException] - } - - it("should not accept a value larger than 255K base 64 encoded") { - val name = "failValue" - val key = "failKey".getBytes - val value = new Array[Byte](1024 * 512) - evaluating { - mapAccess.put(name, key, value) - } should produce[IllegalArgumentException] - } - } - - override protected def beforeEach(): Unit = { - mapAccess.drop - } -} \ No newline at end of file diff --git a/akka-persistence/akka-persistence-simpledb/src/test/scala/SimpledbTicket343TestIntegration.scala b/akka-persistence/akka-persistence-simpledb/src/test/scala/SimpledbTicket343TestIntegration.scala deleted file mode 100644 index 6fc1e75fd3..0000000000 --- a/akka-persistence/akka-persistence-simpledb/src/test/scala/SimpledbTicket343TestIntegration.scala +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.simpledb - - -import org.junit.runner.RunWith -import org.scalatest.junit.JUnitRunner -import akka.persistence.common._ - -@RunWith(classOf[JUnitRunner]) 
-class SimpledbTicket343TestIntegration extends Ticket343Test { - def dropMapsAndVectors: Unit = { - SimpledbStorageBackend.vectorAccess.drop - SimpledbStorageBackend.mapAccess.drop - } - - def getVector: (String) => PersistentVector[Array[Byte]] = SimpledbStorage.getVector - - def getMap: (String) => PersistentMap[Array[Byte], Array[Byte]] = SimpledbStorage.getMap - -} diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/akka/VoldemortStorage.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/akka/VoldemortStorage.scala deleted file mode 100644 index f6d252df89..0000000000 --- a/akka-persistence/akka-persistence-voldemort/src/main/scala/akka/VoldemortStorage.scala +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.voldemort - -import akka.actor.{newUuid} -import akka.stm._ -import akka.persistence.common._ - - -object VoldemortStorage extends Storage { - - type ElementType = Array[Byte] - def newMap: PersistentMap[ElementType, ElementType] = newMap(newUuid.toString) - def newVector: PersistentVector[ElementType] = newVector(newUuid.toString) - def newRef: PersistentRef[ElementType] = newRef(newUuid.toString) - override def newQueue: PersistentQueue[ElementType] = newQueue(newUuid.toString) - - def getMap(id: String): PersistentMap[ElementType, ElementType] = newMap(id) - def getVector(id: String): PersistentVector[ElementType] = newVector(id) - def getRef(id: String): PersistentRef[ElementType] = newRef(id) - override def getQueue(id: String): PersistentQueue[ElementType] = newQueue(id) - - def newMap(id: String): PersistentMap[ElementType, ElementType] = new VoldemortPersistentMap(id) - def newVector(id: String): PersistentVector[ElementType] = new VoldemortPersistentVector(id) - def newRef(id: String): PersistentRef[ElementType] = new VoldemortPersistentRef(id) - override def newQueue(id:String): PersistentQueue[ElementType] = new VoldemortPersistentQueue(id) -} - - -class VoldemortPersistentMap(id: String) extends PersistentMapBinary { - val uuid = id - val storage = VoldemortStorageBackend -} - - -class VoldemortPersistentVector(id: String) extends PersistentVector[Array[Byte]] { - val uuid = id - val storage = VoldemortStorageBackend -} - -class VoldemortPersistentRef(id: String) extends PersistentRef[Array[Byte]] { - val uuid = id - val storage = VoldemortStorageBackend -} - -class VoldemortPersistentQueue(id: String) extends PersistentQueue[Array[Byte]] { - val uuid = id - val storage = VoldemortStorageBackend -} diff --git a/akka-persistence/akka-persistence-voldemort/src/main/scala/akka/VoldemortStorageBackend.scala b/akka-persistence/akka-persistence-voldemort/src/main/scala/akka/VoldemortStorageBackend.scala deleted file mode 100644 index ad9c8d4df8..0000000000 --- a/akka-persistence/akka-persistence-voldemort/src/main/scala/akka/VoldemortStorageBackend.scala +++ /dev/null @@ -1,146 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.voldemort - -import akka.persistence.common._ -import akka.config.Config.config - -import voldemort.client._ -import java.lang.String -import voldemort.versioning.Versioned -import collection.JavaConversions -import collection.Map -import java.util.{Properties, Map => JMap} -import collection.immutable._ -import voldemort.client.protocol.admin.{AdminClientConfig, AdminClient} - -/* - RequiredReads + RequiredWrites should be > ReplicationFactor for all Voldemort Stores - In this case all VoldemortBackend 
operations can be retried until successful, and data should remain consistent - */ - -private[akka] object VoldemortStorageBackend extends CommonStorageBackend { - - import CommonStorageBackendAccess._ - import KVStorageBackend._ - import VoldemortAccess._ - - val bootstrapUrlsProp = "bootstrap_urls" - val clientConfig = config.getConfigMap("akka.persistence.voldemort.client") match { - case Some(configMap) => getClientConfig(configMap.asMap) - case None => getClientConfig(new HashMap[String, String] + (bootstrapUrlsProp -> "tcp://localhost:6666")) - } - val refStore = config.getString("akka.persistence.voldemort.store.ref", "Refs") - val mapStore = config.getString("akka.persistence.voldemort.store.map", "Maps") - val vectorStore = config.getString("akka.persistence.voldemort.store.vector", "Vectors") - val queueStore = config.getString("akka.persistence.voldemort.store.queue", "Queues") - - - var storeClientFactory: StoreClientFactory = null - var refs: KVStorageBackendAccess = null - var maps: KVStorageBackendAccess = null - var vectors: KVStorageBackendAccess = null - var queues: KVStorageBackendAccess = null - resetAccess - - def refAccess = refs - - def mapAccess = maps - - def vectorAccess = vectors - - def queueAccess = queues - - - object VoldemortAccess { - var admin: AdminClient = null - } - - class VoldemortAccess(val store: String) extends KVStorageBackendAccess { - import KVStorageBackend._ - import VoldemortAccess._ - - val client: StoreClient[Array[Byte], Array[Byte]] = VoldemortStorageBackend.storeClientFactory.getStoreClient(store) - - def put(key: Array[Byte], value: Array[Byte]) = { - client.put(key, value) - } - - def get(key: Array[Byte]): Array[Byte] = { - client.getValue(key) - } - - def get(key: Array[Byte], default: Array[Byte]): Array[Byte] = { - client.getValue(key, default) - } - - def getAll(keys: Iterable[Array[Byte]]): Map[Array[Byte], Array[Byte]] = { - val jmap = client.getAll(JavaConversions.asJavaIterable(keys)) - JavaConversions.asScalaMap(jmap).map{ - kv => - kv match { - case (key: Array[Byte], versioned: Versioned[Array[Byte]]) => (key -> versioned.getValue) - } - } - } - - def delete(key: Array[Byte]) = { - client.delete(key) - } - - def drop() = { - admin.truncate(0, store) - } - } - - - def getClientConfig(configMap: Map[String, String]): Properties = { - val properties = new Properties - configMap.foreach{ - keyval => keyval match { - case (key, value) => properties.setProperty(key.asInstanceOf[java.lang.String], value.asInstanceOf[java.lang.String]) - } - } - properties - } - - def initStoreClientFactory(): StoreClientFactory = { - if (storeClientFactory ne null) { - storeClientFactory.close - } - - if (clientConfig.getProperty(bootstrapUrlsProp, "none").startsWith("tcp")) { - new SocketStoreClientFactory(new ClientConfig(clientConfig)) - } else if (clientConfig.getProperty(bootstrapUrlsProp, "none").startsWith("http")) { - new HttpStoreClientFactory(new ClientConfig(clientConfig)) - } else { - throw new IllegalArgumentException("Unknown bootstrapUrl syntax: " + clientConfig.getProperty(bootstrapUrlsProp, "No Bootstrap URLs defined")) - } - } - - def initAdminClient(): AdminClient = { - if (VoldemortAccess.admin ne null) { - VoldemortAccess.admin.stop - } - - new AdminClient(VoldemortStorageBackend.clientConfig.getProperty(VoldemortStorageBackend.bootstrapUrlsProp), new AdminClientConfig) - - }
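The retry claim in this file's header comment rests on the standard quorum inequality: with replication factor N, required reads R and required writes W, R + W > N guarantees every read quorum overlaps every write quorum, so a retried write can never be silently shadowed by a stale read. A quick illustrative check (our helper, not part of the backend):

// The quorum inequality behind the comment above: read and write sets must
// intersect for a read to be guaranteed to see the latest acknowledged write.
def quorumsOverlap(n: Int, r: Int, w: Int): Boolean = r + w > n

// The test stores.xml that follows runs N = R = W = 1: 1 + 1 > 1, so a
// single-node read always observes the last successful write.
assert(quorumsOverlap(1, 1, 1))
assert(!quorumsOverlap(3, 1, 1)) // N = 3 with R = W = 1 may read stale data

- def initKVAccess = { - refs = new VoldemortAccess(refStore) - maps = new VoldemortAccess(mapStore) - vectors = new VoldemortAccess(vectorStore) - queues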
= new VoldemortAccess(queueStore) - } - - def resetAccess() { - storeClientFactory = initStoreClientFactory - VoldemortAccess.admin = initAdminClient - initKVAccess - } - - -} diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/cluster.xml b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/cluster.xml deleted file mode 100644 index dcf806b0ca..0000000000 --- a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/cluster.xml +++ /dev/null @@ -1,14 +0,0 @@ -<cluster> - <name>akka-test</name> - <server> - <id>0</id> - <host>localhost</host> - <http-port>8081</http-port> - <socket-port>6666</socket-port> - <admin-port>6667</admin-port> - <partitions>0,1,2,3</partitions> - </server> -</cluster> diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/server.properties b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/server.properties deleted file mode 100644 index 6dcd5bb340..0000000000 --- a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/server.properties +++ /dev/null @@ -1,4 +0,0 @@ -node.id=0 -enable.rebalancing=false -enable.bdb.engine=false -slop.enable=false diff --git a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml b/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml deleted file mode 100644 index 5e1289190c..0000000000 --- a/akka-persistence/akka-persistence-voldemort/src/test/resources/config/stores.xml +++ /dev/null @@ -1,67 +0,0 @@ -<stores> - <store> - <name>Refs</name> - <replication-factor>1</replication-factor> - <preferred-reads>1</preferred-reads> - <required-reads>1</required-reads> - <preferred-writes>1</preferred-writes> - <required-writes>1</required-writes> - <persistence>memory</persistence> - <routing>client</routing> - <key-serializer> - <type>identity</type> - </key-serializer> - <value-serializer> - <type>identity</type> - </value-serializer> - </store> - <store> - <name>Maps</name> - <replication-factor>1</replication-factor> - <preferred-reads>1</preferred-reads> - <required-reads>1</required-reads> - <preferred-writes>1</preferred-writes> - <required-writes>1</required-writes> - <persistence>memory</persistence> - <routing>client</routing> - <key-serializer> - <type>identity</type> - </key-serializer> - <value-serializer> - <type>identity</type> - </value-serializer> - </store> - <store> - <name>Vectors</name> - <replication-factor>1</replication-factor> - <preferred-reads>1</preferred-reads> - <required-reads>1</required-reads> - <preferred-writes>1</preferred-writes> - <required-writes>1</required-writes> - <persistence>memory</persistence> - <routing>client</routing> - <key-serializer> - <type>identity</type> - </key-serializer> - <value-serializer> - <type>identity</type> - </value-serializer> - </store> - <store> - <name>Queues</name> - <replication-factor>1</replication-factor> - <preferred-reads>1</preferred-reads> - <required-reads>1</required-reads> - <preferred-writes>1</preferred-writes> - <required-writes>1</required-writes> - <persistence>memory</persistence> - <routing>client</routing> - <key-serializer> - <type>identity</type> - </key-serializer> - <value-serializer> - <type>identity</type> - </value-serializer> - </store> -</stores> diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala deleted file mode 100644 index e3f956396d..0000000000 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/EmbeddedVoldemort.scala +++ /dev/null @@ -1,40 +0,0 @@ -package akka.persistence.voldemort - -import voldemort.server.{VoldemortServer, VoldemortConfig} -import org.scalatest.{Suite, BeforeAndAfterAll} -import org.junit.runner.RunWith -import org.scalatest.junit.JUnitRunner -import java.io.File -import akka.util.{Logging} -import collection.JavaConversions -import voldemort.store.memory.InMemoryStorageConfiguration -import voldemort.client.protocol.admin.{AdminClientConfig, AdminClient} - - -trait EmbeddedVoldemort extends BeforeAndAfterAll with Logging { - this: Suite => - var server: VoldemortServer = null - - override protected def beforeAll(): Unit = { - - try { - val dir = "./akka-persistence/akka-persistence-voldemort/target/test-resources" - val home = new File(dir) - log.info("Creating Voldemort Config") - val config = VoldemortConfig.loadFromVoldemortHome(home.getCanonicalPath) - config.setStorageConfigurations(JavaConversions.asJavaList(List(classOf[InMemoryStorageConfiguration].getName))) - log.info("Starting Voldemort") - server = new VoldemortServer(config) - server.start - VoldemortStorageBackend.resetAccess - log.info("Started") - } catch { - case e => log.error(e, "Error Starting Voldemort") - throw e - } - } - - override protected def afterAll(): Unit = { - server.stop - } -} diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentActorSuite.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentActorSuite.scala
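Before the suite itself, note the idiom it exercises (BankAccountActor, just below): the whole message handler runs inside an STM transaction, so every persistent map/vector write made for one message either commits or rolls back together when the handler throws. Stripped to its skeleton (stand-in handler cases; assumes akka.actor.Actor and akka.stm._ as imported in the files above):

// The atomic-receive idiom: wrap the real handler in a transaction so a
// failure aborts all storage writes made while processing the message.
class TxnSketchActor extends Actor {
  def receive = { case message => atomic { atomicReceive(message) } }

  def atomicReceive: Receive = {
    case "fail-after-write" =>
      // any persistent writes issued before this point roll back together
      throw new RuntimeException("abort the transaction")
    case other => self.reply(other)
  }
}

deleted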
file mode 100644 index 0518191f0a..0000000000 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortPersistentActorSuite.scala +++ /dev/null @@ -1,166 +0,0 @@ -package akka.persistence.voldemort - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.BeforeAndAfterEach -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith - -import akka.actor.{Actor, ActorRef} -import Actor._ -import BankAccountActor._ -import akka.stm._ - - -case class Balance(accountNo: String) -case class Debit(accountNo: String, amount: Int) -case class MultiDebit(accountNo: String, amounts: List[Int]) -case class Credit(accountNo: String, amount: Int) -case class Log(start: Int, finish: Int) -case object LogSize - -object BankAccountActor { - val state = "accountState" - val tx = "txnLog" -} - -class BankAccountActor extends Actor { - private val accountState = VoldemortStorage.newMap(state) - private val txnLog = VoldemortStorage.newVector(tx) - - import sjson.json.DefaultProtocol._ - import sjson.json.JsonSerialization._ - - def receive = { case message => atomic { atomicReceive(message) } } - - def atomicReceive: Receive = { - // check balance - case Balance(accountNo) => - txnLog.add(("Balance:" + accountNo).getBytes) - self.reply( - accountState.get(accountNo.getBytes) - .map(frombinary[Int](_)) - .getOrElse(0)) - - // debit amount: can fail - case Debit(accountNo, amount) => - txnLog.add(("Debit:" + accountNo + " " + amount).getBytes) - val m = accountState.get(accountNo.getBytes) - .map(frombinary[Int](_)) - .getOrElse(0) - - accountState.put(accountNo.getBytes, tobinary(m - amount)) - if (amount > m) fail - - self.reply(m - amount) - - // many debits: can fail - // demonstrates true rollback even if multiple puts have been done - case MultiDebit(accountNo, amounts) => - val sum = amounts.foldRight(0)(_ + _) - txnLog.add(("MultiDebit:" + accountNo + " " + sum).getBytes) - - val m = accountState.get(accountNo.getBytes) - .map(frombinary[Int](_)) - .getOrElse(0) - - var cbal = m - amounts.foreach { - amount => - accountState.put(accountNo.getBytes, tobinary(m - amount)) - cbal = cbal - amount - if (cbal < 0) fail - } - - self.reply(m - sum) - - // credit amount - case Credit(accountNo, amount) => - txnLog.add(("Credit:" + accountNo + " " + amount).getBytes) - val m = accountState.get(accountNo.getBytes) - .map(frombinary[Int](_)) - .getOrElse(0) - - accountState.put(accountNo.getBytes, tobinary(m + amount)) - - self.reply(m + amount) - - case LogSize => - self.reply(txnLog.length) - - case Log(start, finish) => - self.reply(txnLog.slice(start, finish).map(new String(_))) - } - - def fail = throw new RuntimeException("Expected exception; to test fault-tolerance") -} - -@RunWith(classOf[JUnitRunner]) -class VoldemortPersistentActorSuite extends -Spec with - ShouldMatchers with - BeforeAndAfterEach with EmbeddedVoldemort { - import VoldemortStorageBackend._ - - - override def beforeEach { - vectorAccess.drop - mapAccess.drop - } - - override def afterEach { - beforeEach - } - - describe("successful debit") { - it("should debit successfully") { - log.info("Successful Debit starting") - val bactor = actorOf[BankAccountActor] - bactor.start - bactor !! Credit("a-123", 5000) - log.info("credited") - bactor !! Debit("a-123", 3000) - log.info("debited") - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(2000) - log.info("balance matched") - bactor !! Credit("a-123", 7000) - log.info("Credited") - (bactor !!
Balance("a-123")).get.asInstanceOf[Int] should equal(9000) - log.info("Balance matched") - bactor !! Debit("a-123", 8000) - log.info("Debited") - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(1000) - log.info("Balance matched") - (bactor !! LogSize).get.asInstanceOf[Int] should equal(7) - (bactor !! Log(0, 7)).get.asInstanceOf[Iterable[String]].size should equal(7) - } - } - - describe("unsuccessful debit") { - it("debit should fail") { - val bactor = actorOf[BankAccountActor] - bactor.start - bactor !! Credit("a-123", 5000) - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(5000) - evaluating { - bactor !! Debit("a-123", 7000) - } should produce[Exception] - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(5000) - (bactor !! LogSize).get.asInstanceOf[Int] should equal(3) - } - } - - describe("unsuccessful multidebit") { - it("multidebit should fail") { - val bactor = actorOf[BankAccountActor] - bactor.start - bactor !! Credit("a-123", 5000) - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(5000) - evaluating { - bactor !! MultiDebit("a-123", List(1000, 2000, 4000)) - } should produce[Exception] - (bactor !! Balance("a-123")).get.asInstanceOf[Int] should equal(5000) - (bactor !! LogSize).get.asInstanceOf[Int] should equal(3) - } - } -} diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendCompatibilityTest.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendCompatibilityTest.scala deleted file mode 100644 index 058c0f1385..0000000000 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendCompatibilityTest.scala +++ /dev/null @@ -1,49 +0,0 @@ -package akka.persistence.voldemort - - -import org.junit.runner.RunWith -import org.scalatest.junit.JUnitRunner -import akka.persistence.common.{QueueStorageBackendTest, VectorStorageBackendTest, MapStorageBackendTest, RefStorageBackendTest} - -@RunWith(classOf[JUnitRunner]) -class VoldemortRefStorageBackendTest extends RefStorageBackendTest with EmbeddedVoldemort { - def dropRefs = { - VoldemortStorageBackend.refAccess.drop - } - - - def storage = VoldemortStorageBackend -} - -@RunWith(classOf[JUnitRunner]) -class VoldemortMapStorageBackendTest extends MapStorageBackendTest with EmbeddedVoldemort { - def dropMaps = { - VoldemortStorageBackend.mapAccess.drop - } - - - def storage = VoldemortStorageBackend -} - -@RunWith(classOf[JUnitRunner]) -class VoldemortVectorStorageBackendTest extends VectorStorageBackendTest with EmbeddedVoldemort { - def dropVectors = { - VoldemortStorageBackend.vectorAccess.drop - } - - - def storage = VoldemortStorageBackend -} - - -@RunWith(classOf[JUnitRunner]) -class VoldemortQueueStorageBackendTest extends QueueStorageBackendTest with EmbeddedVoldemort { - def dropQueues = { - VoldemortStorageBackend.queueAccess.drop - } - - - def storage = VoldemortStorageBackend -} - - diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala deleted file mode 100644 index 7153387d6a..0000000000 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortStorageBackendSuite.scala +++ /dev/null @@ -1,183 +0,0 @@ -package akka.persistence.voldemort - -import org.scalatest.FunSuite -import org.scalatest.matchers.ShouldMatchers -import org.junit.runner.RunWith -import org.scalatest.junit.JUnitRunner -import 
akka.persistence.voldemort.VoldemortStorageBackend._ -import akka.persistence.common.CommonStorageBackend._ -import akka.persistence.common.KVStorageBackend._ -import akka.util.Logging -import collection.immutable.TreeSet -import VoldemortStorageBackendSuite._ -import scala.None - -@RunWith(classOf[JUnitRunner]) -class VoldemortStorageBackendSuite extends FunSuite with ShouldMatchers with EmbeddedVoldemort with Logging { - test("that ref storage and retrieval works") { - val key = "testRef" - val value = "testRefValue" - val valueBytes = bytes(value) - refAccess.delete(key.getBytes) - refAccess.get(key.getBytes, empty) should be(empty) - refAccess.put(key.getBytes, valueBytes) - refAccess.get(key.getBytes) should be(valueBytes) - } - - test("PersistentRef apis function as expected") { - val key = "apiTestRef" - val value = "apiTestRefValue" - val valueBytes = bytes(value) - refAccess.delete(key.getBytes) - getRefStorageFor(key) should be(None) - insertRefStorageFor(key, valueBytes) - getRefStorageFor(key).get should equal(valueBytes) - } - - test("that map key storage and retrieval works") { - val key = "testmapKey" - val mapKeys = new TreeSet[Array[Byte]] + bytes("key1") - mapAccess.delete(getKey(key, mapKeysIndex)) - mapAccess.get(getKey(key, mapKeysIndex), SortedSetSerializer.toBytes(emptySet)) should equal(SortedSetSerializer.toBytes(emptySet)) - putMapKeys(key, mapKeys) - getMapKeys(key) should equal(mapKeys) - } - - test("that map value storage and retrieval works") { - val key = bytes("keyForTestingMapValueClient") - val value = bytes("value for testing map value client") - mapAccess.put(key, value) - mapAccess.get(key, empty) should equal(value) - } - - - test("PersistentMap apis function as expected") { - val name = "theMap" - val key = bytes("mapkey") - val value = bytes("mapValue") - removeMapStorageFor(name, key) - removeMapStorageFor(name) - getMapStorageEntryFor(name, key) should be(None) - getMapStorageSizeFor(name) should be(0) - getMapStorageFor(name).length should be(0) - getMapStorageRangeFor(name, None, None, 100).length should be(0) - - insertMapStorageEntryFor(name, key, value) - - getMapStorageEntryFor(name, key).get should equal(value) - getMapStorageSizeFor(name) should be(1) - getMapStorageFor(name).length should be(1) - getMapStorageRangeFor(name, None, None, 100).length should be(1) - - removeMapStorageFor(name, key) - removeMapStorageFor(name) - getMapStorageEntryFor(name, key) should be(None) - getMapStorageSizeFor(name) should be(0) - getMapStorageFor(name).length should be(0) - getMapStorageRangeFor(name, None, None, 100).length should be(0) - - insertMapStorageEntriesFor(name, List(key -> value)) - - getMapStorageEntryFor(name, key).get should equal(value) - getMapStorageSizeFor(name) should be(1) - getMapStorageFor(name).length should be(1) - getMapStorageRangeFor(name, None, None, 100).length should be(1) - - } - - - test("that vector value storage and retrieval works") { - val key = "vectorValueKey" - val index = 3 - val value = bytes("some bytes") - val vecKey = getIndexedKey(key, index) - getIndexFromVectorValueKey(key, vecKey) should be(index) - vectorAccess.delete(vecKey) - vectorAccess.get(vecKey, empty) should equal(empty) - vectorAccess.put(vecKey, value) - vectorAccess.get(vecKey) should equal(value) - } - - test("PersistentVector apis function as expected") { - val key = "vectorApiKey" - val value = bytes("Some bytes we want to store in a vector") - val updatedValue = bytes("Some updated bytes we want to store in a vector") - 
vectorAccess.deleteIndexed(key, vectorHeadIndex) - vectorAccess.deleteIndexed(key, vectorTailIndex) - vectorAccess.delete(getIndexedKey(key, 0)) - vectorAccess.delete(getIndexedKey(key, 1)) - - insertVectorStorageEntryFor(key, value) - //again - insertVectorStorageEntryFor(key, value) - - getVectorStorageEntryFor(key, 0) should be(value) - getVectorStorageEntryFor(key, 1) should be(value) - getVectorStorageRangeFor(key, None, None, 1).head should be(value) - getVectorStorageRangeFor(key, Some(1), None, 1).head should be(value) - getVectorStorageSizeFor(key) should be(2) - - updateVectorStorageEntryFor(key, 1, updatedValue) - - getVectorStorageEntryFor(key, 0) should be(value) - getVectorStorageEntryFor(key, 1) should be(updatedValue) - getVectorStorageRangeFor(key, None, None, 1).head should be(value) - getVectorStorageRangeFor(key, Some(1), None, 1).head should be(updatedValue) - getVectorStorageSizeFor(key) should be(2) - - } - - test("Persistent Queue apis function as expected") { - val key = "queueApiKey" - val value = bytes("some bytes even") - val valueOdd = bytes("some bytes odd") - - remove(key) - VoldemortStorageBackend.size(key) should be(0) - enqueue(key, value) should be(Some(1)) - VoldemortStorageBackend.size(key) should be(1) - enqueue(key, valueOdd) should be(Some(2)) - VoldemortStorageBackend.size(key) should be(2) - peek(key, 0, 1)(0) should be(value) - peek(key, 1, 1)(0) should be(valueOdd) - dequeue(key).get should be(value) - VoldemortStorageBackend.size(key) should be(1) - dequeue(key).get should be(valueOdd) - VoldemortStorageBackend.size(key) should be(0) - dequeue(key) should be(None) - queueAccess.putIndexed(key, queueHeadIndex, IntSerializer.toBytes(Integer.MAX_VALUE)) - queueAccess.putIndexed(key, queueTailIndex, IntSerializer.toBytes(Integer.MAX_VALUE)) - VoldemortStorageBackend.size(key) should be(0) - enqueue(key, value) should be(Some(1)) - VoldemortStorageBackend.size(key) should be(1) - enqueue(key, valueOdd) should be(Some(2)) - VoldemortStorageBackend.size(key) should be(2) - peek(key, 0, 1)(0) should be(value) - peek(key, 1, 1)(0) should be(valueOdd) - dequeue(key).get should be(value) - VoldemortStorageBackend.size(key) should be(1) - dequeue(key).get should be(valueOdd) - VoldemortStorageBackend.size(key) should be(0) - dequeue(key) should be(None) - - - } - - def getIndexFromVectorValueKey(owner: String, key: Array[Byte]): Int = { - val indexBytes = new Array[Byte](IntSerializer.bytesPerInt) - System.arraycopy(key, key.length - IntSerializer.bytesPerInt, indexBytes, 0, IntSerializer.bytesPerInt) - IntSerializer.fromBytes(indexBytes) - } - - -} - -object VoldemortStorageBackendSuite { - val empty = Array.empty[Byte] - val emptySet = new TreeSet[Array[Byte]] - - def bytes(value: String): Array[Byte] = { - value.getBytes("UTF-8") - }
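The queue test above deliberately parks both head and tail at Integer.MAX_VALUE to show that enqueue/dequeue survive index overflow: the queue is a pair of monotonically increasing counters into the KV space, and size is the tail-head difference, which stays correct under two's-complement wrap-around. A self-contained sketch of that counter arithmetic (our names, not the backend's):

// Queue-as-two-counters: head/tail indices stored beside the items.
// Size survives Int overflow because tail - head wraps modulo 2^32.
final class CounterQueue[A] {
  private var head = Int.MaxValue // mirror the overflow case the suite tests
  private var tail = Int.MaxValue
  private val slots = scala.collection.mutable.Map[Int, A]()

  def size: Int = tail - head // correct even after tail wraps to Int.MinValue

  def enqueue(a: A): Int = { slots(tail) = a; tail += 1; size }
  def dequeue(): Option[A] =
    if (size == 0) None
    else { val a = slots.remove(head); head += 1; a }
}

// e.g. enqueue at head = tail = Int.MaxValue: tail wraps, yet size == 1.

-} diff --git a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortTicket343Test.scala b/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortTicket343Test.scala deleted file mode 100644 index 8a3cb1d0be..0000000000 --- a/akka-persistence/akka-persistence-voldemort/src/test/scala/VoldemortTicket343Test.scala +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.persistence.voldemort - - -import org.junit.runner.RunWith -import org.scalatest.junit.JUnitRunner -import akka.persistence.common._ - -@RunWith(classOf[JUnitRunner]) -class VoldemortTicket343Test extends Ticket343Test with EmbeddedVoldemort { - def dropMapsAndVectors: Unit = { -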
VoldemortStorageBackend.mapAccess.drop - VoldemortStorageBackend.vectorAccess.drop - } - - def getVector: (String) => PersistentVector[Array[Byte]] = VoldemortStorage.getVector - - def getMap: (String) => PersistentMap[Array[Byte], Array[Byte]] = VoldemortStorage.getMap - -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/BeanImpl.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/BeanImpl.java deleted file mode 100644 index 9ceba85d64..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/BeanImpl.java +++ /dev/null @@ -1,13 +0,0 @@ -package sample.camel; - -import akka.actor.TypedActor; -/** - * @author Martin Krasser - */ -public class BeanImpl extends TypedActor implements BeanIntf { - - public String foo(String s) { - return "hello " + s; - } - -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/BeanIntf.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/BeanIntf.java deleted file mode 100644 index a7b2e6e6a4..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/BeanIntf.java +++ /dev/null @@ -1,10 +0,0 @@ -package sample.camel; - -/** - * @author Martin Krasser - */ -public interface BeanIntf { - - public String foo(String s); - -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer1.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer1.java deleted file mode 100644 index 3e8ce1e20f..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer1.java +++ /dev/null @@ -1,15 +0,0 @@ -package sample.camel; - -import org.apache.camel.Body; -import org.apache.camel.Header; - -import akka.camel.consume; - -/** - * @author Martin Krasser - */ -public interface RemoteTypedConsumer1 { - - @consume("jetty:http://localhost:6644/camel/remote-typed-actor-1") - public String foo(@Body String body, @Header("name") String header); -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer1Impl.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer1Impl.java deleted file mode 100644 index 522db0e4a7..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer1Impl.java +++ /dev/null @@ -1,13 +0,0 @@ -package sample.camel; - -import akka.actor.TypedActor; - -/** - * @author Martin Krasser - */ -public class RemoteTypedConsumer1Impl extends TypedActor implements RemoteTypedConsumer1 { - - public String foo(String body, String header) { - return String.format("remote1: body=%s header=%s", body, header); - } -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer2.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer2.java deleted file mode 100644 index ba093a1d96..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer2.java +++ /dev/null @@ -1,15 +0,0 @@ -package sample.camel; - -import org.apache.camel.Body; -import org.apache.camel.Header; -import akka.camel.consume; - -/** - * @author Martin Krasser - */ -public interface RemoteTypedConsumer2 { - - @consume("jetty:http://localhost:6644/camel/remote-typed-actor-2") - public String foo(@Body String body, @Header("name") String header); - -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer2Impl.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer2Impl.java 
deleted file mode 100644 index b3475ad2d6..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/RemoteTypedConsumer2Impl.java +++ /dev/null @@ -1,14 +0,0 @@ -package sample.camel; - -import akka.actor.TypedActor; - -/** - * @author Martin Krasser - */ -public class RemoteTypedConsumer2Impl extends TypedActor implements RemoteTypedConsumer2 { - - public String foo(String body, String header) { - return String.format("remote2: body=%s header=%s", body, header); - } - -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1.java deleted file mode 100644 index 6213fb8f09..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1.java +++ /dev/null @@ -1,17 +0,0 @@ -package sample.camel; - -import org.apache.camel.Body; -import org.apache.camel.Header; - -import akka.camel.consume; - -/** - * @author Martin Krasser - */ -public interface TypedConsumer1 { - @consume("file:data/input/typed-actor") - public void foo(String body); - - @consume("jetty:http://0.0.0.0:8877/camel/typed-actor") - public String bar(@Body String body, @Header("name") String header); -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1Impl.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1Impl.java deleted file mode 100644 index bd735fe14b..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer1Impl.java +++ /dev/null @@ -1,21 +0,0 @@ -package sample.camel; - -import org.apache.camel.Body; -import org.apache.camel.Header; - -import akka.actor.TypedActor; - -/** - * @author Martin Krasser - */ -public class TypedConsumer1Impl extends TypedActor implements TypedConsumer1 { - - public void foo(String body) { - System.out.println("Received message:"); - System.out.println(body); - } - - public String bar(@Body String body, @Header("name") String header) { - return String.format("body=%s header=%s", body, header); - } -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2.java deleted file mode 100644 index 9a39b534b5..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2.java +++ /dev/null @@ -1,14 +0,0 @@ -package sample.camel; - -import org.apache.camel.Body; -import org.apache.camel.Header; -import akka.camel.consume; - -/** - * @author Martin Krasser - */ -public interface TypedConsumer2 { - - @consume("direct:default") - public String foo(String body); -} diff --git a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2Impl.java b/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2Impl.java deleted file mode 100644 index ed82810c10..0000000000 --- a/akka-samples/akka-sample-camel/src/main/java/sample/camel/TypedConsumer2Impl.java +++ /dev/null @@ -1,13 +0,0 @@ -package sample.camel; - -import akka.actor.TypedActor; - -/** - * @author Martin Krasser - */ -public class TypedConsumer2Impl extends TypedActor implements TypedConsumer2 { - - public String foo(String body) { - return String.format("default: %s", body); - } -} diff --git a/akka-samples/akka-sample-camel/src/main/resources/context-jms.xml b/akka-samples/akka-sample-camel/src/main/resources/context-jms.xml deleted file mode 100644 index 12e4541be3..0000000000 --- 
a/akka-samples/akka-sample-camel/src/main/resources/context-jms.xml +++ /dev/null @@ -1,27 +0,0 @@ -<!-- Spring bean definitions for the JMS example used by Boot.scala (markup lost in extraction) --> diff --git a/akka-samples/akka-sample-camel/src/main/resources/context-standalone.xml b/akka-samples/akka-sample-camel/src/main/resources/context-standalone.xml deleted file mode 100644 index 686dfc569c..0000000000 --- a/akka-samples/akka-sample-camel/src/main/resources/context-standalone.xml +++ /dev/null @@ -1,25 +0,0 @@ -<!-- Spring bean definitions for StandaloneSpringApplication (markup lost in extraction) --> diff --git a/akka-samples/akka-sample-camel/src/main/scala/Actors.scala b/akka-samples/akka-sample-camel/src/main/scala/Actors.scala deleted file mode 100644 index f19144bbbf..0000000000 --- a/akka-samples/akka-sample-camel/src/main/scala/Actors.scala +++ /dev/null @@ -1,143 +0,0 @@ -package sample.camel - -import org.apache.camel.Exchange - -import akka.actor.{Actor, ActorRef, RemoteActor} - -import akka.camel.{Failure, Producer, Message, Consumer} -import akka.util.Logging - -/** - * Client-initiated remote actor. - */ -class RemoteActor1 extends RemoteActor("localhost", 7777) with Consumer { - def endpointUri = "jetty:http://localhost:6644/camel/remote-actor-1" - - protected def receive = { - case msg: Message => self.reply(Message("hello %s" format msg.bodyAs[String], Map("sender" -> "remote1"))) - } -} - -/** - * Server-initiated remote actor. - */ -class RemoteActor2 extends Actor with Consumer { - def endpointUri = "jetty:http://localhost:6644/camel/remote-actor-2" - - protected def receive = { - case msg: Message => self.reply(Message("hello %s" format msg.bodyAs[String], Map("sender" -> "remote2"))) - } -} - -class Producer1 extends Actor with Producer { - def endpointUri = "direct:welcome" - override def oneway = false // default -} - -class Consumer1 extends Actor with Consumer with Logging { - def endpointUri = "file:data/input/actor" - - def receive = { - case msg: Message => log.info("received %s" format msg.bodyAs[String]) - } -} - -class Consumer2 extends Actor with Consumer { - def endpointUri = "jetty:http://0.0.0.0:8877/camel/default" - - def receive = { - case msg: Message => self.reply("Hello %s" format msg.bodyAs[String]) - } -} - -class Consumer3(transformer: ActorRef) extends Actor with Consumer { - def endpointUri = "jetty:http://0.0.0.0:8877/camel/welcome" - - def receive = { - case msg: Message => transformer.forward(msg.setBodyAs[String]) - } -} - -class Consumer4 extends Actor with Consumer with Logging { - def endpointUri = "jetty:http://0.0.0.0:8877/camel/stop" - - def receive = { - case msg: Message => msg.bodyAs[String] match { - case "stop" => { - self.reply("Consumer4 stopped") - self.stop - } - case body => self.reply(body) - } - } -} - -class Consumer5 extends Actor with Consumer with Logging { - def endpointUri = "jetty:http://0.0.0.0:8877/camel/start" - - def receive = { - case _ => { - Actor.actorOf[Consumer4].start - self.reply("Consumer4 started") - } - } -} - -class Transformer(producer: ActorRef) extends Actor { - protected def receive = { - case msg: Message => producer.forward(msg.transformBody( (body: String) => "- %s -" format body)) - } -} - -class Subscriber(name:String, uri: String) extends Actor with Consumer with Logging { - def endpointUri = uri - - protected def receive = { - case msg: Message => log.info("%s received: %s" format (name, msg.body)) - } -} - -class Publisher(name: String, uri: String) extends Actor with Producer { - self.id = name - def endpointUri = uri - override def oneway = true -}
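Actors.scala, partially deleted above and continuing below, is the heart of the sample: a Consumer is any actor with an endpointUri, published as a Camel endpoint when started, and a Producer sends whatever lands in its mailbox to its URI. The minimal pair, in the same style (URI and class names are made up for illustration):

// A Consumer is published at endpointUri when the actor starts.
class EchoConsumer extends Actor with Consumer {
  def endpointUri = "jetty:http://0.0.0.0:8877/camel/echo"
  def receive = {
    case msg: Message => self.reply("echo: %s" format msg.bodyAs[String])
  }
}

// A Producer pushes its mailbox to the endpoint; oneway = true means
// fire-and-forget, no reply is expected back from the route.
class LogProducer extends Actor with Producer {
  def endpointUri = "direct:log"
  override def oneway = true
}

-class PublisherBridge(uri: String,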
publisher: ActorRef) extends Actor with Consumer { - def endpointUri = uri - - protected def receive = { - case msg: Message => { - publisher ! msg.bodyAs[String] - self.reply("message published") - } - } -} - -class HttpConsumer(producer: ActorRef) extends Actor with Consumer { - def endpointUri = "jetty:http://0.0.0.0:8875/" - - protected def receive = { - case msg => producer forward msg - } -} - -class HttpProducer(transformer: ActorRef) extends Actor with Producer { - def endpointUri = "jetty://http://akkasource.org/?bridgeEndpoint=true" - - override protected def receiveBeforeProduce = { - // only keep the Exchange.HTTP_PATH message header (which is needed by the bridge endpoint) - case msg: Message => msg.setHeaders(msg.headers(Set(Exchange.HTTP_PATH))) - } - - override protected def receiveAfterProduce = { - // do not reply but forward result to transformer - case msg => transformer forward msg - } -} - -class HttpTransformer extends Actor { - protected def receive = { - case msg: Message => self.reply(msg.transformBody {body: String => body replaceAll ("Akka ", "AKKA ")}) - case msg: Failure => self.reply(msg) - } -} diff --git a/akka-samples/akka-sample-camel/src/main/scala/Boot.scala b/akka-samples/akka-sample-camel/src/main/scala/Boot.scala deleted file mode 100644 index d8fe43a7a9..0000000000 --- a/akka-samples/akka-sample-camel/src/main/scala/Boot.scala +++ /dev/null @@ -1,108 +0,0 @@ -package sample.camel - -import org.apache.camel.{Exchange, Processor} -import org.apache.camel.builder.RouteBuilder -import org.apache.camel.impl.DefaultCamelContext -import org.apache.camel.spring.spi.ApplicationContextRegistry -import org.springframework.context.support.ClassPathXmlApplicationContext - -import akka.actor.Actor._ -import akka.actor.{TypedActor, Supervisor} -import akka.camel.CamelContextManager -import akka.config.Supervision._ - -/** - * @author Martin Krasser - */ -class Boot { - - // ----------------------------------------------------------------------- - // Basic example - // ----------------------------------------------------------------------- - - actorOf[Consumer1].start - actorOf[Consumer2].start - - // Alternatively, use a supervisor for these actors - //val supervisor = Supervisor( - // SupervisorConfig( - // RestartStrategy(OneForOne, 3, 100, List(classOf[Exception])), - // Supervise(actorOf[Consumer1], Permanent) :: - // Supervise(actorOf[Consumer2], Permanent) :: Nil)) - - // ----------------------------------------------------------------------- - // Custom Camel route example - // ----------------------------------------------------------------------- - - // Create CamelContext and a Spring-based registry - val context = new ClassPathXmlApplicationContext("/context-jms.xml", getClass) - val registry = new ApplicationContextRegistry(context) - - // Use a custom Camel context and a custom route builder - CamelContextManager.init(new DefaultCamelContext(registry)) - CamelContextManager.mandatoryContext.addRoutes(new CustomRouteBuilder) - - val producer = actorOf[Producer1] - val mediator = actorOf(new Transformer(producer)) - val consumer = actorOf(new Consumer3(mediator)) - - producer.start - mediator.start - consumer.start - - // ----------------------------------------------------------------------- - // Asynchronous consumer-producer example (Akka homepage transformation) - // ----------------------------------------------------------------------- - - val httpTransformer = actorOf(new HttpTransformer).start
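The three actors wired together next form a forwarding chain: HttpConsumer takes the inbound request and forwards it to HttpProducer, which calls the remote site and forwards the result to HttpTransformer, whose reply travels all the way back to the external HTTP client. The chain stays asynchronous because forward, unlike !, preserves the original sender; a two-actor illustration (hypothetical classes):

// forward keeps the original sender, so the last actor in a chain can reply
// directly to the external caller instead of to its upstream neighbour.
class Head(next: ActorRef) extends Actor {
  def receive = { case msg => next forward msg } // sender is still the caller
}

class Tail extends Actor {
  def receive = { case msg => self.reply("done: " + msg) } // reaches the caller
}

- val httpProducer = actorOf(new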
HttpProducer(httpTransformer)).start - val httpConsumer = actorOf(new HttpConsumer(httpProducer)).start - - // ----------------------------------------------------------------------- - // Publish subscribe examples - // ----------------------------------------------------------------------- - - // - // Cometd example commented out because camel-cometd is broken since Camel 2.3 - // - - //val cometdUri = "cometd://localhost:8111/test/abc?baseResource=file:target" - //val cometdSubscriber = actorOf(new Subscriber("cometd-subscriber", cometdUri)).start - //val cometdPublisher = actorOf(new Publisher("cometd-publisher", cometdUri)).start - - val jmsUri = "jms:topic:test" - val jmsSubscriber1 = actorOf(new Subscriber("jms-subscriber-1", jmsUri)).start - val jmsSubscriber2 = actorOf(new Subscriber("jms-subscriber-2", jmsUri)).start - val jmsPublisher = actorOf(new Publisher("jms-publisher", jmsUri)).start - - //val cometdPublisherBridge = actorOf(new PublisherBridge("jetty:http://0.0.0.0:8877/camel/pub/cometd", cometdPublisher)).start - val jmsPublisherBridge = actorOf(new PublisherBridge("jetty:http://0.0.0.0:8877/camel/pub/jms", jmsPublisher)).start - - // ----------------------------------------------------------------------- - // Actor un-publishing and re-publishing example - // ----------------------------------------------------------------------- - - actorOf[Consumer4].start // POSTing "stop" to http://0.0.0.0:8877/camel/stop stops and unpublishes this actor - actorOf[Consumer5].start // POSTing any msg to http://0.0.0.0:8877/camel/start starts and publishes Consumer4 again. - - // ----------------------------------------------------------------------- - // Active object example - // ----------------------------------------------------------------------- - - TypedActor.newInstance(classOf[TypedConsumer1], classOf[TypedConsumer1Impl]) -} - -/** - * @author Martin Krasser - */ -class CustomRouteBuilder extends RouteBuilder { - def configure { - val actorUri = "actor:%s" format classOf[Consumer2].getName - from("jetty:http://0.0.0.0:8877/camel/custom").to(actorUri) - from("direct:welcome").process(new Processor() { - def process(exchange: Exchange) { - exchange.getOut.setBody("Welcome %s" format exchange.getIn.getBody) - } - }) - } -} diff --git a/akka-samples/akka-sample-camel/src/main/scala/ClientApplication.scala b/akka-samples/akka-sample-camel/src/main/scala/ClientApplication.scala deleted file mode 100644 index 7b3d70df80..0000000000 --- a/akka-samples/akka-sample-camel/src/main/scala/ClientApplication.scala +++ /dev/null @@ -1,29 +0,0 @@ -package sample.camel - -import akka.actor.Actor._ -import akka.actor.TypedActor -import akka.camel.Message -import akka.remote.RemoteClient - -/** - * @author Martin Krasser - */ -object ClientApplication extends Application { - - val actor1 = actorOf[RemoteActor1].start - val actor2 = RemoteClient.actorFor("remote2", "localhost", 7777) - - val typedActor1 = TypedActor.newRemoteInstance( - classOf[RemoteTypedConsumer1], - classOf[RemoteTypedConsumer1Impl], "localhost", 7777) - - val typedActor2 = RemoteClient.typedActorFor( - classOf[RemoteTypedConsumer2], "remote3", "localhost", 7777) - - println(actor1 !! Message("actor1")) // activates and publishes actor remotely
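ClientApplication demonstrates both remoting directions at once: actor1 and typedActor1 are client-initiated (instantiated locally and attaching themselves to the remote node), while actor2 and typedActor2 are server-initiated, resolved by the ids that ServerApplication registers below. The lookup handshake in isolation, using the same host, port and ids as this sample:

// Server side: start a node and register an actor under a well-known id.
RemoteNode.start("localhost", 7777)
RemoteNode.register("remote2", actorOf[RemoteActor2].start)

// Client side: resolve a proxy by id and use it like a local ActorRef.
val proxy = RemoteClient.actorFor("remote2", "localhost", 7777)
println(proxy !! Message("ping"))

- println(actor2 !!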
Message("actor2")) // actor already activated and published remotely - - println(typedActor1.foo("x1", "y1")) // activates and publishes typed actor methods remotely - println(typedActor2.foo("x2", "y2")) // typed actor methods already activated and published remotely - -} diff --git a/akka-samples/akka-sample-camel/src/main/scala/ServerApplication.scala b/akka-samples/akka-sample-camel/src/main/scala/ServerApplication.scala deleted file mode 100644 index 40f68e510b..0000000000 --- a/akka-samples/akka-sample-camel/src/main/scala/ServerApplication.scala +++ /dev/null @@ -1,24 +0,0 @@ -package sample.camel - -import akka.actor.Actor._ -import akka.remote.RemoteNode -import akka.camel.CamelServiceManager -import akka.actor.TypedActor - -/** - * @author Martin Krasser - */ -object ServerApplication extends Application { - import CamelServiceManager._ - - startCamelService - - val ua = actorOf[RemoteActor2].start - val ta = TypedActor.newInstance( - classOf[RemoteTypedConsumer2], - classOf[RemoteTypedConsumer2Impl], 2000) - - RemoteNode.start("localhost", 7777) - RemoteNode.register("remote2", ua) - RemoteNode.registerTypedActor("remote3", ta) -} diff --git a/akka-samples/akka-sample-camel/src/main/scala/StandaloneApplication.scala b/akka-samples/akka-sample-camel/src/main/scala/StandaloneApplication.scala deleted file mode 100644 index 13717e17d4..0000000000 --- a/akka-samples/akka-sample-camel/src/main/scala/StandaloneApplication.scala +++ /dev/null @@ -1,107 +0,0 @@ -package sample.camel - -import org.apache.camel.impl.{DefaultCamelContext, SimpleRegistry} -import org.apache.camel.builder.RouteBuilder -import org.apache.camel.spring.spi.ApplicationContextRegistry -import org.springframework.context.support.ClassPathXmlApplicationContext - -import akka.actor.{Actor, ActorRegistry, TypedActor} -import akka.camel._ - -/** - * @author Martin Krasser - */ -object StandaloneApplication extends Application { - import CamelContextManager._ - import CamelServiceManager._ - - // 'externally' register typed actors - val registry = new SimpleRegistry - registry.put("sample", TypedActor.newInstance(classOf[BeanIntf], classOf[BeanImpl])) - - // customize CamelContext - CamelContextManager.init(new DefaultCamelContext(registry)) - CamelContextManager.mandatoryContext.addRoutes(new StandaloneApplicationRoute) - - startCamelService - - // access 'externally' registered typed actors - assert("hello msg1" == mandatoryContext.createProducerTemplate.requestBody("direct:test", "msg1")) - - mandatoryService.awaitEndpointActivation(1) { - // 'internally' register typed actor (requires CamelService) - TypedActor.newInstance(classOf[TypedConsumer2], classOf[TypedConsumer2Impl]) - } - - // access 'internally' (automatically) registered typed-actors - // (see @consume annotation value at TypedConsumer2.foo method) - assert("default: msg3" == mandatoryContext.createProducerTemplate.requestBody("direct:default", "msg3")) - - stopCamelService - - ActorRegistry.shutdownAll -} - -class StandaloneApplicationRoute extends RouteBuilder { - def configure = { - // route to typed actors (in SimpleRegistry) - from("direct:test").to("typed-actor:sample?method=foo") - } -} - -object StandaloneSpringApplication extends Application { - import CamelContextManager._ - - // load Spring application context - val appctx = new ClassPathXmlApplicationContext("/context-standalone.xml") - - // access 'externally' registered typed actors with typed-actor component - assert("hello msg3" == mandatoryTemplate.requestBody("direct:test3", "msg3")) - 
- appctx.close - - ActorRegistry.shutdownAll -} - -class StandaloneSpringApplicationRoute extends RouteBuilder { - def configure = { - // routes to typed actor (in ApplicationContextRegistry) - from("direct:test3").to("typed-actor:ta?method=foo") - } -} - -object StandaloneJmsApplication extends Application { - import CamelServiceManager._ - - val context = new ClassPathXmlApplicationContext("/context-jms.xml") - val registry = new ApplicationContextRegistry(context) - - // Init CamelContextManager with custom CamelContext - CamelContextManager.init(new DefaultCamelContext(registry)) - - startCamelService - - val jmsUri = "jms:topic:test" - val jmsPublisher = Actor.actorOf(new Publisher("jms-publisher", jmsUri)).start - - mandatoryService.awaitEndpointActivation(2) { - Actor.actorOf(new Subscriber("jms-subscriber-1", jmsUri)).start - Actor.actorOf(new Subscriber("jms-subscriber-2", jmsUri)).start - } - - // Send 10 messages via the publisher actor - for(i <- 1 to 10) { - jmsPublisher ! ("Akka rocks (%d)" format i) - } - - // Send 10 messages to the JMS topic directly - for(i <- 1 to 10) { - CamelContextManager.mandatoryTemplate.sendBody(jmsUri, "Camel rocks (%d)" format i) - } - - // Wait a bit for the subscribers to receive the messages - Thread.sleep(1000) - - stopCamelService - ActorRegistry.shutdownAll -} diff --git a/akka-samples/akka-sample-camel/src/test/scala/HttpConcurrencyTestStress.scala b/akka-samples/akka-sample-camel/src/test/scala/HttpConcurrencyTestStress.scala deleted file mode 100644 index 27e8ec1800..0000000000 --- a/akka-samples/akka-sample-camel/src/test/scala/HttpConcurrencyTestStress.scala +++ /dev/null @@ -1,99 +0,0 @@ -package sample.camel - -import collection.mutable.Set - -import java.util.concurrent.CountDownLatch - -import org.junit._ -import org.scalatest.junit.JUnitSuite - -import akka.actor.Actor._ -import akka.actor.{ActorRegistry, ActorRef, Actor} -import akka.camel._ -import akka.camel.CamelServiceManager._ -import akka.routing.CyclicIterator -import akka.routing.Routing._ - -/** - * @author Martin Krasser - */ -class HttpConcurrencyTestStress extends JUnitSuite { - import HttpConcurrencyTestStress._ - - @Test def shouldProcessMessagesConcurrently = { - val num = 50 - val latch1 = new CountDownLatch(num) - val latch2 = new CountDownLatch(num) - val latch3 = new CountDownLatch(num) - val client1 = actorOf(new HttpClientActor("client1", latch1)).start - val client2 = actorOf(new HttpClientActor("client2", latch2)).start - val client3 = actorOf(new HttpClientActor("client3", latch3)).start - for (i <- 1 to num) { - client1 ! Message("client1", Map(Message.MessageExchangeId -> i)) - client2 ! Message("client2", Map(Message.MessageExchangeId -> i)) - client3 ! Message("client3", Map(Message.MessageExchangeId -> i)) - } - latch1.await - latch2.await - latch3.await - assert(num == (client1 !! "getCorrelationIdCount").as[Int].get) - assert(num == (client2 !! "getCorrelationIdCount").as[Int].get) - assert(num == (client3 !! 
"getCorrelationIdCount").as[Int].get) - } -} - -object HttpConcurrencyTestStress { - @BeforeClass - def beforeClass: Unit = { - startCamelService - - val workers = for (i <- 1 to 8) yield actorOf[HttpServerWorker].start - val balancer = loadBalancerActor(new CyclicIterator(workers.toList)) - - service.get.awaitEndpointActivation(1) { - actorOf(new HttpServerActor(balancer)).start - } - } - - @AfterClass - def afterClass = { - stopCamelService - ActorRegistry.shutdownAll - } - - class HttpClientActor(label: String, latch: CountDownLatch) extends Actor with Producer { - def endpointUri = "jetty:http://0.0.0.0:8855/echo" - var correlationIds = Set[Any]() - - override protected def receive = { - case "getCorrelationIdCount" => self.reply(correlationIds.size) - case msg => super.receive(msg) - } - - override protected def receiveAfterProduce = { - case msg: Message => { - val corr = msg.headers(Message.MessageExchangeId) - val body = msg.bodyAs[String] - correlationIds += corr - assert(label == body) - latch.countDown - print(".") - } - } - } - - class HttpServerActor(balancer: ActorRef) extends Actor with Consumer { - def endpointUri = "jetty:http://0.0.0.0:8855/echo" - var counter = 0 - - def receive = { - case msg => balancer forward msg - } - } - - class HttpServerWorker extends Actor { - protected def receive = { - case msg => self.reply(msg) - } - } -} diff --git a/akka-samples/akka-sample-chat/Buildfile b/akka-samples/akka-sample-chat/Buildfile deleted file mode 100644 index 814e6e4149..0000000000 --- a/akka-samples/akka-sample-chat/Buildfile +++ /dev/null @@ -1,48 +0,0 @@ -require 'buildr/scala' - -VERSION_NUMBER = "0.6" -GROUP = "se.scalablesolutions.akka" - -repositories.remote << "http://www.ibiblio.org/maven2/" -repositories.remote << "http://www.lag.net/repo" -repositories.remote << "http://multiverse.googlecode.com/svn/maven-repository/releases" - -AKKA = group('akka-remote', 'akka-comet', 'akka-util','akka-kernel', 'akka-rest', 'akka-util-java', - 'akka-security','akka-persistence-common', 'akka-persistence-redis', - 'akka-amqp', - :under=> 'se.scalablesolutions.akka', - :version => '0.6') -ASPECTJ = "org.codehaus.aspectwerkz:aspectwerkz-nodeps-jdk5:jar:2.1" -SBINARY = "sbinary:sbinary:jar:0.3" -COMMONS_IO = "commons-io:commons-io:jar:1.4" -CONFIGGY = "net.lag:configgy:jar:1.4.7" -JACKSON = group('jackson-core-asl', 'jackson-mapper-asl', - :under=> 'org.codehaus.jackson', - :version => '1.2.1') -MULTIVERSE = "org.multiverse:multiverse-alpha:jar:jar-with-dependencies:0.3" -NETTY = "org.jboss.netty:netty:jar:3.2.0.ALPHA2" -PROTOBUF = "com.google.protobuf:protobuf-java:jar:2.2.0" -REDIS = "com.redis:redisclient:jar:1.0.1" -SJSON = "sjson.json:sjson:jar:0.3" - -Project.local_task "run" - -desc "Akka Chat Sample Module" -define "akka-sample-chat" do - project.version = VERSION_NUMBER - project.group = GROUP - - compile.with AKKA, CONFIGGY - - p artifact(MULTIVERSE).to_s - - package(:jar) - - task "run" do - Java.java "scala.tools.nsc.MainGenericRunner", - :classpath => [ compile.dependencies, compile.target, - ASPECTJ, COMMONS_IO, JACKSON, NETTY, MULTIVERSE, PROTOBUF, REDIS, - SBINARY, SJSON], - :java_args => ["-server"] - end -end \ No newline at end of file diff --git a/akka-samples/akka-sample-chat/README b/akka-samples/akka-sample-chat/README deleted file mode 100644 index dff045d6f8..0000000000 --- a/akka-samples/akka-sample-chat/README +++ /dev/null @@ -1,32 +0,0 @@ -Akka Chat Client/Server Sample Application - -First we need to download, build and start up Redis: - -1. 
Download Redis from http://code.google.com/p/redis/downloads/list. -2. Step into the distribution. -3. Build: 'make install'. -4. Run: './redis-server'. -For details on how to set up the Redis server, have a look at http://code.google.com/p/redis/wiki/QuickStart. - -Then to run the sample: - -1. Install the Redis network storage. Download it from [http://code.google.com/p/redis/]. -2. Open up a shell and start up an instance of Redis. -3. Fire up two shells. For each of them: - - Step down into the root of the Akka distribution. - - Set 'export AKKA_HOME=.' - - Run 'sbt console' to start up a REPL (interpreter). -4. In the first REPL, execute: - - scala> import sample.chat._ - - scala> import akka.actor.Actor._ - - scala> val chatService = actorOf[ChatService].start -5. In the second REPL, execute: - - scala> import sample.chat._ - - scala> Runner.run -6. See the chat simulation run. -7. Run it again to see full speed after first initialization. - -Now you can test client reconnect by killing the console running the ChatService and starting it up again. See the client reconnect take place in the REPL shell. - -That's it. Have fun. - diff --git a/akka-samples/akka-sample-chat/src/main/scala/ChatServer.scala b/akka-samples/akka-sample-chat/src/main/scala/ChatServer.scala deleted file mode 100644 index aa34824bab..0000000000 --- a/akka-samples/akka-sample-chat/src/main/scala/ChatServer.scala +++ /dev/null @@ -1,228 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB . - */ - -package sample.chat - -import scala.collection.mutable.HashMap - -import akka.actor.{SupervisorFactory, Actor, ActorRef, RemoteActor} -import akka.remote.{RemoteNode, RemoteClient} -import akka.persistence.common.PersistentVector -import akka.persistence.redis.RedisStorage -import akka.stm._ -import akka.config.Supervision.{OneForOneStrategy,Permanent} -import akka.util.Logging -import Actor._ - -/****************************************************************************** -Akka Chat Client/Server Sample Application - -First we need to download, build and start up Redis: - -1. Download Redis from http://code.google.com/p/redis/downloads/list. -2. Step into the distribution. -3. Build: 'make install'. -4. Run: './redis-server'. -For details on how to set up the Redis server, have a look at http://code.google.com/p/redis/wiki/QuickStart. - -Then to run the sample: - -1. Fire up two shells. For each of them: - - Step down into the root of the Akka distribution. - - Set 'export AKKA_HOME=.' - - Run 'sbt console' to start up a REPL (interpreter). -2. In the first REPL, execute: - - scala> import sample.chat._ - - scala> import akka.actor.Actor._ - - scala> val chatService = actorOf[ChatService].start -3. In the second REPL, execute: - - scala> import sample.chat._ - - scala> Runner.run -4. See the chat simulation run. -5. Run it again to see full speed after first initialization. - -That's it. Have fun. - -******************************************************************************/ - -/** - * ChatServer's internal events. - */ -sealed trait Event -case class Login(user: String) extends Event -case class Logout(user: String) extends Event -case class GetChatLog(from: String) extends Event -case class ChatLog(log: List[String]) extends Event -case class ChatMessage(from: String, message: String) extends Event - -/** - * Chat client. - */ -class ChatClient(val name: String) { - val chat = RemoteClient.actorFor("chat:service", "localhost", 2552) - - def login = chat ! 
Login(name) - def logout = chat ! Logout(name) - def post(message: String) = chat ! ChatMessage(name, name + ": " + message) - def chatLog = (chat !! GetChatLog(name)).as[ChatLog].getOrElse(throw new Exception("Couldn't get the chat log from ChatServer")) -} - -/** - * Internal chat client session. - */ -class Session(user: String, storage: ActorRef) extends Actor { - private val loginTime = System.currentTimeMillis - private var userLog: List[String] = Nil - - log.info("New session for user [%s] has been created at [%s]", user, loginTime) - - def receive = { - case msg @ ChatMessage(from, message) => - userLog ::= message - storage ! msg - - case msg @ GetChatLog(_) => - storage forward msg - } -} - -/** - * Abstraction of chat storage holding the chat log. - */ -trait ChatStorage extends Actor - -/** - * Redis-backed chat storage implementation. - */ -class RedisChatStorage extends ChatStorage { - self.lifeCycle = Permanent - val CHAT_LOG = "akka.chat.log" - - private var chatLog = RedisStorage.getVector(CHAT_LOG) - - log.info("Redis-based chat storage is starting up...") - - def receive = { - case msg @ ChatMessage(from, message) => - log.debug("New chat message [%s]", message) - atomic { chatLog + message.getBytes("UTF-8") } - - case GetChatLog(_) => - val messageList = atomic { chatLog.map(bytes => new String(bytes, "UTF-8")).toList } - self.reply(ChatLog(messageList)) - } - - override def postRestart(reason: Throwable) = chatLog = RedisStorage.getVector(CHAT_LOG) -} - -/** - * Implements user session management. - *
- * Uses self-type annotation (this: Actor =>) to declare that it needs to be mixed in with an Actor. - */ -trait SessionManagement { this: Actor => - - val storage: ActorRef // needs someone to provide the ChatStorage - val sessions = new HashMap[String, ActorRef] - - protected def sessionManagement: Receive = { - case Login(username) => - log.info("User [%s] has logged in", username) - val session = actorOf(new Session(username, storage)) - session.start - sessions += (username -> session) - - case Logout(username) => - log.info("User [%s] has logged out", username) - val session = sessions(username) - session.stop - sessions -= username - } - - protected def shutdownSessions = - sessions.foreach { case (_, session) => session.stop } -} - -/** - * Implements chat management, e.g. chat message dispatch. - *
- * Uses self-type annotation (this: Actor =>) to declare that it needs to be mixed in with an Actor. - */ -trait ChatManagement { this: Actor => - val sessions: HashMap[String, ActorRef] // needs someone to provide the Session map - - protected def chatManagement: Receive = { - case msg @ ChatMessage(from, _) => sessions(from) ! msg - case msg @ GetChatLog(from) => sessions(from) forward msg - } -} - -/** - * Creates and links a RedisChatStorage. - */ -trait RedisChatStorageFactory { this: Actor => - val storage = this.self.spawnLink[RedisChatStorage] // starts and links ChatStorage -} - -/** - * Chat server. Manages sessions and redirects all other messages to the Session for the client. - */ -trait ChatServer extends Actor { - self.faultHandler = OneForOneStrategy(List(classOf[Exception]), 5, 5000) - val storage: ActorRef - - log.info("Chat server is starting up...") - - // actor message handler - def receive = sessionManagement orElse chatManagement - - // abstract methods to be defined somewhere else - protected def chatManagement: Receive - protected def sessionManagement: Receive - protected def shutdownSessions(): Unit - - override def postStop = { - log.info("Chat server is shutting down...") - shutdownSessions - self.unlink(storage) - storage.stop - } -} - -/** - * Class encapsulating the full Chat Service. - * Start service by invoking: - *
- * val chatService = Actor.actorOf[ChatService].start
- *
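- * A remote client can then look up the service by the name registered in preStart below,
- * exactly as ChatClient above does:
- *
- * val chat = RemoteClient.actorFor("chat:service", "localhost", 2552)
- *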
- */ -class ChatService extends - ChatServer with - SessionManagement with - ChatManagement with - RedisChatStorageFactory { - override def preStart = { - RemoteNode.start("localhost", 2552) - RemoteNode.register("chat:service", self) - } -} - -/** - * Test runner emulating a chat session. - */ -object Runner { - def run = { - val client = new ChatClient("jonas") - - client.login - - client.post("Hi there") - println("CHAT LOG:\n\t" + client.chatLog.log.mkString("\n\t")) - - client.post("Hi again") - println("CHAT LOG:\n\t" + client.chatLog.log.mkString("\n\t")) - - client.logout - } -} diff --git a/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnBecome.scala b/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnBecome.scala deleted file mode 100644 index c762c2da7d..0000000000 --- a/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnBecome.scala +++ /dev/null @@ -1,139 +0,0 @@ -package sample.fsm.dining.become - -//Akka adaptation of -//http://www.dalnefre.com/wp/2010/08/dining-philosophers-in-humus/ - -import akka.actor.{Scheduler, ActorRef, Actor} -import akka.actor.Actor._ -import java.util.concurrent.TimeUnit - -/* - * First we define our messages; they basically speak for themselves - */ -sealed trait DiningHakkerMessage -case class Busy(chopstick: ActorRef) extends DiningHakkerMessage -case class Put(hakker: ActorRef) extends DiningHakkerMessage -case class Take(hakker: ActorRef) extends DiningHakkerMessage -case class Taken(chopstick: ActorRef) extends DiningHakkerMessage -object Eat extends DiningHakkerMessage -object Think extends DiningHakkerMessage - -/* - * A Chopstick is an actor, it can be taken, and put back - */ -class Chopstick(name: String) extends Actor { - self.id = name - - //When a Chopstick is taken by a hakker - //It will refuse to be taken by other hakkers - //But the owning hakker can put it back - def takenBy(hakker: ActorRef): Receive = { - case Take(otherHakker) => - otherHakker ! Busy(self) - case Put(`hakker`) => - become(available) - } - - //When a Chopstick is available, it can be taken by a hakker - def available: Receive = { - case Take(hakker) => - become(takenBy(hakker)) - hakker ! Taken(self) - } - - //A Chopstick begins its existence as available - def receive = available -} - -/* - * A hakker is an awesome dude or dudette who either thinks about hacking or has to eat ;-) - */ -class Hakker(name: String, left: ActorRef, right: ActorRef) extends Actor { - self.id = name - - //When a hakker is thinking it can become hungry - //and try to pick up its chopsticks and eat - def thinking: Receive = { - case Eat => - become(hungry) - left ! Take(self) - right ! 
Take(self) - } - - //When a hakker is hungry it tries to pick up its chopsticks and eat - //When it picks one up, it goes into wait for the other - //If the hakker's first attempt at grabbing a chopstick fails, - //it starts to wait for the response of the other grab - def hungry: Receive = { - case Taken(`left`) => - become(waiting_for(right,left)) - case Taken(`right`) => - become(waiting_for(left,right)) - case Busy(chopstick) => - become(denied_a_chopstick) - } - - //When a hakker is waiting for the last chopstick it can either obtain it - //and start eating, or the other chopstick was busy, and the hakker goes - //back to think about how he should obtain his chopsticks :-) - def waiting_for(chopstickToWaitFor: ActorRef, otherChopstick: ActorRef): Receive = { - case Taken(`chopstickToWaitFor`) => - log.info("%s has picked up %s and %s, and starts to eat",name,left.id,right.id) - become(eating) - Scheduler.scheduleOnce(self,Think,5,TimeUnit.SECONDS) - - case Busy(chopstick) => - become(thinking) - otherChopstick ! Put(self) - self ! Eat - } - - //When the result of the other grab comes back, - //he needs to put it back if he got the other one. - //Then go back and think and try to grab the chopsticks again - def denied_a_chopstick: Receive = { - case Taken(chopstick) => - become(thinking) - chopstick ! Put(self) - self ! Eat - case Busy(chopstick) => - become(thinking) - self ! Eat - } - - //When a hakker is eating, he can decide to start to think, - //then he puts down his chopsticks and starts to think - def eating: Receive = { - case Think => - become(thinking) - left ! Put(self) - right ! Put(self) - log.info("%s puts down his chopsticks and starts to think",name) - Scheduler.scheduleOnce(self,Eat,5,TimeUnit.SECONDS) - } - - //All hakkers start in a non-eating state - def receive = { - case Think => - log.info("%s starts to think",name) - become(thinking) - Scheduler.scheduleOnce(self,Eat,5,TimeUnit.SECONDS) - } -} - -/* - * Alright, here's our test-harness - */ -object DiningHakkers { - def run { - //Create 5 chopsticks - val chopsticks = for(i <- 1 to 5) yield actorOf(new Chopstick("Chopstick "+i)).start - //Create 5 awesome hakkers and assign them their left and right chopstick - val hakkers = for { - (name,i) <- List("Ghosh","Bonér","Klang","Krasser","Manie").zipWithIndex - } yield actorOf(new Hakker(name,chopsticks(i),chopsticks((i+1) % 5))).start - - //Signal all hakkers that they should start thinking, and watch the show - hakkers.foreach(_ ! 
Think) - } -} diff --git a/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnFsm.scala b/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnFsm.scala deleted file mode 100644 index bb90c981d5..0000000000 --- a/akka-samples/akka-sample-fsm/src/main/scala/DiningHakkersOnFsm.scala +++ /dev/null @@ -1,170 +0,0 @@ -package sample.fsm.dining.fsm - -import akka.actor.{ActorRef, Actor, FSM} -import Actor._ - -/* - * Some messages for the chopstick - */ -sealed trait ChopstickMessage -object Take extends ChopstickMessage -object Put extends ChopstickMessage -case class Taken(chopstick: ActorRef) extends ChopstickMessage -case class Busy(chopstick: ActorRef) extends ChopstickMessage - -/** - * Some states the chopstick can be in - */ -sealed trait ChopstickState -case object Available extends ChopstickState -case object Taken extends ChopstickState - -/** - * Some state container for the chopstick - */ -case class TakenBy(hakker: Option[ActorRef]) - -/* - * A chopstick is an actor, it can be taken, and put back - */ -class Chopstick(name: String) extends Actor with FSM[ChopstickState, TakenBy] { - self.id = name - - // When a chopstick is available, it can be taken by some hakker - when(Available) { - case Event(Take, _) => - goto(Taken) using TakenBy(self.sender) replying Taken(self) - } - - // When a chopstick is taken by a hakker - // It will refuse to be taken by other hakkers - // But the owning hakker can put it back - when(Taken) { - case Event(Take, currentState) => - stay replying Busy(self) - case Event(Put, TakenBy(hakker)) if self.sender == hakker => - goto(Available) using TakenBy(None) - } - - // A chopstick begins its existence as available and taken by no one - startWith(Available, TakenBy(None)) -} - -/** - * Some fsm hakker messages - */ -sealed trait FSMHakkerMessage -object Think extends FSMHakkerMessage - -/** - * Some fsm hakker states - */ -sealed trait FSMHakkerState -case object Waiting extends FSMHakkerState -case object Thinking extends FSMHakkerState -case object Hungry extends FSMHakkerState -case object WaitForOtherChopstick extends FSMHakkerState -case object FirstChopstickDenied extends FSMHakkerState -case object Eating extends FSMHakkerState - -/** - * Some state container to keep track of which chopsticks we have - */ -case class TakenChopsticks(left: Option[ActorRef], right: Option[ActorRef]) - -/* - * An FSM hakker is an awesome dude or dudette who either thinks about hacking or has to eat ;-) - */ -class FSMHakker(name: String, left: ActorRef, right: ActorRef) extends Actor with FSM[FSMHakkerState, TakenChopsticks] { - self.id = name - - when(Waiting) { - case Event(Think, _) => - log.info("%s starts to think", name) - startThinking(5000) - } - - //When a hakker is thinking it can become hungry - //and try to pick up its chopsticks and eat - when(Thinking) { - case Event(StateTimeout, _) => - left ! Take - right ! 
Take - goto(Hungry) - } - - // When a hakker is hungry it tries to pick up its chopsticks and eat - // When it picks one up, it goes into wait for the other - // If the hakker's first attempt at grabbing a chopstick fails, - // it starts to wait for the response of the other grab - when(Hungry) { - case Event(Taken(`left`), _) => - goto(WaitForOtherChopstick) using TakenChopsticks(Some(left), None) - case Event(Taken(`right`), _) => - goto(WaitForOtherChopstick) using TakenChopsticks(None, Some(right)) - case Event(Busy(_), _) => - goto(FirstChopstickDenied) - } - - // When a hakker is waiting for the last chopstick it can either obtain it - // and start eating, or the other chopstick was busy, and the hakker goes - // back to think about how he should obtain his chopsticks :-) - when(WaitForOtherChopstick) { - case Event(Taken(`left`), TakenChopsticks(None, Some(right))) => startEating(left, right) - case Event(Taken(`right`), TakenChopsticks(Some(left), None)) => startEating(left, right) - case Event(Busy(chopstick), TakenChopsticks(leftOption, rightOption)) => - leftOption.foreach(_ ! Put) - rightOption.foreach(_ ! Put) - startThinking(10) - } - - private def startEating(left: ActorRef, right: ActorRef): State = { - log.info("%s has picked up %s and %s, and starts to eat", name, left.id, right.id) - goto(Eating) using TakenChopsticks(Some(left), Some(right)) until 5000 - } - - // When the result of the other grab comes back, - // he needs to put it back if he got the other one. - // Then go back and think and try to grab the chopsticks again - when(FirstChopstickDenied) { - case Event(Taken(secondChopstick), _) => - secondChopstick ! Put - startThinking(10) - case Event(Busy(chopstick), _) => - startThinking(10) - } - - // When a hakker is eating, he can decide to start to think, - // then he puts down his chopsticks and starts to think - when(Eating) { - case Event(StateTimeout, _) => - log.info("%s puts down his chopsticks and starts to think", name) - left ! Put - right ! Put - startThinking(5000) - } - - private def startThinking(period: Int): State = { - goto(Thinking) using TakenChopsticks(None, None) until period - } - - //All hakkers start waiting - startWith(Waiting, TakenChopsticks(None, None)) -} - -/* - * Alright, here's our test-harness - */ -object DiningHakkersOnFsm { - - def run = { - // Create 5 chopsticks - val chopsticks = for (i <- 1 to 5) yield actorOf(new Chopstick("Chopstick " + i)).start - // Create 5 awesome fsm hakkers and assign them their left and right chopstick - val hakkers = for{ - (name, i) <- List("Ghosh", "Bonér", "Klang", "Krasser", "Manie").zipWithIndex - } yield actorOf(new FSMHakker(name, chopsticks(i), chopsticks((i + 1) % 5))).start - - hakkers.foreach(_ ! Think) - } -} diff --git a/akka-samples/akka-sample-osgi/src/main/scala/osgiExample.scala b/akka-samples/akka-sample-osgi/src/main/scala/osgiExample.scala deleted file mode 100644 index 276c0033fb..0000000000 --- a/akka-samples/akka-sample-osgi/src/main/scala/osgiExample.scala +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka -package sample.osgi - -import actor.{ Actor, ActorRegistry } -import actor.Actor._ - -import org.osgi.framework.{ BundleActivator, BundleContext } - -class Activator extends BundleActivator { - - def start(context: BundleContext) { - println("Starting the OSGi example ...") - val echo = actorOf[EchoActor].start - val answer = (echo !! 
"OSGi example") - println(answer getOrElse "No answer!") - } - - def stop(context: BundleContext) { - ActorRegistry.shutdownAll() - println("Stopped the OSGi example.") - } -} - -class EchoActor extends Actor { - - override def receive = { - case x => self reply x - } -} diff --git a/akka-samples/akka-sample-pubsub/src/main/scala/RedisPubSub.scala b/akka-samples/akka-sample-pubsub/src/main/scala/RedisPubSub.scala deleted file mode 100644 index c6b6c57da9..0000000000 --- a/akka-samples/akka-sample-pubsub/src/main/scala/RedisPubSub.scala +++ /dev/null @@ -1,107 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB . - */ - -package sample.pubsub - -import com.redis.{RedisClient, PubSubMessage, S, U, M} -import akka.persistence.redis._ -import akka.actor.Actor._ - -/** - * Sample Akka application for Redis PubSub - * - * Prerequisite: Need Redis Server running (the version that supports pubsub) - *
- * 1. Download redis from http://github.com/antirez/redis
- * 2. Build using "make"
- * 3. Run the server as ./redis-server
- *
- * For running this sample application:
- *
- * 1. Open a shell and set AKKA_HOME to the distribution root
- * 2. cd $AKKA_HOME
- * 3. sbt console
- * 4. import sample.pubsub._
- * 5. Sub.sub("a", "b") // starts the subscription server & subscribes to channels "a" and "b"
- *
- * 6. Open up another shell as above and set AKKA_HOME
- * 7. cd $AKKA_HOME
- * 8. sbt console
- * 9. import sample.pubsub._
- * 10. Pub.publish("a", "hello") // the first shell should get the message
- * 11. Pub.publish("c", "hi") // the first shell should NOT get this message
- *
- * 12. Open up a redis-client from where you installed redis and issue a publish command
- *     ./redis-cli publish a "hi there" ## the first shell should get the message
- *
- * 13. Go back to the first shell
- * 14. Sub.unsub("a") // should unsubscribe the first shell from channel "a"
- *
- * 15. Study the callback function defined below. It supports many other message formats.
- *     In the second shell window do the following:
- *     scala> Pub.publish("b", "+c")  // will subscribe the first window to channel "c"
- *     scala> Pub.publish("b", "+d")  // will subscribe the first window to channel "d"
- *     scala> Pub.publish("b", "-c")  // will unsubscribe the first window from channel "c"
- *     scala> Pub.publish("b", "exit")  // will unsubscribe the first window from all channels
- *
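- * A callback is just a function of PubSubMessage => Unit. A minimal sketch of your own
- * (handling only plain messages and ignoring the S/U subscribe/unsubscribe acks; the
- * name quietCallback is hypothetical) could be:
- *
- *     def quietCallback(pubsub: PubSubMessage) = pubsub match {
- *       case M(channel, msg) => println("received on " + channel + ": " + msg)
- *       case _ => // ignore S(channel, count) and U(channel, count)
- *     }
- *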
    - */ - -object Pub { - println("starting publishing service ..") - val r = new RedisClient("localhost", 6379) - val p = actorOf(new Publisher(r)) - p.start - - def publish(channel: String, message: String) = { - p ! Publish(channel, message) - } -} - -object Sub { - println("starting subscription service ..") - val r = new RedisClient("localhost", 6379) - val s = actorOf(new Subscriber(r)) - s.start - s ! Register(callback) - - def sub(channels: String*) = { - s ! Subscribe(channels.toArray) - } - - def unsub(channels: String*) = { - s ! Unsubscribe(channels.toArray) - } - - def callback(pubsub: PubSubMessage) = pubsub match { - case S(channel, no) => println("subscribed to " + channel + " and count = " + no) - case U(channel, no) => println("unsubscribed from " + channel + " and count = " + no) - case M(channel, msg) => - msg match { - // exit will unsubscribe from all channels and stop subscription service - case "exit" => - println("unsubscribe all ..") - r.unsubscribe - - // message "+x" will subscribe to channel x - case x if x startsWith "+" => - val s: Seq[Char] = x - s match { - case Seq('+', rest @ _*) => r.subscribe(rest.toString){ m => } - } - - // message "-x" will unsubscribe from channel x - case x if x startsWith "-" => - val s: Seq[Char] = x - s match { - case Seq('-', rest @ _*) => r.unsubscribe(rest.toString) - } - - // other message receive - case x => - println("received message on channel " + channel + " as : " + x) - } - } -} diff --git a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/Boot.java b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/Boot.java deleted file mode 100644 index 530d396abb..0000000000 --- a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/Boot.java +++ /dev/null @@ -1,28 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package sample.rest.java; - -import akka.config.TypedActorConfigurator; -import static akka.config.Supervision.*; - -public class Boot { - public final static TypedActorConfigurator configurator = new TypedActorConfigurator(); - static { - configurator.configure( - new OneForOneStrategy(new Class[]{Exception.class}, 3, 5000), - new SuperviseTypedActor[] { - new SuperviseTypedActor( - SimpleService.class, - SimpleServiceImpl.class, - permanent(), - 1000), - new SuperviseTypedActor( - PersistentSimpleService.class, - PersistentSimpleServiceImpl.class, - permanent(), - 1000) - }).supervise(); - } -} diff --git a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/PersistentSimpleService.java b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/PersistentSimpleService.java deleted file mode 100644 index 5ed4b65dcf..0000000000 --- a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/PersistentSimpleService.java +++ /dev/null @@ -1,9 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package sample.rest.java; - -public interface PersistentSimpleService { - public String count(); -} diff --git a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/PersistentSimpleServiceImpl.java b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/PersistentSimpleServiceImpl.java deleted file mode 100644 index 5d84b27d02..0000000000 --- a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/PersistentSimpleServiceImpl.java +++ /dev/null @@ -1,52 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package sample.rest.java; - -import akka.actor.TypedActor; 
-import akka.stm.Atomic; -import akka.persistence.common.PersistentMap; -import akka.persistence.cassandra.CassandraStorage; - -import java.nio.ByteBuffer; - -public class PersistentSimpleServiceImpl extends TypedActor implements PersistentSimpleService { - private String KEY = "COUNTER"; - - private boolean hasStartedTicking = false; - private final PersistentMap storage = CassandraStorage.newMap(); - - public String count() { - if (!hasStartedTicking) { - new Atomic() { - public Object atomically() { - storage.put(KEY.getBytes(), ByteBuffer.allocate(4).putInt(0).array()); - return null; - } - }.execute(); - hasStartedTicking = true; - return "Tick: 0\n"; - } else { - int counter = new Atomic() { - public Integer atomically() { - byte[] bytes = (byte[])storage.get(KEY.getBytes()).get(); - int count = ByteBuffer.wrap(bytes).getInt() + 1; - storage.put(KEY.getBytes(), ByteBuffer.allocate(4).putInt(count).array()); - return count; - } - }.execute(); - return "Tick: " + counter + "\n"; - } - } - - @Override - public void preRestart(Throwable cause) { - System.out.println("Prepare for restart by supervisor"); - } - - @Override - public void postRestart(Throwable cause) { - System.out.println("Reinitialize after restart by supervisor"); - } -} diff --git a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/PersistentSimpleServiceRest.java b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/PersistentSimpleServiceRest.java deleted file mode 100644 index 1a1467c250..0000000000 --- a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/PersistentSimpleServiceRest.java +++ /dev/null @@ -1,27 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package sample.rest.java; - -import javax.ws.rs.Path; -import javax.ws.rs.GET; -import javax.ws.rs.Produces; - -/** - * Try service out by invoking (multiple times): - *
    - * curl http://localhost:9998/persistentjavacount
    - * 
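    - * Given the count() implementation below, repeated invocations should print
    - * Tick: 0
    - * Tick: 1
    - * and so on; the counter lives in a Cassandra-backed PersistentMap, so it survives restarts.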
    - * Or browse to the URL from a web browser. - */ -@Path("/persistentjavacount") -public class PersistentSimpleServiceRest { - private PersistentSimpleService service = (PersistentSimpleService) Boot.configurator.getInstance(PersistentSimpleService.class); - - @GET - @Produces({"application/json"}) - public String count() { - return service.count(); - } -} diff --git a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/Receiver.java b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/Receiver.java deleted file mode 100644 index 91654e7c15..0000000000 --- a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/Receiver.java +++ /dev/null @@ -1,9 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package sample.rest.java; - -public interface Receiver { - SimpleService get(); -} diff --git a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/ReceiverImpl.java b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/ReceiverImpl.java deleted file mode 100644 index 4e00f831ab..0000000000 --- a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/ReceiverImpl.java +++ /dev/null @@ -1,13 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package sample.rest.java; - -import akka.actor.TypedActor; - -public class ReceiverImpl extends TypedActor implements Receiver { - public SimpleService get() { - return (SimpleService) getContext().getSender(); - } -} diff --git a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/SimpleService.java b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/SimpleService.java deleted file mode 100644 index ca8ee5e34d..0000000000 --- a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/SimpleService.java +++ /dev/null @@ -1,9 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package sample.rest.java; - -public interface SimpleService { - public String count(); -} diff --git a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/SimpleServiceImpl.java b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/SimpleServiceImpl.java deleted file mode 100644 index e0b95d3c92..0000000000 --- a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/SimpleServiceImpl.java +++ /dev/null @@ -1,51 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package sample.rest.java; - -import akka.actor.TypedActor; -import akka.stm.Atomic; -import akka.stm.TransactionalMap; - -public class SimpleServiceImpl extends TypedActor implements SimpleService { - private String KEY = "COUNTER"; - - private boolean hasStartedTicking = false; - private final TransactionalMap storage = new TransactionalMap(); - private Receiver receiver = TypedActor.newInstance(Receiver.class, ReceiverImpl.class); - - public String count() { - if (!hasStartedTicking) { - new Atomic() { - public Object atomically() { - storage.put(KEY, 0); - return null; - } - }.execute(); - hasStartedTicking = true; - return "Tick: 0\n"; - } else { - // Grabs the sender address and returns it - //SimpleService sender = receiver.receive(); - int counter = new Atomic() { - public Integer atomically() { - int count = (Integer) storage.get(KEY).get() + 1; - storage.put(KEY, count); - return count; - } - }.execute(); - return "Tick: " + counter + "\n"; - } - } - - @Override - public void preRestart(Throwable cause) { - System.out.println("Prepare for restart by supervisor"); - } - - @Override - 
public void postRestart(Throwable cause) { - System.out.println("Reinitialize after restart by supervisor"); - } -} diff --git a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/SimpleServiceRest.java b/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/SimpleServiceRest.java deleted file mode 100644 index eba7c85f2a..0000000000 --- a/akka-samples/akka-sample-rest-java/src/main/java/sample/rest/java/SimpleServiceRest.java +++ /dev/null @@ -1,27 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package sample.rest.java; - -import javax.ws.rs.Path; -import javax.ws.rs.GET; -import javax.ws.rs.Produces; - -/** - * Try service out by invoking (multiple times): - *
    - * curl http://localhost:9998/javacount
    - * 
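    - * Unlike /persistentjavacount above, this counter lives in an in-memory TransactionalMap
    - * (see SimpleServiceImpl below), so it starts over from "Tick: 0" when the application restarts.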
    - * Or browse to the URL from a web browser. - */ -@Path("/javacount") -public class SimpleServiceRest { - private SimpleService service = (SimpleService) Boot.configurator.getInstance(SimpleService.class); - - @GET - @Produces({"application/json"}) - public String count() { - return service.count(); - } -} diff --git a/akka-samples/akka-sample-rest-scala/src/main/scala/Boot.scala b/akka-samples/akka-sample-rest-scala/src/main/scala/Boot.scala deleted file mode 100644 index 1eeb9348a3..0000000000 --- a/akka-samples/akka-sample-rest-scala/src/main/scala/Boot.scala +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Copyright 2010 Autodesk, Inc. All rights reserved. - * Licensed under Apache License, Version 2.0 (the "License"); you may not use this software except in compliance with the License. - * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0. - */ - -package sample.rest.scala - -import akka.actor._ -import akka.actor.Actor._ -import akka.config.Supervision._ -import akka.http._ - - -/** - * Starts up the base services for http (jetty) - */ -class Boot { - val factory = SupervisorFactory( - SupervisorConfig( - OneForOneStrategy(List(classOf[Exception]), 3, 100), - // - // in this particular case, just boot the built-in default root endpoint - // - Supervise( - actorOf[RootEndpoint], - Permanent) :: - Supervise( - actorOf[SimpleAkkaAsyncHttpService], - Permanent) - :: Nil)) - factory.newInstance.start -} - diff --git a/akka-samples/akka-sample-rest-scala/src/main/scala/InterestingService.scala b/akka-samples/akka-sample-rest-scala/src/main/scala/InterestingService.scala deleted file mode 100644 index e00a7e8120..0000000000 --- a/akka-samples/akka-sample-rest-scala/src/main/scala/InterestingService.scala +++ /dev/null @@ -1,187 +0,0 @@ -/** - * Copyright 2010 Autodesk, Inc. All rights reserved. - * Licensed under Apache License, Version 2.0 (the "License"); you may not use this software except in compliance with the License. - * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0. - */ - -package sample.mist - -import akka.actor._ -import akka.actor.Actor._ -import akka.http._ -import javax.servlet.http.HttpServletResponse - - - -/** - * Define a top level service endpoint - * - * @author Garrick Evans - */ -class InterestingService extends Actor with Endpoint { - // - // use the configurable dispatcher - // - self.dispatcher = Endpoint.Dispatcher - - final val ServiceRoot = "/interesting/" - final val Multi = ServiceRoot + "multi/" - - // - // The "multi" endpoint shows forking off multiple actions per request - // It is triggered by POSTing to http://localhost:9998/interesting/multi/{foo} - // Try with/without a header named "Test-Token" - // Try with/without a form parameter named "Data" - // - def hookMultiActionA(uri: String): Boolean = uri startsWith Multi - def provideMultiActionA(uri: String): ActorRef = actorOf(new ActionAActor(complete)).start - - def hookMultiActionB(uri: String): Boolean = uri startsWith Multi - def provideMultiActionB(uri: String): ActorRef = actorOf(new ActionBActor(complete)).start - - // - // this is where you want to attach your endpoint hooks - // - override def preStart { - // - // we expect there to be one root and that it's already been started up - // obviously there are plenty of other ways of obtaining this actor - // the point is that we need to attach something (for starters anyway) - // to the root - // - val root = ActorRegistry.actorsFor(classOf[RootEndpoint]).head - root ! 
Endpoint.Attach(hookMultiActionA, provideMultiActionA) - root ! Endpoint.Attach(hookMultiActionB, provideMultiActionB) - } - - // - // since this actor isn't doing anything else (i.e. not handling other messages) - // just assign the receive func like so... - // otherwise you could do something like: - // def myrecv = {...} - // def receive = myrecv orElse handleHttpRequest - // - def receive = handleHttpRequest - - - // - // this guy completes requests after other actions have occurred - // - lazy val complete = actorOf[ActionCompleteActor].start -} - -class ActionAActor(complete: ActorRef) extends Actor { - import javax.ws.rs.core.MediaType - - def receive = { - // - // handle a post request - // - case post:Post => { - // - // the expected content type of the request - // similar to @Consumes - // - if (post.request.getContentType startsWith MediaType.APPLICATION_FORM_URLENCODED) { - // - // the content type of the response. - // similar to @Produces annotation - // - post.response.setContentType(MediaType.TEXT_HTML) - - // - // get the resource name - // - val name = post.request.getRequestURI.substring("/interesting/multi/".length) - val response = if (name.length % 2 == 0) - "
Action A verified request." - else - "Action A could not verify request.
    " - - post.response.getWriter.write(response) - - // - // notify the next actor to coordinate the response - // - complete ! post - } - else { - post.UnsupportedMediaType("Content-Type request header missing or incorrect (was '" + post.request.getContentType + "' should be '" + MediaType.APPLICATION_FORM_URLENCODED + "')") - } - } - } -} - -class ActionBActor(complete:ActorRef) extends Actor { - import javax.ws.rs.core.MediaType - - def receive = { - // - // handle a post request - // - case post:Post => { - // - // the expected content type of the request - // similar to @Consumes - // - if (post.request.getContentType startsWith MediaType.APPLICATION_FORM_URLENCODED) { - // - // pull some headers and form params - // - def default(any: Any): String = "" - val token = post.getHeaderOrElse("Test-Token", default) - val data = post.getParameterOrElse("Data", default) - - val (resp, status) = (token, data) match { - case ("", _) => ("No token provided", HttpServletResponse.SC_FORBIDDEN) - case (_, "") => ("No data", HttpServletResponse.SC_ACCEPTED) - case _ => ("Data accepted", HttpServletResponse.SC_OK) - } - - // - // update the response body - // - post.response.getWriter.write(resp) - - // - // notify the next actor to coordinate the response - // - complete ! (post, status) - } - else { - post.UnsupportedMediaType("Content-Type request header missing or incorrect (was '" + post.request.getContentType + "' should be '" + MediaType.APPLICATION_FORM_URLENCODED + "')") - } - } - - case other: RequestMethod => - other.NotAllowed("Invalid method for this endpoint") - } -} - -class ActionCompleteActor extends Actor -{ - import collection.mutable.HashMap - - val requests = HashMap.empty[Int, Int] - - def receive = { - case req: RequestMethod => - if (requests contains req.hashCode) - complete(req) - else - requests += (req.hashCode -> 0) - - case t: Tuple2[RequestMethod, Int] => - if (requests contains t._1.hashCode) - complete(t._1) - else - requests += (t._1.hashCode -> t._2) - } - - def complete(req:RequestMethod) = requests.remove(req.hashCode) match { - case Some(HttpServletResponse.SC_FORBIDDEN) => req.Forbidden("") - case Some(HttpServletResponse.SC_ACCEPTED) => req.Accepted("") - case Some(_) => req.OK("") - case _ => - } -} \ No newline at end of file diff --git a/akka-samples/akka-sample-rest-scala/src/main/scala/SimpleAkkaAsyncHttpService.scala b/akka-samples/akka-sample-rest-scala/src/main/scala/SimpleAkkaAsyncHttpService.scala deleted file mode 100644 index 57aa3754b8..0000000000 --- a/akka-samples/akka-sample-rest-scala/src/main/scala/SimpleAkkaAsyncHttpService.scala +++ /dev/null @@ -1,144 +0,0 @@ -/** - * Copyright 2010 Autodesk, Inc. All rights reserved. - * Licensed under Apache License, Version 2.0 (the "License"); you may not use this software except in compliance with the License. - * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0. 
- */ - -package sample.rest.scala - -import akka.actor._ -import akka.actor.Actor._ -import akka.http._ - - - -/** - * Define a top level service endpoint - * Usage: GET or POST to http://localhost:9998/simple/same or http://localhost:9998/simple/new - * - * @author Garrick Evans - */ -class SimpleAkkaAsyncHttpService extends Actor with Endpoint { - // - // use the configurable dispatcher - // - self.dispatcher = Endpoint.Dispatcher - - final val ServiceRoot = "/simple/" - final val ProvideSameActor = ServiceRoot + "same" - final val ProvideNewActor = ServiceRoot + "new" - - // - // there are different ways of doing this - in this case, we'll use a single hook function - // and discriminate in the provider; alternatively we can pair hooks & providers - // - def hook(uri: String): Boolean = uri match { - case ProvideSameActor | ProvideNewActor => true - case _ => false - } - - def provide(uri: String): ActorRef = uri match { - case ProvideSameActor => same - case _ => actorOf[BoringActor].start - } - - // - // this is where you want to attach your endpoint hooks - // - override def preStart { - // - // we expect there to be one root and that it's already been started up - // obviously there are plenty of other ways of obtaining this actor - // the point is that we need to attach something (for starters anyway) - // to the root - // - val root = ActorRegistry.actorsFor(classOf[RootEndpoint]).head - root ! Endpoint.Attach(hook, provide) - } - - // - // since this actor isn't doing anything else (i.e. not handling other messages) - // just assign the receive func like so... - // otherwise you could do something like: - // def myrecv = {...} - // def receive = myrecv orElse handleHttpRequest - // - def receive = handleHttpRequest - - // - // this will be our "same" actor provided when the ProvideSameActor endpoint is hit - // - lazy val same = actorOf[BoringActor].start -} - -/** - * Define a service handler to respond to some HTTP requests - */ -class BoringActor extends Actor { - import java.util.Date - import javax.ws.rs.core.MediaType - - var gets = 0 - var posts = 0 - var lastget: Option[Date] = None - var lastpost: Option[Date] = None - - def receive = { - // - // handle a get request - // - case get: Get => { - // - // the content type of the response. - // similar to @Produces annotation - // - get.response.setContentType(MediaType.TEXT_HTML) - - // - // "work" - // - gets += 1 - lastget = Some(new Date) - - // - // respond - // - val res = "
Gets: "+gets+" Posts: "+posts+" Last Get: "+lastget.getOrElse("Never").toString+" Last Post: "+lastpost.getOrElse("Never").toString+" " - get.OK(res) - } - - // - // handle a post request - // - case post:Post => { - // - // the expected content type of the request - // similar to @Consumes - // - if (post.request.getContentType startsWith MediaType.APPLICATION_FORM_URLENCODED) { - // - // the content type of the response. - // similar to @Produces annotation - // - post.response.setContentType(MediaType.TEXT_HTML) - - // - // "work" - // - posts += 1 - lastpost = Some(new Date) - - // - // respond - // - val res = "
Gets: "+gets+" Posts: "+posts+" Last Get: "+lastget.getOrElse("Never").toString+" Last Post: "+lastpost.getOrElse("Never").toString+"
    " - post.OK(res) - } - else { - post.UnsupportedMediaType("Content-Type request header missing or incorrect (was '" + post.request.getContentType + "' should be '" + MediaType.APPLICATION_FORM_URLENCODED + "')") - } - } - - case other: RequestMethod => other.NotAllowed("Invalid method for this endpoint") - } -} diff --git a/akka-samples/akka-sample-security/src/main/scala/SimpleService.scala b/akka-samples/akka-sample-security/src/main/scala/SimpleService.scala deleted file mode 100644 index bee8dd5bd9..0000000000 --- a/akka-samples/akka-sample-security/src/main/scala/SimpleService.scala +++ /dev/null @@ -1,157 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package sample.security - -import akka.actor.{SupervisorFactory, Actor} -import akka.actor.Actor._ -import akka.config.Supervision._ -import akka.util.Logging -import akka.security.{BasicAuthenticationActor,BasicCredentials,SpnegoAuthenticationActor,DigestAuthenticationActor, UserInfo} -import akka.stm._ -import akka.stm.TransactionalMap -import akka.actor.ActorRegistry.actorFor - -class Boot { - val factory = SupervisorFactory( - SupervisorConfig( - OneForOneStrategy(List(classOf[Exception]), 3, 100), - // Dummy implementations of all authentication actors - // see akka.conf to enable one of these for the AkkaSecurityFilterFactory - Supervise( - actorOf[BasicAuthenticationService], - Permanent) :: - - // Supervise( - // actorOf[DigestAuthenticationService], - // Permanent) :: - // Supervise( - // actorOf[SpnegoAuthenticationService], - // Permanent) :: - - Supervise( - actorOf[SecureTickActor], - Permanent):: Nil)) - - val supervisor = factory.newInstance - supervisor.start -} - -/* - * In akka.conf you can set the FQN of any AuthenticationActor of your wish, under the property name: akka.http.authenticator - */ -class DigestAuthenticationService extends DigestAuthenticationActor { - //If you want to have a distributed nonce-map, you can use something like below, - //don't forget to configure your standalone Cassandra instance - // - //makeTransactionRequired - //override def mkNonceMap = Storage.newMap(CassandraStorageConfig()).asInstanceOf[scala.collection.mutable.Map[String,Long]] - - //Use an in-memory nonce-map as default - override def mkNonceMap = new scala.collection.mutable.HashMap[String, Long] - - //Change this to whatever you want - override def realm = "test" - - //Dummy method that allows you to log on with whatever username with the password "bar" - override def userInfo(username: String): Option[UserInfo] = Some(UserInfo(username, "bar", "ninja" :: "chef" :: Nil)) -} - -class BasicAuthenticationService extends BasicAuthenticationActor { - - //Change this to whatever you want - override def realm = "test" - - //Dummy method that allows you to log on with whatever username - def verify(odc: Option[BasicCredentials]): Option[UserInfo] = odc match { - case Some(dc) => userInfo(dc.username) - case _ => None - } - - //Dummy method that allows you to log on with whatever username with the password "bar" - def userInfo(username: String): Option[UserInfo] = Some(UserInfo(username, "bar", "ninja" :: "chef" :: Nil)) - -} - -class SpnegoAuthenticationService extends SpnegoAuthenticationActor { - def rolesFor(user: String) = "ninja" :: "chef" :: Nil - -} - -/** - * a REST Actor with class level paranoia settings to deny all access - * - * The interesting part is - * @RolesAllowed - * @PermitAll - * @DenyAll - */ -import java.lang.Integer -import javax.annotation.security.{RolesAllowed, DenyAll, PermitAll} 
-import javax.ws.rs.{GET, Path, Produces} - -@Path("/secureticker") -class SecureTickService { - - /** - * allow access for any user to "/secureticker/public" - */ - @GET - @Produces(Array("text/xml")) - @Path("/public") - @PermitAll - def publicTick = tick - - /** - * restrict access to "/secureticker/chef" users with "chef" role - */ - @GET - @Path("/chef") - @Produces(Array("text/xml")) - @RolesAllowed(Array("chef")) - def chefTick = tick - - /** - * access denied for any user to default Path "/secureticker/" - */ - @GET - @Produces(Array("text/xml")) - @DenyAll - def paranoiaTick = tick - - def tick = { - //Fetch the first actor of type SecureTickActor - //Send it the "Tick" message and expect a NodeSeq back - val result = for{a <- actorFor[SecureTickActor] - r <- (a !! "Tick").as[Integer]} yield r - //Return either the resulting NodeSeq or a default one - result match { - case (Some(counter)) => (<success>Tick: {counter}</success>) - case _ => (<error>Error in counter</error>) - } - } -} - -class SecureTickActor extends Actor with Logging { - private val KEY = "COUNTER" - private var hasStartedTicking = false - private val storage = TransactionalMap[String, Integer]() - def receive = { - case "Tick" => if (hasStartedTicking) { - val count = atomic { - val current = storage.get(KEY).get.intValue - val updated = current + 1 - storage.put(KEY, updated) - updated - } - self.reply(new Integer(count)) - } else { - atomic { - storage.put(KEY, 0) - } - hasStartedTicking = true - self.reply(new Integer(0)) - } - } -} diff --git a/akka-samples/akka-sample-security/src/main/webapp/WEB-INF/web.xml b/akka-samples/akka-sample-security/src/main/webapp/WEB-INF/web.xml deleted file mode 100644 index d532f0e3f2..0000000000 --- a/akka-samples/akka-sample-security/src/main/webapp/WEB-INF/web.xml +++ /dev/null @@ -1,19 +0,0 @@ -<web-app> - <display-name>akka-security-samples</display-name> - <servlet> - <servlet-name>AkkaServlet</servlet-name> - <servlet-class>akka.http.AkkaServlet</servlet-class> - </servlet> - <servlet-mapping> - <servlet-name>AkkaServlet</servlet-name> - <url-pattern>/*</url-pattern> - </servlet-mapping> -</web-app> diff --git a/akka-spring/src/main/resources/META-INF/spring.handlers b/akka-spring/src/main/resources/META-INF/spring.handlers deleted file mode 100644 index 0812c4dd2e..0000000000 --- a/akka-spring/src/main/resources/META-INF/spring.handlers +++ /dev/null @@ -1 +0,0 @@ -http\://www.akkasource.org/schema/akka=akka.spring.AkkaNamespaceHandler diff --git a/akka-spring/src/main/resources/META-INF/spring.schemas b/akka-spring/src/main/resources/META-INF/spring.schemas deleted file mode 100644 index 37ec717596..0000000000 --- a/akka-spring/src/main/resources/META-INF/spring.schemas +++ /dev/null @@ -1 +0,0 @@ -http\://scalablesolutions.se/akka/akka-1.0-SNAPSHOT.xsd=akka/spring/akka-1.0-SNAPSHOT.xsd diff --git a/akka-spring/src/main/resources/akka/spring/akka-0.10.xsd b/akka-spring/src/main/resources/akka/spring/akka-0.10.xsd deleted file mode 100644 index e66090fe16..0000000000 --- a/akka-spring/src/main/resources/akka/spring/akka-0.10.xsd +++ /dev/null @@ -1,294 +0,0 @@ [The XSD markup was lost in extraction; the surviving element documentation reads: "Name of the remote host." / "Port of the remote host." / "Name of the interface implemented by implementation class." / "Name of the implementation class." / "The default timeout for '!!' invocations." / "Set this to true if messages should have REQUIRES_NEW semantics." / "Defines the lifecycle, can be either 'permanent' or 'temporary'." / "Supported scopes are 'singleton' and 'prototype'." / "Failover scheme, can be one of 'AllForOne' or 'OneForOne'." / "Maximal number of restarts." / "Time range for maximal number of restarts."] diff --git a/akka-spring/src/main/resources/akka/spring/akka-1.0-SNAPSHOT.xsd b/akka-spring/src/main/resources/akka/spring/akka-1.0-SNAPSHOT.xsd deleted file mode 100644 index 16e3bde6f3..0000000000 --- a/akka-spring/src/main/resources/akka/spring/akka-1.0-SNAPSHOT.xsd +++ /dev/null @@ -1,363 +0,0 @@ [The XSD markup was lost in extraction; in addition to the akka-0.10.xsd documentation above, the surviving element documentation reads: "Management type for remote actors: client managed or server managed." / "Custom service name for server managed actor." / "Bean instance behind the actor" / "The default timeout for '!!' invocations in milliseconds." / "Custom service name or class name for the server managed actor." / "Name of the interface the typed actor implements."] diff --git a/akka-spring/src/main/scala/akka/ActorBeanDefinitionParser.scala b/akka-spring/src/main/scala/akka/ActorBeanDefinitionParser.scala deleted file mode 100644 index 2abb1024d8..0000000000 --- a/akka-spring/src/main/scala/akka/ActorBeanDefinitionParser.scala +++ /dev/null @@ -1,93 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import org.springframework.beans.factory.support.BeanDefinitionBuilder -import org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser -import org.springframework.beans.factory.xml.ParserContext -import AkkaSpringConfigurationTags._ -import org.w3c.dom.Element - - -/** - * Parser for custom namespace configuration. 
- * @author michaelkober - */ -class TypedActorBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with ActorParser { - /* - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) - */ - override def doParse(element: Element, parserContext: ParserContext, builder: BeanDefinitionBuilder) { - val typedActorConf = parseActor(element) - typedActorConf.typed = TYPED_ACTOR_TAG - typedActorConf.setAsProperties(builder) - } - - /* - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element) - */ - override def getBeanClass(element: Element): Class[_] = classOf[ActorFactoryBean] -} - - -/** - * Parser for custom namespace configuration. - * @author michaelkober - */ -class UntypedActorBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with ActorParser { - /* - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) - */ - override def doParse(element: Element, parserContext: ParserContext, builder: BeanDefinitionBuilder) { - val untypedActorConf = parseActor(element) - untypedActorConf.typed = UNTYPED_ACTOR_TAG - untypedActorConf.setAsProperties(builder) - } - - /* - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element) - */ - override def getBeanClass(element: Element): Class[_] = classOf[ActorFactoryBean] -} - - -/** - * Parser for custom namespace configuration. - * @author michaelkober - */ -class ActorForBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with ActorForParser { - /* - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) - */ - override def doParse(element: Element, parserContext: ParserContext, builder: BeanDefinitionBuilder) { - val actorForConf = parseActorFor(element) - actorForConf.setAsProperties(builder) - } - - /* - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element) - */ - override def getBeanClass(element: Element): Class[_] = classOf[ActorForFactoryBean] -} - -/** - * Parser for custom namespace configuration. 
- * @author michaelkober - */ -class ConfigBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with ActorParser { - /* - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) - */ - override def doParse(element: Element, parserContext: ParserContext, builder: BeanDefinitionBuilder) { - val location = element.getAttribute(LOCATION) - builder.addPropertyValue(LOCATION, location) - } - - /* - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element) - */ - override def getBeanClass(element: Element): Class[_] = classOf[ConfiggyPropertyPlaceholderConfigurer] - - override def shouldGenerateId() = true -} diff --git a/akka-spring/src/main/scala/akka/ActorFactoryBean.scala b/akka-spring/src/main/scala/akka/ActorFactoryBean.scala deleted file mode 100644 index 0d3d407475..0000000000 --- a/akka-spring/src/main/scala/akka/ActorFactoryBean.scala +++ /dev/null @@ -1,265 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.spring - -import org.springframework.beans.{BeanUtils,BeansException,BeanWrapper,BeanWrapperImpl} -import akka.remote.{RemoteClient, RemoteServer} -import org.springframework.beans.factory.config.AbstractFactoryBean -import org.springframework.context.{ApplicationContext,ApplicationContextAware} -import org.springframework.util.StringUtils - -import akka.actor.{ActorRef, AspectInitRegistry, TypedActorConfiguration, TypedActor,Actor} -import akka.dispatch.MessageDispatcher -import akka.util.{Logging, Duration} -import scala.reflect.BeanProperty -import java.net.InetSocketAddress - -/** - * Exception to use when something goes wrong during bean creation. - * - * @author Johan Rask - */ -class AkkaBeansException(message: String, cause:Throwable) extends BeansException(message, cause) { - def this(message: String) = this(message, null) -} - -/** - * Factory bean for typed and untyped actors. - * - * @author michaelkober - * @author Johan Rask - * @author Martin Krasser - * @author Jonas Bonér - */ -class ActorFactoryBean extends AbstractFactoryBean[AnyRef] with Logging with ApplicationContextAware { - import StringReflect._ - import AkkaSpringConfigurationTags._ - - @BeanProperty var typed: String = "" - @BeanProperty var interface: String = "" - @BeanProperty var implementation: String = "" - @BeanProperty var beanRef: String = null - @BeanProperty var timeoutStr: String = "" - @BeanProperty var host: String = "" - @BeanProperty var port: String = "" - @BeanProperty var serverManaged: Boolean = false - @BeanProperty var serviceName: String = "" - @BeanProperty var lifecycle: String = "" - @BeanProperty var dispatcher: DispatcherProperties = _ - @BeanProperty var scope: String = VAL_SCOPE_SINGLETON - @BeanProperty var property: PropertyEntries = _ - @BeanProperty var applicationContext: ApplicationContext = _ - - lazy val timeout = parseTimeout - - private def parseTimeout() : Long = { - var result = -1L - try { - result = if (!timeoutStr.isEmpty) timeoutStr.toLong else -1L - } catch { - case nfe: NumberFormatException => - log.error(nfe, "could not parse timeout %s", timeoutStr) - throw nfe - } - result - } - - // Holds info about if deps have been set or not. Depends on - // if interface is specified or not. 
We must set deps on - // target instance if interface is specified - var hasSetDependencies = false - - override def isSingleton = scope.equals(VAL_SCOPE_SINGLETON) - - /* - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - def getObjectType: Class[AnyRef] = try { - implementation.toClass - } catch { - // required by contract to return null - case e: IllegalArgumentException => null - } - - /* - * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() - */ - def createInstance: AnyRef = { - val ref = typed match { - case TYPED_ACTOR_TAG => val typedActor = createTypedInstance() - setProperties(AspectInitRegistry.initFor(typedActor).targetInstance) - typedActor - case UNTYPED_ACTOR_TAG => val untypedActor = createUntypedInstance() - setProperties(untypedActor.actor) - untypedActor - case _ => throw new IllegalArgumentException("Unknown actor type") - } - ref - } - - private[akka] def createTypedInstance() : AnyRef = { - if ((interface eq null) || interface == "") throw new AkkaBeansException( - "The 'interface' part of the 'akka:typed-actor' element in the Spring config file can't be null or empty string") - if (((implementation eq null) || implementation == "") && (beanRef eq null)) throw new AkkaBeansException( - "Either 'implementation' or 'ref' must be specified as attribute of the 'akka:typed-actor' element in the Spring config file") - - val typedActor: AnyRef = if (beanRef eq null ) { - TypedActor.newInstance(interface.toClass, implementation.toClass, createConfig) - } - else - { - TypedActor.newInstance(interface.toClass, getBeanFactory().getBean(beanRef), createConfig) - } - - - if (isRemote && serverManaged) { - val server = RemoteServer.getOrCreateServer(new InetSocketAddress(host, port.toInt)) - if (serviceName.isEmpty) { - server.registerTypedActor(interface, typedActor) - } else { - server.registerTypedActor(serviceName, typedActor) - } - } - typedActor - } - - /** - * Create an UntypedActor. - */ - private[akka] def createUntypedInstance() : ActorRef = { - if (((implementation eq null) || implementation == "") && (beanRef eq null)) throw new AkkaBeansException( - "Either 'implementation' or 'ref' must be specified as attribute of the 'akka:untyped-actor' element in the Spring config file") - val actorRef = if (beanRef eq null ) - Actor.actorOf(implementation.toClass) - else - Actor.actorOf(getBeanFactory().getBean(beanRef).asInstanceOf[Actor]) - - if (timeout > 0) { - actorRef.setTimeout(timeout) - } - if (isRemote) { - if (serverManaged) { - val server = RemoteServer.getOrCreateServer(new InetSocketAddress(host, port.toInt)) - if (serviceName.isEmpty) { - server.register(actorRef) - } else { - server.register(serviceName, actorRef) - } - } else { - actorRef.makeRemote(host, port.toInt) - } - } - if (hasDispatcher) { - if (dispatcher.dispatcherType != THREAD_BASED){ - actorRef.setDispatcher(dispatcherInstance()) - } else { - actorRef.setDispatcher(dispatcherInstance(Some(actorRef))) - } - } - actorRef - } - - /** - * Stop the actor if it is a singleton. - */ - override def destroyInstance(instance: AnyRef) { - typed match { - case TYPED_ACTOR_TAG => TypedActor.stop(instance) - case UNTYPED_ACTOR_TAG => instance.asInstanceOf[ActorRef].stop - } - } - - private def setProperties(ref: AnyRef): AnyRef = { - if (hasSetDependencies) return ref - log.debug("Processing properties and dependencies for implementation class\n\t[%s]", implementation) - val beanWrapper = new BeanWrapperImpl(ref); - if (ref.isInstanceOf[ApplicationContextAware]) { - log.debug("Setting application context") - beanWrapper.setPropertyValue("applicationContext", applicationContext) - } - for (entry <- property.entryList) { - val propertyDescriptor = BeanUtils.getPropertyDescriptor(ref.getClass, entry.name) - val method = propertyDescriptor.getWriteMethod - if (StringUtils.hasText(entry.ref)) { - log.debug("Setting property %s with bean ref %s using method %s", entry.name, entry.ref, method.getName) - method.invoke(ref,getBeanFactory().getBean(entry.ref)) - } else if(StringUtils.hasText(entry.value)) { - log.debug("Setting property %s with value %s using method %s", entry.name, entry.value, method.getName) - beanWrapper.setPropertyValue(entry.name,entry.value) - } else throw new AkkaBeansException("Either property@ref or property@value must be set on property element") - } - ref - } - - - private[akka] def createConfig: TypedActorConfiguration = { - val config = new TypedActorConfiguration().timeout(Duration(timeout, "millis")) - if (isRemote && !serverManaged) config.makeRemote(host, port.toInt) - if (hasDispatcher) { - if (dispatcher.dispatcherType != THREAD_BASED) { - config.dispatcher(dispatcherInstance()) - } else { - config.threadBasedDispatcher() - } - } - config - } - - private[akka] def isRemote = (host ne null) && (!host.isEmpty) - - private[akka] def hasDispatcher = - (dispatcher ne null) && - (dispatcher.dispatcherType ne null) && - (!dispatcher.dispatcherType.isEmpty) - - /** - * Create dispatcher instance with dispatcher properties. - * @param actorRef actorRef for thread based dispatcher - * @return new dispatcher instance - */ - private[akka] def dispatcherInstance(actorRef: Option[ActorRef] = None) : MessageDispatcher = { - import DispatcherFactoryBean._ - if (dispatcher.dispatcherType != THREAD_BASED) { - createNewInstance(dispatcher) - } else { - createNewInstance(dispatcher, actorRef) - } - } -} - -/** - * Factory bean for remote client actor-for.
- * - * @author michaelkober - */ -class ActorForFactoryBean extends AbstractFactoryBean[AnyRef] with Logging with ApplicationContextAware { - import StringReflect._ - import AkkaSpringConfigurationTags._ - - @BeanProperty var interface: String = "" - @BeanProperty var host: String = "" - @BeanProperty var port: String = "" - @BeanProperty var serviceName: String = "" - @BeanProperty var applicationContext: ApplicationContext = _ - - override def isSingleton = false - - /* - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - def getObjectType: Class[AnyRef] = classOf[AnyRef] - - /* - * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() - */ - def createInstance: AnyRef = { - if (interface.isEmpty) { - RemoteClient.actorFor(serviceName, host, port.toInt) - } else { - RemoteClient.typedActorFor(interface.toClass, serviceName, host, port.toInt) - } - } -} - diff --git a/akka-spring/src/main/scala/akka/ActorParser.scala b/akka-spring/src/main/scala/akka/ActorParser.scala deleted file mode 100644 index da059d2f8e..0000000000 --- a/akka-spring/src/main/scala/akka/ActorParser.scala +++ /dev/null @@ -1,231 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import org.springframework.util.xml.DomUtils -import org.w3c.dom.Element -import scala.collection.JavaConversions._ -import akka.util.Logging - -/** - * Parser trait for custom namespace configuration for typed-actor. - * @author michaelkober - * @author Johan Rask - * @author Martin Krasser - */ -trait ActorParser extends BeanParser with DispatcherParser { - import AkkaSpringConfigurationTags._ - - /** - * Parses the given element and returns a TypedActorProperties. - * @param element dom element to parse - * @return configuration for the typed actor - */ - def parseActor(element: Element): ActorProperties = { - val objectProperties = new ActorProperties() - val remoteElement = DomUtils.getChildElementByTagName(element, REMOTE_TAG); - val dispatcherElement = DomUtils.getChildElementByTagName(element, DISPATCHER_TAG) - val propertyEntries = DomUtils.getChildElementsByTagName(element, PROPERTYENTRY_TAG) - - if (remoteElement ne null) { - objectProperties.host = mandatory(remoteElement, HOST) - objectProperties.port = mandatory(remoteElement, PORT) - objectProperties.serverManaged = (remoteElement.getAttribute(MANAGED_BY) ne null) && (remoteElement.getAttribute(MANAGED_BY).equals(SERVER_MANAGED)) - val serviceName = remoteElement.getAttribute(SERVICE_NAME) - if ((serviceName ne null) && (!serviceName.isEmpty)) { - objectProperties.serviceName = serviceName - objectProperties.serverManaged = true - } - } - - if (dispatcherElement ne null) { - val dispatcherProperties = parseDispatcher(dispatcherElement) - objectProperties.dispatcher = dispatcherProperties - } - - for (element <- propertyEntries) { - val entry = new PropertyEntry - entry.name = element.getAttribute("name"); - entry.value = element.getAttribute("value") - entry.ref = element.getAttribute("ref") - objectProperties.propertyEntries.add(entry) - } - - objectProperties.timeoutStr = element.getAttribute(TIMEOUT) - objectProperties.target = if (element.getAttribute(IMPLEMENTATION).isEmpty) null else element.getAttribute(IMPLEMENTATION) - objectProperties.beanRef = if (element.getAttribute(BEANREF).isEmpty) null else element.getAttribute(BEANREF) - - if (objectProperties.target == null && objectProperties.beanRef == null) { - throw new IllegalArgumentException("Mandatory attribute missing, you 
need to provide either implementation or ref") - } - - if (element.hasAttribute(INTERFACE)) { - objectProperties.interface = element.getAttribute(INTERFACE) - } - if (element.hasAttribute(LIFECYCLE)) { - objectProperties.lifecycle = element.getAttribute(LIFECYCLE) - } - if (element.hasAttribute(SCOPE)) { - objectProperties.scope = element.getAttribute(SCOPE) - } - - objectProperties - } - -} - -/** - * Parser trait for custom namespace configuration for RemoteClient actor-for. - * @author michaelkober - */ -trait ActorForParser extends BeanParser { - import AkkaSpringConfigurationTags._ - - /** - * Parses the given element and returns an ActorForProperties. - * @param element dom element to parse - * @return configuration for the remote actor reference - */ - def parseActorFor(element: Element): ActorForProperties = { - val objectProperties = new ActorForProperties() - - objectProperties.host = mandatory(element, HOST) - objectProperties.port = mandatory(element, PORT) - objectProperties.serviceName = mandatory(element, SERVICE_NAME) - if (element.hasAttribute(INTERFACE)) { - objectProperties.interface = element.getAttribute(INTERFACE) - } - objectProperties - } - -} - -/** - * Base trait with utility methods for bean parsing. - */ -trait BeanParser extends Logging { - - /** - * Get a mandatory element attribute. - * @param element the element with the mandatory attribute - * @param attribute name of the mandatory attribute - */ - def mandatory(element: Element, attribute: String): String = { - if ((element.getAttribute(attribute) eq null) || (element.getAttribute(attribute).isEmpty)) { - throw new IllegalArgumentException("Mandatory attribute missing: " + attribute) - } else { - element.getAttribute(attribute) - } - } - - /** - * Get a mandatory child element. - * @param element the parent element - * @param childName name of the mandatory child element - */ - def mandatoryElement(element: Element, childName: String): Element = { - val childElement = DomUtils.getChildElementByTagName(element, childName); - if (childElement eq null) { - throw new IllegalArgumentException("Mandatory element missing: '" + childName + "'") - } else { - childElement - } - } - -} - - -/** - * Parser trait for custom namespace for Akka dispatcher configuration. - * @author michaelkober - */ -trait DispatcherParser extends BeanParser { - import AkkaSpringConfigurationTags._ - - /** - * Parses the given element and returns a DispatcherProperties.
- * @param element dom element to parse - * @return configuration for the dispatcher - */ - def parseDispatcher(element: Element): DispatcherProperties = { - val properties = new DispatcherProperties() - var dispatcherElement = element - if (hasRef(element)) { - val ref = element.getAttribute(REF) - dispatcherElement = element.getOwnerDocument.getElementById(ref) - if (dispatcherElement eq null) { - throw new IllegalArgumentException("Referenced dispatcher not found: '" + ref + "'") - } - } - - properties.dispatcherType = mandatory(dispatcherElement, TYPE) - if (properties.dispatcherType == THREAD_BASED) { - val allowedParentNodes = "akka:typed-actor" :: "akka:untyped-actor" :: "typed-actor" :: "untyped-actor" :: Nil - if (!allowedParentNodes.contains(dispatcherElement.getParentNode.getNodeName)) { - throw new IllegalArgumentException("Thread based dispatcher must be nested in 'typed-actor' or 'untyped-actor' element!") - } - } - - if (properties.dispatcherType == HAWT) { // no name for HawtDispatcher - properties.name = dispatcherElement.getAttribute(NAME) - if (dispatcherElement.hasAttribute(AGGREGATE)) { - properties.aggregate = dispatcherElement.getAttribute(AGGREGATE).toBoolean - } - } else { - properties.name = mandatory(dispatcherElement, NAME) - } - - val threadPoolElement = DomUtils.getChildElementByTagName(dispatcherElement, THREAD_POOL_TAG); - if (threadPoolElement ne null) { - if (properties.dispatcherType == THREAD_BASED) { - throw new IllegalArgumentException("Element 'thread-pool' not allowed for this dispatcher type.") - } - val threadPoolProperties = parseThreadPool(threadPoolElement) - properties.threadPool = threadPoolProperties - } - properties - } - - /** - * Parses the given element and returns a ThreadPoolProperties. - * @param element dom element to parse - * @return configuration for the thread pool - */ - def parseThreadPool(element: Element): ThreadPoolProperties = { - val properties = new ThreadPoolProperties() - properties.queue = element.getAttribute(QUEUE) - if (element.hasAttribute(CAPACITY)) { - properties.capacity = element.getAttribute(CAPACITY).toInt - } - if (element.hasAttribute(BOUND)) { - properties.bound = element.getAttribute(BOUND).toInt - } - if (element.hasAttribute(FAIRNESS)) { - properties.fairness = element.getAttribute(FAIRNESS).toBoolean - } - if (element.hasAttribute(CORE_POOL_SIZE)) { - properties.corePoolSize = element.getAttribute(CORE_POOL_SIZE).toInt - } - if (element.hasAttribute(MAX_POOL_SIZE)) { - properties.maxPoolSize = element.getAttribute(MAX_POOL_SIZE).toInt - } - if (element.hasAttribute(KEEP_ALIVE)) { - properties.keepAlive = element.getAttribute(KEEP_ALIVE).toLong - } - if (element.hasAttribute(REJECTION_POLICY)) { - properties.rejectionPolicy = element.getAttribute(REJECTION_POLICY) - } - if (element.hasAttribute(MAILBOX_CAPACITY)) { - properties.mailboxCapacity = element.getAttribute(MAILBOX_CAPACITY).toInt - } - properties - } - - def hasRef(element: Element): Boolean = { - val ref = element.getAttribute(REF) - (ref ne null) && !ref.isEmpty - } - -} - diff --git a/akka-spring/src/main/scala/akka/ActorProperties.scala b/akka-spring/src/main/scala/akka/ActorProperties.scala deleted file mode 100644 index d0e7c49392..0000000000 --- a/akka-spring/src/main/scala/akka/ActorProperties.scala +++ /dev/null @@ -1,78 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.spring - -import org.springframework.beans.factory.support.BeanDefinitionBuilder -import AkkaSpringConfigurationTags._ - -/** - * Data 
container for actor configuration data. - * @author michaelkober - * @author Martin Krasser - */ -class ActorProperties { - var typed: String = "" - var target: String = "" - var beanRef: String = "" - var timeoutStr: String = "" - var interface: String = "" - var host: String = "" - var port: String = "" - var serverManaged: Boolean = false - var serviceName: String = "" - var lifecycle: String = "" - var scope:String = VAL_SCOPE_SINGLETON - var dispatcher: DispatcherProperties = _ - var propertyEntries = new PropertyEntries() - - - /** - * Sets the properties to the given builder. - * @param builder bean definition builder - */ - def setAsProperties(builder: BeanDefinitionBuilder) { - builder.addPropertyValue("typed", typed) - builder.addPropertyValue(HOST, host) - builder.addPropertyValue(PORT, port) - builder.addPropertyValue("serverManaged", serverManaged) - builder.addPropertyValue("serviceName", serviceName) - builder.addPropertyValue("timeoutStr", timeoutStr) - builder.addPropertyValue(IMPLEMENTATION, target) - builder.addPropertyValue("beanRef", beanRef) - builder.addPropertyValue(INTERFACE, interface) - builder.addPropertyValue(LIFECYCLE, lifecycle) - builder.addPropertyValue(SCOPE, scope) - builder.addPropertyValue(DISPATCHER_TAG, dispatcher) - builder.addPropertyValue(PROPERTYENTRY_TAG,propertyEntries) - } - - def timeout() : Long = { - if (!timeoutStr.isEmpty) timeoutStr.toLong else -1L - } - -} - -/** - * Data container for actor configuration data. - * @author michaelkober - */ -class ActorForProperties { - var interface: String = "" - var host: String = "" - var port: String = "" - var serviceName: String = "" - - /** - * Sets the properties to the given builder. - * @param builder bean definition builder - */ - def setAsProperties(builder: BeanDefinitionBuilder) { - builder.addPropertyValue(HOST, host) - builder.addPropertyValue(PORT, port) - builder.addPropertyValue("serviceName", serviceName) - builder.addPropertyValue(INTERFACE, interface) - } - -} diff --git a/akka-spring/src/main/scala/akka/AkkaNamespaceHandler.scala b/akka-spring/src/main/scala/akka/AkkaNamespaceHandler.scala deleted file mode 100644 index 38041a3ea4..0000000000 --- a/akka-spring/src/main/scala/akka/AkkaNamespaceHandler.scala +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import org.springframework.beans.factory.xml.NamespaceHandlerSupport -import AkkaSpringConfigurationTags._ - -/** - * Custom spring namespace handler for Akka. 
- * @author michaelkober - */ -class AkkaNamespaceHandler extends NamespaceHandlerSupport { - def init = { - registerBeanDefinitionParser(CONFIG_TAG, new ConfigBeanDefinitionParser()); - registerBeanDefinitionParser(TYPED_ACTOR_TAG, new TypedActorBeanDefinitionParser()) - registerBeanDefinitionParser(UNTYPED_ACTOR_TAG, new UntypedActorBeanDefinitionParser()) - registerBeanDefinitionParser(SUPERVISION_TAG, new SupervisionBeanDefinitionParser()) - registerBeanDefinitionParser(DISPATCHER_TAG, new DispatcherBeanDefinitionParser()) - registerBeanDefinitionParser(CAMEL_SERVICE_TAG, new CamelServiceBeanDefinitionParser) - registerBeanDefinitionParser(ACTOR_FOR_TAG, new ActorForBeanDefinitionParser()); - } -} diff --git a/akka-spring/src/main/scala/akka/AkkaSpringConfigurationTags.scala b/akka-spring/src/main/scala/akka/AkkaSpringConfigurationTags.scala deleted file mode 100644 index 6548495070..0000000000 --- a/akka-spring/src/main/scala/akka/AkkaSpringConfigurationTags.scala +++ /dev/null @@ -1,115 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -/** - * XML configuration tags. - * @author michaelkober - * @author Martin Krasser - */ -object AkkaSpringConfigurationTags { - - // --- TAGS - // - // top level tags - val CONFIG_TAG = "property-placeholder" - val TYPED_ACTOR_TAG = "typed-actor" - val UNTYPED_ACTOR_TAG = "untyped-actor" - val SUPERVISION_TAG = "supervision" - val DISPATCHER_TAG = "dispatcher" - val PROPERTYENTRY_TAG = "property" - val CAMEL_SERVICE_TAG = "camel-service" - val ACTOR_FOR_TAG = "actor-for" - - // actor sub tags - val REMOTE_TAG = "remote" - - // supervision sub tags - val TYPED_ACTORS_TAG = "typed-actors" - val UNTYPED_ACTORS_TAG = "untyped-actors" - val STRATEGY_TAG = "restart-strategy" - val TRAP_EXISTS_TAG = "trap-exits" - val TRAP_EXIT_TAG = "trap-exit" - - // dispatcher sub tags - val THREAD_POOL_TAG = "thread-pool" - - // camel-service sub tags - val CAMEL_CONTEXT_TAG = "camel-context" - - // --- ATTRIBUTES - // - // actor attributes - val TIMEOUT = "timeout" - val IMPLEMENTATION = "implementation" - val BEANREF = "ref" - val INTERFACE = "interface" - val HOST = "host" - val PORT = "port" - val MANAGED_BY = "managed-by" - val SERVICE_NAME = "service-name" - val LIFECYCLE = "lifecycle" - val SCOPE = "scope" - - // supervision attributes - val FAILOVER = "failover" - val RETRIES = "retries" - val TIME_RANGE = "timerange" - - // dispatcher attributes - val NAME = "name" - val REF = "ref" - val TYPE = "type" - val AGGREGATE = "aggregate" // HawtDispatcher - - // thread pool attributes - val QUEUE = "queue" - val CAPACITY = "capacity" - val FAIRNESS = "fairness" - val CORE_POOL_SIZE = "core-pool-size" - val MAX_POOL_SIZE = "max-pool-size" - val KEEP_ALIVE = "keep-alive" - val BOUND = "bound" - val REJECTION_POLICY = "rejection-policy" - val MAILBOX_CAPACITY = "mailbox-capacity" - - // config attribute - val LOCATION = "location" - - // --- VALUES - // - // Lifecycle - val VAL_LIFECYCYLE_TEMPORARY = "temporary" - val VAL_LIFECYCYLE_PERMANENT = "permanent" - - val VAL_SCOPE_SINGLETON = "singleton" - val VAL_SCOPE_PROTOTYPE = "prototype" - - // Failover - val VAL_ALL_FOR_ONE = "AllForOne" - val VAL_ONE_FOR_ONE = "OneForOne" - - // rejection policies - val VAL_ABORT_POLICY = "abort-policy" - val VAL_CALLER_RUNS_POLICY = "caller-runs-policy" - val VAL_DISCARD_OLDEST_POLICY = "discard-oldest-policy" - val VAL_DISCARD_POLICY = "discard-policy" - - // dispatcher queue types - val VAL_BOUNDED_LINKED_BLOCKING_QUEUE = "bounded-linked-blocking-queue" - val VAL_UNBOUNDED_LINKED_BLOCKING_QUEUE = "unbounded-linked-blocking-queue" - val VAL_SYNCHRONOUS_QUEUE = "synchronous-queue" - val VAL_BOUNDED_ARRAY_BLOCKING_QUEUE = "bounded-array-blocking-queue" - - // dispatcher types - val EXECUTOR_BASED_EVENT_DRIVEN = "executor-based-event-driven" - val EXECUTOR_BASED_EVENT_DRIVEN_WORK_STEALING = "executor-based-event-driven-work-stealing" - val THREAD_BASED = "thread-based" - val HAWT = "hawt" - - // managed by types - val SERVER_MANAGED = "server" - val CLIENT_MANAGED = "client" - -} diff --git a/akka-spring/src/main/scala/akka/CamelServiceBeanDefinitionParser.scala b/akka-spring/src/main/scala/akka/CamelServiceBeanDefinitionParser.scala deleted file mode 100644 index 4025a831a8..0000000000 --- a/akka-spring/src/main/scala/akka/CamelServiceBeanDefinitionParser.scala +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import org.springframework.beans.factory.support.BeanDefinitionBuilder -import org.springframework.beans.factory.xml.{ParserContext, AbstractSingleBeanDefinitionParser} -import org.springframework.util.xml.DomUtils -import org.w3c.dom.Element - -import akka.spring.AkkaSpringConfigurationTags._ - - -/** - * Parser for <camel-service> elements. - * - * @author Martin Krasser - */ -class CamelServiceBeanDefinitionParser extends AbstractSingleBeanDefinitionParser { - /** - * Parses the <camel-service> element. If a nested <camel-context> element - * is defined then the referenced context is set on the {@link CamelServiceFactoryBean}. - */ - override def doParse(element: Element, parserContext: ParserContext, builder: BeanDefinitionBuilder) { - val camelContextElement = DomUtils.getChildElementByTagName(element, CAMEL_CONTEXT_TAG); - if (camelContextElement ne null) { - val camelContextReference = camelContextElement.getAttribute("ref") - builder.addPropertyReference("camelContext", camelContextReference) - } - } - - /** - * Returns the class of {@link CamelServiceFactoryBean} - */ - override def getBeanClass(element: Element): Class[_] = classOf[CamelServiceFactoryBean] - - /** - * Returns true. - */ - override def shouldGenerateIdAsFallback = true -} diff --git a/akka-spring/src/main/scala/akka/CamelServiceFactoryBean.scala b/akka-spring/src/main/scala/akka/CamelServiceFactoryBean.scala deleted file mode 100644 index 337413f0eb..0000000000 --- a/akka-spring/src/main/scala/akka/CamelServiceFactoryBean.scala +++ /dev/null @@ -1,45 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import org.apache.camel.CamelContext -import org.springframework.beans.factory.{DisposableBean, InitializingBean, FactoryBean} - -import akka.camel.{CamelContextManager, CamelService, CamelServiceFactory} - -/** - * Factory bean for a {@link CamelService}. - * - * @author Martin Krasser - */ -class CamelServiceFactoryBean extends FactoryBean[CamelService] with InitializingBean with DisposableBean { - @scala.reflect.BeanProperty var camelContext: CamelContext = _ - - var instance: CamelService = _ - - def isSingleton = true - - def getObjectType = classOf[CamelService] - - def getObject = instance - - /** - * Initializes the {@link CamelContextManager} with the configured camelContext if defined, then - * creates and starts the {@link CamelService} singleton.
- */ - def afterPropertiesSet = { - if (camelContext ne null) { - CamelContextManager.init(camelContext) - } - instance = CamelServiceFactory.createCamelService - instance.start - } - - /** - * Stops the {@link CamelService} singleton. - */ - def destroy = { - instance.stop - } -} diff --git a/akka-spring/src/main/scala/akka/ConfiggyPropertyPlaceholderConfigurer.scala b/akka-spring/src/main/scala/akka/ConfiggyPropertyPlaceholderConfigurer.scala deleted file mode 100644 index e32c331688..0000000000 --- a/akka-spring/src/main/scala/akka/ConfiggyPropertyPlaceholderConfigurer.scala +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer -import org.springframework.core.io.Resource -import net.lag.configgy.Configgy -import java.util.Properties - -/** - * ConfiggyPropertyPlaceholderConfigurer. Property resource configurer for configgy files. - */ -class ConfiggyPropertyPlaceholderConfigurer extends PropertyPlaceholderConfigurer { - - /** - * Sets the akka properties as local properties, leaves the location empty. - * @param configgyResource akka.conf - */ - override def setLocation(configgyResource: Resource) { - if (configgyResource eq null) throw new IllegalArgumentException("Property 'config' must be set") - val properties = loadAkkaConfig(configgyResource) - setProperties(properties) - } - - /** - * Load the akka.conf and transform to properties. - */ - private def loadAkkaConfig(configgyResource: Resource) : Properties = { - Configgy.configure(configgyResource.getFile.getPath) - val config = Configgy.config - val properties = new Properties() - config.asMap.foreach {case (k, v) => properties.put(k, v); println("(k,v)=" + k + ", " + v)} - properties - } - -} diff --git a/akka-spring/src/main/scala/akka/DispatcherBeanDefinitionParser.scala b/akka-spring/src/main/scala/akka/DispatcherBeanDefinitionParser.scala deleted file mode 100644 index 4f2a40469f..0000000000 --- a/akka-spring/src/main/scala/akka/DispatcherBeanDefinitionParser.scala +++ /dev/null @@ -1,28 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import org.w3c.dom.Element -import org.springframework.beans.factory.support.BeanDefinitionBuilder -import org.springframework.beans.factory.xml.{ParserContext, AbstractSingleBeanDefinitionParser} - - -/** - * Parser for custom namespace configuration. 
- * @author michaelkober - */ -class DispatcherBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with ActorParser with DispatcherParser { - /* - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) - */ - override def doParse(element: Element, parserContext: ParserContext, builder: BeanDefinitionBuilder) { - val dispatcherProperties = parseDispatcher(element) - dispatcherProperties.setAsProperties(builder) - } - - /* - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element) - */ - override def getBeanClass(element: Element): Class[_] = classOf[DispatcherFactoryBean] -} diff --git a/akka-spring/src/main/scala/akka/DispatcherFactoryBean.scala b/akka-spring/src/main/scala/akka/DispatcherFactoryBean.scala deleted file mode 100644 index bdcfca5d33..0000000000 --- a/akka-spring/src/main/scala/akka/DispatcherFactoryBean.scala +++ /dev/null @@ -1,110 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import org.springframework.beans.factory.config.AbstractFactoryBean -import akka.config.Supervision._ -import AkkaSpringConfigurationTags._ -import reflect.BeanProperty -import akka.actor.ActorRef -import java.util.concurrent.RejectedExecutionHandler -import java.util.concurrent.ThreadPoolExecutor.{DiscardPolicy, DiscardOldestPolicy, CallerRunsPolicy, AbortPolicy} -import akka.dispatch._ -import akka.util.Duration - -/** - * Reusable factory method for dispatchers. - */ -object DispatcherFactoryBean { - - /** - * factory method for dispatchers - * @param properties dispatcher properties - * @param actorRef actorRef needed for thread based dispatcher - */ - def createNewInstance(properties: DispatcherProperties, actorRef: Option[ActorRef] = None): MessageDispatcher = { - - //Creates a ThreadPoolConfigDispatcherBuilder and applies the configuration to it - def configureThreadPool(createDispatcher: => (ThreadPoolConfig) => MessageDispatcher): ThreadPoolConfigDispatcherBuilder = { - if ((properties.threadPool ne null) && (properties.threadPool.queue ne null)) { - import ThreadPoolConfigDispatcherBuilder.conf_? 
- import properties._ - val queueDef = Some(threadPool.queue) - val corePoolSize = if (threadPool.corePoolSize > -1) Some(threadPool.corePoolSize) else None - val maxPoolSize = if (threadPool.maxPoolSize > -1) Some(threadPool.maxPoolSize) else None - val keepAlive = if (threadPool.keepAlive > -1) Some(threadPool.keepAlive) else None - val executorBounds = if (threadPool.bound > -1) Some(threadPool.bound) else None - val flowHandler = threadPool.rejectionPolicy match { - case null | "" => None - case "abort-policy" => Some(new AbortPolicy()) - case "caller-runs-policy" => Some(new CallerRunsPolicy()) - case "discard-oldest-policy" => Some(new DiscardOldestPolicy()) - case "discard-policy" => Some(new DiscardPolicy()) - case x => throw new IllegalArgumentException("Unknown rejection-policy '" + x + "'") - } - - //Apply the following options to the config if they are present in the cfg - ThreadPoolConfigDispatcherBuilder(createDispatcher,ThreadPoolConfig()).configure( - conf_?(queueDef )(definition => definition match { - case VAL_BOUNDED_ARRAY_BLOCKING_QUEUE => - _.withNewThreadPoolWithArrayBlockingQueueWithCapacityAndFairness(threadPool.capacity,threadPool.fairness) - case VAL_UNBOUNDED_LINKED_BLOCKING_QUEUE if threadPool.capacity > 0 => - _.withNewThreadPoolWithLinkedBlockingQueueWithCapacity(threadPool.capacity) - case VAL_UNBOUNDED_LINKED_BLOCKING_QUEUE if threadPool.capacity <= 0 => - _.withNewThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity - case VAL_BOUNDED_LINKED_BLOCKING_QUEUE => - _.withNewBoundedThreadPoolWithLinkedBlockingQueueWithUnboundedCapacity(threadPool.bound) - case VAL_SYNCHRONOUS_QUEUE => - _.withNewThreadPoolWithSynchronousQueueWithFairness(threadPool.fairness) - case unknown => - throw new IllegalArgumentException("Unknown queue type " + unknown) - }), - conf_?(keepAlive )(time => _.setKeepAliveTimeInMillis(time)), - conf_?(corePoolSize )(count => _.setCorePoolSize(count)), - conf_?(maxPoolSize )(count => _.setMaxPoolSize(count)), - conf_?(executorBounds)(bounds => _.setExecutorBounds(bounds)), - conf_?(flowHandler )(policy => _.setRejectionPolicy(policy))) - } - else - ThreadPoolConfigDispatcherBuilder(createDispatcher,ThreadPoolConfig()) - } - - //Create the dispatcher - properties.dispatcherType match { - case EXECUTOR_BASED_EVENT_DRIVEN => - configureThreadPool(poolConfig => new ExecutorBasedEventDrivenDispatcher(properties.name, poolConfig)).build - case EXECUTOR_BASED_EVENT_DRIVEN_WORK_STEALING => - configureThreadPool(poolConfig => new ExecutorBasedEventDrivenWorkStealingDispatcher(properties.name,Dispatchers.MAILBOX_TYPE,poolConfig)).build - case THREAD_BASED if actorRef.isEmpty => - throw new IllegalArgumentException("Need an ActorRef to create a thread based dispatcher.") - case THREAD_BASED if actorRef.isDefined => - Dispatchers.newThreadBasedDispatcher(actorRef.get) - case HAWT => - Dispatchers.newHawtDispatcher(properties.aggregate) - case unknown => - throw new IllegalArgumentException("Unknown dispatcher type " + unknown) - } - } -} - -/** - * Factory bean for dispatcher configuration.
- * @author michaelkober - */ -class DispatcherFactoryBean extends AbstractFactoryBean[MessageDispatcher] { - @BeanProperty var properties: DispatcherProperties = _ - - /* - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - def getObjectType: Class[MessageDispatcher] = classOf[MessageDispatcher] - - /* - * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() - */ - def createInstance: MessageDispatcher = { - import DispatcherFactoryBean._ - createNewInstance(properties) - } -} diff --git a/akka-spring/src/main/scala/akka/DispatcherProperties.scala b/akka-spring/src/main/scala/akka/DispatcherProperties.scala deleted file mode 100644 index 8dd33602df..0000000000 --- a/akka-spring/src/main/scala/akka/DispatcherProperties.scala +++ /dev/null @@ -1,61 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import org.springframework.beans.factory.support.BeanDefinitionBuilder - -/** - * Data container for dispatcher configuration data. - * @author michaelkober - */ -class DispatcherProperties { - var ref: String = "" - var dispatcherType: String = "" - var name: String = "" - var threadPool: ThreadPoolProperties = _ - var aggregate = true - - /** - * Sets the properties to the given builder. - * @param builder bean definition builder - */ - def setAsProperties(builder: BeanDefinitionBuilder) { - builder.addPropertyValue("properties", this) - } - - override def toString : String = { - "DispatcherProperties[ref=" + ref + - ", dispatcher-type=" + dispatcherType + - ", name=" + name + - ", threadPool=" + threadPool + "]" - } -} - -/** - * Data container for thread pool configuration data. - * @author michaelkober - */ -class ThreadPoolProperties { - var queue = "" - var bound = -1 - var capacity = -1 - var fairness = false - var corePoolSize = -1 - var maxPoolSize = -1 - var keepAlive = -1L - var rejectionPolicy = "" - var mailboxCapacity = -1 - - override def toString : String = { - "ThreadPoolProperties[queue=" + queue + - ", bound=" + bound + - ", capacity=" + capacity + - ", fairness=" + fairness + - ", corePoolSize=" + corePoolSize + - ", maxPoolSize=" + maxPoolSize + - ", keepAlive=" + keepAlive + - ", policy=" + rejectionPolicy + - ", mailboxCapacity=" + mailboxCapacity + "]" - } -} diff --git a/akka-spring/src/main/scala/akka/PropertyEntries.scala b/akka-spring/src/main/scala/akka/PropertyEntries.scala deleted file mode 100644 index 9f6493bbb3..0000000000 --- a/akka-spring/src/main/scala/akka/PropertyEntries.scala +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import org.springframework.beans.factory.support.BeanDefinitionBuilder - -import scala.collection.mutable._ - -/** - * Simple container for Properties - * @author Johan Rask - */ -class PropertyEntries { - var entryList: ListBuffer[PropertyEntry] = ListBuffer[PropertyEntry]() - - def add(entry: PropertyEntry) = { - entryList.append(entry) - } -} - -/** - * Represents a property element - * @author Johan Rask - */ -class PropertyEntry { - var name: String = _ - var value: String = null - var ref: String = null - - - override def toString(): String = { - format("name = %s,value = %s, ref = %s", name, value, ref) - } -} - diff --git a/akka-spring/src/main/scala/akka/StringReflect.scala b/akka-spring/src/main/scala/akka/StringReflect.scala deleted file mode 100644 index 2b77f8caa6..0000000000 --- a/akka-spring/src/main/scala/akka/StringReflect.scala +++ /dev/null @@ -1,25 +0,0 @@ 
-/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ - -package akka.spring - -object StringReflect { - - /** - * Implicit conversion from String to StringReflect. - */ - implicit def string2StringReflect(x: String) = new StringReflect(x) -} - -/** - * Reflection helper class. - * @author michaelkober - */ -class StringReflect(val self: String) { - if ((self eq null) || self == "") throw new IllegalArgumentException("Class name can't be null or empty string [" + self + "]") - def toClass[T <: AnyRef]: Class[T] = { - val clazz = Class.forName(self) - clazz.asInstanceOf[Class[T]] - } -} diff --git a/akka-spring/src/main/scala/akka/SupervisionBeanDefinitionParser.scala b/akka-spring/src/main/scala/akka/SupervisionBeanDefinitionParser.scala deleted file mode 100644 index c4753d1d5b..0000000000 --- a/akka-spring/src/main/scala/akka/SupervisionBeanDefinitionParser.scala +++ /dev/null @@ -1,86 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import akka.util.Logging -import org.springframework.beans.factory.support.BeanDefinitionBuilder -import org.springframework.beans.factory.xml.{ParserContext, AbstractSingleBeanDefinitionParser} -import akka.config.Supervision._ -import AkkaSpringConfigurationTags._ - - -import org.w3c.dom.Element -import org.springframework.util.xml.DomUtils - - -/** - * Parser for custom namespace for Akka declarative supervisor configuration. - * @author michaelkober - */ -class SupervisionBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with ActorParser { - /* (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) - */ - override def doParse(element: Element, parserContext: ParserContext, builder: BeanDefinitionBuilder) { - parseSupervisor(element, builder) - } - - /** - * made accessible for testing - */ - private[akka] def parseSupervisor(element: Element, builder: BeanDefinitionBuilder) { - val strategyElement = mandatoryElement(element, STRATEGY_TAG) - val typedActorsElement = DomUtils.getChildElementByTagName(element, TYPED_ACTORS_TAG) - val untypedActorsElement = DomUtils.getChildElementByTagName(element, UNTYPED_ACTORS_TAG) - if ((typedActorsElement eq null) && (untypedActorsElement eq null)) { - throw new IllegalArgumentException("One of 'akka:typed-actors' or 'akka:untyped-actors' needed.") - } - parseRestartStrategy(strategyElement, builder) - if (typedActorsElement ne null) { - builder.addPropertyValue("typed", AkkaSpringConfigurationTags.TYPED_ACTOR_TAG) - parseTypedActorList(typedActorsElement, builder) - } else { - builder.addPropertyValue("typed", AkkaSpringConfigurationTags.UNTYPED_ACTOR_TAG) - parseUntypedActorList(untypedActorsElement, builder) - } - } - - private[akka] def parseRestartStrategy(element: Element, builder: BeanDefinitionBuilder) { - val failover = mandatory(element, FAILOVER) - val timeRange = mandatory(element, TIME_RANGE).toInt - val retries = mandatory(element, RETRIES).toInt - val trapExitsElement = mandatoryElement(element, TRAP_EXISTS_TAG) - val trapExceptions = parseTrapExits(trapExitsElement) - - val restartStrategy = failover match { - case "AllForOne" => new AllForOneStrategy(trapExceptions, retries, timeRange) - case "OneForOne" => new OneForOneStrategy(trapExceptions, retries, timeRange) - case _ => new OneForOneStrategy(trapExceptions, retries, timeRange) //Default to OneForOne 
- } - builder.addPropertyValue("restartStrategy", restartStrategy) - } - - private[akka] def parseTypedActorList(element: Element, builder: BeanDefinitionBuilder) { - val typedActors = DomUtils.getChildElementsByTagName(element, TYPED_ACTOR_TAG).toArray.toList.asInstanceOf[List[Element]] - val actorProperties = typedActors.map(parseActor(_)) - builder.addPropertyValue("supervised", actorProperties) - } - - private[akka] def parseUntypedActorList(element: Element, builder: BeanDefinitionBuilder) { - val untypedActors = DomUtils.getChildElementsByTagName(element, UNTYPED_ACTOR_TAG).toArray.toList.asInstanceOf[List[Element]] - val actorProperties = untypedActors.map(parseActor(_)) - builder.addPropertyValue("supervised", actorProperties) - } - - private def parseTrapExits(element: Element): Array[Class[_ <: Throwable]] = { - import StringReflect._ - val trapExits = DomUtils.getChildElementsByTagName(element, TRAP_EXIT_TAG).toArray.toList.asInstanceOf[List[Element]] - trapExits.map(DomUtils.getTextValue(_).toClass.asInstanceOf[Class[_ <: Throwable]]).toArray - } - - /* - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element) - */ - override def getBeanClass(element: Element): Class[_] = classOf[SupervisionFactoryBean] -} diff --git a/akka-spring/src/main/scala/akka/SupervisionFactoryBean.scala b/akka-spring/src/main/scala/akka/SupervisionFactoryBean.scala deleted file mode 100644 index 5e88374f18..0000000000 --- a/akka-spring/src/main/scala/akka/SupervisionFactoryBean.scala +++ /dev/null @@ -1,97 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import org.springframework.beans.factory.config.AbstractFactoryBean -import akka.config.Supervision._ -import akka.actor.{Supervisor, SupervisorFactory, Actor} -import AkkaSpringConfigurationTags._ -import reflect.BeanProperty -import akka.config.{TypedActorConfigurator, RemoteAddress} - -/** - * Factory bean for supervisor configuration. 
- * @author michaelkober - */ -class SupervisionFactoryBean extends AbstractFactoryBean[AnyRef] { - @BeanProperty var restartStrategy: FaultHandlingStrategy = _ - @BeanProperty var supervised: List[ActorProperties] = _ - @BeanProperty var typed: String = "" - - /* - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - def getObjectType: Class[AnyRef] = classOf[AnyRef] - - /* - * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() - */ - def createInstance: AnyRef = typed match { - case AkkaSpringConfigurationTags.TYPED_ACTOR_TAG => createInstanceForTypedActors - case AkkaSpringConfigurationTags.UNTYPED_ACTOR_TAG => createInstanceForUntypedActors - } - - private def createInstanceForTypedActors() : TypedActorConfigurator = { - val configurator = new TypedActorConfigurator() - configurator.configure( - restartStrategy, - supervised.map(createComponent(_)).toArray - ).supervise - - } - - private def createInstanceForUntypedActors() : Supervisor = { - val factory = new SupervisorFactory( - new SupervisorConfig( - restartStrategy, - supervised.map(createSupervise(_)))) - factory.newInstance - } - - /** - * Create configuration for TypedActor - */ - private[akka] def createComponent(props: ActorProperties): SuperviseTypedActor = { - import StringReflect._ - val lifeCycle = if (!props.lifecycle.isEmpty && props.lifecycle.equalsIgnoreCase(VAL_LIFECYCYLE_TEMPORARY)) Temporary else Permanent - val isRemote = (props.host ne null) && (!props.host.isEmpty) - val withInterface = (props.interface ne null) && (!props.interface.isEmpty) - if (isRemote) { - //val remote = new RemoteAddress(props.host, props.port) - val remote = new RemoteAddress(props.host, props.port.toInt) - if (withInterface) { - new SuperviseTypedActor(props.interface.toClass, props.target.toClass, lifeCycle, props.timeout, remote) - } else { - new SuperviseTypedActor(props.target.toClass, lifeCycle, props.timeout, remote) - } - } else { - if (withInterface) { - new SuperviseTypedActor(props.interface.toClass, props.target.toClass, lifeCycle, props.timeout) - } else { - new SuperviseTypedActor(props.target.toClass, lifeCycle, props.timeout) - } - } - } - - /** - * Create configuration for UntypedActor - */ - private[akka] def createSupervise(props: ActorProperties): Server = { - import StringReflect._ - val lifeCycle = if (!props.lifecycle.isEmpty && props.lifecycle.equalsIgnoreCase(VAL_LIFECYCYLE_TEMPORARY)) Temporary else Permanent - val isRemote = (props.host ne null) && (!props.host.isEmpty) - val actorRef = Actor.actorOf(props.target.toClass) - if (props.timeout > 0) { - actorRef.setTimeout(props.timeout) - } - - val supervise = if (isRemote) { - val remote = new RemoteAddress(props.host, props.port.toInt) - Supervise(actorRef, lifeCycle, remote) - } else { - Supervise(actorRef, lifeCycle) - } - supervise - } -} diff --git a/akka-spring/src/test/java/akka/spring/Pojo.java b/akka-spring/src/test/java/akka/spring/Pojo.java deleted file mode 100644 index 618adc8cc3..0000000000 --- a/akka-spring/src/test/java/akka/spring/Pojo.java +++ /dev/null @@ -1,51 +0,0 @@ -package akka.spring; - -import org.springframework.context.ApplicationContext; -import org.springframework.context.ApplicationContextAware; - -import javax.annotation.PreDestroy; -import javax.annotation.PostConstruct; - -import akka.actor.*; - -public class Pojo extends TypedActor implements PojoInf, ApplicationContextAware { - - private String stringFromVal; - private String stringFromRef; - - private boolean 
gotApplicationContext = false; - private boolean preStartInvoked = false; - - public boolean gotApplicationContext() { - return gotApplicationContext; - } - - public void setApplicationContext(ApplicationContext context) { - gotApplicationContext = true; - } - - public String getStringFromVal() { - return stringFromVal; - } - - public void setStringFromVal(String s) { - stringFromVal = s; - } - - public String getStringFromRef() { - return stringFromRef; - } - - public void setStringFromRef(String s) { - stringFromRef = s; - } - - @Override - public void preStart() { - preStartInvoked = true; - } - - public boolean isPreStartInvoked() { - return preStartInvoked; - } -} diff --git a/akka-spring/src/test/java/akka/spring/PojoInf.java b/akka-spring/src/test/java/akka/spring/PojoInf.java deleted file mode 100644 index f73ce35814..0000000000 --- a/akka-spring/src/test/java/akka/spring/PojoInf.java +++ /dev/null @@ -1,13 +0,0 @@ -package akka.spring; - -import javax.annotation.PreDestroy; -import javax.annotation.PostConstruct; - -public interface PojoInf { - - public String getStringFromVal(); - public String getStringFromRef(); - public boolean gotApplicationContext(); - public boolean isPreStartInvoked(); - -} diff --git a/akka-spring/src/test/java/akka/spring/SampleBean.java b/akka-spring/src/test/java/akka/spring/SampleBean.java deleted file mode 100644 index e23672d060..0000000000 --- a/akka-spring/src/test/java/akka/spring/SampleBean.java +++ /dev/null @@ -1,25 +0,0 @@ -package akka.spring; - -import akka.actor.*; - -public class SampleBean extends TypedActor implements SampleBeanIntf { - - private boolean down; - - public SampleBean() { - down = false; - } - - public boolean down() { - return down; - } - - public String foo(String s) { - return "hello " + s; - } - - @Override - public void postStop() { - down = true; - } - } diff --git a/akka-spring/src/test/java/akka/spring/SampleBeanIntf.java b/akka-spring/src/test/java/akka/spring/SampleBeanIntf.java deleted file mode 100644 index 365275f193..0000000000 --- a/akka-spring/src/test/java/akka/spring/SampleBeanIntf.java +++ /dev/null @@ -1,6 +0,0 @@ -package akka.spring; - -public interface SampleBeanIntf { - public boolean down(); - public String foo(String s); - } diff --git a/akka-spring/src/test/java/akka/spring/SampleRoute.java b/akka-spring/src/test/java/akka/spring/SampleRoute.java deleted file mode 100644 index fb3565661d..0000000000 --- a/akka-spring/src/test/java/akka/spring/SampleRoute.java +++ /dev/null @@ -1,11 +0,0 @@ -package akka.spring; - -import org.apache.camel.builder.RouteBuilder; - -public class SampleRoute extends RouteBuilder { - - @Override - public void configure() throws Exception { - from("direct:test").to("typed-actor:sample?method=foo"); - } -} diff --git a/akka-spring/src/test/java/akka/spring/foo/Bar.java b/akka-spring/src/test/java/akka/spring/foo/Bar.java deleted file mode 100644 index 36276ff108..0000000000 --- a/akka-spring/src/test/java/akka/spring/foo/Bar.java +++ /dev/null @@ -1,17 +0,0 @@ -package akka.spring.foo; - -import java.io.IOException; -import akka.actor.*; - -public class Bar extends TypedActor implements IBar { - - @Override - public String getBar() { - return "bar"; - } - - public void throwsIOException() throws IOException { - throw new IOException("some IO went wrong"); - } - -} diff --git a/akka-spring/src/test/java/akka/spring/foo/Foo.java b/akka-spring/src/test/java/akka/spring/foo/Foo.java deleted file mode 100644 index 189f146e51..0000000000 --- 
a/akka-spring/src/test/java/akka/spring/foo/Foo.java +++ /dev/null @@ -1,11 +0,0 @@ -package akka.spring.foo; - -import akka.actor.*; - -public class Foo extends TypedActor implements IFoo{ - - public String foo() { - return "foo"; - } - -} diff --git a/akka-spring/src/test/java/akka/spring/foo/IBar.java b/akka-spring/src/test/java/akka/spring/foo/IBar.java deleted file mode 100644 index 803b4ab50a..0000000000 --- a/akka-spring/src/test/java/akka/spring/foo/IBar.java +++ /dev/null @@ -1,7 +0,0 @@ -package akka.spring.foo; - -public interface IBar { - - String getBar(); - -} diff --git a/akka-spring/src/test/java/akka/spring/foo/IFoo.java b/akka-spring/src/test/java/akka/spring/foo/IFoo.java deleted file mode 100644 index e47809f3af..0000000000 --- a/akka-spring/src/test/java/akka/spring/foo/IFoo.java +++ /dev/null @@ -1,12 +0,0 @@ -package akka.spring.foo; - -/** - * Created by IntelliJ IDEA. - * User: michaelkober - * Date: Aug 11, 2010 - * Time: 12:49:58 PM - * To change this template use File | Settings | File Templates. - */ -public interface IFoo { - public String foo(); -} diff --git a/akka-spring/src/test/java/akka/spring/foo/IMyPojo.java b/akka-spring/src/test/java/akka/spring/foo/IMyPojo.java deleted file mode 100644 index 825d797cf2..0000000000 --- a/akka-spring/src/test/java/akka/spring/foo/IMyPojo.java +++ /dev/null @@ -1,19 +0,0 @@ -package akka.spring.foo; - -/** - * Created by IntelliJ IDEA. - * User: michaelkober - * Date: Aug 11, 2010 - * Time: 12:01:00 PM - * To change this template use File | Settings | File Templates. - */ -public interface IMyPojo { - public void oneWay(String message); - - public String getFoo(); - - public String longRunning(); - - - -} diff --git a/akka-spring/src/test/java/akka/spring/foo/MyPojo.java b/akka-spring/src/test/java/akka/spring/foo/MyPojo.java deleted file mode 100644 index 54019f53d2..0000000000 --- a/akka-spring/src/test/java/akka/spring/foo/MyPojo.java +++ /dev/null @@ -1,34 +0,0 @@ -package akka.spring.foo; - -import akka.actor.TypedActor; - -import java.util.concurrent.CountDownLatch; - -public class MyPojo extends TypedActor implements IMyPojo { - - public static CountDownLatch latch = new CountDownLatch(1); - public static String lastOneWayMessage = null; - private String foo = "foo"; - - - public MyPojo() { - } - - public String getFoo() { - return foo; - } - - public void oneWay(String message) { - lastOneWayMessage = message; - latch.countDown(); - } - - public String longRunning() { - try { - Thread.sleep(6000); - } catch (InterruptedException e) { - } - return "this took long"; - } - -} diff --git a/akka-spring/src/test/java/akka/spring/foo/PingActor.java b/akka-spring/src/test/java/akka/spring/foo/PingActor.java deleted file mode 100644 index b60441699a..0000000000 --- a/akka-spring/src/test/java/akka/spring/foo/PingActor.java +++ /dev/null @@ -1,73 +0,0 @@ -package akka.spring.foo; - -import akka.actor.UntypedActor; -import akka.actor.ActorRef; - -import org.springframework.context.ApplicationContext; -import org.springframework.context.ApplicationContextAware; - -import java.util.concurrent.CountDownLatch; - - -/** - * test class - */ -public class PingActor extends UntypedActor implements ApplicationContextAware { - - private String stringFromVal; - private String stringFromRef; - public static String lastMessage = null; - public static CountDownLatch latch = new CountDownLatch(1); - - - private boolean gotApplicationContext = false; - - - public void setApplicationContext(ApplicationContext context) { - 
gotApplicationContext = true; - } - - public boolean gotApplicationContext() { - return gotApplicationContext; - } - - public String getStringFromVal() { - return stringFromVal; - } - - public void setStringFromVal(String s) { - stringFromVal = s; - } - - public String getStringFromRef() { - return stringFromRef; - } - - public void setStringFromRef(String s) { - stringFromRef = s; - } - - private String longRunning() { - try { - Thread.sleep(6000); - } catch (InterruptedException e) { - } - return "this took long"; - } - - public void onReceive(Object message) throws Exception { - if (message instanceof String) { - lastMessage = (String) message; - if (message.equals("longRunning")) { - ActorRef pongActor = UntypedActor.actorOf(PongActor.class).start(); - pongActor.sendRequestReply("longRunning", getContext()); - } - latch.countDown(); - } else { - throw new IllegalArgumentException("Unknown message: " + message); - } - } - - -} - diff --git a/akka-spring/src/test/java/akka/spring/foo/PongActor.java b/akka-spring/src/test/java/akka/spring/foo/PongActor.java deleted file mode 100644 index d4f19078a6..0000000000 --- a/akka-spring/src/test/java/akka/spring/foo/PongActor.java +++ /dev/null @@ -1,18 +0,0 @@ -package akka.spring.foo; - -import akka.actor.UntypedActor; - -/** - * test class - */ -public class PongActor extends UntypedActor { - - public void onReceive(Object message) throws Exception { - if (message instanceof String) { - System.out.println("Pong received String message: " + message); - getContext().replyUnsafe(message + " from " + getContext().getUuid()); - } else { - throw new IllegalArgumentException("Unknown message: " + message); - } - } -} diff --git a/akka-spring/src/test/java/akka/spring/foo/StatefulPojo.java b/akka-spring/src/test/java/akka/spring/foo/StatefulPojo.java deleted file mode 100644 index 8f291d2a36..0000000000 --- a/akka-spring/src/test/java/akka/spring/foo/StatefulPojo.java +++ /dev/null @@ -1,58 +0,0 @@ -package akka.spring.foo; - -/* -import akka.stm.TransactionalMap; -import akka.stm.TransactionalVector; -import akka.stm.Ref; -import akka.actor.*; -import akka.stm.Atomic; - -public class StatefulPojo extends TypedActor { - private TransactionalMap mapState; - private TransactionalVector vectorState; - private Ref refState; - private boolean isInitialized = false; - - @Override - public void preStart() { - if(!isInitialized) { - isInitialized = new Atomic() { - public Boolean atomically() { - mapState = new TransactionalMap(); - vectorState = new TransactionalVector(); - refState = new Ref(); - return true; - } - }.execute(); - } - } - - public String getMapState(String key) { - return (String)mapState.get(key).get(); - } - - public String getVectorState() { - return (String)vectorState.last(); - } - - public String getRefState() { - return (String)refState.get().get(); - } - - public void setMapState(String key, String msg) { - mapState.put(key, msg); - } - - public void setVectorState(String msg) { - vectorState.add(msg); - } - - public void setRefState(String msg) { - refState.swap(msg); - } - - public boolean isInitialized() { - return isInitialized; - } -} -*/ diff --git a/akka-spring/src/test/resources/akka-test.conf b/akka-spring/src/test/resources/akka-test.conf deleted file mode 100644 index 2ade509c06..0000000000 --- a/akka-spring/src/test/resources/akka-test.conf +++ /dev/null @@ -1,13 +0,0 @@ -akka { - actor { - timeout = 2000 - } - remote { - server { - service = on - hostname = "localhost" # The hostname or IP that clients should connect to - 
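The deleted test actors coordinate with the test thread through static CountDownLatch fields (PingActor.latch, MyPojo.latch), re-assigned to new CountDownLatch(1) before each scenario. A minimal, JDK-only sketch of that handshake pattern, with hypothetical stand-in names:

import java.util.concurrent.{CountDownLatch, TimeUnit}

object LatchHandshakeSketch extends App {
  val latch = new CountDownLatch(1)      // one fresh latch per scenario, as in the deleted tests
  new Thread(new Runnable {
    def run() { latch.countDown() }      // stands in for the actor's onReceive side effect
  }).start()
  // the test thread blocks until the side effect has happened; the bound keeps a miss from hanging
  assert(latch.await(2, TimeUnit.SECONDS))
}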
port = 9995 # The port clients should connect to - connection-timeout = 1 - } - } -} diff --git a/akka-spring/src/test/resources/appContext.xml b/akka-spring/src/test/resources/appContext.xml deleted file mode 100644 index d000bd67f3..0000000000 --- a/akka-spring/src/test/resources/appContext.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/akka-spring/src/test/resources/appContextCamelServiceCustom.xml b/akka-spring/src/test/resources/appContextCamelServiceCustom.xml deleted file mode 100644 index c567d7ca32..0000000000 --- a/akka-spring/src/test/resources/appContextCamelServiceCustom.xml +++ /dev/null @@ -1,27 +0,0 @@ - - - - - - - - - - - - - - diff --git a/akka-spring/src/test/resources/appContextCamelServiceDefault.xml b/akka-spring/src/test/resources/appContextCamelServiceDefault.xml deleted file mode 100644 index e9f23a3f3d..0000000000 --- a/akka-spring/src/test/resources/appContextCamelServiceDefault.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - diff --git a/akka-spring/src/test/resources/dispatcher-config.xml b/akka-spring/src/test/resources/dispatcher-config.xml deleted file mode 100644 index ffbf9dffc1..0000000000 --- a/akka-spring/src/test/resources/dispatcher-config.xml +++ /dev/null @@ -1,117 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - java.io.IOException - java.lang.NullPointerException - - - - - - - - - - - - - diff --git a/akka-spring/src/test/resources/failing-appContext.xml b/akka-spring/src/test/resources/failing-appContext.xml deleted file mode 100644 index 28187fe4ef..0000000000 --- a/akka-spring/src/test/resources/failing-appContext.xml +++ /dev/null @@ -1,20 +0,0 @@ - - - - - - - - - - - diff --git a/akka-spring/src/test/resources/property-config.xml b/akka-spring/src/test/resources/property-config.xml deleted file mode 100644 index b3f8adaa29..0000000000 --- a/akka-spring/src/test/resources/property-config.xml +++ /dev/null @@ -1,22 +0,0 @@ - - - - - - - - - - - - diff --git a/akka-spring/src/test/resources/server-managed-config.xml b/akka-spring/src/test/resources/server-managed-config.xml deleted file mode 100644 index 652ff7bbd7..0000000000 --- a/akka-spring/src/test/resources/server-managed-config.xml +++ /dev/null @@ -1,57 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/akka-spring/src/test/resources/supervisor-config.xml b/akka-spring/src/test/resources/supervisor-config.xml deleted file mode 100644 index 8dcdc25c56..0000000000 --- a/akka-spring/src/test/resources/supervisor-config.xml +++ /dev/null @@ -1,120 +0,0 @@ - - - - - - - java.io.IOException - java.lang.NullPointerException - - - - - - - - - - - - - - java.io.IOException - java.lang.NullPointerException - - - - - - - - - - - - - - - - - java.lang.Exception - - - - - - - - - - - - - - - diff --git a/akka-spring/src/test/resources/typed-actor-config.xml b/akka-spring/src/test/resources/typed-actor-config.xml deleted file mode 100644 index 7b994fca28..0000000000 --- a/akka-spring/src/test/resources/typed-actor-config.xml +++ /dev/null @@ -1,83 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - java.io.IOException - java.lang.NullPointerException - - - - - - - - - - - diff --git a/akka-spring/src/test/resources/untyped-actor-config.xml b/akka-spring/src/test/resources/untyped-actor-config.xml deleted file mode 100644 index e827f4c1de..0000000000 --- a/akka-spring/src/test/resources/untyped-actor-config.xml +++ 
/dev/null @@ -1,36 +0,0 @@ - - - - - - - - - - - - - - - - - - - - diff --git a/akka-spring/src/test/scala/ActorFactoryBeanTest.scala b/akka-spring/src/test/scala/ActorFactoryBeanTest.scala deleted file mode 100644 index e6e07ed5cd..0000000000 --- a/akka-spring/src/test/scala/ActorFactoryBeanTest.scala +++ /dev/null @@ -1,113 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import akka.actor.{ActorRegistry, ActorRef} -import akka.spring.foo.PingActor - -import org.junit.runner.RunWith -import org.springframework.context.support.ClassPathXmlApplicationContext -import org.scalatest.junit.JUnitRunner -import org.scalatest.{BeforeAndAfterAll, Spec} -import org.scalatest.matchers.ShouldMatchers - -/** - * Test for TypedActorFactoryBean - * @author michaelkober - */ -@RunWith(classOf[JUnitRunner]) -class ActorFactoryBeanTest extends Spec with ShouldMatchers with BeforeAndAfterAll { - override protected def afterAll = ActorRegistry.shutdownAll - - describe("A ActorFactoryBean") { - val bean = new ActorFactoryBean - it("should have java getters and setters for all properties") { - bean.setImplementation("java.lang.String") - assert(bean.getImplementation == "java.lang.String") - bean.setTimeoutStr("1000") - assert(bean.getTimeoutStr === "1000") - } - - it("should create a remote typed actor when a host is set") { - bean.setHost("some.host.com"); - assert(bean.isRemote) - } - - it("should create a typed actor with dispatcher if dispatcher is set") { - val props = new DispatcherProperties() - props.dispatcherType = "executor-based-event-driven" - bean.setDispatcher(props); - assert(bean.hasDispatcher) - } - - it("should return the object type") { - bean.setImplementation("java.lang.String") - assert(bean.getObjectType == classOf[String]) - } - - it("should create a proxy of type PojoInf") { - val bean = new ActorFactoryBean() - bean.setInterface("akka.spring.PojoInf") - bean.setImplementation("akka.spring.Pojo") - bean.timeoutStr = "1000" - bean.typed = AkkaSpringConfigurationTags.TYPED_ACTOR_TAG - val entries = new PropertyEntries() - val entry = new PropertyEntry() - entry.name = "stringFromVal" - entry.value = "tests rock" - entries.add(entry) - bean.setProperty(entries) - assert(classOf[PojoInf].isAssignableFrom(bean.getObjectType)) - - // Check that we have injected the dependency correctly - val target = bean.createInstance.asInstanceOf[PojoInf] - assert(target.getStringFromVal === entry.value) - } - - it("should create an application context and verify dependency injection for typed") { - var ctx = new ClassPathXmlApplicationContext("appContext.xml"); - val ta = ctx.getBean("typedActor").asInstanceOf[PojoInf]; - assert(ta.isPreStartInvoked) - assert(ta.getStringFromVal === "akka rocks") - assert(ta.getStringFromRef === "spring rocks") - assert(ta.gotApplicationContext) - ctx.close - } - - it("should create an application context and verify dependency injection for untyped actors") { - var ctx = new ClassPathXmlApplicationContext("appContext.xml") - val uta = ctx.getBean("untypedActor").asInstanceOf[ActorRef] - val ping = uta.actor.asInstanceOf[PingActor] - assert(ping.getStringFromVal === "akka rocks") - assert(ping.getStringFromRef === "spring rocks") - assert(ping.gotApplicationContext) - ctx.close - } - - it("should stop the created typed actor when scope is singleton and the context is closed") { - var ctx = new ClassPathXmlApplicationContext("appContext.xml"); - val target = ctx.getBean("untypedActor").asInstanceOf[ActorRef] - target.start - 
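// A sketch of the lifecycle contract this scenario verifies, assuming the Akka 1.x ActorRef
// API used throughout these sources (actorOf returns a chainable ref; start/stop toggle
// isRunning). Closing the Spring context is expected to trigger the equivalent of stop via
// the factory bean's destroy callback:
val sketchRef = akka.actor.Actor.actorOf(classOf[akka.spring.foo.PingActor]).start
assert(sketchRef.isRunning)
sketchRef.stop // the manual equivalent of what ctx.close should do for a singleton-scoped actor bean
assert(!sketchRef.isRunning)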
assert(target.isRunning) - ctx.close - assert(!target.isRunning) - } - - it("should stop the created untyped actor when scope is singleton and the context is closed") { - var ctx = new ClassPathXmlApplicationContext("appContext.xml"); - val target = ctx.getBean("bean-singleton").asInstanceOf[SampleBeanIntf] - assert(!target.down) - ctx.close - assert(target.down) - } - - it("should not stop the created typed actor when scope is prototype and the context is closed") { - var ctx = new ClassPathXmlApplicationContext("appContext.xml"); - val target = ctx.getBean("bean-prototype").asInstanceOf[SampleBeanIntf] - assert(!target.down) - ctx.close - assert(!target.down) - } - } -} diff --git a/akka-spring/src/test/scala/CamelServiceSpringFeatureTest.scala b/akka-spring/src/test/scala/CamelServiceSpringFeatureTest.scala deleted file mode 100644 index d10cb60265..0000000000 --- a/akka-spring/src/test/scala/CamelServiceSpringFeatureTest.scala +++ /dev/null @@ -1,42 +0,0 @@ -package akka.spring - -import org.apache.camel.impl.{SimpleRegistry, DefaultCamelContext} -import org.apache.camel.spring.SpringCamelContext -import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FeatureSpec} -import org.springframework.context.support.ClassPathXmlApplicationContext - -import akka.camel.CamelContextManager -import akka.actor.{TypedActor, ActorRegistry} - -class CamelServiceSpringFeatureTest extends FeatureSpec with BeforeAndAfterEach with BeforeAndAfterAll { - override protected def beforeAll = { - ActorRegistry.shutdownAll - } - - override protected def afterEach = { - ActorRegistry.shutdownAll - } - - feature("start CamelService from Spring application context") { - import CamelContextManager._ - scenario("with a custom CamelContext and access a registered typed actor") { - val appctx = new ClassPathXmlApplicationContext("/appContextCamelServiceCustom.xml") - assert(mandatoryContext.isInstanceOf[SpringCamelContext]) - assert("hello sample" === mandatoryTemplate.requestBody("direct:test", "sample")) - appctx.close - } - - scenario("with a default CamelContext and access a registered typed actor") { - val appctx = new ClassPathXmlApplicationContext("/appContextCamelServiceDefault.xml") - // create a custom registry - val registry = new SimpleRegistry - registry.put("custom", TypedActor.newInstance(classOf[SampleBeanIntf], classOf[SampleBean])) - // set custom registry in DefaultCamelContext - assert(mandatoryContext.isInstanceOf[DefaultCamelContext]) - mandatoryContext.asInstanceOf[DefaultCamelContext].setRegistry(registry) - // access registered typed actor - assert("hello sample" === mandatoryTemplate.requestBody("typed-actor:custom?method=foo", "sample")) - appctx.close - } - } -} diff --git a/akka-spring/src/test/scala/ConfiggyPropertyPlaceholderConfigurerSpec.scala b/akka-spring/src/test/scala/ConfiggyPropertyPlaceholderConfigurerSpec.scala deleted file mode 100644 index 0c61b18952..0000000000 --- a/akka-spring/src/test/scala/ConfiggyPropertyPlaceholderConfigurerSpec.scala +++ /dev/null @@ -1,42 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - - -import foo.{IMyPojo, MyPojo, PingActor} -import akka.dispatch._ -import org.scalatest.FeatureSpec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith -import org.springframework.beans.factory.support.DefaultListableBeanFactory -import org.springframework.beans.factory.xml.XmlBeanDefinitionReader -import org.springframework.context.ApplicationContext 
-import org.springframework.context.support.ClassPathXmlApplicationContext -import org.springframework.core.io.{ClassPathResource, Resource} -import java.util.concurrent._ -import akka.actor.{UntypedActor, Actor, ActorRef} - - - - -/** - * Tests for Configgy-based Spring property placeholder configuration. - * @author michaelkober - */ -@RunWith(classOf[JUnitRunner]) -class ConfiggyPropertyPlaceholderConfigurerSpec extends FeatureSpec with ShouldMatchers { - val EVENT_DRIVEN_PREFIX = "akka:event-driven:dispatcher:" - - feature("The ConfiggyPropertyPlaceholderConfigurer") { - - scenario("should provide the akka config for Spring") { - val context = new ClassPathXmlApplicationContext("/property-config.xml") - val actor1 = context.getBean("actor-1").asInstanceOf[ActorRef] - assert(actor1.remoteAddress.get.getHostName === "localhost") - assert(actor1.remoteAddress.get.getPort === 9995) - assert(actor1.timeout === 2000) - } - } -} diff --git a/akka-spring/src/test/scala/DispatcherBeanDefinitionParserTest.scala b/akka-spring/src/test/scala/DispatcherBeanDefinitionParserTest.scala deleted file mode 100644 index ef6c0c23cc..0000000000 --- a/akka-spring/src/test/scala/DispatcherBeanDefinitionParserTest.scala +++ /dev/null @@ -1,107 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith -import ScalaDom._ - -/** - * Test for DispatcherBeanDefinitionParser - * @author michaelkober - */ -@RunWith(classOf[JUnitRunner]) -class DispatcherBeanDefinitionParserTest extends Spec with ShouldMatchers { - describe("A DispatcherBeanDefinitionParser") { - val parser = new DispatcherBeanDefinitionParser() - - it("should be able to parse the dispatcher configuration") { - // executor-based-event-driven - val xml = - var props = parser.parseDispatcher(dom(xml).getDocumentElement); - assert(props ne null) - assert(props.dispatcherType === "executor-based-event-driven") - assert(props.name === "myDispatcher") - - // executor-based-event-driven-work-stealing - val xml2 = - props = parser.parseDispatcher(dom(xml2).getDocumentElement); - assert(props.dispatcherType === "executor-based-event-driven-work-stealing") - } - - it("should be able to parse the thread pool configuration") { - val xml = - val props = parser.parseThreadPool(dom(xml).getDocumentElement); - assert(props ne null) - assert(props.queue == "bounded-array-blocking-queue") - assert(props.capacity == 100) - assert(props.fairness) - assert(props.corePoolSize == 6) - assert(props.maxPoolSize == 40) - assert(props.keepAlive == 2000L) - assert(props.rejectionPolicy == "caller-runs-policy") - } - - it("should be able to parse the dispatcher with a thread pool configuration") { - val xml = - - - val props = parser.parseDispatcher(dom(xml).getDocumentElement); - assert(props ne null) - assert(props.dispatcherType == "executor-based-event-driven") - assert(props.name == "myDispatcher") - assert(props.threadPool.corePoolSize == 2) - assert(props.threadPool.maxPoolSize == 10) - assert(props.threadPool.keepAlive == 1000) - assert(props.threadPool.queue == "linked-blocking-queue") - } - - it("should throw IllegalArgumentException on a non-existing reference") { - val xml = - evaluating {parser.parseDispatcher(dom(xml).getDocumentElement)} should produce[IllegalArgumentException] - } - - it("should throw IllegalArgumentException on missing mandatory attributes") { - val xml = - evaluating 
{parser.parseDispatcher(dom(xml).getDocumentElement)} should produce[IllegalArgumentException] - } - - it("should throw IllegalArgumentException when configuring a thread based dispatcher without TypedActor or UntypedActor") { - val xml = - evaluating {parser.parseDispatcher(dom(xml).getDocumentElement)} should produce[IllegalArgumentException] - } - - it("should be able to parse the hawt dispatcher configuration") { - // hawt - val xml = - var props = parser.parseDispatcher(dom(xml).getDocumentElement); - assert(props ne null) - assert(props.dispatcherType === "hawt") - assert(props.aggregate === false) - } - } -} - - diff --git a/akka-spring/src/test/scala/DispatcherFactoryBeanTest.scala b/akka-spring/src/test/scala/DispatcherFactoryBeanTest.scala deleted file mode 100644 index 486ec8820c..0000000000 --- a/akka-spring/src/test/scala/DispatcherFactoryBeanTest.scala +++ /dev/null @@ -1,28 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith -import akka.config.Supervision._ -import akka.dispatch.MessageDispatcher - -@RunWith(classOf[JUnitRunner]) -class DispatcherFactoryBeanTest extends Spec with ShouldMatchers { - - describe("A DispatcherFactoryBean") { - val bean = new DispatcherFactoryBean - it("should have java getters and setters for the dispatcher properties") { - val props = new DispatcherProperties() - bean.setProperties(props) - assert(bean.getProperties == props) - } - - it("should return the object type MessageDispatcher") { - assert(bean.getObjectType == classOf[MessageDispatcher]) - } - } -} diff --git a/akka-spring/src/test/scala/DispatcherSpringFeatureTest.scala b/akka-spring/src/test/scala/DispatcherSpringFeatureTest.scala deleted file mode 100644 index ac90495e9b..0000000000 --- a/akka-spring/src/test/scala/DispatcherSpringFeatureTest.scala +++ /dev/null @@ -1,147 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - - -import foo.{IMyPojo, MyPojo, PingActor} -import akka.dispatch._ -import org.scalatest.FeatureSpec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith -import org.springframework.beans.factory.support.DefaultListableBeanFactory -import org.springframework.beans.factory.xml.XmlBeanDefinitionReader -import org.springframework.context.ApplicationContext -import org.springframework.context.support.ClassPathXmlApplicationContext -import org.springframework.core.io.{ClassPathResource, Resource} -import java.util.concurrent._ -import akka.actor.{UntypedActor, Actor, ActorRef} - -/** - * Tests for spring configuration of typed actors. 
- * @author michaelkober - */ -@RunWith(classOf[JUnitRunner]) -class DispatcherSpringFeatureTest extends FeatureSpec with ShouldMatchers { - val EVENT_DRIVEN_PREFIX = "akka:event-driven:dispatcher:" - - feature("Spring configuration") { - - scenario("get a executor-event-driven-dispatcher with array-blocking-queue from context") { - val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") - val dispatcher = context.getBean("executor-event-driven-dispatcher-1").asInstanceOf[ExecutorBasedEventDrivenDispatcher] - assert(dispatcher.name === EVENT_DRIVEN_PREFIX + "dispatcher-1") - val executor = getThreadPoolExecutorAndAssert(dispatcher) - assert(executor.getCorePoolSize() === 1) - assert(executor.getMaximumPoolSize() === 20) - assert(executor.getKeepAliveTime(TimeUnit.MILLISECONDS) === 3000) - assert(executor.getQueue().isInstanceOf[ArrayBlockingQueue[Runnable]]); - assert(executor.getQueue().remainingCapacity() === 100) - } - - - scenario("get a dispatcher via ref from context") { - val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") - val pojo = context.getBean("typed-actor-with-dispatcher-ref").asInstanceOf[IMyPojo] - assert(pojo ne null) - } - - scenario("get a executor-event-driven-dispatcher with blocking-queue with unbounded capacity from context") { - val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") - val dispatcher = context.getBean("executor-event-driven-dispatcher-2").asInstanceOf[ExecutorBasedEventDrivenDispatcher] - val executor = getThreadPoolExecutorAndAssert(dispatcher) - assert(executor.getQueue().isInstanceOf[BlockingQueue[Runnable]]) - assert(executor.getQueue().remainingCapacity() === Integer.MAX_VALUE) - assert(dispatcher.name === EVENT_DRIVEN_PREFIX + "dispatcher-2") - } -/* - scenario("get a executor-event-driven-dispatcher with bounded-blocking-queue and with bounded mailbox capacity") { - val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") - val dispatcher = context.getBean("executor-event-driven-dispatcher-mc").asInstanceOf[ExecutorBasedEventDrivenDispatcher] - assert(dispatcher.name === EVENT_DRIVEN_PREFIX + "dispatcher-mc") - val actorRef = UntypedActor.actorOf(classOf[PingActor]) - actorRef.dispatcher = dispatcher - actorRef.start - assert(actorRef.mailbox.isInstanceOf[BlockingQueue[MessageInvocation]]) - assert((actorRef.mailbox.asInstanceOf[BlockingQueue[MessageInvocation]]).remainingCapacity === 1000) - } -*/ - scenario("get a executor-event-driven-dispatcher with unbounded-linked-blocking-queue with bounded capacity from context") { - val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") - val dispatcher = context.getBean("executor-event-driven-dispatcher-4").asInstanceOf[ExecutorBasedEventDrivenDispatcher] - assert(dispatcher.name === EVENT_DRIVEN_PREFIX + "dispatcher-4") - val executor = getThreadPoolExecutorAndAssert(dispatcher) - assert(executor.getQueue().isInstanceOf[BlockingQueue[Runnable]]) - assert(executor.getQueue().remainingCapacity() === 55) - } - - scenario("get a executor-event-driven-dispatcher with unbounded-linked-blocking-queue with unbounded capacity from context") { - val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") - val dispatcher = context.getBean("executor-event-driven-dispatcher-5").asInstanceOf[ExecutorBasedEventDrivenDispatcher] - assert(dispatcher.name === EVENT_DRIVEN_PREFIX + "dispatcher-5") - val executor = getThreadPoolExecutorAndAssert(dispatcher) - 
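The executor-event-driven-dispatcher-1 assertions above pin down the thread-pool shape these dispatchers wrap; an illustrative, JDK-only reconstruction of an equivalent pool (a sketch with hypothetical names, not the dispatcher's actual construction path):

import java.util.concurrent.{ArrayBlockingQueue, ThreadPoolExecutor, TimeUnit}

object PoolShapeSketch extends App {
  // core 1, max 20, keep-alive 3000 ms, bounded ArrayBlockingQueue of capacity 100
  val pool = new ThreadPoolExecutor(1, 20, 3000L, TimeUnit.MILLISECONDS,
    new ArrayBlockingQueue[Runnable](100))
  assert(pool.getCorePoolSize == 1)
  assert(pool.getMaximumPoolSize == 20)
  assert(pool.getKeepAliveTime(TimeUnit.MILLISECONDS) == 3000L)
  assert(pool.getQueue.remainingCapacity == 100)
  pool.shutdown() // no tasks were submitted; shutdown just keeps the sketch tidy
}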
assert(executor.getQueue().isInstanceOf[BlockingQueue[Runnable]]) - assert(executor.getQueue().remainingCapacity() === Integer.MAX_VALUE) - } - - scenario("get a executor-event-driven-dispatcher with synchronous-queue from context") { - val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") - val dispatcher = context.getBean("executor-event-driven-dispatcher-6").asInstanceOf[ExecutorBasedEventDrivenDispatcher] - assert(dispatcher.name === EVENT_DRIVEN_PREFIX + "dispatcher-6") - val executor = getThreadPoolExecutorAndAssert(dispatcher) - assert(executor.getQueue().isInstanceOf[SynchronousQueue[Runnable]]) - } - - scenario("get a executor-based-event-driven-work-stealing-dispatcher from context") { - val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") - val dispatcher = context.getBean("executor-based-event-driven-work-stealing-dispatcher").asInstanceOf[ExecutorBasedEventDrivenWorkStealingDispatcher] - assert(dispatcher ne null) - assert(dispatcher.name === "akka:event-driven-work-stealing:dispatcher:workStealingDispatcher") - val executor = getThreadPoolExecutorAndAssert(dispatcher) - assert(executor.getQueue().isInstanceOf[BlockingQueue[Runnable]]) - } - - scenario("get a hawt-dispatcher from context") { - val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") - val dispatcher = context.getBean("hawt-dispatcher").asInstanceOf[HawtDispatcher] - assert(dispatcher ne null) - assert(dispatcher.toString === "HawtDispatcher") - assert(dispatcher.aggregate === false) - } - - scenario("get a thread-based-dispatcher for typed actor from context") { - val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") - val pojo = context.getBean("typed-actor-with-thread-based-dispatcher").asInstanceOf[IMyPojo] - assert(pojo ne null) - } - - scenario("get a thread-based-dispatcher for untyped from context") { - val context = new ClassPathXmlApplicationContext("/dispatcher-config.xml") - val actorRef = context.getBean("untyped-actor-with-thread-based-dispatcher").asInstanceOf[ActorRef] - assert(actorRef.getActorClassName() === "akka.spring.foo.PingActor") - actorRef.start() - actorRef.sendOneWay("Hello") - assert(actorRef.getDispatcher.isInstanceOf[ThreadBasedDispatcher]) - } - } - - /** - * get ThreadPoolExecutor via reflection and assert that dispatcher is correct type - */ - private def getThreadPoolExecutorAndAssert(dispatcher: MessageDispatcher): ThreadPoolExecutor = { - - def unpackExecutorService(e: ExecutorService): ExecutorService = e match { - case b: ExecutorServiceDelegate => unpackExecutorService(b.executor) - case t: ThreadPoolExecutor => t - case e => throw new IllegalStateException("Illegal executor type: " + e) - } - - unpackExecutorService(dispatcher match { - case e: ExecutorBasedEventDrivenDispatcher => e.executorService.get() - case e: ExecutorBasedEventDrivenWorkStealingDispatcher => e.executorService.get() - case x => throw new IllegalStateException("Illegal dispatcher type: " + x) - }).asInstanceOf[ThreadPoolExecutor] - } - -} diff --git a/akka-spring/src/test/scala/ScalaDom.scala b/akka-spring/src/test/scala/ScalaDom.scala deleted file mode 100644 index 9319b0c328..0000000000 --- a/akka-spring/src/test/scala/ScalaDom.scala +++ /dev/null @@ -1,40 +0,0 @@ -package akka.spring -/** - * from http://stackoverflow.com/questions/2002685/any-conversion-from-scalas-xml-to-w3c-dom - */ - -object ScalaDom { - import scala.xml._ - import org.w3c.dom.{Document => JDocument, Node => JNode} - import 
javax.xml.parsers.DocumentBuilderFactory - - def dom(n: Node): JDocument = { - - val doc = DocumentBuilderFactory - .newInstance - .newDocumentBuilder - .getDOMImplementation - .createDocument(null, null, null) - - def build(node: Node, parent: JNode): Unit = { - val jnode: JNode = node match { - case e: Elem => { - val jn = doc.createElement(e.label) - e.attributes foreach { a => jn.setAttribute(a.key, a.value.mkString) } - jn - } - case a: Atom[_] => doc.createTextNode(a.text) - case c: Comment => doc.createComment(c.commentText) - case er: EntityRef => doc.createEntityReference(er.entityName) - case pi: ProcInstr => doc.createProcessingInstruction(pi.target, pi.proctext) - } - parent.appendChild(jnode) - node.child.map { build(_, jnode) } - } - - build(n, doc) - doc - - } -} - diff --git a/akka-spring/src/test/scala/SupervisionBeanDefinitionParserTest.scala b/akka-spring/src/test/scala/SupervisionBeanDefinitionParserTest.scala deleted file mode 100644 index e5b4fc2c70..0000000000 --- a/akka-spring/src/test/scala/SupervisionBeanDefinitionParserTest.scala +++ /dev/null @@ -1,127 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith -import ScalaDom._ - -import org.w3c.dom.Element -import org.springframework.beans.factory.support.BeanDefinitionBuilder -import akka.config.Supervision. {FaultHandlingStrategy, AllForOneStrategy} - -/** - * Test for SupervisionBeanDefinitionParser - * @author michaelkober - */ -@RunWith(classOf[JUnitRunner]) -class SupervisionBeanDefinitionParserTest extends Spec with ShouldMatchers { - private class Parser extends SupervisionBeanDefinitionParser - - describe("A SupervisionBeanDefinitionParser") { - val parser = new Parser() - val builder = BeanDefinitionBuilder.genericBeanDefinition("foo.bar.Foo") - - it("should be able to parse typed actor configuration") { - val props = parser.parseActor(createTypedActorElement); - assert(props ne null) - assert(props.timeout == 1000) - assert(props.target == "foo.bar.MyPojo") - } - - it("should parse the supervisor restart strategy") { - parser.parseSupervisor(createSupervisorElement, builder); - val strategy = builder.getBeanDefinition.getPropertyValues.getPropertyValue("restartStrategy").getValue.asInstanceOf[FaultHandlingStrategy] - assert(strategy ne null) - assert(strategy.isInstanceOf[AllForOneStrategy]) - expect(3) { strategy.asInstanceOf[AllForOneStrategy].maxNrOfRetries.get } - expect(1000) { strategy.asInstanceOf[AllForOneStrategy].withinTimeRange.get } - } - - it("should parse the supervised typed actors") { - parser.parseSupervisor(createSupervisorElement, builder); - val supervised = builder.getBeanDefinition.getPropertyValues.getPropertyValue("supervised").getValue.asInstanceOf[List[ActorProperties]] - assert(supervised ne null) - expect(4) { supervised.length } - val iterator = supervised.iterator - val prop1 = iterator.next - val prop2 = iterator.next - val prop3 = iterator.next - val prop4 = iterator.next - expect("foo.bar.Foo") { prop1.target } - expect("foo.bar.Bar") { prop2.target } - expect("foo.bar.MyPojo") { prop3.target } - expect("foo.bar.MyPojo") { prop4.target } - expect("permanent") { prop1.lifecycle } - expect("temporary") { prop4.lifecycle } - } - - it("should throw IllegalArgumentException on missing mandatory attributes") { - evaluating { parser.parseSupervisor(createSupervisorMissingAttribute, builder) } should 
produce [IllegalArgumentException] - } - - it("should throw IllegalArgumentException on missing mandatory elements") { - evaluating { parser.parseSupervisor(createSupervisorMissingElement, builder) } should produce [IllegalArgumentException] - } - } - - private def createTypedActorElement : Element = { - val xml = - dom(xml).getDocumentElement - } - - private def createSupervisorElement : Element = { - val xml = - - - java.io.IOException - java.lang.NullPointerException - - - - - - - - - - - - - - dom(xml).getDocumentElement - } - - - private def createSupervisorMissingAttribute : Element = { - val xml = - - - java.io.IOException - - - - - - - dom(xml).getDocumentElement - } - - private def createSupervisorMissingElement : Element = { - val xml = - - - - - - - - dom(xml).getDocumentElement - } -} - diff --git a/akka-spring/src/test/scala/SupervisionFactoryBeanTest.scala b/akka-spring/src/test/scala/SupervisionFactoryBeanTest.scala deleted file mode 100644 index 542b8a1377..0000000000 --- a/akka-spring/src/test/scala/SupervisionFactoryBeanTest.scala +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith -import akka.config.Supervision._ -import akka.config.TypedActorConfigurator - -private[akka] class Foo - -@RunWith(classOf[JUnitRunner]) -class SupervisionFactoryBeanTest extends Spec with ShouldMatchers { - - val faultHandlingStrategy = new AllForOneStrategy(List(classOf[Exception]), 3, 1000) - val typedActors = List(createTypedActorProperties("akka.spring.Foo", "1000")) - - private def createTypedActorProperties(target: String, timeout: String) : ActorProperties = { - val properties = new ActorProperties() - properties.target = target - properties.timeoutStr = timeout - properties - } - - describe("A SupervisionFactoryBean") { - val bean = new SupervisionFactoryBean - it("should have java getters and setters for all properties") { - bean.setRestartStrategy(faultHandlingStrategy) - assert(bean.getRestartStrategy == faultHandlingStrategy) - bean.setSupervised(typedActors) - assert(bean.getSupervised == typedActors) - } - - it("should return the object type AnyRef") { - assert(bean.getObjectType == classOf[AnyRef]) - } - } -} diff --git a/akka-spring/src/test/scala/SupervisorSpringFeatureTest.scala b/akka-spring/src/test/scala/SupervisorSpringFeatureTest.scala deleted file mode 100644 index 2ce629ed38..0000000000 --- a/akka-spring/src/test/scala/SupervisorSpringFeatureTest.scala +++ /dev/null @@ -1,57 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - - -import akka.spring.foo.{IMyPojo, MyPojo, IFoo, IBar} -import akka.dispatch._ -import akka.config.TypedActorConfigurator -import akka.actor.Supervisor - -import org.scalatest.FeatureSpec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith -import org.springframework.beans.factory.support.DefaultListableBeanFactory -import org.springframework.beans.factory.xml.XmlBeanDefinitionReader -import org.springframework.context.ApplicationContext -import org.springframework.context.support.ClassPathXmlApplicationContext -import org.springframework.core.io.{ClassPathResource, Resource} -import java.util.concurrent._ - -/** - * Tests for spring configuration of supervisor hierarchies. 
- * @author michaelkober - */ -@RunWith(classOf[JUnitRunner]) -class SupervisorSpringFeatureTest extends FeatureSpec with ShouldMatchers { - - feature("Spring configuration") { - - scenario("get a supervisor for typed actors from context") { - val context = new ClassPathXmlApplicationContext("/supervisor-config.xml") - val myConfigurator = context.getBean("supervision1").asInstanceOf[TypedActorConfigurator] - // get TypedActors - val foo = myConfigurator.getInstance(classOf[IFoo]) - assert(foo ne null) - val bar = myConfigurator.getInstance(classOf[IBar]) - assert(bar ne null) - val pojo = myConfigurator.getInstance(classOf[IMyPojo]) - assert(pojo ne null) - } - - scenario("get a supervisor for untyped actors from context") { - val context = new ClassPathXmlApplicationContext("/supervisor-config.xml") - val supervisor = context.getBean("supervision-untyped-actors").asInstanceOf[Supervisor] - supervisor.children - } - - scenario("get a supervisor and dispatcher from context") { - val context = new ClassPathXmlApplicationContext("/supervisor-config.xml") - val myConfigurator = context.getBean("supervision-with-dispatcher").asInstanceOf[TypedActorConfigurator] - val foo = myConfigurator.getInstance(classOf[IFoo]) - assert(foo ne null) - } - } -} diff --git a/akka-spring/src/test/scala/TypedActorBeanDefinitionParserTest.scala b/akka-spring/src/test/scala/TypedActorBeanDefinitionParserTest.scala deleted file mode 100644 index f665784355..0000000000 --- a/akka-spring/src/test/scala/TypedActorBeanDefinitionParserTest.scala +++ /dev/null @@ -1,81 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - -import org.scalatest.Spec -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith -import ScalaDom._ - -import org.w3c.dom.Element - -/** - * Test for TypedActorParser - * @author michaelkober - */ -@RunWith(classOf[JUnitRunner]) -class TypedActorBeanDefinitionParserTest extends Spec with ShouldMatchers { - private class Parser extends ActorParser - - describe("A TypedActorParser") { - val parser = new Parser() - it("should parse the typed actor configuration") { - val xml = - - - - val props = parser.parseActor(dom(xml).getDocumentElement); - assert(props ne null) - assert(props.timeout === 1000) - assert(props.target === "foo.bar.MyPojo") - assert(props.scope === "prototype") - assert(props.propertyEntries.entryList.size === 1) - } - - it("should throw IllegalArgumentException on missing mandatory attributes") { - val xml = - - evaluating { parser.parseActor(dom(xml).getDocumentElement) } should produce [IllegalArgumentException] - } - - it("should parse TypedActors configuration with dispatcher") { - val xml = - - - val props = parser.parseActor(dom(xml).getDocumentElement); - assert(props ne null) - assert(props.dispatcher.dispatcherType === "thread-based") - } - - it("should parse remote TypedActors configuration") { - val xml = - - - val props = parser.parseActor(dom(xml).getDocumentElement); - assert(props ne null) - assert(props.host === "com.some.host") - assert(props.port === "2552") - assert(!props.serverManaged) - } - - it("should parse remote server managed TypedActors configuration") { - val xml = - - - val props = parser.parseActor(dom(xml).getDocumentElement); - assert(props ne null) - assert(props.host === "com.some.host") - assert(props.port === "2552") - assert(props.serviceName === "my-service") - assert(props.serverManaged) - } - } -} diff --git 
a/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala b/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala deleted file mode 100644 index 36ff8d9f06..0000000000 --- a/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala +++ /dev/null @@ -1,153 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - - -import foo.{PingActor, IMyPojo, MyPojo} -import akka.dispatch.FutureTimeoutException -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith -import org.springframework.beans.factory.support.DefaultListableBeanFactory -import org.springframework.beans.factory.xml.XmlBeanDefinitionReader -import org.springframework.context.ApplicationContext -import org.springframework.context.support.ClassPathXmlApplicationContext -import org.springframework.core.io.{ClassPathResource, Resource} -import org.scalatest.{BeforeAndAfterAll, FeatureSpec} -import akka.remote.{RemoteClient, RemoteServer, RemoteNode} -import java.util.concurrent.CountDownLatch -import akka.actor.{TypedActor, RemoteTypedActorOne, Actor} -import akka.actor.remote.RemoteTypedActorOneImpl - -/** - * Tests for spring configuration of typed actors. - * @author michaelkober - */ -@RunWith(classOf[JUnitRunner]) -class TypedActorSpringFeatureTest extends FeatureSpec with ShouldMatchers with BeforeAndAfterAll { - - var server1: RemoteServer = null - var server2: RemoteServer = null - - override def beforeAll = { - val actor = Actor.actorOf[PingActor] // FIXME: remove this line when ticket 425 is fixed - server1 = new RemoteServer() - server1.start("localhost", 9990) - server2 = new RemoteServer() - server2.start("localhost", 9992) - - val typedActor = TypedActor.newInstance(classOf[RemoteTypedActorOne], classOf[RemoteTypedActorOneImpl], 1000) - server1.registerTypedActor("typed-actor-service", typedActor) - } - - // make sure the servers shutdown cleanly after the test has finished - override def afterAll = { - try { - server1.shutdown - server2.shutdown - RemoteClient.shutdownAll - Thread.sleep(1000) - } catch { - case e => () - } - } - - def getTypedActorFromContext(config: String, id: String) : IMyPojo = { - MyPojo.latch = new CountDownLatch(1) - val context = new ClassPathXmlApplicationContext(config) - val myPojo: IMyPojo = context.getBean(id).asInstanceOf[IMyPojo] - myPojo - } - - feature("parse Spring application context") { - - scenario("akka:typed-actor and akka:supervision and akka:dispatcher can be used as top level elements") { - val context = new ClassPathResource("/typed-actor-config.xml") - val beanFactory = new DefaultListableBeanFactory() - val reader = new XmlBeanDefinitionReader(beanFactory) - reader.loadBeanDefinitions(context) - assert(beanFactory.containsBeanDefinition("simple-typed-actor")) - assert(beanFactory.containsBeanDefinition("remote-typed-actor")) - assert(beanFactory.containsBeanDefinition("supervision1")) - assert(beanFactory.containsBeanDefinition("dispatcher1")) - } - - scenario("get a typed actor") { - val myPojo = getTypedActorFromContext("/typed-actor-config.xml", "simple-typed-actor") - assert(myPojo.getFoo() === "foo") - myPojo.oneWay("hello 1") - MyPojo.latch.await - assert(MyPojo.lastOneWayMessage === "hello 1") - } - - scenario("get a typed actor of bean") { - val myPojo = getTypedActorFromContext("/typed-actor-config.xml", "simple-typed-actor-of-bean") - assert(myPojo.getFoo() === "foo") - myPojo.oneWay("hello 1") - MyPojo.latch.await - assert(MyPojo.lastOneWayMessage === 
"hello 1") - } - - scenario("FutureTimeoutException when timed out") { - val myPojo = getTypedActorFromContext("/typed-actor-config.xml", "simple-typed-actor") - evaluating {myPojo.longRunning()} should produce[FutureTimeoutException] - } - - scenario("typed-actor with timeout") { - val myPojo = getTypedActorFromContext("/typed-actor-config.xml", "simple-typed-actor-long-timeout") - assert(myPojo.longRunning() === "this took long"); - } - - scenario("get a remote typed-actor") { - val myPojo = getTypedActorFromContext("/typed-actor-config.xml", "remote-typed-actor") - assert(myPojo.getFoo() === "foo") - myPojo.oneWay("hello 3") - MyPojo.latch.await - assert(MyPojo.lastOneWayMessage === "hello 3") - } - - scenario("get a client-managed-remote-typed-actor") { - val myPojo = getTypedActorFromContext("/server-managed-config.xml", "client-managed-remote-typed-actor") - assert(myPojo.getFoo() === "foo") - myPojo.oneWay("hello client-managed-remote-typed-actor") - MyPojo.latch.await - assert(MyPojo.lastOneWayMessage === "hello client-managed-remote-typed-actor") - } - - scenario("get a server-managed-remote-typed-actor") { - val serverPojo = getTypedActorFromContext("/server-managed-config.xml", "server-managed-remote-typed-actor") - // - val myPojoProxy = RemoteClient.typedActorFor(classOf[IMyPojo], classOf[IMyPojo].getName, 5000L, "localhost", 9990) - assert(myPojoProxy.getFoo() === "foo") - myPojoProxy.oneWay("hello server-managed-remote-typed-actor") - MyPojo.latch.await - assert(MyPojo.lastOneWayMessage === "hello server-managed-remote-typed-actor") - } - - scenario("get a server-managed-remote-typed-actor-custom-id") { - val serverPojo = getTypedActorFromContext("/server-managed-config.xml", "server-managed-remote-typed-actor-custom-id") - // - val myPojoProxy = RemoteClient.typedActorFor(classOf[IMyPojo], "mypojo-service", 5000L, "localhost", 9990) - assert(myPojoProxy.getFoo() === "foo") - myPojoProxy.oneWay("hello server-managed-remote-typed-actor 2") - MyPojo.latch.await - assert(MyPojo.lastOneWayMessage === "hello server-managed-remote-typed-actor 2") - } - - scenario("get a client proxy for server-managed-remote-typed-actor") { - MyPojo.latch = new CountDownLatch(1) - val context = new ClassPathXmlApplicationContext("/server-managed-config.xml") - val myPojo: IMyPojo = context.getBean("server-managed-remote-typed-actor-custom-id").asInstanceOf[IMyPojo] - // get client proxy from spring context - val myPojoProxy = context.getBean("typed-client-1").asInstanceOf[IMyPojo] - assert(myPojoProxy.getFoo() === "foo") - myPojoProxy.oneWay("hello") - MyPojo.latch.await - } - - - } - -} - diff --git a/akka-spring/src/test/scala/UntypedActorSpringFeatureTest.scala b/akka-spring/src/test/scala/UntypedActorSpringFeatureTest.scala deleted file mode 100644 index aded68a559..0000000000 --- a/akka-spring/src/test/scala/UntypedActorSpringFeatureTest.scala +++ /dev/null @@ -1,152 +0,0 @@ -/** - * Copyright (C) 2009-2010 Scalable Solutions AB - */ -package akka.spring - - -import foo.PingActor -import akka.dispatch.ExecutorBasedEventDrivenWorkStealingDispatcher -import org.scalatest.matchers.ShouldMatchers -import org.scalatest.junit.JUnitRunner -import org.junit.runner.RunWith -import org.springframework.context.support.ClassPathXmlApplicationContext -import akka.remote.{RemoteClient, RemoteServer} -import org.scalatest.{BeforeAndAfterAll, FeatureSpec} - -import java.util.concurrent.CountDownLatch -import akka.actor.{RemoteActorRef, ActorRegistry, Actor, ActorRef} - -/** - * Tests for spring configuration 
of untyped actors. - * @author michaelkober - */ -@RunWith(classOf[JUnitRunner]) -class UntypedActorSpringFeatureTest extends FeatureSpec with ShouldMatchers with BeforeAndAfterAll { - - var server1: RemoteServer = null - var server2: RemoteServer = null - - - override def beforeAll = { - val actor = Actor.actorOf[PingActor] // FIXME: remove this line when ticket 425 is fixed - server1 = new RemoteServer() - server1.start("localhost", 9990) - server2 = new RemoteServer() - server2.start("localhost", 9992) - } - - // make sure the servers shutdown cleanly after the test has finished - override def afterAll = { - try { - server1.shutdown - server2.shutdown - RemoteClient.shutdownAll - Thread.sleep(1000) - } catch { - case e => () - } - } - - - def getPingActorFromContext(config: String, id: String) : ActorRef = { - PingActor.latch = new CountDownLatch(1) - val context = new ClassPathXmlApplicationContext(config) - val pingActor = context.getBean(id).asInstanceOf[ActorRef] - assert(pingActor.getActorClassName() === "akka.spring.foo.PingActor") - pingActor.start() - } - - - feature("parse Spring application context") { - - scenario("get an untyped actor") { - val myactor = getPingActorFromContext("/untyped-actor-config.xml", "simple-untyped-actor") - myactor.sendOneWay("Hello") - PingActor.latch.await - assert(PingActor.lastMessage === "Hello") - assert(myactor.isDefinedAt("some string message")) - } - - scenario("untyped-actor of provided bean") { - val myactor = getPingActorFromContext("/untyped-actor-config.xml", "simple-untyped-actor-of-bean") - myactor.sendOneWay("Hello") - PingActor.latch.await - assert(PingActor.lastMessage === "Hello") - assert(myactor.isDefinedAt("some string message")) - } - - scenario("untyped-actor with timeout") { - val myactor = getPingActorFromContext("/untyped-actor-config.xml", "simple-untyped-actor-long-timeout") - assert(myactor.getTimeout() === 10000) - myactor.sendOneWay("Hello 2") - PingActor.latch.await - assert(PingActor.lastMessage === "Hello 2") - } - - scenario("get a remote untyped-actor") { - val myactor = getPingActorFromContext("/untyped-actor-config.xml", "remote-untyped-actor") - myactor.sendOneWay("Hello 4") - assert(myactor.getRemoteAddress().isDefined) - assert(myactor.getRemoteAddress().get.getHostName() === "localhost") - assert(myactor.getRemoteAddress().get.getPort() === 9992) - PingActor.latch.await - assert(PingActor.lastMessage === "Hello 4") - } - - scenario("untyped-actor with custom dispatcher") { - val myactor = getPingActorFromContext("/untyped-actor-config.xml", "untyped-actor-with-dispatcher") - assert(myactor.getTimeout() === 1000) - assert(myactor.getDispatcher.isInstanceOf[ExecutorBasedEventDrivenWorkStealingDispatcher]) - myactor.sendOneWay("Hello 5") - PingActor.latch.await - assert(PingActor.lastMessage === "Hello 5") - } - - scenario("create client managed remote untyped-actor") { - val myactor = getPingActorFromContext("/server-managed-config.xml", "client-managed-remote-untyped-actor") - myactor.sendOneWay("Hello client managed remote untyped-actor") - PingActor.latch.await - assert(PingActor.lastMessage === "Hello client managed remote untyped-actor") - assert(myactor.getRemoteAddress().isDefined) - assert(myactor.getRemoteAddress().get.getHostName() === "localhost") - assert(myactor.getRemoteAddress().get.getPort() === 9990) - } - - scenario("create server managed remote untyped-actor") { - val myactor = getPingActorFromContext("/server-managed-config.xml", "server-managed-remote-untyped-actor") - val nrOfActors = 
ActorRegistry.actors.length - val actorRef = RemoteClient.actorFor("akka.spring.foo.PingActor", "localhost", 9990) - actorRef.sendOneWay("Hello server managed remote untyped-actor") - PingActor.latch.await - assert(PingActor.lastMessage === "Hello server managed remote untyped-actor") - assert(ActorRegistry.actors.length === nrOfActors) - } - - scenario("create server managed remote untyped-actor with custom service id") { - val myactor = getPingActorFromContext("/server-managed-config.xml", "server-managed-remote-untyped-actor-custom-id") - val nrOfActors = ActorRegistry.actors.length - val actorRef = RemoteClient.actorFor("ping-service", "localhost", 9990) - actorRef.sendOneWay("Hello server managed remote untyped-actor") - PingActor.latch.await - assert(PingActor.lastMessage === "Hello server managed remote untyped-actor") - assert(ActorRegistry.actors.length === nrOfActors) - } - - scenario("get client actor for server managed remote untyped-actor") { - PingActor.latch = new CountDownLatch(1) - val context = new ClassPathXmlApplicationContext("/server-managed-config.xml") - val pingActor = context.getBean("server-managed-remote-untyped-actor-custom-id").asInstanceOf[ActorRef] - assert(pingActor.getActorClassName() === "akka.spring.foo.PingActor") - pingActor.start() - val nrOfActors = ActorRegistry.actors.length - // get client actor ref from spring context - val actorRef = context.getBean("client-1").asInstanceOf[ActorRef] - assert(actorRef.isInstanceOf[RemoteActorRef]) - actorRef.sendOneWay("Hello") - PingActor.latch.await - assert(ActorRegistry.actors.length === nrOfActors) - } - - } -} - diff --git a/config/cassandra-akka-storage-conf.xml b/config/cassandra-akka-storage-conf.xml deleted file mode 100644 index f87d2eedf5..0000000000 --- a/config/cassandra-akka-storage-conf.xml +++ /dev/null @@ -1,395 +0,0 @@ - - - - - - - - akka - - - false - - - - - - - - - - - - - - - org.apache.cassandra.locator.RackUnawareStrategy - - - 1 - - - org.apache.cassandra.locator.EndPointSnitch - - - - - - org.apache.cassandra.auth.AllowAllAuthenticator - - - org.apache.cassandra.dht.RandomPartitioner - - - - - - cassandra/commitlog - - cassandra/data - - - - - - 127.0.0.1 - - - - - - - 10000 - - 128 - - - - - - localhost - - 7000 - - - localhost - - 9160 - - false - - - - - - - - auto - - - 512 - - - 64 - - - 32 - 8 - - - 64 - - - 64 - - 256 - - 0.3 - - 60 - - - 8 - 32 - - - periodic - - 10000 - - - - - 864000 - diff --git a/config/microkernel-server.xml b/config/microkernel-server.xml deleted file mode 100644 index 6a5ed730ed..0000000000 --- a/config/microkernel-server.xml +++ /dev/null @@ -1,97 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 300000 - 2 - false - 8443 - 20000 - 5000 - - - - - - - - - - - - - - - - / - - akka.http.AkkaMistServlet - /* - - - - - - - - - - - - - - - true - true - true - 1000 - - diff --git a/embedded-repo/com/amazonaws/aws-java-sdk/1.0.14/aws-java-sdk-1.0.14-javadoc.jar b/embedded-repo/com/amazonaws/aws-java-sdk/1.0.14/aws-java-sdk-1.0.14-javadoc.jar deleted file mode 100644 index 6ab90957c2..0000000000 Binary files a/embedded-repo/com/amazonaws/aws-java-sdk/1.0.14/aws-java-sdk-1.0.14-javadoc.jar and /dev/null differ diff --git a/embedded-repo/com/amazonaws/aws-java-sdk/1.0.14/aws-java-sdk-1.0.14-sources.jar b/embedded-repo/com/amazonaws/aws-java-sdk/1.0.14/aws-java-sdk-1.0.14-sources.jar deleted file mode 100644 index 0a97b3fa7b..0000000000 Binary files a/embedded-repo/com/amazonaws/aws-java-sdk/1.0.14/aws-java-sdk-1.0.14-sources.jar 
and /dev/null differ
diff --git a/embedded-repo/com/amazonaws/aws-java-sdk/1.0.14/aws-java-sdk-1.0.14.jar b/embedded-repo/com/amazonaws/aws-java-sdk/1.0.14/aws-java-sdk-1.0.14.jar
deleted file mode 100644
index a11205d066..0000000000
Binary files a/embedded-repo/com/amazonaws/aws-java-sdk/1.0.14/aws-java-sdk-1.0.14.jar and /dev/null differ
diff --git a/embedded-repo/com/rabbitmq/rabbitmq-client/0.9.1/rabbitmq-client-0.9.1.jar b/embedded-repo/com/rabbitmq/rabbitmq-client/0.9.1/rabbitmq-client-0.9.1.jar
deleted file mode 100644
index 776a3945a1..0000000000
Binary files a/embedded-repo/com/rabbitmq/rabbitmq-client/0.9.1/rabbitmq-client-0.9.1.jar and /dev/null differ
diff --git a/embedded-repo/com/rabbitmq/rabbitmq-client/0.9.1/rabbitmq-client-0.9.1.pom b/embedded-repo/com/rabbitmq/rabbitmq-client/0.9.1/rabbitmq-client-0.9.1.pom
deleted file mode 100644
index c78d868476..0000000000
--- a/embedded-repo/com/rabbitmq/rabbitmq-client/0.9.1/rabbitmq-client-0.9.1.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project>
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>com.rabbitmq</groupId>
-  <artifactId>rabbitmq-client</artifactId>
-  <version>0.9.1</version>
-  <packaging>jar</packaging>
-</project>
\ No newline at end of file
diff --git a/embedded-repo/com/redis/redisclient/2.8.0-1.4/redisclient-2.8.0-1.4.jar b/embedded-repo/com/redis/redisclient/2.8.0-1.4/redisclient-2.8.0-1.4.jar
deleted file mode 100644
index b811e6ab92..0000000000
Binary files a/embedded-repo/com/redis/redisclient/2.8.0-1.4/redisclient-2.8.0-1.4.jar and /dev/null differ
diff --git a/embedded-repo/com/redis/redisclient/2.8.0-2.0.1/redisclient-2.8.0-2.0.1.jar b/embedded-repo/com/redis/redisclient/2.8.0-2.0.1/redisclient-2.8.0-2.0.1.jar
deleted file mode 100644
index 7709ef140b..0000000000
Binary files a/embedded-repo/com/redis/redisclient/2.8.0-2.0.1/redisclient-2.8.0-2.0.1.jar and /dev/null differ
diff --git a/embedded-repo/com/redis/redisclient/2.8.0-2.0.1/redisclient-2.8.0-2.0.1.pom b/embedded-repo/com/redis/redisclient/2.8.0-2.0.1/redisclient-2.8.0-2.0.1.pom
deleted file mode 100644
index 4010889e31..0000000000
--- a/embedded-repo/com/redis/redisclient/2.8.0-2.0.1/redisclient-2.8.0-2.0.1.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project>
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>com.redis</groupId>
-  <artifactId>redisclient</artifactId>
-  <version>2.8.0-2.0.1</version>
-  <packaging>jar</packaging>
-</project>
diff --git a/embedded-repo/com/redis/redisclient/2.8.0-2.0.2/redisclient-2.8.0-2.0.2.jar b/embedded-repo/com/redis/redisclient/2.8.0-2.0.2/redisclient-2.8.0-2.0.2.jar
deleted file mode 100644
index cbaf69ad80..0000000000
Binary files a/embedded-repo/com/redis/redisclient/2.8.0-2.0.2/redisclient-2.8.0-2.0.2.jar and /dev/null differ
diff --git a/embedded-repo/com/redis/redisclient/2.8.0-2.0.2/redisclient-2.8.0-2.0.2.pom b/embedded-repo/com/redis/redisclient/2.8.0-2.0.2/redisclient-2.8.0-2.0.2.pom
deleted file mode 100644
index 32e34d89a0..0000000000
--- a/embedded-repo/com/redis/redisclient/2.8.0-2.0.2/redisclient-2.8.0-2.0.2.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project>
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>com.redis</groupId>
-  <artifactId>redisclient</artifactId>
-  <version>2.8.0-2.0.2</version>
-  <packaging>jar</packaging>
-</project>
diff --git a/embedded-repo/com/redis/redisclient/2.8.0-2.0.3/redisclient-2.8.0-2.0.3.jar b/embedded-repo/com/redis/redisclient/2.8.0-2.0.3/redisclient-2.8.0-2.0.3.jar
deleted file mode 100644
index be75baa5aa..0000000000
Binary files a/embedded-repo/com/redis/redisclient/2.8.0-2.0.3/redisclient-2.8.0-2.0.3.jar and /dev/null differ
diff --git a/embedded-repo/com/redis/redisclient/2.8.0-2.0.3/redisclient-2.8.0-2.0.3.pom b/embedded-repo/com/redis/redisclient/2.8.0-2.0.3/redisclient-2.8.0-2.0.3.pom
deleted file mode 100644
index 7b1878f070..0000000000
--- a/embedded-repo/com/redis/redisclient/2.8.0-2.0.3/redisclient-2.8.0-2.0.3.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project>
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>com.redis</groupId>
-  <artifactId>redisclient</artifactId>
-  <version>2.8.0-2.0.3</version>
-  <packaging>jar</packaging>
-</project>
diff --git a/embedded-repo/com/redis/redisclient/2.8.0-2.0/redisclient-2.8.0-2.0.jar b/embedded-repo/com/redis/redisclient/2.8.0-2.0/redisclient-2.8.0-2.0.jar
deleted file mode 100644
index 66c18b6fbf..0000000000
Binary files a/embedded-repo/com/redis/redisclient/2.8.0-2.0/redisclient-2.8.0-2.0.jar and /dev/null differ
diff --git a/embedded-repo/com/redis/redisclient/2.8.0-2.0/redisclient-2.8.0-2.0.pom b/embedded-repo/com/redis/redisclient/2.8.0-2.0/redisclient-2.8.0-2.0.pom
deleted file mode 100644
index 12558da1c4..0000000000
--- a/embedded-repo/com/redis/redisclient/2.8.0-2.0/redisclient-2.8.0-2.0.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project>
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>com.redis</groupId>
-  <artifactId>redisclient</artifactId>
-  <version>2.8.0-2.0</version>
-  <packaging>jar</packaging>
-</project>
\ No newline at end of file
diff --git a/embedded-repo/com/redis/redisclient/2.8.0.RC7-1.4/redisclient-2.8.0.RC7-1.4.jar b/embedded-repo/com/redis/redisclient/2.8.0.RC7-1.4/redisclient-2.8.0.RC7-1.4.jar
deleted file mode 100644
index d25fcfdccf..0000000000
Binary files a/embedded-repo/com/redis/redisclient/2.8.0.RC7-1.4/redisclient-2.8.0.RC7-1.4.jar and /dev/null differ
diff --git a/embedded-repo/com/trifork/riak-java-pb-client/1.0-for-akka-by-ticktock/riak-java-pb-client-1.0-for-akka-by-ticktock.jar b/embedded-repo/com/trifork/riak-java-pb-client/1.0-for-akka-by-ticktock/riak-java-pb-client-1.0-for-akka-by-ticktock.jar
deleted file mode 100644
index 053eb397c7..0000000000
Binary files a/embedded-repo/com/trifork/riak-java-pb-client/1.0-for-akka-by-ticktock/riak-java-pb-client-1.0-for-akka-by-ticktock.jar and /dev/null differ
diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar
deleted file mode 100644
index 7aa1393153..0000000000
Binary files a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar and /dev/null differ
diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar.md5 b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar.md5
deleted file mode 100644
index 46500d76fc..0000000000
--- a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar.md5
+++ /dev/null
@@ -1 +0,0 @@
-607f775c6b2ec1954fe60717875aefea
\ No newline at end of file
diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar.sha1 b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar.sha1
deleted file mode 100644
index 3eb85256e2..0000000000
--- a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1-sources.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ee377a85bf07b2afb3a98157f926ebdb47a5e88c
\ No newline at end of file
diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar
deleted file mode 100644
index 7222c09136..0000000000
Binary files a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar and /dev/null differ
diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar.md5 b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar.md5
deleted file mode 100644
index be8f3065c7..0000000000
--- a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar.md5
+++ /dev/null
@@ -1 +0,0 @@
-ba1be87b58c03e8ae6f890ca87c74b5b
\ No newline at end of file
diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar.sha1 b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar.sha1
deleted file mode 100644
index aa86a31839..0000000000
--- a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-aaa96009d7e151f89703b4d932fc73ebcf9bc973
\ No newline at end of file
diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom
deleted file mode 100644
index e3cdbce60d..0000000000
--- a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom
+++ /dev/null
@@ -1,144 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project>
-
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>org.apache.camel</groupId>
-    <artifactId>camel-parent</artifactId>
-    <version>2.4.0</version>
-  </parent>
-
-  <artifactId>camel-jetty</artifactId>
-  <packaging>bundle</packaging>
-  <name>Camel :: Jetty</name>
-  <description>Camel Jetty support</description>
-  <version>2.4.0.1</version>
-
-  <properties>
-    <camel.osgi.export.pkg>org.apache.camel.component.jetty.*</camel.osgi.export.pkg>
-  </properties>
-
-  <dependencies>
-
-    <dependency>
-      <groupId>org.apache.camel</groupId>
-      <artifactId>camel-core</artifactId>
-      <version>2.4.0</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.camel</groupId>
-      <artifactId>camel-http</artifactId>
-      <version>2.4.0</version>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-server</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-security</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-servlet</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-servlets</artifactId>
-      <version>${jetty-version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-client</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.eclipse.jetty</groupId>
-      <artifactId>jetty-jmx</artifactId>
-      <version>${jetty-version}</version>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.camel</groupId>
-      <artifactId>camel-test</artifactId>
-      <version>2.4.0</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.camel</groupId>
-      <artifactId>camel-spring</artifactId>
-      <version>2.4.0</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>javax.mail</groupId>
-      <artifactId>mail</artifactId>
-      <version>${javax-mail-version}</version>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.springframework</groupId>
-      <artifactId>spring-context</artifactId>
-      <optional>true</optional>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.springframework</groupId>
-      <artifactId>spring-aop</artifactId>
-      <optional>true</optional>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.springframework</groupId>
-      <artifactId>spring-test</artifactId>
-      <optional>true</optional>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-      <scope>test</scope>
-    </dependency>
-
-  </dependencies>
-
-  <build>
-    <plugins>
-      <plugin>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <forkMode>pertest</forkMode>
-          <excludes>
-            <exclude>**/*XXXTest.*</exclude>
-          </excludes>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-
-</project>
diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom.md5 b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom.md5
deleted file mode 100644
index 295aae7a23..0000000000
--- a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom.md5
+++ /dev/null
@@ -1 +0,0 @@
-fba57baa166195ac2b2a013c3cc6d3f1
\ No newline at end of file
diff --git a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom.sha1 b/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom.sha1
deleted file mode 100644
index 7fb0b3347f..0000000000
--- a/embedded-repo/org/apache/camel/camel-jetty/2.4.0.1/camel-jetty-2.4.0.1.pom.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c41ff483ac35754c1d41b1823561935849b362ed
\ No newline at end of file
diff --git a/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml b/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml
deleted file mode 100644
index ac53a45ab4..0000000000
--- a/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-<metadata>
-  <groupId>org.apache.camel</groupId>
-  <artifactId>camel-jetty</artifactId>
-  <version>2.4.0.1</version>
-  <versioning>
-    <versions>
-      <version>2.4.0.1</version>
-    </versions>
-    <lastUpdated>20100723102939</lastUpdated>
-  </versioning>
-</metadata>
diff --git a/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml.md5 b/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml.md5
deleted file mode 100644
index fcff48b3ce..0000000000
--- a/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml.md5
+++ /dev/null
@@ -1 +0,0 @@
-34f1efbcb11f7251390994d8f81598b2
\ No newline at end of file
diff --git a/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml.sha1 b/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml.sha1
deleted file mode 100644
index 64ef58e71f..0000000000
--- a/embedded-repo/org/apache/camel/camel-jetty/maven-metadata.xml.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2e1bb47c5a8c19f98b70e5e6af450861933deacc
\ No newline at end of file
diff --git a/embedded-repo/org/apache/cassandra/cassandra/0.4.1/cassandra-0.4.1.jar b/embedded-repo/org/apache/cassandra/cassandra/0.4.1/cassandra-0.4.1.jar
deleted file mode 100644
index 5c618e9cfc..0000000000
Binary files a/embedded-repo/org/apache/cassandra/cassandra/0.4.1/cassandra-0.4.1.jar and /dev/null differ
diff --git a/embedded-repo/org/apache/cassandra/cassandra/0.4.1/cassandra-0.4.1.pom b/embedded-repo/org/apache/cassandra/cassandra/0.4.1/cassandra-0.4.1.pom
deleted file mode 100755
index 7a59483ddd..0000000000
--- a/embedded-repo/org/apache/cassandra/cassandra/0.4.1/cassandra-0.4.1.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project>
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.cassandra</groupId>
-  <artifactId>cassandra</artifactId>
-  <version>0.4.1</version>
-  <packaging>jar</packaging>
-</project>
\ No newline at end of file
diff --git a/embedded-repo/org/apache/cassandra/cassandra/0.5.0/cassandra-0.5.0.jar b/embedded-repo/org/apache/cassandra/cassandra/0.5.0/cassandra-0.5.0.jar
deleted file mode 100644
index 05bfccec95..0000000000
Binary files a/embedded-repo/org/apache/cassandra/cassandra/0.5.0/cassandra-0.5.0.jar and /dev/null differ
diff --git a/embedded-repo/org/apache/cassandra/cassandra/0.6.1/cassandra-0.6.1.jar b/embedded-repo/org/apache/cassandra/cassandra/0.6.1/cassandra-0.6.1.jar
deleted file mode 100644
index c7c71b1750..0000000000
Binary files a/embedded-repo/org/apache/cassandra/cassandra/0.6.1/cassandra-0.6.1.jar and /dev/null differ
diff --git a/embedded-repo/org/apache/cassandra/cassandra/0.6.1/cassandra-0.6.1.pom b/embedded-repo/org/apache/cassandra/cassandra/0.6.1/cassandra-0.6.1.pom
deleted file mode 100755
index 4969b74564..0000000000
--- a/embedded-repo/org/apache/cassandra/cassandra/0.6.1/cassandra-0.6.1.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project>
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.cassandra</groupId>
-  <artifactId>cassandra</artifactId>
-  <version>0.6.1</version>
-  <packaging>jar</packaging>
-</project>
\ No newline at end of file
diff --git a/embedded-repo/org/apache/cassandra/clhm-production/0.5.0/clhm-production-0.5.0.jar b/embedded-repo/org/apache/cassandra/clhm-production/0.5.0/clhm-production-0.5.0.jar
deleted file mode 100644
index 028f505bb9..0000000000
Binary files a/embedded-repo/org/apache/cassandra/clhm-production/0.5.0/clhm-production-0.5.0.jar and /dev/null differ
diff --git a/embedded-repo/org/apache/cassandra/clhm-production/0.6.1/clhm-production-0.6.1.jar b/embedded-repo/org/apache/cassandra/clhm-production/0.6.1/clhm-production-0.6.1.jar
deleted file mode 100644
index 028f505bb9..0000000000
Binary files a/embedded-repo/org/apache/cassandra/clhm-production/0.6.1/clhm-production-0.6.1.jar and /dev/null differ
diff --git a/embedded-repo/org/apache/cassandra/clhm-production/0.6.1/clhm-production-0.6.1.pom b/embedded-repo/org/apache/cassandra/clhm-production/0.6.1/clhm-production-0.6.1.pom
deleted file mode 100755
index 432c1c225d..0000000000
--- a/embedded-repo/org/apache/cassandra/clhm-production/0.6.1/clhm-production-0.6.1.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project>
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.cassandra</groupId>
-  <artifactId>clhm-production</artifactId>
-  <version>0.6.1</version>
-  <packaging>jar</packaging>
-</project>
\ No newline at end of file
diff --git a/embedded-repo/org/apache/cassandra/high-scale-lib/0.5.0/high-scale-lib-0.5.0.jar b/embedded-repo/org/apache/cassandra/high-scale-lib/0.5.0/high-scale-lib-0.5.0.jar
deleted file mode 100644
index 421a436eed..0000000000
Binary files a/embedded-repo/org/apache/cassandra/high-scale-lib/0.5.0/high-scale-lib-0.5.0.jar and /dev/null differ
diff --git a/embedded-repo/org/apache/cassandra/high-scale-lib/0.6.1/high-scale-lib-0.6.1.jar b/embedded-repo/org/apache/cassandra/high-scale-lib/0.6.1/high-scale-lib-0.6.1.jar
deleted file mode 100644
index 421a436eed..0000000000
Binary files a/embedded-repo/org/apache/cassandra/high-scale-lib/0.6.1/high-scale-lib-0.6.1.jar and /dev/null differ
diff --git a/embedded-repo/org/apache/cassandra/high-scale-lib/0.6.1/high-scale-lib-0.6.1.pom b/embedded-repo/org/apache/cassandra/high-scale-lib/0.6.1/high-scale-lib-0.6.1.pom
deleted file mode 100755
index c361dbef9f..0000000000
--- a/embedded-repo/org/apache/cassandra/high-scale-lib/0.6.1/high-scale-lib-0.6.1.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project>
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.cassandra</groupId>
-  <artifactId>high-scale-lib</artifactId>
-  <version>0.6.1</version>
-  <packaging>jar</packaging>
-</project>
\ No newline at end of file
diff --git a/embedded-repo/org/apache/hbase/hbase-core/0.20.6/hbase-core-0.20.6.jar b/embedded-repo/org/apache/hbase/hbase-core/0.20.6/hbase-core-0.20.6.jar
deleted file mode 100644
index e74cf9017e..0000000000
Binary files a/embedded-repo/org/apache/hbase/hbase-core/0.20.6/hbase-core-0.20.6.jar and /dev/null differ
diff --git a/embedded-repo/org/apache/hbase/hbase-core/0.20.6/hbase-core-0.20.6.pom b/embedded-repo/org/apache/hbase/hbase-core/0.20.6/hbase-core-0.20.6.pom
deleted file mode 100644
index 19a8b54700..0000000000
--- a/embedded-repo/org/apache/hbase/hbase-core/0.20.6/hbase-core-0.20.6.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project>
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.hbase</groupId>
-  <artifactId>hbase-core</artifactId>
-  <version>0.20.6</version>
-  <packaging>jar</packaging>
-</project>
\ No newline at end of file
diff --git a/embedded-repo/org/apache/hbase/hbase-test/0.20.6/hbase-test-0.20.6.jar b/embedded-repo/org/apache/hbase/hbase-test/0.20.6/hbase-test-0.20.6.jar
deleted file mode 100644
index 34a65f908e..0000000000
Binary files a/embedded-repo/org/apache/hbase/hbase-test/0.20.6/hbase-test-0.20.6.jar and /dev/null differ
diff --git a/embedded-repo/org/apache/hbase/hbase-test/0.20.6/hbase-test-0.20.6.pom b/embedded-repo/org/apache/hbase/hbase-test/0.20.6/hbase-test-0.20.6.pom
deleted file mode 100644
index bdc80cc8b7..0000000000
--- a/embedded-repo/org/apache/hbase/hbase-test/0.20.6/hbase-test-0.20.6.pom
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project>
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.hbase</groupId>
-  <artifactId>hbase-test</artifactId>
-  <version>0.20.6</version>
-  <packaging>jar</packaging>
-</project>
\ No newline at end of file
diff --git a/embedded-repo/spy/memcached/2.5/memcached-2.5.jar b/embedded-repo/spy/memcached/2.5/memcached-2.5.jar
deleted file mode 100644
index 87072eaaa0..0000000000
Binary files a/embedded-repo/spy/memcached/2.5/memcached-2.5.jar and /dev/null differ
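With the embedded repository gone, builds that consumed these artifacts have to resolve them from ordinary Maven repositories instead. A minimal sbt 0.7-style sketch of that change; the project name and repository URL below are illustrative assumptions, and only the dependency coordinates mirror the deleted pom stubs:

    import sbt._

    // Hypothetical downstream project definition (not part of this commit).
    class MyAkkaClientProject(info: ProjectInfo) extends DefaultProject(info) {
      // An sbt 0.7 resolver, declared with the `at` syntax, replaces the
      // checked-in embedded-repo directory (URL assumed for illustration).
      val exampleRepo = "Example Repository" at "http://repo.example.org/maven2"

      // The same coordinates the deleted pom stubs carried.
      val rabbitmq = "com.rabbitmq" % "rabbitmq-client" % "0.9.1"
      val redis    = "com.redis" % "redisclient" % "2.8.0-2.0.3"
    }
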
project("akka-jta", "akka-jta", new AkkaJTAProject(_), akka_stm, akka_remote) - lazy val akka_kernel = project("akka-kernel", "akka-kernel", new AkkaKernelProject(_), - akka_remote, akka_jta, akka_http, akka_spring, akka_camel, akka_persistence, akka_amqp) - lazy val akka_osgi = project("akka-osgi", "akka-osgi", new AkkaOSGiParentProject(_)) + lazy val akka_http = project("akka-http", "akka-http", new AkkaHttpProject(_), akka_remote) lazy val akka_samples = project("akka-samples", "akka-samples", new AkkaSamplesParentProject(_)) // ------------------------------------------------------------------------------------------------------------------- @@ -475,23 +467,6 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { override def bndImportPackage = "javax.transaction;version=1.1" :: super.bndImportPackage.toList } - // ------------------------------------------------------------------------------------------------------------------- - // akka-amqp subproject - // ------------------------------------------------------------------------------------------------------------------- - - class AkkaAMQPProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { - val commons_io = Dependencies.commons_io - val rabbit = Dependencies.rabbit - val protobuf = Dependencies.protobuf - - // testing - val junit = Dependencies.junit - val multiverse = Dependencies.multiverse - val scalatest = Dependencies.scalatest - - override def testOptions = createTestFilter( _.endsWith("Test") ) - } - // ------------------------------------------------------------------------------------------------------------------- // akka-http subproject // ------------------------------------------------------------------------------------------------------------------- @@ -527,314 +502,6 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { override def testOptions = createTestFilter( _.endsWith("Test")) } - // ------------------------------------------------------------------------------------------------------------------- - // akka-persistence subproject - // ------------------------------------------------------------------------------------------------------------------- - - class AkkaPersistenceParentProject(info: ProjectInfo) extends ParentProject(info) { - override def disableCrossPaths = true - - lazy val akka_persistence_common = project("akka-persistence-common", "akka-persistence-common", - new AkkaPersistenceCommonProject(_), akka_remote, akka_stm) - lazy val akka_persistence_redis = project("akka-persistence-redis", "akka-persistence-redis", - new AkkaRedisProject(_), akka_persistence_common) - lazy val akka_persistence_mongo = project("akka-persistence-mongo", "akka-persistence-mongo", - new AkkaMongoProject(_), akka_persistence_common) - lazy val akka_persistence_cassandra = project("akka-persistence-cassandra", "akka-persistence-cassandra", - new AkkaCassandraProject(_), akka_persistence_common) - lazy val akka_persistence_hbase = project("akka-persistence-hbase", "akka-persistence-hbase", - new AkkaHbaseProject(_), akka_persistence_common) - lazy val akka_persistence_voldemort = project("akka-persistence-voldemort", "akka-persistence-voldemort", - new AkkaVoldemortProject(_), akka_persistence_common) - lazy val akka_persistence_riak = project("akka-persistence-riak", "akka-persistence-riak", - new AkkaRiakProject(_), akka_persistence_common) - lazy val akka_persistence_couchdb = project("akka-persistence-couchdb", "akka-persistence-couchdb", - new 
@@ -527,314 +502,6 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
     override def testOptions = createTestFilter( _.endsWith("Test"))
   }
 
-  // -------------------------------------------------------------------------------------------------------------------
-  // akka-persistence subproject
-  // -------------------------------------------------------------------------------------------------------------------
-
-  class AkkaPersistenceParentProject(info: ProjectInfo) extends ParentProject(info) {
-    override def disableCrossPaths = true
-
-    lazy val akka_persistence_common = project("akka-persistence-common", "akka-persistence-common",
-      new AkkaPersistenceCommonProject(_), akka_remote, akka_stm)
-    lazy val akka_persistence_redis = project("akka-persistence-redis", "akka-persistence-redis",
-      new AkkaRedisProject(_), akka_persistence_common)
-    lazy val akka_persistence_mongo = project("akka-persistence-mongo", "akka-persistence-mongo",
-      new AkkaMongoProject(_), akka_persistence_common)
-    lazy val akka_persistence_cassandra = project("akka-persistence-cassandra", "akka-persistence-cassandra",
-      new AkkaCassandraProject(_), akka_persistence_common)
-    lazy val akka_persistence_hbase = project("akka-persistence-hbase", "akka-persistence-hbase",
-      new AkkaHbaseProject(_), akka_persistence_common)
-    lazy val akka_persistence_voldemort = project("akka-persistence-voldemort", "akka-persistence-voldemort",
-      new AkkaVoldemortProject(_), akka_persistence_common)
-    lazy val akka_persistence_riak = project("akka-persistence-riak", "akka-persistence-riak",
-      new AkkaRiakProject(_), akka_persistence_common)
-    lazy val akka_persistence_couchdb = project("akka-persistence-couchdb", "akka-persistence-couchdb",
-      new AkkaCouchDBProject(_), akka_persistence_common)
-    lazy val akka_persistence_memcached= project("akka-persistence-memcached", "akka-persistence-memcached",
-      new AkkaMemcachedProject(_), akka_persistence_common)
-    lazy val akka_persistence_simpledb= project("akka-persistence-simpledb", "akka-persistence-simpledb",
-      new AkkaSimpledbProject(_), akka_persistence_common)
-  }
-
-  // -------------------------------------------------------------------------------------------------------------------
-  // akka-persistence-common subproject
-  // -------------------------------------------------------------------------------------------------------------------
-
-  class AkkaPersistenceCommonProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
-    val commons_pool = Dependencies.commons_pool
-    val thrift = Dependencies.thrift
-    val scalaj_coll = Dependencies.scalaj_coll
-  }
-
-  // -------------------------------------------------------------------------------------------------------------------
-  // akka-persistence-redis subproject
-  // -------------------------------------------------------------------------------------------------------------------
-
-  class AkkaRedisProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
-    val commons_codec = Dependencies.commons_codec
-    val redis = Dependencies.redis
-
-    override def testOptions = createTestFilter( _.endsWith("Test"))
-  }
-
-  // -------------------------------------------------------------------------------------------------------------------
-  // akka-persistence-mongo subproject
-  // -------------------------------------------------------------------------------------------------------------------
-
-  class AkkaMongoProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
-    val mongo = Dependencies.mongo
-    val casbah = Dependencies.casbah
-
-    override def testOptions = createTestFilter( _.endsWith("Test"))
-  }
-
-
-  // -------------------------------------------------------------------------------------------------------------------
-  // akka-persistence-cassandra subproject
-  // -------------------------------------------------------------------------------------------------------------------
-
-  class AkkaCassandraProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
-    val cassandra = Dependencies.cassandra
-
-    // testing
-    val cassandra_clhm = Dependencies.cassandra_clhm
-    val commons_coll = Dependencies.commons_coll
-    val google_coll = Dependencies.google_coll
-    val high_scale = Dependencies.high_scale
-
-    override def testOptions = createTestFilter( _.endsWith("Test"))
-  }
-
-  // -------------------------------------------------------------------------------------------------------------------
-  // akka-persistence-hbase subproject
-  // -------------------------------------------------------------------------------------------------------------------
-
-  class AkkaHbaseProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
-    override def ivyXML =
-      <dependencies>
-        ...
-      </dependencies>
-
-    override def testOptions = createTestFilter( _.endsWith("Test") )
-  }
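Each of these deleted store modules overrides testOptions via createTestFilter, a helper whose definition lives elsewhere in AkkaProject.scala and is not part of this diff. Assuming it simply wraps sbt 0.7's TestFilter option, it would look roughly like this:

    // Assumed sketch of the helper, not the actual Akka definition.
    // sbt 0.7 models test selection as TestOption values; TestFilter keeps only
    // the test classes whose fully qualified names satisfy the predicate.
    def createTestFilter(filter: String => Boolean): List[TestOption] =
      TestFilter(filter) :: Nil

    // Usage, as in the classes above:
    //   override def testOptions = createTestFilter(_.endsWith("Test"))
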
-
-  // -------------------------------------------------------------------------------------------------------------------
-  // akka-persistence-voldemort subproject
-  // -------------------------------------------------------------------------------------------------------------------
-
-  class AkkaVoldemortProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
-    val voldemort = Dependencies.voldemort
-    val voldemort_contrib = Dependencies.voldemort_contrib
-    val voldemort_needs_log4j = Dependencies.voldemort_needs_log4j
-
-    //testing
-    val scalatest = Dependencies.scalatest
-    val google_coll = Dependencies.google_coll
-    val jdom = Dependencies.jdom
-    val jetty = Dependencies.vold_jetty
-    val velocity = Dependencies.velocity
-    val dbcp = Dependencies.dbcp
-    val sjson = Dependencies.sjson_test
-
-    override def testOptions = createTestFilter({ s:String=> s.endsWith("Suite") || s.endsWith("Test")})
-  }
-
-  // -------------------------------------------------------------------------------------------------------------------
-  // akka-persistence-riak subproject
-  // -------------------------------------------------------------------------------------------------------------------
-
-  class AkkaRiakProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
-    val riak_pb = Dependencies.riak_pb_client
-    val protobuf = Dependencies.protobuf
-    //testing
-    val scalatest = Dependencies.scalatest
-
-
-    override def testOptions = createTestFilter(_.endsWith("Test"))
-  }
-
-  // -------------------------------------------------------------------------------------------------------------------
-  // akka-persistence-couchdb subproject
-  // -------------------------------------------------------------------------------------------------------------------
-
-  class AkkaCouchDBProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
-    val couch = Dependencies.commonsHttpClient
-    val spec = Dependencies.specs
-
-    override def testOptions = createTestFilter( _.endsWith("Test"))
-  }
-
-  class AkkaMemcachedProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
-    val memcached = Dependencies.spymemcached
-    val commons_codec = Dependencies.commons_codec
-
-    val scalatest = Dependencies.scalatest
-
-    override def testOptions = createTestFilter( _.endsWith("Test"))
-  }
-
-  class AkkaSimpledbProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
-    val memcached = Dependencies.simpledb
-    val commons_codec = Dependencies.commons_codec
-    val http = Dependencies.commonsHttpClient
-
-    val scalatest = Dependencies.scalatest
-
-    override def testOptions = createTestFilter( _.endsWith("Test"))
-  }
-
-  // -------------------------------------------------------------------------------------------------------------------
-  // akka-kernel subproject
-  // -------------------------------------------------------------------------------------------------------------------
-
-  class AkkaKernelProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath)
-
-  // -------------------------------------------------------------------------------------------------------------------
-  // akka-spring subproject
-  // -------------------------------------------------------------------------------------------------------------------
-
-  class AkkaSpringProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
-    val spring_beans = Dependencies.spring_beans
-    val spring_context = Dependencies.spring_context
-
-    // testing
-    val camel_spring = Dependencies.camel_spring
-    val junit = Dependencies.junit
-    val scalatest = Dependencies.scalatest
-  }
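All of these module classes pull their coordinates from a shared Dependencies object instead of repeating group/artifact/version strings. A minimal sketch of that pattern; the versions shown for commons-codec and scalatest are assumptions (only the junit 4.5 coordinate appears verbatim later in this diff):

    // Sketch: nested inside the parent project class so that sbt 0.7's
    // String-to-ModuleID `%` implicits are in scope.
    object Dependencies {
      lazy val commons_codec = "commons-codec" % "commons-codec" % "1.4" % "compile" // version assumed
      lazy val junit         = "junit" % "junit" % "4.5" % "test"
      lazy val scalatest     = "org.scalatest" % "scalatest" % "1.2" % "test"        // version assumed
    }
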
-
-  // -------------------------------------------------------------------------------------------------------------------
-  // akka-jta subproject
-  // -------------------------------------------------------------------------------------------------------------------
-
-  class AkkaJTAProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
-    val atomikos_transactions = Dependencies.atomikos_transactions
-    val atomikos_transactions_api = Dependencies.atomikos_transactions_api
-    val atomikos_transactions_jta = Dependencies.atomikos_transactions_jta
-    //val jta_1_1 = Dependencies.jta_1_1
-    //val atomikos_transactions_util = "com.atomikos" % "transactions-util" % "3.2.3" % "compile"
-
-    //Testing
-    val junit = Dependencies.junit
-    val scalatest = Dependencies.scalatest
-  }
-
-  // -------------------------------------------------------------------------------------------------------------------
-  // OSGi stuff
-  // -------------------------------------------------------------------------------------------------------------------
-
-  class AkkaOSGiParentProject(info: ProjectInfo) extends ParentProject(info) {
-    override def disableCrossPaths = true
-
-    lazy val akka_osgi_dependencies_bundle = project("akka-osgi-dependencies-bundle", "akka-osgi-dependencies-bundle",
-      new AkkaOSGiDependenciesBundleProject(_), akka_kernel, akka_jta) // akka_kernel does not depend on akka_jta (why?) therefore we list akka_jta here
-    lazy val akka_osgi_assembly = project("akka-osgi-assembly", "akka-osgi-assembly",
-      new AkkaOSGiAssemblyProject(_), akka_osgi_dependencies_bundle, akka_remote, akka_amqp, akka_http,
-      akka_camel, akka_spring, akka_jta, akka_persistence.akka_persistence_common,
-      akka_persistence.akka_persistence_redis, akka_persistence.akka_persistence_mongo,
-      akka_persistence.akka_persistence_cassandra,akka_persistence.akka_persistence_hbase,
-      akka_persistence.akka_persistence_voldemort)
-  }
-
-  class AkkaOSGiDependenciesBundleProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with BNDPlugin {
-    override def bndClasspath = compileClasspath
-    override def bndPrivatePackage = Seq("")
-    override def bndImportPackage = Seq("*;resolution:=optional")
-    override def bndExportPackage = Seq(
-      "org.aopalliance.*;version=1.0.0",
-
-      // Provided by other bundles
-      "!akka.*",
-      "!com.google.inject.*",
-      "!javax.transaction.*",
-      "!javax.ws.rs.*",
-      "!javax.jms.*",
-      "!javax.transaction,*",
-      "!org.apache.commons.io.*",
-      "!org.apache.commons.pool.*",
-      "!org.codehaus.jackson.*",
-      "!org.jboss.netty.*",
-      "!org.springframework.*",
-      "!org.apache.camel.*",
-      "!org.fusesource.commons.management.*",
-
-      "*;version=0.0.0")
-  }
"compile" intransitive - - val commons_codec = Dependencies.commons_codec.intransitive - val commons_io = Dependencies.commons_io.intransitive - val commons_pool = Dependencies.commons_pool.intransitive - val guicey = Dependencies.guicey.intransitive - val jackson = Dependencies.jackson.intransitive - val jackson_core = Dependencies.jackson_core.intransitive - val jsr311 = Dependencies.jsr311.intransitive - val jta_1_1 = Dependencies.jta_1_1.intransitive - val netty = Dependencies.netty.intransitive - val commons_fileupload = "commons-fileupload" % "commons-fileupload" % "1.2.1" % "compile" intransitive - val jms_1_1 = "org.apache.geronimo.specs" % "geronimo-jms_1.1_spec" % "1.1.1" % "compile" intransitive - val joda = "joda-time" % "joda-time" % "1.6" intransitive - - override def packageAction = - task { - val libs: Seq[Path] = managedClasspath(config("compile")).get.toSeq - val prjs: Seq[Path] = info.dependencies.toSeq.asInstanceOf[Seq[DefaultProject]] map { _.jarPath } - val all = libs ++ prjs - val destination = outputPath / "bundles" - FileUtilities.copyFlat(all, destination, log) - log info "Copied %s bundles to %s".format(all.size, destination) - None - } - - override def artifacts = Set.empty - } - - // ------------------------------------------------------------------------------------------------------------------- - // Test - // ------------------------------------------------------------------------------------------------------------------- - - class AkkaTypedActorTestProject(info: ProjectInfo) extends DefaultProject(info) { - // testing - val junit = "junit" % "junit" % "4.5" % "test" - val jmock = "org.jmock" % "jmock" % "2.4.0" % "test" - } - // ------------------------------------------------------------------------------------------------------------------- // Examples // ------------------------------------------------------------------------------------------------------------------- @@ -845,72 +512,17 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) { } class AkkaSampleChatProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) - class AkkaSamplePubSubProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) - class AkkaSampleFSMProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) - - class AkkaSampleRestJavaProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) - class AkkaSampleRemoteProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) - class AkkaSampleRestScalaProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) { - val jsr311 = Dependencies.jsr311 - } - - class AkkaSampleCamelProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) { - //Must be like this to be able to exclude the geronimo-servlet_2.4_spec which is a too old Servlet spec - override def ivyXML = - - - - - - - - - - - - - - - override def testOptions = createTestFilter( _.endsWith("Test")) - } - - class AkkaSampleSecurityProject(info: ProjectInfo) extends AkkaDefaultProject(info, deployPath) { - val commons_codec = Dependencies.commons_codec - val jsr250 = Dependencies.jsr250 - val jsr311 = Dependencies.jsr311 - } - - class AkkaSampleOSGiProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with BNDPlugin { - val osgi_core = Dependencies.osgi_core - override lazy val bndBundleActivator = Some("akka.sample.osgi.Activator") - override lazy val bndExportPackage = Nil // Necessary because of mixing-in AkkaDefaultProject which exports all 
-  class AkkaSampleOSGiProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) with BNDPlugin {
-    val osgi_core = Dependencies.osgi_core
-    override lazy val bndBundleActivator = Some("akka.sample.osgi.Activator")
-    override lazy val bndExportPackage = Nil // Necessary because of mixing-in AkkaDefaultProject which exports all ...akka.* packages!
-  }
-
   class AkkaSamplesParentProject(info: ProjectInfo) extends ParentProject(info) {
     override def disableCrossPaths = true
 
     lazy val akka_sample_ants = project("akka-sample-ants", "akka-sample-ants",
       new AkkaSampleAntsProject(_), akka_stm)
     lazy val akka_sample_chat = project("akka-sample-chat", "akka-sample-chat",
-      new AkkaSampleChatProject(_), akka_kernel)
-    lazy val akka_sample_pubsub = project("akka-sample-pubsub", "akka-sample-pubsub",
-      new AkkaSamplePubSubProject(_), akka_kernel)
-    lazy val akka_sample_fsm = project("akka-sample-fsm", "akka-sample-fsm",
-      new AkkaSampleFSMProject(_), akka_kernel)
-    lazy val akka_sample_rest_java = project("akka-sample-rest-java", "akka-sample-rest-java",
-      new AkkaSampleRestJavaProject(_), akka_kernel)
-    lazy val akka_sample_rest_scala = project("akka-sample-rest-scala", "akka-sample-rest-scala",
-      new AkkaSampleRestScalaProject(_), akka_kernel)
-    lazy val akka_sample_camel = project("akka-sample-camel", "akka-sample-camel",
-      new AkkaSampleCamelProject(_), akka_kernel)
-    lazy val akka_sample_security = project("akka-sample-security", "akka-sample-security",
-      new AkkaSampleSecurityProject(_), akka_kernel)
+      new AkkaSampleChatProject(_), akka_remote)
     lazy val akka_sample_remote = project("akka-sample-remote", "akka-sample-remote",
-      new AkkaSampleRemoteProject(_), akka_kernel)
-    lazy val akka_sample_osgi = project("akka-sample-osgi", "akka-sample-osgi",
-      new AkkaSampleOSGiProject(_), akka_remote)
+      new AkkaSampleRemoteProject(_), akka_remote)
   }
 
 // -------------------------------------------------------------------------------------------------------------------
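Taken together, the hunks above reduce the build to a single linear core plus a slimmed samples tree. The dependency graph that remains, reconstructed from the unchanged context lines and the '+' lines of this diff:

    // akka_actor <- akka_stm <- akka_typed_actor <- akka_remote <- akka_http
    //
    // akka_samples (parent project) now builds only:
    //   akka-sample-ants   -> akka_stm
    //   akka-sample-chat   -> akka_remote
    //   akka-sample-remote -> akka_remote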