diff --git a/.gitignore b/.gitignore
index 99b643d34d..5b3ae3f4ba 100755
--- a/.gitignore
+++ b/.gitignore
@@ -11,7 +11,7 @@ data
out
logs
storage
-lib/akka*
+_dump
.manager
manifest.mf
semantic.cache
@@ -19,7 +19,8 @@ tm*.log
tm*.lck
tm.out
*.tm.epoch
-_dump
-WEB-INF
.DS_Store
+*.iws
+*.ipr
+*.iml
diff --git a/akka-actors/pom.xml b/akka-actors/pom.xml
new file mode 100644
index 0000000000..cb76e1fbb5
--- /dev/null
+++ b/akka-actors/pom.xml
@@ -0,0 +1,164 @@
+
+ 4.0.0
+
+ akka-actors
+ Akka Actors Module
+
+ jar
+
+
+ akka
+ se.scalablesolutions.akka
+ 0.6
+ ../pom.xml
+
+
+
+
+
+ akka-util-java
+ se.scalablesolutions.akka
+ 0.6
+
+
+ akka-util
+ se.scalablesolutions.akka
+ 0.6
+
+
+ org.scala-lang
+ scala-library
+ 2.7.5
+
+
+ org.codehaus.aspectwerkz
+ aspectwerkz-nodeps-jdk5
+ 2.1
+
+
+ org.codehaus.aspectwerkz
+ aspectwerkz-jdk5
+ 2.1
+
+
+ net.lag
+ configgy
+ 1.3
+
+
+ org.guiceyfruit
+ guice-core
+ 2.0-beta-4
+
+
+ org.jboss.netty
+ netty
+ 3.1.0.GA
+
+
+ org.scala-tools
+ javautils
+ 2.7.4-0.1
+
+
+
+
+ org.codehaus.jackson
+ jackson-core-asl
+ 1.1.0
+
+
+ org.codehaus.jackson
+ jackson-mapper-asl
+ 1.1.0
+
+
+ com.google.protobuf
+ protobuf-java
+ 2.1.0
+
+
+ sbinary
+ sbinary
+ 0.3
+
+
+ com.twitter
+ scala-json
+ 1.0
+
+
+ dispatch.json
+ dispatch-json
+ 0.5.2
+
+
+ dispatch.http
+ dispatch-http
+ 0.5.2
+
+
+ sjson.json
+ sjson
+ 0.1
+
+
+
+
+ org.slf4j
+ slf4j-log4j12
+ 1.4.3
+
+
+ org.slf4j
+ slf4j-api
+ 1.4.3
+
+
+ log4j
+ log4j
+ 1.2.13
+
+
+ commons-logging
+ commons-logging
+ 1.0.4
+
+
+
+
+ org.scala-tools.testing
+ scalatest
+ 0.9.5
+ test
+
+
+ junit
+ junit
+ 4.5
+ test
+
+
+
+
+
+
+ false
+ ../config
+
+ akka.conf
+ akka-reference.conf
+
+
+
+ false
+ src/main/resources
+
+ META-INF/*
+
+
+
+
+
diff --git a/kernel/src/main/resources/META-INF/aop.xml b/akka-actors/src/main/resources/META-INF/aop.xml
similarity index 77%
rename from kernel/src/main/resources/META-INF/aop.xml
rename to akka-actors/src/main/resources/META-INF/aop.xml
index 23bb4575ff..1cff849b2d 100755
--- a/kernel/src/main/resources/META-INF/aop.xml
+++ b/akka-actors/src/main/resources/META-INF/aop.xml
@@ -1,8 +1,8 @@
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
diff --git a/kernel/src/main/scala/actor/ActiveObject.scala b/akka-actors/src/main/scala/actor/ActiveObject.scala
similarity index 94%
rename from kernel/src/main/scala/actor/ActiveObject.scala
rename to akka-actors/src/main/scala/actor/ActiveObject.scala
index cb22fecbc1..7f2d2f8fa3 100644
--- a/kernel/src/main/scala/actor/ActiveObject.scala
+++ b/akka-actors/src/main/scala/actor/ActiveObject.scala
@@ -2,16 +2,16 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.actor
+package se.scalablesolutions.akka.actor
import java.lang.reflect.{InvocationTargetException, Method}
import java.net.InetSocketAddress
-import kernel.reactor.{MessageDispatcher, FutureResult}
-import kernel.nio.protobuf.RemoteProtocol.{RemoteRequest, RemoteReply}
-import kernel.nio.{RemoteProtocolBuilder, RemoteClient, RemoteServer, RemoteRequestIdFactory}
-import kernel.config.ScalaConfig._
-import kernel.util._
+import reactor.{MessageDispatcher, FutureResult}
+import nio.protobuf.RemoteProtocol.{RemoteRequest, RemoteReply}
+import nio.{RemoteProtocolBuilder, RemoteClient, RemoteServer, RemoteRequestIdFactory}
+import config.ScalaConfig._
+import util._
import serialization.Serializer
import org.codehaus.aspectwerkz.intercept.{Advisable, AroundAdvice, Advice}
@@ -113,15 +113,15 @@ class ActiveObjectFactory {
ActiveObject.newInstance(intf, target, actor, Some(new InetSocketAddress(hostname, port)), timeout)
}
- private[kernel] def newInstance[T](target: Class[T], actor: Dispatcher, remoteAddress: Option[InetSocketAddress], timeout: Long): T = {
+ private[akka] def newInstance[T](target: Class[T], actor: Dispatcher, remoteAddress: Option[InetSocketAddress], timeout: Long): T = {
ActiveObject.newInstance(target, actor, remoteAddress, timeout)
}
- private[kernel] def newInstance[T](intf: Class[T], target: AnyRef, actor: Dispatcher, remoteAddress: Option[InetSocketAddress], timeout: Long): T = {
+ private[akka] def newInstance[T](intf: Class[T], target: AnyRef, actor: Dispatcher, remoteAddress: Option[InetSocketAddress], timeout: Long): T = {
ActiveObject.newInstance(intf, target, actor, remoteAddress, timeout)
}
- private[kernel] def supervise(restartStrategy: RestartStrategy, components: List[Supervise]): Supervisor =
+ private[akka] def supervise(restartStrategy: RestartStrategy, components: List[Supervise]): Supervisor =
ActiveObject.supervise(restartStrategy, components)
/*
@@ -219,7 +219,7 @@ object ActiveObject {
newInstance(intf, target, actor, Some(new InetSocketAddress(hostname, port)), timeout)
}
- private[kernel] def newInstance[T](target: Class[T], actor: Dispatcher, remoteAddress: Option[InetSocketAddress], timeout: Long): T = {
+ private[akka] def newInstance[T](target: Class[T], actor: Dispatcher, remoteAddress: Option[InetSocketAddress], timeout: Long): T = {
//if (getClass.getClassLoader.getResourceAsStream("META-INF/aop.xml") != null) println("000000000000000000000 FOUND AOP")
if (remoteAddress.isDefined) actor.makeRemote(remoteAddress.get)
val proxy = Proxy.newInstance(target, false, true)
@@ -230,7 +230,7 @@ object ActiveObject {
proxy.asInstanceOf[T]
}
- private[kernel] def newInstance[T](intf: Class[T], target: AnyRef, actor: Dispatcher, remoteAddress: Option[InetSocketAddress], timeout: Long): T = {
+ private[akka] def newInstance[T](intf: Class[T], target: AnyRef, actor: Dispatcher, remoteAddress: Option[InetSocketAddress], timeout: Long): T = {
//if (getClass.getClassLoader.getResourceAsStream("META-INF/aop.xml") != null) println("000000000000000000000 FOUND AOP")
if (remoteAddress.isDefined) actor.makeRemote(remoteAddress.get)
val proxy = Proxy.newInstance(Array(intf), Array(target), false, true)
@@ -242,7 +242,7 @@ object ActiveObject {
}
- private[kernel] def supervise(restartStrategy: RestartStrategy, components: List[Supervise]): Supervisor = {
+ private[akka] def supervise(restartStrategy: RestartStrategy, components: List[Supervise]): Supervisor = {
object factory extends SupervisorFactory {
override def getSupervisorConfig = SupervisorConfig(restartStrategy, components)
}
@@ -364,7 +364,7 @@ sealed class ActiveObjectAspect {
*
* @author Jonas Bonér
*/
-@serializable private[kernel] case class Invocation(val joinpoint: JoinPoint, val isOneWay: Boolean) {
+@serializable private[akka] case class Invocation(val joinpoint: JoinPoint, val isOneWay: Boolean) {
override def toString: String = synchronized {
"Invocation [joinpoint: " + joinpoint.toString + ", isOneWay: " + isOneWay + "]"
@@ -390,7 +390,7 @@ sealed class ActiveObjectAspect {
*
* @author Jonas Bonér
*/
-private[kernel] class Dispatcher(val callbacks: Option[RestartCallbacks]) extends Actor {
+private[akka] class Dispatcher(val callbacks: Option[RestartCallbacks]) extends Actor {
private val ZERO_ITEM_CLASS_ARRAY = Array[Class[_]]()
private val ZERO_ITEM_OBJECT_ARRAY = Array[Object[_]]()
diff --git a/kernel/src/main/scala/actor/Actor.scala b/akka-actors/src/main/scala/actor/Actor.scala
similarity index 94%
rename from kernel/src/main/scala/actor/Actor.scala
rename to akka-actors/src/main/scala/actor/Actor.scala
index 139e5d914b..21453a8bc2 100755
--- a/kernel/src/main/scala/actor/Actor.scala
+++ b/akka-actors/src/main/scala/actor/Actor.scala
@@ -2,7 +2,7 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.actor
+package se.scalablesolutions.akka.actor
import com.google.protobuf.ByteString
import java.net.InetSocketAddress
@@ -16,9 +16,6 @@ import nio.protobuf.RemoteProtocol.RemoteRequest
import util.Logging
import serialization.{Serializer, Serializable, SerializationProtocol}
import nio.{RemoteProtocolBuilder, RemoteClient, RemoteServer, RemoteRequestIdFactory}
-import management.Management
-
-import com.twitter.service.Stats
import org.multiverse.utils.TransactionThreadLocal._
sealed abstract class LifecycleMessage
@@ -46,15 +43,15 @@ class ActorMessageInvoker(val actor: Actor) extends MessageInvoker {
* @author Jonas Bonér
*/
object Actor {
- val TIMEOUT = Kernel.config.getInt("akka.actor.timeout", 5000)
- val SERIALIZE_MESSAGES = Kernel.config.getBool("akka.actor.serialize-messages", false)
+ import Config._
+ val TIMEOUT = config.getInt("akka.actor.timeout", 5000)
+ val SERIALIZE_MESSAGES = config.getBool("akka.actor.serialize-messages", false)
}
/**
* @author Jonas Bonér
*/
trait Actor extends Logging with TransactionManagement {
- Stats.getCounter("NrOfActors").incr
ActorRegistry.register(this)
@volatile private[this] var isRunning: Boolean = false
@@ -65,7 +62,7 @@ trait Actor extends Logging with TransactionManagement {
private var config: Option[AnyRef] = None
@volatile protected[this] var isTransactional = false
@volatile protected[this] var remoteAddress: Option[InetSocketAddress] = None
- @volatile protected[kernel] var supervisor: Option[Actor] = None
+ @volatile protected[akka] var supervisor: Option[Actor] = None
protected[Actor] var mailbox: MessageQueue = _
protected[this] var senderFuture: Option[CompletableFutureResult] = None
protected[this] val linkedActors = new CopyOnWriteArraySet[Actor]
@@ -104,7 +101,7 @@ trait Actor extends Logging with TransactionManagement {
* .buildThreadPool
*
*/
- protected[kernel] var dispatcher: MessageDispatcher = {
+ protected[akka] var dispatcher: MessageDispatcher = {
val dispatcher = Dispatchers.newEventBasedThreadPoolDispatcher(getClass.getName)
mailbox = dispatcher.messageQueue
dispatcher.registerHandler(this, new ActorMessageInvoker(this))
@@ -363,8 +360,8 @@ trait Actor extends Logging with TransactionManagement {
*
* To be invoked from within the actor itself.
*/
- protected[this] def spawn(actorClass: Class[_]): Actor = {
- val actor = actorClass.newInstance.asInstanceOf[Actor]
+ protected[this] def spawn[T <: Actor](actorClass: Class[T]): T = {
+ val actor = actorClass.newInstance.asInstanceOf[T]
actor.dispatcher = dispatcher
actor.mailbox = mailbox
actor.start
@@ -376,8 +373,8 @@ trait Actor extends Logging with TransactionManagement {
*
* To be invoked from within the actor itself.
*/
- protected[this] def spawnRemote(actorClass: Class[_]): Actor = {
- val actor = actorClass.newInstance.asInstanceOf[Actor]
+ protected[this] def spawnRemote[T <: Actor](actorClass: Class[T]): T = {
+ val actor = actorClass.newInstance.asInstanceOf[T]
actor.makeRemote(RemoteServer.HOSTNAME, RemoteServer.PORT)
actor.dispatcher = dispatcher
actor.mailbox = mailbox
@@ -390,8 +387,8 @@ trait Actor extends Logging with TransactionManagement {
*
* To be invoked from within the actor itself.
*/
- protected[this] def spawnLink(actorClass: Class[_]): Actor = {
- val actor = spawn(actorClass)
+ protected[this] def spawnLink[T <: Actor](actorClass: Class[T]): T = {
+ val actor = spawn[T](actorClass)
link(actor)
actor
}
@@ -401,8 +398,8 @@ trait Actor extends Logging with TransactionManagement {
*
* To be invoked from within the actor itself.
*/
- protected[this] def spawnLinkRemote(actorClass: Class[_]): Actor = {
- val actor = spawn(actorClass)
+ protected[this] def spawnLinkRemote[T <: Actor](actorClass: Class[T]): T = {
+ val actor = spawn[T](actorClass)
actor.makeRemote(RemoteServer.HOSTNAME, RemoteServer.PORT)
link(actor)
actor
@@ -459,7 +456,7 @@ trait Actor extends Logging with TransactionManagement {
/**
* Callback for the dispatcher. E.g. single entry point to the user code and all protected[this] methods
*/
- private[kernel] def invoke(messageHandle: MessageInvocation) = synchronized {
+ private[akka] def invoke(messageHandle: MessageInvocation) = synchronized {
if (TransactionManagement.isTransactionalityEnabled) transactionalDispatch(messageHandle)
else dispatch(messageHandle)
}
@@ -547,8 +544,6 @@ trait Actor extends Logging with TransactionManagement {
}
private[this] def handleTrapExit(dead: Actor, reason: Throwable): Unit = {
- if (Management.RECORD_STATS) Stats.getCounter("NrOfFailures_" + dead.name).incr
-
if (trapExit) {
if (faultHandler.isDefined) {
faultHandler.get match {
@@ -566,7 +561,6 @@ trait Actor extends Logging with TransactionManagement {
linkedActors.toArray.toList.asInstanceOf[List[Actor]].foreach(_.restart(reason))
private[Actor] def restart(reason: AnyRef) = synchronized {
- if (Management.RECORD_STATS) Stats.getCounter("NrOfRestarts_" + name).incr
lifeCycleConfig match {
case None => throw new IllegalStateException("Server [" + id + "] does not have a life-cycle defined.")
@@ -594,7 +588,7 @@ trait Actor extends Logging with TransactionManagement {
}
}
- private[kernel] def registerSupervisorAsRemoteActor: Option[String] = synchronized {
+ private[akka] def registerSupervisorAsRemoteActor: Option[String] = synchronized {
if (supervisor.isDefined) {
RemoteClient.clientFor(remoteAddress.get).registerSupervisorForActor(this)
Some(supervisor.get.uuid)
@@ -602,7 +596,7 @@ trait Actor extends Logging with TransactionManagement {
}
- private[kernel] def swapDispatcher(disp: MessageDispatcher) = synchronized {
+ private[akka] def swapDispatcher(disp: MessageDispatcher) = synchronized {
dispatcher = disp
mailbox = dispatcher.messageQueue
dispatcher.registerHandler(this, new ActorMessageInvoker(this))
diff --git a/kernel/src/main/scala/actor/ActorRegistry.scala b/akka-actors/src/main/scala/actor/ActorRegistry.scala
similarity index 87%
rename from kernel/src/main/scala/actor/ActorRegistry.scala
rename to akka-actors/src/main/scala/actor/ActorRegistry.scala
index 3f4275cf9e..2a534918d1 100755
--- a/kernel/src/main/scala/actor/ActorRegistry.scala
+++ b/akka-actors/src/main/scala/actor/ActorRegistry.scala
@@ -1,33 +1,33 @@
-/**
- * Copyright (C) 2009 Scalable Solutions.
- */
-
-package se.scalablesolutions.akka.kernel.actor
-
-import kernel.util.Logging
-
-import scala.collection.jcl.HashMap
-
-/**
- * Registry holding all actor instances, mapped by class..
- *
- * @author Jonas Bonér
- */
-object ActorRegistry extends Logging {
- private val actors = new HashMap[String, List[Actor]]
-
- def actorsFor(clazz: Class[_]): List[Actor] = synchronized {
- actors.get(clazz.getName) match {
- case None => Nil
- case Some(instances) => instances
- }
- }
-
- def register(actor: Actor) = synchronized {
- val name = actor.getClass.getName
- actors.get(name) match {
- case Some(instances) => actors + (name -> (actor :: instances))
- case None => actors + (name -> (actor :: Nil))
- }
- }
-}
+/**
+ * Copyright (C) 2009 Scalable Solutions.
+ */
+
+package se.scalablesolutions.akka.actor
+
+import util.Logging
+
+import scala.collection.jcl.HashMap
+
+/**
+ * Registry holding all actor instances, mapped by class..
+ *
+ * @author Jonas Bonér
+ */
+object ActorRegistry extends Logging {
+ private val actors = new HashMap[String, List[Actor]]
+
+ def actorsFor(clazz: Class[_]): List[Actor] = synchronized {
+ actors.get(clazz.getName) match {
+ case None => Nil
+ case Some(instances) => instances
+ }
+ }
+
+ def register(actor: Actor) = synchronized {
+ val name = actor.getClass.getName
+ actors.get(name) match {
+ case Some(instances) => actors + (name -> (actor :: instances))
+ case None => actors + (name -> (actor :: Nil))
+ }
+ }
+}
diff --git a/akka-actors/src/main/scala/actor/Scheduler.scala b/akka-actors/src/main/scala/actor/Scheduler.scala
new file mode 100644
index 0000000000..42e4b7485a
--- /dev/null
+++ b/akka-actors/src/main/scala/actor/Scheduler.scala
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2007 WorldWide Conferencing, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package se.scalablesolutions.akka.util
+
+import java.util.concurrent._
+import actor.{OneForOneStrategy, Actor}
+import config.ScalaConfig._
+
+import org.scala_tools.javautils.Imports._
+
+case object UnSchedule
+case class SchedulerException(msg: String, e: Throwable) extends RuntimeException(msg, e)
+
+/**
+ * Rework of David Pollak's ActorPing class in the Lift Project
+ * which is licensed under the Apache 2 License.
+ */
+class ScheduleActor(val receiver: Actor, val future: ScheduledFuture[AnyRef]) extends Actor with Logging {
+ lifeCycleConfig = Some(LifeCycle(Permanent, 100))
+
+ def receive: PartialFunction[Any, Unit] = {
+ case UnSchedule =>
+ Scheduler.stopSupervising(this)
+ future.cancel(true)
+ stop
+ }
+}
+
+object Scheduler extends Actor {
+ private var service = Executors.newSingleThreadScheduledExecutor(SchedulerThreadFactory)
+ private val schedulers = new ConcurrentHashMap[Actor, Actor]
+ faultHandler = Some(OneForOneStrategy(5, 5000))
+ trapExit = true
+ start
+
+ def schedule(receiver: Actor, message: AnyRef, initialDelay: Long, delay: Long, timeUnit: TimeUnit) = {
+ try {
+ startLink(new ScheduleActor(
+ receiver,
+ service.scheduleAtFixedRate(new java.lang.Runnable {
+ def run = receiver ! message;
+ }, initialDelay, delay, timeUnit).asInstanceOf[ScheduledFuture[AnyRef]]))
+ } catch {
+ case e => throw SchedulerException(message + " could not be scheduled on " + receiver, e)
+ }
+ }
+
+ def restart = service = Executors.newSingleThreadScheduledExecutor(SchedulerThreadFactory)
+
+ def stopSupervising(actor: Actor) = {
+ unlink(actor)
+ schedulers.remove(actor)
+ }
+
+ override def shutdown = {
+ schedulers.values.asScala.foreach(_ ! UnSchedule)
+ service.shutdown
+ }
+
+ def receive: PartialFunction[Any, Unit] = {
+ case _ => {} // ignore all messages
+ }
+}
+
+private object SchedulerThreadFactory extends ThreadFactory {
+ private var count = 0
+ val threadFactory = Executors.defaultThreadFactory()
+
+ def newThread(r: Runnable): Thread = {
+ val thread = threadFactory.newThread(r)
+ thread.setName("Scheduler-" + count)
+ thread.setDaemon(true)
+ thread
+ }
+}
+
+
diff --git a/kernel/src/main/scala/actor/Supervisor.scala b/akka-actors/src/main/scala/actor/Supervisor.scala
similarity index 96%
rename from kernel/src/main/scala/actor/Supervisor.scala
rename to akka-actors/src/main/scala/actor/Supervisor.scala
index 0b9f1dfc4c..e183cfcc96 100644
--- a/kernel/src/main/scala/actor/Supervisor.scala
+++ b/akka-actors/src/main/scala/actor/Supervisor.scala
@@ -2,12 +2,12 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.actor
+package se.scalablesolutions.akka.actor
-import kernel.util.Logging
-import kernel.config.ScalaConfig._
-import kernel.config.{ConfiguratorRepository, Configurator}
-import kernel.util.Helpers._
+import util.Logging
+import config.ScalaConfig._
+import config.{ConfiguratorRepository, Configurator}
+import util.Helpers._
import java.util.concurrent.ConcurrentHashMap
diff --git a/kernel/src/main/scala/config/ActiveObjectGuiceConfigurator.scala b/akka-actors/src/main/scala/config/ActiveObjectGuiceConfigurator.scala
similarity index 88%
rename from kernel/src/main/scala/config/ActiveObjectGuiceConfigurator.scala
rename to akka-actors/src/main/scala/config/ActiveObjectGuiceConfigurator.scala
index d1b17782a8..9c8e57578d 100644
--- a/kernel/src/main/scala/config/ActiveObjectGuiceConfigurator.scala
+++ b/akka-actors/src/main/scala/config/ActiveObjectGuiceConfigurator.scala
@@ -2,16 +2,16 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.config
+package se.scalablesolutions.akka.config
import com.google.inject._
import ScalaConfig._
-import kernel.actor.{Supervisor, ActiveObjectFactory, Dispatcher}
-import kernel.util.Logging
+import akka.actor.{Supervisor, ActiveObjectFactory, Dispatcher}
+import akka.util.Logging
-import org.apache.camel.impl.{DefaultCamelContext}
-import org.apache.camel.{CamelContext, Endpoint, Routes}
+//import org.apache.camel.impl.{DefaultCamelContext}
+//import org.apache.camel.{CamelContext, Endpoint, Routes}
import scala.collection.mutable.HashMap
@@ -21,8 +21,8 @@ import java.lang.reflect.Method
/**
* @author Jonas Bonér
*/
-class ActiveObjectGuiceConfigurator extends ActiveObjectConfigurator with CamelConfigurator with Logging {
- val AKKA_CAMEL_ROUTING_SCHEME = "akka"
+class ActiveObjectGuiceConfigurator extends ActiveObjectConfigurator with Logging { // with CamelConfigurator {
+ //val AKKA_CAMEL_ROUTING_SCHEME = "akka"
private var injector: Injector = _
private var supervisor: Option[Supervisor] = None
@@ -33,7 +33,7 @@ class ActiveObjectGuiceConfigurator extends ActiveObjectConfigurator with CamelC
private var configRegistry = new HashMap[Class[_], Component] // TODO is configRegistry needed?
private var activeObjectRegistry = new HashMap[Class[_], Tuple3[AnyRef, AnyRef, Component]]
private var activeObjectFactory = new ActiveObjectFactory
- private var camelContext = new DefaultCamelContext
+ //private var camelContext = new DefaultCamelContext
private var modules = new java.util.ArrayList[Module]
private var methodToUriRegistry = new HashMap[Method, String]
@@ -65,7 +65,7 @@ class ActiveObjectGuiceConfigurator extends ActiveObjectConfigurator with CamelC
if (c.intf.isDefined) c.intf.get
else c.target
}
-
+ /*
override def getRoutingEndpoint(uri: String): Endpoint = synchronized {
camelContext.getEndpoint(uri)
}
@@ -77,7 +77,7 @@ class ActiveObjectGuiceConfigurator extends ActiveObjectConfigurator with CamelC
override def getRoutingEndpoints(uri: String): java.util.Collection[Endpoint] = synchronized {
camelContext.getEndpoints(uri)
}
-
+ */
override def configure(restartStrategy: RestartStrategy, components: List[Component]):
ActiveObjectConfigurator = synchronized {
this.restartStrategy = restartStrategy
@@ -155,14 +155,14 @@ class ActiveObjectGuiceConfigurator extends ActiveObjectConfigurator with CamelC
modules.add(module)
this
}
-
+ /*
override def addRoutes(routes: Routes): ActiveObjectConfigurator = synchronized {
camelContext.addRoutes(routes)
this
}
override def getCamelContext: CamelContext = camelContext
-
+ */
def getGuiceModules: java.util.List[Module] = modules
def reset = synchronized {
@@ -172,21 +172,21 @@ class ActiveObjectGuiceConfigurator extends ActiveObjectConfigurator with CamelC
methodToUriRegistry = new HashMap[Method, String]
injector = null
restartStrategy = null
- camelContext = new DefaultCamelContext
+ //camelContext = new DefaultCamelContext
}
def stop = synchronized {
- camelContext.stop
+ //camelContext.stop
if (supervisor.isDefined) supervisor.get.stop
}
- def registerMethodForUri(method: Method, componentName: String) =
- methodToUriRegistry += method -> buildUri(method, componentName)
+// def registerMethodForUri(method: Method, componentName: String) =
+// methodToUriRegistry += method -> buildUri(method, componentName)
- def lookupUriFor(method: Method): String =
- methodToUriRegistry.getOrElse(method, throw new IllegalStateException("Could not find URI for method [" + method.getName + "]"))
+// def lookupUriFor(method: Method): String =
+// methodToUriRegistry.getOrElse(method, throw new IllegalStateException("Could not find URI for method [" + method.getName + "]"))
- def buildUri(method: Method, componentName: String): String =
- AKKA_CAMEL_ROUTING_SCHEME + ":" + componentName + "." + method.getName
+// def buildUri(method: Method, componentName: String): String =
+// AKKA_CAMEL_ROUTING_SCHEME + ":" + componentName + "." + method.getName
}
\ No newline at end of file
diff --git a/kernel/src/main/scala/config/ActiveObjectManager.scala b/akka-actors/src/main/scala/config/ActiveObjectManager.scala
similarity index 71%
rename from kernel/src/main/scala/config/ActiveObjectManager.scala
rename to akka-actors/src/main/scala/config/ActiveObjectManager.scala
index 2545d44b29..f5d49d426a 100644
--- a/kernel/src/main/scala/config/ActiveObjectManager.scala
+++ b/akka-actors/src/main/scala/config/ActiveObjectManager.scala
@@ -2,15 +2,15 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.config
+package se.scalablesolutions.akka.config
-import akka.kernel.config.JavaConfig._
+import JavaConfig._
import com.google.inject._
import java.util._
-import org.apache.camel.impl.{JndiRegistry, DefaultCamelContext}
-import org.apache.camel.{Endpoint, Routes}
+//import org.apache.camel.impl.{JndiRegistry, DefaultCamelContext}
+//import org.apache.camel.{Endpoint, Routes}
/**
*
@@ -49,10 +49,10 @@ class ActiveObjectManager {
this
}
- def addRoutes(routes: Routes): ActiveObjectManager = {
- INSTANCE.addRoutes(routes)
- this
- }
+ //def addRoutes(routes: Routes): ActiveObjectManager = {
+ // INSTANCE.addRoutes(routes)
+ // this
+ // }
def getComponentInterfaces: List[Class[_]] = {
@@ -63,11 +63,11 @@ class ActiveObjectManager {
def getExternalDependency[T](clazz: Class[T]): T = INSTANCE.getExternalDependency(clazz)
- def getRoutingEndpoint(uri: String): Endpoint = INSTANCE.getRoutingEndpoint(uri)
+ //def getRoutingEndpoint(uri: String): Endpoint = INSTANCE.getRoutingEndpoint(uri)
- def getRoutingEndpoints: java.util.Collection[Endpoint] = INSTANCE.getRoutingEndpoints
+ //def getRoutingEndpoints: java.util.Collection[Endpoint] = INSTANCE.getRoutingEndpoints
- def getRoutingEndpoints(uri: String): java.util.Collection[Endpoint] = INSTANCE.getRoutingEndpoints(uri)
+ //def getRoutingEndpoints(uri: String): java.util.Collection[Endpoint] = INSTANCE.getRoutingEndpoints(uri)
def getGuiceModules: List[Module] = INSTANCE.getGuiceModules
diff --git a/kernel/src/main/scala/config/Config.scala b/akka-actors/src/main/scala/config/Config.scala
similarity index 82%
rename from kernel/src/main/scala/config/Config.scala
rename to akka-actors/src/main/scala/config/Config.scala
index 0df243a338..f9a6f25d1f 100644
--- a/kernel/src/main/scala/config/Config.scala
+++ b/akka-actors/src/main/scala/config/Config.scala
@@ -2,12 +2,12 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.config
+package se.scalablesolutions.akka.config
import reflect.BeanProperty
-import kernel.actor.Actor
-import kernel.reactor.MessageDispatcher
+import actor.Actor
+import reactor.MessageDispatcher
/**
* Configuration classes - not to be used as messages.
@@ -95,7 +95,7 @@ object JavaConfig {
@BeanProperty val scheme: FailOverScheme,
@BeanProperty val maxNrOfRetries: Int,
@BeanProperty val withinTimeRange: Int) extends ConfigElement {
- def transform = se.scalablesolutions.akka.kernel.config.ScalaConfig.RestartStrategy(
+ def transform = se.scalablesolutions.akka.config.ScalaConfig.RestartStrategy(
scheme.transform, maxNrOfRetries, withinTimeRange)
}
@@ -103,35 +103,35 @@ object JavaConfig {
def this(scope: Scope, shutdownTime: Int) = this(scope, shutdownTime, null)
def transform = {
val callbackOption = if (callbacks == null) None else Some(callbacks.transform)
- se.scalablesolutions.akka.kernel.config.ScalaConfig.LifeCycle(scope.transform, shutdownTime, callbackOption)
+ se.scalablesolutions.akka.config.ScalaConfig.LifeCycle(scope.transform, shutdownTime, callbackOption)
}
}
class RestartCallbacks(@BeanProperty val preRestart: String, @BeanProperty val postRestart: String) {
- def transform = se.scalablesolutions.akka.kernel.config.ScalaConfig.RestartCallbacks(preRestart, postRestart)
+ def transform = se.scalablesolutions.akka.config.ScalaConfig.RestartCallbacks(preRestart, postRestart)
}
abstract class Scope extends ConfigElement {
- def transform: se.scalablesolutions.akka.kernel.config.ScalaConfig.Scope
+ def transform: se.scalablesolutions.akka.config.ScalaConfig.Scope
}
class Permanent extends Scope {
- override def transform = se.scalablesolutions.akka.kernel.config.ScalaConfig.Permanent
+ override def transform = se.scalablesolutions.akka.config.ScalaConfig.Permanent
}
class Transient extends Scope {
- override def transform = se.scalablesolutions.akka.kernel.config.ScalaConfig.Transient
+ override def transform = se.scalablesolutions.akka.config.ScalaConfig.Transient
}
class Temporary extends Scope {
- override def transform = se.scalablesolutions.akka.kernel.config.ScalaConfig.Temporary
+ override def transform = se.scalablesolutions.akka.config.ScalaConfig.Temporary
}
abstract class FailOverScheme extends ConfigElement {
- def transform: se.scalablesolutions.akka.kernel.config.ScalaConfig.FailOverScheme
+ def transform: se.scalablesolutions.akka.config.ScalaConfig.FailOverScheme
}
class AllForOne extends FailOverScheme {
- override def transform = se.scalablesolutions.akka.kernel.config.ScalaConfig.AllForOne
+ override def transform = se.scalablesolutions.akka.config.ScalaConfig.AllForOne
}
class OneForOne extends FailOverScheme {
- override def transform = se.scalablesolutions.akka.kernel.config.ScalaConfig.OneForOne
+ override def transform = se.scalablesolutions.akka.config.ScalaConfig.OneForOne
}
class RemoteAddress(@BeanProperty val hostname: String, @BeanProperty val port: Int)
@@ -167,11 +167,11 @@ object JavaConfig {
this(null, target, lifeCycle, timeout, dispatcher, remoteAddress)
def transform =
- se.scalablesolutions.akka.kernel.config.ScalaConfig.Component(intf, target, lifeCycle.transform, timeout, dispatcher,
- if (remoteAddress != null) se.scalablesolutions.akka.kernel.config.ScalaConfig.RemoteAddress(remoteAddress.hostname, remoteAddress.port) else null)
+ se.scalablesolutions.akka.config.ScalaConfig.Component(intf, target, lifeCycle.transform, timeout, dispatcher,
+ if (remoteAddress != null) se.scalablesolutions.akka.config.ScalaConfig.RemoteAddress(remoteAddress.hostname, remoteAddress.port) else null)
def newSupervised(actor: Actor) =
- se.scalablesolutions.akka.kernel.config.ScalaConfig.Supervise(actor, lifeCycle.transform)
+ se.scalablesolutions.akka.config.ScalaConfig.Supervise(actor, lifeCycle.transform)
}
}
\ No newline at end of file
diff --git a/kernel/src/main/scala/config/Configuration.scala b/akka-actors/src/main/scala/config/Configuration.scala
similarity index 97%
rename from kernel/src/main/scala/config/Configuration.scala
rename to akka-actors/src/main/scala/config/Configuration.scala
index d4e63123dc..5813cd37bf 100644
--- a/kernel/src/main/scala/config/Configuration.scala
+++ b/akka-actors/src/main/scala/config/Configuration.scala
@@ -2,7 +2,7 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.config
+package se.scalablesolutions.akka.config
/*
import se.scalablesolutions.akka.kernel.{ActiveObject, ActiveObjectProxy}
diff --git a/kernel/src/main/scala/config/Configurator.scala b/akka-actors/src/main/scala/config/Configurator.scala
similarity index 94%
rename from kernel/src/main/scala/config/Configurator.scala
rename to akka-actors/src/main/scala/config/Configurator.scala
index bae832f459..c4a57e91a4 100644
--- a/kernel/src/main/scala/config/Configurator.scala
+++ b/akka-actors/src/main/scala/config/Configurator.scala
@@ -2,7 +2,7 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.config
+package se.scalablesolutions.akka.config
import ScalaConfig.{RestartStrategy, Component}
diff --git a/akka-actors/src/main/scala/config/ConfiguratorRepository.scala b/akka-actors/src/main/scala/config/ConfiguratorRepository.scala
new file mode 100644
index 0000000000..049bf0ed4f
--- /dev/null
+++ b/akka-actors/src/main/scala/config/ConfiguratorRepository.scala
@@ -0,0 +1,29 @@
+/**
+ * Copyright (C) 2009 Scalable Solutions.
+ */
+
+package se.scalablesolutions.akka.config
+
+import scala.collection.mutable.HashSet
+
+import util.Logging
+
+object ConfiguratorRepository extends Logging {
+
+ private val configuration = new HashSet[Configurator]
+
+ def registerConfigurator(conf: Configurator) = synchronized {
+ configuration + conf
+ }
+
+ def getConfigurators: List[Configurator] = synchronized {
+ configuration.toList
+ //configurations.getOrElse(ctx, throw new IllegalArgumentException("No configuration for servlet context [" + ctx + "]"))
+ }
+}
+
+class ConfiguratorRepository extends Logging {
+ def registerConfigurator(conf: Configurator) = ConfiguratorRepository.registerConfigurator(conf)
+ def getConfigurators: List[Configurator] = ConfiguratorRepository.getConfigurators
+}
+
diff --git a/kernel/src/main/scala/nio/RemoteClient.scala b/akka-actors/src/main/scala/nio/RemoteClient.scala
similarity index 90%
rename from kernel/src/main/scala/nio/RemoteClient.scala
rename to akka-actors/src/main/scala/nio/RemoteClient.scala
index e91173c4f7..74e99b9fe8 100644
--- a/kernel/src/main/scala/nio/RemoteClient.scala
+++ b/akka-actors/src/main/scala/nio/RemoteClient.scala
@@ -2,17 +2,16 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.nio
+package se.scalablesolutions.akka.nio
import java.net.InetSocketAddress
import java.util.concurrent.{Executors, ConcurrentMap, ConcurrentHashMap}
import protobuf.RemoteProtocol.{RemoteRequest, RemoteReply}
-import kernel.actor.{Exit, Actor}
-import kernel.reactor.{DefaultCompletableFutureResult, CompletableFutureResult}
+import actor.{Exit, Actor}
+import reactor.{DefaultCompletableFutureResult, CompletableFutureResult}
import serialization.{Serializer, Serializable, SerializationProtocol}
-import kernel.util.Logging
-import kernel.management.Management
+import util.Logging
import org.jboss.netty.bootstrap.ClientBootstrap
import org.jboss.netty.channel._
@@ -22,8 +21,6 @@ import org.jboss.netty.handler.codec.protobuf.{ProtobufDecoder, ProtobufEncoder}
import scala.collection.mutable.HashMap
-import com.twitter.service.Stats
-
/**
* @author Jonas Bonér
*/
@@ -48,8 +45,6 @@ object RemoteClient extends Logging {
*/
class RemoteClient(hostname: String, port: Int) extends Logging {
val name = "RemoteClient@" + hostname
- val NR_OF_BYTES_SENT = Stats.getCounter("NrOfBytesSent_" + name)
- val NR_OF_MESSAGES_SENT = Stats.getCounter("NrOfMessagesSent_" + name)
@volatile private var isRunning = false
private val futures = new ConcurrentHashMap[Long, CompletableFutureResult]
@@ -91,10 +86,6 @@ class RemoteClient(hostname: String, port: Int) extends Logging {
}
def send(request: RemoteRequest): Option[CompletableFutureResult] = if (isRunning) {
- if (Management.RECORD_STATS) {
- NR_OF_BYTES_SENT.incr(request.getSerializedSize)
- NR_OF_MESSAGES_SENT.incr
- }
if (request.getIsOneWay) {
connection.getChannel.write(request)
None
@@ -145,9 +136,6 @@ class RemoteClientHandler(val name: String,
val supervisors: ConcurrentMap[String, Actor])
extends SimpleChannelUpstreamHandler with Logging {
- val NR_OF_BYTES_RECEIVED = Stats.getCounter("NrOfBytesReceived_" + name)
- val NR_OF_MESSAGES_RECEIVED = Stats.getCounter("NrOfMessagesReceived_" + name)
-
override def handleUpstream(ctx: ChannelHandlerContext, event: ChannelEvent) = {
if (event.isInstanceOf[ChannelStateEvent] && event.asInstanceOf[ChannelStateEvent].getState != ChannelState.INTEREST_OPS) {
log.debug(event.toString)
@@ -160,10 +148,6 @@ class RemoteClientHandler(val name: String,
val result = event.getMessage
if (result.isInstanceOf[RemoteReply]) {
val reply = result.asInstanceOf[RemoteReply]
- if (Management.RECORD_STATS) {
- NR_OF_MESSAGES_RECEIVED.incr
- NR_OF_BYTES_RECEIVED.incr(reply.getSerializedSize)
- }
log.debug("Received RemoteReply[\n%s]", reply.toString)
val future = futures.get(reply.getId)
if (reply.getIsSuccessful) {
diff --git a/kernel/src/main/scala/nio/RemoteProtocolBuilder.scala b/akka-actors/src/main/scala/nio/RemoteProtocolBuilder.scala
similarity index 99%
rename from kernel/src/main/scala/nio/RemoteProtocolBuilder.scala
rename to akka-actors/src/main/scala/nio/RemoteProtocolBuilder.scala
index b492a2fe0c..1c846a42cd 100644
--- a/kernel/src/main/scala/nio/RemoteProtocolBuilder.scala
+++ b/akka-actors/src/main/scala/nio/RemoteProtocolBuilder.scala
@@ -2,7 +2,7 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.nio
+package se.scalablesolutions.akka.nio
import akka.serialization.Serializable.SBinary
import com.google.protobuf.{Message, ByteString}
diff --git a/kernel/src/main/scala/nio/RemoteServer.scala b/akka-actors/src/main/scala/nio/RemoteServer.scala
similarity index 88%
rename from kernel/src/main/scala/nio/RemoteServer.scala
rename to akka-actors/src/main/scala/nio/RemoteServer.scala
index fda24718c4..b13b234fc4 100755
--- a/kernel/src/main/scala/nio/RemoteServer.scala
+++ b/akka-actors/src/main/scala/nio/RemoteServer.scala
@@ -2,18 +2,17 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.nio
+package se.scalablesolutions.akka.nio
import java.lang.reflect.InvocationTargetException
import java.net.InetSocketAddress
import java.util.concurrent.{ConcurrentHashMap, Executors}
-import kernel.actor._
-import kernel.util._
+import actor._
+import util._
import protobuf.RemoteProtocol
import protobuf.RemoteProtocol.{RemoteReply, RemoteRequest}
import serialization.{Serializer, Serializable, SerializationProtocol}
-import kernel.management.Management
import org.jboss.netty.bootstrap.ServerBootstrap
import org.jboss.netty.channel._
@@ -21,8 +20,6 @@ import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory
import org.jboss.netty.handler.codec.frame.{LengthFieldBasedFrameDecoder, LengthFieldPrepender}
import org.jboss.netty.handler.codec.protobuf.{ProtobufDecoder, ProtobufEncoder}
-import com.twitter.service.Stats
-
/**
* @author Jonas Bonér
*/
@@ -34,7 +31,7 @@ class RemoteServer extends Logging {
* @author Jonas Bonér
*/
object RemoteServer extends Logging {
- import kernel.Kernel.config
+ import Config.config
val HOSTNAME = config.getString("akka.remote.hostname", "localhost")
val PORT = config.getInt("akka.remote.port", 9999)
val CONNECTION_TIMEOUT_MILLIS = config.getInt("akka.remote.connection-timeout", 1000)
@@ -86,11 +83,6 @@ class RemoteServerPipelineFactory(name: String, loader: Option[ClassLoader]) ext
*/
@ChannelPipelineCoverage { val value = "all" }
class RemoteServerHandler(val name: String, val applicationLoader: Option[ClassLoader]) extends SimpleChannelUpstreamHandler with Logging {
- val NR_OF_BYTES_SENT = Stats.getCounter("NrOfBytesSent_" + name)
- val NR_OF_BYTES_RECEIVED = Stats.getCounter("NrOfBytesReceived_" + name)
- val NR_OF_MESSAGES_SENT = Stats.getCounter("NrOfMessagesSent_" + name)
- val NR_OF_MESSAGES_RECEIVED = Stats.getCounter("NrOfMessagesReceived_" + name)
-
private val activeObjectFactory = new ActiveObjectFactory
private val activeObjects = new ConcurrentHashMap[String, AnyRef]
private val actors = new ConcurrentHashMap[String, Actor]
@@ -115,10 +107,6 @@ class RemoteServerHandler(val name: String, val applicationLoader: Option[ClassL
}
private def handleRemoteRequest(request: RemoteRequest, channel: Channel) = {
- if (Management.RECORD_STATS) {
- NR_OF_MESSAGES_RECEIVED.incr
- NR_OF_BYTES_RECEIVED.incr(request.getSerializedSize)
- }
log.debug("Received RemoteRequest[\n%s]", request.toString)
if (request.getIsActor) dispatchToActor(request, channel)
else dispatchToActiveObject(request, channel)
@@ -143,10 +131,6 @@ class RemoteServerHandler(val name: String, val applicationLoader: Option[ClassL
if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid)
val replyMessage = replyBuilder.build
channel.write(replyMessage)
- if (Management.RECORD_STATS) {
- NR_OF_MESSAGES_SENT.incr
- NR_OF_BYTES_SENT.incr(replyMessage.getSerializedSize)
- }
} catch {
case e: Throwable =>
log.error("Could not invoke remote actor [%s] due to: %s", request.getTarget, e)
@@ -159,10 +143,6 @@ class RemoteServerHandler(val name: String, val applicationLoader: Option[ClassL
if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid)
val replyMessage = replyBuilder.build
channel.write(replyMessage)
- if (Management.RECORD_STATS) {
- NR_OF_MESSAGES_SENT.incr
- NR_OF_BYTES_SENT.incr(replyMessage.getSerializedSize)
- }
}
}
}
@@ -190,10 +170,6 @@ class RemoteServerHandler(val name: String, val applicationLoader: Option[ClassL
if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid)
val replyMessage = replyBuilder.build
channel.write(replyMessage)
- if (Management.RECORD_STATS) {
- NR_OF_MESSAGES_SENT.incr
- NR_OF_BYTES_SENT.incr(replyMessage.getSerializedSize)
- }
}
} catch {
case e: InvocationTargetException =>
@@ -207,10 +183,6 @@ class RemoteServerHandler(val name: String, val applicationLoader: Option[ClassL
if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid)
val replyMessage = replyBuilder.build
channel.write(replyMessage)
- if (Management.RECORD_STATS) {
- NR_OF_MESSAGES_SENT.incr
- NR_OF_BYTES_SENT.incr(replyMessage.getSerializedSize)
- }
case e: Throwable =>
log.error("Could not invoke remote active object [%s :: %s] due to: %s", request.getMethod, request.getTarget, e)
e.printStackTrace
@@ -222,10 +194,6 @@ class RemoteServerHandler(val name: String, val applicationLoader: Option[ClassL
if (request.hasSupervisorUuid) replyBuilder.setSupervisorUuid(request.getSupervisorUuid)
val replyMessage = replyBuilder.build
channel.write(replyMessage)
- if (Management.RECORD_STATS) {
- NR_OF_MESSAGES_SENT.incr
- NR_OF_BYTES_SENT.incr(replyMessage.getSerializedSize)
- }
}
}
diff --git a/kernel/src/main/scala/nio/RequestReply.scala b/akka-actors/src/main/scala/nio/RequestReply.scala
similarity index 97%
rename from kernel/src/main/scala/nio/RequestReply.scala
rename to akka-actors/src/main/scala/nio/RequestReply.scala
index b48732cf94..ce79653f06 100644
--- a/kernel/src/main/scala/nio/RequestReply.scala
+++ b/akka-actors/src/main/scala/nio/RequestReply.scala
@@ -2,11 +2,11 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.nio
+package se.scalablesolutions.akka.nio
import java.util.concurrent.atomic.AtomicLong
-import kernel.stm.Transaction
-import kernel.util.HashCode
+import stm.Transaction
+import util.HashCode
// FIXME: will not work - can clash with other host's requests - need te prepend with hostname
object RemoteRequestIdFactory {
diff --git a/kernel/src/main/scala/reactor/Dispatchers.scala b/akka-actors/src/main/scala/reactor/Dispatchers.scala
similarity index 96%
rename from kernel/src/main/scala/reactor/Dispatchers.scala
rename to akka-actors/src/main/scala/reactor/Dispatchers.scala
index 5c4935bbd5..30846752b6 100644
--- a/kernel/src/main/scala/reactor/Dispatchers.scala
+++ b/akka-actors/src/main/scala/reactor/Dispatchers.scala
@@ -2,9 +2,9 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.reactor
+package se.scalablesolutions.akka.reactor
-import kernel.actor.Actor
+import actor.Actor
/**
* Scala API. Dispatcher factory.
diff --git a/kernel/src/main/scala/reactor/EventBasedSingleThreadDispatcher.scala b/akka-actors/src/main/scala/reactor/EventBasedSingleThreadDispatcher.scala
similarity index 84%
rename from kernel/src/main/scala/reactor/EventBasedSingleThreadDispatcher.scala
rename to akka-actors/src/main/scala/reactor/EventBasedSingleThreadDispatcher.scala
index a4c7a0fc80..90518dbd73 100644
--- a/kernel/src/main/scala/reactor/EventBasedSingleThreadDispatcher.scala
+++ b/akka-actors/src/main/scala/reactor/EventBasedSingleThreadDispatcher.scala
@@ -8,16 +8,11 @@
*
* Based on code from the actorom actor framework by Sergio Bossa [http://code.google.com/p/actorom/].
*/
-package se.scalablesolutions.akka.kernel.reactor
+package se.scalablesolutions.akka.reactor
-import kernel.management.Management
-
-import java.util.{LinkedList, Queue, List}
-
-import com.twitter.service.Stats
+import java.util.{LinkedList, List}
class EventBasedSingleThreadDispatcher(name: String) extends MessageDispatcherBase(name) {
- val NR_OF_PROCESSED_MESSAGES = Stats.getCounter("NrOfProcessedMessage_" + name)
def start = if (!active) {
active = true
val messageDemultiplexer = new EventBasedSingleThreadDemultiplexer(queue)
@@ -28,7 +23,6 @@ class EventBasedSingleThreadDispatcher(name: String) extends MessageDispatcherBa
messageDemultiplexer.select
} catch { case e: InterruptedException => active = false }
val selectedInvocations = messageDemultiplexer.acquireSelectedInvocations
- if (Management.RECORD_STATS) NR_OF_PROCESSED_MESSAGES.incr(selectedInvocations.size)
val iter = selectedInvocations.iterator
while (iter.hasNext) {
val invocation = iter.next
diff --git a/kernel/src/main/scala/reactor/EventBasedThreadPoolDispatcher.scala b/akka-actors/src/main/scala/reactor/EventBasedThreadPoolDispatcher.scala
similarity index 97%
rename from kernel/src/main/scala/reactor/EventBasedThreadPoolDispatcher.scala
rename to akka-actors/src/main/scala/reactor/EventBasedThreadPoolDispatcher.scala
index 1f96769374..c63237f12c 100644
--- a/kernel/src/main/scala/reactor/EventBasedThreadPoolDispatcher.scala
+++ b/akka-actors/src/main/scala/reactor/EventBasedThreadPoolDispatcher.scala
@@ -2,9 +2,7 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.reactor
-
-import kernel.management.{Management, ThreadPoolMBean}
+package se.scalablesolutions.akka.reactor
import java.util.concurrent._
import locks.ReentrantLock
@@ -12,8 +10,6 @@ import atomic.{AtomicLong, AtomicInteger}
import ThreadPoolExecutor.CallerRunsPolicy
import java.util.{Collection, HashSet, HashMap, LinkedList, List}
-import com.twitter.service.Stats
-
/**
* Implements the Reactor pattern as defined in: [http://www.cs.wustl.edu/~schmidt/PDF/reactor-siemens.pdf].
* See also this article: [http://today.java.net/cs/user/print/a/350].
@@ -63,7 +59,6 @@ import com.twitter.service.Stats
class EventBasedThreadPoolDispatcher(name: String, private val concurrentMode: Boolean) extends MessageDispatcherBase(name) {
def this(name: String) = this(name, false)
- val NR_OF_PROCESSED_MESSAGES = Stats.getCounter("NrOfProcessedMessages_" + name)
private val NR_START_THREADS = 16
private val NR_MAX_THREADS = 128
private val KEEP_ALIVE_TIME = 60000L // default is one minute
@@ -79,7 +74,6 @@ class EventBasedThreadPoolDispatcher(name: String, private val concurrentMode: B
def start = if (!active) {
active = true
- Management.registerMBean(new ThreadPoolMBean(threadPoolBuilder), "ThreadPool_" + name)
/**
* This dispatcher code is based on code from the actorom actor framework by Sergio Bossa [http://code.google.com/p/actorom/].
@@ -95,7 +89,6 @@ class EventBasedThreadPoolDispatcher(name: String, private val concurrentMode: B
} catch { case e: InterruptedException => active = false }
val selectedInvocations = messageDemultiplexer.acquireSelectedInvocations
val reservedInvocations = reserve(selectedInvocations)
- if (Management.RECORD_STATS) NR_OF_PROCESSED_MESSAGES.incr(reservedInvocations.size)
val it = reservedInvocations.entrySet.iterator
while (it.hasNext) {
val entry = it.next
diff --git a/kernel/src/main/scala/reactor/Future.scala b/akka-actors/src/main/scala/reactor/Future.scala
similarity index 98%
rename from kernel/src/main/scala/reactor/Future.scala
rename to akka-actors/src/main/scala/reactor/Future.scala
index c81a88b264..c43b212932 100644
--- a/kernel/src/main/scala/reactor/Future.scala
+++ b/akka-actors/src/main/scala/reactor/Future.scala
@@ -5,7 +5,7 @@
/**
* Based on code from the actorom actor framework by Sergio Bossa [http://code.google.com/p/actorom/].
*/
-package se.scalablesolutions.akka.kernel.reactor
+package se.scalablesolutions.akka.reactor
import java.util.concurrent.locks.{Lock, Condition, ReentrantLock}
import java.util.concurrent.TimeUnit
diff --git a/kernel/src/main/scala/reactor/MessageDispatcherBase.scala b/akka-actors/src/main/scala/reactor/MessageDispatcherBase.scala
similarity index 74%
rename from kernel/src/main/scala/reactor/MessageDispatcherBase.scala
rename to akka-actors/src/main/scala/reactor/MessageDispatcherBase.scala
index d47db197a5..8a6670bdc4 100644
--- a/kernel/src/main/scala/reactor/MessageDispatcherBase.scala
+++ b/akka-actors/src/main/scala/reactor/MessageDispatcherBase.scala
@@ -2,19 +2,15 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.reactor
-
-import kernel.management.Management
+package se.scalablesolutions.akka.reactor
import java.util.{LinkedList, Queue, List}
import java.util.concurrent.{TimeUnit, BlockingQueue}
import java.util.HashMap
-import com.twitter.service.Stats
-
abstract class MessageDispatcherBase(val name: String) extends MessageDispatcher {
- //val CONCURRENT_MODE = kernel.Kernel.config.getBool("akka.actor.concurrent-mode", false)
+ //val CONCURRENT_MODE = Config.config.getBool("akka.actor.concurrent-mode", false)
val MILLISECONDS = TimeUnit.MILLISECONDS
val queue = new ReactiveMessageQueue(name)
var blockingQueue: BlockingQueue[Runnable] = _
@@ -23,12 +19,6 @@ abstract class MessageDispatcherBase(val name: String) extends MessageDispatcher
protected var selectorThread: Thread = _
protected val guard = new Object
- if (Management.RECORD_STATS) {
- Stats.makeGauge("SizeOfBlockingQueue_" + name) {
- guard.synchronized { blockingQueue.size.toDouble }
- }
- }
-
def messageQueue = queue
def registerHandler(key: AnyRef, handler: MessageInvoker) = guard.synchronized {
@@ -52,15 +42,9 @@ abstract class MessageDispatcherBase(val name: String) extends MessageDispatcher
}
class ReactiveMessageQueue(name: String) extends MessageQueue {
- private[kernel] val queue: Queue[MessageInvocation] = new LinkedList[MessageInvocation]
+ private[akka] val queue: Queue[MessageInvocation] = new LinkedList[MessageInvocation]
@volatile private var interrupted = false
- if (Management.RECORD_STATS) {
- Stats.makeGauge("SizeOfReactiveQueue_" + name) {
- queue.synchronized { queue.size.toDouble }
- }
- }
-
def append(handle: MessageInvocation) = queue.synchronized {
queue.offer(handle)
queue.notifyAll
diff --git a/kernel/src/main/scala/reactor/Reactor.scala b/akka-actors/src/main/scala/reactor/Reactor.scala
similarity index 92%
rename from kernel/src/main/scala/reactor/Reactor.scala
rename to akka-actors/src/main/scala/reactor/Reactor.scala
index 9e4983a6f8..a4bb3e3784 100644
--- a/kernel/src/main/scala/reactor/Reactor.scala
+++ b/akka-actors/src/main/scala/reactor/Reactor.scala
@@ -2,11 +2,11 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.reactor
+package se.scalablesolutions.akka.reactor
import java.util.List
-import kernel.stm.Transaction
-import kernel.util.HashCode
+import stm.Transaction
+import util.HashCode
trait MessageQueue {
def append(handle: MessageInvocation)
diff --git a/kernel/src/main/scala/reactor/ThreadBasedDispatcher.scala b/akka-actors/src/main/scala/reactor/ThreadBasedDispatcher.scala
similarity index 74%
rename from kernel/src/main/scala/reactor/ThreadBasedDispatcher.scala
rename to akka-actors/src/main/scala/reactor/ThreadBasedDispatcher.scala
index fbae2d8c99..aa04414169 100644
--- a/kernel/src/main/scala/reactor/ThreadBasedDispatcher.scala
+++ b/akka-actors/src/main/scala/reactor/ThreadBasedDispatcher.scala
@@ -2,25 +2,20 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.reactor
-
-import com.twitter.service.Stats
+package se.scalablesolutions.akka.reactor
import java.util.concurrent.LinkedBlockingQueue
import java.util.Queue
-import kernel.actor.{Actor, ActorMessageInvoker}
-import kernel.management.Management
+import actor.{Actor, ActorMessageInvoker}
/**
* Dedicates a unique thread for each actor passed in as reference. Served through its messageQueue.
* @author Jonas Bonér
*/
-class ThreadBasedDispatcher private[kernel] (val name: String, val messageHandler: MessageInvoker) extends MessageDispatcher {
+class ThreadBasedDispatcher private[akka] (val name: String, val messageHandler: MessageInvoker) extends MessageDispatcher {
def this(actor: Actor) = this(actor.getClass.getName, new ActorMessageInvoker(actor))
- val NR_OF_PROCESSED_MESSAGES = Stats.getCounter("NrOfProcessedMessages_" + name)
-
private val queue = new BlockingMessageQueue(name)
private var selectorThread: Thread = _
@volatile private var active: Boolean = false
@@ -33,7 +28,6 @@ class ThreadBasedDispatcher private[kernel] (val name: String, val messageHandle
override def run = {
while (active) {
try {
- if (Management.RECORD_STATS) NR_OF_PROCESSED_MESSAGES.incr
messageHandler.invoke(queue.take)
} catch { case e: InterruptedException => active = false }
}
@@ -52,12 +46,6 @@ class ThreadBasedDispatcher private[kernel] (val name: String, val messageHandle
}
class BlockingMessageQueue(name: String) extends MessageQueue {
- if (Management.RECORD_STATS) {
- Stats.makeGauge("SizeOfBlockingQueue_" + name) {
- queue.size.toDouble
- }
- }
-
// FIXME: configure the LBQ
private val queue = new LinkedBlockingQueue[MessageInvocation]
def append(handle: MessageInvocation) = queue.put(handle)
diff --git a/kernel/src/main/scala/serialization/Binary.scala b/akka-actors/src/main/scala/serialization/Binary.scala
similarity index 100%
rename from kernel/src/main/scala/serialization/Binary.scala
rename to akka-actors/src/main/scala/serialization/Binary.scala
diff --git a/kernel/src/main/scala/serialization/Serializable.scala b/akka-actors/src/main/scala/serialization/Serializable.scala
similarity index 94%
rename from kernel/src/main/scala/serialization/Serializable.scala
rename to akka-actors/src/main/scala/serialization/Serializable.scala
index 4faa220357..9312a859c8 100644
--- a/kernel/src/main/scala/serialization/Serializable.scala
+++ b/akka-actors/src/main/scala/serialization/Serializable.scala
@@ -6,10 +6,10 @@ package se.scalablesolutions.akka.serialization
import org.codehaus.jackson.map.ObjectMapper
import com.google.protobuf.Message
-import com.twitter.commons.Json
import reflect.Manifest
import sbinary.DefaultProtocol
import java.io.{StringWriter, ByteArrayOutputStream, ObjectOutputStream}
+import sjson.json.{Serializer=>SJSONSerializer}
object SerializationProtocol {
val SBINARY = 1
@@ -100,8 +100,8 @@ object Serializable {
* @author Jonas Bonér
*/
trait ScalaJSON extends JSON {
- def toJSON: String = Json.build(this).toString
- def toBytes: Array[Byte] = toJSON.getBytes("UTF-8")
+ def toJSON: String = new String(toBytes, "UTF-8")
+ def toBytes: Array[Byte] = SJSONSerializer.SJSON.out(this)
}
/**
diff --git a/kernel/src/main/scala/serialization/Serializer.scala b/akka-actors/src/main/scala/serialization/Serializer.scala
similarity index 94%
rename from kernel/src/main/scala/serialization/Serializer.scala
rename to akka-actors/src/main/scala/serialization/Serializer.scala
index bfa8bfe011..2d2917e0b5 100644
--- a/kernel/src/main/scala/serialization/Serializer.scala
+++ b/akka-actors/src/main/scala/serialization/Serializer.scala
@@ -9,7 +9,7 @@ import java.io.{ObjectOutputStream, ByteArrayOutputStream, ObjectInputStream, By
import reflect.{BeanProperty, Manifest}
import sbinary.DefaultProtocol
import org.codehaus.jackson.map.ObjectMapper
-import com.twitter.commons.Json
+import sjson.json.{Serializer=>SJSONSerializer}
/**
* @author Jonas Bonér
@@ -119,11 +119,11 @@ object Serializer {
object ScalaJSON extends Serializer {
def deepClone(obj: AnyRef): AnyRef = in(out(obj), None)
- def out(obj: AnyRef): Array[Byte] = Json.build(obj).toString.getBytes("UTF-8")
+ def out(obj: AnyRef): Array[Byte] = SJSONSerializer.SJSON.out(obj)
- def in(bytes: Array[Byte], clazz: Option[Class[_]]): AnyRef = Json.parse(new String(bytes, "UTF-8")).asInstanceOf[AnyRef]
+ def in(bytes: Array[Byte], clazz: Option[Class[_]]): AnyRef = SJSONSerializer.SJSON.in(bytes)
- def in(json: String): AnyRef = Json.parse(json).asInstanceOf[AnyRef]
+ def in(json: String): AnyRef = SJSONSerializer.SJSON.in(json)
}
/**
diff --git a/kernel/src/main/scala/stm/ChangeSet.scala b/akka-actors/src/main/scala/stm/ChangeSet.scala
similarity index 56%
rename from kernel/src/main/scala/stm/ChangeSet.scala
rename to akka-actors/src/main/scala/stm/ChangeSet.scala
index 5910714da8..440df8c18d 100644
--- a/kernel/src/main/scala/stm/ChangeSet.scala
+++ b/akka-actors/src/main/scala/stm/ChangeSet.scala
@@ -2,10 +2,10 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.stm
+package se.scalablesolutions.akka.stm
-import kernel.state.{Transactional, TransactionalMap}
-import kernel.util.Helpers.ReadWriteLock
+import state.{Transactional, TransactionalMap}
+import util.Helpers.ReadWriteLock
import scala.collection.immutable.HashSet
@serializable
@@ -13,14 +13,14 @@ class ChangeSet {
private val lock = new ReadWriteLock
private var transactionalItems: Set[Transactional] = new HashSet
- private[kernel] def +(item: Transactional) = lock.withWriteLock {
+ private[akka] def +(item: Transactional) = lock.withWriteLock {
transactionalItems += item
}
- private[kernel] def items: List[Transactional] = lock.withReadLock {
+ private[akka] def items: List[Transactional] = lock.withReadLock {
transactionalItems.toList.asInstanceOf[List[Transactional]]
}
- private[kernel] def clear = lock.withWriteLock {
+ private[akka] def clear = lock.withWriteLock {
transactionalItems = new HashSet
}
diff --git a/kernel/src/main/scala/collection/HashTrie.scala b/akka-actors/src/main/scala/stm/HashTrie.scala
similarity index 100%
rename from kernel/src/main/scala/collection/HashTrie.scala
rename to akka-actors/src/main/scala/stm/HashTrie.scala
diff --git a/kernel/src/main/scala/util/ResultOrFailure.scala b/akka-actors/src/main/scala/stm/ResultOrFailure.scala
similarity index 91%
rename from kernel/src/main/scala/util/ResultOrFailure.scala
rename to akka-actors/src/main/scala/stm/ResultOrFailure.scala
index 69d3f90b38..f768add9c0 100644
--- a/kernel/src/main/scala/util/ResultOrFailure.scala
+++ b/akka-actors/src/main/scala/stm/ResultOrFailure.scala
@@ -1,60 +1,60 @@
-/**
- * Copyright (C) 2009 Scalable Solutions.
- */
-
-package se.scalablesolutions.akka.kernel.util
-
-import kernel.stm.Transaction
-
-/**
- * Reference that can hold either a typed value or an exception.
- *
- * Usage:
- *
- * scala> ResultOrFailure(1)
- * res0: ResultOrFailure[Int] = ResultOrFailure@a96606
- *
- * scala> res0()
- * res1: Int = 1
- *
- * scala> res0() = 3
- *
- * scala> res0()
- * res3: Int = 3
- *
- * scala> res0() = { println("Hello world"); 3}
- * Hello world
- *
- * scala> res0()
- * res5: Int = 3
- *
- * scala> res0() = error("Lets see what happens here...")
- *
- * scala> res0()
- * java.lang.RuntimeException: Lets see what happens here...
- * at ResultOrFailure.apply(RefExcept.scala:11)
- * at .(:6)
- * at .()
- * at Re...
- *
- *
- * @author Jonas Bonér
- */
-class ResultOrFailure[Payload](payload: Payload, val tx: Option[Transaction]) {
- private[this] var contents: Either[Throwable, Payload] = Right(payload)
-
- def update(value: => Payload) = {
- contents = try { Right(value) } catch { case (e : Throwable) => Left(e) }
- }
-
- def apply() = contents match {
- case Right(payload) => payload
- case Left(e) => throw e
- }
-
- override def toString(): String = "ResultOrFailure[" + contents + "]"
-}
-object ResultOrFailure {
- def apply[Payload](payload: Payload, tx: Option[Transaction]) = new ResultOrFailure(payload, tx)
- def apply[AnyRef](tx: Option[Transaction]) = new ResultOrFailure(new Object, tx)
-}
+/**
+ * Copyright (C) 2009 Scalable Solutions.
+ */
+
+package se.scalablesolutions.akka.util
+
+import stm.Transaction
+
+/**
+ * Reference that can hold either a typed value or an exception.
+ *
+ * Usage:
+ *
+ * scala> ResultOrFailure(1)
+ * res0: ResultOrFailure[Int] = ResultOrFailure@a96606
+ *
+ * scala> res0()
+ * res1: Int = 1
+ *
+ * scala> res0() = 3
+ *
+ * scala> res0()
+ * res3: Int = 3
+ *
+ * scala> res0() = { println("Hello world"); 3}
+ * Hello world
+ *
+ * scala> res0()
+ * res5: Int = 3
+ *
+ * scala> res0() = error("Lets see what happens here...")
+ *
+ * scala> res0()
+ * java.lang.RuntimeException: Lets see what happens here...
+ * at ResultOrFailure.apply(RefExcept.scala:11)
+ * at .(:6)
+ * at .()
+ * at Re...
+ *
+ *
+ * @author Jonas Bonér
+ */
+class ResultOrFailure[Payload](payload: Payload, val tx: Option[Transaction]) {
+ private[this] var contents: Either[Throwable, Payload] = Right(payload)
+
+ def update(value: => Payload) = {
+ contents = try { Right(value) } catch { case (e : Throwable) => Left(e) }
+ }
+
+ def apply() = contents match {
+ case Right(payload) => payload
+ case Left(e) => throw e
+ }
+
+ override def toString(): String = "ResultOrFailure[" + contents + "]"
+}
+object ResultOrFailure {
+ def apply[Payload](payload: Payload, tx: Option[Transaction]) = new ResultOrFailure(payload, tx)
+ def apply[AnyRef](tx: Option[Transaction]) = new ResultOrFailure(new Object, tx)
+}
diff --git a/kernel/src/main/scala/stm/Transaction.scala b/akka-actors/src/main/scala/stm/Transaction.scala
similarity index 98%
rename from kernel/src/main/scala/stm/Transaction.scala
rename to akka-actors/src/main/scala/stm/Transaction.scala
index bb47a397bb..f7aba599d8 100644
--- a/kernel/src/main/scala/stm/Transaction.scala
+++ b/akka-actors/src/main/scala/stm/Transaction.scala
@@ -2,10 +2,11 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.stm
+package se.scalablesolutions.akka.stm
-import kernel.state.Transactional
-import kernel.util.Logging
+import java.util.concurrent.atomic.{AtomicInteger, AtomicLong}
+import state.Transactional
+import util.Logging
import org.multiverse.api.{Transaction => MultiverseTransaction}
import org.multiverse.stms.alpha.AlphaStm
@@ -304,7 +305,7 @@ object TransactionIdFactory {
throw new IllegalStateException("Expected ACTIVE or NEW transaction - current status [" + status + "]: " + toString)
// For reinitialize transaction after sending it over the wire
- private[kernel] def reinit = synchronized {
+ private[akka] def reinit = synchronized {
import net.lag.logging.{Logger, Level}
if (log == null) {
log = Logger.get(this.getClass.getName)
diff --git a/kernel/src/main/scala/stm/TransactionManagement.scala b/akka-actors/src/main/scala/stm/TransactionManagement.scala
similarity index 85%
rename from kernel/src/main/scala/stm/TransactionManagement.scala
rename to akka-actors/src/main/scala/stm/TransactionManagement.scala
index 2d0081c5e5..37ebeb4e29 100644
--- a/kernel/src/main/scala/stm/TransactionManagement.scala
+++ b/akka-actors/src/main/scala/stm/TransactionManagement.scala
@@ -2,12 +2,12 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.stm
+package se.scalablesolutions.akka.stm
import java.util.concurrent.atomic.AtomicBoolean
-import kernel.reactor.MessageInvocation
-import kernel.util.Logging
+import reactor.MessageInvocation
+import util.Logging
import org.codehaus.aspectwerkz.proxy.Uuid // FIXME is java.util.UUID better?
import org.multiverse.utils.TransactionThreadLocal._
@@ -19,16 +19,17 @@ class TransactionAwareWrapperException(val cause: Throwable, val tx: Option[Tran
}
object TransactionManagement {
- val TIME_WAITING_FOR_COMPLETION = kernel.Kernel.config.getInt("akka.stm.wait-for-completion", 100)
- val NR_OF_TIMES_WAITING_FOR_COMPLETION = kernel.Kernel.config.getInt("akka.stm.wait-nr-of-times", 3)
- val TRANSACTION_ENABLED = new AtomicBoolean(kernel.Kernel.config.getBool("akka.stm.service", true))
+ import Config._
+ val TIME_WAITING_FOR_COMPLETION = config.getInt("akka.stm.wait-for-completion", 100)
+ val NR_OF_TIMES_WAITING_FOR_COMPLETION = config.getInt("akka.stm.wait-nr-of-times", 3)
+ val TRANSACTION_ENABLED = new AtomicBoolean(config.getBool("akka.stm.service", true))
// FIXME reenable 'akka.stm.restart-on-collision' when new STM is in place
- val RESTART_TRANSACTION_ON_COLLISION = false //kernel.Kernel.config.getBool("akka.stm.restart-on-collision", true)
+ val RESTART_TRANSACTION_ON_COLLISION = false //akka.Kernel.config.getBool("akka.stm.restart-on-collision", true)
def isTransactionalityEnabled = TRANSACTION_ENABLED.get
def disableTransactions = TRANSACTION_ENABLED.set(false)
- private[kernel] val threadBoundTx: ThreadLocal[Option[Transaction]] = new ThreadLocal[Option[Transaction]]() {
+ private[akka] val threadBoundTx: ThreadLocal[Option[Transaction]] = new ThreadLocal[Option[Transaction]]() {
override protected def initialValue: Option[Transaction] = None
}
}
@@ -40,7 +41,7 @@ trait TransactionManagement extends Logging {
protected[this] var messageToReschedule: Option[MessageInvocation] = None
import TransactionManagement.threadBoundTx
- private[kernel] var activeTx: Option[Transaction] = None
+ private[akka] var activeTx: Option[Transaction] = None
protected def startNewTransaction: Option[Transaction] = {
val newTx = new Transaction
diff --git a/kernel/src/main/scala/stm/TransactionWatcher.scala b/akka-actors/src/main/scala/stm/TransactionWatcher.scala
similarity index 98%
rename from kernel/src/main/scala/stm/TransactionWatcher.scala
rename to akka-actors/src/main/scala/stm/TransactionWatcher.scala
index 755a75f54d..2dd966c7ba 100644
--- a/kernel/src/main/scala/stm/TransactionWatcher.scala
+++ b/akka-actors/src/main/scala/stm/TransactionWatcher.scala
@@ -2,7 +2,7 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.stm
+package se.scalablesolutions.akka.stm
/*
import kernel.util.Logging
diff --git a/akka-actors/src/main/scala/stm/TransactionalState.scala b/akka-actors/src/main/scala/stm/TransactionalState.scala
new file mode 100644
index 0000000000..c671a72ea5
--- /dev/null
+++ b/akka-actors/src/main/scala/stm/TransactionalState.scala
@@ -0,0 +1,245 @@
+/**
+ * Copyright (C) 2009 Scalable Solutions.
+ */
+
+package se.scalablesolutions.akka.state
+
+import stm.TransactionManagement
+import akka.collection._
+
+import org.codehaus.aspectwerkz.proxy.Uuid
+
+import scala.collection.mutable.{ArrayBuffer, HashMap}
+
+/**
+ * Scala API.
+ *
+ * Example Scala usage:
+ *
+ * val myMap = TransactionalState.newMap
+ *
+ */
+object TransactionalState extends TransactionalState
+
+/**
+ * Java API.
+ *
+ * Example Java usage:
+ *
+ * TransactionalState state = new TransactionalState();
+ * TransactionalMap myMap = state.newMap();
+ *
+ */
+class TransactionalState {
+ def newMap[K, V]: TransactionalMap[K, V] = new InMemoryTransactionalMap[K, V]
+ def newVector[T]: TransactionalVector[T] = new InMemoryTransactionalVector[T]
+ def newRef[T]: TransactionalRef[T] = new TransactionalRef[T]
+}
+
+/**
+ * @author Jonas Bonér
+ */
+@serializable
+trait Transactional {
+ // FIXME: won't work across the cluster
+ val uuid = Uuid.newUuid.toString
+
+ private[akka] def begin
+ private[akka] def commit
+ private[akka] def rollback
+
+ protected def verifyTransaction = {
+ val cflowTx = TransactionManagement.threadBoundTx.get
+ if (!cflowTx.isDefined) {
+ throw new IllegalStateException("Can't access transactional reference outside the scope of a transaction [" + this + "]")
+ } else {
+ cflowTx.get.register(this)
+ }
+ }
+}
+
+/**
+ * Base trait for all state implementations (persistent or in-memory).
+ *
+ * FIXME: Create Java versions using pcollections
+ *
+ * @author Jonas Bonér
+ */
+trait TransactionalMap[K, V] extends Transactional with scala.collection.mutable.Map[K, V] {
+ override def hashCode: Int = System.identityHashCode(this);
+ override def equals(other: Any): Boolean = false
+ def remove(key: K)
+}
+
+/**
+ * Not thread-safe, but should only be used from within an Actor, i.e. one single thread at a time.
+ *
+ * @author Jonas Bonér
+ */
+class InMemoryTransactionalMap[K, V] extends TransactionalMap[K, V] {
+ protected[akka] var state = new HashTrie[K, V]
+ protected[akka] var snapshot = state
+
+ // ---- For Transactional ----
+ override def begin = snapshot = state
+ override def commit = snapshot = state
+ override def rollback = state = snapshot
+
+ // ---- Overriding scala.collection.mutable.Map behavior ----
+ override def contains(key: K): Boolean = {
+ verifyTransaction
+ state.contains(key)
+ }
+
+ override def clear = {
+ verifyTransaction
+ state = new HashTrie[K, V]
+ }
+
+ override def size: Int = {
+ verifyTransaction
+ state.size
+ }
+
+ // ---- For scala.collection.mutable.Map ----
+ override def remove(key: K) = {
+ verifyTransaction
+ state = state - key
+ }
+
+ override def elements: Iterator[(K, V)] = {
+// verifyTransaction
+ state.elements
+ }
+
+ override def get(key: K): Option[V] = {
+ verifyTransaction
+ state.get(key)
+ }
+
+ override def put(key: K, value: V): Option[V] = {
+ verifyTransaction
+ val oldValue = state.get(key)
+ state = state.update(key, value)
+ oldValue
+ }
+
+ override def -=(key: K) = {
+ verifyTransaction
+ remove(key)
+ }
+
+ override def update(key: K, value: V) = {
+ verifyTransaction
+ put(key, value)
+ }
+}
+
+/**
+ * Base for all transactional vector implementations.
+ *
+ * @author Jonas Bonér
+ */
+abstract class TransactionalVector[T] extends Transactional with RandomAccessSeq[T] {
+ override def hashCode: Int = System.identityHashCode(this);
+ override def equals(other: Any): Boolean = false
+
+ def add(elem: T)
+
+ def get(index: Int): T
+
+ def getRange(start: Int, count: Int): List[T]
+}
+
+/**
+ * Implements an in-memory transactional vector.
+ *
+ * Not thread-safe, but should only be used from within an Actor, i.e. one single thread at a time.
+ *
+ * @author Jonas Bonér
+ */
+class InMemoryTransactionalVector[T] extends TransactionalVector[T] {
+ private[akka] var state: Vector[T] = EmptyVector
+ private[akka] var snapshot = state
+
+ def add(elem: T) = {
+ verifyTransaction
+ state = state + elem
+ }
+
+ def get(index: Int): T = {
+ verifyTransaction
+ state(index)
+ }
+
+ def getRange(start: Int, count: Int): List[T] = {
+ verifyTransaction
+ state.slice(start, count).toList.asInstanceOf[List[T]]
+ }
+
+ // ---- For Transactional ----
+ override def begin = snapshot = state
+
+ override def commit = snapshot = state
+
+ override def rollback = state = snapshot
+
+ // ---- For Seq ----
+ def length: Int = {
+ verifyTransaction
+ state.length
+ }
+
+ def apply(index: Int): T = {
+ verifyTransaction
+ state(index)
+ }
+
+ override def elements: Iterator[T] = {
+ //verifyTransaction
+ state.elements
+ }
+
+ override def toList: List[T] = {
+ verifyTransaction
+ state.toList
+ }
+}
+
+/**
+ * Implements a transactional reference.
+ *
+ * Not thread-safe, but should only be used from within an Actor, i.e. one single thread at a time.
+ *
+ * @author Jonas Bonér
+ */
+class TransactionalRef[T] extends Transactional {
+ private[akka] var ref: Option[T] = None
+ private[akka] var snapshot: Option[T] = None
+
+ override def begin = if (ref.isDefined) snapshot = Some(ref.get)
+
+ override def commit = if (ref.isDefined) snapshot = Some(ref.get)
+
+ override def rollback = if (snapshot.isDefined) ref = Some(snapshot.get)
+
+ def swap(elem: T) = {
+ verifyTransaction
+ ref = Some(elem)
+ }
+
+ def get: Option[T] = {
+ verifyTransaction
+ ref
+ }
+
+ def getOrElse(default: => T): T = {
+ verifyTransaction
+ ref.getOrElse(default)
+ }
+
+ def isDefined: Boolean = {
+ verifyTransaction
+ ref.isDefined
+ }
+}
diff --git a/kernel/src/main/scala/collection/Vector.scala b/akka-actors/src/main/scala/stm/Vector.scala
similarity index 100%
rename from kernel/src/main/scala/collection/Vector.scala
rename to akka-actors/src/main/scala/stm/Vector.scala
diff --git a/kernel/src/test/scala/ActorSpec.scala b/akka-actors/src/test/scala/ActorSpec.scala
similarity index 96%
rename from kernel/src/test/scala/ActorSpec.scala
rename to akka-actors/src/test/scala/ActorSpec.scala
index 5977301e7a..74ebd13f25 100644
--- a/kernel/src/test/scala/ActorSpec.scala
+++ b/akka-actors/src/test/scala/ActorSpec.scala
@@ -1,4 +1,4 @@
-package se.scalablesolutions.akka.kernel.actor
+package se.scalablesolutions.akka.actor
import java.util.concurrent.TimeUnit
diff --git a/kernel/src/test/scala/AllSuite.scala b/akka-actors/src/test/scala/AllSuite.scala
old mode 100755
new mode 100644
similarity index 79%
rename from kernel/src/test/scala/AllSuite.scala
rename to akka-actors/src/test/scala/AllSuite.scala
index 9609ec8b06..a5535c30b7
--- a/kernel/src/test/scala/AllSuite.scala
+++ b/akka-actors/src/test/scala/AllSuite.scala
@@ -2,7 +2,7 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel
+package se.scalablesolutions.akka
import org.scalatest._
@@ -12,7 +12,6 @@ import org.scalatest._
class AllSuite extends SuperSuite(
List(
- new SupervisorSpec
// new ActiveObjectSpec,
// new RestManagerSpec
)
diff --git a/kernel/src/test/scala/AllTest.scala b/akka-actors/src/test/scala/AllTest.scala
similarity index 71%
rename from kernel/src/test/scala/AllTest.scala
rename to akka-actors/src/test/scala/AllTest.scala
index a225bfb080..0132daea26 100644
--- a/kernel/src/test/scala/AllTest.scala
+++ b/akka-actors/src/test/scala/AllTest.scala
@@ -1,11 +1,12 @@
-package se.scalablesolutions.akka.kernel
+package se.scalablesolutions.akka
import junit.framework.Test
import junit.framework.TestCase
import junit.framework.TestSuite
-import kernel.actor.{ActorSpec, RemoteActorSpec, PersistentActorSpec, InMemoryActorSpec}
-import kernel.reactor.{EventBasedSingleThreadDispatcherTest, EventBasedThreadPoolDispatcherTest}
+import actor.{ActorSpec, RemoteActorSpec, InMemoryActorSpec, SupervisorSpec, RemoteSupervisorSpec}
+import reactor.{EventBasedSingleThreadDispatcherTest, EventBasedThreadPoolDispatcherTest}
+import util.SchedulerSpec
object AllTest extends TestCase {
def suite(): Test = {
@@ -16,8 +17,8 @@ object AllTest extends TestCase {
suite.addTestSuite(classOf[EventBasedThreadPoolDispatcherTest])
suite.addTestSuite(classOf[ActorSpec])
suite.addTestSuite(classOf[RemoteActorSpec])
- //suite.addTestSuite(classOf[PersistentActorSpec])
suite.addTestSuite(classOf[InMemoryActorSpec])
+ suite.addTestSuite(classOf[SchedulerSpec])
//suite.addTestSuite(classOf[TransactionClasherSpec])
suite
}
diff --git a/kernel/src/test/scala/CamelSpec.scala b/akka-actors/src/test/scala/CamelSpec.scala
similarity index 95%
rename from kernel/src/test/scala/CamelSpec.scala
rename to akka-actors/src/test/scala/CamelSpec.scala
index f988d9daa5..7f9a546613 100644
--- a/kernel/src/test/scala/CamelSpec.scala
+++ b/akka-actors/src/test/scala/CamelSpec.scala
@@ -2,11 +2,12 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel
+package se.scalablesolutions.akka.camel
-import akka.kernel.config.ActiveObjectGuiceConfigurator
+/*
+import config.ActiveObjectGuiceConfigurator
import annotation.oneway
-import kernel.config.ScalaConfig._
+import config.ScalaConfig._
import com.google.inject.{AbstractModule, Scopes}
//import com.jteigen.scalatest.JUnit4Runner
@@ -28,16 +29,16 @@ import org.apache.camel.builder.RouteBuilder
import org.apache.camel.impl.DefaultCamelContext
// REQUIRES: -Djava.naming.factory.initial=org.apache.camel.util.jndi.CamelInitialContextFactory
-
+*/
/**
* @author Jonas Bonér
- */
+ *
//@RunWith(classOf[JUnit4Runner])
class CamelSpec extends Spec with ShouldMatchers {
describe("A Camel routing scheme") {
it("should route message from direct:test to actor A using @Bean endpoint") {
-/*
+
val latch = new CountDownLatch(1);
val conf = new ActiveObjectGuiceConfigurator
@@ -80,7 +81,6 @@ class CamelSpec extends Spec with ShouldMatchers {
val received = latch.await(5, TimeUnit.SECONDS)
received should equal (true)
conf.stop
-*/
}
}
}
@@ -98,3 +98,4 @@ class CamelFooImpl extends CamelFoo {
class CamelBarImpl extends CamelBar {
def bar(msg: String) = msg + "return_bar "
}
+ */
diff --git a/kernel/src/test/scala/EventBasedSingleThreadDispatcherTest.scala b/akka-actors/src/test/scala/EventBasedSingleThreadDispatcherTest.scala
similarity index 98%
rename from kernel/src/test/scala/EventBasedSingleThreadDispatcherTest.scala
rename to akka-actors/src/test/scala/EventBasedSingleThreadDispatcherTest.scala
index 3ac4eee51a..758f9d6cd0 100644
--- a/kernel/src/test/scala/EventBasedSingleThreadDispatcherTest.scala
+++ b/akka-actors/src/test/scala/EventBasedSingleThreadDispatcherTest.scala
@@ -1,4 +1,4 @@
-package se.scalablesolutions.akka.kernel.reactor
+package se.scalablesolutions.akka.reactor
import java.util.concurrent.CountDownLatch
import java.util.concurrent.TimeUnit
diff --git a/kernel/src/test/scala/EventBasedThreadPoolDispatcherTest.scala b/akka-actors/src/test/scala/EventBasedThreadPoolDispatcherTest.scala
similarity index 99%
rename from kernel/src/test/scala/EventBasedThreadPoolDispatcherTest.scala
rename to akka-actors/src/test/scala/EventBasedThreadPoolDispatcherTest.scala
index c0b205d6f6..a57ad0b825 100644
--- a/kernel/src/test/scala/EventBasedThreadPoolDispatcherTest.scala
+++ b/akka-actors/src/test/scala/EventBasedThreadPoolDispatcherTest.scala
@@ -1,4 +1,4 @@
-package se.scalablesolutions.akka.kernel.reactor
+package se.scalablesolutions.akka.reactor
import java.util.concurrent.ThreadPoolExecutor.CallerRunsPolicy
import java.util.concurrent.atomic.AtomicBoolean
diff --git a/kernel/src/test/scala/InMemoryActorSpec.scala b/akka-actors/src/test/scala/InMemoryActorSpec.scala
similarity index 98%
rename from kernel/src/test/scala/InMemoryActorSpec.scala
rename to akka-actors/src/test/scala/InMemoryActorSpec.scala
index ca7be8eca5..85e3e32599 100644
--- a/kernel/src/test/scala/InMemoryActorSpec.scala
+++ b/akka-actors/src/test/scala/InMemoryActorSpec.scala
@@ -1,7 +1,7 @@
-package se.scalablesolutions.akka.kernel.actor
+package se.scalablesolutions.akka.actor
import junit.framework.TestCase
-import kernel.state.TransactionalState
+import state.TransactionalState
import org.junit.{Test, Before}
import org.junit.Assert._
@@ -28,7 +28,7 @@ class InMemStatefulActor extends Actor {
makeTransactionRequired
private val mapState = TransactionalState.newMap[String, String]
private val vectorState = TransactionalState.newVector[String]
- private val refState = TransactionalState.newRef[String]("")
+ private val refState = TransactionalState.newRef[String]
def receive: PartialFunction[Any, Unit] = {
case GetMapState(key) =>
diff --git a/kernel/src/test/scala/Messages.scala b/akka-actors/src/test/scala/Messages.scala
similarity index 96%
rename from kernel/src/test/scala/Messages.scala
rename to akka-actors/src/test/scala/Messages.scala
index 51e735db8c..7e4d5ca66f 100644
--- a/kernel/src/test/scala/Messages.scala
+++ b/akka-actors/src/test/scala/Messages.scala
@@ -2,7 +2,7 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel
+package se.scalablesolutions.akka
import akka.serialization.Serializable
diff --git a/kernel/src/test/scala/RemoteActorSpec.scala b/akka-actors/src/test/scala/RemoteActorSpec.scala
similarity index 94%
rename from kernel/src/test/scala/RemoteActorSpec.scala
rename to akka-actors/src/test/scala/RemoteActorSpec.scala
index 3387aa8eb0..f492189fe2 100644
--- a/kernel/src/test/scala/RemoteActorSpec.scala
+++ b/akka-actors/src/test/scala/RemoteActorSpec.scala
@@ -1,8 +1,9 @@
-package se.scalablesolutions.akka.kernel.actor
+package se.scalablesolutions.akka.actor
import java.util.concurrent.TimeUnit
import junit.framework.TestCase
-import kernel.nio.{RemoteServer, RemoteClient}
+
+import nio.{RemoteServer, RemoteClient}
import org.junit.{Test, Before}
import org.junit.Assert._
@@ -26,7 +27,7 @@ class RemoteActorSpecActorBidirectional extends Actor {
}
class RemoteActorSpec extends TestCase {
- kernel.Kernel.config
+ akka.Config.config
new Thread(new Runnable() {
def run = {
val server = new RemoteServer
diff --git a/kernel/src/test/scala/RemoteSupervisorSpec.scala b/akka-actors/src/test/scala/RemoteSupervisorSpec.scala
similarity index 95%
rename from kernel/src/test/scala/RemoteSupervisorSpec.scala
rename to akka-actors/src/test/scala/RemoteSupervisorSpec.scala
index c8e3770cec..e7425aedc2 100644
--- a/kernel/src/test/scala/RemoteSupervisorSpec.scala
+++ b/akka-actors/src/test/scala/RemoteSupervisorSpec.scala
@@ -2,12 +2,11 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel
+package se.scalablesolutions.akka.actor
import akka.serialization.BinaryString
-import kernel.nio.{RemoteClient, RemoteServer}
-import kernel.actor.{Supervisor, SupervisorFactory, Actor, StartSupervisor}
-import kernel.config.ScalaConfig._
+import nio.{RemoteClient, RemoteServer}
+import config.ScalaConfig._
//import com.jteigen.scalatest.JUnit4Runner
import org.junit.runner.RunWith
@@ -23,7 +22,7 @@ object Log {
//@RunWith(classOf[JUnit4Runner])
class RemoteSupervisorSpec extends junit.framework.TestCase with Suite {
- Kernel.config
+ akka.Config.config
new Thread(new Runnable() {
def run = {
val server = new RemoteServer
@@ -248,43 +247,43 @@ class RemoteSupervisorSpec extends junit.framework.TestCase with Suite {
/*
def testOneWayKillSingleActorOneForOne = {
- Log.messageLog = ""
+ Logg.messageLog = ""
val sup = getSingleActorOneForOneSupervisor
sup ! StartSupervisor
Thread.sleep(500)
pingpong1 ! BinaryString("Die")
Thread.sleep(500)
expect("DIE") {
- Log.messageLog
+ Logg.messageLog
}
}
def testOneWayCallKillCallSingleActorOneForOne = {
- Log.messageLog = ""
+ Logg.messageLog = ""
val sup = getSingleActorOneForOneSupervisor
sup ! StartSupervisor
Thread.sleep(500)
pingpong1 ! OneWay
Thread.sleep(500)
expect("oneway") {
- Log.oneWayLog
+ Logg.oneWayLog
}
pingpong1 ! BinaryString("Die")
Thread.sleep(500)
expect("DIE") {
- Log.messageLog
+ Logg.messageLog
}
pingpong1 ! OneWay
Thread.sleep(500)
expect("onewayoneway") {
- Log.oneWayLog
+ Logg.oneWayLog
}
}
*/
/*
def testOneWayKillSingleActorAllForOne = {
- Log.messageLog = ""
+ Logg.messageLog = ""
val sup = getSingleActorAllForOneSupervisor
sup ! StartSupervisor
Thread.sleep(500)
@@ -293,12 +292,12 @@ class RemoteSupervisorSpec extends junit.framework.TestCase with Suite {
}
Thread.sleep(500)
expect("DIE") {
- Log.messageLog
+ Logg.messageLog
}
}
def testOneWayCallKillCallSingleActorAllForOne = {
- Log.messageLog = ""
+ Logg.messageLog = ""
val sup = getSingleActorAllForOneSupervisor
sup ! StartSupervisor
Thread.sleep(500)
@@ -307,26 +306,26 @@ class RemoteSupervisorSpec extends junit.framework.TestCase with Suite {
}
Thread.sleep(500)
expect("ping") {
- Log.messageLog
+ Logg.messageLog
}
intercept(classOf[RuntimeException]) {
pingpong1 ! BinaryString("Die")
}
Thread.sleep(500)
expect("pingDIE") {
- Log.messageLog
+ Logg.messageLog
}
expect("pong") {
(pingpong1 ! BinaryString("Ping")).getOrElse("nil")
}
Thread.sleep(500)
expect("pingDIEping") {
- Log.messageLog
+ Logg.messageLog
}
}
def testOneWayKillMultipleActorsOneForOne = {
- Log.messageLog = ""
+ Logg.messageLog = ""
val sup = getMultipleActorsOneForOneConf
sup ! StartSupervisor
Thread.sleep(500)
@@ -335,12 +334,12 @@ class RemoteSupervisorSpec extends junit.framework.TestCase with Suite {
}
Thread.sleep(500)
expect("DIE") {
- Log.messageLog
+ Logg.messageLog
}
}
def tesOneWayCallKillCallMultipleActorsOneForOne = {
- Log.messageLog = ""
+ Logg.messageLog = ""
val sup = getMultipleActorsOneForOneConf
sup ! StartSupervisor
Thread.sleep(500)
@@ -357,14 +356,14 @@ class RemoteSupervisorSpec extends junit.framework.TestCase with Suite {
}
Thread.sleep(500)
expect("pingpingping") {
- Log.messageLog
+ Logg.messageLog
}
intercept(classOf[RuntimeException]) {
pingpong2 ! BinaryString("Die")
}
Thread.sleep(500)
expect("pingpingpingDIE") {
- Log.messageLog
+ Logg.messageLog
}
expect("pong") {
(pingpong1 ! BinaryString("Ping")).getOrElse("nil")
@@ -379,12 +378,12 @@ class RemoteSupervisorSpec extends junit.framework.TestCase with Suite {
}
Thread.sleep(500)
expect("pingpingpingDIEpingpingping") {
- Log.messageLog
+ Logg.messageLog
}
}
def testOneWayKillMultipleActorsAllForOne = {
- Log.messageLog = ""
+ Logg.messageLog = ""
val sup = getMultipleActorsAllForOneConf
sup ! StartSupervisor
Thread.sleep(500)
@@ -393,12 +392,12 @@ class RemoteSupervisorSpec extends junit.framework.TestCase with Suite {
}
Thread.sleep(500)
expect("DIEDIEDIE") {
- Log.messageLog
+ Logg.messageLog
}
}
def tesOneWayCallKillCallMultipleActorsAllForOne = {
- Log.messageLog = ""
+ Logg.messageLog = ""
val sup = getMultipleActorsAllForOneConf
sup ! StartSupervisor
Thread.sleep(500)
@@ -415,14 +414,14 @@ class RemoteSupervisorSpec extends junit.framework.TestCase with Suite {
}
Thread.sleep(500)
expect("pingpingping") {
- Log.messageLog
+ Logg.messageLog
}
intercept(classOf[RuntimeException]) {
pingpong2 ! BinaryString("Die")
}
Thread.sleep(500)
expect("pingpingpingDIEDIEDIE") {
- Log.messageLog
+ Logg.messageLog
}
expect("pong") {
(pingpong1 ! BinaryString("Ping")).getOrElse("nil")
@@ -437,14 +436,14 @@ class RemoteSupervisorSpec extends junit.framework.TestCase with Suite {
}
Thread.sleep(500)
expect("pingpingpingDIEDIEDIEpingpingping") {
- Log.messageLog
+ Logg.messageLog
}
}
*/
/*
def testNestedSupervisorsTerminateFirstLevelActorAllForOne = {
- Log.messageLog = ""
+ Logg.messageLog = ""
val sup = getNestedSupervisorsAllForOneConf
sup ! StartSupervisor
intercept(classOf[RuntimeException]) {
@@ -452,7 +451,7 @@ class RemoteSupervisorSpec extends junit.framework.TestCase with Suite {
}
Thread.sleep(500)
expect("DIEDIEDIE") {
- Log.messageLog
+ Logg.messageLog
}
}
*/
diff --git a/akka-actors/src/test/scala/SchedulerSpec.scala b/akka-actors/src/test/scala/SchedulerSpec.scala
new file mode 100644
index 0000000000..c1ee474ecf
--- /dev/null
+++ b/akka-actors/src/test/scala/SchedulerSpec.scala
@@ -0,0 +1,25 @@
+package se.scalablesolutions.akka.util
+
+import se.scalablesolutions.akka.actor.Actor
+
+import java.util.concurrent.TimeUnit
+
+import org.junit.Assert._
+
+class SchedulerSpec extends junit.framework.TestCase {
+
+ def testScheduler = {
+ var count = 0
+ case object Tick
+ val actor = new Actor() {
+ def receive: PartialFunction[Any, Unit] = {
+ case Tick => count += 1
+ }}
+ actor.start
+ Thread.sleep(1000)
+ Scheduler.schedule(actor, Tick, 0L, 1L, TimeUnit.SECONDS)
+ Thread.sleep(5000)
+ Scheduler.shutdown
+ assertTrue(count > 0)
+ }
+}
\ No newline at end of file
diff --git a/kernel/src/test/scala/SupervisorSpec.scala b/akka-actors/src/test/scala/SupervisorSpec.scala
similarity index 99%
rename from kernel/src/test/scala/SupervisorSpec.scala
rename to akka-actors/src/test/scala/SupervisorSpec.scala
index d10b987522..f9b77ceac6 100644
--- a/kernel/src/test/scala/SupervisorSpec.scala
+++ b/akka-actors/src/test/scala/SupervisorSpec.scala
@@ -2,10 +2,9 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel
+package se.scalablesolutions.akka.actor
-import kernel.actor.{Supervisor, SupervisorFactory, Actor, StartSupervisor}
-import kernel.config.ScalaConfig._
+import config.ScalaConfig._
//import com.jteigen.scalatest.JUnit4Runner
import org.junit.runner.RunWith
diff --git a/kernel/src/test/scala/ThreadBasedDispatcherTest.scala b/akka-actors/src/test/scala/ThreadBasedDispatcherTest.scala
similarity index 98%
rename from kernel/src/test/scala/ThreadBasedDispatcherTest.scala
rename to akka-actors/src/test/scala/ThreadBasedDispatcherTest.scala
index 1ad5c0b733..a76d02815d 100644
--- a/kernel/src/test/scala/ThreadBasedDispatcherTest.scala
+++ b/akka-actors/src/test/scala/ThreadBasedDispatcherTest.scala
@@ -1,4 +1,4 @@
-package se.scalablesolutions.akka.kernel.reactor
+package se.scalablesolutions.akka.reactor
import java.util.concurrent.CountDownLatch
import java.util.concurrent.TimeUnit
diff --git a/kernel/src/test/scala/TransactionClasherSpec.scala b/akka-actors/src/test/scala/TransactionClasherSpec.scala
similarity index 95%
rename from kernel/src/test/scala/TransactionClasherSpec.scala
rename to akka-actors/src/test/scala/TransactionClasherSpec.scala
index d955331225..5ef047198c 100644
--- a/kernel/src/test/scala/TransactionClasherSpec.scala
+++ b/akka-actors/src/test/scala/TransactionClasherSpec.scala
@@ -1,16 +1,12 @@
-package se.scalablesolutions.akka.kernel.actor
+package se.scalablesolutions.akka.actor
import junit.framework.TestCase
-import kernel.stm.TransactionRollbackException
+import stm.TransactionRollbackException
import org.junit.{Test, Before}
import org.junit.Assert._
-import kernel.state.TransactionalState
-
-object Log {
- var log = ""
-}
+import state.TransactionalState
class TxActor(clasher: Actor) extends Actor {
timeout = 1000000
diff --git a/akka-amqp/pom.xml b/akka-amqp/pom.xml
new file mode 100644
index 0000000000..5199ed0dc3
--- /dev/null
+++ b/akka-amqp/pom.xml
@@ -0,0 +1,40 @@
+
+ 4.0.0
+
+ akka-amqp
+ Akka AMQP Module
+
+ jar
+
+
+ akka
+ se.scalablesolutions.akka
+ 0.6
+ ../pom.xml
+
+
+
+
+ akka-util
+ se.scalablesolutions.akka
+ 0.6
+
+
+ akka-actors
+ se.scalablesolutions.akka
+ 0.6
+
+
+ com.rabbitmq
+ rabbitmq-client
+ 0.9.1
+
+
+ commons-io
+ commons-io
+ 1.4
+
+
+
+
diff --git a/akka-amqp/src/main/scala/AMQP.scala b/akka-amqp/src/main/scala/AMQP.scala
new file mode 100644
index 0000000000..361d26a175
--- /dev/null
+++ b/akka-amqp/src/main/scala/AMQP.scala
@@ -0,0 +1,374 @@
+/**
+ * Copyright (C) 2009 Scalable Solutions.
+ */
+
+package se.scalablesolutions.akka.amqp
+
+import com.rabbitmq.client.{AMQP => RabbitMQ, _}
+import com.rabbitmq.client.ConnectionFactory
+
+import actor.{OneForOneStrategy, Actor}
+import config.ScalaConfig._
+import util.{HashCode, Logging}
+import serialization.Serializer
+
+import scala.collection.mutable.HashMap
+
+import org.scala_tools.javautils.Imports._
+
+import java.util.concurrent.ConcurrentHashMap
+import java.util.{Timer, TimerTask}
+import java.io.IOException
+
+/**
+ * AMQP Actor API. Implements Client and Endpoint materialized as Actors.
+ *
+ *
+ * val endpoint = AMQP.newEndpoint(CONFIG, HOSTNAME, PORT, EXCHANGE, ExchangeType.Direct, Serializer.Java, None, 100)
+ *
+ * endpoint ! MessageConsumer(QUEUE, ROUTING_KEY, new Actor() {
+ * def receive: PartialFunction[Any, Unit] = {
+ * case Message(payload, _, _, _, _) => log.debug("Received message: %s", payload)
+ * }
+ * })
+ *
+ * val client = AMQP.newClient(CONFIG, HOSTNAME, PORT, EXCHANGE, Serializer.Java, None, None, 100)
+ * client ! Message("Hi", ROUTING_KEY)
+ *
+ *
+ * @author Jonas Bonér
+ */
+object AMQP extends Actor {
+ private val connections = new ConcurrentHashMap[FaultTolerantConnectionActor, FaultTolerantConnectionActor]
+ faultHandler = Some(OneForOneStrategy(5, 5000))
+ trapExit = true
+ start
+
+ // ====== MESSAGES =====
+ class Message(val payload: AnyRef, val routingKey: String, val mandatory: Boolean, val immediate: Boolean, val properties: RabbitMQ.BasicProperties) {
+ override def toString(): String = "Message[payload=" + payload + ", routingKey=" + routingKey + ", properties=" + properties + "]"
+ }
+ object Message {
+ def unapply(message: Message): Option[Tuple5[AnyRef, String, Boolean, Boolean, RabbitMQ.BasicProperties]] =
+ Some((message.payload, message.routingKey, message.mandatory, message.immediate, message.properties))
+ def apply(payload: AnyRef, routingKey: String, mandatory: Boolean, immediate: Boolean, properties: RabbitMQ.BasicProperties): Message =
+ new Message(payload, routingKey, mandatory, immediate, properties)
+ def apply(payload: AnyRef, routingKey: String): Message =
+ new Message(payload, routingKey, false, false, null)
+ }
+
+ case class MessageConsumer(queueName: String, routingKey: String, actor: Actor) {
+ var tag: Option[String] = None
+
+ override def toString(): String = "MessageConsumer[actor=" + actor + ", queue=" + queueName + ", routingKey=" + routingKey + "]"
+
+ override def hashCode(): Int = synchronized {
+ var result = HashCode.SEED
+ result = HashCode.hash(result, queueName)
+ result = HashCode.hash(result, routingKey)
+ result
+ }
+
+ override def equals(that: Any): Boolean = synchronized {
+ that != null &&
+ that.isInstanceOf[MessageConsumer] &&
+ that.asInstanceOf[MessageConsumer].queueName== queueName &&
+ that.asInstanceOf[MessageConsumer].routingKey == routingKey
+ }
+ }
+
+ case class CancelMessageConsumer(consumer: MessageConsumer)
+ case class Reconnect(delay: Long)
+ case class Failure(cause: Throwable)
+ case object Stop
+ // ===================
+
+ class MessageNotDeliveredException(
+ val message: String,
+ val replyCode: Int,
+ val replyText: String,
+ val exchange: String,
+ val routingKey: String,
+ val properties: RabbitMQ.BasicProperties,
+ val body: Array[Byte]) extends RuntimeException(message)
+
+ sealed trait ExchangeType
+ object ExchangeType {
+ case object Direct extends ExchangeType {
+ override def toString = "direct"
+ }
+ case object Topic extends ExchangeType {
+ override def toString = "topic"
+ }
+ case object Fanout extends ExchangeType {
+ override def toString = "fanout"
+ }
+ case object Match extends ExchangeType {
+ override def toString = "match"
+ }
+ }
+
+ def newClient(
+ config: ConnectionParameters,
+ hostname: String,
+ port: Int,
+ exchangeName: String,
+ serializer: Serializer,
+ returnListener: Option[ReturnListener],
+ shutdownListener: Option[ShutdownListener],
+ initReconnectDelay: Long): Client = {
+ val client = new Client(
+ new ConnectionFactory(config),
+ hostname, port,
+ exchangeName,
+ serializer,
+ returnListener,
+ shutdownListener,
+ initReconnectDelay)
+ startLink(client)
+ client
+ }
+
+ def newEndpoint(
+ config: ConnectionParameters,
+ hostname: String,
+ port: Int,
+ exchangeName: String,
+ exchangeType: ExchangeType,
+ serializer: Serializer,
+ shutdownListener: Option[ShutdownListener],
+ initReconnectDelay: Long): Endpoint = {
+ val endpoint = new Endpoint(
+ new ConnectionFactory(config),
+ hostname, port,
+ exchangeName,
+ exchangeType,
+ serializer,
+ shutdownListener,
+ initReconnectDelay)
+ startLink(endpoint)
+ endpoint
+ }
+
+ def stopConnection(connection: FaultTolerantConnectionActor) = {
+ connection ! Stop
+ unlink(connection)
+ connections.remove(connection)
+ }
+
+ override def shutdown = {
+ connections.values.asScala.foreach(_ ! Stop)
+ stop
+ }
+
+ /**
+ * AMQP client actor.
+ * Usage:
+ *
+ * val params = new ConnectionParameters
+ * params.setUsername("barack")
+ * params.setPassword("obama")
+ * params.setVirtualHost("/")
+ * params.setRequestedHeartbeat(0)
+ * val client = AMQP.newClient(params, "localhost", 5672, "exchangeName", Serializer.Java, None, None, 100)
+ * client ! Message("hi")
+ *
+ *
+ * @author Jonas Bonér
+ */
+ class Client private[amqp] (
+ val connectionFactory: ConnectionFactory,
+ val hostname: String,
+ val port: Int,
+ val exchangeName: String,
+ val serializer: Serializer,
+ val returnListener: Option[ReturnListener],
+ val shutdownListener: Option[ShutdownListener],
+ val initReconnectDelay: Long)
+ extends FaultTolerantConnectionActor {
+
+ setupChannel
+
+ log.info("AMQP.Client [%s] is started", toString)
+
+ def receive: PartialFunction[Any, Unit] = {
+ case message @ Message(payload, routingKey, mandatory, immediate, properties) =>
+ log.debug("Sending message [%s]", message)
+ channel.basicPublish(exchangeName, routingKey, mandatory, immediate, properties, serializer.out(payload))
+ case Stop =>
+ disconnect; stop
+ }
+
+ def setupChannel = {
+ connection = connectionFactory.newConnection(hostname, port)
+ channel = connection.createChannel
+ returnListener match {
+ case Some(listener) => channel.setReturnListener(listener)
+ case None => channel.setReturnListener(new ReturnListener() {
+ def handleBasicReturn(
+ replyCode: Int,
+ replyText: String,
+ exchange: String,
+ routingKey: String,
+ properties: RabbitMQ.BasicProperties,
+ body: Array[Byte]) = {
+ throw new MessageNotDeliveredException(
+ "Could not deliver message [" + body +
+ "] with reply code [" + replyCode +
+ "] with reply text [" + replyText +
+ "] and routing key [" + routingKey +
+ "] to exchange [" + exchange + "]",
+ replyCode, replyText, exchange, routingKey, properties, body)
+ }
+ })
+ }
+ if (shutdownListener.isDefined) connection.addShutdownListener(shutdownListener.get)
+ }
+
+ override def toString(): String = "AMQP.Client[hostname=" + hostname + ", port=" + port + ", exchange=" + exchangeName + "]"
+ }
+
+ /**
+ * @author Jonas Bonér
+ */
+ class Endpoint private[amqp] (
+ val connectionFactory: ConnectionFactory,
+ val hostname: String,
+ val port: Int,
+ exchangeName: String,
+ exchangeType: ExchangeType,
+ serializer: Serializer,
+ shutdownListener: Option[ShutdownListener],
+ val initReconnectDelay: Long)
+ extends FaultTolerantConnectionActor {
+
+ faultHandler = Some(OneForOneStrategy(5, 5000))
+ trapExit = true
+
+ val consumers = new HashMap[MessageConsumer, MessageConsumer]
+ val endpoint = this
+
+ setupChannel
+
+ log.info("AMQP.Endpoint [%s] is started", toString)
+
+ def setupChannel = {
+ connection = connectionFactory.newConnection(hostname, port)
+ channel = connection.createChannel
+ channel.exchangeDeclare(exchangeName, exchangeType.toString)
+ consumers.elements.toList.map(_._2).foreach(setupConsumer)
+ if (shutdownListener.isDefined) connection.addShutdownListener(shutdownListener.get)
+ }
+
+ def setupConsumer(consumer: MessageConsumer) = {
+ channel.queueDeclare(consumer.queueName)
+ channel.queueBind(consumer.queueName, exchangeName, consumer.routingKey)
+
+ val consumerTag = channel.basicConsume(consumer.queueName, false, new DefaultConsumer(channel) with Logging {
+ override def handleDelivery(tag: String, envelope: Envelope, properties: RabbitMQ.BasicProperties, payload: Array[Byte]) {
+ try {
+ consumer.actor ! Message(serializer.in(payload, None), envelope.getRoutingKey)
+ channel.basicAck(envelope.getDeliveryTag, false)
+ } catch {
+ case cause => endpoint ! Failure(cause) // pass on and rethrow exception in endpoint actor to trigger restart and reconnect
+ }
+ }
+
+ override def handleShutdownSignal(consumerTag: String, signal: ShutdownSignalException) = {
+ consumers.elements.toList.map(_._2).find(_.tag == consumerTag) match {
+ case None => log.warning("Could not find message consumer for tag [%s]; can't shut consumer down", consumerTag)
+ case Some(consumer) =>
+ log.warning("Message consumer [%s] is being shutdown by [%s] due to [%s]", consumer, signal.getReference, signal.getReason)
+ endpoint ! CancelMessageConsumer(consumer)
+ }
+ }
+ })
+ consumer.tag = Some(consumerTag)
+ }
+
+ def receive: PartialFunction[Any, Unit] = {
+ case consumer: MessageConsumer =>
+ startLink(consumer.actor)
+ consumers.put(consumer, consumer)
+ setupConsumer(consumer)
+ log.info("Message consumer is registered [%s]", consumer)
+
+ case CancelMessageConsumer(hash) =>
+ consumers.get(hash) match {
+ case None => log.warning("Can't unregister message consumer [%s]; no such consumer", hash)
+ case Some(consumer) =>
+ consumers - consumer
+ consumer.tag match {
+ case None => log.warning("Can't unregister message consumer [%s]; no consumer tag", consumer)
+ case Some(tag) =>
+ channel.basicCancel(tag)
+ unlink(consumer.actor)
+ consumer.actor.stop
+ log.info("Message consumer is cancelled and shut down [%s]", consumer)
+ }
+ }
+
+ case Reconnect(delay) => reconnect(delay)
+ case Failure(cause) => log.error(cause, ""); throw cause
+ case Stop => disconnect; stop
+ case unknown => throw new IllegalArgumentException("Unknown message [" + unknown + "] to AMQP Endpoint [" + this + "]")
+ }
+
+ override def toString(): String = "AMQP.Endpoint[hostname=" + hostname + ", port=" + port + ", exchange=" + exchangeName + ", type=" + exchangeType + "]"
+ }
+
+ trait FaultTolerantConnectionActor extends Actor {
+ lifeCycleConfig = Some(LifeCycle(Permanent, 100))
+
+ val reconnectionTimer = new Timer
+
+ var connection: Connection = _
+ var channel: Channel = _
+
+ val connectionFactory: ConnectionFactory
+ val hostname: String
+ val port: Int
+ val initReconnectDelay: Long
+
+ def setupChannel
+
+ protected def disconnect = {
+ try {
+ channel.close
+ } catch {
+ case e: IOException => log.error("Could not close AMQP channel %s:%s [%s]", hostname, port, this)
+ case _ => ()
+ }
+ try {
+ connection.close
+ log.debug("Disconnected AMQP connection at %s:%s [%s]", hostname, port, this)
+ } catch {
+ case e: IOException => log.error("Could not close AMQP connection %s:%s [%s]", hostname, port, this)
+ case _ => ()
+ }
+ }
+
+ protected def reconnect(delay: Long) = {
+ disconnect
+ try {
+ setupChannel
+ log.debug("Successfully reconnected to AMQP Server %s:%s [%s]", hostname, port, this)
+ } catch {
+ case e: Exception =>
+ val waitInMillis = delay * 2
+ val self = this
+ log.debug("Trying to reconnect to AMQP server in %s milliseconds [%s]", waitInMillis, this)
+ reconnectionTimer.schedule(new TimerTask() {
+ override def run = self ! Reconnect(waitInMillis)
+ }, delay)
+ }
+ }
+
+ override def preRestart(reason: AnyRef, config: Option[AnyRef]) = disconnect
+ override def postRestart(reason: AnyRef, config: Option[AnyRef]) = reconnect(initReconnectDelay)
+ }
+
+ def receive: PartialFunction[Any, Unit] = {
+ case _ => {} // ignore all messages
+ }
+}
diff --git a/akka-amqp/src/main/scala/ExampleSession.scala b/akka-amqp/src/main/scala/ExampleSession.scala
new file mode 100644
index 0000000000..b3ec9a22c1
--- /dev/null
+++ b/akka-amqp/src/main/scala/ExampleSession.scala
@@ -0,0 +1,57 @@
+/**
+ * Copyright (C) 2009 Scalable Solutions.
+ */
+
+package se.scalablesolutions.akka.amqp
+
+import akka.serialization.Serializer
+import com.rabbitmq.client.ConnectionParameters
+import actor.Actor
+
+object ExampleSession {
+ import AMQP._
+ val SERIALIZER = Serializer.Java
+ val CONFIG = new ConnectionParameters
+ val HOSTNAME = "localhost"
+ val PORT = 5672
+
+ val IM = "im.whitehouse.gov"
+ val CHAT = "chat.whitehouse.gov"
+
+ def main(args: Array[String]) = {
+ println("==== DIRECT ===")
+ direct
+
+ Thread.sleep(1000)
+
+ println("==== FANOUT ===")
+ fanout
+ }
+
+ def direct = {
+ val endpoint = AMQP.newEndpoint(CONFIG, HOSTNAME, PORT, IM, ExchangeType.Direct, SERIALIZER, None, 100)
+ endpoint ! MessageConsumer("@george_bush", "direct", new Actor() {
+ def receive: PartialFunction[Any, Unit] = {
+ case Message(payload, _, _, _, _) => log.info("@george_bush received message from: %s", payload)
+ }
+ })
+ val client = AMQP.newClient(CONFIG, HOSTNAME, PORT, IM, SERIALIZER, None, None, 100)
+ client ! Message("@jonas_boner: You sucked!!", "direct")
+ }
+
+ def fanout = {
+ val endpoint = AMQP.newEndpoint(CONFIG, HOSTNAME, PORT, CHAT, ExchangeType.Fanout, SERIALIZER, None, 100)
+ endpoint ! MessageConsumer("@george_bush", "", new Actor() {
+ def receive: PartialFunction[Any, Unit] = {
+ case Message(payload, _, _, _, _) => log.info("@george_bush received message from: %s", payload)
+ }
+ })
+ endpoint ! MessageConsumer("@barack_obama", "", new Actor() {
+ def receive: PartialFunction[Any, Unit] = {
+ case Message(payload, _, _, _, _) => log.info("@barack_obama received message from: %s", payload)
+ }
+ })
+ val client = AMQP.newClient(CONFIG, HOSTNAME, PORT, CHAT, SERIALIZER, None, None, 100)
+ client ! Message("@jonas_boner: I'm going surfing", "")
+ }
+}
\ No newline at end of file
diff --git a/akka-camel/pom.xml b/akka-camel/pom.xml
new file mode 100644
index 0000000000..797b8fef53
--- /dev/null
+++ b/akka-camel/pom.xml
@@ -0,0 +1,48 @@
+
+ 4.0.0
+
+ akka-camel
+ Akka Camel Module
+
+ jar
+
+
+ akka
+ se.scalablesolutions.akka
+ 0.6
+ ../pom.xml
+
+
+
+
+
+ akka-util
+ se.scalablesolutions.akka
+ 0.6
+
+
+ akka-actors
+ se.scalablesolutions.akka
+ 0.6
+
+
+ org.apache.camel
+ camel-core
+ 2.0-SNAPSHOT
+
+
+
+
+
+
+ false
+ src/main/resources
+
+ META-INF/*
+
+
+
+
+
diff --git a/kernel/src/main/resources/META-INF/services/org/apache/camel/component/akka b/akka-camel/src/main/resources/META-INF/services/org/apache/camel/component/akka
similarity index 100%
rename from kernel/src/main/resources/META-INF/services/org/apache/camel/component/akka
rename to akka-camel/src/main/resources/META-INF/services/org/apache/camel/component/akka
diff --git a/kernel/src/main/scala/camel/ActiveObjectComponent.scala b/akka-camel/src/main/scala/ActiveObjectComponent.scala
similarity index 93%
rename from kernel/src/main/scala/camel/ActiveObjectComponent.scala
rename to akka-camel/src/main/scala/ActiveObjectComponent.scala
index 39489156f9..f95fd3c2ed 100644
--- a/kernel/src/main/scala/camel/ActiveObjectComponent.scala
+++ b/akka-camel/src/main/scala/ActiveObjectComponent.scala
@@ -2,7 +2,7 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.camel
+package se.scalablesolutions.akka.camel
import config.ActiveObjectConfigurator
diff --git a/kernel/src/main/scala/camel/ActiveObjectConsumer.scala b/akka-camel/src/main/scala/ActiveObjectConsumer.scala
similarity index 92%
rename from kernel/src/main/scala/camel/ActiveObjectConsumer.scala
rename to akka-camel/src/main/scala/ActiveObjectConsumer.scala
index e7be8b0dd9..f9f187de45 100644
--- a/kernel/src/main/scala/camel/ActiveObjectConsumer.scala
+++ b/akka-camel/src/main/scala/ActiveObjectConsumer.scala
@@ -2,11 +2,11 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.camel
+package se.scalablesolutions.akka.camel
import java.util.concurrent.{BlockingQueue, ExecutorService, Executors, ThreadFactory, TimeUnit}
-import kernel.util.Logging
+import util.Logging
import org.apache.camel.{AsyncCallback, AsyncProcessor, Consumer, Exchange, Processor}
import org.apache.camel.impl.ServiceSupport
diff --git a/kernel/src/main/scala/camel/ActiveObjectEndpoint.scala b/akka-camel/src/main/scala/ActiveObjectEndpoint.scala
similarity index 94%
rename from kernel/src/main/scala/camel/ActiveObjectEndpoint.scala
rename to akka-camel/src/main/scala/ActiveObjectEndpoint.scala
index 4f9792a2d0..3999c0b897 100644
--- a/kernel/src/main/scala/camel/ActiveObjectEndpoint.scala
+++ b/akka-camel/src/main/scala/ActiveObjectEndpoint.scala
@@ -2,10 +2,10 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.camel
+package se.scalablesolutions.akka.camel
-import kernel.config.ActiveObjectConfigurator
-import kernel.util.Logging
+import config.ActiveObjectConfigurator
+import util.Logging
import java.util.{ArrayList, HashSet, List, Set}
import java.util.concurrent.{BlockingQueue, CopyOnWriteArraySet, LinkedBlockingQueue}
diff --git a/kernel/src/main/scala/camel/ActiveObjectProducer.scala b/akka-camel/src/main/scala/ActiveObjectProducer.scala
similarity index 93%
rename from kernel/src/main/scala/camel/ActiveObjectProducer.scala
rename to akka-camel/src/main/scala/ActiveObjectProducer.scala
index f2af00517c..9494510097 100644
--- a/kernel/src/main/scala/camel/ActiveObjectProducer.scala
+++ b/akka-camel/src/main/scala/ActiveObjectProducer.scala
@@ -2,10 +2,10 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.camel
+package se.scalablesolutions.akka.camel
import java.util.Collection
-import kernel.util.Logging;
+import util.Logging;
import java.util.concurrent.BlockingQueue;
import org.apache.camel.{Exchange, AsyncProcessor, AsyncCallback}
diff --git a/kernel/src/main/scala/config/CamelConfigurator.scala b/akka-camel/src/main/scala/CamelConfigurator.scala
similarity index 94%
rename from kernel/src/main/scala/config/CamelConfigurator.scala
rename to akka-camel/src/main/scala/CamelConfigurator.scala
index 9cebafed9b..680680f188 100644
--- a/kernel/src/main/scala/config/CamelConfigurator.scala
+++ b/akka-camel/src/main/scala/CamelConfigurator.scala
@@ -2,7 +2,7 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.config
+package se.scalablesolutions.akka.config
import org.apache.camel.{Routes, CamelContext, Endpoint}
diff --git a/kernel/src/main/scala/camel/MessageDriven.scala b/akka-camel/src/main/scala/MessageDriven.scala
similarity index 82%
rename from kernel/src/main/scala/camel/MessageDriven.scala
rename to akka-camel/src/main/scala/MessageDriven.scala
index 889ddafeff..3e73a4101b 100644
--- a/kernel/src/main/scala/camel/MessageDriven.scala
+++ b/akka-camel/src/main/scala/MessageDriven.scala
@@ -2,7 +2,7 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.camel
+package se.scalablesolutions.akka.camel
import org.apache.camel.Exchange
diff --git a/kernel/src/main/scala/camel/SupervisorAwareCamelContext.scala b/akka-camel/src/main/scala/SupervisorAwareCamelContext.scala
similarity index 75%
rename from kernel/src/main/scala/camel/SupervisorAwareCamelContext.scala
rename to akka-camel/src/main/scala/SupervisorAwareCamelContext.scala
index b56d3f8d82..4b9ee8b41d 100644
--- a/kernel/src/main/scala/camel/SupervisorAwareCamelContext.scala
+++ b/akka-camel/src/main/scala/SupervisorAwareCamelContext.scala
@@ -2,10 +2,10 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.camel
+package se.scalablesolutions.akka.camel
-import kernel.actor.Supervisor
-import kernel.util.Logging
+import actor.Supervisor
+import util.Logging
import org.apache.camel.impl.{DefaultCamelContext, DefaultEndpoint, DefaultComponent}
/**
diff --git a/fun-test-java/pom.xml b/akka-fun-test-java/pom.xml
similarity index 100%
rename from fun-test-java/pom.xml
rename to akka-fun-test-java/pom.xml
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/ActiveObjectGuiceConfiguratorTest.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/ActiveObjectGuiceConfiguratorTest.java
similarity index 88%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/ActiveObjectGuiceConfiguratorTest.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/ActiveObjectGuiceConfiguratorTest.java
index fdc2a48c98..46538d0c48 100644
--- a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/ActiveObjectGuiceConfiguratorTest.java
+++ b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/ActiveObjectGuiceConfiguratorTest.java
@@ -9,18 +9,19 @@ import com.google.inject.Scopes;
import junit.framework.TestCase;
-import se.scalablesolutions.akka.kernel.reactor.EventBasedThreadPoolDispatcher;
-import static se.scalablesolutions.akka.kernel.config.JavaConfig.*;
+import se.scalablesolutions.akka.Config;
+import se.scalablesolutions.akka.reactor.EventBasedThreadPoolDispatcher;
+import static se.scalablesolutions.akka.config.JavaConfig.*;
import java.util.concurrent.ThreadPoolExecutor;
public class ActiveObjectGuiceConfiguratorTest extends TestCase {
static String messageLog = "";
- final private se.scalablesolutions.akka.kernel.config.ActiveObjectManager conf = new se.scalablesolutions.akka.kernel.config.ActiveObjectManager();
+ final private se.scalablesolutions.akka.config.ActiveObjectManager conf = new se.scalablesolutions.akka.config.ActiveObjectManager();
protected void setUp() {
- se.scalablesolutions.akka.kernel.Kernel$.MODULE$.config();
+ Config.config();
EventBasedThreadPoolDispatcher dispatcher = new EventBasedThreadPoolDispatcher("name");
dispatcher
.withNewThreadPoolWithBoundedBlockingQueue(100)
@@ -103,7 +104,7 @@ public class ActiveObjectGuiceConfiguratorTest extends TestCase {
try {
foo.longRunning();
fail("exception should have been thrown");
- } catch (se.scalablesolutions.akka.kernel.reactor.FutureTimeoutException e) {
+ } catch (se.scalablesolutions.akka.reactor.FutureTimeoutException e) {
}
}
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/AllTest.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/AllTest.java
similarity index 100%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/AllTest.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/AllTest.java
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/Bar.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/Bar.java
similarity index 100%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/Bar.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/Bar.java
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/BarImpl.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/BarImpl.java
similarity index 100%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/BarImpl.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/BarImpl.java
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/Ext.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/Ext.java
similarity index 100%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/Ext.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/Ext.java
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/ExtImpl.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/ExtImpl.java
similarity index 100%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/ExtImpl.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/ExtImpl.java
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/Foo.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/Foo.java
similarity index 100%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/Foo.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/Foo.java
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemFailer.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemFailer.java
similarity index 100%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemFailer.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemFailer.java
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemNestedStateTest.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemNestedStateTest.java
similarity index 95%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemNestedStateTest.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemNestedStateTest.java
index 965637ca73..91f097036a 100644
--- a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemNestedStateTest.java
+++ b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemNestedStateTest.java
@@ -4,10 +4,11 @@
package se.scalablesolutions.akka.api;
-import se.scalablesolutions.akka.kernel.config.*;
-import static se.scalablesolutions.akka.kernel.config.JavaConfig.*;
-import se.scalablesolutions.akka.kernel.actor.*;
-import se.scalablesolutions.akka.kernel.Kernel;
+import se.scalablesolutions.akka.Config;
+import se.scalablesolutions.akka.config.*;
+import static se.scalablesolutions.akka.config.JavaConfig.*;
+import se.scalablesolutions.akka.actor.*;
+import se.scalablesolutions.akka.Kernel;
import junit.framework.TestCase;
public class InMemNestedStateTest extends TestCase {
@@ -26,7 +27,7 @@ public class InMemNestedStateTest extends TestCase {
new Component(InMemFailer.class, new LifeCycle(new Permanent(), 1000), 1000)
//new Component("inmem-clasher", InMemClasher.class, InMemClasherImpl.class, new LifeCycle(new Permanent(), 1000), 100000)
}).inject().supervise();
- se.scalablesolutions.akka.kernel.Kernel$.MODULE$.config();
+ Config.config();
}
protected void tearDown() {
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemStateful.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemStateful.java
similarity index 91%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemStateful.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemStateful.java
index 2ee03a893e..5f836942ba 100644
--- a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemStateful.java
+++ b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemStateful.java
@@ -3,14 +3,14 @@ package se.scalablesolutions.akka.api;
import se.scalablesolutions.akka.annotation.transactionrequired;
import se.scalablesolutions.akka.annotation.prerestart;
import se.scalablesolutions.akka.annotation.postrestart;
-import se.scalablesolutions.akka.kernel.state.*;
+import se.scalablesolutions.akka.state.*;
@transactionrequired
public class InMemStateful {
private TransactionalState factory = new TransactionalState();
- private TransactionalMap mapState = factory.newInMemoryMap();
- private TransactionalVector vectorState = factory.newInMemoryVector();
- private TransactionalRef refState = factory.newInMemoryRef();
+ private TransactionalMap mapState = factory.newMap();
+ private TransactionalVector vectorState = factory.newVector();
+ private TransactionalRef refState = factory.newRef();
public String getMapState(String key) {
return (String)mapState.get(key).get();
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemStatefulNested.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemStatefulNested.java
similarity index 87%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemStatefulNested.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemStatefulNested.java
index 0f6921f28b..ba13be243c 100644
--- a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemStatefulNested.java
+++ b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemStatefulNested.java
@@ -1,14 +1,14 @@
package se.scalablesolutions.akka.api;
import se.scalablesolutions.akka.annotation.transactionrequired;
-import se.scalablesolutions.akka.kernel.state.*;
+import se.scalablesolutions.akka.state.*;
@transactionrequired
public class InMemStatefulNested {
private TransactionalState factory = new TransactionalState();
- private TransactionalMap mapState = factory.newInMemoryMap();
- private TransactionalVector vectorState = factory.newInMemoryVector();
- private TransactionalRef refState = factory.newInMemoryRef();
+ private TransactionalMap mapState = factory.newMap();
+ private TransactionalVector vectorState = factory.newVector();
+ private TransactionalRef refState = factory.newRef();
public String getMapState(String key) {
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemoryStateTest.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemoryStateTest.java
similarity index 96%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemoryStateTest.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemoryStateTest.java
index 866070a47e..ff1c789e1d 100644
--- a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemoryStateTest.java
+++ b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/InMemoryStateTest.java
@@ -6,10 +6,11 @@ package se.scalablesolutions.akka.api;
import junit.framework.TestCase;
-import se.scalablesolutions.akka.kernel.config.*;
-import static se.scalablesolutions.akka.kernel.config.JavaConfig.*;
-import se.scalablesolutions.akka.kernel.actor.*;
-import se.scalablesolutions.akka.kernel.Kernel;
+import se.scalablesolutions.akka.Config;
+import se.scalablesolutions.akka.config.*;
+import static se.scalablesolutions.akka.config.JavaConfig.*;
+import se.scalablesolutions.akka.actor.*;
+import se.scalablesolutions.akka.Kernel;
public class InMemoryStateTest extends TestCase {
static String messageLog = "";
@@ -18,7 +19,7 @@ public class InMemoryStateTest extends TestCase {
protected void setUp() {
- se.scalablesolutions.akka.kernel.Kernel$.MODULE$.config();
+ Config.config();
conf.configure(
new RestartStrategy(new AllForOne(), 3, 5000),
new Component[]{
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/JerseyFoo.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/JerseyFoo.java
similarity index 100%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/JerseyFoo.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/JerseyFoo.java
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistenceManager.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistenceManager.java
similarity index 52%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistenceManager.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistenceManager.java
index 8e498368d5..49cc8c0d0a 100644
--- a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistenceManager.java
+++ b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistenceManager.java
@@ -4,9 +4,7 @@ public class PersistenceManager {
private static volatile boolean isRunning = false;
public static void init() {
if (!isRunning) {
- se.scalablesolutions.akka.kernel.Kernel.config();
- se.scalablesolutions.akka.kernel.Kernel.startCassandra();
- se.scalablesolutions.akka.kernel.Kernel.startRemoteService();
+ se.scalablesolutions.akka.Kernel.startRemoteService();
isRunning = true;
}
}
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentClasher.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentClasher.java
similarity index 84%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentClasher.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentClasher.java
index cf6cce7e68..1787548806 100644
--- a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentClasher.java
+++ b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentClasher.java
@@ -1,7 +1,7 @@
package se.scalablesolutions.akka.api;
-import se.scalablesolutions.akka.kernel.state.TransactionalMap;
-import se.scalablesolutions.akka.kernel.state.CassandraPersistentTransactionalMap;
+import se.scalablesolutions.akka.state.TransactionalMap;
+import se.scalablesolutions.akka.state.CassandraPersistentTransactionalMap;
public class PersistentClasher {
private TransactionalMap state = new CassandraPersistentTransactionalMap();
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentFailer.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentFailer.java
similarity index 100%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentFailer.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentFailer.java
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentNestedStateTest.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentNestedStateTest.java
similarity index 96%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentNestedStateTest.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentNestedStateTest.java
index 04b6d47673..03e06fb7b0 100644
--- a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentNestedStateTest.java
+++ b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentNestedStateTest.java
@@ -4,10 +4,10 @@
package se.scalablesolutions.akka.api;
-import se.scalablesolutions.akka.kernel.config.*;
-import static se.scalablesolutions.akka.kernel.config.JavaConfig.*;
-import se.scalablesolutions.akka.kernel.actor.*;
-import se.scalablesolutions.akka.kernel.Kernel;
+import se.scalablesolutions.akka.config.*;
+import static se.scalablesolutions.akka.config.JavaConfig.*;
+import se.scalablesolutions.akka.actor.*;
+import se.scalablesolutions.akka.Kernel;
import junit.framework.TestCase;
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStateTest.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStateTest.java
similarity index 95%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStateTest.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStateTest.java
index 1145a68389..98b6b0d892 100644
--- a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStateTest.java
+++ b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStateTest.java
@@ -4,10 +4,10 @@
package se.scalablesolutions.akka.api;
-import se.scalablesolutions.akka.kernel.config.*;
-import static se.scalablesolutions.akka.kernel.config.JavaConfig.*;
-import se.scalablesolutions.akka.kernel.actor.*;
-import se.scalablesolutions.akka.kernel.Kernel;
+import se.scalablesolutions.akka.config.*;
+import static se.scalablesolutions.akka.config.JavaConfig.*;
+import se.scalablesolutions.akka.actor.*;
+import se.scalablesolutions.akka.Kernel;
import junit.framework.TestCase;
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStateful.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStateful.java
similarity index 80%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStateful.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStateful.java
index 7561099270..3acf773644 100644
--- a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStateful.java
+++ b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStateful.java
@@ -1,14 +1,14 @@
package se.scalablesolutions.akka.api;
import se.scalablesolutions.akka.annotation.transactionrequired;
-import se.scalablesolutions.akka.kernel.state.*;
+import se.scalablesolutions.akka.state.*;
@transactionrequired
public class PersistentStateful {
- private TransactionalState factory = new TransactionalState();
- private TransactionalMap mapState = factory.newPersistentMap(new CassandraStorageConfig());
- private TransactionalVector vectorState = factory.newPersistentVector(new CassandraStorageConfig());;
- private TransactionalRef refState = factory.newPersistentRef(new CassandraStorageConfig());
+ private PersistentState factory = new PersistentState();
+ private TransactionalMap mapState = factory.newMap(new CassandraStorageConfig());
+ private TransactionalVector vectorState = factory.newVector(new CassandraStorageConfig());
+ private TransactionalRef refState = factory.newRef(new CassandraStorageConfig());
public String getMapState(String key) {
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStatefulNested.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStatefulNested.java
similarity index 75%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStatefulNested.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStatefulNested.java
index ac46efb051..6f26427118 100644
--- a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStatefulNested.java
+++ b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/PersistentStatefulNested.java
@@ -1,14 +1,14 @@
package se.scalablesolutions.akka.api;
import se.scalablesolutions.akka.annotation.transactionrequired;
-import se.scalablesolutions.akka.kernel.state.*;
+import se.scalablesolutions.akka.state.*;
@transactionrequired
public class PersistentStatefulNested {
- private TransactionalState factory = new TransactionalState();
- private TransactionalMap mapState = factory.newPersistentMap(new CassandraStorageConfig());
- private TransactionalVector vectorState = factory.newPersistentVector(new CassandraStorageConfig());;
- private TransactionalRef refState = factory.newPersistentRef(new CassandraStorageConfig());
+ private PersistentState factory = new PersistentState();
+ private TransactionalMap mapState = factory.newMap(new CassandraStorageConfig());
+ private TransactionalVector vectorState = factory.newVector(new CassandraStorageConfig());
+ private TransactionalRef refState = factory.newRef(new CassandraStorageConfig());
public String getMapState(String key) {
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/ProtobufProtocol.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/ProtobufProtocol.java
similarity index 100%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/ProtobufProtocol.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/ProtobufProtocol.java
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/ProtobufProtocol.proto b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/ProtobufProtocol.proto
similarity index 100%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/ProtobufProtocol.proto
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/ProtobufProtocol.proto
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/ProtobufSerializationTest.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/ProtobufSerializationTest.java
similarity index 100%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/ProtobufSerializationTest.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/ProtobufSerializationTest.java
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/RemoteInMemoryStateTest.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/RemoteInMemoryStateTest.java
similarity index 94%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/RemoteInMemoryStateTest.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/RemoteInMemoryStateTest.java
index 5dd75cf826..b663cf7903 100644
--- a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/RemoteInMemoryStateTest.java
+++ b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/RemoteInMemoryStateTest.java
@@ -4,8 +4,10 @@
package se.scalablesolutions.akka.api;
-import se.scalablesolutions.akka.kernel.actor.ActiveObjectFactory;
-import se.scalablesolutions.akka.kernel.nio.RemoteServer;
+import se.scalablesolutions.akka.Config;
+import se.scalablesolutions.akka.actor.ActiveObjectFactory;
+import se.scalablesolutions.akka.config.ActiveObjectManager;
+import se.scalablesolutions.akka.nio.RemoteServer;
import junit.framework.TestCase;
public class RemoteInMemoryStateTest extends TestCase {
@@ -19,9 +21,9 @@ public class RemoteInMemoryStateTest extends TestCase {
}
}).start();
try { Thread.currentThread().sleep(1000); } catch (Exception e) {}
- se.scalablesolutions.akka.kernel.Kernel$.MODULE$.config();
+ Config.config();
}
- final private se.scalablesolutions.akka.kernel.config.ActiveObjectManager conf = new se.scalablesolutions.akka.kernel.config.ActiveObjectManager();
+ final ActiveObjectManager conf = new ActiveObjectManager();
final private ActiveObjectFactory factory = new ActiveObjectFactory();
protected void tearDown() {
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/RemotePersistentStateTest.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/RemotePersistentStateTest.java
similarity index 94%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/RemotePersistentStateTest.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/RemotePersistentStateTest.java
index 970a072135..3c4c15dc30 100644
--- a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/RemotePersistentStateTest.java
+++ b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/RemotePersistentStateTest.java
@@ -4,10 +4,10 @@
package se.scalablesolutions.akka.api;
-import se.scalablesolutions.akka.kernel.config.*;
-import static se.scalablesolutions.akka.kernel.config.JavaConfig.*;
-import se.scalablesolutions.akka.kernel.actor.*;
-import se.scalablesolutions.akka.kernel.Kernel;
+import se.scalablesolutions.akka.config.*;
+import static se.scalablesolutions.akka.config.JavaConfig.*;
+import se.scalablesolutions.akka.actor.*;
+import se.scalablesolutions.akka.Kernel;
import junit.framework.TestCase;
diff --git a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/RestTest.java b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/RestTest.java
similarity index 93%
rename from fun-test-java/src/test/java/se/scalablesolutions/akka/api/RestTest.java
rename to akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/RestTest.java
index 7032aa3043..5dbe454b0b 100644
--- a/fun-test-java/src/test/java/se/scalablesolutions/akka/api/RestTest.java
+++ b/akka-fun-test-java/src/test/java/se/scalablesolutions/akka/api/RestTest.java
@@ -25,8 +25,8 @@ import java.net.URI;
import java.util.Map;
import java.util.HashMap;
-import se.scalablesolutions.akka.kernel.config.*;
-import static se.scalablesolutions.akka.kernel.config.JavaConfig.*;
+import se.scalablesolutions.akka.config.*;
+import static se.scalablesolutions.akka.config.JavaConfig.*;
public class RestTest extends TestCase {
@@ -67,7 +67,7 @@ public class RestTest extends TestCase {
*/
private static SelectorThread startJersey() {
try {
- Servlet servlet = new se.scalablesolutions.akka.kernel.rest.AkkaServlet();
+ Servlet servlet = new se.scalablesolutions.akka.rest.AkkaServlet();
ServletAdapter adapter = new ServletAdapter();
adapter.setServletInstance(servlet);
adapter.setContextPath(URI.getPath());
diff --git a/fun-test-java/testng.xml b/akka-fun-test-java/testng.xml
similarity index 100%
rename from fun-test-java/testng.xml
rename to akka-fun-test-java/testng.xml
diff --git a/kernel/pom.xml b/akka-kernel/pom.xml
similarity index 83%
rename from kernel/pom.xml
rename to akka-kernel/pom.xml
index b495dc6b34..f20026d7a7 100755
--- a/kernel/pom.xml
+++ b/akka-kernel/pom.xml
@@ -15,13 +15,35 @@
../pom.xml
-
+
- akka-util-java
+ akka-actors
se.scalablesolutions.akka
0.6
+
+ akka-persistence
+ se.scalablesolutions.akka
+ 0.6
+
+
+ akka-rest
+ se.scalablesolutions.akka
+ 0.6
+
+
+ akka-amqp
+ se.scalablesolutions.akka
+ 0.6
+
+
+ akka-camel
+ se.scalablesolutions.akka
+ 0.6
+
+
+
org.scala-lang
scala-library
@@ -37,11 +59,6 @@
aspectwerkz-jdk5
2.1
-
- com.twitter
- scala-stats
- 1.0
-
net.lag
configgy
@@ -49,14 +66,19 @@
org.guiceyfruit
- guice-core
- 2.0-beta-4
+ guiceyfruit-core
+ 2.0
org.apache.camel
camel-core
2.0-SNAPSHOT
+
+ org.guiceyfruit
+ guice-core
+ 2.0-beta-4
+
org.jboss.netty
netty
@@ -73,9 +95,15 @@
2.7.4-0.1
 org.multiverse
 multiverse
 0.3
+
+ com.rabbitmq
+ rabbitmq-client
+ 0.9.1
@@ -100,9 +128,19 @@
0.3
- com.twitter
- scala-json
- 1.0
+ dispatch.json
+ dispatch-json
+ 0.5.2
+
+
+ dispatch.http
+ dispatch-http
+ 0.5.2
+
+
+ sjson.json
+ sjson
+ 0.1
@@ -134,6 +172,28 @@
1.5.1
+
+
+ org.slf4j
+ slf4j-log4j12
+ 1.4.3
+
+
+ org.slf4j
+ slf4j-api
+ 1.4.3
+
+
+ log4j
+ log4j
+ 1.2.13
+
+
+ commons-logging
+ commons-logging
+ 1.0.4
+
+
com.sun.grizzly
@@ -175,42 +235,6 @@
atmosphere-compat
0.3
-
-
-
- org.slf4j
- slf4j-log4j12
- 1.4.3
-
-
- org.slf4j
- slf4j-api
- 1.4.3
-
-
- log4j
- log4j
- 1.2.13
-
-
- commons-logging
- commons-logging
- 1.0.4
-
-
-
-
- org.scala-tools.testing
- scalatest
- 0.9.5
- test
-
-
- junit
- junit
- 4.5
- test
-
@@ -233,7 +257,7 @@
- se.scalablesolutions.akka.kernel.Kernel
+ se.scalablesolutions.akka.Kernel
diff --git a/kernel/src/main/scala/rest/AkkaServlet.scala b/akka-kernel/src/main/scala/AkkaServlet.scala
similarity index 89%
rename from kernel/src/main/scala/rest/AkkaServlet.scala
rename to akka-kernel/src/main/scala/AkkaServlet.scala
index 885d3e6321..15862ff43e 100755
--- a/kernel/src/main/scala/rest/AkkaServlet.scala
+++ b/akka-kernel/src/main/scala/AkkaServlet.scala
@@ -2,9 +2,8 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.rest
+package se.scalablesolutions.akka.rest
-import kernel.Kernel
import config.ConfiguratorRepository
import util.Logging
@@ -32,8 +31,8 @@ import scala.collection.jcl.Conversions._
class AkkaServlet extends ServletContainer with AtmosphereServletProcessor with Logging {
override def initiate(rc: ResourceConfig, wa: WebApplication) = {
- Kernel.boot // will boot if not already booted by 'main'
- val configurators = ConfiguratorRepository.getConfiguratorsFor(getServletContext)
+ akka.Kernel.boot // will boot if not already booted by 'main'
+ val configurators = ConfiguratorRepository.getConfigurators
rc.getClasses.addAll(configurators.flatMap(_.getComponentInterfaces))
log.info("Starting AkkaServlet with ResourceFilters: " + rc.getProperty("com.sun.jersey.spi.container.ResourceFilters"));
@@ -50,7 +49,7 @@ class AkkaServlet extends ServletContainer with AtmosphereServletProcessor with
val isUsingStream = try {
event.getResponse.getWriter
false
- } catch {case e: IllegalStateException => true}
+ } catch { case e: IllegalStateException => true }
val data = event.getMessage.toString
if (isUsingStream) {
@@ -60,7 +59,7 @@ class AkkaServlet extends ServletContainer with AtmosphereServletProcessor with
event.getResponse.getWriter.write(data)
event.getResponse.getWriter.flush
}
- } else log.info("Null event message :/ req[%s] res[%s]", event.getRequest, event.getResponse)
+ } else log.info("Null event message: req[%s] res[%s]", event.getRequest, event.getResponse)
event
}
diff --git a/akka-kernel/src/main/scala/Kernel.scala b/akka-kernel/src/main/scala/Kernel.scala
new file mode 100644
index 0000000000..5dd8bcf190
--- /dev/null
+++ b/akka-kernel/src/main/scala/Kernel.scala
@@ -0,0 +1,237 @@
+/**
+ * Copyright (C) 2009 Scalable Solutions.
+ */
+
+package se.scalablesolutions.akka
+
+import com.sun.grizzly.http.SelectorThread
+import com.sun.grizzly.http.servlet.ServletAdapter
+import com.sun.grizzly.standalone.StaticStreamAlgorithm
+
+import javax.ws.rs.core.UriBuilder
+import java.io.File
+import java.net.URLClassLoader
+
+import rest.AkkaCometServlet
+import nio.RemoteServer
+import state.CassandraStorage
+import util.Logging
+
+/**
+ * @author Jonas Bonér
+ */
+object Kernel extends Logging {
+ import Config._
+
+ val BOOT_CLASSES = config.getList("akka.boot")
+ val RUN_REMOTE_SERVICE = config.getBool("akka.remote.service", true)
+ val STORAGE_SYSTEM = config.getString("akka.storage.system", "cassandra")
+ val RUN_REST_SERVICE = config.getBool("akka.rest.service", true)
+ val REST_HOSTNAME = config.getString("akka.rest.hostname", "localhost")
+ val REST_URL = "http://" + REST_HOSTNAME
+ val REST_PORT = config.getInt("akka.rest.port", 9998)
+
+ // FIXME add API to shut server down gracefully
+ @volatile private var hasBooted = false
+ private var remoteServer: RemoteServer = _
+ private var jerseySelectorThread: SelectorThread = _
+ private val startTime = System.currentTimeMillis
+ private var applicationLoader: Option[ClassLoader] = None
+
+ def main(args: Array[String]) = boot
+
+ def boot = synchronized {
+ if (!hasBooted) {
+ printBanner
+ log.info("Starting Akka...")
+
+ runApplicationBootClasses
+
+ if (RUN_REMOTE_SERVICE) startRemoteService
+ if (RUN_REST_SERVICE) startREST
+
+ Thread.currentThread.setContextClassLoader(getClass.getClassLoader)
+ log.info("Akka started successfully")
+ hasBooted = true
+ }
+ }
+
+
+ private[akka] def runApplicationBootClasses = {
+ val loader =
+ if (HOME.isDefined) {
+ val CONFIG = HOME.get + "/config"
+ val DEPLOY = HOME.get + "/deploy"
+ val DEPLOY_DIR = new File(DEPLOY)
+ if (!DEPLOY_DIR.exists) {log.error("Could not find a deploy directory at [" + DEPLOY + "]"); System.exit(-1)}
+ val toDeploy = for (f <- DEPLOY_DIR.listFiles().toArray.toList.asInstanceOf[List[File]]) yield f.toURL
+ //val toDeploy = DEPLOY_DIR.toURL :: (for (f <- DEPLOY_DIR.listFiles().toArray.toList.asInstanceOf[List[File]]) yield f.toURL)
+ log.info("Deploying applications from [%s]: [%s]", DEPLOY, toDeploy.toArray.toList)
+ new URLClassLoader(toDeploy.toArray, getClass.getClassLoader)
+ } else if (getClass.getClassLoader.getResourceAsStream("akka.conf") != null) {
+ getClass.getClassLoader
+ } else throw new IllegalStateException("AKKA_HOME is not defined and no 'akka.conf' can be found on the classpath, aborting")
+ for (clazz <- BOOT_CLASSES) {
+ log.info("Loading boot class [%s]", clazz)
+ loader.loadClass(clazz).newInstance
+ }
+ applicationLoader = Some(loader)
+ }
+
+ private[akka] def startRemoteService = {
+ // FIXME manage remote serve thread for graceful shutdown
+ val remoteServerThread = new Thread(new Runnable() {
+ def run = RemoteServer.start(applicationLoader)
+ }, "Akka Remote Service")
+ remoteServerThread.start
+ }
+
+ def startREST = {
+ val uri = UriBuilder.fromUri(REST_URL).port(REST_PORT).build()
+
+ val scheme = uri.getScheme
+ if (!scheme.equalsIgnoreCase("http")) throw new IllegalArgumentException("The URI scheme, of the URI " + REST_URL + ", must be equal (ignoring case) to 'http'")
+
+ val adapter = new ServletAdapter
+ adapter.setHandleStaticResources(true)
+ adapter.setServletInstance(new AkkaCometServlet)
+ adapter.setContextPath(uri.getPath)
+ if (HOME.isDefined) adapter.setRootFolder(HOME.get + "/deploy/root")
+ log.info("REST service root path: [" + adapter.getRootFolder + "] and context path [" + adapter.getContextPath + "] ")
+
+ val ah = new com.sun.grizzly.arp.DefaultAsyncHandler
+ ah.addAsyncFilter(new com.sun.grizzly.comet.CometAsyncFilter)
+ jerseySelectorThread = new SelectorThread
+ jerseySelectorThread.setAlgorithmClassName(classOf[StaticStreamAlgorithm].getName)
+ jerseySelectorThread.setPort(REST_PORT)
+ jerseySelectorThread.setAdapter(adapter)
+ jerseySelectorThread.setEnableAsyncExecution(true)
+ jerseySelectorThread.setAsyncHandler(ah)
+ jerseySelectorThread.listen
+
+ log.info("REST service started successfully. Listening to port [" + REST_PORT + "]")
+ }
+
+ private def printBanner = {
+ log.info(
+ """==============================
+ __ __
+ _____ | | _| | _______
+ \__ \ | |/ / |/ /\__ \
+ / __ \| <| < / __ \_
+ (____ /__|_ \__|_ \(____ /
+ \/ \/ \/ \/
+ """)
+ log.info(" Running version " + VERSION)
+ log.info("==============================")
+ }
+
+ private def cassandraBenchmark = {
+ val NR_ENTRIES = 100000
+
+ println("=================================================")
+ var start = System.currentTimeMillis
+ for (i <- 1 to NR_ENTRIES) CassandraStorage.insertMapStorageEntryFor("test", i.toString, "data")
+ var end = System.currentTimeMillis
+ println("Writes per second: " + NR_ENTRIES / ((end - start).toDouble / 1000))
+
+ println("=================================================")
+ start = System.currentTimeMillis
+ val entries = new scala.collection.mutable.ArrayBuffer[Tuple2[String, String]]
+ for (i <- 1 to NR_ENTRIES) entries += ((i.toString, "data"))
+ CassandraStorage.insertMapStorageEntriesFor("test", entries.toList)
+ end = System.currentTimeMillis
+ println("Writes per second - batch: " + NR_ENTRIES / ((end - start).toDouble / 1000))
+
+ println("=================================================")
+ start = System.currentTimeMillis
+ for (i <- 1 to NR_ENTRIES) CassandraStorage.getMapStorageEntryFor("test", i.toString)
+ end = System.currentTimeMillis
+ println("Reads per second: " + NR_ENTRIES / ((end - start).toDouble / 1000))
+
+ System.exit(0)
+ }
+}
+
+
+
+
+/*
+//import voldemort.client.{SocketStoreClientFactory, StoreClient, StoreClientFactory}
+//import voldemort.server.{VoldemortConfig, VoldemortServer}
+//import voldemort.versioning.Versioned
+
+ private[this] var storageFactory: StoreClientFactory = _
+ private[this] var storageServer: VoldemortServer = _
+*/
+
+// private[akka] def startVoldemort = {
+// val VOLDEMORT_SERVER_URL = "tcp://" + SERVER_URL
+// val VOLDEMORT_SERVER_PORT = 6666
+// val VOLDEMORT_BOOTSTRAP_URL = VOLDEMORT_SERVER_URL + ":" + VOLDEMORT_SERVER_PORT
+// // Start Voldemort server
+// val config = VoldemortConfig.loadFromVoldemortHome(Boot.HOME)
+// storageServer = new VoldemortServer(config)
+// storageServer.start
+// log.info("Replicated persistent storage server started at %s", VOLDEMORT_BOOTSTRAP_URL)
+//
+// // Create Voldemort client factory
+// val numThreads = 10
+// val maxQueuedRequests = 10
+// val maxConnectionsPerNode = 10
+// val maxTotalConnections = 100
+// storageFactory = new SocketStoreClientFactory(
+// numThreads,
+// numThreads,
+// maxQueuedRequests,
+// maxConnectionsPerNode,
+// maxTotalConnections,
+// VOLDEMORT_BOOTSTRAP_URL)
+//
+// val name = this.getClass.getName
+// val storage = getStorageFor("actors")
+//// val value = storage.get(name)
+// val value = new Versioned("state")
+// //value.setObject("state")
+// storage.put(name, value)
+// }
+//
+// private[akka] def getStorageFor(storageName: String): StoreClient[String, String] =
+// storageFactory.getStoreClient(storageName)
+
+// private[akka] def startZooKeeper = {
+//import org.apache.zookeeper.jmx.ManagedUtil
+//import org.apache.zookeeper.server.persistence.FileTxnSnapLog
+//import org.apache.zookeeper.server.ServerConfig
+//import org.apache.zookeeper.server.NIOServerCnxn
+// val ZOO_KEEPER_SERVER_URL = SERVER_URL
+// val ZOO_KEEPER_SERVER_PORT = 9898
+// try {
+// ManagedUtil.registerLog4jMBeans
+// ServerConfig.parse(args)
+// } catch {
+// case e: JMException => log.warning("Unable to register log4j JMX control: %s", e)
+// case e => log.fatal("Error in ZooKeeper config: %s", e)
+// }
+// val factory = new ZooKeeperServer.Factory() {
+// override def createConnectionFactory = new NIOServerCnxn.Factory(ServerConfig.getClientPort)
+// override def createServer = {
+// val server = new ZooKeeperServer
+// val txLog = new FileTxnSnapLog(
+// new File(ServerConfig.getDataLogDir),
+// new File(ServerConfig.getDataDir))
+// server.setTxnLogFactory(txLog)
+// server
+// }
+// }
+// try {
+// val zooKeeper = factory.createServer
+// zooKeeper.startup
+// log.info("ZooKeeper started")
+// // TODO: handle clean shutdown as below in separate thread
+// // val cnxnFactory = serverFactory.createConnectionFactory
+// // cnxnFactory.setZooKeeperServer(zooKeeper)
+// // cnxnFactory.join
+// // if (zooKeeper.isRunning) zooKeeper.shutdown
+// } catch { case e => log.fatal("Unexpected exception: s%",e) }
+// }
diff --git a/akka-persistence/pom.xml b/akka-persistence/pom.xml
new file mode 100644
index 0000000000..a0563f1cb0
--- /dev/null
+++ b/akka-persistence/pom.xml
@@ -0,0 +1,59 @@
+
+ 4.0.0
+
+ akka-persistence
+ Akka Persistence Module
+
+ jar
+
+
+ akka
+ se.scalablesolutions.akka
+ 0.6
+ ../pom.xml
+
+
+
+
+ akka-util
+ se.scalablesolutions.akka
+ 0.6
+
+
+ akka-actors
+ se.scalablesolutions.akka
+ 0.6
+
+
+
+
+ com.mongodb
+ mongo
+ 0.6
+
+
+
+
+ org.apache.cassandra
+ cassandra
+ 0.4.0-trunk
+
+
+ com.facebook
+ thrift
+ 1.0
+
+
+ com.facebook
+ fb303
+ 1.0
+
+
+ commons-pool
+ commons-pool
+ 1.5.1
+
+
+
+
diff --git a/kernel/src/main/scala/state/CassandraSession.scala b/akka-persistence/src/main/scala/CassandraSession.scala
similarity index 99%
rename from kernel/src/main/scala/state/CassandraSession.scala
rename to akka-persistence/src/main/scala/CassandraSession.scala
index a877b5bce1..fc2121a33c 100644
--- a/kernel/src/main/scala/state/CassandraSession.scala
+++ b/akka-persistence/src/main/scala/CassandraSession.scala
@@ -2,14 +2,14 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.state
+package se.scalablesolutions.akka.state
import java.io.{Flushable, Closeable}
import util.Logging
import util.Helpers._
import serialization.Serializer
-import kernel.Kernel.config
+import akka.Config.config
import org.apache.cassandra.db.ColumnFamily
import org.apache.cassandra.service._
diff --git a/kernel/src/main/scala/state/CassandraStorage.scala b/akka-persistence/src/main/scala/CassandraStorage.scala
similarity index 79%
rename from kernel/src/main/scala/state/CassandraStorage.scala
rename to akka-persistence/src/main/scala/CassandraStorage.scala
index fdc6e607db..02f5f5dbec 100644
--- a/kernel/src/main/scala/state/CassandraStorage.scala
+++ b/akka-persistence/src/main/scala/CassandraStorage.scala
@@ -2,14 +2,14 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.state
+package se.scalablesolutions.akka.state
import java.io.{Flushable, Closeable}
import util.Logging
import util.Helpers._
import serialization.Serializer
-import kernel.Kernel.config
+import akka.Config.config
import org.apache.cassandra.db.ColumnFamily
import org.apache.cassandra.service._
@@ -46,7 +46,7 @@ object CassandraStorage extends MapStorage
*/
private[this] val serializer: Serializer = {
- kernel.Kernel.config.getString("akka.storage.cassandra.storage-format", "java") match {
+ config.getString("akka.storage.cassandra.storage-format", "java") match {
case "scala-json" => Serializer.ScalaJSON
case "java-json" => Serializer.JavaJSON
case "protobuf" => Serializer.Protobuf
@@ -57,47 +57,28 @@ object CassandraStorage extends MapStorage
}
}
- private[this] var sessions: Option[CassandraSessionPool[_]] = None
-
- def start = synchronized {
- if (!isRunning) {
- try {
- sessions = Some(new CassandraSessionPool(
- KEYSPACE,
- StackPool(SocketProvider(CASSANDRA_SERVER_HOSTNAME, CASSANDRA_SERVER_PORT)),
- protocol,
- CONSISTENCY_LEVEL))
- log.info("Cassandra persistent storage has started up successfully");
- } catch {
- case e =>
- log.error("Could not start up Cassandra persistent storage")
- throw e
- }
- isRunning
- }
- }
-
- def stop = synchronized {
- if (isRunning && sessions.isDefined) sessions.get.close
- }
-
+ private[this] var sessions = new CassandraSessionPool(
+ KEYSPACE,
+ StackPool(SocketProvider(CASSANDRA_SERVER_HOSTNAME, CASSANDRA_SERVER_PORT)),
+ protocol,
+ CONSISTENCY_LEVEL)
// ===============================================================
// For Ref
// ===============================================================
- override def insertRefStorageFor(name: String, element: AnyRef) = if (sessions.isDefined) {
- sessions.get.withSession {
+ override def insertRefStorageFor(name: String, element: AnyRef) = {
+ sessions.withSession {
_ ++| (name,
new ColumnPath(REF_COLUMN_PARENT.getColumn_family, null, REF_KEY),
serializer.out(element),
System.currentTimeMillis,
CONSISTENCY_LEVEL)
}
- } else throw new IllegalStateException("CassandraStorage is not started")
+ }
- override def getRefStorageFor(name: String): Option[AnyRef] = if (sessions.isDefined) {
+ override def getRefStorageFor(name: String): Option[AnyRef] = {
try {
- val column: Option[Column] = sessions.get.withSession {
+ val column: Option[Column] = sessions.withSession {
_ | (name, new ColumnPath(REF_COLUMN_PARENT.getColumn_family, null, REF_KEY))
}
if (column.isDefined) Some(serializer.in(column.get.value, None))
@@ -107,37 +88,37 @@ object CassandraStorage extends MapStorage
e.printStackTrace
None
}
- } else throw new IllegalStateException("CassandraStorage is not started")
+ }
// ===============================================================
// For Vector
// ===============================================================
- override def insertVectorStorageEntryFor(name: String, element: AnyRef) = if (sessions.isDefined) {
- sessions.get.withSession {
+ override def insertVectorStorageEntryFor(name: String, element: AnyRef) = {
+ sessions.withSession {
_ ++| (name,
new ColumnPath(VECTOR_COLUMN_PARENT.getColumn_family, null, intToBytes(getVectorStorageSizeFor(name))),
serializer.out(element),
System.currentTimeMillis,
CONSISTENCY_LEVEL)
}
- } else throw new IllegalStateException("CassandraStorage is not started")
+ }
override def insertVectorStorageEntriesFor(name: String, elements: List[AnyRef]) = {
}
- override def getVectorStorageEntryFor(name: String, index: Int): AnyRef = if (sessions.isDefined) {
- val column: Option[Column] = sessions.get.withSession {
+ override def getVectorStorageEntryFor(name: String, index: Int): AnyRef = {
+ val column: Option[Column] = sessions.withSession {
_ | (name, new ColumnPath(VECTOR_COLUMN_PARENT.getColumn_family, null, intToBytes(index)))
}
if (column.isDefined) serializer.in(column.get.value, None)
else throw new NoSuchElementException("No element for vector [" + name + "] and index [" + index + "]")
- } else throw new IllegalStateException("CassandraStorage is not started")
+ }
- override def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int): List[AnyRef] = if (sessions.isDefined) {
+ override def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int): List[AnyRef] = {
val startBytes = if (start.isDefined) intToBytes(start.get) else null
val finishBytes = if (finish.isDefined) intToBytes(finish.get) else null
- val columns: List[Column] = sessions.get.withSession {
+ val columns: List[Column] = sessions.withSession {
_ / (name,
VECTOR_COLUMN_PARENT,
startBytes, finishBytes,
@@ -146,43 +127,43 @@ object CassandraStorage extends MapStorage
CONSISTENCY_LEVEL)
}
columns.map(column => serializer.in(column.value, None))
- } else throw new IllegalStateException("CassandraStorage is not started")
+ }
- override def getVectorStorageSizeFor(name: String): Int = if (sessions.isDefined) {
- sessions.get.withSession {
+ override def getVectorStorageSizeFor(name: String): Int = {
+ sessions.withSession {
_ |# (name, VECTOR_COLUMN_PARENT)
}
- } else throw new IllegalStateException("CassandraStorage is not started")
+ }
// ===============================================================
// For Map
// ===============================================================
- override def insertMapStorageEntryFor(name: String, key: AnyRef, element: AnyRef) = if (sessions.isDefined) {
- sessions.get.withSession {
+ override def insertMapStorageEntryFor(name: String, key: AnyRef, element: AnyRef) = {
+ sessions.withSession {
_ ++| (name,
new ColumnPath(MAP_COLUMN_PARENT.getColumn_family, null, serializer.out(key)),
serializer.out(element),
System.currentTimeMillis,
CONSISTENCY_LEVEL)
}
- } else throw new IllegalStateException("CassandraStorage is not started")
+ }
- override def insertMapStorageEntriesFor(name: String, entries: List[Tuple2[AnyRef, AnyRef]]) = if (sessions.isDefined) {
+ override def insertMapStorageEntriesFor(name: String, entries: List[Tuple2[AnyRef, AnyRef]]) = {
val cf2columns: java.util.Map[String, java.util.List[Column]] = new java.util.HashMap
for (entry <- entries) {
val columns: java.util.List[Column] = new java.util.ArrayList
columns.add(new Column(serializer.out(entry._1), serializer.out(entry._2), System.currentTimeMillis))
cf2columns.put(MAP_COLUMN_PARENT.getColumn_family, columns)
}
- sessions.get.withSession {
+ sessions.withSession {
_ ++| (new BatchMutation(name, cf2columns), CONSISTENCY_LEVEL)
}
- } else throw new IllegalStateException("CassandraStorage is not started")
+ }
- override def getMapStorageEntryFor(name: String, key: AnyRef): Option[AnyRef] = if (sessions.isDefined) {
+ override def getMapStorageEntryFor(name: String, key: AnyRef): Option[AnyRef] = {
try {
- val column: Option[Column] = sessions.get.withSession {
+ val column: Option[Column] = sessions.withSession {
_ | (name, new ColumnPath(MAP_COLUMN_PARENT.getColumn_family, null, serializer.out(key)))
}
if (column.isDefined) Some(serializer.in(column.get.value, None))
@@ -192,9 +173,9 @@ object CassandraStorage extends MapStorage
e.printStackTrace
None
}
- } else throw new IllegalStateException("CassandraStorage is not started")
+ }
- override def getMapStorageFor(name: String): List[Tuple2[AnyRef, AnyRef]] = if (sessions.isDefined) {
+ override def getMapStorageFor(name: String): List[Tuple2[AnyRef, AnyRef]] = {
throw new UnsupportedOperationException
/*
val columns = server.get_columns_since(name, MAP_COLUMN_FAMILY, -1)
@@ -204,35 +185,35 @@ object CassandraStorage extends MapStorage
col = (column.columnName, column.value)
} yield col
*/
- } else throw new IllegalStateException("CassandraStorage is not started")
+ }
- override def getMapStorageSizeFor(name: String): Int = if (sessions.isDefined) {
- sessions.get.withSession {
+ override def getMapStorageSizeFor(name: String): Int = {
+ sessions.withSession {
_ |# (name, MAP_COLUMN_PARENT)
}
- } else throw new IllegalStateException("CassandraStorage is not started")
+ }
override def removeMapStorageFor(name: String): Unit = removeMapStorageFor(name, null)
- override def removeMapStorageFor(name: String, key: AnyRef): Unit = if (sessions.isDefined) {
+ override def removeMapStorageFor(name: String, key: AnyRef): Unit = {
val keyBytes = if (key == null) null else serializer.out(key)
- sessions.get.withSession {
+ sessions.withSession {
_ -- (name,
new ColumnPathOrParent(MAP_COLUMN_PARENT.getColumn_family, null, keyBytes),
System.currentTimeMillis,
CONSISTENCY_LEVEL)
}
- } else throw new IllegalStateException("CassandraStorage is not started")
+ }
override def getMapStorageRangeFor(name: String, start: Option[AnyRef], finish: Option[AnyRef], count: Int):
- List[Tuple2[AnyRef, AnyRef]] = if (sessions.isDefined) {
+ List[Tuple2[AnyRef, AnyRef]] = {
val startBytes = if (start.isDefined) serializer.out(start.get) else null
val finishBytes = if (finish.isDefined) serializer.out(finish.get) else null
- val columns: List[Column] = sessions.get.withSession {
+ val columns: List[Column] = sessions.withSession {
_ / (name, MAP_COLUMN_PARENT, startBytes, finishBytes, IS_ASCENDING, count, CONSISTENCY_LEVEL)
}
columns.map(column => (column.name, serializer.in(column.value, None)))
- } else throw new IllegalStateException("CassandraStorage is not started")
+ }
}
/**
@@ -250,14 +231,14 @@ val REF_COLUMN_FAMILY = "ref:item"
val IS_ASCENDING = true
-val RUN_THRIFT_SERVICE = kernel.Kernel.config.getBool("akka.storage.cassandra.thrift-server.service", false)
+val RUN_THRIFT_SERVICE = akka.Config.config.getBool("akka.storage.cassandra.thrift-server.service", false)
val CONSISTENCY_LEVEL = {
-if (kernel.Kernel.config.getBool("akka.storage.cassandra.blocking", true)) 0
+if (akka.Config.config.getBool("akka.storage.cassandra.blocking", true)) 0
else 1 }
@volatile private[this] var isRunning = false
private[this] val serializer: Serializer = {
-kernel.Kernel.config.getString("akka.storage.cassandra.storage-format", "java") match {
+akka.Config.config.getString("akka.storage.cassandra.storage-format", "java") match {
case "scala-json" => Serializer.ScalaJSON
case "java-json" => Serializer.JavaJSON
case "protobuf" => Serializer.Protobuf
@@ -417,7 +398,7 @@ case object Start
case object Stop
private[this] val serverEngine: TThreadPoolServer = try {
-val pidFile = kernel.Kernel.config.getString("akka.storage.cassandra.thrift-server.pidfile", "akka.pid")
+val pidFile = akka.Config.config.getString("akka.storage.cassandra.thrift-server.pidfile", "akka.pid")
if (pidFile != null) new File(pidFile).deleteOnExit();
val listenPort = DatabaseDescriptor.getThriftPort
diff --git a/kernel/src/main/scala/state/DataFlowVariable.scala b/akka-persistence/src/main/scala/DataFlowVariable.scala
similarity index 99%
rename from kernel/src/main/scala/state/DataFlowVariable.scala
rename to akka-persistence/src/main/scala/DataFlowVariable.scala
index 17b851361e..ec47f0983f 100644
--- a/kernel/src/main/scala/state/DataFlowVariable.scala
+++ b/akka-persistence/src/main/scala/DataFlowVariable.scala
@@ -2,7 +2,7 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.state
+package se.scalablesolutions.akka.state
import scala.actors.Actor
import scala.actors.OutputChannel
diff --git a/kernel/src/main/scala/state/MongoStorage.scala b/akka-persistence/src/main/scala/MongoStorage.scala
similarity index 84%
rename from kernel/src/main/scala/state/MongoStorage.scala
rename to akka-persistence/src/main/scala/MongoStorage.scala
index f72e1c7771..bea43fbf23 100644
--- a/kernel/src/main/scala/state/MongoStorage.scala
+++ b/akka-persistence/src/main/scala/MongoStorage.scala
@@ -1,291 +1,287 @@
-package se.scalablesolutions.akka.kernel.state
-
-import com.mongodb._
-import se.scalablesolutions.akka.kernel.util.Logging
-import serialization.{Serializer}
-import kernel.Kernel.config
-
-import java.util.{Map=>JMap, List=>JList, ArrayList=>JArrayList}
-
-/**
- * A module for supporting MongoDB based persistence.
- *
- * The module offers functionality for:
- * Persistent Maps
- * Persistent Vectors
- * Persistent Refs
- *
- * @author Debasish Ghosh
- */
-object MongoStorage extends MapStorage
- with VectorStorage with RefStorage with Logging {
-
- // enrich with null safe findOne
- class RichDBCollection(value: DBCollection) {
- def findOneNS(o: DBObject): Option[DBObject] = {
- value.findOne(o) match {
- case null => None
- case x => Some(x)
- }
- }
- }
-
- implicit def enrichDBCollection(c: DBCollection) = new RichDBCollection(c)
-
- val KEY = "key"
- val VALUE = "value"
- val COLLECTION = "akka_coll"
- val MONGODB_SERVER_HOSTNAME =
- config.getString("akka.storage.mongodb.hostname", "127.0.0.1")
- val MONGODB_SERVER_DBNAME =
- config.getString("akka.storage.mongodb.dbname", "testdb")
- val MONGODB_SERVER_PORT =
- config.getInt("akka.storage.mongodb.port", 27017)
-
- val db = new Mongo(MONGODB_SERVER_HOSTNAME,
- MONGODB_SERVER_PORT, MONGODB_SERVER_DBNAME)
- val coll = db.getCollection(COLLECTION)
-
- // @fixme: make this pluggable
- private[this] val serializer: Serializer = Serializer.ScalaJSON
-
- override def insertMapStorageEntryFor(name: String,
- key: AnyRef, value: AnyRef) {
- insertMapStorageEntriesFor(name, List((key, value)))
- }
-
- override def insertMapStorageEntriesFor(name: String,
- entries: List[Tuple2[AnyRef, AnyRef]]) {
- import java.util.{Map, HashMap}
-
- val m: Map[AnyRef, AnyRef] = new HashMap
- for ((k, v) <- entries) {
- m.put(k, serializer.out(v))
- }
-
- nullSafeFindOne(name) match {
- case None =>
- coll.insert(new BasicDBObject().append(KEY, name).append(VALUE, m))
- case Some(dbo) => {
- // collate the maps
- val o = dbo.get(VALUE).asInstanceOf[Map[AnyRef, AnyRef]]
- o.putAll(m)
-
- // remove existing reference
- removeMapStorageFor(name)
- // and insert
- coll.insert(new BasicDBObject().append(KEY, name).append(VALUE, o))
- }
- }
- }
-
- override def removeMapStorageFor(name: String) = {
- val q = new BasicDBObject
- q.put(KEY, name)
- coll.remove(q)
- }
-
- override def removeMapStorageFor(name: String, key: AnyRef) = {
- nullSafeFindOne(name) match {
- case None =>
- case Some(dbo) => {
- val orig = dbo.get(VALUE).asInstanceOf[DBObject].toMap
- orig.remove(key.asInstanceOf[String])
-
- // remove existing reference
- removeMapStorageFor(name)
- // and insert
- coll.insert(new BasicDBObject().append(KEY, name).append(VALUE, orig))
- }
- }
- }
-
- override def getMapStorageEntryFor(name: String,
- key: AnyRef): Option[AnyRef] = {
- getValueForKey(name, key.asInstanceOf[String])
- }
-
- override def getMapStorageSizeFor(name: String): Int = {
- nullSafeFindOne(name) match {
- case None => 0
- case Some(dbo) =>
- dbo.get(VALUE).asInstanceOf[JMap[String, AnyRef]].keySet.size
- }
- }
-
- override def getMapStorageFor(name: String): List[Tuple2[AnyRef, AnyRef]] = {
- val m =
- nullSafeFindOne(name) match {
- case None =>
- throw new Predef.NoSuchElementException(name + " not present")
- case Some(dbo) =>
- dbo.get(VALUE).asInstanceOf[JMap[String, AnyRef]]
- }
- val n =
- List(m.keySet.toArray: _*).asInstanceOf[List[String]]
- val vals =
- for(s <- n)
- yield (s, serializer.in(m.get(s).asInstanceOf[Array[Byte]], None))
- vals.asInstanceOf[List[Tuple2[String, AnyRef]]]
- }
-
- override def getMapStorageRangeFor(name: String, start: Option[AnyRef],
- finish: Option[AnyRef],
- count: Int): List[Tuple2[AnyRef, AnyRef]] = {
- val m =
- nullSafeFindOne(name) match {
- case None =>
- throw new Predef.NoSuchElementException(name + " not present")
- case Some(dbo) =>
- dbo.get(VALUE).asInstanceOf[JMap[String, AnyRef]]
- }
-
- /**
- * count is the max number of results to return. Start with
- * start or 0 (if start is not defined) and go until
- * you hit finish or count.
- */
- val s = if (start.isDefined) start.get.asInstanceOf[Int] else 0
- val cnt =
- if (finish.isDefined) {
- val f = finish.get.asInstanceOf[Int]
- if (f >= s) Math.min(count, (f - s)) else count
- }
- else count
-
- val n =
- List(m.keySet.toArray: _*).asInstanceOf[List[String]].sort((e1, e2) => (e1 compareTo e2) < 0).slice(s, s + cnt)
- val vals =
- for(s <- n)
- yield (s, serializer.in(m.get(s).asInstanceOf[Array[Byte]], None))
- vals.asInstanceOf[List[Tuple2[String, AnyRef]]]
- }
-
- private def getValueForKey(name: String, key: String): Option[AnyRef] = {
- try {
- nullSafeFindOne(name) match {
- case None => None
- case Some(dbo) =>
- Some(serializer.in(
- dbo.get(VALUE)
- .asInstanceOf[JMap[String, AnyRef]]
- .get(key).asInstanceOf[Array[Byte]], None))
- }
- } catch {
- case e =>
- throw new Predef.NoSuchElementException(e.getMessage)
- }
- }
-
- override def insertVectorStorageEntriesFor(name: String, elements: List[AnyRef]) = {
- val q = new BasicDBObject
- q.put(KEY, name)
-
- val currentList =
- coll.findOneNS(q) match {
- case None =>
- new JArrayList[AnyRef]
- case Some(dbo) =>
- dbo.get(VALUE).asInstanceOf[JArrayList[AnyRef]]
- }
- if (!currentList.isEmpty) {
- // record exists
- // remove before adding
- coll.remove(q)
- }
-
- // add to the current list
- elements.map(serializer.out(_)).foreach(currentList.add(_))
-
- coll.insert(
- new BasicDBObject()
- .append(KEY, name)
- .append(VALUE, currentList)
- )
- }
-
- override def insertVectorStorageEntryFor(name: String, element: AnyRef) = {
- insertVectorStorageEntriesFor(name, List(element))
- }
-
- override def getVectorStorageEntryFor(name: String, index: Int): AnyRef = {
- try {
- val o =
- nullSafeFindOne(name) match {
- case None =>
- throw new Predef.NoSuchElementException(name + " not present")
-
- case Some(dbo) =>
- dbo.get(VALUE).asInstanceOf[JList[AnyRef]]
- }
- serializer.in(
- o.get(index).asInstanceOf[Array[Byte]],
- None
- )
- } catch {
- case e =>
- throw new Predef.NoSuchElementException(e.getMessage)
- }
- }
-
- override def getVectorStorageRangeFor(name: String,
- start: Option[Int], finish: Option[Int], count: Int): List[AnyRef] = {
- try {
- val o =
- nullSafeFindOne(name) match {
- case None =>
- throw new Predef.NoSuchElementException(name + " not present")
-
- case Some(dbo) =>
- dbo.get(VALUE).asInstanceOf[JList[AnyRef]]
- }
-
- // pick the subrange and make a Scala list
- val l =
- List(o.subList(start.get, start.get + count).toArray: _*)
-
- for(e <- l)
- yield serializer.in(e.asInstanceOf[Array[Byte]], None)
- } catch {
- case e =>
- throw new Predef.NoSuchElementException(e.getMessage)
- }
- }
-
- override def getVectorStorageSizeFor(name: String): Int = {
- nullSafeFindOne(name) match {
- case None => 0
- case Some(dbo) =>
- dbo.get(VALUE).asInstanceOf[JList[AnyRef]].size
- }
- }
-
- private def nullSafeFindOne(name: String): Option[DBObject] = {
- val o = new BasicDBObject
- o.put(KEY, name)
- coll.findOneNS(o)
- }
-
- override def insertRefStorageFor(name: String, element: AnyRef) = {
- nullSafeFindOne(name) match {
- case None =>
- case Some(dbo) => {
- val q = new BasicDBObject
- q.put(KEY, name)
- coll.remove(q)
- }
- }
- coll.insert(
- new BasicDBObject()
- .append(KEY, name)
- .append(VALUE, serializer.out(element)))
- }
-
- override def getRefStorageFor(name: String): Option[AnyRef] = {
- nullSafeFindOne(name) match {
- case None => None
- case Some(dbo) =>
- Some(serializer.in(dbo.get(VALUE).asInstanceOf[Array[Byte]], None))
- }
- }
-}
+package se.scalablesolutions.akka.state
+
+import akka.util.Logging
+import serialization.{Serializer}
+import akka.Config.config
+import sjson.json.Serializer._
+
+import com.mongodb._
+
+import java.util.{Map=>JMap, List=>JList, ArrayList=>JArrayList}
+
+/**
+ * A module for supporting MongoDB based persistence.
+ *
+ * The module offers functionality for:
+ * Persistent Maps
+ * Persistent Vectors
+ * Persistent Refs
+ *
+ * @author Debasish Ghosh
+ */
+object MongoStorage extends MapStorage with VectorStorage with RefStorage with Logging {
+
+ // enrich with null safe findOne
+ class RichDBCollection(value: DBCollection) {
+ def findOneNS(o: DBObject): Option[DBObject] = {
+ value.findOne(o) match {
+ case null => None
+ case x => Some(x)
+ }
+ }
+ }
+
+ implicit def enrichDBCollection(c: DBCollection) = new RichDBCollection(c)
+
+ val KEY = "key"
+ val VALUE = "value"
+ val COLLECTION = "akka_coll"
+
+ val MONGODB_SERVER_HOSTNAME = config.getString("akka.storage.mongodb.hostname", "127.0.0.1")
+ val MONGODB_SERVER_DBNAME = config.getString("akka.storage.mongodb.dbname", "testdb")
+ val MONGODB_SERVER_PORT = config.getInt("akka.storage.mongodb.port", 27017)
+
+ val db = new Mongo(MONGODB_SERVER_HOSTNAME, MONGODB_SERVER_PORT, MONGODB_SERVER_DBNAME)
+ val coll = db.getCollection(COLLECTION)
+
+ // FIXME: make this pluggable
+ private[this] val serializer = SJSON
+
+ override def insertMapStorageEntryFor(name: String,
+ key: AnyRef, value: AnyRef) {
+ insertMapStorageEntriesFor(name, List((key, value)))
+ }
+
+ override def insertMapStorageEntriesFor(name: String,
+ entries: List[Tuple2[AnyRef, AnyRef]]) {
+ import java.util.{Map, HashMap}
+
+ val m: Map[AnyRef, AnyRef] = new HashMap
+ for ((k, v) <- entries) {
+ m.put(k, serializer.out(v))
+ }
+
+ nullSafeFindOne(name) match {
+ case None =>
+ coll.insert(new BasicDBObject().append(KEY, name).append(VALUE, m))
+ case Some(dbo) => {
+ // collate the maps
+ val o = dbo.get(VALUE).asInstanceOf[Map[AnyRef, AnyRef]]
+ o.putAll(m)
+
+ // remove existing reference
+ removeMapStorageFor(name)
+ // and insert
+ coll.insert(new BasicDBObject().append(KEY, name).append(VALUE, o))
+ }
+ }
+ }
+
+ override def removeMapStorageFor(name: String) = {
+ val q = new BasicDBObject
+ q.put(KEY, name)
+ coll.remove(q)
+ }
+
+ override def removeMapStorageFor(name: String, key: AnyRef) = {
+ nullSafeFindOne(name) match {
+ case None =>
+ case Some(dbo) => {
+ val orig = dbo.get(VALUE).asInstanceOf[DBObject].toMap
+ orig.remove(key.asInstanceOf[String])
+
+ // remove existing reference
+ removeMapStorageFor(name)
+ // and insert
+ coll.insert(new BasicDBObject().append(KEY, name).append(VALUE, orig))
+ }
+ }
+ }
+
+ override def getMapStorageEntryFor(name: String,
+ key: AnyRef): Option[AnyRef] = {
+ getValueForKey(name, key.asInstanceOf[String])
+ }
+
+ override def getMapStorageSizeFor(name: String): Int = {
+ nullSafeFindOne(name) match {
+ case None => 0
+ case Some(dbo) =>
+ dbo.get(VALUE).asInstanceOf[JMap[String, AnyRef]].keySet.size
+ }
+ }
+
+ override def getMapStorageFor(name: String): List[Tuple2[AnyRef, AnyRef]] = {
+ val m =
+ nullSafeFindOne(name) match {
+ case None =>
+ throw new Predef.NoSuchElementException(name + " not present")
+ case Some(dbo) =>
+ dbo.get(VALUE).asInstanceOf[JMap[String, AnyRef]]
+ }
+ val n =
+ List(m.keySet.toArray: _*).asInstanceOf[List[String]]
+ val vals =
+ for(s <- n)
+ yield (s, serializer.in[AnyRef](m.get(s).asInstanceOf[Array[Byte]]))
+ vals.asInstanceOf[List[Tuple2[String, AnyRef]]]
+ }
+
+ override def getMapStorageRangeFor(name: String, start: Option[AnyRef],
+ finish: Option[AnyRef],
+ count: Int): List[Tuple2[AnyRef, AnyRef]] = {
+ val m =
+ nullSafeFindOne(name) match {
+ case None =>
+ throw new Predef.NoSuchElementException(name + " not present")
+ case Some(dbo) =>
+ dbo.get(VALUE).asInstanceOf[JMap[String, AnyRef]]
+ }
+
+ /**
+ * count is the max number of results to return. Start with
+ * start or 0 (if start is not defined) and go until
+ * you hit finish or count.
+ */
+ val s = if (start.isDefined) start.get.asInstanceOf[Int] else 0
+ val cnt =
+ if (finish.isDefined) {
+ val f = finish.get.asInstanceOf[Int]
+ if (f >= s) Math.min(count, (f - s)) else count
+ }
+ else count
+
+ val n =
+ List(m.keySet.toArray: _*).asInstanceOf[List[String]].sort((e1, e2) => (e1 compareTo e2) < 0).slice(s, s + cnt)
+ val vals =
+ for(s <- n)
+ yield (s, serializer.in[AnyRef](m.get(s).asInstanceOf[Array[Byte]]))
+ vals.asInstanceOf[List[Tuple2[String, AnyRef]]]
+ }
+
+ private def getValueForKey(name: String, key: String): Option[AnyRef] = {
+ try {
+ nullSafeFindOne(name) match {
+ case None => None
+ case Some(dbo) =>
+ Some(serializer.in[AnyRef](
+ dbo.get(VALUE)
+ .asInstanceOf[JMap[String, AnyRef]]
+ .get(key).asInstanceOf[Array[Byte]]))
+ }
+ } catch {
+ case e =>
+ throw new Predef.NoSuchElementException(e.getMessage)
+ }
+ }
+
+ override def insertVectorStorageEntriesFor(name: String, elements: List[AnyRef]) = {
+ val q = new BasicDBObject
+ q.put(KEY, name)
+
+ val currentList =
+ coll.findOneNS(q) match {
+ case None =>
+ new JArrayList[AnyRef]
+ case Some(dbo) =>
+ dbo.get(VALUE).asInstanceOf[JArrayList[AnyRef]]
+ }
+ if (!currentList.isEmpty) {
+ // record exists
+ // remove before adding
+ coll.remove(q)
+ }
+
+ // add to the current list
+ elements.map(serializer.out(_)).foreach(currentList.add(_))
+
+ coll.insert(
+ new BasicDBObject()
+ .append(KEY, name)
+ .append(VALUE, currentList)
+ )
+ }
+
+ override def insertVectorStorageEntryFor(name: String, element: AnyRef) = {
+ insertVectorStorageEntriesFor(name, List(element))
+ }
+
+ override def getVectorStorageEntryFor(name: String, index: Int): AnyRef = {
+ try {
+ val o =
+ nullSafeFindOne(name) match {
+ case None =>
+ throw new Predef.NoSuchElementException(name + " not present")
+
+ case Some(dbo) =>
+ dbo.get(VALUE).asInstanceOf[JList[AnyRef]]
+ }
+ serializer.in[AnyRef](
+ o.get(index).asInstanceOf[Array[Byte]])
+ } catch {
+ case e =>
+ throw new Predef.NoSuchElementException(e.getMessage)
+ }
+ }
+
+ override def getVectorStorageRangeFor(name: String,
+ start: Option[Int], finish: Option[Int], count: Int): List[AnyRef] = {
+ try {
+ val o =
+ nullSafeFindOne(name) match {
+ case None =>
+ throw new Predef.NoSuchElementException(name + " not present")
+
+ case Some(dbo) =>
+ dbo.get(VALUE).asInstanceOf[JList[AnyRef]]
+ }
+
+ // pick the subrange and make a Scala list
+ val l =
+ List(o.subList(start.get, start.get + count).toArray: _*)
+
+ for(e <- l)
+ yield serializer.in[AnyRef](e.asInstanceOf[Array[Byte]])
+ } catch {
+ case e =>
+ throw new Predef.NoSuchElementException(e.getMessage)
+ }
+ }
+
+ override def getVectorStorageSizeFor(name: String): Int = {
+ nullSafeFindOne(name) match {
+ case None => 0
+ case Some(dbo) =>
+ dbo.get(VALUE).asInstanceOf[JList[AnyRef]].size
+ }
+ }
+
+ private def nullSafeFindOne(name: String): Option[DBObject] = {
+ val o = new BasicDBObject
+ o.put(KEY, name)
+ coll.findOneNS(o)
+ }
+
+ override def insertRefStorageFor(name: String, element: AnyRef) = {
+ nullSafeFindOne(name) match {
+ case None =>
+ case Some(dbo) => {
+ val q = new BasicDBObject
+ q.put(KEY, name)
+ coll.remove(q)
+ }
+ }
+ coll.insert(
+ new BasicDBObject()
+ .append(KEY, name)
+ .append(VALUE, serializer.out(element)))
+ }
+
+ override def getRefStorageFor(name: String): Option[AnyRef] = {
+ nullSafeFindOne(name) match {
+ case None => None
+ case Some(dbo) =>
+ Some(serializer.in[AnyRef](dbo.get(VALUE).asInstanceOf[Array[Byte]]))
+ }
+ }
+}
diff --git a/akka-persistence/src/main/scala/PersistentState.scala b/akka-persistence/src/main/scala/PersistentState.scala
new file mode 100644
index 0000000000..8617f05c7e
--- /dev/null
+++ b/akka-persistence/src/main/scala/PersistentState.scala
@@ -0,0 +1,334 @@
+/**
+ * Copyright (C) 2009 Scalable Solutions.
+ */
+
+package se.scalablesolutions.akka.state
+
+import stm.TransactionManagement
+import akka.collection._
+
+import org.codehaus.aspectwerkz.proxy.Uuid
+
+import scala.collection.mutable.{ArrayBuffer, HashMap}
+
+sealed abstract class PersistentStateConfig
+abstract class PersistentStorageConfig extends PersistentStateConfig
+case class CassandraStorageConfig extends PersistentStorageConfig
+case class TerracottaStorageConfig extends PersistentStorageConfig
+case class TokyoCabinetStorageConfig extends PersistentStorageConfig
+case class MongoStorageConfig extends PersistentStorageConfig
+
+/**
+ * Scala API.
+ *
+ * Example Scala usage:
+ *
+ * val myMap = PersistentState.newMap(CassandraStorageConfig)
+ *
+ */
+object PersistentState extends PersistentState
+
+/**
+ * Java API.
+ *
+ * Example Java usage:
+ *
+ * PersistentState state = new PersistentState();
+ * TransactionalMap myMap = state.newMap(new CassandraStorageConfig());
+ *
+ */
+class PersistentState {
+ def newMap(config: PersistentStorageConfig): TransactionalMap[AnyRef, AnyRef] = config match {
+ case CassandraStorageConfig() => new CassandraPersistentTransactionalMap
+ case MongoStorageConfig() => new MongoPersistentTransactionalMap
+ case TerracottaStorageConfig() => throw new UnsupportedOperationException
+ case TokyoCabinetStorageConfig() => throw new UnsupportedOperationException
+ }
+
+ def newVector(config: PersistentStorageConfig): TransactionalVector[AnyRef] = config match {
+ case CassandraStorageConfig() => new CassandraPersistentTransactionalVector
+ case MongoStorageConfig() => new MongoPersistentTransactionalVector
+ case TerracottaStorageConfig() => throw new UnsupportedOperationException
+ case TokyoCabinetStorageConfig() => throw new UnsupportedOperationException
+ }
+
+ def newRef(config: PersistentStorageConfig): TransactionalRef[AnyRef] = config match {
+ case CassandraStorageConfig() => new CassandraPersistentTransactionalRef
+ case MongoStorageConfig() => new MongoPersistentTransactionalRef
+ case TerracottaStorageConfig() => throw new UnsupportedOperationException
+ case TokyoCabinetStorageConfig() => throw new UnsupportedOperationException
+ }
+}
+
+/**
+ * Base class for all persistent transactional map implementations should extend.
+ * Implements a Unit of Work, records changes into a change set.
+ *
+ * Not thread-safe, but should only be used from within an Actor, e.g. one single thread at a time.
+ *
+ * @author Jonas Bonér
+ */
+abstract class PersistentTransactionalMap[K, V] extends TransactionalMap[K, V] {
+
+ // FIXME: need to handle remove in another changeSet
+ protected[akka] val changeSet = new HashMap[K, V]
+
+ def getRange(start: Option[AnyRef], count: Int)
+
+ // ---- For Transactional ----
+ override def begin = {}
+
+ override def rollback = changeSet.clear
+
+ // ---- For scala.collection.mutable.Map ----
+ override def put(key: K, value: V): Option[V] = {
+ verifyTransaction
+ changeSet += key -> value
+ None // always return None to speed up writes (else we would need to go to the DB to fetch the previous value)
+ }
+
+ override def -=(key: K) = remove(key)
+
+ override def update(key: K, value: V) = put(key, value)
+}
+
+/**
+ * Implementation of PersistentTransactionalMap for every concrete
+ * storage will have the same workflow. This abstracts the workflow.
+ *
+ * Subclasses just need to provide the actual concrete instance for the
+ * abstract val storage.
+ *
+ * @author Jonas Bonér
+ */
+abstract class TemplatePersistentTransactionalMap extends PersistentTransactionalMap[AnyRef, AnyRef] {
+
+ // to be concretized in subclasses
+ val storage: MapStorage
+
+ override def remove(key: AnyRef) = {
+ verifyTransaction
+ if (changeSet.contains(key)) changeSet -= key
+ else storage.removeMapStorageFor(uuid, key)
+ }
+
+ override def getRange(start: Option[AnyRef], count: Int) =
+ getRange(start, None, count)
+
+ def getRange(start: Option[AnyRef], finish: Option[AnyRef], count: Int) = {
+ verifyTransaction
+ try {
+ storage.getMapStorageRangeFor(uuid, start, finish, count)
+ } catch {
+ case e: Exception => Nil
+ }
+ }
+
+ // ---- For Transactional ----
+ override def commit = {
+ storage.insertMapStorageEntriesFor(uuid, changeSet.toList)
+ changeSet.clear
+ }
+
+ // ---- Overriding scala.collection.mutable.Map behavior ----
+ override def clear = {
+ verifyTransaction
+ try {
+ storage.removeMapStorageFor(uuid)
+ } catch {
+ case e: Exception => {}
+ }
+ }
+
+ override def contains(key: AnyRef): Boolean = {
+ try {
+ verifyTransaction
+ storage.getMapStorageEntryFor(uuid, key).isDefined
+ } catch {
+ case e: Exception => false
+ }
+ }
+
+ override def size: Int = {
+ verifyTransaction
+ try {
+ storage.getMapStorageSizeFor(uuid)
+ } catch {
+ case e: Exception => 0
+ }
+ }
+
+ // ---- For scala.collection.mutable.Map ----
+ override def get(key: AnyRef): Option[AnyRef] = {
+ verifyTransaction
+ // if (changeSet.contains(key)) changeSet.get(key)
+ // else {
+ val result = try {
+ storage.getMapStorageEntryFor(uuid, key)
+ } catch {
+ case e: Exception => None
+ }
+ result
+ //}
+ }
+
+ override def elements: Iterator[Tuple2[AnyRef, AnyRef]] = {
+ //verifyTransaction
+ new Iterator[Tuple2[AnyRef, AnyRef]] {
+ private val originalList: List[Tuple2[AnyRef, AnyRef]] = try {
+ storage.getMapStorageFor(uuid)
+ } catch {
+ case e: Throwable => Nil
+ }
+ private var elements = originalList.reverse
+ override def next: Tuple2[AnyRef, AnyRef]= synchronized {
+ val element = elements.head
+ elements = elements.tail
+ element
+ }
+ override def hasNext: Boolean = synchronized { !elements.isEmpty }
+ }
+ }
+}
+
+
+/**
+ * Implements a persistent transactional map based on the Cassandra distributed P2P key-value storage.
+ *
+ * @author Debasish Ghosh
+ */
+class CassandraPersistentTransactionalMap extends TemplatePersistentTransactionalMap {
+ val storage = CassandraStorage
+}
+
+/**
+ * Implements a persistent transactional map based on the MongoDB document-oriented database.
+ *
+ * @author Debasish Ghosh
+ */
+class MongoPersistentTransactionalMap extends TemplatePersistentTransactionalMap {
+ val storage = MongoStorage
+}
+
+/**
+ * Base class for all persistent transactional vector implementations should extend.
+ * Implements a Unit of Work, records changes into a change set.
+ *
+ * Not thread-safe, but should only be used from within an Actor, e.g. one single thread at a time.
+ *
+ * @author Jonas Bonér
+ */
+abstract class PersistentTransactionalVector[T] extends TransactionalVector[T] {
+
+ // FIXME: need to handle remove in another changeSet
+ protected[akka] val changeSet = new ArrayBuffer[T]
+
+ // ---- For Transactional ----
+ override def begin = {}
+
+ override def rollback = changeSet.clear
+
+ // ---- For TransactionalVector ----
+ override def add(value: T) = {
+ verifyTransaction
+ changeSet += value
+ }
+}
+
+/**
+ * Implements a template for a concrete persistent transactional vector based storage.
+ *
+ * @author Debasish Ghosh
+ */
+abstract class TemplatePersistentTransactionalVector extends PersistentTransactionalVector[AnyRef] {
+
+ val storage: VectorStorage
+
+ // ---- For TransactionalVector ----
+ override def get(index: Int): AnyRef = {
+ verifyTransaction
+ if (changeSet.size > index) changeSet(index)
+ else storage.getVectorStorageEntryFor(uuid, index)
+ }
+
+ override def getRange(start: Int, count: Int): List[AnyRef] =
+ getRange(Some(start), None, count)
+
+ def getRange(start: Option[Int], finish: Option[Int], count: Int): List[AnyRef] = {
+ verifyTransaction
+ storage.getVectorStorageRangeFor(uuid, start, finish, count)
+ }
+
+ override def length: Int = {
+ verifyTransaction
+ storage.getVectorStorageSizeFor(uuid)
+ }
+
+ override def apply(index: Int): AnyRef = get(index)
+
+ override def first: AnyRef = get(0)
+
+ override def last: AnyRef = {
+ verifyTransaction
+ val l = length
+ if (l == 0) throw new NoSuchElementException("Vector is empty")
+ get(length - 1)
+ }
+
+ // ---- For Transactional ----
+ override def commit = {
+ // FIXME: should use batch function once the bug is resolved
+ for (element <- changeSet) storage.insertVectorStorageEntryFor(uuid, element)
+ changeSet.clear
+ }
+}
+
+/**
+ * Implements a persistent transactional vector based on the Cassandra distributed P2P key-value storage.
+ *
+ * @author Debasish Ghosh
+ */
+class CassandraPersistentTransactionalVector extends TemplatePersistentTransactionalVector {
+ val storage = CassandraStorage
+}
+
+/**
+ * Implements a persistent transactional vector based on the MongoDB document-oriented database.
+ *
+ * @author Debasish Ghosh
+ */
+class MongoPersistentTransactionalVector extends TemplatePersistentTransactionalVector {
+ val storage = MongoStorage
+}
+
+abstract class TemplatePersistentTransactionalRef extends TransactionalRef[AnyRef] {
+ val storage: RefStorage
+
+ override def commit = if (ref.isDefined) {
+ storage.insertRefStorageFor(uuid, ref.get)
+ ref = None
+ }
+
+ override def rollback = ref = None
+
+ override def get: Option[AnyRef] = {
+ verifyTransaction
+ storage.getRefStorageFor(uuid)
+ }
+
+ override def isDefined: Boolean = get.isDefined
+
+ override def getOrElse(default: => AnyRef): AnyRef = {
+ val ref = get
+ if (ref.isDefined) ref.get
+ else default
+ }
+}
+
+class CassandraPersistentTransactionalRef extends TemplatePersistentTransactionalRef {
+ val storage = CassandraStorage
+}
+
+class MongoPersistentTransactionalRef extends TemplatePersistentTransactionalRef {
+ val storage = MongoStorage
+}
diff --git a/kernel/src/main/scala/state/Pool.scala b/akka-persistence/src/main/scala/Pool.scala
similarity index 98%
rename from kernel/src/main/scala/state/Pool.scala
rename to akka-persistence/src/main/scala/Pool.scala
index 6391645562..697366b7d6 100644
--- a/kernel/src/main/scala/state/Pool.scala
+++ b/akka-persistence/src/main/scala/Pool.scala
@@ -2,7 +2,7 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.state
+package se.scalablesolutions.akka.state
import org.apache.commons.pool._
import org.apache.commons.pool.impl._
diff --git a/kernel/src/main/scala/state/Storage.scala b/akka-persistence/src/main/scala/Storage.scala
similarity index 87%
rename from kernel/src/main/scala/state/Storage.scala
rename to akka-persistence/src/main/scala/Storage.scala
index 927c4f0361..a041a932e0 100644
--- a/kernel/src/main/scala/state/Storage.scala
+++ b/akka-persistence/src/main/scala/Storage.scala
@@ -1,8 +1,7 @@
-package se.scalablesolutions.akka.kernel.state
+package se.scalablesolutions.akka.state
// abstracts persistence storage
-trait Storage {
-}
+trait Storage
// for Maps
trait MapStorage extends Storage {
@@ -13,8 +12,7 @@ trait MapStorage extends Storage {
def getMapStorageEntryFor(name: String, key: AnyRef): Option[AnyRef]
def getMapStorageSizeFor(name: String): Int
def getMapStorageFor(name: String): List[Tuple2[AnyRef, AnyRef]]
- def getMapStorageRangeFor(name: String, start: Option[AnyRef],
- finish: Option[AnyRef], count: Int): List[Tuple2[AnyRef, AnyRef]]
+ def getMapStorageRangeFor(name: String, start: Option[AnyRef], finish: Option[AnyRef], count: Int): List[Tuple2[AnyRef, AnyRef]]
}
// for vectors
diff --git a/akka-persistence/src/test/scala/AllTest.scala b/akka-persistence/src/test/scala/AllTest.scala
new file mode 100644
index 0000000000..60374da92d
--- /dev/null
+++ b/akka-persistence/src/test/scala/AllTest.scala
@@ -0,0 +1,18 @@
+package se.scalablesolutions.akka
+
+import akka.state.{MongoStorageSpec, MongoPersistentActorSpec, CassandraPersistentActorSpec}
+import junit.framework.Test
+import junit.framework.TestCase
+import junit.framework.TestSuite
+
+object AllTest extends TestCase {
+ def suite(): Test = {
+ val suite = new TestSuite("All Scala tests")
+ //suite.addTestSuite(classOf[CassandraPersistentActorSpec])
+ //suite.addTestSuite(classOf[MongoPersistentActorSpec])
+ //suite.addTestSuite(classOf[MongoStorageSpec])
+ suite
+ }
+
+ def main(args: Array[String]) = junit.textui.TestRunner.run(suite)
+}
\ No newline at end of file
diff --git a/kernel/src/test/scala/PersistentActorSpec.scala b/akka-persistence/src/test/scala/CassandraPersistentActorSpec.scala
similarity index 75%
rename from kernel/src/test/scala/PersistentActorSpec.scala
rename to akka-persistence/src/test/scala/CassandraPersistentActorSpec.scala
index 0d8b464dd1..53a4d5e625 100644
--- a/kernel/src/test/scala/PersistentActorSpec.scala
+++ b/akka-persistence/src/test/scala/CassandraPersistentActorSpec.scala
@@ -1,22 +1,38 @@
-package se.scalablesolutions.akka.kernel.actor
+package se.scalablesolutions.akka.state
+import akka.actor.Actor
import java.util.concurrent.locks.ReentrantLock
import java.util.concurrent.TimeUnit
import junit.framework.TestCase
-import kernel.Kernel
-import kernel.reactor._
+import reactor._
-import kernel.state.{CassandraStorageConfig, TransactionalState}
import org.junit.{Test, Before}
import org.junit.Assert._
-class PersistentActor extends Actor {
+case class GetMapState(key: String)
+case object GetVectorState
+case object GetVectorSize
+case object GetRefState
+
+case class SetMapState(key: String, value: String)
+case class SetVectorState(key: String)
+case class SetRefState(key: String)
+case class Success(key: String, value: String)
+case class Failure(key: String, value: String, failer: Actor)
+
+case class SetMapStateOneWay(key: String, value: String)
+case class SetVectorStateOneWay(key: String)
+case class SetRefStateOneWay(key: String)
+case class SuccessOneWay(key: String, value: String)
+case class FailureOneWay(key: String, value: String, failer: Actor)
+
+class CassandraPersistentActor extends Actor {
timeout = 100000
makeTransactionRequired
- private val mapState = TransactionalState.newPersistentMap(CassandraStorageConfig())
- private val vectorState = TransactionalState.newPersistentVector(CassandraStorageConfig())
- private val refState = TransactionalState.newPersistentRef(CassandraStorageConfig())
+ private val mapState = PersistentState.newMap(CassandraStorageConfig())
+ private val vectorState = PersistentState.newVector(CassandraStorageConfig())
+ private val refState = PersistentState.newRef(CassandraStorageConfig())
def receive: PartialFunction[Any, Unit] = {
case GetMapState(key) =>
@@ -56,19 +72,11 @@ class PersistentActor extends Actor {
}
}
-object PersistenceManager {
- @volatile var isRunning = false
- def init = if (!isRunning) {
- Kernel.startCassandra
- isRunning = true
- }
-}
-class PersistentActorSpec extends TestCase {
- PersistenceManager.init
+class CassandraPersistentActorSpec extends TestCase {
@Test
def testMapShouldNotRollbackStateForStatefulServerInCaseOfSuccess = {
- val stateful = new PersistentActor
+ val stateful = new CassandraPersistentActor
stateful.start
stateful !! SetMapState("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "init") // set init state
stateful !! Success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired
@@ -77,7 +85,7 @@ class PersistentActorSpec extends TestCase {
@Test
def testMapShouldRollbackStateForStatefulServerInCaseOfFailure = {
- val stateful = new PersistentActor
+ val stateful = new CassandraPersistentActor
stateful.start
stateful !! SetMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") // set init state
val failer = new PersistentFailerActor
@@ -91,7 +99,7 @@ class PersistentActorSpec extends TestCase {
@Test
def testVectorShouldNotRollbackStateForStatefulServerInCaseOfSuccess = {
- val stateful = new PersistentActor
+ val stateful = new CassandraPersistentActor
stateful.start
stateful !! SetVectorState("init") // set init state
stateful !! Success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired
@@ -100,7 +108,7 @@ class PersistentActorSpec extends TestCase {
@Test
def testVectorShouldRollbackStateForStatefulServerInCaseOfFailure = {
- val stateful = new PersistentActor
+ val stateful = new CassandraPersistentActor
stateful.start
stateful !! SetVectorState("init") // set init state
val failer = new PersistentFailerActor
@@ -114,7 +122,7 @@ class PersistentActorSpec extends TestCase {
@Test
def testRefShouldNotRollbackStateForStatefulServerInCaseOfSuccess = {
- val stateful = new PersistentActor
+ val stateful = new CassandraPersistentActor
stateful.start
stateful !! SetRefState("init") // set init state
stateful !! Success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") // transactionrequired
@@ -123,7 +131,7 @@ class PersistentActorSpec extends TestCase {
@Test
def testRefShouldRollbackStateForStatefulServerInCaseOfFailure = {
- val stateful = new PersistentActor
+ val stateful = new CassandraPersistentActor
stateful.start
stateful !! SetRefState("init") // set init state
val failer = new PersistentFailerActor
diff --git a/kernel/src/test/scala/MongoPersistentActorSpec.scala b/akka-persistence/src/test/scala/MongoPersistentActorSpec.scala
similarity index 71%
rename from kernel/src/test/scala/MongoPersistentActorSpec.scala
rename to akka-persistence/src/test/scala/MongoPersistentActorSpec.scala
index eb5e04c85e..830c36bfb5 100644
--- a/kernel/src/test/scala/MongoPersistentActorSpec.scala
+++ b/akka-persistence/src/test/scala/MongoPersistentActorSpec.scala
@@ -1,11 +1,11 @@
-package se.scalablesolutions.akka.kernel.actor
-
+package se.scalablesolutions.akka.state
+import akka.actor.Actor
import junit.framework.TestCase
import org.junit.{Test, Before}
import org.junit.Assert._
-
-import kernel.state.{MongoStorageConfig, TransactionalState}
+import dispatch.json._
+import dispatch.json.Js._
/**
* A persistent actor based on MongoDB storage.
@@ -29,9 +29,9 @@ case object LogSize
class BankAccountActor extends Actor {
makeTransactionRequired
private val accountState =
- TransactionalState.newPersistentMap(MongoStorageConfig())
+ PersistentState.newMap(MongoStorageConfig())
private val txnLog =
- TransactionalState.newPersistentVector(MongoStorageConfig())
+ PersistentState.newVector(MongoStorageConfig())
def receive: PartialFunction[Any, Unit] = {
// check balance
@@ -45,7 +45,10 @@ class BankAccountActor extends Actor {
val m: BigInt =
accountState.get(accountNo) match {
case None => 0
- case Some(v) => BigInt(v.asInstanceOf[String])
+ case Some(v) => {
+ val JsNumber(n) = v.asInstanceOf[JsValue]
+ BigInt(n.toString)
+ }
}
accountState.put(accountNo, (m - amount))
if (amount > m)
@@ -75,7 +78,10 @@ class BankAccountActor extends Actor {
val m: BigInt =
accountState.get(accountNo) match {
case None => 0
- case Some(v) => BigInt(v.asInstanceOf[String])
+ case Some(v) => {
+ val JsNumber(n) = v.asInstanceOf[JsValue]
+ BigInt(n.toString)
+ }
}
accountState.put(accountNo, (m + amount))
reply(m + amount)
@@ -84,14 +90,6 @@ class BankAccountActor extends Actor {
reply(txnLog.length.asInstanceOf[AnyRef])
}
}
-/*
-@serializable class PersistentFailerActor extends Actor {
- makeTransactionRequired
- def receive: PartialFunction[Any, Unit] = {
- case "Failure" =>
- throw new RuntimeException("expected")
- }
-}*/
class MongoPersistentActorSpec extends TestCase {
@Test
@@ -102,16 +100,19 @@ class MongoPersistentActorSpec extends TestCase {
failer.start
bactor !! Credit("a-123", 5000)
bactor !! Debit("a-123", 3000, failer)
- assertEquals(BigInt(2000),
- BigInt((bactor !! Balance("a-123")).get.asInstanceOf[String]))
+ val b = (bactor !! Balance("a-123")).get.asInstanceOf[JsValue]
+ val JsNumber(n) = b
+ assertEquals(BigInt(2000), BigInt(n.toString))
bactor !! Credit("a-123", 7000)
- assertEquals(BigInt(9000),
- BigInt((bactor !! Balance("a-123")).get.asInstanceOf[String]))
+ val b1 = (bactor !! Balance("a-123")).get.asInstanceOf[JsValue]
+ val JsNumber(n1) = b1
+ assertEquals(BigInt(9000), BigInt(n1.toString))
bactor !! Debit("a-123", 8000, failer)
- assertEquals(BigInt(1000),
- BigInt((bactor !! Balance("a-123")).get.asInstanceOf[String]))
+ val b2 = (bactor !! Balance("a-123")).get.asInstanceOf[JsValue]
+ val JsNumber(n2) = b2
+ assertEquals(BigInt(1000), BigInt(n2.toString))
assertEquals(7, (bactor !! LogSize).get)
}
@@ -120,8 +121,10 @@ class MongoPersistentActorSpec extends TestCase {
val bactor = new BankAccountActor
bactor.start
bactor !! Credit("a-123", 5000)
- assertEquals(BigInt(5000),
- BigInt((bactor !! Balance("a-123")).get.asInstanceOf[String]))
+
+ val b = (bactor !! Balance("a-123")).get.asInstanceOf[JsValue]
+ val JsNumber(n) = b
+ assertEquals(BigInt(5000), BigInt(n.toString))
val failer = new PersistentFailerActor
failer.start
@@ -130,8 +133,9 @@ class MongoPersistentActorSpec extends TestCase {
fail("should throw exception")
} catch { case e: RuntimeException => {}}
- assertEquals(BigInt(5000),
- BigInt((bactor !! Balance("a-123")).get.asInstanceOf[String]))
+ val b1 = (bactor !! Balance("a-123")).get.asInstanceOf[JsValue]
+ val JsNumber(n1) = b1
+ assertEquals(BigInt(5000), BigInt(n1.toString))
// should not count the failed one
assertEquals(3, (bactor !! LogSize).get)
@@ -142,8 +146,9 @@ class MongoPersistentActorSpec extends TestCase {
val bactor = new BankAccountActor
bactor.start
bactor !! Credit("a-123", 5000)
- assertEquals(BigInt(5000),
- BigInt((bactor !! Balance("a-123")).get.asInstanceOf[String]))
+ val b = (bactor !! Balance("a-123")).get.asInstanceOf[JsValue]
+ val JsNumber(n) = b
+ assertEquals(BigInt(5000), BigInt(n.toString))
val failer = new PersistentFailerActor
failer.start
@@ -152,8 +157,9 @@ class MongoPersistentActorSpec extends TestCase {
fail("should throw exception")
} catch { case e: RuntimeException => {}}
- assertEquals(BigInt(5000),
- BigInt((bactor !! Balance("a-123")).get.asInstanceOf[String]))
+ val b1 = (bactor !! Balance("a-123")).get.asInstanceOf[JsValue]
+ val JsNumber(n1) = b1
+ assertEquals(BigInt(5000), BigInt(n1.toString))
// should not count the failed one
assertEquals(3, (bactor !! LogSize).get)
diff --git a/kernel/src/test/scala/MongoStorageSpec.scala b/akka-persistence/src/test/scala/MongoStorageSpec.scala
similarity index 78%
rename from kernel/src/test/scala/MongoStorageSpec.scala
rename to akka-persistence/src/test/scala/MongoStorageSpec.scala
index c80afb00a7..fd0a50ada4 100644
--- a/kernel/src/test/scala/MongoStorageSpec.scala
+++ b/akka-persistence/src/test/scala/MongoStorageSpec.scala
@@ -1,306 +1,337 @@
-package se.scalablesolutions.akka.kernel.state
-
-import junit.framework.TestCase
-
-import org.junit.{Test, Before}
-import org.junit.Assert._
-
-class MongoStorageSpec extends TestCase {
-
- val changeSetV = new scala.collection.mutable.ArrayBuffer[AnyRef]
- val changeSetM = new scala.collection.mutable.HashMap[AnyRef, AnyRef]
-
- override def setUp = {
- MongoStorage.coll.drop
- }
-
- @Test
- def testVectorInsertForTransactionId = {
- changeSetV += "debasish" // string
- changeSetV += List(1, 2, 3) // Scala List
- changeSetV += List(100, 200)
- MongoStorage.insertVectorStorageEntriesFor("U-A1", changeSetV.toList)
- assertEquals(
- 3,
- MongoStorage.getVectorStorageSizeFor("U-A1"))
- changeSetV.clear
-
- // changeSetV should be reinitialized
- changeSetV += List(12, 23, 45)
- changeSetV += "maulindu"
- MongoStorage.insertVectorStorageEntriesFor("U-A1", changeSetV.toList)
- assertEquals(
- 5,
- MongoStorage.getVectorStorageSizeFor("U-A1"))
-
- // add more to the same changeSetV
- changeSetV += "ramanendu"
- changeSetV += Map(1 -> "dg", 2 -> "mc")
-
- // add for a diff transaction
- MongoStorage.insertVectorStorageEntriesFor("U-A2", changeSetV.toList)
- assertEquals(
- 4,
- MongoStorage.getVectorStorageSizeFor("U-A2"))
-
- // previous transaction change set should remain same
- assertEquals(
- 5,
- MongoStorage.getVectorStorageSizeFor("U-A1"))
-
- // test single element entry
- MongoStorage.insertVectorStorageEntryFor("U-A1", Map(1->1, 2->4, 3->9))
- assertEquals(
- 6,
- MongoStorage.getVectorStorageSizeFor("U-A1"))
- }
-
- @Test
- def testVectorFetchForKeys = {
-
- // initially everything 0
- assertEquals(
- 0,
- MongoStorage.getVectorStorageSizeFor("U-A2"))
-
- assertEquals(
- 0,
- MongoStorage.getVectorStorageSizeFor("U-A1"))
-
- // get some stuff
- changeSetV += "debasish"
- changeSetV += List(12, 13, 14)
- MongoStorage.insertVectorStorageEntriesFor("U-A1", changeSetV.toList)
-
- assertEquals(
- 2,
- MongoStorage.getVectorStorageSizeFor("U-A1"))
-
- assertEquals(
- "debasish",
- MongoStorage.getVectorStorageEntryFor("U-A1", 0).asInstanceOf[String])
-
- assertEquals(
- List(12, 13, 14),
- MongoStorage.getVectorStorageEntryFor("U-A1", 1).asInstanceOf[List[Int]])
-
- changeSetV.clear
- changeSetV += Map(1->1, 2->4, 3->9)
- changeSetV += BigInt(2310)
- changeSetV += List(100, 200, 300)
- MongoStorage.insertVectorStorageEntriesFor("U-A1", changeSetV.toList)
-
- assertEquals(
- 5,
- MongoStorage.getVectorStorageSizeFor("U-A1"))
-
- val r =
- MongoStorage.getVectorStorageRangeFor("U-A1", Some(1), None, 3)
-
- assertEquals(3, r.size)
- assertEquals(List(12, 13, 14), r(0).asInstanceOf[List[Int]])
- }
-
- @Test
- def testVectorFetchForNonExistentKeys = {
- try {
- MongoStorage.getVectorStorageEntryFor("U-A1", 1)
- fail("should throw an exception")
- } catch {case e: Predef.NoSuchElementException => {}}
-
- try {
- MongoStorage.getVectorStorageRangeFor("U-A1", Some(2), None, 12)
- fail("should throw an exception")
- } catch {case e: Predef.NoSuchElementException => {}}
- }
-
- @Test
- def testMapInsertForTransactionId = {
- case class Foo(no: Int, name: String)
- fillMap
-
- // add some more to changeSet
- changeSetM += "5" -> Foo(12, "dg")
- changeSetM += "6" -> java.util.Calendar.getInstance.getTime
-
- // insert all into Mongo
- MongoStorage.insertMapStorageEntriesFor("U-M1", changeSetM.toList)
- assertEquals(
- 6,
- MongoStorage.getMapStorageSizeFor("U-M1"))
-
- // individual insert api
- MongoStorage.insertMapStorageEntryFor("U-M1", "7", "akka")
- MongoStorage.insertMapStorageEntryFor("U-M1", "8", List(23, 25))
- assertEquals(
- 8,
- MongoStorage.getMapStorageSizeFor("U-M1"))
-
- // add the same changeSet for another transaction
- MongoStorage.insertMapStorageEntriesFor("U-M2", changeSetM.toList)
- assertEquals(
- 6,
- MongoStorage.getMapStorageSizeFor("U-M2"))
-
- // the first transaction should remain the same
- assertEquals(
- 8,
- MongoStorage.getMapStorageSizeFor("U-M1"))
- changeSetM.clear
- }
-
- @Test
- def testMapContents = {
- fillMap
- MongoStorage.insertMapStorageEntriesFor("U-M1", changeSetM.toList)
- MongoStorage.getMapStorageEntryFor("U-M1", "2") match {
- case Some(x) => assertEquals("peter", x.asInstanceOf[String])
- case None => fail("should fetch peter")
- }
- MongoStorage.getMapStorageEntryFor("U-M1", "4") match {
- case Some(x) => assertEquals(3, x.asInstanceOf[List[Int]].size)
- case None => fail("should fetch list")
- }
- MongoStorage.getMapStorageEntryFor("U-M1", "3") match {
- case Some(x) => assertEquals(2, x.asInstanceOf[List[Int]].size)
- case None => fail("should fetch list")
- }
-
- // get the entire map
- val l: List[Tuple2[AnyRef, AnyRef]] =
- MongoStorage.getMapStorageFor("U-M1")
-
- assertEquals(4, l.size)
- assertTrue(l.map(_._1).contains("1"))
- assertTrue(l.map(_._1).contains("2"))
- assertTrue(l.map(_._1).contains("3"))
- assertTrue(l.map(_._1).contains("4"))
-
- assertTrue(l.map(_._2).contains("john"))
-
- // trying to fetch for a non-existent transaction will throw
- try {
- MongoStorage.getMapStorageFor("U-M2")
- fail("should throw an exception")
- } catch {case e: Predef.NoSuchElementException => {}}
-
- changeSetM.clear
- }
-
- @Test
- def testMapContentsByRange = {
- fillMap
- changeSetM += "5" -> Map(1 -> "dg", 2 -> "mc")
- MongoStorage.insertMapStorageEntriesFor("U-M1", changeSetM.toList)
-
- // specify start and count
- val l: List[Tuple2[AnyRef, AnyRef]] =
- MongoStorage.getMapStorageRangeFor(
- "U-M1", Some(Integer.valueOf(2)), None, 3)
-
- assertEquals(3, l.size)
- assertEquals("3", l(0)._1.asInstanceOf[String])
- assertEquals(List(100, 200), l(0)._2.asInstanceOf[List[Int]])
- assertEquals("4", l(1)._1.asInstanceOf[String])
- assertEquals(List(10, 20, 30), l(1)._2.asInstanceOf[List[Int]])
-
- // specify start, finish and count where finish - start == count
- assertEquals(3,
- MongoStorage.getMapStorageRangeFor(
- "U-M1", Some(Integer.valueOf(2)), Some(Integer.valueOf(5)), 3).size)
-
- // specify start, finish and count where finish - start > count
- assertEquals(3,
- MongoStorage.getMapStorageRangeFor(
- "U-M1", Some(Integer.valueOf(2)), Some(Integer.valueOf(9)), 3).size)
-
- // do not specify start or finish
- assertEquals(3,
- MongoStorage.getMapStorageRangeFor(
- "U-M1", None, None, 3).size)
-
- // specify finish and count
- assertEquals(3,
- MongoStorage.getMapStorageRangeFor(
- "U-M1", None, Some(Integer.valueOf(3)), 3).size)
-
- // specify start, finish and count where finish < start
- assertEquals(3,
- MongoStorage.getMapStorageRangeFor(
- "U-M1", Some(Integer.valueOf(2)), Some(Integer.valueOf(1)), 3).size)
-
- changeSetM.clear
- }
-
- @Test
- def testMapStorageRemove = {
- fillMap
- changeSetM += "5" -> Map(1 -> "dg", 2 -> "mc")
-
- MongoStorage.insertMapStorageEntriesFor("U-M1", changeSetM.toList)
- assertEquals(5,
- MongoStorage.getMapStorageSizeFor("U-M1"))
-
- // remove key "3"
- MongoStorage.removeMapStorageFor("U-M1", "3")
- assertEquals(4,
- MongoStorage.getMapStorageSizeFor("U-M1"))
-
- try {
- MongoStorage.getMapStorageEntryFor("U-M1", "3")
- fail("should throw exception")
- } catch { case e => {}}
-
- // remove the whole stuff
- MongoStorage.removeMapStorageFor("U-M1")
-
- try {
- MongoStorage.getMapStorageFor("U-M1")
- fail("should throw exception")
- } catch { case e: NoSuchElementException => {}}
-
- changeSetM.clear
- }
-
- private def fillMap = {
- changeSetM += "1" -> "john"
- changeSetM += "2" -> "peter"
- changeSetM += "3" -> List(100, 200)
- changeSetM += "4" -> List(10, 20, 30)
- changeSetM
- }
-
- @Test
- def testRefStorage = {
- MongoStorage.getRefStorageFor("U-R1") match {
- case None =>
- case Some(o) => fail("should be None")
- }
-
- val m = Map("1"->1, "2"->4, "3"->9)
- MongoStorage.insertRefStorageFor("U-R1", m)
- MongoStorage.getRefStorageFor("U-R1") match {
- case None => fail("should not be empty")
- case Some(r) => {
- val a = r.asInstanceOf[Map[String, Int]]
- assertEquals(a.size, 3)
- assertEquals(a.get("1").get, 1)
- assertEquals(a.get("2").get, 4)
- assertEquals(a.get("3").get, 9)
- }
- }
-
- // insert another one
- // the previous one should be replaced
- val b = List("100", "jonas")
- MongoStorage.insertRefStorageFor("U-R1", b)
- MongoStorage.getRefStorageFor("U-R1") match {
- case None => fail("should not be empty")
- case Some(r) => {
- val a = r.asInstanceOf[List[String]]
- assertEquals("100", a(0))
- assertEquals("jonas", a(1))
- }
- }
- }
-}
+package se.scalablesolutions.akka.state
+
+import junit.framework.TestCase
+
+import org.junit.{Test, Before}
+import org.junit.Assert._
+import dispatch.json._
+import dispatch.json.Js._
+
+class MongoStorageSpec extends TestCase {
+
+ val changeSetV = new scala.collection.mutable.ArrayBuffer[AnyRef]
+ val changeSetM = new scala.collection.mutable.HashMap[AnyRef, AnyRef]
+
+ override def setUp = {
+ MongoStorage.coll.drop
+ }
+
+ @Test
+ def testVectorInsertForTransactionId = {
+ changeSetV += "debasish" // string
+ changeSetV += List(1, 2, 3) // Scala List
+ changeSetV += List(100, 200)
+ MongoStorage.insertVectorStorageEntriesFor("U-A1", changeSetV.toList)
+ assertEquals(
+ 3,
+ MongoStorage.getVectorStorageSizeFor("U-A1"))
+ changeSetV.clear
+
+ // changeSetV should be reinitialized
+ changeSetV += List(12, 23, 45)
+ changeSetV += "maulindu"
+ MongoStorage.insertVectorStorageEntriesFor("U-A1", changeSetV.toList)
+ assertEquals(
+ 5,
+ MongoStorage.getVectorStorageSizeFor("U-A1"))
+
+ // add more to the same changeSetV
+ changeSetV += "ramanendu"
+ changeSetV += Map(1 -> "dg", 2 -> "mc")
+
+ // add for a diff transaction
+ MongoStorage.insertVectorStorageEntriesFor("U-A2", changeSetV.toList)
+ assertEquals(
+ 4,
+ MongoStorage.getVectorStorageSizeFor("U-A2"))
+
+ // previous transaction change set should remain same
+ assertEquals(
+ 5,
+ MongoStorage.getVectorStorageSizeFor("U-A1"))
+
+ // test single element entry
+ MongoStorage.insertVectorStorageEntryFor("U-A1", Map(1->1, 2->4, 3->9))
+ assertEquals(
+ 6,
+ MongoStorage.getVectorStorageSizeFor("U-A1"))
+ }
+
+ @Test
+ def testVectorFetchForKeys = {
+
+ // initially everything 0
+ assertEquals(
+ 0,
+ MongoStorage.getVectorStorageSizeFor("U-A2"))
+
+ assertEquals(
+ 0,
+ MongoStorage.getVectorStorageSizeFor("U-A1"))
+
+ // get some stuff
+ changeSetV += "debasish"
+ changeSetV += List(BigDecimal(12), BigDecimal(13), BigDecimal(14))
+ MongoStorage.insertVectorStorageEntriesFor("U-A1", changeSetV.toList)
+
+ assertEquals(
+ 2,
+ MongoStorage.getVectorStorageSizeFor("U-A1"))
+
+ val JsString(str) = MongoStorage.getVectorStorageEntryFor("U-A1", 0).asInstanceOf[JsString]
+ assertEquals("debasish", str)
+
+ import dispatch.json.Js._
+
+ val l = MongoStorage.getVectorStorageEntryFor("U-A1", 1).asInstanceOf[JsValue]
+ val num_list = list ! num
+ val num_list(l0) = l
+ assertEquals(List(12, 13, 14), l0)
+
+ changeSetV.clear
+ changeSetV += Map(1->1, 2->4, 3->9)
+ changeSetV += BigInt(2310)
+ changeSetV += List(100, 200, 300)
+ MongoStorage.insertVectorStorageEntriesFor("U-A1", changeSetV.toList)
+
+ assertEquals(
+ 5,
+ MongoStorage.getVectorStorageSizeFor("U-A1"))
+
+ val r =
+ MongoStorage.getVectorStorageRangeFor("U-A1", Some(1), None, 3)
+
+ assertEquals(3, r.size)
+ val lr = r(0).asInstanceOf[JsValue]
+ val num_list(l1) = lr
+ assertEquals(List(12, 13, 14), l1)
+ }
+
+ @Test
+ def testVectorFetchForNonExistentKeys = {
+ try {
+ MongoStorage.getVectorStorageEntryFor("U-A1", 1)
+ fail("should throw an exception")
+ } catch {case e: Predef.NoSuchElementException => {}}
+
+ try {
+ MongoStorage.getVectorStorageRangeFor("U-A1", Some(2), None, 12)
+ fail("should throw an exception")
+ } catch {case e: Predef.NoSuchElementException => {}}
+ }
+
+ @Test
+ def testMapInsertForTransactionId = {
+ case class Foo(no: Int, name: String)
+ fillMap
+
+ // add some more to changeSet
+ changeSetM += "5" -> Foo(12, "dg")
+ changeSetM += "6" -> java.util.Calendar.getInstance.getTime
+
+ // insert all into Mongo
+ MongoStorage.insertMapStorageEntriesFor("U-M1", changeSetM.toList)
+ assertEquals(
+ 6,
+ MongoStorage.getMapStorageSizeFor("U-M1"))
+
+ // individual insert api
+ MongoStorage.insertMapStorageEntryFor("U-M1", "7", "akka")
+ MongoStorage.insertMapStorageEntryFor("U-M1", "8", List(23, 25))
+ assertEquals(
+ 8,
+ MongoStorage.getMapStorageSizeFor("U-M1"))
+
+ // add the same changeSet for another transaction
+ MongoStorage.insertMapStorageEntriesFor("U-M2", changeSetM.toList)
+ assertEquals(
+ 6,
+ MongoStorage.getMapStorageSizeFor("U-M2"))
+
+ // the first transaction should remain the same
+ assertEquals(
+ 8,
+ MongoStorage.getMapStorageSizeFor("U-M1"))
+ changeSetM.clear
+ }
+
+ @Test
+ def testMapContents = {
+ fillMap
+ MongoStorage.insertMapStorageEntriesFor("U-M1", changeSetM.toList)
+ MongoStorage.getMapStorageEntryFor("U-M1", "2") match {
+ case Some(x) => {
+ val JsString(str) = x.asInstanceOf[JsValue]
+ assertEquals("peter", str)
+ }
+ case None => fail("should fetch peter")
+ }
+ MongoStorage.getMapStorageEntryFor("U-M1", "4") match {
+ case Some(x) => {
+ val num_list = list ! num
+ val num_list(l0) = x.asInstanceOf[JsValue]
+ assertEquals(3, l0.size)
+ }
+ case None => fail("should fetch list")
+ }
+ MongoStorage.getMapStorageEntryFor("U-M1", "3") match {
+ case Some(x) => {
+ val num_list = list ! num
+ val num_list(l0) = x.asInstanceOf[JsValue]
+ assertEquals(2, l0.size)
+ }
+ case None => fail("should fetch list")
+ }
+
+ // get the entire map
+ val l: List[Tuple2[AnyRef, AnyRef]] =
+ MongoStorage.getMapStorageFor("U-M1")
+
+ assertEquals(4, l.size)
+ assertTrue(l.map(_._1).contains("1"))
+ assertTrue(l.map(_._1).contains("2"))
+ assertTrue(l.map(_._1).contains("3"))
+ assertTrue(l.map(_._1).contains("4"))
+
+ val JsString(str) = l.filter(_._1 == "2").first._2
+ assertEquals(str, "peter")
+
+ // trying to fetch for a non-existent transaction will throw
+ try {
+ MongoStorage.getMapStorageFor("U-M2")
+ fail("should throw an exception")
+ } catch {case e: Predef.NoSuchElementException => {}}
+
+ changeSetM.clear
+ }
+
+ @Test
+ def testMapContentsByRange = {
+ fillMap
+ changeSetM += "5" -> Map(1 -> "dg", 2 -> "mc")
+ MongoStorage.insertMapStorageEntriesFor("U-M1", changeSetM.toList)
+
+ // specify start and count
+ val l: List[Tuple2[AnyRef, AnyRef]] =
+ MongoStorage.getMapStorageRangeFor(
+ "U-M1", Some(Integer.valueOf(2)), None, 3)
+
+ assertEquals(3, l.size)
+ assertEquals("3", l(0)._1.asInstanceOf[String])
+ val lst = l(0)._2.asInstanceOf[JsValue]
+ val num_list = list ! num
+ val num_list(l0) = lst
+ assertEquals(List(100, 200), l0)
+ assertEquals("4", l(1)._1.asInstanceOf[String])
+ val ls = l(1)._2.asInstanceOf[JsValue]
+ val num_list(l1) = ls
+ assertEquals(List(10, 20, 30), l1)
+
+ // specify start, finish and count where finish - start == count
+ assertEquals(3,
+ MongoStorage.getMapStorageRangeFor(
+ "U-M1", Some(Integer.valueOf(2)), Some(Integer.valueOf(5)), 3).size)
+
+ // specify start, finish and count where finish - start > count
+ assertEquals(3,
+ MongoStorage.getMapStorageRangeFor(
+ "U-M1", Some(Integer.valueOf(2)), Some(Integer.valueOf(9)), 3).size)
+
+ // do not specify start or finish
+ assertEquals(3,
+ MongoStorage.getMapStorageRangeFor(
+ "U-M1", None, None, 3).size)
+
+ // specify finish and count
+ assertEquals(3,
+ MongoStorage.getMapStorageRangeFor(
+ "U-M1", None, Some(Integer.valueOf(3)), 3).size)
+
+ // specify start, finish and count where finish < start
+ assertEquals(3,
+ MongoStorage.getMapStorageRangeFor(
+ "U-M1", Some(Integer.valueOf(2)), Some(Integer.valueOf(1)), 3).size)
+
+ changeSetM.clear
+ }
+
+ @Test
+ def testMapStorageRemove = {
+ fillMap
+ changeSetM += "5" -> Map(1 -> "dg", 2 -> "mc")
+
+ MongoStorage.insertMapStorageEntriesFor("U-M1", changeSetM.toList)
+ assertEquals(5,
+ MongoStorage.getMapStorageSizeFor("U-M1"))
+
+ // remove key "3"
+ MongoStorage.removeMapStorageFor("U-M1", "3")
+ assertEquals(4,
+ MongoStorage.getMapStorageSizeFor("U-M1"))
+
+ try {
+ MongoStorage.getMapStorageEntryFor("U-M1", "3")
+ fail("should throw exception")
+ } catch { case e => {}}
+
+ // remove the whole stuff
+ MongoStorage.removeMapStorageFor("U-M1")
+
+ try {
+ MongoStorage.getMapStorageFor("U-M1")
+ fail("should throw exception")
+ } catch { case e: NoSuchElementException => {}}
+
+ changeSetM.clear
+ }
+
+ private def fillMap = {
+ changeSetM += "1" -> "john"
+ changeSetM += "2" -> "peter"
+ changeSetM += "3" -> List(100, 200)
+ changeSetM += "4" -> List(10, 20, 30)
+ changeSetM
+ }
+
+ @Test
+ def testRefStorage = {
+ MongoStorage.getRefStorageFor("U-R1") match {
+ case None =>
+ case Some(o) => fail("should be None")
+ }
+
+ val m = Map("1"->1, "2"->4, "3"->9)
+ MongoStorage.insertRefStorageFor("U-R1", m)
+ MongoStorage.getRefStorageFor("U-R1") match {
+ case None => fail("should not be empty")
+ case Some(r) => {
+ val a = r.asInstanceOf[JsValue]
+ val m1 = Symbol("1") ? num
+ val m2 = Symbol("2") ? num
+ val m3 = Symbol("3") ? num
+
+ val m1(n1) = a
+ val m2(n2) = a
+ val m3(n3) = a
+
+ assertEquals(n1, 1)
+ assertEquals(n2, 4)
+ assertEquals(n3, 9)
+ }
+ }
+
+ // insert another one
+ // the previous one should be replaced
+ val b = List("100", "jonas")
+ MongoStorage.insertRefStorageFor("U-R1", b)
+ MongoStorage.getRefStorageFor("U-R1") match {
+ case None => fail("should not be empty")
+ case Some(r) => {
+ val a = r.asInstanceOf[JsValue]
+ val str_lst = list ! str
+ val str_lst(l) = a
+ assertEquals(b, l)
+ }
+ }
+ }
+}
diff --git a/akka-rest/pom.xml b/akka-rest/pom.xml
new file mode 100644
index 0000000000..6408d17d4a
--- /dev/null
+++ b/akka-rest/pom.xml
@@ -0,0 +1,73 @@
+
+ 4.0.0
+
+ akka-rest
+ Akka REST Module
+
+ jar
+
+
+ akka
+ se.scalablesolutions.akka
+ 0.6
+ ../pom.xml
+
+
+
+
+
+ akka-util
+ se.scalablesolutions.akka
+ 0.6
+
+
+ akka-actors
+ se.scalablesolutions.akka
+ 0.6
+
+
+
+
+ com.sun.grizzly
+ grizzly-comet-webserver
+ 1.8.6.3
+
+
+ com.sun.jersey
+ jersey-server
+ 1.1.1-ea
+
+
+ com.sun.jersey
+ jersey-json
+ 1.1.1-ea
+
+
+ javax.ws.rs
+ jsr311-api
+ 1.0
+
+
+ com.sun.jersey.contribs
+ jersey-scala
+ 1.1.2-ea-SNAPSHOT
+
+
+ org.atmosphere
+ atmosphere-core
+ 0.3
+
+
+ org.atmosphere
+ atmosphere-portable-runtime
+ 0.3
+
+
+ org.atmosphere
+ atmosphere-compat
+ 0.3
+
+
+
diff --git a/kernel/src/main/scala/rest/ActorComponentProvider.scala b/akka-rest/src/main/scala/ActorComponentProvider.scala
similarity index 89%
rename from kernel/src/main/scala/rest/ActorComponentProvider.scala
rename to akka-rest/src/main/scala/ActorComponentProvider.scala
index f7a577f61f..4985bc48de 100755
--- a/kernel/src/main/scala/rest/ActorComponentProvider.scala
+++ b/akka-rest/src/main/scala/ActorComponentProvider.scala
@@ -2,12 +2,12 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.rest
+package se.scalablesolutions.akka.rest
import com.sun.jersey.core.spi.component.ioc.IoCFullyManagedComponentProvider
-import kernel.config.Configurator
-import kernel.util.Logging
+import config.Configurator
+import util.Logging
class ActorComponentProvider(val clazz: Class[_], val configurators: List[Configurator])
extends IoCFullyManagedComponentProvider with Logging {
diff --git a/kernel/src/main/scala/rest/ActorComponentProviderFactory.scala b/akka-rest/src/main/scala/ActorComponentProviderFactory.scala
similarity index 91%
rename from kernel/src/main/scala/rest/ActorComponentProviderFactory.scala
rename to akka-rest/src/main/scala/ActorComponentProviderFactory.scala
index 1a46e44ff8..5917ea2ea8 100755
--- a/kernel/src/main/scala/rest/ActorComponentProviderFactory.scala
+++ b/akka-rest/src/main/scala/ActorComponentProviderFactory.scala
@@ -2,17 +2,15 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.rest
+package se.scalablesolutions.akka.rest
-import kernel.Kernel
-import util.Logging
import javax.ws.rs.core.Context
import com.sun.jersey.core.spi.component.ioc.{IoCComponentProvider,IoCComponentProviderFactory}
import com.sun.jersey.core.spi.component.{ComponentContext}
import config.Configurator
-
+import util.Logging
class ActorComponentProviderFactory(val configurators: List[Configurator])
extends IoCComponentProviderFactory with Logging {
diff --git a/kernel/src/main/scala/rest/NodeWriter.scala b/akka-rest/src/main/scala/NodeWriter.scala
similarity index 95%
rename from kernel/src/main/scala/rest/NodeWriter.scala
rename to akka-rest/src/main/scala/NodeWriter.scala
index c301d9c2b5..58c127b411 100755
--- a/kernel/src/main/scala/rest/NodeWriter.scala
+++ b/akka-rest/src/main/scala/NodeWriter.scala
@@ -2,7 +2,7 @@
* Copyright (C) 2009 Scalable Solutions.
*/
-package se.scalablesolutions.akka.kernel.rest
+package se.scalablesolutions.akka.rest
import java.io.OutputStream
import java.lang.annotation.Annotation
diff --git a/samples-java/pom.xml b/akka-samples-java/pom.xml
similarity index 72%
rename from samples-java/pom.xml
rename to akka-samples-java/pom.xml
index fc6c4840f9..2a7cf2adff 100644
--- a/samples-java/pom.xml
+++ b/akka-samples-java/pom.xml
@@ -10,20 +10,40 @@
akka
se.scalablesolutions.akka
- 0.5
+ 0.6
../pom.xml
-
- akka-kernel
- se.scalablesolutions.akka
- 0.5
-
akka-util-java
se.scalablesolutions.akka
- 0.5
+ 0.6
+
+
+ akka-util
+ se.scalablesolutions.akka
+ 0.6
+
+
+ akka-actors
+ se.scalablesolutions.akka
+ 0.6
+
+
+ akka-persistence
+ se.scalablesolutions.akka
+ 0.6
+
+
+ akka-rest
+ se.scalablesolutions.akka
+ 0.6
+
+
+ akka-kernel
+ se.scalablesolutions.akka
+ 0.6
javax.ws.rs
diff --git a/samples-java/src/main/java/sample/java/Boot.java b/akka-samples-java/src/main/java/sample/java/Boot.java
similarity index 61%
rename from samples-java/src/main/java/sample/java/Boot.java
rename to akka-samples-java/src/main/java/sample/java/Boot.java
index 9c21f31faf..cd94aea2d2 100644
--- a/samples-java/src/main/java/sample/java/Boot.java
+++ b/akka-samples-java/src/main/java/sample/java/Boot.java
@@ -1,7 +1,7 @@
package sample.java;
-import se.scalablesolutions.akka.kernel.config.ActiveObjectManager;
-import static se.scalablesolutions.akka.kernel.config.JavaConfig.*;
+import se.scalablesolutions.akka.config.ActiveObjectManager;
+import static se.scalablesolutions.akka.config.JavaConfig.*;
public class Boot {
final private ActiveObjectManager manager = new ActiveObjectManager();
@@ -13,6 +13,10 @@ public class Boot {
new Component(
sample.java.SimpleService.class,
new LifeCycle(new Permanent(), 1000),
+ 1000),
+ new Component(
+ sample.java.PersistentSimpleService.class,
+ new LifeCycle(new Permanent(), 1000),
1000)
}).supervise();
}
diff --git a/akka-samples-java/src/main/java/sample/java/PersistentSimpleService.java b/akka-samples-java/src/main/java/sample/java/PersistentSimpleService.java
new file mode 100644
index 0000000000..947a0f8b3f
--- /dev/null
+++ b/akka-samples-java/src/main/java/sample/java/PersistentSimpleService.java
@@ -0,0 +1,58 @@
+/**
+ * Copyright (C) 2009 Scalable Solutions.
+ */
+
+package sample.java;
+
+import javax.ws.rs.Path;
+import javax.ws.rs.GET;
+import javax.ws.rs.Produces;
+
+import se.scalablesolutions.akka.annotation.transactionrequired;
+import se.scalablesolutions.akka.annotation.prerestart;
+import se.scalablesolutions.akka.annotation.postrestart;
+import se.scalablesolutions.akka.state.TransactionalState;
+import se.scalablesolutions.akka.state.PersistentState;
+import se.scalablesolutions.akka.state.TransactionalMap;
+import se.scalablesolutions.akka.state.CassandraStorageConfig;
+
+/**
+ * Try service out by invoking (multiple times):
+ *
+ * curl http://localhost:9998/persistentjavacount
+ *
+ * Or browse to the URL from a web browser.
+ */
+@Path("/persistentjavacount")
+@transactionrequired
+public class PersistentSimpleService {
+ private String KEY = "COUNTER";
+
+ private boolean hasStartedTicking = false;
+ private PersistentState factory = new PersistentState();
+ private TransactionalMap