removed trailing spaces

Jonas Bonér 2010-10-29 16:33:31 +02:00
parent c27f971a42
commit c276c621bf
92 changed files with 927 additions and 927 deletions

View file

@@ -394,7 +394,7 @@ trait Actor extends Logging {
/**
* Changes the Actor's behavior to become the new 'Receive' (PartialFunction[Any, Unit]) handler.
* Puts the behavior on top of the hotswap stack.
*/
def become(behavior: Receive): Unit = self.hotswap = self.hotswap.push(behavior)
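
A minimal sketch of how this hotswap API is typically used; the actor and messages are illustrative, and unbecome (shown in HotSwapSpec further down) pops the stack again:

import akka.actor.Actor
import akka.actor.Actor.actorOf

class Toggler extends Actor {
  def receive = {
    case "swap" => become { case "revert" => unbecome }  // push a new Receive onto the hotswap stack
    case msg    => println("default handler got: " + msg)
  }
}

val toggler = actorOf[Toggler].start
toggler ! "swap"    // messages are now matched by the swapped-in handler
toggler ! "revert"  // unbecome pops the stack, restoring the original receive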

View file

@@ -805,7 +805,7 @@ class LocalActorRef private[akka] (
receiveTimeout = None
cancelReceiveTimeout
dispatcher.detach(this)
transactorConfig = transactorConfig.copy(factory = None)
_status = ActorRefInternals.SHUTDOWN
actor.postStop
ActorRegistry.unregister(this)
@@ -1067,7 +1067,7 @@ class LocalActorRef private[akka] (
case Temporary => shutDownTemporaryActor(this)
case _ =>
val failedActor = actorInstance.get
// either permanent or none where default is permanent
Actor.log.info("Restarting actor [%s] configured as PERMANENT.", id)
Actor.log.debug("Restarting linked actors for actor [%s].", id)

View file

@@ -41,12 +41,12 @@ object ActorRegistry extends ListenerManagement {
private val actorsById = new Index[String,ActorRef]
private val remoteActorSets = Map[Address, RemoteActorSet]()
private val guard = new ReadWriteGuard
/**
* Returns all actors in the system.
*/
def actors: Array[ActorRef] = filter(_ => true)
/**
* Returns the number of actors in the system.
*/
@@ -435,4 +435,4 @@ class Index[K <: AnyRef,V <: AnyRef : Manifest] {
* Removes all keys and all values
*/
def clear = foreach { case (k,v) => remove(k,v) }
}
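
A hedged usage sketch of the registry methods in this hunk (shutdownAll also appears in the DataFlow test later in this diff; the package name is assumed):

import akka.actor.{ ActorRef, ActorRegistry }

val all: Array[ActorRef] = ActorRegistry.actors   // every actor currently registered
println("live actors: " + all.length)
ActorRegistry.shutdownAll                         // stop and unregister them all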

View file

@@ -80,7 +80,7 @@ case class SupervisorFactory(val config: SupervisorConfig) extends Logging {
def newInstance: Supervisor = newInstanceFor(config)
def newInstanceFor(config: SupervisorConfig): Supervisor = {
val supervisor = new Supervisor(config.restartStrategy)
supervisor.configure(config)
supervisor.start

View file

@@ -55,7 +55,7 @@ object Config {
case value => Some(value)
}
(envConf orElse systemConf).map("akka." + _ + ".conf").getOrElse("akka.conf")
}
if (System.getProperty("akka.config", "") != "") {
@@ -74,8 +74,8 @@ object Config {
val configFile = HOME.getOrElse(throwNoAkkaHomeException) + "/config/" + confName
Configgy.configure(configFile)
ConfigLogger.log.info(
"AKKA_HOME is defined as [%s], config loaded from [%s].",
HOME.getOrElse(throwNoAkkaHomeException),
configFile)
} catch {
case e: ParseException => throw new ConfigurationException(
@@ -106,7 +106,7 @@ object Config {
CConfig.fromString("<akka></akka>") // default empty config
}
}
val CONFIG_VERSION = config.getString("akka.version", VERSION)
if (VERSION != CONFIG_VERSION) throw new ConfigurationException(
"Akka JAR version [" + VERSION + "] is different than the provided config version [" + CONFIG_VERSION + "]")
@@ -115,9 +115,9 @@ object Config {
val startTime = System.currentTimeMillis
def uptime = (System.currentTimeMillis - startTime) / 1000
def throwNoAkkaHomeException = throw new ConfigurationException(
"Akka home is not defined. Either:" +
"\n\t1. Define 'AKKA_HOME' environment variable pointing to the root of the Akka distribution." +
"\n\t2. Add the '-Dakka.home=...' option pointing to the root of the Akka distribution.")
}
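
A small sketch of reading settings through this Config object once loading succeeds; the package name is assumed and, apart from akka.version, the keys are illustrative:

import akka.config.Config

val version = Config.config.getString("akka.version", "unknown")
val restOn  = Config.config.getBool("akka.rest.service", true)   // same key used by EmbeddedAppServer below
println("Akka config version " + version + ", REST enabled: " + restOn)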

View file

@@ -35,7 +35,7 @@ object Supervision {
def this(actorRef: ActorRef, lifeCycle: LifeCycle) =
this(actorRef, lifeCycle, None)
}
object Supervise {
def apply(actorRef: ActorRef, lifeCycle: LifeCycle, remoteAddress: RemoteAddress) = new Supervise(actorRef, lifeCycle, remoteAddress)
def apply(actorRef: ActorRef, lifeCycle: LifeCycle) = new Supervise(actorRef, lifeCycle, None)
@@ -158,4 +158,4 @@ object Supervision {
def this(target: Class[_], lifeCycle: LifeCycle, timeout: Long, transactionRequired: Boolean, dispatcher: MessageDispatcher, remoteAddress: RemoteAddress) =
this(null: Class[_], target, lifeCycle, timeout, transactionRequired, dispatcher, remoteAddress)
}
}

View file

@@ -143,7 +143,7 @@ object DataFlow {
out ! Exit
throw e
}
result.getOrElse(throw new DataFlowVariableException("Timed out (after " + timeoutMs + " milliseconds) while waiting for result"))
}
}
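
For orientation, a minimal sketch of the dataflow API this timeout belongs to, pieced together from the DataFlowTest further down (the package name and exact timeout behaviour are assumptions):

import akka.dataflow.DataFlow._

val x, y, z = new DataFlowVariable[Int]
thread { z << x() + y() }   // blocks until x and y are bound
thread { x << 40 }
thread { y << 2 }
println(z())                // 42; a variable that is never bound eventually fails with DataFlowVariableException
List(x, y, z).foreach(_.shutdown)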

View file

@@ -94,7 +94,7 @@ object Dispatchers extends Logging {
* <p/>
* E.g. each actor consumes its own thread.
*/
def newThreadBasedDispatcher(actor: ActorRef, mailboxCapacity: Int, pushTimeOut: Duration) =
new ThreadBasedDispatcher(actor, mailboxCapacity, pushTimeOut)
/**
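
A hedged sketch of pinning an actor to the thread-based dispatcher declared above; the self.dispatcher assignment and the Duration factory are assumed from the Akka 1.x API, and the values are illustrative:

import java.util.concurrent.TimeUnit
import akka.actor.Actor
import akka.dispatch.Dispatchers
import akka.util.Duration

class Pinned extends Actor {
  self.dispatcher = Dispatchers.newThreadBasedDispatcher(self, 100, Duration(10, TimeUnit.SECONDS))
  def receive = { case msg => println("got " + msg) }   // runs on this actor's dedicated thread
}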

View file

@@ -73,19 +73,19 @@ class ExecutorBasedEventDrivenDispatcher(
val config: ThreadPoolConfig = ThreadPoolConfig())
extends MessageDispatcher {
def this(_name: String, throughput: Int, throughputDeadlineTime: Int, mailboxType: MailboxType) =
this(_name, throughput, throughputDeadlineTime, mailboxType,ThreadPoolConfig()) // Needed for Java API usage
def this(_name: String, throughput: Int, mailboxType: MailboxType) =
this(_name, throughput, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType) // Needed for Java API usage
def this(_name: String, throughput: Int) =
this(_name, throughput, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE) // Needed for Java API usage
def this(_name: String, _config: ThreadPoolConfig) =
this(_name, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE, _config)
def this(_name: String) =
this(_name, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE) // Needed for Java API usage
val name = "akka:event-driven:dispatcher:" + _name
@@ -111,7 +111,7 @@ class ExecutorBasedEventDrivenDispatcher(
case UnboundedMailbox(blocking) => new DefaultUnboundedMessageQueue(blocking) with ExecutableMailbox {
def dispatcher = ExecutorBasedEventDrivenDispatcher.this
}
case BoundedMailbox(blocking, capacity, pushTimeOut) =>
new DefaultBoundedMessageQueue(capacity, pushTimeOut, blocking) with ExecutableMailbox {
def dispatcher = ExecutorBasedEventDrivenDispatcher.this
@@ -175,7 +175,7 @@ class ExecutorBasedEventDrivenDispatcher(
trait ExecutableMailbox extends Runnable { self: MessageQueue =>
def dispatcher: ExecutorBasedEventDrivenDispatcher
final def run = {
val reschedule = try {
try { processMailbox() } catch { case ie: InterruptedException => true }
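
A sketch combining the constructors and mailbox types from this hunk: a named event-driven dispatcher backed by a bounded mailbox (values illustrative, imports assumed):

import akka.dispatch.{ BoundedMailbox, Dispatchers, ExecutorBasedEventDrivenDispatcher }

val dispatcher = new ExecutorBasedEventDrivenDispatcher(
  "my-dispatcher",                               // _name, prefixed with "akka:event-driven:dispatcher:"
  5,                                             // throughput: messages processed per actor per run
  Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS,
  BoundedMailbox(false, 1000))                   // non-blocking dequeue, capacity 1000, default push timeout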

View file

@@ -41,7 +41,7 @@ class ExecutorBasedEventDrivenWorkStealingDispatcher(
def this(_name: String) = this(_name, Dispatchers.MAILBOX_TYPE,ThreadPoolConfig())
//implicit def actorRef2actor(actorRef: ActorRef): Actor = actorRef.actor
val mailboxType = Some(_mailboxType)
val name = "akka:event-driven-work-stealing:dispatcher:" + _name

View file

@@ -143,7 +143,7 @@ class HawtDispatcher(val aggregate: Boolean = true, val parent: DispatchQueue =
import HawtDispatcher._
val mailboxType: Option[MailboxType] = None
private[akka] def start { retainNonDaemon }
private[akka] def shutdown { releaseNonDaemon }

View file

@@ -35,7 +35,7 @@ sealed trait MailboxType
abstract class TransientMailboxType(val blocking: Boolean = false) extends MailboxType
case class UnboundedMailbox(block: Boolean = false) extends TransientMailboxType(block)
case class BoundedMailbox(
block: Boolean = false,
val capacity: Int = { if (Dispatchers.MAILBOX_CAPACITY < 0) Int.MaxValue else Dispatchers.MAILBOX_CAPACITY },
val pushTimeOut: Duration = Dispatchers.MAILBOX_PUSH_TIME_OUT) extends TransientMailboxType(block) {
if (capacity < 0) throw new IllegalArgumentException("The capacity for BoundedMailbox can not be negative")
@@ -52,9 +52,9 @@ case class ZooKeeperBasedDurableMailbox(ser: EnterpriseModule.Serializer) extend
case class AMQPBasedDurableMailbox(ser: EnterpriseModule.Serializer) extends DurableMailboxType(ser)
case class JMSBasedDurableMailbox(ser: EnterpriseModule.Serializer) extends DurableMailboxType(ser)
class DefaultUnboundedMessageQueue(blockDequeue: Boolean)
extends LinkedBlockingQueue[MessageInvocation] with MessageQueue {
final def enqueue(handle: MessageInvocation) {
this add handle
}
@@ -65,7 +65,7 @@ class DefaultUnboundedMessageQueue(blockDequeue: Boolean)
}
}
class DefaultBoundedMessageQueue(capacity: Int, pushTimeOut: Duration, blockDequeue: Boolean)
extends LinkedBlockingQueue[MessageInvocation](capacity) with MessageQueue {
final def enqueue(handle: MessageInvocation) {
@@ -84,7 +84,7 @@ class DefaultBoundedMessageQueue(capacity: Int, pushTimeOut: Duration, blockDequ
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
trait MailboxFactory {
val mailboxType: Option[MailboxType]
/**
@@ -105,4 +105,4 @@ trait MailboxFactory {
* Creates and returns a durable mailbox for the given actor.
*/
private[akka] def createDurableMailbox(actorRef: ActorRef, mailboxType: DurableMailboxType): AnyRef
}
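
And a short sketch of constructing the two transient mailbox types directly (the Duration factory is assumed; the capacity guard above rejects negative values):

import java.util.concurrent.TimeUnit
import akka.dispatch.{ BoundedMailbox, UnboundedMailbox }
import akka.util.Duration

val unbounded = UnboundedMailbox(block = true)   // dequeue blocks until a message arrives
val bounded   = BoundedMailbox(false, 10000, Duration(100, TimeUnit.MILLISECONDS))
// BoundedMailbox(capacity = -1) would throw IllegalArgumentException per the check above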

View file

@@ -95,7 +95,7 @@ trait MessageDispatcher extends MailboxFactory with Logging {
}
}
}
private[akka] def unregister(actorRef: ActorRef) = {
if (uuids remove actorRef.uuid) {
actorRef.mailbox = null
@@ -176,4 +176,4 @@ trait MessageDispatcher extends MailboxFactory with Logging {
* Returns the size of the mailbox for the specified actor
*/
def mailboxSize(actorRef: ActorRef): Int
}

View file

@@ -261,4 +261,4 @@ trait LazyExecutorService extends ExecutorServiceDelegate {
class LazyExecutorServiceWrapper(executorFactory: => ExecutorService) extends LazyExecutorService {
def createExecutor = executorFactory
}

View file

@@ -75,4 +75,4 @@ object Option {
implicit def java2ScalaOption[A](o: Option[A]): scala.Option[A] = o.asScala
implicit def scala2JavaOption[A](o: scala.Option[A]): Option[A] = option(o.get)
}

View file

@@ -17,7 +17,7 @@ trait InfiniteIterator[T] extends Iterator[T]
*/
class CyclicIterator[T](items: List[T]) extends InfiniteIterator[T] {
def this(items: java.util.List[T]) = this(items.toList)
@volatile private[this] var current: List[T] = items
def hasNext = items != Nil
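
A usage sketch: the iterator above cycles over a fixed list forever, which is how the load balancers later in this diff pick their next target (package name assumed):

import akka.routing.CyclicIterator

val picker = new CyclicIterator(List("a", "b", "c"))
(1 to 5).map(_ => picker.next)   // a, b, c, a, b: wraps around after the last element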

View file

@@ -70,4 +70,4 @@ abstract class UntypedLoadBalancer extends UntypedDispatcher {
else null
override def isDefinedAt(msg: Any) = seq.exists( _.isDefinedAt(msg) )
}

View file

@@ -92,7 +92,7 @@ object Transaction {
private[this] val persistentStateMap = new HashMap[String, Committable with Abortable]
private[akka] val depth = new AtomicInteger(0)
val jta: Option[JtaModule.TransactionContainer] =
if (JTA_AWARE) Some(JtaModule.createTransactionContainer)
else None

View file

@@ -20,4 +20,4 @@ class Address(val hostname: String, val port: Int) {
that.asInstanceOf[Address].hostname == hostname &&
that.asInstanceOf[Address].port == port
}
}

View file

@@ -18,8 +18,8 @@ object Crypt extends Logging {
def md5(text: String): String = md5(unifyLineSeparator(text).getBytes("ASCII"))
def md5(bytes: Array[Byte]): String = digest(bytes, MessageDigest.getInstance("MD5"))
def sha1(text: String): String = sha1(unifyLineSeparator(text).getBytes("ASCII"))
def sha1(bytes: Array[Byte]): String = digest(bytes, MessageDigest.getInstance("SHA1"))
@@ -33,8 +33,8 @@ object Crypt extends Logging {
def digest(bytes: Array[Byte], md: MessageDigest): String = {
md.update(bytes)
hexify(md.digest)
}
def hexify(bytes: Array[Byte]): String = {
val builder = new StringBuilder
bytes.foreach { byte => builder.append(hex.charAt((byte & 0xF) >> 4)).append(hex.charAt(byte & 0xF)) }
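
Usage sketch for the digest helpers above (the object's package is assumed to be akka.util; results are hex strings built by hexify):

import akka.util.Crypt

val m = Crypt.md5("some payload")                     // MD5 over the ASCII bytes, line separators unified first
val s = Crypt.sha1("some payload".getBytes("ASCII"))  // SHA1 over raw bytes
println(m + " / " + s)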

View file

@@ -45,7 +45,7 @@ object Helpers extends Logging {
log.warning(e, "Cannot narrow %s to expected type %s!", o, implicitly[Manifest[T]].erasure.getName)
None
}
/**
* Reference that can hold either a typed value or an exception.
*
@@ -61,13 +61,13 @@ object Helpers extends Logging {
*
* scala> res0()
* res3: Int = 3
*
* scala> res0() = { println("Hello world"); 3}
* Hello world
*
* scala> res0()
* res5: Int = 3
*
* scala> res0() = error("Lets see what happens here...")
*
* scala> res0()
@@ -80,15 +80,15 @@ object Helpers extends Logging {
*/
class ResultOrError[R](result: R){
private[this] var contents: Either[R, Throwable] = Left(result)
def update(value: => R) = {
contents = try {
Left(value)
} catch {
case (error : Throwable) => Right(error)
}
}
def apply() = contents match {
case Left(result) => result
case Right(error) => throw error.fillInStackTrace

View file

@@ -86,7 +86,7 @@ class SimpleLock {
}
} else None
}
def ifPossibleApply[T,R](value: T)(function: (T) => R): Option[R] = {
if (tryLock()) {
try {

View file

@@ -208,10 +208,10 @@ object ReflectiveAccess extends Logging {
def enqueue(message: MessageInvocation)
def dequeue: MessageInvocation
}
type Serializer = {
def toBinary(obj: AnyRef): Array[Byte]
def fromBinary(bytes: Array[Byte], clazz: Option[Class[_]]): AnyRef
}
lazy val isEnterpriseEnabled = clusterObjectInstance.isDefined
@@ -219,7 +219,7 @@ object ReflectiveAccess extends Logging {
val clusterObjectInstance: Option[AnyRef] =
getObjectFor("akka.cluster.Cluster$")
val serializerClass: Option[Class[_]] =
getClassFor("akka.serialization.Serializer")
def ensureEnterpriseEnabled = if (!isEnterpriseEnabled) throw new ModuleNotAvailableException(

View file

@@ -32,7 +32,7 @@ object ActorFireForgetRequestReplySpec {
class SenderActor(replyActor: ActorRef) extends Actor {
def receive = {
case "Init" =>
replyActor ! "Send"
case "Reply" => {
state.s = "Reply"

View file

@@ -98,4 +98,4 @@ class ActorRefSpec extends
serverRef.stop
}
}
}

View file

@@ -57,7 +57,7 @@ object FSMActorSpec {
}
startWith(Locked, CodeState("", code))
whenUnhandled {
case Event(_, stateData) => {
log.info("Unhandled")

View file

@@ -9,7 +9,7 @@ import java.util.concurrent.CyclicBarrier
class HotSwapSpec extends WordSpec with MustMatchers {
"An Actor" should {
"be able to hotswap its behavior with HotSwap(..)" in {
val barrier = new CyclicBarrier(2)
@volatile var _log = ""
@@ -17,7 +17,7 @@ class HotSwapSpec extends WordSpec with MustMatchers {
def receive = { case _ => _log += "default" }
}).start
a ! HotSwap {
case _ =>
_log += "swapped"
barrier.await
}
@@ -31,17 +31,17 @@ class HotSwapSpec extends WordSpec with MustMatchers {
@volatile var _log = ""
val a = actorOf(new Actor {
def receive = {
case "init" =>
_log += "init"
barrier.await
case "swap" => become({
case _ =>
_log += "swapped"
barrier.await
})
}
}).start
a ! "init"
barrier.await
_log must be ("init")
@@ -72,7 +72,7 @@ class HotSwapSpec extends WordSpec with MustMatchers {
barrier.reset
_log = ""
a ! HotSwap {
case "swapped" =>
_log += "swapped"
barrier.await
}
@@ -104,21 +104,21 @@ class HotSwapSpec extends WordSpec with MustMatchers {
@volatile var _log = ""
val a = actorOf(new Actor {
def receive = {
case "init" =>
_log += "init"
barrier.await
case "swap" =>
become({
case "swapped" =>
_log += "swapped"
barrier.await
case "revert" =>
unbecome
})
barrier.await
}
}).start
a ! "init"
barrier.await
_log must be ("init")

View file

@@ -147,7 +147,7 @@ class RestartStrategySpec extends JUnitSuite {
slave ! Ping
slave ! Crash
assert(secondRestartLatch.tryAwait(1, TimeUnit.SECONDS))
assert(secondPingLatch.tryAwait(1, TimeUnit.SECONDS))
@@ -224,7 +224,7 @@ class RestartStrategySpec extends JUnitSuite {
val restartLatch,stopLatch,maxNoOfRestartsLatch = new StandardLatch
val countDownLatch = new CountDownLatch(2)
val boss = actorOf(new Actor{
self.faultHandler = OneForOneStrategy(List(classOf[Throwable]), None, Some(1000))
protected def receive = {
@@ -238,7 +238,7 @@ class RestartStrategySpec extends JUnitSuite {
case Ping => countDownLatch.countDown
case Crash => throw new Exception("Crashing...")
}
override def postRestart(reason: Throwable) = {
restartLatch.open
}

View file

@@ -69,7 +69,7 @@ class DataFlowTest extends Spec with ShouldMatchers with BeforeAndAfterAll {
result.get should equal (sum(0,ints(0,1000)))
List(x,y,z).foreach(_.shutdown)
}
/*it("should be able to join streams") {
import DataFlow._
ActorRegistry.shutdownAll
@@ -95,7 +95,7 @@ class DataFlowTest extends Spec with ShouldMatchers with BeforeAndAfterAll {
result.set(producer.map(x => x * x).foldLeft(0)(_ + _))
latch.countDown
}
latch.await(3,TimeUnit.SECONDS) should equal (true)
result.get should equal (332833500)
}
@@ -131,7 +131,7 @@ class DataFlowTest extends Spec with ShouldMatchers with BeforeAndAfterAll {
thread { ints(0, 1000, producer) }
thread { sum(0, producer, consumer) }
thread { recurseSum(consumer) }
latch.await(15,TimeUnit.SECONDS) should equal (true)
}*/
@@ -162,4 +162,4 @@ class DataFlowTest extends Spec with ShouldMatchers with BeforeAndAfterAll {
latch.await(2,TimeUnit.SECONDS) should equal (true)
}*/
}
}

View file

@@ -296,4 +296,4 @@ class HawtDispatcherModelTest extends ActorModelSpec {
class ExecutorBasedEventDrivenWorkStealingDispatcherModelTest extends ActorModelSpec {
def newInterceptedDispatcher = new ExecutorBasedEventDrivenWorkStealingDispatcher("foo") with MessageDispatcherInterceptor
}

View file

@@ -2,4 +2,4 @@ package akka.japi
import org.scalatest.junit.JUnitSuite
class JavaAPITest extends JavaAPITestBase with JUnitSuite

View file

@@ -214,7 +214,7 @@ public class ExampleSessionJava {
scala.Option<RemoteProtocol.AddressProtocol> response =
protobufRpcClient.call(RemoteProtocol.AddressProtocol.newBuilder().setHostname("localhost").setPort(4321).build());
System.out.println("### >> Got response: " + response);
}
}

View file

@@ -256,7 +256,7 @@ object AMQP {
}
// Needed for Java API usage
def send(request: O, replyTo: String): Unit = {
send(request, Some(replyTo))
}
def send(request: O, replyTo: Option[String] = None) = {
@@ -311,7 +311,7 @@ object AMQP {
def newStringConsumer(connection: ActorRef,
handler: Procedure[String],
exchangeName: String): ActorRef = {
newStringConsumer(connection, handler.apply _, Some(exchangeName))
}
// Needed for Java API usage
@@ -328,7 +328,7 @@ object AMQP {
exchangeName: String,
routingKey: String,
queueName: String): ActorRef = {
newStringConsumer(connection, handler.apply _, Some(exchangeName), Some(routingKey), Some(queueName))
}
def newStringConsumer(connection: ActorRef,

View file

@@ -283,7 +283,7 @@ object RPC {
def newStringRpcClient(connection: ActorRef,
exchange: String): RpcClient[String, String] = {
newStringRpcClient(connection, exchange, None)
}
// Needed for Java API usage
def newStringRpcClient(connection: ActorRef,

View file

@@ -143,7 +143,7 @@ trait CamelContextLifecycle extends Logging {
* Initializes this lifecycle object with the given CamelContext. For the passed
* CamelContext, stream-caching is enabled. If applications want to disable stream-
* caching they can do so after this method returned and prior to calling start.
* This method also registers a new TypedActorComponent at the passed CamelContext
* under a name defined by TypedActorComponent.InternalSchema.
*/
def init(context: CamelContext) {

View file

@@ -22,7 +22,7 @@ trait Consumer { self: Actor =>
/**
* Determines whether two-way communications between an endpoint and this consumer actor
* should be done in blocking or non-blocking mode (default is non-blocking). This method
* doesn't have any effect on one-way communications (they'll never block).
*/
def blocking = false
}
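
A hedged sketch of a consumer actor built on this trait; endpointUri is the abstract member such consumers implement in akka-camel, and the URI and message handling are illustrative:

import akka.actor.Actor
import akka.camel.{ Consumer, Message }

class HttpConsumer extends Actor with Consumer {
  def endpointUri = "jetty:http://0.0.0.0:8877/test"
  override def blocking = true                        // make two-way exchanges block, per the doc above
  def receive = {
    case msg: Message => self.reply("received: " + msg.body)
  }
}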

View file

@@ -112,7 +112,7 @@ class ActorEndpoint(uri: String,
* <li>If the exchange pattern is out-capable and <code>blocking</code> is set to
* <code>false</code> then the producer sends the message using the ! operator, together
* with a callback handler. The callback handler is an <code>ActorRef</code> that can be
* used by the receiving actor to asynchronously reply to the route that is sending the
* message.</li>
* <li>If the exchange pattern is in-only then the producer sends the message using the
* ! operator.</li>

View file

@@ -37,4 +37,4 @@ class AkkaBroadcaster extends org.atmosphere.jersey.util.JerseySimpleBroadcaster
protected override def broadcast(r: Resource, e : Event) {
caster ! ((r,e))
}
}

View file

@@ -26,4 +26,4 @@ class DefaultAkkaLoader extends AkkaLoader {
*/
object Main extends DefaultAkkaLoader {
def main(args: Array[String]) = boot
}

View file

@@ -33,14 +33,14 @@ trait EmbeddedAppServer extends Bootable with Logging {
super.onLoad
if (config.getBool("akka.rest.service", true)) {
log.info("Attempting to start Akka REST service (Jersey)")
System.setProperty("jetty.port",REST_PORT.toString)
System.setProperty("jetty.host",REST_HOSTNAME)
System.setProperty("jetty.home",HOME.getOrElse(throwNoAkkaHomeException) + "/deploy/root")
val configuration = new XmlConfiguration(
new File(HOME.getOrElse(throwNoAkkaHomeException) + "/config/microkernel-server.xml").toURI.toURL)
server = Option(configuration.configure.asInstanceOf[Server]) map { s => //Set the correct classloader to our contexts
applicationLoader foreach { loader =>
//We need to provide the correct classloader to the servlets

View file

@@ -13,4 +13,4 @@ class ReflectiveAccessSpec extends JUnitSuite {
ReflectiveAccess.JtaModule.ensureJtaEnabled
assert(ReflectiveAccess.JtaModule.transactionContainerObjectInstance.isDefined)
}
}

View file

@@ -22,4 +22,4 @@ object Kernel extends DefaultAkkaLoader {
case x: BootableRemoteActorService => x.startRemoteService
case _ =>
})
}

View file

@@ -20,4 +20,4 @@ class CassandraTicket343TestIntegration extends Ticket343Test {
def getMap: (String) => PersistentMap[Array[Byte], Array[Byte]] = CassandraStorage.getMap
}

View file

@@ -627,4 +627,4 @@ private[akka] trait CommonStorageBackend extends MapStorageBackend[Array[Byte],
}
}

View file

@@ -158,4 +158,4 @@ trait MapStorageBackendTest extends Spec with ShouldMatchers with BeforeAndAfter
}
}

View file

@@ -120,4 +120,4 @@ trait QueueStorageBackendTest extends Spec with ShouldMatchers with BeforeAndAft
}
}
}

View file

@@ -49,4 +49,4 @@ trait RefStorageBackendTest extends Spec with ShouldMatchers with BeforeAndAfter
}
}
}

View file

@@ -32,4 +32,4 @@ trait SortedSetStorageBackendTest extends Spec with ShouldMatchers with BeforeAn
}
}

View file

@@ -180,4 +180,4 @@ trait VectorStorageBackendTest extends Spec with ShouldMatchers with BeforeAndAf
}
}

View file

@@ -23,7 +23,7 @@ object CouchDBStorage extends Storage {
/**
* Implements a persistent transactional map based on the CouchDB storage.
*
* @author
*/
class CouchDBPersistentMap(id: String) extends PersistentMapBinary {
val uuid = id
@@ -34,7 +34,7 @@ class CouchDBPersistentMap(id: String) extends PersistentMapBinary {
* Implements a persistent transactional vector based on the CouchDB
* storage.
*
* @author
*/
class CouchDBPersistentVector(id: String) extends PersistentVector[Array[Byte]] {
val uuid = id
@@ -44,4 +44,4 @@ class CouchDBPersistentVector(id: String) extends PersistentVector[Array[Byte]]
class CouchDBPersistentRef(id: String) extends PersistentRef[Array[Byte]] {
val uuid = id
val storage = CouchDBStorageBackend
}

View file

@@ -23,9 +23,9 @@ private [akka] object CouchDBStorageBackend extends
RefStorageBackend[Array[Byte]] with
Logging {
import dispatch.json._
implicit object widgetWrites extends Writes[Map[String,Any]] {
def writes(o: Map[String,Any]): JsValue = JsValue(o)
}
@@ -39,7 +39,7 @@ private [akka] object CouchDBStorageBackend extends
val delete = new DeleteMethod(URL)
client.executeMethod(delete)
}
def create() = {
val client = new HttpClient()
val put = new PutMethod(URL)
@@ -48,16 +48,16 @@ private [akka] object CouchDBStorageBackend extends
client.executeMethod(put)
put.getResponseBodyAsString
}
private def storeMap(name: String, postfix: String, entries: List[(Array[Byte], Array[Byte])]) ={
var m = entries.map(e=>(new String(e._1) -> new String(e._2))).toMap + ("_id" -> (name + postfix))
val dataJson = JsonSerialization.tojson(m)
postData(URL, dataJson.toString)
}
private def storeMap(name: String, postfix: String, entries: Map[String, Any]) ={
postData(URL, JsonSerialization.tojson(entries + ("_id" -> (name + postfix))).toString)
}
private def getResponseForNameAsMap(name: String, postfix: String): Option[Map[String, Any]] = {
getResponse(URL + name + postfix).flatMap(JSON.parseFull(_)).asInstanceOf[Option[Map[String, Any]]]
@@ -65,43 +65,43 @@ private [akka] object CouchDBStorageBackend extends
def insertMapStorageEntriesFor(name: String, entries: List[(Array[Byte], Array[Byte])]) ={
val newDoc = getResponseForNameAsMap(name, "_map").getOrElse(Map[String, Any]()) ++
entries.map(e => (new String(e._1) -> new String(e._2))).toMap
storeMap(name, "_map", newDoc)
}
def insertMapStorageEntryFor(name: String, key: Array[Byte], value: Array[Byte])={
insertMapStorageEntriesFor(name, List((key, value)))
}
def removeMapStorageFor(name: String) {
lazy val url = URL + name + "_map"
findDocRev(name + "_map").foreach(deleteData(url, _))
}
def removeMapStorageFor(name: String, key: Array[Byte]): Unit = {
lazy val sKey = new String(key)
// if we can't find the map for name, then we don't need to delete it.
getResponseForNameAsMap(name, "_map").foreach(doc => storeMap(name, "_map", doc - sKey))
}
def getMapStorageEntryFor(name: String, key: Array[Byte]): Option[Array[Byte]] = {
lazy val sKey = new String(key)
getResponseForNameAsMap(name, "_map").flatMap(_.get(sKey)).asInstanceOf[Option[String]].map(_.getBytes)
}
def getMapStorageSizeFor(name: String): Int = getMapStorageFor(name).size
def getMapStorageFor(name: String): List[(Array[Byte], Array[Byte])] = {
val m = getResponseForNameAsMap(name, "_map").map(_ - ("_id", "_rev")).getOrElse(Map[String, Any]())
m.toList.map(e => (e._1.getBytes, e._2.asInstanceOf[String].getBytes))
}
def getMapStorageRangeFor(name: String, start: Option[Array[Byte]], finish: Option[Array[Byte]], count: Int): List[(Array[Byte], Array[Byte])] = {
val m = getResponseForNameAsMap(name, "_map").map(_ - ("_id", "_rev")).getOrElse(Map[String, Any]())
val keys = m.keys.toList.sortWith(_ < _)
// if the supplied start is not defined, get the head of keys
val s = start.map(new String(_)).getOrElse(keys.head)
@@ -131,7 +131,7 @@ private [akka] object CouchDBStorageBackend extends
storeMap(name, "_vector", m + ("vector" -> v.updated(index, new String(elem))))
}
}
def getVectorStorageEntryFor(name: String, index: Int): Array[Byte] ={
val v = getResponseForNameAsMap(name, "_vector").flatMap(_.get("vector")).getOrElse(List[String]()).asInstanceOf[List[String]]
if (v.indices.contains(index))
@@ -139,7 +139,7 @@ private [akka] object CouchDBStorageBackend extends
else
Array[Byte]()
}
def getVectorStorageSizeFor(name: String): Int ={
getResponseForNameAsMap(name, "_vector").flatMap(_.get("vector")).map(_.asInstanceOf[List[String]].size).getOrElse(0)
}
@@ -151,12 +151,12 @@ private [akka] object CouchDBStorageBackend extends
val c = if (count == 0) v.length else count
v.slice(s, scala.math.min(s + c, f)).map(_.getBytes)
}
def insertRefStorageFor(name: String, element: Array[Byte]) ={
val newDoc = getResponseForNameAsMap(name, "_ref").getOrElse(Map[String, Any]()) + ("ref" -> new String(element))
storeMap(name, "_ref", newDoc)
}
def getRefStorageFor(name: String): Option[Array[Byte]] ={
getResponseForNameAsMap(name, "_ref").flatMap(_.get("ref")).map(_.asInstanceOf[String].getBytes)
}
@@ -171,7 +171,7 @@ private [akka] object CouchDBStorageBackend extends
val delete = new DeleteMethod(url)
delete.setRequestHeader("If-Match", rev)
client.executeMethod(delete)
val response = delete.getResponseBodyAsString()
if (response != null)
Some(response)
@@ -191,7 +191,7 @@ private [akka] object CouchDBStorageBackend extends
else
None
}
private def getResponse(url: String): Option[String] = {
val client = new HttpClient()
val method = new GetMethod(url)

View file

@@ -15,14 +15,14 @@ import java.util.{Calendar, Date}
@RunWith(classOf[JUnitRunner])
class CouchDBStorageBackendSpec extends Specification {
doBeforeSpec {
CouchDBStorageBackend.create()
}
doAfterSpec {
CouchDBStorageBackend.drop()
}
"CouchDBStorageBackend store and query in map" should {
"enter 4 entries for transaction T-1" in {
insertMapStorageEntryFor("T-1", "debasish.company".getBytes, "anshinsoft".getBytes)
@@ -34,13 +34,13 @@ class CouchDBStorageBackendSpec extends Specification {
new String(getMapStorageEntryFor("T-1", "debasish.language".getBytes).get) mustEqual("java")
getMapStorageSizeFor("T-1") mustEqual(4)
}
"enter key/values for another transaction T-2" in {
insertMapStorageEntryFor("T-2", "debasish.age".getBytes, "49".getBytes)
insertMapStorageEntryFor("T-2", "debasish.spouse".getBytes, "paramita".getBytes)
getMapStorageSizeFor("T-2") mustEqual(2)
}
"remove map storage for T-99" in {
insertMapStorageEntryFor("T-99", "provider".getBytes, "googleapp".getBytes)
insertMapStorageEntryFor("T-99", "quota".getBytes, "100mb".getBytes)
@@ -67,7 +67,7 @@ class CouchDBStorageBackendSpec extends Specification {
insertMapStorageEntryFor("T-11", "david".getBytes, toByteArray[Long](d / 2))
getMapStorageSizeFor("T-11") mustEqual(4)
fromByteArray[Long](getMapStorageEntryFor("T-11", "steve".getBytes).get) mustEqual(d)
}
}
@@ -98,7 +98,7 @@ class CouchDBStorageBackendSpec extends Specification {
getMapStorageRangeFor("T-5",
Some("trade.account".getBytes),
None, 0).map(e => (new String(e._1), new String(e._2))).size mustEqual(7)
removeMapStorageFor("T-5")
}
}
@@ -113,7 +113,7 @@ class CouchDBStorageBackendSpec extends Specification {
getMapStorageSizeFor("T-31") mustEqual(1)
fromByteArray[Name](getMapStorageEntryFor("T-31", "debasish".getBytes).getOrElse(Array[Byte]())) mustEqual(n)
removeMapStorageFor("T-31")
}
}
"Store and query in vectors" should {
@@ -135,7 +135,7 @@ class CouchDBStorageBackendSpec extends Specification {
"write a Name object and fetch it properly" in {
val dtb = Calendar.getInstance.getTime
val n = Name(100, "debasish ghosh", "kolkata", dtb, Some(dtb))
insertVectorStorageEntryFor("T-31", toByteArray[Name](n))
getVectorStorageSizeFor("T-31") mustEqual(1)
fromByteArray[Name](getVectorStorageEntryFor("T-31", 0)) mustEqual(n)
@@ -159,21 +159,21 @@ class CouchDBStorageBackendSpec extends Specification {
fromByteArray[Name](getRefStorageFor("T-4").get) mustEqual(n)
}
}
"Mix the 3 different types storage with the same name" should {
"work independently without inference each other" in {
insertVectorStorageEntryFor("SameName", "v1".getBytes)
insertMapStorageEntryFor("SameName", "vector".getBytes, "map_value_v".getBytes)
insertVectorStorageEntryFor("SameName", "v2".getBytes)
insertMapStorageEntryFor("SameName", "ref".getBytes, "map_value_r".getBytes)
insertVectorStorageEntryFor("SameName", "v3".getBytes)
insertRefStorageFor("SameName", "I am a ref!".getBytes)
getMapStorageSizeFor("SameName") mustEqual(2)
new String(getMapStorageEntryFor("SameName", "vector".getBytes).get) mustEqual("map_value_v")
new String(getMapStorageEntryFor("SameName", "ref".getBytes).get) mustEqual("map_value_r")
getVectorStorageSizeFor("SameName") mustEqual(3)
new String(getRefStorageFor("SameName").get) mustEqual("I am a ref!")
}
}
}

View file

@@ -39,7 +39,7 @@ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte],
CONFIGURATION.set("hbase.zookeeper.quorum", HBASE_ZOOKEEPER_QUORUM)
init
def init {
val ADMIN = new HBaseAdmin(CONFIGURATION)
@@ -50,14 +50,14 @@ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte],
ADMIN.enableTable(REF_TABLE_NAME)
}
REF_TABLE = new HTable(CONFIGURATION, REF_TABLE_NAME);
if (!ADMIN.tableExists(VECTOR_TABLE_NAME)) {
ADMIN.createTable(new HTableDescriptor(VECTOR_TABLE_NAME))
ADMIN.disableTable(VECTOR_TABLE_NAME)
ADMIN.addColumn(VECTOR_TABLE_NAME, new HColumnDescriptor(VECTOR_ELEMENT_COLUMN_FAMILY_NAME))
ADMIN.enableTable(VECTOR_TABLE_NAME);
}
VECTOR_TABLE = new HTable(CONFIGURATION, VECTOR_TABLE_NAME)
if (!ADMIN.tableExists(MAP_TABLE_NAME)) {
ADMIN.createTable(new HTableDescriptor(MAP_TABLE_NAME))
@@ -65,9 +65,9 @@ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte],
ADMIN.addColumn(MAP_TABLE_NAME, new HColumnDescriptor(MAP_ELEMENT_COLUMN_FAMILY_NAME))
ADMIN.enableTable(MAP_TABLE_NAME);
}
MAP_TABLE = new HTable(CONFIGURATION, MAP_TABLE_NAME)
}
def drop {
val ADMIN = new HBaseAdmin(CONFIGURATION)
@@ -82,10 +82,10 @@ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte],
if (ADMIN.tableExists(MAP_TABLE_NAME)) {
ADMIN.disableTable(MAP_TABLE_NAME)
ADMIN.deleteTable(MAP_TABLE_NAME)
}
init
}
// ===============================================================
// For Ref
// ===============================================================
@@ -143,7 +143,7 @@ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte],
def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int): List[Array[Byte]] = {
import scala.math._
val row = new Get(Bytes.toBytes(name))
val result = VECTOR_TABLE.get(row)
val size = result.size
@@ -156,7 +156,7 @@ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte],
e = finish.get - 1
} else {
b = start.getOrElse(0)
e = finish.getOrElse(min(b + count - 1, size - 1))
}
for(i <- b to e) {
val colnum = size - i - 1
@@ -168,7 +168,7 @@ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte],
def getVectorStorageSizeFor(name: String): Int = {
val row = new Get(Bytes.toBytes(name))
val result = VECTOR_TABLE.get(row)
if (result.isEmpty)
0
else
@@ -190,7 +190,7 @@ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte],
def getMapStorageEntryFor(name: String, key: Array[Byte]): Option[Array[Byte]] = {
val row = new Get(Bytes.toBytes(name))
val result = MAP_TABLE.get(row)
Option(result.getValue(Bytes.toBytes(MAP_ELEMENT_COLUMN_FAMILY_NAME), key))
}

View file

@ -83,7 +83,7 @@ class HbasePersistentActorSpecTestIntegration extends JUnitSuite with BeforeAndA
override def beforeAll { override def beforeAll {
testUtil.startMiniCluster testUtil.startMiniCluster
} }
override def afterAll { override def afterAll {
testUtil.shutdownMiniCluster testUtil.shutdownMiniCluster
} }
@ -92,7 +92,7 @@ class HbasePersistentActorSpecTestIntegration extends JUnitSuite with BeforeAndA
def beforeEach { def beforeEach {
HbaseStorageBackend.drop HbaseStorageBackend.drop
} }
@After @After
def afterEach { def afterEach {
HbaseStorageBackend.drop HbaseStorageBackend.drop

View file

@ -8,17 +8,17 @@ import org.scalatest.BeforeAndAfterEach
class HbaseStorageSpecTestIntegration extends class HbaseStorageSpecTestIntegration extends
Spec with Spec with
ShouldMatchers with ShouldMatchers with
BeforeAndAfterAll with BeforeAndAfterAll with
BeforeAndAfterEach { BeforeAndAfterEach {
import org.apache.hadoop.hbase.HBaseTestingUtility import org.apache.hadoop.hbase.HBaseTestingUtility
val testUtil = new HBaseTestingUtility val testUtil = new HBaseTestingUtility
override def beforeAll { override def beforeAll {
testUtil.startMiniCluster testUtil.startMiniCluster
} }
override def afterAll { override def afterAll {
testUtil.shutdownMiniCluster testUtil.shutdownMiniCluster
} }
@ -26,11 +26,11 @@ BeforeAndAfterEach {
override def beforeEach { override def beforeEach {
HbaseStorageBackend.drop HbaseStorageBackend.drop
} }
override def afterEach { override def afterEach {
HbaseStorageBackend.drop HbaseStorageBackend.drop
} }
describe("persistent maps") { describe("persistent maps") {
it("should insert with single key and value") { it("should insert with single key and value") {
import HbaseStorageBackend._ import HbaseStorageBackend._
@ -70,8 +70,8 @@ BeforeAndAfterEach {
it("should do proper range queries") { it("should do proper range queries") {
import HbaseStorageBackend._ import HbaseStorageBackend._
val l = List( val l = List(
("bjarne stroustrup", "c++"), ("bjarne stroustrup", "c++"),
("martin odersky", "scala"), ("martin odersky", "scala"),
("james gosling", "java"), ("james gosling", "java"),
("yukihiro matsumoto", "ruby"), ("yukihiro matsumoto", "ruby"),
("slava pestov", "factor"), ("slava pestov", "factor"),
@ -82,11 +82,11 @@ BeforeAndAfterEach {
("guido van rossum", "python"), ("guido van rossum", "python"),
("james strachan", "groovy")) ("james strachan", "groovy"))
val rl = List( val rl = List(
("james gosling", "java"), ("james gosling", "java"),
("james strachan", "groovy"), ("james strachan", "groovy"),
("larry wall", "perl"), ("larry wall", "perl"),
("martin odersky", "scala"), ("martin odersky", "scala"),
("ola bini", "ioke"), ("rich hickey", "clojure"), ("ola bini", "ioke"), ("rich hickey", "clojure"),
("slava pestov", "factor")) ("slava pestov", "factor"))
insertMapStorageEntriesFor("t1", l.map { case (k, v) => (k.getBytes, v.getBytes) }) insertMapStorageEntriesFor("t1", l.map { case (k, v) => (k.getBytes, v.getBytes) })
getMapStorageSizeFor("t1") should equal(l.size) getMapStorageSizeFor("t1") should equal(l.size)
@ -166,7 +166,7 @@ BeforeAndAfterEach {
describe("persistent refs") { describe("persistent refs") {
it("should insert a ref") { it("should insert a ref") {
import HbaseStorageBackend._ import HbaseStorageBackend._
insertRefStorageFor("t1", "martin odersky".getBytes) insertRefStorageFor("t1", "martin odersky".getBytes)
new String(getRefStorageFor("t1").get) should equal("martin odersky") new String(getRefStorageFor("t1").get) should equal("martin odersky")
insertRefStorageFor("t1", "james gosling".getBytes) insertRefStorageFor("t1", "james gosling".getBytes)

View file

@ -80,7 +80,7 @@ object Storage {
kvs2add.foreach {kv => kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes) fooMap += (kv._1.getBytes, kv._2.getBytes)
} }
ks2rem.foreach {k => ks2rem.foreach {k =>
fooMap -= k.getBytes fooMap -= k.getBytes
}} }}
@ -94,7 +94,7 @@ object Storage {
} }
self.reply(true) self.reply(true)
case PUT_WITH_SLICE(kvs2add, from, cnt) => case PUT_WITH_SLICE(kvs2add, from, cnt) =>
val v = atomic { val v = atomic {
kvs2add.foreach {kv => kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes) fooMap += (kv._1.getBytes, kv._2.getBytes)
@ -103,7 +103,7 @@ object Storage {
} }
self.reply(v: List[(Array[Byte], Array[Byte])]) self.reply(v: List[(Array[Byte], Array[Byte])])
case PUT_REM_WITH_SLICE(kvs2add, ks2rem, from, cnt) => case PUT_REM_WITH_SLICE(kvs2add, ks2rem, from, cnt) =>
val v = atomic { val v = atomic {
kvs2add.foreach {kv => kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes) fooMap += (kv._1.getBytes, kv._2.getBytes)
@ -125,7 +125,7 @@ object Storage {
def receive = { def receive = {
case VADD(v) => case VADD(v) =>
val size = val size =
atomic { atomic {
fooVector + v.getBytes fooVector + v.getBytes
fooVector length fooVector length
@ -148,7 +148,7 @@ object Storage {
self.reply(els) self.reply(els)
case VUPD_AND_ABORT(index, value) => case VUPD_AND_ABORT(index, value) =>
val l = val l =
atomic { atomic {
fooVector.update(index, value.getBytes) fooVector.update(index, value.getBytes)
// force fail // force fail
@ -173,13 +173,13 @@ import Storage._
class HbaseTicket343SpecTestIntegration extends Spec with ShouldMatchers with BeforeAndAfterAll with BeforeAndAfterEach { class HbaseTicket343SpecTestIntegration extends Spec with ShouldMatchers with BeforeAndAfterAll with BeforeAndAfterEach {
import org.apache.hadoop.hbase.HBaseTestingUtility import org.apache.hadoop.hbase.HBaseTestingUtility
val testUtil = new HBaseTestingUtility val testUtil = new HBaseTestingUtility
override def beforeAll { override def beforeAll {
testUtil.startMiniCluster testUtil.startMiniCluster
} }
override def afterAll { override def afterAll {
testUtil.shutdownMiniCluster testUtil.shutdownMiniCluster
} }
@ -187,7 +187,7 @@ class HbaseTicket343SpecTestIntegration extends Spec with ShouldMatchers with Be
override def beforeEach { override def beforeEach {
HbaseStorageBackend.drop HbaseStorageBackend.drop
} }
override def afterEach { override def afterEach {
HbaseStorageBackend.drop HbaseStorageBackend.drop
} }
@ -315,7 +315,7 @@ class HbaseTicket343SpecTestIntegration extends Spec with ShouldMatchers with Be
(proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4)
evaluating { evaluating {
(proc !! VUPD_AND_ABORT(0, "virat")).getOrElse("VUPD_AND_ABORT failed") (proc !! VUPD_AND_ABORT(0, "virat")).getOrElse("VUPD_AND_ABORT failed")
} should produce [Exception] } should produce [Exception]
// update aborts and hence values will remain unchanged // update aborts and hence values will remain unchanged

View file

@ -48,10 +48,10 @@ private[akka] object MongoStorageBackend extends
db.safely { db => db.safely { db =>
val q: DBObject = MongoDBObject(KEY -> name) val q: DBObject = MongoDBObject(KEY -> name)
coll.findOne(q) match { coll.findOne(q) match {
case Some(dbo) => case Some(dbo) =>
entries.foreach { case (k, v) => dbo += new String(k) -> v } entries.foreach { case (k, v) => dbo += new String(k) -> v }
db.safely { db => coll.update(q, dbo, true, false) } db.safely { db => coll.update(q, dbo, true, false) }
case None => case None =>
val builder = MongoDBObject.newBuilder val builder = MongoDBObject.newBuilder
builder += KEY -> name builder += KEY -> name
entries.foreach { case (k, v) => builder += new String(k) -> v } entries.foreach { case (k, v) => builder += new String(k) -> v }
@ -79,7 +79,7 @@ private[akka] object MongoStorageBackend extends
} }
def getMapStorageEntryFor(name: String, key: Array[Byte]): Option[Array[Byte]] = queryFor(name) { (q, dbo) => def getMapStorageEntryFor(name: String, key: Array[Byte]): Option[Array[Byte]] = queryFor(name) { (q, dbo) =>
dbo.map { d => dbo.map { d =>
d.getAs[Array[Byte]](new String(key)) d.getAs[Array[Byte]](new String(key))
}.getOrElse(None) }.getOrElse(None)
} }
@ -132,7 +132,7 @@ private[akka] object MongoStorageBackend extends
db.safely { db => db.safely { db =>
coll.findOne(q) match { coll.findOne(q) match {
// exists : need to update // exists : need to update
case Some(dbo) => case Some(dbo) =>
dbo -= KEY dbo -= KEY
dbo -= "_id" dbo -= "_id"
val listBuilder = MongoDBList.newBuilder val listBuilder = MongoDBList.newBuilder
@ -146,7 +146,7 @@ private[akka] object MongoStorageBackend extends
coll.update(q, builder.result.asDBObject, true, false) coll.update(q, builder.result.asDBObject, true, false)
// new : just add // new : just add
case None => case None =>
val listBuilder = MongoDBList.newBuilder val listBuilder = MongoDBList.newBuilder
listBuilder ++= elements listBuilder ++= elements
@ -166,7 +166,7 @@ private[akka] object MongoStorageBackend extends
} }
def getVectorStorageEntryFor(name: String, index: Int): Array[Byte] = queryFor(name) { (q, dbo) => def getVectorStorageEntryFor(name: String, index: Int): Array[Byte] = queryFor(name) { (q, dbo) =>
dbo.map { d => dbo.map { d =>
d(index.toString).asInstanceOf[Array[Byte]] d(index.toString).asInstanceOf[Array[Byte]]
}.getOrElse(Array.empty[Byte]) }.getOrElse(Array.empty[Byte])
} }
@ -207,12 +207,12 @@ private[akka] object MongoStorageBackend extends
db.safely { db => db.safely { db =>
coll.findOne(q) match { coll.findOne(q) match {
// exists : need to update // exists : need to update
case Some(dbo) => case Some(dbo) =>
dbo += ((REF, element)) dbo += ((REF, element))
coll.update(q, dbo, true, false) coll.update(q, dbo, true, false)
// not found : make one // not found : make one
case None => case None =>
val builder = MongoDBObject.newBuilder val builder = MongoDBObject.newBuilder
builder += KEY -> name builder += KEY -> name
builder += REF -> element builder += REF -> element
@ -222,7 +222,7 @@ private[akka] object MongoStorageBackend extends
} }
def getRefStorageFor(name: String): Option[Array[Byte]] = queryFor(name) { (q, dbo) => def getRefStorageFor(name: String): Option[Array[Byte]] = queryFor(name) { (q, dbo) =>
dbo.map { d => dbo.map { d =>
d.getAs[Array[Byte]](REF) d.getAs[Array[Byte]](REF)
}.getOrElse(None) }.getOrElse(None)
} }

View file

@ -60,8 +60,8 @@ class MongoStorageSpec extends
it("should do proper range queries") { it("should do proper range queries") {
import MongoStorageBackend._ import MongoStorageBackend._
val l = List( val l = List(
("bjarne stroustrup", "c++"), ("bjarne stroustrup", "c++"),
("martin odersky", "scala"), ("martin odersky", "scala"),
("james gosling", "java"), ("james gosling", "java"),
("yukihiro matsumoto", "ruby"), ("yukihiro matsumoto", "ruby"),
("slava pestov", "factor"), ("slava pestov", "factor"),

View file

@ -80,7 +80,7 @@ object Storage {
kvs2add.foreach {kv => kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes) fooMap += (kv._1.getBytes, kv._2.getBytes)
} }
ks2rem.foreach {k => ks2rem.foreach {k =>
fooMap -= k.getBytes fooMap -= k.getBytes
}} }}
@ -94,7 +94,7 @@ object Storage {
} }
self.reply(true) self.reply(true)
case PUT_WITH_SLICE(kvs2add, from, cnt) => case PUT_WITH_SLICE(kvs2add, from, cnt) =>
val v = atomic { val v = atomic {
kvs2add.foreach {kv => kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes) fooMap += (kv._1.getBytes, kv._2.getBytes)
@ -103,7 +103,7 @@ object Storage {
} }
self.reply(v: List[(Array[Byte], Array[Byte])]) self.reply(v: List[(Array[Byte], Array[Byte])])
case PUT_REM_WITH_SLICE(kvs2add, ks2rem, from, cnt) => case PUT_REM_WITH_SLICE(kvs2add, ks2rem, from, cnt) =>
val v = atomic { val v = atomic {
kvs2add.foreach {kv => kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes) fooMap += (kv._1.getBytes, kv._2.getBytes)
@ -125,7 +125,7 @@ object Storage {
def receive = { def receive = {
case VADD(v) => case VADD(v) =>
val size = val size =
atomic { atomic {
fooVector + v.getBytes fooVector + v.getBytes
fooVector length fooVector length
@ -148,7 +148,7 @@ object Storage {
self.reply(els) self.reply(els)
case VUPD_AND_ABORT(index, value) => case VUPD_AND_ABORT(index, value) =>
val l = val l =
atomic { atomic {
fooVector.update(index, value.getBytes) fooVector.update(index, value.getBytes)
// force fail // force fail
@ -315,7 +315,7 @@ class MongoTicket343Spec extends
(proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4)
evaluating { evaluating {
(proc !! VUPD_AND_ABORT(0, "virat")).getOrElse("VUPD_AND_ABORT failed") (proc !! VUPD_AND_ABORT(0, "virat")).getOrElse("VUPD_AND_ABORT failed")
} should produce [Exception] } should produce [Exception]
// update aborts and hence values will remain unchanged // update aborts and hence values will remain unchanged

View file

@ -32,7 +32,7 @@ object CommonsCodec {
import CommonsCodec._ import CommonsCodec._
import CommonsCodec.Base64StringEncoder._ import CommonsCodec.Base64StringEncoder._
/** /**
* A module for supporting Redis based persistence. * A module for supporting Redis based persistence.
* <p/> * <p/>
@ -76,7 +76,7 @@ private [akka] object RedisStorageBackend extends
/** /**
* Map storage in Redis. * Map storage in Redis.
* <p/> * <p/>
* Maps are stored as key/value pairs in redis. * Maps are stored as key/value pairs in redis.
*/ */
def insertMapStorageEntryFor(name: String, key: Array[Byte], value: Array[Byte]): Unit = withErrorHandling { def insertMapStorageEntryFor(name: String, key: Array[Byte], value: Array[Byte]): Unit = withErrorHandling {
insertMapStorageEntriesFor(name, List((key, value))) insertMapStorageEntriesFor(name, List((key, value)))
@ -135,7 +135,7 @@ private [akka] object RedisStorageBackend extends
def getMapStorageFor(name: String): List[(Array[Byte], Array[Byte])] = withErrorHandling { def getMapStorageFor(name: String): List[(Array[Byte], Array[Byte])] = withErrorHandling {
db.keys("%s:*".format(name)) db.keys("%s:*".format(name))
.map { keys => .map { keys =>
keys.map(key => (makeKeyFromRedisKey(key.get)._2, stringToByteArray(db.get(key.get).get))).toList keys.map(key => (makeKeyFromRedisKey(key.get)._2, stringToByteArray(db.get(key.get).get))).toList
}.getOrElse { }.getOrElse {
throw new NoSuchElementException(name + " not present") throw new NoSuchElementException(name + " not present")
@ -302,10 +302,10 @@ private [akka] object RedisStorageBackend extends
// add item to sorted set identified by name // add item to sorted set identified by name
def zadd(name: String, zscore: String, item: Array[Byte]): Boolean = withErrorHandling { def zadd(name: String, zscore: String, item: Array[Byte]): Boolean = withErrorHandling {
db.zadd(name, zscore, byteArrayToString(item)) db.zadd(name, zscore, byteArrayToString(item))
.map { e => .map { e =>
e match { e match {
case 1 => true case 1 => true
case _ => false case _ => false
} }
}.getOrElse(false) }.getOrElse(false)
} }
@ -313,10 +313,10 @@ private [akka] object RedisStorageBackend extends
// remove item from sorted set identified by name // remove item from sorted set identified by name
def zrem(name: String, item: Array[Byte]): Boolean = withErrorHandling { def zrem(name: String, item: Array[Byte]): Boolean = withErrorHandling {
db.zrem(name, byteArrayToString(item)) db.zrem(name, byteArrayToString(item))
.map { e => .map { e =>
e match { e match {
case 1 => true case 1 => true
case _ => false case _ => false
} }
}.getOrElse(false) }.getOrElse(false)
} }
@ -331,7 +331,7 @@ private [akka] object RedisStorageBackend extends
} }
def zrange(name: String, start: Int, end: Int): List[Array[Byte]] = withErrorHandling { def zrange(name: String, start: Int, end: Int): List[Array[Byte]] = withErrorHandling {
db.zrange(name, start.toString, end.toString, RedisClient.ASC, false) db.zrange(name, start.toString, end.toString, RedisClient.ASC, false)
.map(_.map(e => stringToByteArray(e.get))) .map(_.map(e => stringToByteArray(e.get)))
.getOrElse { .getOrElse {
throw new NoSuchElementException(name + " not present") throw new NoSuchElementException(name + " not present")

View file

@ -76,7 +76,7 @@ object Storage {
} }
self.reply(v) self.reply(v)
case MSET(kvs) => case MSET(kvs) =>
atomic { atomic {
kvs.foreach {kv => kvs.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes) fooMap += (kv._1.getBytes, kv._2.getBytes)
@ -84,13 +84,13 @@ object Storage {
} }
self.reply(kvs.size) self.reply(kvs.size)
case REMOVE_AFTER_PUT(kvs2add, ks2rem) => case REMOVE_AFTER_PUT(kvs2add, ks2rem) =>
val v = val v =
atomic { atomic {
kvs2add.foreach {kv => kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes) fooMap += (kv._1.getBytes, kv._2.getBytes)
} }
ks2rem.foreach {k => ks2rem.foreach {k =>
fooMap -= k.getBytes fooMap -= k.getBytes
} }
@ -98,7 +98,7 @@ object Storage {
} }
self.reply(v) self.reply(v)
case CLEAR_AFTER_PUT(kvs2add) => case CLEAR_AFTER_PUT(kvs2add) =>
atomic { atomic {
kvs2add.foreach {kv => kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes) fooMap += (kv._1.getBytes, kv._2.getBytes)
@ -107,8 +107,8 @@ object Storage {
} }
self.reply(true) self.reply(true)
case PUT_WITH_SLICE(kvs2add, from, cnt) => case PUT_WITH_SLICE(kvs2add, from, cnt) =>
val v = val v =
atomic { atomic {
kvs2add.foreach {kv => kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes) fooMap += (kv._1.getBytes, kv._2.getBytes)
@ -117,8 +117,8 @@ object Storage {
} }
self.reply(v: List[(Array[Byte], Array[Byte])]) self.reply(v: List[(Array[Byte], Array[Byte])])
case PUT_REM_WITH_SLICE(kvs2add, ks2rem, from, cnt) => case PUT_REM_WITH_SLICE(kvs2add, ks2rem, from, cnt) =>
val v = val v =
atomic { atomic {
kvs2add.foreach {kv => kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes) fooMap += (kv._1.getBytes, kv._2.getBytes)
@ -140,7 +140,7 @@ object Storage {
def receive = { def receive = {
case VADD(v) => case VADD(v) =>
val size = val size =
atomic { atomic {
fooVector + v.getBytes fooVector + v.getBytes
fooVector length fooVector length
@ -163,7 +163,7 @@ object Storage {
self.reply(els) self.reply(els)
case VUPD_AND_ABORT(index, value) => case VUPD_AND_ABORT(index, value) =>
val l = val l =
atomic { atomic {
fooVector.update(index, value.getBytes) fooVector.update(index, value.getBytes)
// force fail // force fail
@ -347,7 +347,7 @@ class RedisTicket343Spec extends
(proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4) (proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4)
evaluating { evaluating {
(proc !! VUPD_AND_ABORT(0, "virat")).getOrElse("VUPD_AND_ABORT failed") (proc !! VUPD_AND_ABORT(0, "virat")).getOrElse("VUPD_AND_ABORT failed")
} should produce [Exception] } should produce [Exception]
// update aborts and hence values will remain unchanged // update aborts and hence values will remain unchanged

View file

@ -120,4 +120,4 @@ private[akka] object RiakStorageBackend extends CommonStorageBackend {
} }
} }

View file

@ -20,4 +20,4 @@ class RiakTicket343TestIntegration extends Ticket343Test {
def getMap: (String) => PersistentMap[Array[Byte], Array[Byte]] = RiakStorage.getMap def getMap: (String) => PersistentMap[Array[Byte], Array[Byte]] = RiakStorage.getMap
} }

View file

@ -142,4 +142,4 @@ private[akka] object VoldemortStorageBackend extends CommonStorageBackend {
} }
} }

View file

@ -64,4 +64,4 @@
</value-serializer> </value-serializer>
</store> </store>
</stores> </stores>

View file

@ -37,4 +37,4 @@ trait EmbeddedVoldemort extends BeforeAndAfterAll with Logging {
override protected def afterAll(): Unit = { override protected def afterAll(): Unit = {
server.stop server.stop
} }
} }

View file

@ -180,4 +180,4 @@ object VoldemortStorageBackendSuite {
value.getBytes("UTF-8") value.getBytes("UTF-8")
} }
} }

View file

@ -20,4 +20,4 @@ class VoldemortTicket343Test extends Ticket343Test with EmbeddedVoldemort {
def getMap: (String) => PersistentMap[Array[Byte], Array[Byte]] = VoldemortStorage.getMap def getMap: (String) => PersistentMap[Array[Byte], Array[Byte]] = VoldemortStorage.getMap
} }

View file

@ -69,7 +69,7 @@ object RemoteNode extends RemoteServer
object RemoteServer { object RemoteServer {
val UUID_PREFIX = "uuid:" val UUID_PREFIX = "uuid:"
val SECURE_COOKIE: Option[String] = { val SECURE_COOKIE: Option[String] = {
val cookie = config.getString("akka.remote.secure-cookie", "") val cookie = config.getString("akka.remote.secure-cookie", "")
if (cookie == "") None if (cookie == "") None
else Some(cookie) else Some(cookie)
@ -80,7 +80,7 @@ object RemoteServer {
"Configuration option 'akka.remote.server.require-cookie' is turned on but no secure cookie is defined in 'akka.remote.secure-cookie'.") "Configuration option 'akka.remote.server.require-cookie' is turned on but no secure cookie is defined in 'akka.remote.secure-cookie'.")
requireCookie requireCookie
} }
val UNTRUSTED_MODE = config.getBool("akka.remote.server.untrusted-mode", false) val UNTRUSTED_MODE = config.getBool("akka.remote.server.untrusted-mode", false)
val HOSTNAME = config.getString("akka.remote.server.hostname", "localhost") val HOSTNAME = config.getString("akka.remote.server.hostname", "localhost")
val PORT = config.getInt("akka.remote.server.port", 2552) val PORT = config.getInt("akka.remote.server.port", 2552)
@ -312,7 +312,7 @@ class RemoteServer extends Logging with ListenerManagement {
def unregister(id: String):Unit = synchronized { def unregister(id: String):Unit = synchronized {
if (_isRunning) { if (_isRunning) {
log.info("Unregistering server side remote actor with id [%s]", id) log.info("Unregistering server side remote actor with id [%s]", id)
if (id.startsWith(UUID_PREFIX)) actorsByUuid.remove(id.substring(UUID_PREFIX.length)) if (id.startsWith(UUID_PREFIX)) actorsByUuid.remove(id.substring(UUID_PREFIX.length))
else { else {
val actorRef = actors get id val actorRef = actors get id
actorsByUuid.remove(actorRef.uuid, actorRef) actorsByUuid.remove(actorRef.uuid, actorRef)
@ -477,11 +477,11 @@ class RemoteServerHandler(
val actorInfo = request.getActorInfo val actorInfo = request.getActorInfo
log.debug("Dispatching to remote actor [%s:%s]", actorInfo.getTarget, actorInfo.getUuid) log.debug("Dispatching to remote actor [%s:%s]", actorInfo.getTarget, actorInfo.getUuid)
val actorRef = val actorRef =
try { try {
createActor(actorInfo).start createActor(actorInfo).start
} catch { } catch {
case e: SecurityException => case e: SecurityException =>
channel.write(createErrorReplyMessage(e, request, true)) channel.write(createErrorReplyMessage(e, request, true))
server.notifyListeners(RemoteServerError(e, server)) server.notifyListeners(RemoteServerError(e, server))
return return
@ -493,10 +493,10 @@ class RemoteServerHandler(
else None else None
message match { // first match on system messages message match { // first match on system messages
case RemoteActorSystemMessage.Stop => case RemoteActorSystemMessage.Stop =>
if (RemoteServer.UNTRUSTED_MODE) throw new SecurityException("Remote server is operating is untrusted mode, can not stop the actor") if (RemoteServer.UNTRUSTED_MODE) throw new SecurityException("Remote server is operating is untrusted mode, can not stop the actor")
else actorRef.stop else actorRef.stop
case _: LifeCycleMessage if (RemoteServer.UNTRUSTED_MODE) => case _: LifeCycleMessage if (RemoteServer.UNTRUSTED_MODE) =>
throw new SecurityException("Remote server is operating is untrusted mode, can not pass on a LifeCycleMessage to the remote actor") throw new SecurityException("Remote server is operating is untrusted mode, can not pass on a LifeCycleMessage to the remote actor")
case _ => // then match on user defined messages case _ => // then match on user defined messages
@ -613,7 +613,7 @@ class RemoteServerHandler(
val timeout = actorInfo.getTimeout val timeout = actorInfo.getTimeout
val actorRefOrNull = findActorByIdOrUuid(id, uuidFrom(uuid.getHigh,uuid.getLow).toString) val actorRefOrNull = findActorByIdOrUuid(id, uuidFrom(uuid.getHigh,uuid.getLow).toString)
if (actorRefOrNull eq null) { if (actorRefOrNull eq null) {
try { try {
if (RemoteServer.UNTRUSTED_MODE) throw new SecurityException( if (RemoteServer.UNTRUSTED_MODE) throw new SecurityException(
@ -687,10 +687,10 @@ class RemoteServerHandler(
private def authenticateRemoteClient(request: RemoteRequestProtocol, ctx: ChannelHandlerContext) = { private def authenticateRemoteClient(request: RemoteRequestProtocol, ctx: ChannelHandlerContext) = {
val attachment = ctx.getAttachment val attachment = ctx.getAttachment
if ((attachment ne null) && if ((attachment ne null) &&
attachment.isInstanceOf[String] && attachment.isInstanceOf[String] &&
attachment.asInstanceOf[String] == CHANNEL_INIT) { // is first time around, channel initialization attachment.asInstanceOf[String] == CHANNEL_INIT) { // is first time around, channel initialization
ctx.setAttachment(null) ctx.setAttachment(null)
val clientAddress = ctx.getChannel.getRemoteAddress.toString val clientAddress = ctx.getChannel.getRemoteAddress.toString
if (!request.hasCookie) throw new SecurityException( if (!request.hasCookie) throw new SecurityException(
"The remote client [" + clientAddress + "] does not have a secure cookie.") "The remote client [" + clientAddress + "] does not have a secure cookie.")

View file

@ -93,7 +93,7 @@ object Serializable {
* case class Address(street: String, city: String, zip: String) * case class Address(street: String, city: String, zip: String)
* extends ScalaJSON[Address] { * extends ScalaJSON[Address] {
* *
* implicit val AddressFormat: Format[Address] = * implicit val AddressFormat: Format[Address] =
* asProduct3("street", "city", "zip")(Address)(Address.unapply(_).get) * asProduct3("street", "city", "zip")(Address)(Address.unapply(_).get)
* *
* import dispatch.json._ * import dispatch.json._
@ -113,9 +113,9 @@ object Serializable {
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a> * @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/ */
trait ScalaJSON[T] extends JSON { trait ScalaJSON[T] extends JSON {
def toJSON: String def toJSON: String
def fromJSON(js: String): T def fromJSON(js: String): T
def toBytes: Array[Byte] def toBytes: Array[Byte]
def fromBytes(bytes: Array[Byte]): T def fromBytes(bytes: Array[Byte]): T
} }
} }

View file

@ -116,7 +116,7 @@ object ActorSerialization {
if (serializeMailBox == true) { if (serializeMailBox == true) {
val messages = val messages =
actorRef.mailbox match { actorRef.mailbox match {
case q: java.util.Queue[MessageInvocation] => case q: java.util.Queue[MessageInvocation] =>
val l = new scala.collection.mutable.ListBuffer[MessageInvocation] val l = new scala.collection.mutable.ListBuffer[MessageInvocation]
@ -125,7 +125,7 @@ object ActorSerialization {
l l
} }
val requestProtocols = val requestProtocols =
messages.map(m => messages.map(m =>
RemoteActorSerialization.createRemoteRequestProtocolBuilder( RemoteActorSerialization.createRemoteRequestProtocolBuilder(
actorRef, actorRef,
@ -172,7 +172,7 @@ object ActorSerialization {
if (protocol.hasSupervisor) Some(RemoteActorSerialization.fromProtobufToRemoteActorRef(protocol.getSupervisor, loader)) if (protocol.hasSupervisor) Some(RemoteActorSerialization.fromProtobufToRemoteActorRef(protocol.getSupervisor, loader))
else None else None
val hotswap = val hotswap =
if (serializer.isDefined && protocol.hasHotswapStack) serializer.get if (serializer.isDefined && protocol.hasHotswapStack) serializer.get
.fromBinary(protocol.getHotswapStack.toByteArray, Some(classOf[Stack[PartialFunction[Any, Unit]]])) .fromBinary(protocol.getHotswapStack.toByteArray, Some(classOf[Stack[PartialFunction[Any, Unit]]]))
.asInstanceOf[Stack[PartialFunction[Any, Unit]]] .asInstanceOf[Stack[PartialFunction[Any, Unit]]]

View file

@ -15,47 +15,47 @@ public final class ProtobufProtocol {
initFields(); initFields();
} }
private ProtobufPOJO(boolean noInit) {} private ProtobufPOJO(boolean noInit) {}
private static final ProtobufPOJO defaultInstance; private static final ProtobufPOJO defaultInstance;
public static ProtobufPOJO getDefaultInstance() { public static ProtobufPOJO getDefaultInstance() {
return defaultInstance; return defaultInstance;
} }
public ProtobufPOJO getDefaultInstanceForType() { public ProtobufPOJO getDefaultInstanceForType() {
return defaultInstance; return defaultInstance;
} }
public static final com.google.protobuf.Descriptors.Descriptor public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() { getDescriptor() {
return akka.actor.ProtobufProtocol.internal_static_akka_actor_ProtobufPOJO_descriptor; return akka.actor.ProtobufProtocol.internal_static_akka_actor_ProtobufPOJO_descriptor;
} }
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() { internalGetFieldAccessorTable() {
return akka.actor.ProtobufProtocol.internal_static_akka_actor_ProtobufPOJO_fieldAccessorTable; return akka.actor.ProtobufProtocol.internal_static_akka_actor_ProtobufPOJO_fieldAccessorTable;
} }
// required uint64 id = 1; // required uint64 id = 1;
public static final int ID_FIELD_NUMBER = 1; public static final int ID_FIELD_NUMBER = 1;
private boolean hasId; private boolean hasId;
private long id_ = 0L; private long id_ = 0L;
public boolean hasId() { return hasId; } public boolean hasId() { return hasId; }
public long getId() { return id_; } public long getId() { return id_; }
// required string name = 2; // required string name = 2;
public static final int NAME_FIELD_NUMBER = 2; public static final int NAME_FIELD_NUMBER = 2;
private boolean hasName; private boolean hasName;
private java.lang.String name_ = ""; private java.lang.String name_ = "";
public boolean hasName() { return hasName; } public boolean hasName() { return hasName; }
public java.lang.String getName() { return name_; } public java.lang.String getName() { return name_; }
// required bool status = 3; // required bool status = 3;
public static final int STATUS_FIELD_NUMBER = 3; public static final int STATUS_FIELD_NUMBER = 3;
private boolean hasStatus; private boolean hasStatus;
private boolean status_ = false; private boolean status_ = false;
public boolean hasStatus() { return hasStatus; } public boolean hasStatus() { return hasStatus; }
public boolean getStatus() { return status_; } public boolean getStatus() { return status_; }
private void initFields() { private void initFields() {
} }
public final boolean isInitialized() { public final boolean isInitialized() {
@ -64,7 +64,7 @@ public final class ProtobufProtocol {
if (!hasStatus) return false; if (!hasStatus) return false;
return true; return true;
} }
public void writeTo(com.google.protobuf.CodedOutputStream output) public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException { throws java.io.IOException {
getSerializedSize(); getSerializedSize();
@ -79,12 +79,12 @@ public final class ProtobufProtocol {
} }
getUnknownFields().writeTo(output); getUnknownFields().writeTo(output);
} }
private int memoizedSerializedSize = -1; private int memoizedSerializedSize = -1;
public int getSerializedSize() { public int getSerializedSize() {
int size = memoizedSerializedSize; int size = memoizedSerializedSize;
if (size != -1) return size; if (size != -1) return size;
size = 0; size = 0;
if (hasId()) { if (hasId()) {
size += com.google.protobuf.CodedOutputStream size += com.google.protobuf.CodedOutputStream
@ -102,7 +102,7 @@ public final class ProtobufProtocol {
memoizedSerializedSize = size; memoizedSerializedSize = size;
return size; return size;
} }
public static akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom( public static akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom(
com.google.protobuf.ByteString data) com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException { throws com.google.protobuf.InvalidProtocolBufferException {
@ -169,31 +169,31 @@ public final class ProtobufProtocol {
return newBuilder().mergeFrom(input, extensionRegistry) return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed(); .buildParsed();
} }
public static Builder newBuilder() { return Builder.create(); } public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); } public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(akka.actor.ProtobufProtocol.ProtobufPOJO prototype) { public static Builder newBuilder(akka.actor.ProtobufProtocol.ProtobufPOJO prototype) {
return newBuilder().mergeFrom(prototype); return newBuilder().mergeFrom(prototype);
} }
public Builder toBuilder() { return newBuilder(this); } public Builder toBuilder() { return newBuilder(this); }
public static final class Builder extends public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> { com.google.protobuf.GeneratedMessage.Builder<Builder> {
private akka.actor.ProtobufProtocol.ProtobufPOJO result; private akka.actor.ProtobufProtocol.ProtobufPOJO result;
// Construct using akka.actor.ProtobufProtocol.ProtobufPOJO.newBuilder() // Construct using akka.actor.ProtobufProtocol.ProtobufPOJO.newBuilder()
private Builder() {} private Builder() {}
private static Builder create() { private static Builder create() {
Builder builder = new Builder(); Builder builder = new Builder();
builder.result = new akka.actor.ProtobufProtocol.ProtobufPOJO(); builder.result = new akka.actor.ProtobufProtocol.ProtobufPOJO();
return builder; return builder;
} }
protected akka.actor.ProtobufProtocol.ProtobufPOJO internalGetResult() { protected akka.actor.ProtobufProtocol.ProtobufPOJO internalGetResult() {
return result; return result;
} }
public Builder clear() { public Builder clear() {
if (result == null) { if (result == null) {
throw new IllegalStateException( throw new IllegalStateException(
@ -202,20 +202,20 @@ public final class ProtobufProtocol {
result = new akka.actor.ProtobufProtocol.ProtobufPOJO(); result = new akka.actor.ProtobufProtocol.ProtobufPOJO();
return this; return this;
} }
public Builder clone() { public Builder clone() {
return create().mergeFrom(result); return create().mergeFrom(result);
} }
public com.google.protobuf.Descriptors.Descriptor public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() { getDescriptorForType() {
return akka.actor.ProtobufProtocol.ProtobufPOJO.getDescriptor(); return akka.actor.ProtobufProtocol.ProtobufPOJO.getDescriptor();
} }
public akka.actor.ProtobufProtocol.ProtobufPOJO getDefaultInstanceForType() { public akka.actor.ProtobufProtocol.ProtobufPOJO getDefaultInstanceForType() {
return akka.actor.ProtobufProtocol.ProtobufPOJO.getDefaultInstance(); return akka.actor.ProtobufProtocol.ProtobufPOJO.getDefaultInstance();
} }
public boolean isInitialized() { public boolean isInitialized() {
return result.isInitialized(); return result.isInitialized();
} }
@ -225,7 +225,7 @@ public final class ProtobufProtocol {
} }
return buildPartial(); return buildPartial();
} }
private akka.actor.ProtobufProtocol.ProtobufPOJO buildParsed() private akka.actor.ProtobufProtocol.ProtobufPOJO buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException { throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) { if (!isInitialized()) {
@ -234,7 +234,7 @@ public final class ProtobufProtocol {
} }
return buildPartial(); return buildPartial();
} }
public akka.actor.ProtobufProtocol.ProtobufPOJO buildPartial() { public akka.actor.ProtobufProtocol.ProtobufPOJO buildPartial() {
if (result == null) { if (result == null) {
throw new IllegalStateException( throw new IllegalStateException(
@ -244,7 +244,7 @@ public final class ProtobufProtocol {
result = null; result = null;
return returnMe; return returnMe;
} }
public Builder mergeFrom(com.google.protobuf.Message other) { public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof akka.actor.ProtobufProtocol.ProtobufPOJO) { if (other instanceof akka.actor.ProtobufProtocol.ProtobufPOJO) {
return mergeFrom((akka.actor.ProtobufProtocol.ProtobufPOJO)other); return mergeFrom((akka.actor.ProtobufProtocol.ProtobufPOJO)other);
@ -253,7 +253,7 @@ public final class ProtobufProtocol {
return this; return this;
} }
} }
public Builder mergeFrom(akka.actor.ProtobufProtocol.ProtobufPOJO other) { public Builder mergeFrom(akka.actor.ProtobufProtocol.ProtobufPOJO other) {
if (other == akka.actor.ProtobufProtocol.ProtobufPOJO.getDefaultInstance()) return this; if (other == akka.actor.ProtobufProtocol.ProtobufPOJO.getDefaultInstance()) return this;
if (other.hasId()) { if (other.hasId()) {
@ -268,7 +268,7 @@ public final class ProtobufProtocol {
this.mergeUnknownFields(other.getUnknownFields()); this.mergeUnknownFields(other.getUnknownFields());
return this; return this;
} }
public Builder mergeFrom( public Builder mergeFrom(
com.google.protobuf.CodedInputStream input, com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@ -305,8 +305,8 @@ public final class ProtobufProtocol {
} }
} }
} }
// required uint64 id = 1; // required uint64 id = 1;
public boolean hasId() { public boolean hasId() {
return result.hasId(); return result.hasId();
@ -324,7 +324,7 @@ public final class ProtobufProtocol {
result.id_ = 0L; result.id_ = 0L;
return this; return this;
} }
// required string name = 2; // required string name = 2;
public boolean hasName() { public boolean hasName() {
return result.hasName(); return result.hasName();
@ -345,7 +345,7 @@ public final class ProtobufProtocol {
result.name_ = getDefaultInstance().getName(); result.name_ = getDefaultInstance().getName();
return this; return this;
} }
// required bool status = 3; // required bool status = 3;
public boolean hasStatus() { public boolean hasStatus() {
return result.hasStatus(); return result.hasStatus();
@ -363,19 +363,19 @@ public final class ProtobufProtocol {
result.status_ = false; result.status_ = false;
return this; return this;
} }
// @@protoc_insertion_point(builder_scope:akka.actor.ProtobufPOJO) // @@protoc_insertion_point(builder_scope:akka.actor.ProtobufPOJO)
} }
static { static {
defaultInstance = new ProtobufPOJO(true); defaultInstance = new ProtobufPOJO(true);
akka.actor.ProtobufProtocol.internalForceInit(); akka.actor.ProtobufProtocol.internalForceInit();
defaultInstance.initFields(); defaultInstance.initFields();
} }
// @@protoc_insertion_point(class_scope:akka.actor.ProtobufPOJO) // @@protoc_insertion_point(class_scope:akka.actor.ProtobufPOJO)
} }
public static final class Counter extends public static final class Counter extends
com.google.protobuf.GeneratedMessage { com.google.protobuf.GeneratedMessage {
// Use Counter.newBuilder() to construct. // Use Counter.newBuilder() to construct.
@ -383,40 +383,40 @@ public final class ProtobufProtocol {
initFields(); initFields();
} }
private Counter(boolean noInit) {} private Counter(boolean noInit) {}
private static final Counter defaultInstance; private static final Counter defaultInstance;
public static Counter getDefaultInstance() { public static Counter getDefaultInstance() {
return defaultInstance; return defaultInstance;
} }
public Counter getDefaultInstanceForType() { public Counter getDefaultInstanceForType() {
return defaultInstance; return defaultInstance;
} }
public static final com.google.protobuf.Descriptors.Descriptor public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() { getDescriptor() {
return akka.actor.ProtobufProtocol.internal_static_akka_actor_Counter_descriptor; return akka.actor.ProtobufProtocol.internal_static_akka_actor_Counter_descriptor;
} }
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() { internalGetFieldAccessorTable() {
return akka.actor.ProtobufProtocol.internal_static_akka_actor_Counter_fieldAccessorTable; return akka.actor.ProtobufProtocol.internal_static_akka_actor_Counter_fieldAccessorTable;
} }
// required uint32 count = 1; // required uint32 count = 1;
public static final int COUNT_FIELD_NUMBER = 1; public static final int COUNT_FIELD_NUMBER = 1;
private boolean hasCount; private boolean hasCount;
private int count_ = 0; private int count_ = 0;
public boolean hasCount() { return hasCount; } public boolean hasCount() { return hasCount; }
public int getCount() { return count_; } public int getCount() { return count_; }
private void initFields() { private void initFields() {
} }
public final boolean isInitialized() { public final boolean isInitialized() {
if (!hasCount) return false; if (!hasCount) return false;
return true; return true;
} }
public void writeTo(com.google.protobuf.CodedOutputStream output) public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException { throws java.io.IOException {
getSerializedSize(); getSerializedSize();
@ -425,12 +425,12 @@ public final class ProtobufProtocol {
} }
getUnknownFields().writeTo(output); getUnknownFields().writeTo(output);
} }
private int memoizedSerializedSize = -1; private int memoizedSerializedSize = -1;
public int getSerializedSize() { public int getSerializedSize() {
int size = memoizedSerializedSize; int size = memoizedSerializedSize;
if (size != -1) return size; if (size != -1) return size;
size = 0; size = 0;
if (hasCount()) { if (hasCount()) {
size += com.google.protobuf.CodedOutputStream size += com.google.protobuf.CodedOutputStream
@ -440,7 +440,7 @@ public final class ProtobufProtocol {
memoizedSerializedSize = size; memoizedSerializedSize = size;
return size; return size;
} }
public static akka.actor.ProtobufProtocol.Counter parseFrom( public static akka.actor.ProtobufProtocol.Counter parseFrom(
com.google.protobuf.ByteString data) com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException { throws com.google.protobuf.InvalidProtocolBufferException {
@ -507,31 +507,31 @@ public final class ProtobufProtocol {
return newBuilder().mergeFrom(input, extensionRegistry) return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed(); .buildParsed();
} }
public static Builder newBuilder() { return Builder.create(); } public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); } public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(akka.actor.ProtobufProtocol.Counter prototype) { public static Builder newBuilder(akka.actor.ProtobufProtocol.Counter prototype) {
return newBuilder().mergeFrom(prototype); return newBuilder().mergeFrom(prototype);
} }
public Builder toBuilder() { return newBuilder(this); } public Builder toBuilder() { return newBuilder(this); }
public static final class Builder extends public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> { com.google.protobuf.GeneratedMessage.Builder<Builder> {
private akka.actor.ProtobufProtocol.Counter result; private akka.actor.ProtobufProtocol.Counter result;
// Construct using akka.actor.ProtobufProtocol.Counter.newBuilder() // Construct using akka.actor.ProtobufProtocol.Counter.newBuilder()
private Builder() {} private Builder() {}
private static Builder create() { private static Builder create() {
Builder builder = new Builder(); Builder builder = new Builder();
builder.result = new akka.actor.ProtobufProtocol.Counter(); builder.result = new akka.actor.ProtobufProtocol.Counter();
return builder; return builder;
} }
protected akka.actor.ProtobufProtocol.Counter internalGetResult() { protected akka.actor.ProtobufProtocol.Counter internalGetResult() {
return result; return result;
} }
public Builder clear() { public Builder clear() {
if (result == null) { if (result == null) {
throw new IllegalStateException( throw new IllegalStateException(
@ -540,20 +540,20 @@ public final class ProtobufProtocol {
result = new akka.actor.ProtobufProtocol.Counter(); result = new akka.actor.ProtobufProtocol.Counter();
return this; return this;
} }
public Builder clone() { public Builder clone() {
return create().mergeFrom(result); return create().mergeFrom(result);
} }
public com.google.protobuf.Descriptors.Descriptor public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() { getDescriptorForType() {
return akka.actor.ProtobufProtocol.Counter.getDescriptor(); return akka.actor.ProtobufProtocol.Counter.getDescriptor();
} }
public akka.actor.ProtobufProtocol.Counter getDefaultInstanceForType() { public akka.actor.ProtobufProtocol.Counter getDefaultInstanceForType() {
return akka.actor.ProtobufProtocol.Counter.getDefaultInstance(); return akka.actor.ProtobufProtocol.Counter.getDefaultInstance();
} }
public boolean isInitialized() { public boolean isInitialized() {
return result.isInitialized(); return result.isInitialized();
} }
@ -563,7 +563,7 @@ public final class ProtobufProtocol {
} }
return buildPartial(); return buildPartial();
} }
private akka.actor.ProtobufProtocol.Counter buildParsed() private akka.actor.ProtobufProtocol.Counter buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException { throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) { if (!isInitialized()) {
@ -572,7 +572,7 @@ public final class ProtobufProtocol {
} }
return buildPartial(); return buildPartial();
} }
public akka.actor.ProtobufProtocol.Counter buildPartial() { public akka.actor.ProtobufProtocol.Counter buildPartial() {
if (result == null) { if (result == null) {
throw new IllegalStateException( throw new IllegalStateException(
@ -582,7 +582,7 @@ public final class ProtobufProtocol {
result = null; result = null;
return returnMe; return returnMe;
} }
public Builder mergeFrom(com.google.protobuf.Message other) { public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof akka.actor.ProtobufProtocol.Counter) { if (other instanceof akka.actor.ProtobufProtocol.Counter) {
return mergeFrom((akka.actor.ProtobufProtocol.Counter)other); return mergeFrom((akka.actor.ProtobufProtocol.Counter)other);
@ -591,7 +591,7 @@ public final class ProtobufProtocol {
return this; return this;
} }
} }
public Builder mergeFrom(akka.actor.ProtobufProtocol.Counter other) { public Builder mergeFrom(akka.actor.ProtobufProtocol.Counter other) {
if (other == akka.actor.ProtobufProtocol.Counter.getDefaultInstance()) return this; if (other == akka.actor.ProtobufProtocol.Counter.getDefaultInstance()) return this;
if (other.hasCount()) { if (other.hasCount()) {
@ -600,7 +600,7 @@ public final class ProtobufProtocol {
this.mergeUnknownFields(other.getUnknownFields()); this.mergeUnknownFields(other.getUnknownFields());
return this; return this;
} }
public Builder mergeFrom( public Builder mergeFrom(
com.google.protobuf.CodedInputStream input, com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@ -629,8 +629,8 @@ public final class ProtobufProtocol {
} }
} }
} }
// required uint32 count = 1; // required uint32 count = 1;
public boolean hasCount() { public boolean hasCount() {
return result.hasCount(); return result.hasCount();
@ -648,19 +648,19 @@ public final class ProtobufProtocol {
result.count_ = 0; result.count_ = 0;
return this; return this;
} }
// @@protoc_insertion_point(builder_scope:akka.actor.Counter) // @@protoc_insertion_point(builder_scope:akka.actor.Counter)
} }
static { static {
defaultInstance = new Counter(true); defaultInstance = new Counter(true);
akka.actor.ProtobufProtocol.internalForceInit(); akka.actor.ProtobufProtocol.internalForceInit();
defaultInstance.initFields(); defaultInstance.initFields();
} }
// @@protoc_insertion_point(class_scope:akka.actor.Counter) // @@protoc_insertion_point(class_scope:akka.actor.Counter)
} }
public static final class DualCounter extends public static final class DualCounter extends
com.google.protobuf.GeneratedMessage { com.google.protobuf.GeneratedMessage {
// Use DualCounter.newBuilder() to construct. // Use DualCounter.newBuilder() to construct.
@ -668,40 +668,40 @@ public final class ProtobufProtocol {
initFields(); initFields();
} }
private DualCounter(boolean noInit) {} private DualCounter(boolean noInit) {}
private static final DualCounter defaultInstance; private static final DualCounter defaultInstance;
public static DualCounter getDefaultInstance() { public static DualCounter getDefaultInstance() {
return defaultInstance; return defaultInstance;
} }
public DualCounter getDefaultInstanceForType() { public DualCounter getDefaultInstanceForType() {
return defaultInstance; return defaultInstance;
} }
public static final com.google.protobuf.Descriptors.Descriptor public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() { getDescriptor() {
return akka.actor.ProtobufProtocol.internal_static_akka_actor_DualCounter_descriptor; return akka.actor.ProtobufProtocol.internal_static_akka_actor_DualCounter_descriptor;
} }
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() { internalGetFieldAccessorTable() {
return akka.actor.ProtobufProtocol.internal_static_akka_actor_DualCounter_fieldAccessorTable; return akka.actor.ProtobufProtocol.internal_static_akka_actor_DualCounter_fieldAccessorTable;
} }
// required uint32 count1 = 1; // required uint32 count1 = 1;
public static final int COUNT1_FIELD_NUMBER = 1; public static final int COUNT1_FIELD_NUMBER = 1;
private boolean hasCount1; private boolean hasCount1;
private int count1_ = 0; private int count1_ = 0;
public boolean hasCount1() { return hasCount1; } public boolean hasCount1() { return hasCount1; }
public int getCount1() { return count1_; } public int getCount1() { return count1_; }
// required uint32 count2 = 2; // required uint32 count2 = 2;
public static final int COUNT2_FIELD_NUMBER = 2; public static final int COUNT2_FIELD_NUMBER = 2;
private boolean hasCount2; private boolean hasCount2;
private int count2_ = 0; private int count2_ = 0;
public boolean hasCount2() { return hasCount2; } public boolean hasCount2() { return hasCount2; }
public int getCount2() { return count2_; } public int getCount2() { return count2_; }
private void initFields() { private void initFields() {
} }
public final boolean isInitialized() { public final boolean isInitialized() {
@ -709,7 +709,7 @@ public final class ProtobufProtocol {
if (!hasCount2) return false; if (!hasCount2) return false;
return true; return true;
} }
public void writeTo(com.google.protobuf.CodedOutputStream output) public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException { throws java.io.IOException {
getSerializedSize(); getSerializedSize();
@ -721,12 +721,12 @@ public final class ProtobufProtocol {
} }
getUnknownFields().writeTo(output); getUnknownFields().writeTo(output);
} }
private int memoizedSerializedSize = -1; private int memoizedSerializedSize = -1;
public int getSerializedSize() { public int getSerializedSize() {
int size = memoizedSerializedSize; int size = memoizedSerializedSize;
if (size != -1) return size; if (size != -1) return size;
size = 0; size = 0;
if (hasCount1()) { if (hasCount1()) {
size += com.google.protobuf.CodedOutputStream size += com.google.protobuf.CodedOutputStream
@ -740,7 +740,7 @@ public final class ProtobufProtocol {
memoizedSerializedSize = size; memoizedSerializedSize = size;
return size; return size;
} }
public static akka.actor.ProtobufProtocol.DualCounter parseFrom( public static akka.actor.ProtobufProtocol.DualCounter parseFrom(
com.google.protobuf.ByteString data) com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException { throws com.google.protobuf.InvalidProtocolBufferException {
@ -807,31 +807,31 @@ public final class ProtobufProtocol {
return newBuilder().mergeFrom(input, extensionRegistry) return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed(); .buildParsed();
} }
public static Builder newBuilder() { return Builder.create(); } public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); } public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(akka.actor.ProtobufProtocol.DualCounter prototype) { public static Builder newBuilder(akka.actor.ProtobufProtocol.DualCounter prototype) {
return newBuilder().mergeFrom(prototype); return newBuilder().mergeFrom(prototype);
} }
public Builder toBuilder() { return newBuilder(this); } public Builder toBuilder() { return newBuilder(this); }
public static final class Builder extends public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> { com.google.protobuf.GeneratedMessage.Builder<Builder> {
private akka.actor.ProtobufProtocol.DualCounter result; private akka.actor.ProtobufProtocol.DualCounter result;
// Construct using akka.actor.ProtobufProtocol.DualCounter.newBuilder() // Construct using akka.actor.ProtobufProtocol.DualCounter.newBuilder()
private Builder() {} private Builder() {}
private static Builder create() { private static Builder create() {
Builder builder = new Builder(); Builder builder = new Builder();
builder.result = new akka.actor.ProtobufProtocol.DualCounter(); builder.result = new akka.actor.ProtobufProtocol.DualCounter();
return builder; return builder;
} }
protected akka.actor.ProtobufProtocol.DualCounter internalGetResult() { protected akka.actor.ProtobufProtocol.DualCounter internalGetResult() {
return result; return result;
} }
public Builder clear() { public Builder clear() {
if (result == null) { if (result == null) {
throw new IllegalStateException( throw new IllegalStateException(
@ -840,20 +840,20 @@ public final class ProtobufProtocol {
result = new akka.actor.ProtobufProtocol.DualCounter(); result = new akka.actor.ProtobufProtocol.DualCounter();
return this; return this;
} }
public Builder clone() { public Builder clone() {
return create().mergeFrom(result); return create().mergeFrom(result);
} }
public com.google.protobuf.Descriptors.Descriptor public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() { getDescriptorForType() {
return akka.actor.ProtobufProtocol.DualCounter.getDescriptor(); return akka.actor.ProtobufProtocol.DualCounter.getDescriptor();
} }
public akka.actor.ProtobufProtocol.DualCounter getDefaultInstanceForType() { public akka.actor.ProtobufProtocol.DualCounter getDefaultInstanceForType() {
return akka.actor.ProtobufProtocol.DualCounter.getDefaultInstance(); return akka.actor.ProtobufProtocol.DualCounter.getDefaultInstance();
} }
public boolean isInitialized() { public boolean isInitialized() {
return result.isInitialized(); return result.isInitialized();
} }
@ -863,7 +863,7 @@ public final class ProtobufProtocol {
} }
return buildPartial(); return buildPartial();
} }
private akka.actor.ProtobufProtocol.DualCounter buildParsed() private akka.actor.ProtobufProtocol.DualCounter buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException { throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) { if (!isInitialized()) {
@ -872,7 +872,7 @@ public final class ProtobufProtocol {
} }
return buildPartial(); return buildPartial();
} }
public akka.actor.ProtobufProtocol.DualCounter buildPartial() { public akka.actor.ProtobufProtocol.DualCounter buildPartial() {
if (result == null) { if (result == null) {
throw new IllegalStateException( throw new IllegalStateException(
@@ -882,7 +882,7 @@ public final class ProtobufProtocol {
result = null; result = null;
return returnMe; return returnMe;
} }
public Builder mergeFrom(com.google.protobuf.Message other) { public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof akka.actor.ProtobufProtocol.DualCounter) { if (other instanceof akka.actor.ProtobufProtocol.DualCounter) {
return mergeFrom((akka.actor.ProtobufProtocol.DualCounter)other); return mergeFrom((akka.actor.ProtobufProtocol.DualCounter)other);
@@ -891,7 +891,7 @@ public final class ProtobufProtocol {
return this; return this;
} }
} }
public Builder mergeFrom(akka.actor.ProtobufProtocol.DualCounter other) { public Builder mergeFrom(akka.actor.ProtobufProtocol.DualCounter other) {
if (other == akka.actor.ProtobufProtocol.DualCounter.getDefaultInstance()) return this; if (other == akka.actor.ProtobufProtocol.DualCounter.getDefaultInstance()) return this;
if (other.hasCount1()) { if (other.hasCount1()) {
@@ -903,7 +903,7 @@ public final class ProtobufProtocol {
this.mergeUnknownFields(other.getUnknownFields()); this.mergeUnknownFields(other.getUnknownFields());
return this; return this;
} }
public Builder mergeFrom( public Builder mergeFrom(
com.google.protobuf.CodedInputStream input, com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@@ -936,8 +936,8 @@ public final class ProtobufProtocol {
} }
} }
} }
// required uint32 count1 = 1; // required uint32 count1 = 1;
public boolean hasCount1() { public boolean hasCount1() {
return result.hasCount1(); return result.hasCount1();
@@ -955,7 +955,7 @@ public final class ProtobufProtocol {
result.count1_ = 0; result.count1_ = 0;
return this; return this;
} }
// required uint32 count2 = 2; // required uint32 count2 = 2;
public boolean hasCount2() { public boolean hasCount2() {
return result.hasCount2(); return result.hasCount2();
@@ -973,19 +973,19 @@ public final class ProtobufProtocol {
result.count2_ = 0; result.count2_ = 0;
return this; return this;
} }
// @@protoc_insertion_point(builder_scope:akka.actor.DualCounter) // @@protoc_insertion_point(builder_scope:akka.actor.DualCounter)
} }
static { static {
defaultInstance = new DualCounter(true); defaultInstance = new DualCounter(true);
akka.actor.ProtobufProtocol.internalForceInit(); akka.actor.ProtobufProtocol.internalForceInit();
defaultInstance.initFields(); defaultInstance.initFields();
} }
// @@protoc_insertion_point(class_scope:akka.actor.DualCounter) // @@protoc_insertion_point(class_scope:akka.actor.DualCounter)
} }
private static com.google.protobuf.Descriptors.Descriptor private static com.google.protobuf.Descriptors.Descriptor
internal_static_akka_actor_ProtobufPOJO_descriptor; internal_static_akka_actor_ProtobufPOJO_descriptor;
private static private static
@@ -1001,7 +1001,7 @@ public final class ProtobufProtocol {
private static private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_akka_actor_DualCounter_fieldAccessorTable; internal_static_akka_actor_DualCounter_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() { getDescriptor() {
return descriptor; return descriptor;
@@ -1053,8 +1053,8 @@ public final class ProtobufProtocol {
new com.google.protobuf.Descriptors.FileDescriptor[] { new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner); }, assigner);
} }
public static void internalForceInit() {} public static void internalForceInit() {}
// @@protoc_insertion_point(outer_class_scope) // @@protoc_insertion_point(outer_class_scope)
} }
View file

@@ -61,7 +61,7 @@ object ClientInitiatedRemoteActorSpec {
var prefix = "default-" var prefix = "default-"
var count = 0 var count = 0
def receive = { def receive = {
case "incrPrefix" => count += 1; prefix = "" + count + "-" case "incrPrefix" => count += 1; prefix = "" + count + "-"
case msg: String => self.reply(prefix + msg) case msg: String => self.reply(prefix + msg)
} }
} }
View file

@@ -34,4 +34,4 @@ class RemoteAgentSpec extends JUnitSuite {
assert(a() == 20, "Remote agent should be updated properly") assert(a() == 20, "Remote agent should be updated properly")
a.close a.close
} }
} }
View file

@@ -194,7 +194,7 @@ class ServerInitiatedRemoteActorSpec extends JUnitSuite {
ref1.stop ref1.stop
ref2 ! "OneWay" ref2 ! "OneWay"
ref2.stop ref2.stop
} }
@Test @Test
View file

@@ -14,7 +14,7 @@ object Serializables {
case class Shop(store: String, item: String, price: Int) extends case class Shop(store: String, item: String, price: Int) extends
ScalaJSON[Shop] { ScalaJSON[Shop] {
implicit val ShopFormat: sjson.json.Format[Shop] = implicit val ShopFormat: sjson.json.Format[Shop] =
asProduct3("store", "item", "price")(Shop)(Shop.unapply(_).get) asProduct3("store", "item", "price")(Shop)(Shop.unapply(_).get)
def toJSON: String = JsValue.toJson(tojson(this)) def toJSON: String = JsValue.toJson(tojson(this))
@@ -27,7 +27,7 @@ object Serializables {
implicit val MyMessageFormat: sjson.json.Format[MyMessage] = implicit val MyMessageFormat: sjson.json.Format[MyMessage] =
asProduct2("id", "value")(MyMessage)(MyMessage.unapply(_).get) asProduct2("id", "value")(MyMessage)(MyMessage.unapply(_).get)
case class MyJsonObject(val key: String, val map: Map[String, Int], case class MyJsonObject(val key: String, val map: Map[String, Int],
val standAloneInt: Int) extends ScalaJSON[MyJsonObject] { val standAloneInt: Int) extends ScalaJSON[MyJsonObject] {
implicit val MyJsonObjectFormat: sjson.json.Format[MyJsonObject] = implicit val MyJsonObjectFormat: sjson.json.Format[MyJsonObject] =
asProduct3("key", "map", "standAloneInt")(MyJsonObject)(MyJsonObject.unapply(_).get) asProduct3("key", "map", "standAloneInt")(MyJsonObject)(MyJsonObject.unapply(_).get)
View file

@@ -11,15 +11,15 @@ import akka.serialization.Serializer.ScalaJSON
object Protocols { object Protocols {
import sjson.json.DefaultProtocol._ import sjson.json.DefaultProtocol._
case class Shop(store: String, item: String, price: Int) case class Shop(store: String, item: String, price: Int)
implicit val ShopFormat: sjson.json.Format[Shop] = implicit val ShopFormat: sjson.json.Format[Shop] =
asProduct3("store", "item", "price")(Shop)(Shop.unapply(_).get) asProduct3("store", "item", "price")(Shop)(Shop.unapply(_).get)
case class MyMessage(val id: String, val value: Tuple2[String, Int]) case class MyMessage(val id: String, val value: Tuple2[String, Int])
implicit val MyMessageFormat: sjson.json.Format[MyMessage] = implicit val MyMessageFormat: sjson.json.Format[MyMessage] =
asProduct2("id", "value")(MyMessage)(MyMessage.unapply(_).get) asProduct2("id", "value")(MyMessage)(MyMessage.unapply(_).get)
case class MyJsonObject(val key: String, val map: Map[String, Int], case class MyJsonObject(val key: String, val map: Map[String, Int],
val standAloneInt: Int) val standAloneInt: Int)
implicit val MyJsonObjectFormat: sjson.json.Format[MyJsonObject] = implicit val MyJsonObjectFormat: sjson.json.Format[MyJsonObject] =
asProduct3("key", "map", "standAloneInt")(MyJsonObject)(MyJsonObject.unapply(_).get) asProduct3("key", "map", "standAloneInt")(MyJsonObject)(MyJsonObject.unapply(_).get)
} }
View file

@@ -228,7 +228,7 @@ class MyActorWithDualCounter extends Actor {
case "hello" => case "hello" =>
count = count + 1 count = count + 1
self.reply("world " + count) self.reply("world " + count)
case "swap" => case "swap" =>
become { case "hello" => self.reply("swapped") } become { case "hello" => self.reply("swapped") }
} }
} }
View file

@@ -90,4 +90,4 @@ class ConfigBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with
override def getBeanClass(element: Element): Class[_] = classOf[ConfiggyPropertyPlaceholderConfigurer] override def getBeanClass(element: Element): Class[_] = classOf[ConfiggyPropertyPlaceholderConfigurer]
override def shouldGenerateId() = true override def shouldGenerateId() = true
} }
View file

@@ -105,7 +105,7 @@ class ActorFactoryBean extends AbstractFactoryBean[AnyRef] with Logging with App
if ((implementation eq null) || implementation == "") throw new AkkaBeansException( if ((implementation eq null) || implementation == "") throw new AkkaBeansException(
"The 'implementation' part of the 'akka:typed-actor' element in the Spring config file can't be null or empty string") "The 'implementation' part of the 'akka:typed-actor' element in the Spring config file can't be null or empty string")
val typedActor: AnyRef = TypedActor.newInstance(interface.toClass, implementation.toClass, createConfig) val typedActor: AnyRef = TypedActor.newInstance(interface.toClass, implementation.toClass, createConfig)
if (isRemote && serverManaged) { if (isRemote && serverManaged) {
val server = RemoteServer.getOrCreateServer(new InetSocketAddress(host, port.toInt)) val server = RemoteServer.getOrCreateServer(new InetSocketAddress(host, port.toInt))
if (serviceName.isEmpty) { if (serviceName.isEmpty) {
View file

@@ -9,13 +9,13 @@ import net.lag.configgy.Configgy
import java.util.Properties import java.util.Properties
/** /**
* ConfiggyPropertyPlaceholderConfigurer. Property resource configurer for configgy files. * ConfiggyPropertyPlaceholderConfigurer. Property resource configurer for configgy files.
*/ */
class ConfiggyPropertyPlaceholderConfigurer extends PropertyPlaceholderConfigurer { class ConfiggyPropertyPlaceholderConfigurer extends PropertyPlaceholderConfigurer {
/** /**
* Sets the akka properties as local properties, leaves the location empty. * Sets the akka properties as local properties, leaves the location empty.
* @param configgyResource akka.conf * @param configgyResource akka.conf
*/ */
override def setLocation(configgyResource: Resource) { override def setLocation(configgyResource: Resource) {
if (configgyResource eq null) throw new IllegalArgumentException("Property 'config' must be set") if (configgyResource eq null) throw new IllegalArgumentException("Property 'config' must be set")
@@ -34,4 +34,4 @@ class ConfiggyPropertyPlaceholderConfigurer extends PropertyPlaceholderConfigure
properties properties
} }
} }
View file

@@ -55,7 +55,7 @@ class ThreadPoolProperties {
", corePoolSize=" + corePoolSize + ", corePoolSize=" + corePoolSize +
", maxPoolSize=" + maxPoolSize + ", maxPoolSize=" + maxPoolSize +
", keepAlive=" + keepAlive + ", keepAlive=" + keepAlive +
", policy=" + rejectionPolicy + ", policy=" + rejectionPolicy +
", mailboxCapacity=" + mailboxCapacity + "]" ", mailboxCapacity=" + mailboxCapacity + "]"
} }
} }
View file

@@ -39,4 +39,4 @@ class ConfiggyPropertyPlaceholderConfigurerSpec extends FeatureSpec with ShouldM
assert(actor1.timeout === 2000) assert(actor1.timeout === 2000)
} }
} }
} }
View file

@@ -133,7 +133,7 @@ class TypedActorSpringFeatureTest extends FeatureSpec with ShouldMatchers with B
myPojoProxy.oneWay("hello server-managed-remote-typed-actor 2") myPojoProxy.oneWay("hello server-managed-remote-typed-actor 2")
MyPojo.latch.await MyPojo.latch.await
assert(MyPojo.lastOneWayMessage === "hello server-managed-remote-typed-actor 2") assert(MyPojo.lastOneWayMessage === "hello server-managed-remote-typed-actor 2")
} }
scenario("get a client proxy for server-managed-remote-typed-actor") { scenario("get a client proxy for server-managed-remote-typed-actor") {
MyPojo.latch = new CountDownLatch(1) MyPojo.latch = new CountDownLatch(1)
@@ -144,7 +144,7 @@ class TypedActorSpringFeatureTest extends FeatureSpec with ShouldMatchers with B
assert(myPojoProxy.getFoo() === "foo") assert(myPojoProxy.getFoo() === "foo")
myPojoProxy.oneWay("hello") myPojoProxy.oneWay("hello")
MyPojo.latch.await MyPojo.latch.await
} }
} }
View file

@@ -45,7 +45,7 @@ class UntypedActorSpringFeatureTest extends FeatureSpec with ShouldMatchers with
} catch { } catch {
case e => () case e => ()
} }
} }
def getPingActorFromContext(config: String, id: String) : ActorRef = { def getPingActorFromContext(config: String, id: String) : ActorRef = {
View file

@@ -332,7 +332,7 @@ object TypedActorConfiguration {
def apply(transactionRequired: Boolean) : TypedActorConfiguration = { def apply(transactionRequired: Boolean) : TypedActorConfiguration = {
if (transactionRequired) { if (transactionRequired) {
new TypedActorConfiguration().makeTransactionRequired new TypedActorConfiguration().makeTransactionRequired
} else new TypedActorConfiguration() } else new TypedActorConfiguration()
} }
} }
@@ -558,7 +558,7 @@ object TypedActor extends Logging {
/** /**
* Java API. * Java API.
*/ */
def newRemoteInstance[T](intfClass: Class[T], factory: TypedActorFactory, hostname: String, port: Int) : T = def newRemoteInstance[T](intfClass: Class[T], factory: TypedActorFactory, hostname: String, port: Int) : T =
newRemoteInstance(intfClass, factory.create, hostname, port) newRemoteInstance(intfClass, factory.create, hostname, port)
@@ -582,7 +582,7 @@ object TypedActor extends Logging {
/** /**
* Create a proxy for a RemoteActorRef representing a server managed remote typed actor. * Create a proxy for a RemoteActorRef representing a server managed remote typed actor.
* *
*/ */
private[akka] def createProxyForRemoteActorRef[T](intfClass: Class[T], actorRef: ActorRef): T = { private[akka] def createProxyForRemoteActorRef[T](intfClass: Class[T], actorRef: ActorRef): T = {
@@ -758,7 +758,7 @@ object TypedActor extends Logging {
*/ */
@Aspect("perInstance") @Aspect("perInstance")
private[akka] sealed class ServerManagedTypedActorAspect extends ActorAspect { private[akka] sealed class ServerManagedTypedActorAspect extends ActorAspect {
@Around("execution(* *.*(..)) && this(akka.actor.ServerManagedTypedActor)") @Around("execution(* *.*(..)) && this(akka.actor.ServerManagedTypedActor)")
def invoke(joinPoint: JoinPoint): AnyRef = { def invoke(joinPoint: JoinPoint): AnyRef = {
if (!isInitialized) initialize(joinPoint) if (!isInitialized) initialize(joinPoint)
View file

@@ -49,7 +49,7 @@ object TypedActorSpec {
case msg: String => println("got " + msg) case msg: String => println("got " + msg)
} }
} }
} }
View file

@@ -7,7 +7,7 @@
akka { akka {
version = "1.0-SNAPSHOT" # Akka version, checked against the runtime version of Akka. version = "1.0-SNAPSHOT" # Akka version, checked against the runtime version of Akka.
time-unit = "seconds" # Default timeout time unit for all timeout properties throughout the config time-unit = "seconds" # Default timeout time unit for all timeout properties throughout the config
# These boot classes are loaded (and created) automatically when the Akka Microkernel boots up # These boot classes are loaded (and created) automatically when the Akka Microkernel boots up
@@ -54,7 +54,7 @@ akka {
# #
# The following are only used for ExecutorBasedEventDriven # The following are only used for ExecutorBasedEventDriven
# and only if mailbox-capacity > 0 # and only if mailbox-capacity > 0
mailbox-push-timeout-time = 10 # Specifies the timeout to add a new message to a mailbox that is full - negative number means infinite timeout mailbox-push-timeout-time = 10 # Specifies the timeout to add a new message to a mailbox that is full - negative number means infinite timeout
# (in unit defined by the time-unit property) # (in unit defined by the time-unit property)
} }
} }
@@ -109,7 +109,7 @@ akka {
} }
remote { remote {
secure-cookie = "050E0A0D0D06010A00000900040D060F0C09060B" # generate your own with '$AKKA_HOME/scripts/generate_secure_cookie.sh' or using 'Crypt.generateSecureCookie' secure-cookie = "050E0A0D0D06010A00000900040D060F0C09060B" # generate your own with '$AKKA_HOME/scripts/generate_secure_cookie.sh' or using 'Crypt.generateSecureCookie'
compression-scheme = "zlib" # Options: "zlib" (lzf to come), leave out for no compression compression-scheme = "zlib" # Options: "zlib" (lzf to come), leave out for no compression
@@ -186,7 +186,7 @@ akka {
vector = "Vectors" # Voldemort Store Used to Persist Vector Sizes. Use identity serializer for keys, identity serializer for values vector = "Vectors" # Voldemort Store Used to Persist Vector Sizes. Use identity serializer for keys, identity serializer for values
queue = "Queues" # Voldemort Store Used to Persist Vector Values. Use identity serializer for keys, identity serializer for values queue = "Queues" # Voldemort Store Used to Persist Vector Values. Use identity serializer for keys, identity serializer for values
} }
client { # The KeyValue pairs under client are converted to java Properties and used to construct the Voldemort ClientConfig client { # The KeyValue pairs under client are converted to java Properties and used to construct the Voldemort ClientConfig
bootstrap_urls = "tcp://localhost:6666" # All Valid Voldemort Client properties are valid here, in string form bootstrap_urls = "tcp://localhost:6666" # All Valid Voldemort Client properties are valid here, in string form
} }
View file

@@ -16,10 +16,10 @@
</encoder> </encoder>
</appender> </appender>
<logger name="akka" level="DEBUG"/> <logger name="akka" level="DEBUG"/>
<logger name="org.mortbay.log" level="ERROR"/> <logger name="org.mortbay.log" level="ERROR"/>
<logger name="org.apache.jasper" level="ERROR"/> <logger name="org.apache.jasper" level="ERROR"/>
<root level="DEBUG"> <root level="DEBUG">
<appender-ref ref="stdout"/> <appender-ref ref="stdout"/>
</root> </root>
View file

@@ -63,7 +63,7 @@
--> -->
<!-- =========================================================== --> <!-- =========================================================== -->
<!-- Set handler Collection Structure --> <!-- Set handler Collection Structure -->
<!-- =========================================================== --> <!-- =========================================================== -->
<Set name="handler"> <Set name="handler">
<New id="Handlers" class="org.eclipse.jetty.server.handler.HandlerCollection"> <New id="Handlers" class="org.eclipse.jetty.server.handler.HandlerCollection">
@@ -94,4 +94,4 @@
<Set name="sendDateHeader">true</Set> <Set name="sendDateHeader">true</Set>
<Set name="gracefulShutdown">1000</Set> <Set name="gracefulShutdown">1000</Set>
</Configure> </Configure>
View file

@@ -515,7 +515,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
class AkkaCamelProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) { class AkkaCamelProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
val camel_core = Dependencies.camel_core val camel_core = Dependencies.camel_core
override def testOptions = createTestFilter( _.endsWith("Test")) override def testOptions = createTestFilter( _.endsWith("Test"))
} }
@@ -690,7 +690,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
val atomikos_transactions_jta = Dependencies.atomikos_transactions_jta val atomikos_transactions_jta = Dependencies.atomikos_transactions_jta
//val jta_1_1 = Dependencies.jta_1_1 //val jta_1_1 = Dependencies.jta_1_1
//val atomikos_transactions_util = "com.atomikos" % "transactions-util" % "3.2.3" % "compile" //val atomikos_transactions_util = "com.atomikos" % "transactions-util" % "3.2.3" % "compile"
//Testing //Testing
val junit = Dependencies.junit val junit = Dependencies.junit
val scalatest = Dependencies.scalatest val scalatest = Dependencies.scalatest
@@ -767,7 +767,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
val commons_fileupload = "commons-fileupload" % "commons-fileupload" % "1.2.1" % "compile" intransitive val commons_fileupload = "commons-fileupload" % "commons-fileupload" % "1.2.1" % "compile" intransitive
val jms_1_1 = "org.apache.geronimo.specs" % "geronimo-jms_1.1_spec" % "1.1.1" % "compile" intransitive val jms_1_1 = "org.apache.geronimo.specs" % "geronimo-jms_1.1_spec" % "1.1.1" % "compile" intransitive
val joda = "joda-time" % "joda-time" % "1.6" intransitive val joda = "joda-time" % "joda-time" % "1.6" intransitive
override def packageAction = override def packageAction =
task { task {
val libs: Seq[Path] = managedClasspath(config("compile")).get.toSeq val libs: Seq[Path] = managedClasspath(config("compile")).get.toSeq