removed trailing spaces

Jonas Bonér 2010-10-29 16:33:31 +02:00
parent c27f971a42
commit c276c621bf
92 changed files with 927 additions and 927 deletions

@ -394,7 +394,7 @@ trait Actor extends Logging {
/**
* Changes the Actor's behavior to become the new 'Receive' (PartialFunction[Any, Unit]) handler.
* Puts the behavior on top of the hotswap stack.
*/
def become(behavior: Receive): Unit = self.hotswap = self.hotswap.push(behavior)
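
For context, a minimal sketch of how this hotswap API is used; the actor and messages are illustrative, not part of this commit:

  import akka.actor.Actor
  import akka.actor.Actor.actorOf

  class Greeter extends Actor {
    def receive = {
      case "swap" => become { case x => println("swapped: " + x) }  // push a new Receive
      case x      => println("default: " + x)
    }
  }

  val greeter = actorOf(new Greeter).start  // start style as used by the specs in this commit
  greeter ! "hi"    // handled by the default behavior
  greeter ! "swap"  // pushes the swapped behavior onto the hotswap stack
  greeter ! "hi"    // now handled by the swapped behavior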

@ -805,7 +805,7 @@ class LocalActorRef private[akka] (
receiveTimeout = None
cancelReceiveTimeout
dispatcher.detach(this)
transactorConfig = transactorConfig.copy(factory = None)
_status = ActorRefInternals.SHUTDOWN
actor.postStop
ActorRegistry.unregister(this)
@ -1067,7 +1067,7 @@ class LocalActorRef private[akka] (
case Temporary => shutDownTemporaryActor(this)
case _ =>
val failedActor = actorInstance.get
// either permanent or none where default is permanent
Actor.log.info("Restarting actor [%s] configured as PERMANENT.", id)
Actor.log.debug("Restarting linked actors for actor [%s].", id)

@ -41,12 +41,12 @@ object ActorRegistry extends ListenerManagement {
private val actorsById = new Index[String,ActorRef]
private val remoteActorSets = Map[Address, RemoteActorSet]()
private val guard = new ReadWriteGuard
/**
* Returns all actors in the system.
*/
def actors: Array[ActorRef] = filter(_ => true)
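
In practice these registry queries look like this (a brief sketch; shutdownAll is the call exercised by the DataFlow spec later in this commit):

  ActorRegistry.actors.foreach(a => println(a.id))  // snapshot of every live actor
  ActorRegistry.shutdownAll                         // stop and unregister all of them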
/**
* Returns the number of actors in the system.
*/
@ -435,4 +435,4 @@ class Index[K <: AnyRef,V <: AnyRef : Manifest] {
* Removes all keys and all values
*/
def clear = foreach { case (k,v) => remove(k,v) }
}

@ -80,7 +80,7 @@ case class SupervisorFactory(val config: SupervisorConfig) extends Logging {
def newInstance: Supervisor = newInstanceFor(config)
def newInstanceFor(config: SupervisorConfig): Supervisor = {
val supervisor = new Supervisor(config.restartStrategy)
supervisor.configure(config)
supervisor.start

@ -55,7 +55,7 @@ object Config {
case value => Some(value)
}
(envConf orElse systemConf).map("akka." + _ + ".conf").getOrElse("akka.conf")
}
if (System.getProperty("akka.config", "") != "") {
@ -74,8 +74,8 @@ object Config {
val configFile = HOME.getOrElse(throwNoAkkaHomeException) + "/config/" + confName
Configgy.configure(configFile)
ConfigLogger.log.info(
"AKKA_HOME is defined as [%s], config loaded from [%s].",
HOME.getOrElse(throwNoAkkaHomeException),
"AKKA_HOME is defined as [%s], config loaded from [%s].",
HOME.getOrElse(throwNoAkkaHomeException),
configFile)
} catch {
case e: ParseException => throw new ConfigurationException(
@ -106,7 +106,7 @@ object Config {
CConfig.fromString("<akka></akka>") // default empty config
}
}
val CONFIG_VERSION = config.getString("akka.version", VERSION)
if (VERSION != CONFIG_VERSION) throw new ConfigurationException(
"Akka JAR version [" + VERSION + "] is different than the provided config version [" + CONFIG_VERSION + "]")
@ -115,9 +115,9 @@ object Config {
val startTime = System.currentTimeMillis
def uptime = (System.currentTimeMillis - startTime) / 1000
def throwNoAkkaHomeException = throw new ConfigurationException(
"Akka home is not defined. Either:" +
"Akka home is not defined. Either:" +
"\n\t1. Define 'AKKA_HOME' environment variable pointing to the root of the Akka distribution." +
"\n\t2. Add the '-Dakka.home=...' option pointing to the root of the Akka distribution.")
}

@ -35,7 +35,7 @@ object Supervision {
def this(actorRef: ActorRef, lifeCycle: LifeCycle) =
this(actorRef, lifeCycle, None)
}
object Supervise {
def apply(actorRef: ActorRef, lifeCycle: LifeCycle, remoteAddress: RemoteAddress) = new Supervise(actorRef, lifeCycle, remoteAddress)
def apply(actorRef: ActorRef, lifeCycle: LifeCycle) = new Supervise(actorRef, lifeCycle, None)
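
A hedged sketch of these factories in use; workerRef is a hypothetical ActorRef, and RemoteAddress(hostname, port) is assumed from the Address/RemoteAddress types of this era:

  val local  = Supervise(workerRef, Permanent)                                   // supervised locally
  val remote = Supervise(workerRef, Permanent, RemoteAddress("localhost", 2552)) // assumed constructor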
@ -158,4 +158,4 @@ object Supervision {
def this(target: Class[_], lifeCycle: LifeCycle, timeout: Long, transactionRequired: Boolean, dispatcher: MessageDispatcher, remoteAddress: RemoteAddress) =
this(null: Class[_], target, lifeCycle, timeout, transactionRequired, dispatcher, remoteAddress)
}
}

@ -143,7 +143,7 @@ object DataFlow {
out ! Exit
throw e
}
result.getOrElse(throw new DataFlowVariableException("Timed out (after " + timeoutMs + " milliseconds) while waiting for result"))
}
}

@ -94,7 +94,7 @@ object Dispatchers extends Logging {
* <p/>
* E.g. each actor consumes its own thread.
*/
def newThreadBasedDispatcher(actor: ActorRef, mailboxCapacity: Int, pushTimeOut: Duration) =
new ThreadBasedDispatcher(actor, mailboxCapacity, pushTimeOut)
/**

@ -73,19 +73,19 @@ class ExecutorBasedEventDrivenDispatcher(
val config: ThreadPoolConfig = ThreadPoolConfig())
extends MessageDispatcher {
def this(_name: String, throughput: Int, throughputDeadlineTime: Int, mailboxType: MailboxType) =
this(_name, throughput, throughputDeadlineTime, mailboxType,ThreadPoolConfig()) // Needed for Java API usage
def this(_name: String, throughput: Int, mailboxType: MailboxType) =
this(_name, throughput, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, mailboxType) // Needed for Java API usage
def this(_name: String, throughput: Int) =
this(_name, throughput, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE) // Needed for Java API usage
def this(_name: String, _config: ThreadPoolConfig) =
this(_name, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE, _config)
def this(_name: String) =
this(_name, Dispatchers.THROUGHPUT, Dispatchers.THROUGHPUT_DEADLINE_TIME_MILLIS, Dispatchers.MAILBOX_TYPE) // Needed for Java API usage
val name = "akka:event-driven:dispatcher:" + _name
@ -111,7 +111,7 @@ class ExecutorBasedEventDrivenDispatcher(
case UnboundedMailbox(blocking) => new DefaultUnboundedMessageQueue(blocking) with ExecutableMailbox {
def dispatcher = ExecutorBasedEventDrivenDispatcher.this
}
case BoundedMailbox(blocking, capacity, pushTimeOut) =>
new DefaultBoundedMessageQueue(capacity, pushTimeOut, blocking) with ExecutableMailbox {
def dispatcher = ExecutorBasedEventDrivenDispatcher.this
@ -175,7 +175,7 @@ class ExecutorBasedEventDrivenDispatcher(
trait ExecutableMailbox extends Runnable { self: MessageQueue =>
def dispatcher: ExecutorBasedEventDrivenDispatcher
final def run = {
val reschedule = try {
try { processMailbox() } catch { case ie: InterruptedException => true }

@ -41,7 +41,7 @@ class ExecutorBasedEventDrivenWorkStealingDispatcher(
def this(_name: String) = this(_name, Dispatchers.MAILBOX_TYPE,ThreadPoolConfig())
//implicit def actorRef2actor(actorRef: ActorRef): Actor = actorRef.actor
val mailboxType = Some(_mailboxType)
val name = "akka:event-driven-work-stealing:dispatcher:" + _name

@ -143,7 +143,7 @@ class HawtDispatcher(val aggregate: Boolean = true, val parent: DispatchQueue =
import HawtDispatcher._
val mailboxType: Option[MailboxType] = None
private[akka] def start { retainNonDaemon }
private[akka] def shutdown { releaseNonDaemon }

@ -35,7 +35,7 @@ sealed trait MailboxType
abstract class TransientMailboxType(val blocking: Boolean = false) extends MailboxType
case class UnboundedMailbox(block: Boolean = false) extends TransientMailboxType(block)
case class BoundedMailbox(
block: Boolean = false,
val capacity: Int = { if (Dispatchers.MAILBOX_CAPACITY < 0) Int.MaxValue else Dispatchers.MAILBOX_CAPACITY },
val pushTimeOut: Duration = Dispatchers.MAILBOX_PUSH_TIME_OUT) extends TransientMailboxType(block) {
if (capacity < 0) throw new IllegalArgumentException("The capacity for BoundedMailbox can not be negative")
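
A small sketch of constructing the two transient flavours (values are illustrative; the Duration(length, unit) factory is an assumption):

  import akka.util.Duration
  import java.util.concurrent.TimeUnit

  val unbounded = UnboundedMailbox()  // block defaults to false
  val bounded = BoundedMailbox(
    block       = false,
    capacity    = 1000,  // negative capacities are rejected by the check above
    pushTimeOut = Duration(10, TimeUnit.SECONDS))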
@ -52,9 +52,9 @@ case class ZooKeeperBasedDurableMailbox(ser: EnterpriseModule.Serializer) extend
case class AMQPBasedDurableMailbox(ser: EnterpriseModule.Serializer) extends DurableMailboxType(ser)
case class JMSBasedDurableMailbox(ser: EnterpriseModule.Serializer) extends DurableMailboxType(ser)
class DefaultUnboundedMessageQueue(blockDequeue: Boolean)
extends LinkedBlockingQueue[MessageInvocation] with MessageQueue {
final def enqueue(handle: MessageInvocation) {
this add handle
}
@ -65,7 +65,7 @@ class DefaultUnboundedMessageQueue(blockDequeue: Boolean)
}
}
class DefaultBoundedMessageQueue(capacity: Int, pushTimeOut: Duration, blockDequeue: Boolean)
extends LinkedBlockingQueue[MessageInvocation](capacity) with MessageQueue {
final def enqueue(handle: MessageInvocation) {
@ -84,7 +84,7 @@ class DefaultBoundedMessageQueue(capacity: Int, pushTimeOut: Duration, blockDequ
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
trait MailboxFactory {
val mailboxType: Option[MailboxType]
/**
@ -105,4 +105,4 @@ trait MailboxFactory {
* Creates and returns a durable mailbox for the given actor.
*/
private[akka] def createDurableMailbox(actorRef: ActorRef, mailboxType: DurableMailboxType): AnyRef
}

@ -95,7 +95,7 @@ trait MessageDispatcher extends MailboxFactory with Logging {
}
}
}
private[akka] def unregister(actorRef: ActorRef) = {
if (uuids remove actorRef.uuid) {
actorRef.mailbox = null
@ -176,4 +176,4 @@ trait MessageDispatcher extends MailboxFactory with Logging {
* Returns the size of the mailbox for the specified actor
*/
def mailboxSize(actorRef: ActorRef): Int
}

@ -261,4 +261,4 @@ trait LazyExecutorService extends ExecutorServiceDelegate {
class LazyExecutorServiceWrapper(executorFactory: => ExecutorService) extends LazyExecutorService {
def createExecutor = executorFactory
}

@ -75,4 +75,4 @@ object Option {
implicit def java2ScalaOption[A](o: Option[A]): scala.Option[A] = o.asScala
implicit def scala2JavaOption[A](o: scala.Option[A]): Option[A] = option(o.get)
}

@ -17,7 +17,7 @@ trait InfiniteIterator[T] extends Iterator[T]
*/
class CyclicIterator[T](items: List[T]) extends InfiniteIterator[T] {
def this(items: java.util.List[T]) = this(items.toList)
@volatile private[this] var current: List[T] = items
def hasNext = items != Nil
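
The intent, as a quick sketch: an endless round-robin over a fixed list, which is what the load balancers in this module rely on:

  val ring = new CyclicIterator(List("a", "b", "c"))
  (1 to 6).map(_ => ring.next).mkString  // "abcabc": wraps around forever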

@ -70,4 +70,4 @@ abstract class UntypedLoadBalancer extends UntypedDispatcher {
else null
override def isDefinedAt(msg: Any) = seq.exists( _.isDefinedAt(msg) )
}

@ -92,7 +92,7 @@ object Transaction {
private[this] val persistentStateMap = new HashMap[String, Committable with Abortable]
private[akka] val depth = new AtomicInteger(0)
val jta: Option[JtaModule.TransactionContainer] =
if (JTA_AWARE) Some(JtaModule.createTransactionContainer)
else None

@ -20,4 +20,4 @@ class Address(val hostname: String, val port: Int) {
that.asInstanceOf[Address].hostname == hostname &&
that.asInstanceOf[Address].port == port
}
}

@ -18,8 +18,8 @@ object Crypt extends Logging {
def md5(text: String): String = md5(unifyLineSeparator(text).getBytes("ASCII"))
def md5(bytes: Array[Byte]): String = digest(bytes, MessageDigest.getInstance("MD5"))
def sha1(text: String): String = sha1(unifyLineSeparator(text).getBytes("ASCII"))
def sha1(bytes: Array[Byte]): String = digest(bytes, MessageDigest.getInstance("SHA1"))
@ -33,8 +33,8 @@ object Crypt extends Logging {
def digest(bytes: Array[Byte], md: MessageDigest): String = {
md.update(bytes)
hexify(md.digest)
}
def hexify(bytes: Array[Byte]): String = {
val builder = new StringBuilder
bytes.foreach { byte => builder.append(hex.charAt((byte & 0xF0) >> 4)).append(hex.charAt(byte & 0xF)) }
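
Usage of these digest helpers, for context (both return the hex string produced by hexify):

  val checksum    = Crypt.md5("some payload")   // hex-encoded MD5 digest
  val fingerprint = Crypt.sha1("some payload")  // hex-encoded SHA-1 digest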

@ -45,7 +45,7 @@ object Helpers extends Logging {
log.warning(e, "Cannot narrow %s to expected type %s!", o, implicitly[Manifest[T]].erasure.getName)
None
}
/**
* Reference that can hold either a typed value or an exception.
*
@ -61,13 +61,13 @@ object Helpers extends Logging {
*
* scala> res0()
* res3: Int = 3
*
* scala> res0() = { println("Hello world"); 3}
* Hello world
*
* scala> res0()
* res5: Int = 3
*
* scala> res0() = error("Lets see what happens here...")
*
* scala> res0()
@ -80,15 +80,15 @@ object Helpers extends Logging {
*/
class ResultOrError[R](result: R){
private[this] var contents: Either[R, Throwable] = Left(result)
def update(value: => R) = {
contents = try {
Left(value)
} catch {
case (error : Throwable) => Right(error)
}
}
def apply() = contents match {
case Left(result) => result
case Right(error) => throw error.fillInStackTrace

@ -86,7 +86,7 @@ class SimpleLock {
}
} else None
}
def ifPossibleApply[T,R](value: T)(function: (T) => R): Option[R] = {
if (tryLock()) {
try {

@ -208,10 +208,10 @@ object ReflectiveAccess extends Logging {
def enqueue(message: MessageInvocation)
def dequeue: MessageInvocation
}
type Serializer = {
def toBinary(obj: AnyRef): Array[Byte]
def fromBinary(bytes: Array[Byte], clazz: Option[Class[_]]): AnyRef
}
lazy val isEnterpriseEnabled = clusterObjectInstance.isDefined
@ -219,7 +219,7 @@ object ReflectiveAccess extends Logging {
val clusterObjectInstance: Option[AnyRef] =
getObjectFor("akka.cluster.Cluster$")
val serializerClass: Option[Class[_]] =
getClassFor("akka.serialization.Serializer")
def ensureEnterpriseEnabled = if (!isEnterpriseEnabled) throw new ModuleNotAvailableException(

@ -32,7 +32,7 @@ object ActorFireForgetRequestReplySpec {
class SenderActor(replyActor: ActorRef) extends Actor {
def receive = {
case "Init" =>
case "Init" =>
replyActor ! "Send"
case "Reply" => {
state.s = "Reply"

@ -98,4 +98,4 @@ class ActorRefSpec extends
serverRef.stop
}
}
}

@ -57,7 +57,7 @@ object FSMActorSpec {
}
startWith(Locked, CodeState("", code))
whenUnhandled {
case Event(_, stateData) => {
log.info("Unhandled")

@ -9,7 +9,7 @@ import java.util.concurrent.CyclicBarrier
class HotSwapSpec extends WordSpec with MustMatchers {
"An Actor" should {
"be able to hotswap its behavior with HotSwap(..)" in {
val barrier = new CyclicBarrier(2)
@volatile var _log = ""
@ -17,7 +17,7 @@ class HotSwapSpec extends WordSpec with MustMatchers {
def receive = { case _ => _log += "default" }
}).start
a ! HotSwap {
case _ =>
_log += "swapped"
barrier.await
}
@ -31,17 +31,17 @@ class HotSwapSpec extends WordSpec with MustMatchers {
@volatile var _log = ""
val a = actorOf(new Actor {
def receive = {
case "init" =>
case "init" =>
_log += "init"
barrier.await
case "swap" => become({
case _ =>
_log += "swapped"
barrier.await
})
}
}).start
a ! "init"
barrier.await
_log must be ("init")
@ -72,7 +72,7 @@ class HotSwapSpec extends WordSpec with MustMatchers {
barrier.reset
_log = ""
a ! HotSwap {
case "swapped" =>
case "swapped" =>
_log += "swapped"
barrier.await
}
@ -104,21 +104,21 @@ class HotSwapSpec extends WordSpec with MustMatchers {
@volatile var _log = ""
val a = actorOf(new Actor {
def receive = {
case "init" =>
case "init" =>
_log += "init"
barrier.await
case "swap" =>
case "swap" =>
become({
case "swapped" =>
case "swapped" =>
_log += "swapped"
barrier.await
case "revert" =>
case "revert" =>
unbecome
})
barrier.await
}
}).start
a ! "init"
barrier.await
_log must be ("init")

@ -147,7 +147,7 @@ class RestartStrategySpec extends JUnitSuite {
slave ! Ping
slave ! Crash
assert(secondRestartLatch.tryAwait(1, TimeUnit.SECONDS))
assert(secondPingLatch.tryAwait(1, TimeUnit.SECONDS))
@ -224,7 +224,7 @@ class RestartStrategySpec extends JUnitSuite {
val restartLatch,stopLatch,maxNoOfRestartsLatch = new StandardLatch
val countDownLatch = new CountDownLatch(2)
val boss = actorOf(new Actor{
self.faultHandler = OneForOneStrategy(List(classOf[Throwable]), None, Some(1000))
protected def receive = {
@ -238,7 +238,7 @@ class RestartStrategySpec extends JUnitSuite {
case Ping => countDownLatch.countDown
case Crash => throw new Exception("Crashing...")
}
override def postRestart(reason: Throwable) = {
restartLatch.open
}

@ -69,7 +69,7 @@ class DataFlowTest extends Spec with ShouldMatchers with BeforeAndAfterAll {
result.get should equal (sum(0,ints(0,1000)))
List(x,y,z).foreach(_.shutdown)
}
/*it("should be able to join streams") {
import DataFlow._
ActorRegistry.shutdownAll
@ -95,7 +95,7 @@ class DataFlowTest extends Spec with ShouldMatchers with BeforeAndAfterAll {
result.set(producer.map(x => x * x).foldLeft(0)(_ + _))
latch.countDown
}
latch.await(3,TimeUnit.SECONDS) should equal (true)
result.get should equal (332833500)
}
@ -131,7 +131,7 @@ class DataFlowTest extends Spec with ShouldMatchers with BeforeAndAfterAll {
thread { ints(0, 1000, producer) }
thread { sum(0, producer, consumer) }
thread { recurseSum(consumer) }
latch.await(15,TimeUnit.SECONDS) should equal (true)
}*/
@ -162,4 +162,4 @@ class DataFlowTest extends Spec with ShouldMatchers with BeforeAndAfterAll {
latch.await(2,TimeUnit.SECONDS) should equal (true)
}*/
}
}

@ -296,4 +296,4 @@ class HawtDispatcherModelTest extends ActorModelSpec {
class ExecutorBasedEventDrivenWorkStealingDispatcherModelTest extends ActorModelSpec {
def newInterceptedDispatcher = new ExecutorBasedEventDrivenWorkStealingDispatcher("foo") with MessageDispatcherInterceptor
}

@ -2,4 +2,4 @@ package akka.japi
import org.scalatest.junit.JUnitSuite
class JavaAPITest extends JavaAPITestBase with JUnitSuite

@ -214,7 +214,7 @@ public class ExampleSessionJava {
scala.Option<RemoteProtocol.AddressProtocol> response =
protobufRpcClient.call(RemoteProtocol.AddressProtocol.newBuilder().setHostname("localhost").setPort(4321).build());
System.out.println("### >> Got response: " + response);
}
}

@ -256,7 +256,7 @@ object AMQP {
}
// Needed for Java API usage
def send(request: O, replyTo: String): Unit = {
send(request, Some(replyTo))
}
def send(request: O, replyTo: Option[String] = None) = {
@ -311,7 +311,7 @@ object AMQP {
def newStringConsumer(connection: ActorRef,
handler: Procedure[String],
exchangeName: String): ActorRef = {
newStringConsumer(connection, handler.apply _, Some(exchangeName))
}
// Needed for Java API usage
@ -328,7 +328,7 @@ object AMQP {
exchangeName: String,
routingKey: String,
queueName: String): ActorRef = {
newStringConsumer(connection, handler.apply _, Some(exchangeName), Some(routingKey), Some(queueName))
}
def newStringConsumer(connection: ActorRef,

@ -283,7 +283,7 @@ object RPC {
def newStringRpcClient(connection: ActorRef,
exchange: String): RpcClient[String, String] = {
newStringRpcClient(connection, exchange, None)
}
// Needed for Java API usage
def newStringRpcClient(connection: ActorRef,

@ -143,7 +143,7 @@ trait CamelContextLifecycle extends Logging {
* Initializes this lifecycle object with the given CamelContext. For the passed
* CamelContext, stream-caching is enabled. If applications want to disable stream-
* caching they can do so after this method returned and prior to calling start.
* This method also registers a new TypedActorComponent at the passed CamelContext
* under a name defined by TypedActorComponent.InternalSchema.
*/
def init(context: CamelContext) {

@ -22,7 +22,7 @@ trait Consumer { self: Actor =>
/**
* Determines whether two-way communications between an endpoint and this consumer actor
* should be done in blocking or non-blocking mode (default is non-blocking). This method
* doesn't have any effect on one-way communications (they'll never block).
*/
def blocking = false
}
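
A minimal consumer sketch for context (endpointUri is the other member of the Consumer contract; the URI and reply text are illustrative):

  import akka.actor.Actor
  import akka.camel.Consumer

  class EchoConsumer extends Actor with Consumer {
    def endpointUri = "direct:echo"  // illustrative Camel endpoint
    override def blocking = true     // opt into blocking two-way exchanges
    def receive = { case msg => self.reply("echo: " + msg) }
  }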

@ -112,7 +112,7 @@ class ActorEndpoint(uri: String,
* <li>If the exchange pattern is out-capable and <code>blocking</code> is set to
* <code>false</code> then the producer sends the message using the ! operator, together
* with a callback handler. The callback handler is an <code>ActorRef</code> that can be
* used by the receiving actor to asynchronously reply to the route that is sending the
* message.</li>
* <li>If the exchange pattern is in-only then the producer sends the message using the
* ! operator.</li>

@ -37,4 +37,4 @@ class AkkaBroadcaster extends org.atmosphere.jersey.util.JerseySimpleBroadcaster
protected override def broadcast(r: Resource, e : Event) {
caster ! ((r,e))
}
}

@ -26,4 +26,4 @@ class DefaultAkkaLoader extends AkkaLoader {
*/
object Main extends DefaultAkkaLoader {
def main(args: Array[String]) = boot
}

@ -33,14 +33,14 @@ trait EmbeddedAppServer extends Bootable with Logging {
super.onLoad
if (config.getBool("akka.rest.service", true)) {
log.info("Attempting to start Akka REST service (Jersey)")
System.setProperty("jetty.port",REST_PORT.toString)
System.setProperty("jetty.host",REST_HOSTNAME)
System.setProperty("jetty.home",HOME.getOrElse(throwNoAkkaHomeException) + "/deploy/root")
val configuration = new XmlConfiguration(
new File(HOME.getOrElse(throwNoAkkaHomeException) + "/config/microkernel-server.xml").toURI.toURL)
server = Option(configuration.configure.asInstanceOf[Server]) map { s => //Set the correct classloader to our contexts
applicationLoader foreach { loader =>
//We need to provide the correct classloader to the servlets

@ -13,4 +13,4 @@ class ReflectiveAccessSpec extends JUnitSuite {
ReflectiveAccess.JtaModule.ensureJtaEnabled
assert(ReflectiveAccess.JtaModule.transactionContainerObjectInstance.isDefined)
}
}

@ -22,4 +22,4 @@ object Kernel extends DefaultAkkaLoader {
case x: BootableRemoteActorService => x.startRemoteService
case _ =>
})
}

@ -20,4 +20,4 @@ class CassandraTicket343TestIntegration extends Ticket343Test {
def getMap: (String) => PersistentMap[Array[Byte], Array[Byte]] = CassandraStorage.getMap
}

@ -627,4 +627,4 @@ private[akka] trait CommonStorageBackend extends MapStorageBackend[Array[Byte],
}
}

@ -158,4 +158,4 @@ trait MapStorageBackendTest extends Spec with ShouldMatchers with BeforeAndAfter
}
}
}

@ -120,4 +120,4 @@ trait QueueStorageBackendTest extends Spec with ShouldMatchers with BeforeAndAft
}
}
}
}

@ -49,4 +49,4 @@ trait RefStorageBackendTest extends Spec with ShouldMatchers with BeforeAndAfter
}
}
}
}

@ -32,4 +32,4 @@ trait SortedSetStorageBackendTest extends Spec with ShouldMatchers with BeforeAn
}
}
}

@ -180,4 +180,4 @@ trait VectorStorageBackendTest extends Spec with ShouldMatchers with BeforeAndAf
}
}
}

@ -23,7 +23,7 @@ object CouchDBStorage extends Storage {
/**
* Implements a persistent transactional map based on the CouchDB storage.
*
* @author
*/
class CouchDBPersistentMap(id: String) extends PersistentMapBinary {
val uuid = id
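
Such a map is normally obtained through the storage front-end, by analogy with the other backends in this commit (a sketch; getMap on CouchDBStorage is assumed to follow the same Storage contract):

  val map = CouchDBStorage.getMap("T-1")     // PersistentMap[Array[Byte], Array[Byte]]
  map += ("key".getBytes, "value".getBytes)  // inside an atomic { } block in real usage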
@ -34,7 +34,7 @@ class CouchDBPersistentMap(id: String) extends PersistentMapBinary {
* Implements a persistent transactional vector based on the CouchDB
* storage.
*
* @author
*/
class CouchDBPersistentVector(id: String) extends PersistentVector[Array[Byte]] {
val uuid = id
@ -44,4 +44,4 @@ class CouchDBPersistentVector(id: String) extends PersistentVector[Array[Byte]]
class CouchDBPersistentRef(id: String) extends PersistentRef[Array[Byte]] {
val uuid = id
val storage = CouchDBStorageBackend
}

@ -23,9 +23,9 @@ private [akka] object CouchDBStorageBackend extends
RefStorageBackend[Array[Byte]] with
Logging {
import dispatch.json._
implicit object widgetWrites extends Writes[Map[String,Any]] {
def writes(o: Map[String,Any]): JsValue = JsValue(o)
}
@ -39,7 +39,7 @@ private [akka] object CouchDBStorageBackend extends
val delete = new DeleteMethod(URL)
client.executeMethod(delete)
}
def create() = {
val client = new HttpClient()
val put = new PutMethod(URL)
@ -48,16 +48,16 @@ private [akka] object CouchDBStorageBackend extends
client.executeMethod(put)
put.getResponseBodyAsString
}
private def storeMap(name: String, postfix: String, entries: List[(Array[Byte], Array[Byte])]) ={
var m = entries.map(e=>(new String(e._1) -> new String(e._2))).toMap + ("_id" -> (name + postfix))
val dataJson = JsonSerialization.tojson(m)
postData(URL, dataJson.toString)
}
private def storeMap(name: String, postfix: String, entries: Map[String, Any]) ={
postData(URL, JsonSerialization.tojson(entries + ("_id" -> (name + postfix))).toString)
}
private def getResponseForNameAsMap(name: String, postfix: String): Option[Map[String, Any]] = {
getResponse(URL + name + postfix).flatMap(JSON.parseFull(_)).asInstanceOf[Option[Map[String, Any]]]
@ -65,43 +65,43 @@ private [akka] object CouchDBStorageBackend extends
def insertMapStorageEntriesFor(name: String, entries: List[(Array[Byte], Array[Byte])]) ={
val newDoc = getResponseForNameAsMap(name, "_map").getOrElse(Map[String, Any]()) ++
entries.map(e => (new String(e._1) -> new String(e._2))).toMap
storeMap(name, "_map", newDoc)
}
def insertMapStorageEntryFor(name: String, key: Array[Byte], value: Array[Byte])={
insertMapStorageEntriesFor(name, List((key, value)))
}
def removeMapStorageFor(name: String) {
lazy val url = URL + name + "_map"
findDocRev(name + "_map").foreach(deleteData(url, _))
}
def removeMapStorageFor(name: String, key: Array[Byte]): Unit = {
lazy val sKey = new String(key)
// if we can't find the map for name, then we don't need to delete it.
getResponseForNameAsMap(name, "_map").foreach(doc => storeMap(name, "_map", doc - sKey))
}
def getMapStorageEntryFor(name: String, key: Array[Byte]): Option[Array[Byte]] = {
lazy val sKey = new String(key)
getResponseForNameAsMap(name, "_map").flatMap(_.get(sKey)).asInstanceOf[Option[String]].map(_.getBytes)
}
def getMapStorageSizeFor(name: String): Int = getMapStorageFor(name).size
def getMapStorageFor(name: String): List[(Array[Byte], Array[Byte])] = {
val m = getResponseForNameAsMap(name, "_map").map(_ - ("_id", "_rev")).getOrElse(Map[String, Any]())
m.toList.map(e => (e._1.getBytes, e._2.asInstanceOf[String].getBytes))
}
def getMapStorageRangeFor(name: String, start: Option[Array[Byte]], finish: Option[Array[Byte]], count: Int): List[(Array[Byte], Array[Byte])] = {
val m = getResponseForNameAsMap(name, "_map").map(_ - ("_id", "_rev")).getOrElse(Map[String, Any]())
val keys = m.keys.toList.sortWith(_ < _)
// if the supplied start is not defined, get the head of keys
val s = start.map(new String(_)).getOrElse(keys.head)
@ -131,7 +131,7 @@ private [akka] object CouchDBStorageBackend extends
storeMap(name, "_vector", m + ("vector" -> v.updated(index, new String(elem))))
}
}
def getVectorStorageEntryFor(name: String, index: Int): Array[Byte] ={
val v = getResponseForNameAsMap(name, "_vector").flatMap(_.get("vector")).getOrElse(List[String]()).asInstanceOf[List[String]]
if (v.indices.contains(index))
@ -139,7 +139,7 @@ private [akka] object CouchDBStorageBackend extends
else
Array[Byte]()
}
def getVectorStorageSizeFor(name: String): Int ={
getResponseForNameAsMap(name, "_vector").flatMap(_.get("vector")).map(_.asInstanceOf[List[String]].size).getOrElse(0)
}
@ -151,12 +151,12 @@ private [akka] object CouchDBStorageBackend extends
val c = if (count == 0) v.length else count
v.slice(s, scala.math.min(s + c, f)).map(_.getBytes)
}
def insertRefStorageFor(name: String, element: Array[Byte]) ={
val newDoc = getResponseForNameAsMap(name, "_ref").getOrElse(Map[String, Any]()) + ("ref" -> new String(element))
storeMap(name, "_ref", newDoc)
}
def getRefStorageFor(name: String): Option[Array[Byte]] ={
getResponseForNameAsMap(name, "_ref").flatMap(_.get("ref")).map(_.asInstanceOf[String].getBytes)
}
@ -171,7 +171,7 @@ private [akka] object CouchDBStorageBackend extends
val delete = new DeleteMethod(url)
delete.setRequestHeader("If-Match", rev)
client.executeMethod(delete)
val response = delete.getResponseBodyAsString()
if (response != null)
Some(response)
@ -191,7 +191,7 @@ private [akka] object CouchDBStorageBackend extends
else
None
}
private def getResponse(url: String): Option[String] = {
val client = new HttpClient()
val method = new GetMethod(url)

@ -15,14 +15,14 @@ import java.util.{Calendar, Date}
@RunWith(classOf[JUnitRunner])
class CouchDBStorageBackendSpec extends Specification {
doBeforeSpec {
CouchDBStorageBackend.create()
}
doAfterSpec {
CouchDBStorageBackend.drop()
}
"CouchDBStorageBackend store and query in map" should {
"enter 4 entries for transaction T-1" in {
insertMapStorageEntryFor("T-1", "debasish.company".getBytes, "anshinsoft".getBytes)
@ -34,13 +34,13 @@ class CouchDBStorageBackendSpec extends Specification {
new String(getMapStorageEntryFor("T-1", "debasish.language".getBytes).get) mustEqual("java")
getMapStorageSizeFor("T-1") mustEqual(4)
}
"enter key/values for another transaction T-2" in {
insertMapStorageEntryFor("T-2", "debasish.age".getBytes, "49".getBytes)
insertMapStorageEntryFor("T-2", "debasish.spouse".getBytes, "paramita".getBytes)
getMapStorageSizeFor("T-2") mustEqual(2)
}
"remove map storage for T-99" in {
insertMapStorageEntryFor("T-99", "provider".getBytes, "googleapp".getBytes)
insertMapStorageEntryFor("T-99", "quota".getBytes, "100mb".getBytes)
@ -67,7 +67,7 @@ class CouchDBStorageBackendSpec extends Specification {
insertMapStorageEntryFor("T-11", "david".getBytes, toByteArray[Long](d / 2))
getMapStorageSizeFor("T-11") mustEqual(4)
fromByteArray[Long](getMapStorageEntryFor("T-11", "steve".getBytes).get) mustEqual(d)
}
}
@ -98,7 +98,7 @@ class CouchDBStorageBackendSpec extends Specification {
getMapStorageRangeFor("T-5",
Some("trade.account".getBytes),
None, 0).map(e => (new String(e._1), new String(e._2))).size mustEqual(7)
removeMapStorageFor("T-5")
}
}
@ -113,7 +113,7 @@ class CouchDBStorageBackendSpec extends Specification {
getMapStorageSizeFor("T-31") mustEqual(1)
fromByteArray[Name](getMapStorageEntryFor("T-31", "debasish".getBytes).getOrElse(Array[Byte]())) mustEqual(n)
removeMapStorageFor("T-31")
}
}
"Store and query in vectors" should {
@ -135,7 +135,7 @@ class CouchDBStorageBackendSpec extends Specification {
"write a Name object and fetch it properly" in {
val dtb = Calendar.getInstance.getTime
val n = Name(100, "debasish ghosh", "kolkata", dtb, Some(dtb))
insertVectorStorageEntryFor("T-31", toByteArray[Name](n))
getVectorStorageSizeFor("T-31") mustEqual(1)
fromByteArray[Name](getVectorStorageEntryFor("T-31", 0)) mustEqual(n)
@ -159,21 +159,21 @@ class CouchDBStorageBackendSpec extends Specification {
fromByteArray[Name](getRefStorageFor("T-4").get) mustEqual(n)
}
}
"Mix the 3 different types storage with the same name" should {
"work independently without inference each other" in {
insertVectorStorageEntryFor("SameName", "v1".getBytes)
insertMapStorageEntryFor("SameName", "vector".getBytes, "map_value_v".getBytes)
insertVectorStorageEntryFor("SameName", "v2".getBytes)
insertMapStorageEntryFor("SameName", "ref".getBytes, "map_value_r".getBytes)
insertMapStorageEntryFor("SameName", "ref".getBytes, "map_value_r".getBytes)
insertVectorStorageEntryFor("SameName", "v3".getBytes)
insertRefStorageFor("SameName", "I am a ref!".getBytes)
getMapStorageSizeFor("SameName") mustEqual(2)
new String(getMapStorageEntryFor("SameName", "vector".getBytes).get) mustEqual("map_value_v")
new String(getMapStorageEntryFor("SameName", "ref".getBytes).get) mustEqual("map_value_r")
getVectorStorageSizeFor("SameName") mustEqual(3)
new String(getRefStorageFor("SameName").get) mustEqual("I am a ref!")
}
}
}

@ -39,7 +39,7 @@ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte],
CONFIGURATION.set("hbase.zookeeper.quorum", HBASE_ZOOKEEPER_QUORUM)
init
def init {
val ADMIN = new HBaseAdmin(CONFIGURATION)
@ -50,14 +50,14 @@ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte],
ADMIN.enableTable(REF_TABLE_NAME)
}
REF_TABLE = new HTable(CONFIGURATION, REF_TABLE_NAME);
if (!ADMIN.tableExists(VECTOR_TABLE_NAME)) {
ADMIN.createTable(new HTableDescriptor(VECTOR_TABLE_NAME))
ADMIN.disableTable(VECTOR_TABLE_NAME)
ADMIN.addColumn(VECTOR_TABLE_NAME, new HColumnDescriptor(VECTOR_ELEMENT_COLUMN_FAMILY_NAME))
ADMIN.enableTable(VECTOR_TABLE_NAME);
}
VECTOR_TABLE = new HTable(CONFIGURATION, VECTOR_TABLE_NAME)
if (!ADMIN.tableExists(MAP_TABLE_NAME)) {
ADMIN.createTable(new HTableDescriptor(MAP_TABLE_NAME))
@ -65,9 +65,9 @@ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte],
ADMIN.addColumn(MAP_TABLE_NAME, new HColumnDescriptor(MAP_ELEMENT_COLUMN_FAMILY_NAME))
ADMIN.enableTable(MAP_TABLE_NAME);
}
MAP_TABLE = new HTable(CONFIGURATION, MAP_TABLE_NAME)
}
def drop {
val ADMIN = new HBaseAdmin(CONFIGURATION)
@ -82,10 +82,10 @@ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte],
if (ADMIN.tableExists(MAP_TABLE_NAME)) {
ADMIN.disableTable(MAP_TABLE_NAME)
ADMIN.deleteTable(MAP_TABLE_NAME)
}
init
}
// ===============================================================
// For Ref
// ===============================================================
@ -143,7 +143,7 @@ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte],
def getVectorStorageRangeFor(name: String, start: Option[Int], finish: Option[Int], count: Int): List[Array[Byte]] = {
import scala.math._
val row = new Get(Bytes.toBytes(name))
val result = VECTOR_TABLE.get(row)
val size = result.size
@ -156,7 +156,7 @@ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte],
e = finish.get - 1
} else {
b = start.getOrElse(0)
e = finish.getOrElse(min(b + count - 1, size - 1))
}
for(i <- b to e) {
val colnum = size - i - 1
@ -168,7 +168,7 @@ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte],
def getVectorStorageSizeFor(name: String): Int = {
val row = new Get(Bytes.toBytes(name))
val result = VECTOR_TABLE.get(row)
if (result.isEmpty)
0
else
@ -190,7 +190,7 @@ private[akka] object HbaseStorageBackend extends MapStorageBackend[Array[Byte],
def getMapStorageEntryFor(name: String, key: Array[Byte]): Option[Array[Byte]] = {
val row = new Get(Bytes.toBytes(name))
val result = MAP_TABLE.get(row)
Option(result.getValue(Bytes.toBytes(MAP_ELEMENT_COLUMN_FAMILY_NAME), key))
}

@ -83,7 +83,7 @@ class HbasePersistentActorSpecTestIntegration extends JUnitSuite with BeforeAndA
override def beforeAll {
testUtil.startMiniCluster
}
override def afterAll {
testUtil.shutdownMiniCluster
}
@ -92,7 +92,7 @@ class HbasePersistentActorSpecTestIntegration extends JUnitSuite with BeforeAndA
def beforeEach {
HbaseStorageBackend.drop
}
@After
def afterEach {
HbaseStorageBackend.drop

@ -8,17 +8,17 @@ import org.scalatest.BeforeAndAfterEach
class HbaseStorageSpecTestIntegration extends
Spec with
ShouldMatchers with
BeforeAndAfterAll with
BeforeAndAfterEach {
import org.apache.hadoop.hbase.HBaseTestingUtility
val testUtil = new HBaseTestingUtility
override def beforeAll {
testUtil.startMiniCluster
}
override def afterAll {
testUtil.shutdownMiniCluster
}
@ -26,11 +26,11 @@ BeforeAndAfterEach {
override def beforeEach {
HbaseStorageBackend.drop
}
override def afterEach {
HbaseStorageBackend.drop
}
describe("persistent maps") {
it("should insert with single key and value") {
import HbaseStorageBackend._
@ -70,8 +70,8 @@ BeforeAndAfterEach {
it("should do proper range queries") {
import HbaseStorageBackend._
val l = List(
("bjarne stroustrup", "c++"),
("martin odersky", "scala"),
("bjarne stroustrup", "c++"),
("martin odersky", "scala"),
("james gosling", "java"),
("yukihiro matsumoto", "ruby"),
("slava pestov", "factor"),
@ -82,11 +82,11 @@ BeforeAndAfterEach {
("guido van rossum", "python"),
("james strachan", "groovy"))
val rl = List(
("james gosling", "java"),
("james strachan", "groovy"),
("larry wall", "perl"),
("martin odersky", "scala"),
("ola bini", "ioke"), ("rich hickey", "clojure"),
("james gosling", "java"),
("james strachan", "groovy"),
("larry wall", "perl"),
("martin odersky", "scala"),
("ola bini", "ioke"), ("rich hickey", "clojure"),
("slava pestov", "factor"))
insertMapStorageEntriesFor("t1", l.map { case (k, v) => (k.getBytes, v.getBytes) })
getMapStorageSizeFor("t1") should equal(l.size)
@ -166,7 +166,7 @@ BeforeAndAfterEach {
describe("persistent refs") {
it("should insert a ref") {
import HbaseStorageBackend._
insertRefStorageFor("t1", "martin odersky".getBytes)
new String(getRefStorageFor("t1").get) should equal("martin odersky")
insertRefStorageFor("t1", "james gosling".getBytes)

@ -80,7 +80,7 @@ object Storage {
kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes)
}
ks2rem.foreach {k =>
fooMap -= k.getBytes
}}
@ -94,7 +94,7 @@ object Storage {
}
self.reply(true)
case PUT_WITH_SLICE(kvs2add, from, cnt) =>
val v = atomic {
kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes)
@ -103,7 +103,7 @@ object Storage {
}
self.reply(v: List[(Array[Byte], Array[Byte])])
case PUT_REM_WITH_SLICE(kvs2add, ks2rem, from, cnt) =>
val v = atomic {
kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes)
@ -125,7 +125,7 @@ object Storage {
def receive = {
case VADD(v) =>
val size =
atomic {
fooVector + v.getBytes
fooVector length
@ -148,7 +148,7 @@ object Storage {
self.reply(els)
case VUPD_AND_ABORT(index, value) =>
val l =
atomic {
fooVector.update(index, value.getBytes)
// force fail
@ -173,13 +173,13 @@ import Storage._
class HbaseTicket343SpecTestIntegration extends Spec with ShouldMatchers with BeforeAndAfterAll with BeforeAndAfterEach {
import org.apache.hadoop.hbase.HBaseTestingUtility
val testUtil = new HBaseTestingUtility
override def beforeAll {
testUtil.startMiniCluster
}
override def afterAll {
testUtil.shutdownMiniCluster
}
@ -187,7 +187,7 @@ class HbaseTicket343SpecTestIntegration extends Spec with ShouldMatchers with Be
override def beforeEach {
HbaseStorageBackend.drop
}
override def afterEach {
HbaseStorageBackend.drop
}
@ -315,7 +315,7 @@ class HbaseTicket343SpecTestIntegration extends Spec with ShouldMatchers with Be
(proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4)
evaluating {
(proc !! VUPD_AND_ABORT(0, "virat")).getOrElse("VUPD_AND_ABORT failed")
} should produce [Exception]
// update aborts and hence values will remain unchanged

@ -48,10 +48,10 @@ private[akka] object MongoStorageBackend extends
db.safely { db =>
val q: DBObject = MongoDBObject(KEY -> name)
coll.findOne(q) match {
case Some(dbo) =>
entries.foreach { case (k, v) => dbo += new String(k) -> v }
db.safely { db => coll.update(q, dbo, true, false) }
case None =>
val builder = MongoDBObject.newBuilder
builder += KEY -> name
entries.foreach { case (k, v) => builder += new String(k) -> v }
@ -79,7 +79,7 @@ private[akka] object MongoStorageBackend extends
}
def getMapStorageEntryFor(name: String, key: Array[Byte]): Option[Array[Byte]] = queryFor(name) { (q, dbo) =>
dbo.map { d =>
d.getAs[Array[Byte]](new String(key))
}.getOrElse(None)
}
@ -132,7 +132,7 @@ private[akka] object MongoStorageBackend extends
db.safely { db =>
coll.findOne(q) match {
// exists : need to update
case Some(dbo) =>
dbo -= KEY
dbo -= "_id"
val listBuilder = MongoDBList.newBuilder
@ -146,7 +146,7 @@ private[akka] object MongoStorageBackend extends
coll.update(q, builder.result.asDBObject, true, false)
// new : just add
case None =>
val listBuilder = MongoDBList.newBuilder
listBuilder ++= elements
@ -166,7 +166,7 @@ private[akka] object MongoStorageBackend extends
}
def getVectorStorageEntryFor(name: String, index: Int): Array[Byte] = queryFor(name) { (q, dbo) =>
dbo.map { d =>
d(index.toString).asInstanceOf[Array[Byte]]
}.getOrElse(Array.empty[Byte])
}
@ -207,12 +207,12 @@ private[akka] object MongoStorageBackend extends
db.safely { db =>
coll.findOne(q) match {
// exists : need to update
case Some(dbo) =>
dbo += ((REF, element))
coll.update(q, dbo, true, false)
// not found : make one
case None =>
val builder = MongoDBObject.newBuilder
builder += KEY -> name
builder += REF -> element
@ -222,7 +222,7 @@ private[akka] object MongoStorageBackend extends
}
def getRefStorageFor(name: String): Option[Array[Byte]] = queryFor(name) { (q, dbo) =>
dbo.map { d =>
d.getAs[Array[Byte]](REF)
}.getOrElse(None)
}

@ -60,8 +60,8 @@ class MongoStorageSpec extends
it("should do proper range queries") {
import MongoStorageBackend._
val l = List(
("bjarne stroustrup", "c++"),
("martin odersky", "scala"),
("bjarne stroustrup", "c++"),
("martin odersky", "scala"),
("james gosling", "java"),
("yukihiro matsumoto", "ruby"),
("slava pestov", "factor"),

@ -80,7 +80,7 @@ object Storage {
kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes)
}
ks2rem.foreach {k =>
fooMap -= k.getBytes
}}
@ -94,7 +94,7 @@ object Storage {
}
self.reply(true)
case PUT_WITH_SLICE(kvs2add, from, cnt) =>
val v = atomic {
kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes)
@ -103,7 +103,7 @@ object Storage {
}
self.reply(v: List[(Array[Byte], Array[Byte])])
case PUT_REM_WITH_SLICE(kvs2add, ks2rem, from, cnt) =>
val v = atomic {
kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes)
@ -125,7 +125,7 @@ object Storage {
def receive = {
case VADD(v) =>
val size =
atomic {
fooVector + v.getBytes
fooVector length
@ -148,7 +148,7 @@ object Storage {
self.reply(els)
case VUPD_AND_ABORT(index, value) =>
val l =
atomic {
fooVector.update(index, value.getBytes)
// force fail
@ -315,7 +315,7 @@ class MongoTicket343Spec extends
(proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4)
evaluating {
(proc !! VUPD_AND_ABORT(0, "virat")).getOrElse("VUPD_AND_ABORT failed")
} should produce [Exception]
// update aborts and hence values will remain unchanged

@ -32,7 +32,7 @@ object CommonsCodec {
import CommonsCodec._
import CommonsCodec.Base64StringEncoder._
/**
* A module for supporting Redis based persistence.
* <p/>
@ -76,7 +76,7 @@ private [akka] object RedisStorageBackend extends
/**
* Map storage in Redis.
* <p/>
* Maps are stored as key/value pairs in redis.
*/
def insertMapStorageEntryFor(name: String, key: Array[Byte], value: Array[Byte]): Unit = withErrorHandling {
insertMapStorageEntriesFor(name, List((key, value)))
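
Driving the backend directly, for context (key and value are illustrative; the read side follows the MapStorageBackend contract shown for the other stores in this commit):

  RedisStorageBackend.insertMapStorageEntryFor("T-1", "lang".getBytes, "scala".getBytes)
  val v: Option[Array[Byte]] = RedisStorageBackend.getMapStorageEntryFor("T-1", "lang".getBytes)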
@ -135,7 +135,7 @@ private [akka] object RedisStorageBackend extends
def getMapStorageFor(name: String): List[(Array[Byte], Array[Byte])] = withErrorHandling {
db.keys("%s:*".format(name))
.map { keys =>
keys.map(key => (makeKeyFromRedisKey(key.get)._2, stringToByteArray(db.get(key.get).get))).toList
}.getOrElse {
throw new NoSuchElementException(name + " not present")
@ -302,10 +302,10 @@ private [akka] object RedisStorageBackend extends
// add item to sorted set identified by name
def zadd(name: String, zscore: String, item: Array[Byte]): Boolean = withErrorHandling {
db.zadd(name, zscore, byteArrayToString(item))
.map { e =>
e match {
case 1 => true
case _ => false
}
}.getOrElse(false)
}
@ -313,10 +313,10 @@ private [akka] object RedisStorageBackend extends
// remove item from sorted set identified by name
def zrem(name: String, item: Array[Byte]): Boolean = withErrorHandling {
db.zrem(name, byteArrayToString(item))
.map { e =>
e match {
case 1 => true
case _ => false
}
}.getOrElse(false)
}
@ -331,7 +331,7 @@ private [akka] object RedisStorageBackend extends
}
def zrange(name: String, start: Int, end: Int): List[Array[Byte]] = withErrorHandling {
db.zrange(name, start.toString, end.toString, RedisClient.ASC, false)
.map(_.map(e => stringToByteArray(e.get)))
.getOrElse {
throw new NoSuchElementException(name + " not present")

@ -76,7 +76,7 @@ object Storage {
}
self.reply(v)
case MSET(kvs) =>
atomic {
kvs.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes)
@ -84,13 +84,13 @@ object Storage {
}
self.reply(kvs.size)
case REMOVE_AFTER_PUT(kvs2add, ks2rem) =>
val v =
atomic {
kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes)
}
ks2rem.foreach {k =>
fooMap -= k.getBytes
}
@ -98,7 +98,7 @@ object Storage {
}
self.reply(v)
case CLEAR_AFTER_PUT(kvs2add) =>
atomic {
kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes)
@ -107,8 +107,8 @@ object Storage {
}
self.reply(true)
case PUT_WITH_SLICE(kvs2add, from, cnt) =>
val v =
atomic {
kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes)
@ -117,8 +117,8 @@ object Storage {
}
self.reply(v: List[(Array[Byte], Array[Byte])])
case PUT_REM_WITH_SLICE(kvs2add, ks2rem, from, cnt) =>
val v =
atomic {
kvs2add.foreach {kv =>
fooMap += (kv._1.getBytes, kv._2.getBytes)
@ -140,7 +140,7 @@ object Storage {
def receive = {
case VADD(v) =>
val size =
atomic {
fooVector + v.getBytes
fooVector length
@ -163,7 +163,7 @@ object Storage {
self.reply(els)
case VUPD_AND_ABORT(index, value) =>
val l =
atomic {
fooVector.update(index, value.getBytes)
// force fail
@ -347,7 +347,7 @@ class RedisTicket343Spec extends
(proc !! VADD("nilanjan")).getOrElse("VADD failed") should equal(4)
evaluating {
(proc !! VUPD_AND_ABORT(0, "virat")).getOrElse("VUPD_AND_ABORT failed")
} should produce [Exception]
// update aborts and hence values will remain unchanged

@ -120,4 +120,4 @@ private[akka] object RiakStorageBackend extends CommonStorageBackend {
}
}
}

@ -20,4 +20,4 @@ class RiakTicket343TestIntegration extends Ticket343Test {
def getMap: (String) => PersistentMap[Array[Byte], Array[Byte]] = RiakStorage.getMap
}

@ -142,4 +142,4 @@ private[akka] object VoldemortStorageBackend extends CommonStorageBackend {
}
}
}

@ -64,4 +64,4 @@
</value-serializer>
</store>
</stores>

@ -37,4 +37,4 @@ trait EmbeddedVoldemort extends BeforeAndAfterAll with Logging {
override protected def afterAll(): Unit = {
server.stop
}
}

@ -180,4 +180,4 @@ object VoldemortStorageBackendSuite {
value.getBytes("UTF-8")
}
}
}

@ -20,4 +20,4 @@ class VoldemortTicket343Test extends Ticket343Test with EmbeddedVoldemort {
def getMap: (String) => PersistentMap[Array[Byte], Array[Byte]] = VoldemortStorage.getMap
}

@ -69,7 +69,7 @@ object RemoteNode extends RemoteServer
object RemoteServer {
val UUID_PREFIX = "uuid:"
val SECURE_COOKIE: Option[String] = {
val cookie = config.getString("akka.remote.secure-cookie", "")
if (cookie == "") None
else Some(cookie)
@ -80,7 +80,7 @@ object RemoteServer {
"Configuration option 'akka.remote.server.require-cookie' is turned on but no secure cookie is defined in 'akka.remote.secure-cookie'.")
requireCookie
}
val UNTRUSTED_MODE = config.getBool("akka.remote.server.untrusted-mode", false)
val HOSTNAME = config.getString("akka.remote.server.hostname", "localhost")
val PORT = config.getInt("akka.remote.server.port", 2552)
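
These defaults come straight from akka.conf via the Config object seen earlier; a sketch of reading the same keys (the import path is assumed):

  import akka.config.Config.config

  val host   = config.getString("akka.remote.server.hostname", "localhost")
  val port   = config.getInt("akka.remote.server.port", 2552)
  val cookie = config.getString("akka.remote.secure-cookie", "")  // empty means no cookie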
@ -312,7 +312,7 @@ class RemoteServer extends Logging with ListenerManagement {
def unregister(id: String):Unit = synchronized {
if (_isRunning) {
log.info("Unregistering server side remote actor with id [%s]", id)
if (id.startsWith(UUID_PREFIX)) actorsByUuid.remove(id.substring(UUID_PREFIX.length))
else {
val actorRef = actors get id
actorsByUuid.remove(actorRef.uuid, actorRef)
@ -477,11 +477,11 @@ class RemoteServerHandler(
val actorInfo = request.getActorInfo
log.debug("Dispatching to remote actor [%s:%s]", actorInfo.getTarget, actorInfo.getUuid)
val actorRef =
try {
createActor(actorInfo).start
} catch {
case e: SecurityException =>
channel.write(createErrorReplyMessage(e, request, true))
server.notifyListeners(RemoteServerError(e, server))
return
@ -493,10 +493,10 @@ class RemoteServerHandler(
else None
message match { // first match on system messages
case RemoteActorSystemMessage.Stop =>
if (RemoteServer.UNTRUSTED_MODE) throw new SecurityException("Remote server is operating in untrusted mode, can not stop the actor")
else actorRef.stop
case _: LifeCycleMessage if (RemoteServer.UNTRUSTED_MODE) =>
throw new SecurityException("Remote server is operating in untrusted mode, can not pass on a LifeCycleMessage to the remote actor")
case _ => // then match on user defined messages
@ -613,7 +613,7 @@ class RemoteServerHandler(
val timeout = actorInfo.getTimeout
val actorRefOrNull = findActorByIdOrUuid(id, uuidFrom(uuid.getHigh,uuid.getLow).toString)
if (actorRefOrNull eq null) {
try {
if (RemoteServer.UNTRUSTED_MODE) throw new SecurityException(
@ -687,10 +687,10 @@ class RemoteServerHandler(
private def authenticateRemoteClient(request: RemoteRequestProtocol, ctx: ChannelHandlerContext) = {
val attachment = ctx.getAttachment
if ((attachment ne null) &&
attachment.isInstanceOf[String] &&
attachment.asInstanceOf[String] == CHANNEL_INIT) { // is first time around, channel initialization
ctx.setAttachment(null)
val clientAddress = ctx.getChannel.getRemoteAddress.toString
if (!request.hasCookie) throw new SecurityException(
"The remote client [" + clientAddress + "] does not have a secure cookie.")

@ -93,7 +93,7 @@ object Serializable {
* case class Address(street: String, city: String, zip: String)
* extends ScalaJSON[Address] {
*
* implicit val AddressFormat: Format[Address] =
* asProduct3("street", "city", "zip")(Address)(Address.unapply(_).get)
*
* import dispatch.json._
@ -113,9 +113,9 @@ object Serializable {
* @author <a href="http://jonasboner.com">Jonas Bon&#233;r</a>
*/
trait ScalaJSON[T] extends JSON {
def toJSON: String
def fromJSON(js: String): T
def toBytes: Array[Byte]
def fromBytes(bytes: Array[Byte]): T
}
}

@ -116,7 +116,7 @@ object ActorSerialization {
if (serializeMailBox == true) {
val messages =
actorRef.mailbox match {
case q: java.util.Queue[MessageInvocation] =>
val l = new scala.collection.mutable.ListBuffer[MessageInvocation]
@ -125,7 +125,7 @@ object ActorSerialization {
l
}
val requestProtocols =
messages.map(m =>
RemoteActorSerialization.createRemoteRequestProtocolBuilder(
actorRef,
@ -172,7 +172,7 @@ object ActorSerialization {
if (protocol.hasSupervisor) Some(RemoteActorSerialization.fromProtobufToRemoteActorRef(protocol.getSupervisor, loader))
else None
val hotswap =
if (serializer.isDefined && protocol.hasHotswapStack) serializer.get
.fromBinary(protocol.getHotswapStack.toByteArray, Some(classOf[Stack[PartialFunction[Any, Unit]]]))
.asInstanceOf[Stack[PartialFunction[Any, Unit]]]

@ -15,47 +15,47 @@ public final class ProtobufProtocol {
initFields();
}
private ProtobufPOJO(boolean noInit) {}
private static final ProtobufPOJO defaultInstance;
public static ProtobufPOJO getDefaultInstance() {
return defaultInstance;
}
public ProtobufPOJO getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return akka.actor.ProtobufProtocol.internal_static_akka_actor_ProtobufPOJO_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return akka.actor.ProtobufProtocol.internal_static_akka_actor_ProtobufPOJO_fieldAccessorTable;
}
// required uint64 id = 1;
public static final int ID_FIELD_NUMBER = 1;
private boolean hasId;
private long id_ = 0L;
public boolean hasId() { return hasId; }
public long getId() { return id_; }
// required string name = 2;
public static final int NAME_FIELD_NUMBER = 2;
private boolean hasName;
private java.lang.String name_ = "";
public boolean hasName() { return hasName; }
public java.lang.String getName() { return name_; }
// required bool status = 3;
public static final int STATUS_FIELD_NUMBER = 3;
private boolean hasStatus;
private boolean status_ = false;
public boolean hasStatus() { return hasStatus; }
public boolean getStatus() { return status_; }
private void initFields() {
}
public final boolean isInitialized() {
@ -64,7 +64,7 @@ public final class ProtobufProtocol {
if (!hasStatus) return false;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@ -79,12 +79,12 @@ public final class ProtobufProtocol {
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (hasId()) {
size += com.google.protobuf.CodedOutputStream
@ -102,7 +102,7 @@ public final class ProtobufProtocol {
memoizedSerializedSize = size;
return size;
}
public static akka.actor.ProtobufProtocol.ProtobufPOJO parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@ -169,31 +169,31 @@ public final class ProtobufProtocol {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(akka.actor.ProtobufProtocol.ProtobufPOJO prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private akka.actor.ProtobufProtocol.ProtobufPOJO result;
// Construct using akka.actor.ProtobufProtocol.ProtobufPOJO.newBuilder()
private Builder() {}
private static Builder create() {
Builder builder = new Builder();
builder.result = new akka.actor.ProtobufProtocol.ProtobufPOJO();
return builder;
}
protected akka.actor.ProtobufProtocol.ProtobufPOJO internalGetResult() {
return result;
}
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
@ -202,20 +202,20 @@ public final class ProtobufProtocol {
result = new akka.actor.ProtobufProtocol.ProtobufPOJO();
return this;
}
public Builder clone() {
return create().mergeFrom(result);
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return akka.actor.ProtobufProtocol.ProtobufPOJO.getDescriptor();
}
public akka.actor.ProtobufProtocol.ProtobufPOJO getDefaultInstanceForType() {
return akka.actor.ProtobufProtocol.ProtobufPOJO.getDefaultInstance();
}
public boolean isInitialized() {
return result.isInitialized();
}
@ -225,7 +225,7 @@ public final class ProtobufProtocol {
}
return buildPartial();
}
private akka.actor.ProtobufProtocol.ProtobufPOJO buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
@ -234,7 +234,7 @@ public final class ProtobufProtocol {
}
return buildPartial();
}
public akka.actor.ProtobufProtocol.ProtobufPOJO buildPartial() {
if (result == null) {
throw new IllegalStateException(
@ -244,7 +244,7 @@ public final class ProtobufProtocol {
result = null;
return returnMe;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof akka.actor.ProtobufProtocol.ProtobufPOJO) {
return mergeFrom((akka.actor.ProtobufProtocol.ProtobufPOJO)other);
@ -253,7 +253,7 @@ public final class ProtobufProtocol {
return this;
}
}
public Builder mergeFrom(akka.actor.ProtobufProtocol.ProtobufPOJO other) {
if (other == akka.actor.ProtobufProtocol.ProtobufPOJO.getDefaultInstance()) return this;
if (other.hasId()) {
@ -268,7 +268,7 @@ public final class ProtobufProtocol {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@ -305,8 +305,8 @@ public final class ProtobufProtocol {
}
}
}
// required uint64 id = 1;
public boolean hasId() {
return result.hasId();
@ -324,7 +324,7 @@ public final class ProtobufProtocol {
result.id_ = 0L;
return this;
}
// required string name = 2;
public boolean hasName() {
return result.hasName();
@ -345,7 +345,7 @@ public final class ProtobufProtocol {
result.name_ = getDefaultInstance().getName();
return this;
}
// required bool status = 3;
public boolean hasStatus() {
return result.hasStatus();
@ -363,19 +363,19 @@ public final class ProtobufProtocol {
result.status_ = false;
return this;
}
// @@protoc_insertion_point(builder_scope:akka.actor.ProtobufPOJO)
}
static {
defaultInstance = new ProtobufPOJO(true);
akka.actor.ProtobufProtocol.internalForceInit();
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:akka.actor.ProtobufPOJO)
}
public static final class Counter extends
com.google.protobuf.GeneratedMessage {
// Use Counter.newBuilder() to construct.
@ -383,40 +383,40 @@ public final class ProtobufProtocol {
initFields();
}
private Counter(boolean noInit) {}
private static final Counter defaultInstance;
public static Counter getDefaultInstance() {
return defaultInstance;
}
public Counter getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return akka.actor.ProtobufProtocol.internal_static_akka_actor_Counter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return akka.actor.ProtobufProtocol.internal_static_akka_actor_Counter_fieldAccessorTable;
}
// required uint32 count = 1;
public static final int COUNT_FIELD_NUMBER = 1;
private boolean hasCount;
private int count_ = 0;
public boolean hasCount() { return hasCount; }
public int getCount() { return count_; }
private void initFields() {
}
public final boolean isInitialized() {
if (!hasCount) return false;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@ -425,12 +425,12 @@ public final class ProtobufProtocol {
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (hasCount()) {
size += com.google.protobuf.CodedOutputStream
@ -440,7 +440,7 @@ public final class ProtobufProtocol {
memoizedSerializedSize = size;
return size;
}
public static akka.actor.ProtobufProtocol.Counter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@ -507,31 +507,31 @@ public final class ProtobufProtocol {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(akka.actor.ProtobufProtocol.Counter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private akka.actor.ProtobufProtocol.Counter result;
// Construct using akka.actor.ProtobufProtocol.Counter.newBuilder()
private Builder() {}
private static Builder create() {
Builder builder = new Builder();
builder.result = new akka.actor.ProtobufProtocol.Counter();
return builder;
}
protected akka.actor.ProtobufProtocol.Counter internalGetResult() {
return result;
}
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
@ -540,20 +540,20 @@ public final class ProtobufProtocol {
result = new akka.actor.ProtobufProtocol.Counter();
return this;
}
public Builder clone() {
return create().mergeFrom(result);
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return akka.actor.ProtobufProtocol.Counter.getDescriptor();
}
public akka.actor.ProtobufProtocol.Counter getDefaultInstanceForType() {
return akka.actor.ProtobufProtocol.Counter.getDefaultInstance();
}
public boolean isInitialized() {
return result.isInitialized();
}
@ -563,7 +563,7 @@ public final class ProtobufProtocol {
}
return buildPartial();
}
private akka.actor.ProtobufProtocol.Counter buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
@ -572,7 +572,7 @@ public final class ProtobufProtocol {
}
return buildPartial();
}
public akka.actor.ProtobufProtocol.Counter buildPartial() {
if (result == null) {
throw new IllegalStateException(
@ -582,7 +582,7 @@ public final class ProtobufProtocol {
result = null;
return returnMe;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof akka.actor.ProtobufProtocol.Counter) {
return mergeFrom((akka.actor.ProtobufProtocol.Counter)other);
@ -591,7 +591,7 @@ public final class ProtobufProtocol {
return this;
}
}
public Builder mergeFrom(akka.actor.ProtobufProtocol.Counter other) {
if (other == akka.actor.ProtobufProtocol.Counter.getDefaultInstance()) return this;
if (other.hasCount()) {
@ -600,7 +600,7 @@ public final class ProtobufProtocol {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@ -629,8 +629,8 @@ public final class ProtobufProtocol {
}
}
}
// required uint32 count = 1;
public boolean hasCount() {
return result.hasCount();
@ -648,19 +648,19 @@ public final class ProtobufProtocol {
result.count_ = 0;
return this;
}
// @@protoc_insertion_point(builder_scope:akka.actor.Counter)
}
static {
defaultInstance = new Counter(true);
akka.actor.ProtobufProtocol.internalForceInit();
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:akka.actor.Counter)
}
public static final class DualCounter extends
com.google.protobuf.GeneratedMessage {
// Use DualCounter.newBuilder() to construct.
@ -668,40 +668,40 @@ public final class ProtobufProtocol {
initFields();
}
private DualCounter(boolean noInit) {}
private static final DualCounter defaultInstance;
public static DualCounter getDefaultInstance() {
return defaultInstance;
}
public DualCounter getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return akka.actor.ProtobufProtocol.internal_static_akka_actor_DualCounter_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return akka.actor.ProtobufProtocol.internal_static_akka_actor_DualCounter_fieldAccessorTable;
}
// required uint32 count1 = 1;
public static final int COUNT1_FIELD_NUMBER = 1;
private boolean hasCount1;
private int count1_ = 0;
public boolean hasCount1() { return hasCount1; }
public int getCount1() { return count1_; }
// required uint32 count2 = 2;
public static final int COUNT2_FIELD_NUMBER = 2;
private boolean hasCount2;
private int count2_ = 0;
public boolean hasCount2() { return hasCount2; }
public int getCount2() { return count2_; }
private void initFields() {
}
public final boolean isInitialized() {
@ -709,7 +709,7 @@ public final class ProtobufProtocol {
if (!hasCount2) return false;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@ -721,12 +721,12 @@ public final class ProtobufProtocol {
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (hasCount1()) {
size += com.google.protobuf.CodedOutputStream
@ -740,7 +740,7 @@ public final class ProtobufProtocol {
memoizedSerializedSize = size;
return size;
}
public static akka.actor.ProtobufProtocol.DualCounter parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
@ -807,31 +807,31 @@ public final class ProtobufProtocol {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(akka.actor.ProtobufProtocol.DualCounter prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> {
private akka.actor.ProtobufProtocol.DualCounter result;
// Construct using akka.actor.ProtobufProtocol.DualCounter.newBuilder()
private Builder() {}
private static Builder create() {
Builder builder = new Builder();
builder.result = new akka.actor.ProtobufProtocol.DualCounter();
return builder;
}
protected akka.actor.ProtobufProtocol.DualCounter internalGetResult() {
return result;
}
public Builder clear() {
if (result == null) {
throw new IllegalStateException(
@ -840,20 +840,20 @@ public final class ProtobufProtocol {
result = new akka.actor.ProtobufProtocol.DualCounter();
return this;
}
public Builder clone() {
return create().mergeFrom(result);
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return akka.actor.ProtobufProtocol.DualCounter.getDescriptor();
}
public akka.actor.ProtobufProtocol.DualCounter getDefaultInstanceForType() {
return akka.actor.ProtobufProtocol.DualCounter.getDefaultInstance();
}
public boolean isInitialized() {
return result.isInitialized();
}
@ -863,7 +863,7 @@ public final class ProtobufProtocol {
}
return buildPartial();
}
private akka.actor.ProtobufProtocol.DualCounter buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
if (!isInitialized()) {
@ -872,7 +872,7 @@ public final class ProtobufProtocol {
}
return buildPartial();
}
public akka.actor.ProtobufProtocol.DualCounter buildPartial() {
if (result == null) {
throw new IllegalStateException(
@ -882,7 +882,7 @@ public final class ProtobufProtocol {
result = null;
return returnMe;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof akka.actor.ProtobufProtocol.DualCounter) {
return mergeFrom((akka.actor.ProtobufProtocol.DualCounter)other);
@ -891,7 +891,7 @@ public final class ProtobufProtocol {
return this;
}
}
public Builder mergeFrom(akka.actor.ProtobufProtocol.DualCounter other) {
if (other == akka.actor.ProtobufProtocol.DualCounter.getDefaultInstance()) return this;
if (other.hasCount1()) {
@ -903,7 +903,7 @@ public final class ProtobufProtocol {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
@ -936,8 +936,8 @@ public final class ProtobufProtocol {
}
}
}
// required uint32 count1 = 1;
public boolean hasCount1() {
return result.hasCount1();
@ -955,7 +955,7 @@ public final class ProtobufProtocol {
result.count1_ = 0;
return this;
}
// required uint32 count2 = 2;
public boolean hasCount2() {
return result.hasCount2();
@ -973,19 +973,19 @@ public final class ProtobufProtocol {
result.count2_ = 0;
return this;
}
// @@protoc_insertion_point(builder_scope:akka.actor.DualCounter)
}
static {
defaultInstance = new DualCounter(true);
akka.actor.ProtobufProtocol.internalForceInit();
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:akka.actor.DualCounter)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_akka_actor_ProtobufPOJO_descriptor;
private static
@ -1001,7 +1001,7 @@ public final class ProtobufProtocol {
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_akka_actor_DualCounter_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
@ -1053,8 +1053,8 @@ public final class ProtobufProtocol {
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
public static void internalForceInit() {}
// @@protoc_insertion_point(outer_class_scope)
}
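
The generated classes above follow the standard protobuf-java 2.x shape: an immutable message plus a nested mutable Builder, with has*/get* accessors for each required field. A minimal round-trip sketch, not part of this commit (setCount is assumed from the generated builder methods elided in the hunks above; toByteArray and parseFrom come from the protobuf base classes):

import akka.actor.ProtobufProtocol.Counter

val msg   = Counter.newBuilder.setCount(42).build // required uint32 count = 1
val bytes = msg.toByteArray                       // serialized wire format
val back  = Counter.parseFrom(bytes)              // fails if the required field is missing
assert(back.getCount == 42)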

View file

@ -61,7 +61,7 @@ object ClientInitiatedRemoteActorSpec {
var prefix = "default-"
var count = 0
def receive = {
case "incrPrefix" => count += 1; prefix = "" + count + "-"
case "incrPrefix" => count += 1; prefix = "" + count + "-"
case msg: String => self.reply(prefix + msg)
}
}
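
A hedged sketch of the exchange the actor above supports (remoteRef is a hypothetical started reference to it; !! is the Akka 1.0-era request-reply returning an Option):

remoteRef ! "incrPrefix"                          // count becomes 1, prefix becomes "1-"
assert((remoteRef !! "hello") == Some("1-hello")) // reply is built from the current prefix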

View file

@ -34,4 +34,4 @@ class RemoteAgentSpec extends JUnitSuite {
assert(a() == 20, "Remote agent should be updated properly")
a.close
}
}

View file

@ -194,7 +194,7 @@ class ServerInitiatedRemoteActorSpec extends JUnitSuite {
ref1.stop
ref2 ! "OneWay"
ref2.stop
}
@Test

View file

@ -14,7 +14,7 @@ object Serializables {
case class Shop(store: String, item: String, price: Int) extends
ScalaJSON[Shop] {
implicit val ShopFormat: sjson.json.Format[Shop] =
asProduct3("store", "item", "price")(Shop)(Shop.unapply(_).get)
def toJSON: String = JsValue.toJson(tojson(this))
@ -27,7 +27,7 @@ object Serializables {
implicit val MyMessageFormat: sjson.json.Format[MyMessage] =
asProduct2("id", "value")(MyMessage)(MyMessage.unapply(_).get)
case class MyJsonObject(val key: String, val map: Map[String, Int],
val standAloneInt: Int) extends ScalaJSON[MyJsonObject] {
implicit val MyJsonObjectFormat: sjson.json.Format[MyJsonObject] =
asProduct3("key", "map", "standAloneInt")(MyJsonObject)(MyJsonObject.unapply(_).get)

View file

@ -11,15 +11,15 @@ import akka.serialization.Serializer.ScalaJSON
object Protocols {
import sjson.json.DefaultProtocol._
case class Shop(store: String, item: String, price: Int)
implicit val ShopFormat: sjson.json.Format[Shop] =
asProduct3("store", "item", "price")(Shop)(Shop.unapply(_).get)
case class MyMessage(val id: String, val value: Tuple2[String, Int])
implicit val MyMessageFormat: sjson.json.Format[MyMessage] =
asProduct2("id", "value")(MyMessage)(MyMessage.unapply(_).get)
case class MyJsonObject(val key: String, val map: Map[String, Int],
val standAloneInt: Int)
implicit val MyJsonObjectFormat: sjson.json.Format[MyJsonObject] =
asProduct3("key", "map", "standAloneInt")(MyJsonObject)(MyJsonObject.unapply(_).get)
}
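
The Format instances above drive sjson's generic serialization. A usage sketch, assuming tojson/fromjson from sjson.json.JsonSerialization are in scope (the same API the sibling Serializables file calls):

import sjson.json.JsonSerialization._
import Protocols._

val shop = Shop("akka-store", "book", 20)
val js   = tojson(shop)        // JsValue produced via the implicit ShopFormat
val back = fromjson[Shop](js)  // parsed back through the same Format
assert(back == shop)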

View file

@ -228,7 +228,7 @@ class MyActorWithDualCounter extends Actor {
case "hello" =>
count = count + 1
self.reply("world " + count)
case "swap" =>
case "swap" =>
become { case "hello" => self.reply("swapped") }
}
}
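
A sketch of what the test above expects: become installs a new Receive for subsequent messages, so replies change after "swap" (actor is a hypothetical started ActorRef for MyActorWithDualCounter):

actor !! "hello"   // Some("world 1") - the counting behavior answers
actor ! "swap"     // pushes { case "hello" => self.reply("swapped") }
actor !! "hello"   // Some("swapped") - the hotswapped behavior answers now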

View file

@ -90,4 +90,4 @@ class ConfigBeanDefinitionParser extends AbstractSingleBeanDefinitionParser with
override def getBeanClass(element: Element): Class[_] = classOf[ConfiggyPropertyPlaceholderConfigurer]
override def shouldGenerateId() = true
}

View file

@ -105,7 +105,7 @@ class ActorFactoryBean extends AbstractFactoryBean[AnyRef] with Logging with App
if ((implementation eq null) || implementation == "") throw new AkkaBeansException(
"The 'implementation' part of the 'akka:typed-actor' element in the Spring config file can't be null or empty string")
val typedActor: AnyRef = TypedActor.newInstance(interface.toClass, implementation.toClass, createConfig)
if (isRemote && serverManaged) {
val server = RemoteServer.getOrCreateServer(new InetSocketAddress(host, port.toInt))
if (serviceName.isEmpty) {

View file

@ -9,13 +9,13 @@ import net.lag.configgy.Configgy
import java.util.Properties
/**
* ConfiggyPropertyPlaceholderConfigurer. Property resource configurer for configgy files.
*/
class ConfiggyPropertyPlaceholderConfigurer extends PropertyPlaceholderConfigurer {
/**
* Sets the akka properties as local properties, leaves the location empty.
* @param configgyResource akka.conf
*/
override def setLocation(configgyResource: Resource) {
if (configgyResource eq null) throw new IllegalArgumentException("Property 'config' must be set")
@ -34,4 +34,4 @@ class ConfiggyPropertyPlaceholderConfigurer extends PropertyPlaceholderConfigure
properties
}
}

View file

@ -55,7 +55,7 @@ class ThreadPoolProperties {
", corePoolSize=" + corePoolSize +
", maxPoolSize=" + maxPoolSize +
", keepAlive=" + keepAlive +
", policy=" + rejectionPolicy +
", policy=" + rejectionPolicy +
", mailboxCapacity=" + mailboxCapacity + "]"
}
}

View file

@ -39,4 +39,4 @@ class ConfiggyPropertyPlaceholderConfigurerSpec extends FeatureSpec with ShouldM
assert(actor1.timeout === 2000)
}
}
}

View file

@ -133,7 +133,7 @@ class TypedActorSpringFeatureTest extends FeatureSpec with ShouldMatchers with B
myPojoProxy.oneWay("hello server-managed-remote-typed-actor 2")
MyPojo.latch.await
assert(MyPojo.lastOneWayMessage === "hello server-managed-remote-typed-actor 2")
}
scenario("get a client proxy for server-managed-remote-typed-actor") {
MyPojo.latch = new CountDownLatch(1)
@ -144,7 +144,7 @@ class TypedActorSpringFeatureTest extends FeatureSpec with ShouldMatchers with B
assert(myPojoProxy.getFoo() === "foo")
myPojoProxy.oneWay("hello")
MyPojo.latch.await
}
}

View file

@ -45,7 +45,7 @@ class UntypedActorSpringFeatureTest extends FeatureSpec with ShouldMatchers with
} catch {
case e => ()
}
}
def getPingActorFromContext(config: String, id: String) : ActorRef = {

View file

@ -332,7 +332,7 @@ object TypedActorConfiguration {
def apply(transactionRequired: Boolean) : TypedActorConfiguration = {
if (transactionRequired) {
new TypedActorConfiguration().makeTransactionRequired
} else new TypedActorConfiguration()
}
}
@ -558,7 +558,7 @@ object TypedActor extends Logging {
/**
* Java API.
*/
def newRemoteInstance[T](intfClass: Class[T], factory: TypedActorFactory, hostname: String, port: Int) : T =
newRemoteInstance(intfClass, factory.create, hostname, port)
@ -582,7 +582,7 @@ object TypedActor extends Logging {
/**
* Create a proxy for a RemoteActorRef representing a server managed remote typed actor.
*
*/
private[akka] def createProxyForRemoteActorRef[T](intfClass: Class[T], actorRef: ActorRef): T = {
@ -758,7 +758,7 @@ object TypedActor extends Logging {
*/
@Aspect("perInstance")
private[akka] sealed class ServerManagedTypedActorAspect extends ActorAspect {
@Around("execution(* *.*(..)) && this(akka.actor.ServerManagedTypedActor)")
def invoke(joinPoint: JoinPoint): AnyRef = {
if (!isInitialized) initialize(joinPoint)

View file

@ -49,7 +49,7 @@ object TypedActorSpec {
case msg: String => println("got " + msg)
}
}
}

View file

@ -7,7 +7,7 @@
akka {
version = "1.0-SNAPSHOT" # Akka version, checked against the runtime version of Akka.
time-unit = "seconds" # Default timeout time unit for all timeout properties throughout the config
# These boot classes are loaded (and created) automatically when the Akka Microkernel boots up
@ -54,7 +54,7 @@ akka {
#
# The following are only used for ExecutorBasedEventDriven
# and only if mailbox-capacity > 0
mailbox-push-timeout-time = 10 # Specifies the timeout to add a new message to a mailbox that is full - negative number means infinite timeout
# (in unit defined by the time-unit property)
}
}
@ -109,7 +109,7 @@ akka {
}
remote {
secure-cookie = "050E0A0D0D06010A00000900040D060F0C09060B" # generate your own with '$AKKA_HOME/scripts/generate_secure_cookie.sh' or using 'Crypt.generateSecureCookie'
compression-scheme = "zlib" # Options: "zlib" (lzf to come), leave out for no compression
@ -186,7 +186,7 @@ akka {
vector = "Vectors" # Voldemort Store Used to Persist Vector Sizes. Use identity serializer for keys, identity serializer for values
queue = "Queues" # Voldemort Store Used to Persist Vector Values. Use identity serializer for keys, identity serializer for values
}
client { # The KeyValue pairs under client are converted to java Properties and used to construct the Voldemort ClientConfig
bootstrap_urls = "tcp://localhost:6666" # All Valid Voldemort Client properties are valid here, in string form
}
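
These settings are read back at runtime through the Configgy-backed config object. A hedged lookup sketch (key paths assume the fragment sits under the top-level akka block, as in this file; getString with a default is a standard Configgy accessor):

import akka.config.Config.config

val cookie = config.getString("akka.remote.secure-cookie", "")
val scheme = config.getString("akka.remote.compression-scheme", "") // "zlib" above enables zlib compression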

View file

@ -16,10 +16,10 @@
</encoder>
</appender>
<logger name="akka" level="DEBUG"/>
<logger name="org.mortbay.log" level="ERROR"/>
<logger name="org.apache.jasper" level="ERROR"/>
<root level="DEBUG">
<appender-ref ref="stdout"/>
</root>

View file

@ -63,7 +63,7 @@
-->
<!-- =========================================================== -->
<!-- Set handler Collection Structure -->
<!-- Set handler Collection Structure -->
<!-- =========================================================== -->
<Set name="handler">
<New id="Handlers" class="org.eclipse.jetty.server.handler.HandlerCollection">
@ -94,4 +94,4 @@
<Set name="sendDateHeader">true</Set>
<Set name="gracefulShutdown">1000</Set>
</Configure>
</Configure>

View file

@ -515,7 +515,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
class AkkaCamelProject(info: ProjectInfo) extends AkkaDefaultProject(info, distPath) {
val camel_core = Dependencies.camel_core
override def testOptions = createTestFilter( _.endsWith("Test"))
}
@ -690,7 +690,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
val atomikos_transactions_jta = Dependencies.atomikos_transactions_jta
//val jta_1_1 = Dependencies.jta_1_1
//val atomikos_transactions_util = "com.atomikos" % "transactions-util" % "3.2.3" % "compile"
//Testing
val junit = Dependencies.junit
val scalatest = Dependencies.scalatest
@ -767,7 +767,7 @@ class AkkaParentProject(info: ProjectInfo) extends DefaultProject(info) {
val commons_fileupload = "commons-fileupload" % "commons-fileupload" % "1.2.1" % "compile" intransitive
val jms_1_1 = "org.apache.geronimo.specs" % "geronimo-jms_1.1_spec" % "1.1.1" % "compile" intransitive
val joda = "joda-time" % "joda-time" % "1.6" intransitive
override def packageAction =
task {
val libs: Seq[Path] = managedClasspath(config("compile")).get.toSeq