diff --git a/akka-camel/src/main/scala/CamelService.scala b/akka-camel/src/main/scala/CamelService.scala index f689e6fe44..65a6a44fe5 100644 --- a/akka-camel/src/main/scala/CamelService.scala +++ b/akka-camel/src/main/scala/CamelService.scala @@ -100,4 +100,4 @@ object CamelService { * */ class DefaultCamelService extends CamelService { -} \ No newline at end of file +} diff --git a/akka-camel/src/main/scala/ConsumerPublisher.scala b/akka-camel/src/main/scala/ConsumerPublisher.scala index 9df41a7d92..953ac90746 100644 --- a/akka-camel/src/main/scala/ConsumerPublisher.scala +++ b/akka-camel/src/main/scala/ConsumerPublisher.scala @@ -167,7 +167,7 @@ private[camel] class PublishRequestor extends Actor { protected def receive = { case ActorRegistered(actor) => for (event <- ConsumerRegistered.forConsumer(actor)) deliverCurrentEvent(event) - case ActorUnregistered(actor) => + case ActorUnregistered(actor) => for (event <- ConsumerUnregistered.forConsumer(actor)) deliverCurrentEvent(event) case AspectInitRegistered(proxy, init) => for (event <- ConsumerMethodRegistered.forConsumer(proxy, init)) deliverCurrentEvent(event) diff --git a/akka-camel/src/main/scala/component/ActiveObjectComponent.scala b/akka-camel/src/main/scala/component/ActiveObjectComponent.scala index d65c7e6528..05fa026e04 100644 --- a/akka-camel/src/main/scala/component/ActiveObjectComponent.scala +++ b/akka-camel/src/main/scala/component/ActiveObjectComponent.scala @@ -105,4 +105,4 @@ class ActiveObjectInfo(context: CamelContext, clazz: Class[_], strategy: Paramet introspect(superclass) } } -} \ No newline at end of file +} diff --git a/akka-camel/src/test/scala/ConsumerMethodRegisteredTest.scala b/akka-camel/src/test/scala/ConsumerMethodRegisteredTest.scala index 66d63d69e8..493030fa4a 100644 --- a/akka-camel/src/test/scala/ConsumerMethodRegisteredTest.scala +++ b/akka-camel/src/test/scala/ConsumerMethodRegisteredTest.scala @@ -43,4 +43,4 @@ class ConsumerMethodRegisteredTest extends JUnitSuite { assert(registered.size === 0) } -} \ No newline at end of file +} diff --git a/akka-camel/src/test/scala/RemoteConsumerTest.scala b/akka-camel/src/test/scala/RemoteConsumerTest.scala index 4e2aa59b24..149585df4b 100644 --- a/akka-camel/src/test/scala/RemoteConsumerTest.scala +++ b/akka-camel/src/test/scala/RemoteConsumerTest.scala @@ -86,4 +86,4 @@ object RemoteConsumerTest { case m: Message => self.reply("remote actor: %s" format m.body) } } -} \ No newline at end of file +} diff --git a/akka-camel/src/test/scala/support/TestSupport.scala b/akka-camel/src/test/scala/support/TestSupport.scala index a4a0d177ab..61118ef3e3 100644 --- a/akka-camel/src/test/scala/support/TestSupport.scala +++ b/akka-camel/src/test/scala/support/TestSupport.scala @@ -17,7 +17,7 @@ trait TestActor extends Actor { handler(msg) } } - + def handler: Handler } diff --git a/akka-core/src/main/scala/actor/ActiveObject.scala b/akka-core/src/main/scala/actor/ActiveObject.scala index 118c1063ae..0201391940 100644 --- a/akka-core/src/main/scala/actor/ActiveObject.scala +++ b/akka-core/src/main/scala/actor/ActiveObject.scala @@ -666,12 +666,12 @@ private[akka] class Dispatcher(transactionalRequired: Boolean, var callbacks: Op private var context: Option[ActiveObjectContext] = None private var targetClass:Class[_] = _ - + def this(transactionalRequired: Boolean) = this(transactionalRequired,None) private[actor] def initialize(targetClass: Class[_], targetInstance: AnyRef, ctx: Option[ActiveObjectContext]) = { - + if (transactionalRequired || 
targetClass.isAnnotationPresent(Annotations.transactionrequired)) self.makeTransactionRequired self.id = targetClass.getName @@ -753,10 +753,10 @@ private[akka] class Dispatcher(transactionalRequired: Boolean, var callbacks: Op override def postRestart(reason: Throwable) { try { - + if (postRestart.isDefined) { postRestart.get.invoke(target.get, ZERO_ITEM_OBJECT_ARRAY: _*) - } + } } catch { case e: InvocationTargetException => throw e.getCause } } diff --git a/akka-core/src/main/scala/actor/Actor.scala b/akka-core/src/main/scala/actor/Actor.scala index 15453e0e34..5f06543382 100644 --- a/akka-core/src/main/scala/actor/Actor.scala +++ b/akka-core/src/main/scala/actor/Actor.scala @@ -50,7 +50,7 @@ trait SerializableActor extends Actor trait StatelessSerializableActor extends SerializableActor /** - * Mix in this trait to create a serializable actor, serializable through + * Mix in this trait to create a serializable actor, serializable through * a custom serialization protocol. This actor is the serialized state. * * @author Jonas Bonér @@ -61,7 +61,7 @@ trait StatefulSerializerSerializableActor extends SerializableActor { } /** - * Mix in this trait to create a serializable actor, serializable through + * Mix in this trait to create a serializable actor, serializable through * a custom serialization protocol. This actor is wrapping serializable state. * * @author Jonas Bonér diff --git a/akka-core/src/main/scala/actor/ActorRef.scala b/akka-core/src/main/scala/actor/ActorRef.scala index d3d9dc28b5..d07e18a314 100644 --- a/akka-core/src/main/scala/actor/ActorRef.scala +++ b/akka-core/src/main/scala/actor/ActorRef.scala @@ -33,28 +33,28 @@ import com.google.protobuf.ByteString /** * The ActorRef object can be used to deserialize ActorRef instances from of its binary representation * or its Protocol Buffers (protobuf) Message representation to a Actor.actorOf instance. - * + * *

  * Binary -> ActorRef:
  * <pre>
  *   val actorRef = ActorRef.fromBinaryToRemoteActorRef(bytes)
  *   actorRef ! message // send message to remote actor through its reference
  * </pre>
- * 
+ *
  * <p/>
  * Protobuf Message -> RemoteActorRef:
  * <pre>
  *   val actorRef = ActorRef.fromBinaryToRemoteActorRef(protobufMessage)
  *   actorRef ! message // send message to remote actor through its reference
  * </pre>
- * 
+ *
  * <p/>
  * Protobuf Message -> LocalActorRef:
  * <pre>
  *   val actorRef = ActorRef.fromBinaryToLocalActorRef(protobufMessage)
  *   actorRef ! message // send message to local actor through its reference
  * </pre>
- * + * * @author Jonas Bonér */ object ActorRef { @@ -102,19 +102,19 @@ object ActorRef { */ private[akka] def fromProtobufToLocalActorRef(protocol: SerializedActorRefProtocol, loader: Option[ClassLoader]): ActorRef = { Actor.log.debug("Deserializing SerializedActorRefProtocol to LocalActorRef:\n" + protocol) - - val serializer = if (protocol.hasSerializerClassname) { + + val serializer = if (protocol.hasSerializerClassname) { val serializerClass = if (loader.isDefined) loader.get.loadClass(protocol.getSerializerClassname) else Class.forName(protocol.getSerializerClassname) Some(serializerClass.newInstance.asInstanceOf[Serializer]) } else None - + val lifeCycle = if (protocol.hasLifeCycle) { val lifeCycleProtocol = protocol.getLifeCycle val restartCallbacks = - if (lifeCycleProtocol.hasPreRestart || lifeCycleProtocol.hasPostRestart) + if (lifeCycleProtocol.hasPreRestart || lifeCycleProtocol.hasPostRestart) Some(RestartCallbacks(lifeCycleProtocol.getPreRestart, lifeCycleProtocol.getPostRestart)) else None Some(if (lifeCycleProtocol.getLifeCycle == LifeCycleType.PERMANENT) LifeCycle(Permanent, restartCallbacks) @@ -126,7 +126,7 @@ object ActorRef { if (protocol.hasSupervisor) Some(fromProtobufToRemoteActorRef(protocol.getSupervisor, loader)) else None - + val hotswap = if (serializer.isDefined && protocol.hasHotswapStack) Some(serializer.get .fromBinary(protocol.getHotswapStack.toByteArray, Some(classOf[PartialFunction[Any, Unit]])) @@ -218,13 +218,13 @@ trait ActorRef extends TransactionManagement { /** * User overridable callback/setting. - * + * *

    * Set trapExit to the list of exception classes that the actor should be able to trap
    * from the actor it is supervising. When the supervising actor throws these exceptions
    * then they will trigger a restart.
    * <p/>
-   * 
+   *
    * Trap no exceptions:
    * <pre>
    * trapExit = Nil
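The trapExit setting documented in the hunk above pairs with faultHandler and spawnLink to form the supervision idiom this patch also touches in SupervisorSpec further down. A minimal sketch of that idiom, assuming the pre-1.0 API used throughout this diff; the Worker/Boss names are illustrative and the OneForOneStrategy import path is an assumption.

import se.scalablesolutions.akka.actor.Actor
import se.scalablesolutions.akka.config.ScalaConfig.OneForOneStrategy // import path assumed for this Akka version

class Worker extends Actor {                              // illustrative child actor
  def receive = {
    case "crash" => throw new RuntimeException("boom")    // trapped by the supervisor, triggers a restart
    case msg     => println("worker handled: " + msg)
  }
}

class Boss extends Actor {
  self.trapExit = classOf[Exception] :: Nil               // trap Exception from linked actors
  self.faultHandler = Some(OneForOneStrategy(5, 1000))    // at most 5 restarts within 1000 ms
  val worker = self.spawnLink[Worker]                     // spawn and link the supervised child
  def receive = { case msg => worker ! msg }
}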
@@ -348,23 +348,23 @@ trait ActorRef extends TransactionManagement {
    * Is the actor able to handle the message passed in as arguments?
    */
   def isDefinedAt(message: Any): Boolean = actor.isDefinedAt(message)
-  
+
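isDefinedAt, documented just above, is what the routing helpers exercised later in this patch (RoutingSpec's testIsDefinedAt) rely on to decide whether an actor can handle a given message. A small sketch, assuming the same actorOf/self.reply API that appears elsewhere in this diff.

import se.scalablesolutions.akka.actor.Actor
import se.scalablesolutions.akka.actor.Actor._

object IsDefinedAtSketch {
  // Illustrative actor whose receive only handles "ping".
  val pinger = actorOf(new Actor {
    def receive = { case "ping" => self.reply("pong") }
  }).start

  val handled   = pinger.isDefinedAt("ping") // true: receive matches the message
  val unhandled = pinger.isDefinedAt(42)     // false: 42 would go unhandled
}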
   /**
    * Is the actor serializable?
    */
-  def isSerializable: Boolean = actor.isInstanceOf[SerializableActor]  
-  
+  def isSerializable: Boolean = actor.isInstanceOf[SerializableActor]
+
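The isSerializable and serializer accessors touched here work together with toBinary, which is reformatted further down in this file: isSerializable is true only when the underlying actor mixes in one of the SerializableActor traits, and serializer is Some(...) only for a StatefulSerializerSerializableActor. A sketch under that assumption, using the StatelessSerializableActor trait that SerializableActorSpec exercises later in this patch.

import se.scalablesolutions.akka.actor.{Actor, StatelessSerializableActor}
import se.scalablesolutions.akka.actor.Actor._

// Illustrative stateless actor; the mixin is what makes isSerializable return true.
class EchoActor extends StatelessSerializableActor {
  def receive = { case msg => self.reply(msg) }
}

object SerializationSketch {
  val ref = actorOf(new EchoActor).start
  val serializable = ref.isSerializable  // true, thanks to the SerializableActor mixin
  val bytes: Array[Byte] = ref.toBinary  // serialized ActorRef, as implemented below
  val custom = ref.serializer            // None here; Some(...) only for a
                                         // StatefulSerializerSerializableActor
}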
   /**
    * Returns the 'Serializer' instance for the Actor as an Option.
    * 

* It returns 'Some(serializer)' if the Actor is extending the StatefulSerializerSerializableActor * trait (which has a Serializer defined) and 'None' if not. */ - def serializer: Option[Serializer] = - if (actor.isInstanceOf[StatefulSerializerSerializableActor]) + def serializer: Option[Serializer] = + if (actor.isInstanceOf[StatefulSerializerSerializableActor]) Some(actor.asInstanceOf[StatefulSerializerSerializableActor].serializer) else None - + /** * Only for internal use. UUID is effectively final. */ @@ -720,13 +720,13 @@ trait ActorRef extends TransactionManagement { sealed class LocalActorRef private[akka]( private[this] var actorFactory: Either[Option[Class[_ <: Actor]], Option[() => Actor]] = Left(None)) extends ActorRef { - + private var isDeserialized = false private var loader: Option[ClassLoader] = None private[akka] def this(clazz: Class[_ <: Actor]) = this(Left(Some(clazz))) private[akka] def this(factory: () => Actor) = this(Right(Some(factory))) - + // used only for deserialization private[akka] def this(__uuid: String, __id: String, @@ -757,8 +757,8 @@ sealed class LocalActorRef private[akka]( instance } else throw new IllegalStateException( "Can't deserialize Actor that is not an instance of one of:\n" + - "\n\t- StatelessSerializableActor" + - "\n\t- StatefulSerializerSerializableActor" + + "\n\t- StatelessSerializableActor" + + "\n\t- StatefulSerializerSerializableActor" + "\n\t- StatefulWrappedSerializableActor") }) loader = Some(__loader) @@ -809,7 +809,7 @@ sealed class LocalActorRef private[akka]( RemoteServer.registerActor(homeAddress, uuid, this) registeredInRemoteNodeDuringSerialization = true } - + RemoteActorRefProtocol.newBuilder .setUuid(uuid) .setActorClassname(actorClass.getName) @@ -831,7 +831,7 @@ sealed class LocalActorRef private[akka]( } val builder = LifeCycleProtocol.newBuilder lifeCycle match { - case Some(LifeCycle(scope, None)) => + case Some(LifeCycle(scope, None)) => setScope(builder, scope) Some(builder.build) case Some(LifeCycle(scope, Some(callbacks))) => @@ -869,7 +869,7 @@ sealed class LocalActorRef private[akka]( builder.addMessages(createRemoteRequestProtocolBuilder( message.message, message.senderFuture.isEmpty, message.sender)) message = mailbox.poll - } + } builder.build } @@ -881,13 +881,13 @@ sealed class LocalActorRef private[akka]( /** * Serializes the ActorRef instance into a byte array (Array[Byte]). */ - def toBinary: Array[Byte] = { + def toBinary: Array[Byte] = { val protocol = if (isSerializable) toSerializedActorRefProtocol else toRemoteActorRefProtocol Actor.log.debug("Serializing ActorRef to binary:\n" + protocol) protocol.toByteArray } - + /** * Returns the class for the Actor instance that is managed by the ActorRef. */ @@ -897,7 +897,7 @@ sealed class LocalActorRef private[akka]( * Returns the class name for the Actor instance that is managed by the ActorRef. */ def actorClassName: String = actorClass.getName - + /** * Sets the dispatcher for this actor. Needs to be invoked before the actor is started. 
*/ @@ -1316,7 +1316,7 @@ sealed class LocalActorRef private[akka]( lifeCycle.get match { case LifeCycle(scope, _) => { scope match { - case Permanent => + case Permanent => Actor.log.info("Restarting actor [%s] configured as PERMANENT.", id) restartLinkedActors(reason) Actor.log.debug("Restarting linked actors for actor [%s].", id) @@ -1338,7 +1338,7 @@ sealed class LocalActorRef private[akka]( } } } - + protected[akka] def restartLinkedActors(reason: Throwable) = guard.withGuard { linkedActorsAsList.foreach { actorRef => if (actorRef.lifeCycle.isEmpty) actorRef.lifeCycle = Some(LifeCycle(Permanent)) @@ -1366,7 +1366,7 @@ sealed class LocalActorRef private[akka]( _supervisor.foreach(_ ! UnlinkAndStop(this)) } } - + protected[akka] def registerSupervisorAsRemoteActor: Option[String] = guard.withGuard { if (_supervisor.isDefined) { RemoteClient.clientFor(remoteAddress.get).registerSupervisorForActor(this) diff --git a/akka-core/src/main/scala/actor/BootableActorLoaderService.scala b/akka-core/src/main/scala/actor/BootableActorLoaderService.scala index c244af86bd..22878b416a 100644 --- a/akka-core/src/main/scala/actor/BootableActorLoaderService.scala +++ b/akka-core/src/main/scala/actor/BootableActorLoaderService.scala @@ -18,7 +18,7 @@ class AkkaDeployClassLoader(urls : List[URL], parent : ClassLoader) extends URLC val normalResult = super.findResources(resource) if(normalResult.hasMoreElements) normalResult else findDeployed(resource) } - + def findDeployed(resource : String) = new Enumeration[URL]{ private val it = getURLs.flatMap( listClassesInPackage(_,resource) ).iterator def hasMoreElements = it.hasNext diff --git a/akka-core/src/main/scala/actor/Scheduler.scala b/akka-core/src/main/scala/actor/Scheduler.scala index 41d418c6e8..0d1fa03766 100644 --- a/akka-core/src/main/scala/actor/Scheduler.scala +++ b/akka-core/src/main/scala/actor/Scheduler.scala @@ -79,4 +79,4 @@ private object SchedulerThreadFactory extends ThreadFactory { thread.setDaemon(true) thread } -} \ No newline at end of file +} diff --git a/akka-core/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala b/akka-core/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala index 9120e34bdf..3ceec873a7 100644 --- a/akka-core/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala +++ b/akka-core/src/main/scala/dispatch/ExecutorBasedEventDrivenDispatcher.scala @@ -57,7 +57,7 @@ import se.scalablesolutions.akka.actor.ActorRef * @param throughput positive integer indicates the dispatcher will only process so much messages at a time from the * mailbox, without checking the mailboxes of other actors. Zero or negative means the dispatcher * always continues until the mailbox is empty. 
- * Larger values (or zero or negative) increase througput, smaller values increase fairness + * Larger values (or zero or negative) increase througput, smaller values increase fairness */ class ExecutorBasedEventDrivenDispatcher(_name: String, throughput: Int = Dispatchers.THROUGHPUT) extends MessageDispatcher with ThreadPoolBuilder { def this(_name: String) = this(_name, Dispatchers.THROUGHPUT) // Needed for Java API usage diff --git a/akka-core/src/main/scala/dispatch/MessageHandling.scala b/akka-core/src/main/scala/dispatch/MessageHandling.scala index 54d627258d..c2e74ceb1d 100644 --- a/akka-core/src/main/scala/dispatch/MessageHandling.scala +++ b/akka-core/src/main/scala/dispatch/MessageHandling.scala @@ -23,7 +23,7 @@ final class MessageInvocation(val receiver: ActorRef, val transactionSet: Option[CountDownCommitBarrier]) { if (receiver eq null) throw new IllegalArgumentException("receiver is null") - def invoke = try { + def invoke = try { receiver.invoke(this) } catch { case e: NullPointerException => throw new ActorInitializationException( diff --git a/akka-core/src/main/scala/remote/MessageSerializer.scala b/akka-core/src/main/scala/remote/MessageSerializer.scala index 3e1ba14aac..9e61293cc6 100644 --- a/akka-core/src/main/scala/remote/MessageSerializer.scala +++ b/akka-core/src/main/scala/remote/MessageSerializer.scala @@ -77,7 +77,7 @@ object MessageSerializer { builder.setMessage(ByteString.copyFrom(serializable.toBytes)) builder.setMessageManifest(ByteString.copyFromUtf8(serializable.getClass.getName)) } - + private def box(value: Any): AnyRef = value match { case value: Boolean => new java.lang.Boolean(value) case value: Char => new java.lang.Character(value) diff --git a/akka-core/src/main/scala/remote/RemoteServer.scala b/akka-core/src/main/scala/remote/RemoteServer.scala index f7bc664725..6127ac9f62 100644 --- a/akka-core/src/main/scala/remote/RemoteServer.scala +++ b/akka-core/src/main/scala/remote/RemoteServer.scala @@ -324,7 +324,7 @@ class RemoteServerHandler( applicationLoader.foreach(MessageSerializer.setClassLoader(_)) /** - * ChannelOpen overridden to store open channels for a clean shutdown of a RemoteServer. + * ChannelOpen overridden to store open channels for a clean shutdown of a RemoteServer. * If a channel is closed before, it is automatically removed from the open channels group. */ override def channelOpen(ctx: ChannelHandlerContext, event: ChannelStateEvent) { @@ -364,7 +364,7 @@ class RemoteServerHandler( val actorRef = createActor(request.getTarget, request.getUuid, request.getTimeout) actorRef.start val message = MessageSerializer.deserialize(request.getMessage) - val sender = + val sender = if (request.hasSender) Some(ActorRef.fromProtobufToRemoteActorRef(request.getSender, applicationLoader)) else None if (request.getIsOneWay) actorRef.!(message)(sender) diff --git a/akka-core/src/main/scala/routing/Iterators.scala b/akka-core/src/main/scala/routing/Iterators.scala index 64ea87551b..6e73af08e4 100644 --- a/akka-core/src/main/scala/routing/Iterators.scala +++ b/akka-core/src/main/scala/routing/Iterators.scala @@ -6,12 +6,12 @@ package se.scalablesolutions.akka.routing import se.scalablesolutions.akka.actor.ActorRef -/** +/** * An Iterator that is either always empty or yields an infinite number of Ts. */ trait InfiniteIterator[T] extends Iterator[T] -/** +/** * CyclicIterator is a round-robin style InfiniteIterator that cycles the supplied List. 
*/ class CyclicIterator[T](items: List[T]) extends InfiniteIterator[T] { diff --git a/akka-core/src/main/scala/routing/Routing.scala b/akka-core/src/main/scala/routing/Routing.scala index c343afbda3..a5242c72b9 100644 --- a/akka-core/src/main/scala/routing/Routing.scala +++ b/akka-core/src/main/scala/routing/Routing.scala @@ -8,12 +8,12 @@ import se.scalablesolutions.akka.actor.{Actor, ActorRef} import se.scalablesolutions.akka.actor.Actor._ object Routing { - + type PF[A, B] = PartialFunction[A, B] - /** + /** * Creates a new PartialFunction whose isDefinedAt is a combination - * of the two parameters, and whose apply is first to call filter.apply + * of the two parameters, and whose apply is first to call filter.apply * and then filtered.apply. */ def filter[A, B](filter: PF[A, Unit], filtered: PF[A, B]): PF[A, B] = { @@ -22,13 +22,13 @@ object Routing { filtered(a) } - /** + /** * Interceptor is a filter(x,y) where x.isDefinedAt is considered to be always true. */ def intercept[A, B](interceptor: (A) => Unit, interceptee: PF[A, B]): PF[A, B] = filter({case a if a.isInstanceOf[A] => interceptor(a)}, interceptee) - /** + /** * Creates a LoadBalancer from the thunk-supplied InfiniteIterator. */ def loadBalancerActor(actors: => InfiniteIterator[ActorRef]): ActorRef = @@ -36,7 +36,7 @@ object Routing { val seq = actors }).start - /** + /** * Creates a Dispatcher given a routing and a message-transforming function. */ def dispatcherActor(routing: PF[Any, ActorRef], msgTransformer: (Any) => Any): ActorRef = @@ -45,14 +45,14 @@ object Routing { def routes = routing }).start - /** + /** * Creates a Dispatcher given a routing. */ def dispatcherActor(routing: PF[Any, ActorRef]): ActorRef = actorOf(new Actor with Dispatcher { def routes = routing }).start - /** + /** * Creates an actor that pipes all incoming messages to * both another actor and through the supplied function */ diff --git a/akka-core/src/main/scala/stm/TransactionFactory.scala b/akka-core/src/main/scala/stm/TransactionFactory.scala index a7a81c4212..56982bb759 100644 --- a/akka-core/src/main/scala/stm/TransactionFactory.scala +++ b/akka-core/src/main/scala/stm/TransactionFactory.scala @@ -76,7 +76,7 @@ object TransactionConfig { /** * For configuring multiverse transactions. - * + * *

familyName - Family name for transactions. Useful for debugging. *

readonly - Sets transaction as readonly. Readonly transactions are cheaper. *

maxRetries - The maximum number of times a transaction will retry. diff --git a/akka-core/src/main/scala/util/ListenerManagement.scala b/akka-core/src/main/scala/util/ListenerManagement.scala index bdee9d1c1c..7316beba64 100644 --- a/akka-core/src/main/scala/util/ListenerManagement.scala +++ b/akka-core/src/main/scala/util/ListenerManagement.scala @@ -9,7 +9,7 @@ import java.util.concurrent.CopyOnWriteArrayList import se.scalablesolutions.akka.actor.ActorRef /** - * A manager for listener actors. Intended for mixin by observables. + * A manager for listener actors. Intended for mixin by observables. * * @author Martin Krasser */ @@ -46,4 +46,4 @@ trait ListenerManagement extends Logging { else log.warning("Can't notify [%s] since it is not running.", listener) } } -} \ No newline at end of file +} diff --git a/akka-core/src/test/scala/ActiveObjectGuiceConfiguratorSpec.scala b/akka-core/src/test/scala/ActiveObjectGuiceConfiguratorSpec.scala index c8fd436e1c..2a4a6af393 100644 --- a/akka-core/src/test/scala/ActiveObjectGuiceConfiguratorSpec.scala +++ b/akka-core/src/test/scala/ActiveObjectGuiceConfiguratorSpec.scala @@ -22,13 +22,13 @@ import se.scalablesolutions.akka.dispatch.FutureTimeoutException @RunWith(classOf[JUnitRunner]) class ActiveObjectGuiceConfiguratorSpec extends - Spec with - ShouldMatchers with - BeforeAndAfterAll { + Spec with + ShouldMatchers with + BeforeAndAfterAll { private val conf = new ActiveObjectConfigurator private var messageLog = "" - + override def beforeAll { Config.config val dispatcher = Dispatchers.newExecutorBasedEventDrivenDispatcher("test") @@ -52,7 +52,7 @@ class ActiveObjectGuiceConfiguratorSpec extends ).toArray).inject.supervise } - + override def afterAll = conf.stop describe("ActiveObjectGuiceConfigurator") { @@ -76,11 +76,11 @@ class ActiveObjectGuiceConfiguratorSpec extends val str = conf.getInstance(classOf[String]) fail("exception should have been thrown") } catch { - case e: Exception => + case e: Exception => classOf[IllegalStateException] should equal(e.getClass) } } - + it("should be able to invoke active object") { messageLog = "" val foo = conf.getInstance(classOf[Foo]) @@ -109,7 +109,7 @@ class ActiveObjectGuiceConfiguratorSpec extends foo.longRunning fail("exception should have been thrown") } catch { - case e: FutureTimeoutException => + case e: FutureTimeoutException => classOf[FutureTimeoutException] should equal(e.getClass) } } @@ -121,7 +121,7 @@ class ActiveObjectGuiceConfiguratorSpec extends foo.throwsException fail("exception should have been thrown") } catch { - case e: RuntimeException => + case e: RuntimeException => classOf[RuntimeException] should equal(e.getClass) } } diff --git a/akka-core/src/test/scala/AgentSpec.scala b/akka-core/src/test/scala/AgentSpec.scala index 859b409ad1..de4326c646 100644 --- a/akka-core/src/test/scala/AgentSpec.scala +++ b/akka-core/src/test/scala/AgentSpec.scala @@ -59,9 +59,9 @@ class AgentSpec extends junit.framework.TestCase with Suite with MustMatchers { agent.close tx.stop } - + /* - // Strange test - do we really need it? + // Strange test - do we really need it? 
@Test def testDoingAgentGetInEnlosingTransactionShouldYieldException = { case object Go val latch = new CountDownLatch(1) diff --git a/akka-core/src/test/scala/Bench.scala b/akka-core/src/test/scala/Bench.scala index 211c6787a8..8e3a44f3a0 100644 --- a/akka-core/src/test/scala/Bench.scala +++ b/akka-core/src/test/scala/Bench.scala @@ -8,24 +8,24 @@ package se.scalablesolutions.akka.actor import se.scalablesolutions.akka.actor.Actor._ object Chameneos { - + sealed trait ChameneosEvent case class Meet(from: ActorRef, colour: Colour) extends ChameneosEvent case class Change(colour: Colour) extends ChameneosEvent case class MeetingCount(count: Int) extends ChameneosEvent case object Exit extends ChameneosEvent - + abstract class Colour case object RED extends Colour case object YELLOW extends Colour case object BLUE extends Colour case object FADED extends Colour - + val colours = Array[Colour](BLUE, RED, YELLOW) - + var start = 0L var end = 0L - + class Chameneo(var mall: ActorRef, var colour: Colour, cid: Int) extends Actor { var meetings = 0 self.start @@ -36,12 +36,12 @@ object Chameneos { colour = complement(otherColour) meetings = meetings +1 from ! Change(colour) - mall ! Meet(self, colour) + mall ! Meet(self, colour) case Change(newColour) => colour = newColour meetings = meetings +1 - mall ! Meet(self, colour) + mall ! Meet(self, colour) case Exit => colour = FADED @@ -77,11 +77,11 @@ object Chameneos { var waitingChameneo: Option[ActorRef] = None var sumMeetings = 0 var numFaded = 0 - + override def init = { for (i <- 0 until numChameneos) actorOf(new Chameneo(self, colours(i % 3), i)) } - + def receive = { case MeetingCount(i) => numFaded += 1 @@ -90,13 +90,13 @@ object Chameneos { Chameneos.end = System.currentTimeMillis self.stop } - + case msg @ Meet(a, c) => if (n > 0) { waitingChameneo match { case Some(chameneo) => n -= 1 - chameneo ! msg + chameneo ! 
msg waitingChameneo = None case None => waitingChameneo = self.sender } @@ -106,7 +106,7 @@ object Chameneos { } } } - + def run { // System.setProperty("akka.config", "akka.conf") Chameneos.start = System.currentTimeMillis diff --git a/akka-core/src/test/scala/NestedTransactionalActiveObjectSpec.scala b/akka-core/src/test/scala/NestedTransactionalActiveObjectSpec.scala index 0f2e3d5499..f0bf3f6c02 100644 --- a/akka-core/src/test/scala/NestedTransactionalActiveObjectSpec.scala +++ b/akka-core/src/test/scala/NestedTransactionalActiveObjectSpec.scala @@ -20,13 +20,13 @@ import se.scalablesolutions.akka.actor._ /* @RunWith(classOf[JUnitRunner]) class NestedTransactionalActiveObjectSpec extends - Spec with - ShouldMatchers with - BeforeAndAfterAll { + Spec with + ShouldMatchers with + BeforeAndAfterAll { private val conf = new ActiveObjectConfigurator private var messageLog = "" - + override def beforeAll { Config.config conf.configure( @@ -43,7 +43,7 @@ class NestedTransactionalActiveObjectSpec extends 10000) ).toArray).supervise } - + override def afterAll { conf.stop } diff --git a/akka-core/src/test/scala/RemoteSupervisorSpec.scala b/akka-core/src/test/scala/RemoteSupervisorSpec.scala index 4a5ad1a5ef..e1a0c80192 100644 --- a/akka-core/src/test/scala/RemoteSupervisorSpec.scala +++ b/akka-core/src/test/scala/RemoteSupervisorSpec.scala @@ -84,7 +84,7 @@ object RemoteSupervisorSpec { */ class RemoteSupervisorSpec extends JUnitSuite { import RemoteSupervisorSpec._ - + var pingpong1: ActorRef = _ var pingpong2: ActorRef = _ var pingpong3: ActorRef = _ diff --git a/akka-core/src/test/scala/RemoteTransactionalActiveObjectSpec.scala b/akka-core/src/test/scala/RemoteTransactionalActiveObjectSpec.scala index 2ba9d08296..a3710bf12c 100644 --- a/akka-core/src/test/scala/RemoteTransactionalActiveObjectSpec.scala +++ b/akka-core/src/test/scala/RemoteTransactionalActiveObjectSpec.scala @@ -24,22 +24,22 @@ object RemoteTransactionalActiveObjectSpec { @RunWith(classOf[JUnitRunner]) class RemoteTransactionalActiveObjectSpec extends - Spec with - ShouldMatchers with - BeforeAndAfterAll { + Spec with + ShouldMatchers with + BeforeAndAfterAll { import RemoteTransactionalActiveObjectSpec._ Config.config private val conf = new ActiveObjectConfigurator - private var messageLog = "" - + private var messageLog = "" + override def beforeAll = { server = new RemoteServer() server.start(HOSTNAME, PORT) - Thread.sleep(1000) + Thread.sleep(1000) } - + override def afterAll = { conf.stop try { diff --git a/akka-core/src/test/scala/RoutingSpec.scala b/akka-core/src/test/scala/RoutingSpec.scala index a9d488a5fe..65d667617b 100644 --- a/akka-core/src/test/scala/RoutingSpec.scala +++ b/akka-core/src/test/scala/RoutingSpec.scala @@ -49,7 +49,7 @@ class RoutingSpec extends junit.framework.TestCase with Suite with MustMatchers result.isDefined must be (true) result.get must be(21) - + for(a <- List(t1,t2,d)) a.stop } @@ -122,8 +122,8 @@ class RoutingSpec extends junit.framework.TestCase with Suite with MustMatchers } @Test def testIsDefinedAt = { - import se.scalablesolutions.akka.actor.ActorRef - + import se.scalablesolutions.akka.actor.ActorRef + val (testMsg1,testMsg2,testMsg3,testMsg4) = ("test1","test2","test3","test4") val t1 = actorOf( new Actor() { @@ -132,21 +132,21 @@ class RoutingSpec extends junit.framework.TestCase with Suite with MustMatchers case `testMsg2` => self.reply(7) } } ).start - + val t2 = actorOf( new Actor() { def receive = { case `testMsg1` => self.reply(3) case `testMsg2` => self.reply(7) } } ).start 
- + val t3 = actorOf( new Actor() { def receive = { case `testMsg1` => self.reply(3) case `testMsg2` => self.reply(7) } } ).start - + val t4 = actorOf( new Actor() { def receive = { case `testMsg1` => self.reply(3) @@ -156,7 +156,7 @@ class RoutingSpec extends junit.framework.TestCase with Suite with MustMatchers val d1 = loadBalancerActor(new SmallestMailboxFirstIterator(t1 :: t2 :: Nil)) val d2 = loadBalancerActor(new CyclicIterator[ActorRef](t3 :: t4 :: Nil)) - + t1.isDefinedAt(testMsg1) must be (true) t1.isDefinedAt(testMsg3) must be (false) t2.isDefinedAt(testMsg1) must be (true) diff --git a/akka-core/src/test/scala/SerializableActorSpec.scala b/akka-core/src/test/scala/SerializableActorSpec.scala index d24e82cf98..b4a864db37 100644 --- a/akka-core/src/test/scala/SerializableActorSpec.scala +++ b/akka-core/src/test/scala/SerializableActorSpec.scala @@ -28,7 +28,7 @@ class SerializableActorSpec extends actor2.start (actor2 !! "hello").getOrElse("_") should equal("world 2") } - + it("should be able to serialize and deserialize a ProtobufSerializableActor") { val actor1 = actorOf[ProtobufSerializableTestActor].start (actor1 !! "hello").getOrElse("_") should equal("world 1") @@ -51,7 +51,7 @@ class SerializableActorSpec extends actor2.start (actor2 !! "hello").getOrElse("_") should equal("world") } - + it("should be able to serialize and deserialize a StatelessSerializableTestActorWithMessagesInMailbox") { val actor1 = actorOf[StatelessSerializableTestActorWithMessagesInMailbox].start (actor1 ! "hello") diff --git a/akka-core/src/test/scala/SupervisorSpec.scala b/akka-core/src/test/scala/SupervisorSpec.scala index 98025e8139..fcbedd476b 100644 --- a/akka-core/src/test/scala/SupervisorSpec.scala +++ b/akka-core/src/test/scala/SupervisorSpec.scala @@ -97,7 +97,7 @@ object SupervisorSpec { class Master extends Actor { self.trapExit = classOf[Exception] :: Nil - self.faultHandler = Some(OneForOneStrategy(5, 1000)) + self.faultHandler = Some(OneForOneStrategy(5, 1000)) val temp = self.spawnLink[TemporaryActor] override def receive = { case Die => temp !! 
(Die, 5000) diff --git a/akka-core/src/test/scala/TransactionalActiveObjectSpec.scala b/akka-core/src/test/scala/TransactionalActiveObjectSpec.scala index 0c008e7902..604e371638 100644 --- a/akka-core/src/test/scala/TransactionalActiveObjectSpec.scala +++ b/akka-core/src/test/scala/TransactionalActiveObjectSpec.scala @@ -19,13 +19,13 @@ import se.scalablesolutions.akka.actor._ /* @RunWith(classOf[JUnitRunner]) class TransactionalActiveObjectSpec extends - Spec with - ShouldMatchers with - BeforeAndAfterAll { + Spec with + ShouldMatchers with + BeforeAndAfterAll { private val conf = new ActiveObjectConfigurator private var messageLog = "" - + override def beforeAll { Config.config conf.configure( @@ -40,7 +40,7 @@ class TransactionalActiveObjectSpec extends 10000)).toArray ).supervise } - + override def afterAll { conf.stop } @@ -61,7 +61,7 @@ class TransactionalActiveObjectSpec extends stateful.setMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure", "init") val failer = conf.getInstance(classOf[ActiveObjectFailer]) try { - stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) fail("should have thrown an exception") } catch { case e => {} } stateful.getMapState("testShouldRollbackStateForStatefulServerInCaseOfFailure") should equal("init") @@ -73,7 +73,7 @@ class TransactionalActiveObjectSpec extends stateful.setVectorState("init") // set init state val failer = conf.getInstance(classOf[ActiveObjectFailer]) try { - stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) fail("should have thrown an exception") } catch { case e => {} } stateful.getVectorState should equal("init") @@ -93,7 +93,7 @@ class TransactionalActiveObjectSpec extends stateful.setRefState("init") // set init state val failer = conf.getInstance(classOf[ActiveObjectFailer]) try { - stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) + stateful.failure("testShouldRollbackStateForStatefulServerInCaseOfFailure", "new state", failer) fail("should have thrown an exception") } catch { case e => {} } stateful.getRefState should equal("init") @@ -103,9 +103,9 @@ class TransactionalActiveObjectSpec extends val stateful = conf.getInstance(classOf[TransactionalActiveObject]) stateful.init stateful.setRefState("init") // set init state - stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") + stateful.success("testShouldNotRollbackStateForStatefulServerInCaseOfSuccess", "new state") stateful.getRefState should equal("new state") } } } -*/ \ No newline at end of file +*/ diff --git a/akka-http/src/main/scala/ListWriter.scala b/akka-http/src/main/scala/ListWriter.scala index 2a653ae90b..6556004e38 100644 --- a/akka-http/src/main/scala/ListWriter.scala +++ b/akka-http/src/main/scala/ListWriter.scala @@ -16,17 +16,17 @@ import javax.ws.rs.Produces @Produces(Array("application/json")) class ListWriter extends MessageBodyWriter[List[_]] { - def isWriteable(aClass: Class[_], - aType: java.lang.reflect.Type, - annotations: Array[java.lang.annotation.Annotation], + def isWriteable(aClass: Class[_], + aType: java.lang.reflect.Type, + annotations: Array[java.lang.annotation.Annotation], mediaType: MediaType) = classOf[List[_]].isAssignableFrom(aClass) || aClass == 
::.getClass - def getSize(list: List[_], - aClass: Class[_], - aType: java.lang.reflect.Type, - annotations: Array[java.lang.annotation.Annotation], - mediaType: MediaType) = + def getSize(list: List[_], + aClass: Class[_], + aType: java.lang.reflect.Type, + annotations: Array[java.lang.annotation.Annotation], + mediaType: MediaType) = -1L def writeTo(list: List[_], diff --git a/akka-persistence/akka-persistence-redis/src/main/scala/RedisStorageBackend.scala b/akka-persistence/akka-persistence-redis/src/main/scala/RedisStorageBackend.scala index ad758f9999..33b1c04a73 100644 --- a/akka-persistence/akka-persistence-redis/src/main/scala/RedisStorageBackend.scala +++ b/akka-persistence/akka-persistence-redis/src/main/scala/RedisStorageBackend.scala @@ -247,7 +247,7 @@ private [akka] object RedisStorageBackend extends db.lindex(new String(encode(name.getBytes)), index) match { case None => throw new NoSuchElementException(name + " does not have element at " + index) - case Some(e) => + case Some(e) => stringToByteArray(e) } } @@ -280,7 +280,7 @@ private [akka] object RedisStorageBackend extends db.llen(new String(encode(name.getBytes))) match { case None => throw new NoSuchElementException(name + " not present") - case Some(l) => + case Some(l) => l } } diff --git a/akka-persistence/akka-persistence-redis/src/test/scala/RedisInconsistentSizeBugTest.scala b/akka-persistence/akka-persistence-redis/src/test/scala/RedisInconsistentSizeBugTest.scala index 74d1e95cc9..1e760784c9 100644 --- a/akka-persistence/akka-persistence-redis/src/test/scala/RedisInconsistentSizeBugTest.scala +++ b/akka-persistence/akka-persistence-redis/src/test/scala/RedisInconsistentSizeBugTest.scala @@ -28,7 +28,7 @@ case class SETFOO(s: String) object SampleStorage { class RedisSampleStorage extends Actor { - self.lifeCycle = Some(LifeCycle(Permanent)) + self.lifeCycle = Some(LifeCycle(Permanent)) val EVENT_MAP = "akka.sample.map" private var eventMap = atomic { RedisStorage.getMap(EVENT_MAP) } diff --git a/akka-persistence/akka-persistence-redis/src/test/scala/RedisStorageBackendSpec.scala b/akka-persistence/akka-persistence-redis/src/test/scala/RedisStorageBackendSpec.scala index 8a8021b3c5..d7a7d579bf 100644 --- a/akka-persistence/akka-persistence-redis/src/test/scala/RedisStorageBackendSpec.scala +++ b/akka-persistence/akka-persistence-redis/src/test/scala/RedisStorageBackendSpec.scala @@ -60,7 +60,7 @@ class RedisStorageBackendSpec extends describe("Store and query long value in maps") { it("should enter 4 entries in redis for transaction T-1") { val d = Calendar.getInstance.getTime.getTime - insertMapStorageEntryFor("T-11", "debasish".getBytes, + insertMapStorageEntryFor("T-11", "debasish".getBytes, toByteArray[Long](d)) getMapStorageSizeFor("T-11") should equal(1) @@ -230,14 +230,14 @@ class RedisStorageBackendSpec extends object NameSerialization { implicit object DateFormat extends Format[Date] { - def reads(in : Input) = + def reads(in : Input) = new Date(read[Long](in)) def writes(out: Output, value: Date) = write[Long](out, value.getTime) } - case class Name(id: Int, name: String, + case class Name(id: Int, name: String, address: String, dateOfBirth: Date, dateDied: Option[Date]) implicit val NameFormat: Format[Name] = diff --git a/akka-samples/akka-sample-camel/src/main/scala/StandaloneApplication.scala b/akka-samples/akka-sample-camel/src/main/scala/StandaloneApplication.scala index b7e834fcba..0a7304ba0e 100644 --- a/akka-samples/akka-sample-camel/src/main/scala/StandaloneApplication.scala +++ 
b/akka-samples/akka-sample-camel/src/main/scala/StandaloneApplication.scala @@ -80,4 +80,4 @@ class StandaloneSpringApplicationRoute extends RouteBuilder { // routes to active object (in ApplicationContextRegistry) from("direct:test3").to("active-object:pojo3?method=foo") } -} \ No newline at end of file +} diff --git a/akka-samples/akka-sample-rest-scala/src/main/scala/SimpleService.scala b/akka-samples/akka-sample-rest-scala/src/main/scala/SimpleService.scala index 870739bf8f..fc96bba182 100644 --- a/akka-samples/akka-sample-rest-scala/src/main/scala/SimpleService.scala +++ b/akka-samples/akka-sample-rest-scala/src/main/scala/SimpleService.scala @@ -124,7 +124,7 @@ class PersistentSimpleServiceActor extends Transactor { case "Tick" => if (hasStartedTicking) { val bytes = storage.get(KEY.getBytes).get val counter = Integer.parseInt(new String(bytes, "UTF8")) - storage.put(KEY.getBytes, (counter + 1).toString.getBytes ) + storage.put(KEY.getBytes, (counter + 1).toString.getBytes ) // val bytes = storage.get(KEY.getBytes).get // val counter = ByteBuffer.wrap(bytes).getInt // storage.put(KEY.getBytes, ByteBuffer.allocate(4).putInt(counter + 1).array) diff --git a/akka-spring/src/main/scala/ActiveObjectFactoryBean.scala b/akka-spring/src/main/scala/ActiveObjectFactoryBean.scala index f8473f295b..4f6ea37148 100644 --- a/akka-spring/src/main/scala/ActiveObjectFactoryBean.scala +++ b/akka-spring/src/main/scala/ActiveObjectFactoryBean.scala @@ -7,8 +7,8 @@ package se.scalablesolutions.akka.spring import java.beans.PropertyDescriptor import java.lang.reflect.Method -import org.springframework.beans.BeanWrapperImpl -import org.springframework.beans.BeanWrapper +import org.springframework.beans.BeanWrapperImpl +import org.springframework.beans.BeanWrapper import org.springframework.beans.BeanUtils import org.springframework.util.ReflectionUtils import org.springframework.util.StringUtils @@ -66,14 +66,14 @@ class ActiveObjectFactoryBean extends AbstractFactoryBean[AnyRef] with Logging { if (isRemote) argumentList += "r" if (hasInterface) argumentList += "i" if (hasDispatcher) argumentList += "d" - + setProperties( create(argumentList)) } /** * This method manages element by injecting either - * values () and bean references () + * values () and bean references () */ private def setProperties(ref:AnyRef) : AnyRef = { log.debug("Processing properties and dependencies for target class %s",target) @@ -81,7 +81,7 @@ class ActiveObjectFactoryBean extends AbstractFactoryBean[AnyRef] with Logging { for(entry <- property.entryList) { val propertyDescriptor = BeanUtils.getPropertyDescriptor(ref.getClass,entry.name) val method = propertyDescriptor.getWriteMethod(); - + if(StringUtils.hasText(entry.ref)) { log.debug("Setting property %s with bean ref %s using method %s", entry.name,entry.ref,method.getName) @@ -130,8 +130,8 @@ class ActiveObjectFactoryBean extends AbstractFactoryBean[AnyRef] with Logging { } } - def aNewInstance[T <: AnyRef](clazz: Class[T]) : T = { - clazz.newInstance().asInstanceOf[T] + def aNewInstance[T <: AnyRef](clazz: Class[T]) : T = { + clazz.newInstance().asInstanceOf[T] } /** diff --git a/akka-spring/src/main/scala/AkkaBeansException.scala b/akka-spring/src/main/scala/AkkaBeansException.scala index cdaaaf5bec..8cbffa86f7 100644 --- a/akka-spring/src/main/scala/AkkaBeansException.scala +++ b/akka-spring/src/main/scala/AkkaBeansException.scala @@ -7,8 +7,8 @@ import org.springframework.beans.BeansException @author Johan Rask */ class AkkaBeansException(errorMsg:String,t:Throwable) 
extends BeansException(errorMsg,t) { - + def this(errorMsg:String) = { this(errorMsg,null) } -} \ No newline at end of file +} diff --git a/akka-spring/src/main/scala/CamelServiceBeanDefinitionParser.scala b/akka-spring/src/main/scala/CamelServiceBeanDefinitionParser.scala index f8234ec8c7..c7f95d8a3a 100644 --- a/akka-spring/src/main/scala/CamelServiceBeanDefinitionParser.scala +++ b/akka-spring/src/main/scala/CamelServiceBeanDefinitionParser.scala @@ -38,4 +38,4 @@ class CamelServiceBeanDefinitionParser extends AbstractSingleBeanDefinitionParse * Returns true. */ override def shouldGenerateIdAsFallback = true -} \ No newline at end of file +} diff --git a/akka-spring/src/main/scala/CamelServiceFactoryBean.scala b/akka-spring/src/main/scala/CamelServiceFactoryBean.scala index 040473951e..50a3bd748c 100644 --- a/akka-spring/src/main/scala/CamelServiceFactoryBean.scala +++ b/akka-spring/src/main/scala/CamelServiceFactoryBean.scala @@ -41,4 +41,4 @@ class CamelServiceFactoryBean extends FactoryBean[CamelService] with Initializin def destroy = { instance.unload } -} \ No newline at end of file +} diff --git a/akka-spring/src/main/scala/PropertyEntries.scala b/akka-spring/src/main/scala/PropertyEntries.scala index 60f1dc3b77..aa2843064c 100644 --- a/akka-spring/src/main/scala/PropertyEntries.scala +++ b/akka-spring/src/main/scala/PropertyEntries.scala @@ -15,4 +15,4 @@ class PropertyEntries { def add(entry:PropertyEntry) = { entryList.append(entry) } -} \ No newline at end of file +} diff --git a/akka-spring/src/main/scala/PropertyEntry.scala b/akka-spring/src/main/scala/PropertyEntry.scala index eb19479f79..4d1aaa1a44 100644 --- a/akka-spring/src/main/scala/PropertyEntry.scala +++ b/akka-spring/src/main/scala/PropertyEntry.scala @@ -1,7 +1,7 @@ package se.scalablesolutions.akka.spring /** -* Represents a property element +* Represents a property element * @author Johan Rask */ class PropertyEntry { @@ -9,9 +9,9 @@ class PropertyEntry { var name:String = _ var value:String = null var ref:String = null - - + + override def toString(): String = { format("name = %s,value = %s, ref = %s", name,value,ref) } -} \ No newline at end of file +} diff --git a/akka-spring/src/test/scala/ActiveObjectFactoryBeanTest.scala b/akka-spring/src/test/scala/ActiveObjectFactoryBeanTest.scala index bfa6a27443..f1c11d0eee 100644 --- a/akka-spring/src/test/scala/ActiveObjectFactoryBeanTest.scala +++ b/akka-spring/src/test/scala/ActiveObjectFactoryBeanTest.scala @@ -49,7 +49,7 @@ class ActiveObjectFactoryBeanTest extends Spec with ShouldMatchers { } it("should create a proxy of type ResourceEditor") { - val bean = new ActiveObjectFactoryBean() + val bean = new ActiveObjectFactoryBean() // we must have a java class here bean.setTarget("org.springframework.core.io.ResourceEditor") val entries = new PropertyEntries() @@ -66,7 +66,7 @@ class ActiveObjectFactoryBeanTest extends Spec with ShouldMatchers { } it("should create an application context and inject a string dependency") { - var ctx = new ClassPathXmlApplicationContext("appContext.xml"); + var ctx = new ClassPathXmlApplicationContext("appContext.xml"); val target:ResourceEditor = ctx.getBean("bean").asInstanceOf[ResourceEditor] assert(target.getSource === "someString") }