Remove procedure syntax (#25362)

Authored by kenji yoshida on 2018-07-25 20:38:27 +09:00, committed by Johan Andrén
parent 50979d599c
commit 5b3b191bac
180 changed files with 403 additions and 403 deletions
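
The change is mechanical throughout: Scala's procedure syntax (a method body written without '=' and without a result type) is replaced by an explicit ': Unit =', and abstract procedure declarations gain an explicit ': Unit'. Later Scala versions deprecate procedure syntax and Scala 3 drops it. A minimal sketch of the pattern, using a hypothetical Logger example that is not part of this commit:

    trait Logger {
      // before (procedure syntax): def flush()
      def flush(): Unit                                       // abstract member gains an explicit result type
    }

    class ConsoleLogger extends Logger {
      // before (procedure syntax): def flush() { Console.out.flush() }
      override def flush(): Unit = { Console.out.flush() }    // concrete member gains ": Unit ="
    }
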

View file

@ -167,7 +167,7 @@ private[akka] final class TestProbeImpl[M](name: String, system: ActorSystem[_])
override def expectNoMessage(): Unit =
expectNoMessage_internal(settings.ExpectNoMessageDefaultTimeout.dilated)
private def expectNoMessage_internal(max: FiniteDuration) {
private def expectNoMessage_internal(max: FiniteDuration): Unit = {
val o = receiveOne(max)
assert(o == null, s"received unexpected message $o")
lastWasNoMessage = true

View file

@ -26,7 +26,7 @@ class AkkaExceptionSpec extends WordSpec with Matchers {
}
}
def verify(clazz: java.lang.Class[_]) {
def verify(clazz: java.lang.Class[_]): Unit = {
clazz.getConstructor(Array(classOf[String]): _*)
}
}

View file

@ -37,7 +37,7 @@ object ActorConfigurationVerificationSpec {
class ActorConfigurationVerificationSpec extends AkkaSpec(ActorConfigurationVerificationSpec.config) with DefaultTimeout with BeforeAndAfterEach {
import ActorConfigurationVerificationSpec._
override def atStartup {
override def atStartup: Unit = {
system.eventStream.publish(Mute(EventFilter[ConfigurationException]("")))
}

View file

@ -133,7 +133,7 @@ class ActorCreationPerfSpec extends AkkaSpec(ActorCreationPerfSpec.config) with
override val reportMetricsEnabled = metricsConfig.getBoolean("akka.test.actor.ActorPerfSpec.report-metrics")
override val forceGcEnabled = metricsConfig.getBoolean("akka.test.actor.ActorPerfSpec.force-gc")
def runWithCounterInside(metricName: String, scenarioName: String, number: Int, propsCreator: () ⇒ Props) {
def runWithCounterInside(metricName: String, scenarioName: String, number: Int, propsCreator: () ⇒ Props): Unit = {
val hist = histogram(BlockingTimeKey / metricName)
val driver = system.actorOf(Props(classOf[TimingDriver], hist), scenarioName)
@ -178,7 +178,7 @@ class ActorCreationPerfSpec extends AkkaSpec(ActorCreationPerfSpec.config) with
after diff before
}
def registerTests(name: String, propsCreator: () ⇒ Props) {
def registerTests(name: String, propsCreator: () ⇒ Props): Unit = {
val scenarioName = name.replaceAll("""[^\w]""", "")
s"warm-up before: $name" taggedAs PerformanceTest in {

View file

@ -22,8 +22,8 @@ object ActorLifeCycleSpec {
def report(msg: Any) = testActor ! message(msg)
def message(msg: Any): Tuple3[Any, String, Int] = (msg, id, currentGen)
val currentGen = generationProvider.getAndIncrement()
override def preStart() { report("preStart") }
override def postStop() { report("postStop") }
override def preStart(): Unit = { report("preStart") }
override def postStop(): Unit = { report("postStop") }
def receive = { case "status" ⇒ sender() ! message("OK") }
}
@ -40,8 +40,8 @@ class ActorLifeCycleSpec extends AkkaSpec("akka.actor.serialize-messages=off") w
val supervisor = system.actorOf(Props(classOf[Supervisor], OneForOneStrategy(maxNrOfRetries = 3)(List(classOf[Exception]))))
val gen = new AtomicInteger(0)
val restarterProps = Props(new LifeCycleTestActor(testActor, id, gen) {
override def preRestart(reason: Throwable, message: Option[Any]) { report("preRestart") }
override def postRestart(reason: Throwable) { report("postRestart") }
override def preRestart(reason: Throwable, message: Option[Any]): Unit = { report("preRestart") }
override def postRestart(reason: Throwable): Unit = { report("postRestart") }
}).withDeploy(Deploy.local)
val restarter = Await.result((supervisor ? restarterProps).mapTo[ActorRef], timeout.duration)
@ -119,7 +119,7 @@ class ActorLifeCycleSpec extends AkkaSpec("akka.actor.serialize-messages=off") w
"log failues in postStop" in {
val a = system.actorOf(Props(new Actor {
def receive = Actor.emptyBehavior
override def postStop { throw new Exception("hurrah") }
override def postStop: Unit = { throw new Exception("hurrah") }
}))
EventFilter[Exception]("hurrah", occurrences = 1) intercept {
a ! PoisonPill

View file

@ -175,7 +175,7 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout {
val all = Seq(c1, c2, c21)
"find actors by looking up their path" in {
def check(looker: ActorRef, pathOf: ActorRef, result: ActorRef) {
def check(looker: ActorRef, pathOf: ActorRef, result: ActorRef): Unit = {
Await.result(looker ? LookupPath(pathOf.path), timeout.duration) should ===(result)
}
for {
@ -185,7 +185,7 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout {
}
"find actors by looking up their string representation" in {
def check(looker: ActorRef, pathOf: ActorRef, result: ActorRef) {
def check(looker: ActorRef, pathOf: ActorRef, result: ActorRef): Unit = {
Await.result(looker ? LookupString(pathOf.path.toString), timeout.duration) should ===(result)
// with uid
Await.result(looker ? LookupString(pathOf.path.toSerializationFormat), timeout.duration) should ===(result)
@ -199,7 +199,7 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout {
}
"find actors by looking up their root-anchored relative path" in {
def check(looker: ActorRef, pathOf: ActorRef, result: ActorRef) {
def check(looker: ActorRef, pathOf: ActorRef, result: ActorRef): Unit = {
Await.result(looker ? LookupString(pathOf.path.toStringWithoutAddress), timeout.duration) should ===(result)
Await.result(looker ? LookupString(pathOf.path.elements.mkString("/", "/", "/")), timeout.duration) should ===(result)
}
@ -210,7 +210,7 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout {
}
"find actors by looking up their relative path" in {
def check(looker: ActorRef, result: ActorRef, elems: String*) {
def check(looker: ActorRef, result: ActorRef, elems: String*): Unit = {
Await.result(looker ? LookupElems(elems), timeout.duration) should ===(result)
Await.result(looker ? LookupString(elems mkString "/"), timeout.duration) should ===(result)
Await.result(looker ? LookupString(elems mkString ("", "/", "/")), timeout.duration) should ===(result)
@ -226,7 +226,7 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout {
}
"find system-generated actors" in {
def check(target: ActorRef) {
def check(target: ActorRef): Unit = {
for (looker ← all) {
Await.result(looker ? LookupPath(target.path), timeout.duration) should ===(target)
Await.result(looker ? LookupString(target.path.toString), timeout.duration) should ===(target)
@ -241,12 +241,12 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout {
"return deadLetters or EmptyLocalActorRef, respectively, for non-existing paths" in {
import scala.collection.JavaConverters._
def checkOne(looker: ActorRef, query: Query, result: ActorRef) {
def checkOne(looker: ActorRef, query: Query, result: ActorRef): Unit = {
val lookup = Await.result(looker ? query, timeout.duration)
lookup.getClass should be(result.getClass)
lookup should ===(result)
}
def check(looker: ActorRef) {
def check(looker: ActorRef): Unit = {
val lookname = looker.path.elements.mkString("", "/", "/")
for (
(l, r) ← Seq(

View file

@ -184,7 +184,7 @@ class ActorSelectionSpec extends AkkaSpec with DefaultTimeout {
val all = Seq(c1, c2, c21)
"select actors by their path" in {
def check(looker: ActorRef, pathOf: ActorRef, result: ActorRef) {
def check(looker: ActorRef, pathOf: ActorRef, result: ActorRef): Unit = {
askNode(looker, SelectPath(pathOf.path)) should ===(Some(result))
}
for {
@ -194,7 +194,7 @@ class ActorSelectionSpec extends AkkaSpec with DefaultTimeout {
}
"select actors by their string path representation" in {
def check(looker: ActorRef, pathOf: ActorRef, result: ActorRef) {
def check(looker: ActorRef, pathOf: ActorRef, result: ActorRef): Unit = {
askNode(looker, SelectString(pathOf.path.toStringWithoutAddress)) should ===(Some(result))
// with trailing /
askNode(looker, SelectString(pathOf.path.toStringWithoutAddress + "/")) should ===(Some(result))
@ -206,7 +206,7 @@ class ActorSelectionSpec extends AkkaSpec with DefaultTimeout {
}
"select actors by their root-anchored relative path" in {
def check(looker: ActorRef, pathOf: ActorRef, result: ActorRef) {
def check(looker: ActorRef, pathOf: ActorRef, result: ActorRef): Unit = {
askNode(looker, SelectString(pathOf.path.toStringWithoutAddress)) should ===(Some(result))
askNode(looker, SelectString(pathOf.path.elements.mkString("/", "/", "/"))) should ===(Some(result))
}
@ -217,7 +217,7 @@ class ActorSelectionSpec extends AkkaSpec with DefaultTimeout {
}
"select actors by their relative path" in {
def check(looker: ActorRef, result: ActorRef, elems: String*) {
def check(looker: ActorRef, result: ActorRef, elems: String*): Unit = {
askNode(looker, SelectString(elems mkString "/")) should ===(Some(result))
askNode(looker, SelectString(elems mkString ("", "/", "/"))) should ===(Some(result))
}
@ -232,7 +232,7 @@ class ActorSelectionSpec extends AkkaSpec with DefaultTimeout {
}
"find system-generated actors" in {
def check(target: ActorRef) {
def check(target: ActorRef): Unit = {
for (looker ← all) {
askNode(looker, SelectPath(target.path)) should ===(Some(target))
askNode(looker, SelectString(target.path.toString)) should ===(Some(target))
@ -247,11 +247,11 @@ class ActorSelectionSpec extends AkkaSpec with DefaultTimeout {
"return deadLetters or ActorIdentity(None), respectively, for non-existing paths" in {
import scala.collection.JavaConverters._
def checkOne(looker: ActorRef, query: Query, result: Option[ActorRef]) {
def checkOne(looker: ActorRef, query: Query, result: Option[ActorRef]): Unit = {
val lookup = askNode(looker, query)
lookup should ===(result)
}
def check(looker: ActorRef) {
def check(looker: ActorRef): Unit = {
val lookname = looker.path.elements.mkString("", "/", "/")
for (
(l, r) ← Seq(

View file

@ -42,7 +42,7 @@ object ActorSystemSpec {
}
}
override def preRestart(cause: Throwable, msg: Option[Any]) {
override def preRestart(cause: Throwable, msg: Option[Any]): Unit = {
if (master ne null) {
master ! "failed with " + cause + " while processing " + msg
}

View file

@ -105,7 +105,7 @@ class JavaActorWithStashSpec extends StashJavaAPI with JUnitSuiteLike
class ActorWithStashSpec extends AkkaSpec(ActorWithStashSpec.testConf) with DefaultTimeout with BeforeAndAfterEach {
import ActorWithStashSpec._
override def atStartup {
override def atStartup: Unit = {
system.eventStream.publish(Mute(EventFilter[Exception]("Crashing...")))
}

View file

@ -235,7 +235,7 @@ class FSMActorSpec extends AkkaSpec(Map("akka.actor.debug.fsm" → true)) with I
}
})
def checkTimersActive(active: Boolean) {
def checkTimersActive(active: Boolean): Unit = {
for (timer ← timerNames) fsmref.isTimerActive(timer) should ===(active)
fsmref.isStateTimerActive should ===(active)
}

View file

@ -39,7 +39,7 @@ object FSMTransitionSpec {
case Event("reply", _) stay replying "reply"
}
initialize()
override def preRestart(reason: Throwable, msg: Option[Any]) { target ! "restarted" }
override def preRestart(reason: Throwable, msg: Option[Any]): Unit = { target ! "restarted" }
}
class OtherFSM(target: ActorRef) extends Actor with FSM[Int, Int] {

View file

@ -18,7 +18,7 @@ import akka.pattern.ask
class RestartStrategySpec extends AkkaSpec("akka.actor.serialize-messages = off") with DefaultTimeout {
override def atStartup {
override def atStartup: Unit = {
system.eventStream.publish(Mute(EventFilter[Exception]("Crashing...")))
}

View file

@ -531,15 +531,15 @@ class LightArrayRevolverSchedulerSpec extends AkkaSpec(SchedulerSpec.testConfRev
trait Driver {
def wakeUp(d: FiniteDuration): Unit
def expectWait(): FiniteDuration
def expectWait(d: FiniteDuration) { expectWait() should ===(d) }
def expectWait(d: FiniteDuration): Unit = { expectWait() should ===(d) }
def probe: TestProbe
def step: FiniteDuration
def close(): Unit
}
val localEC = new ExecutionContext {
def execute(runnable: Runnable) { runnable.run() }
def reportFailure(t: Throwable) { t.printStackTrace() }
def execute(runnable: Runnable): Unit = { runnable.run() }
def reportFailure(t: Throwable): Unit = { t.printStackTrace() }
}
def withScheduler(start: Long = 0L, _startTick: Int = 0, config: Config = ConfigFactory.empty)(thunk: (Scheduler with Closeable, Driver) ⇒ Unit): Unit = {

View file

@ -14,5 +14,5 @@ class Supervisor(override val supervisorStrategy: SupervisorStrategy) extends Ac
case x: Props ⇒ sender() ! context.actorOf(x)
}
// need to override the default of stopping all children upon restart, tests rely on keeping them around
override def preRestart(cause: Throwable, msg: Option[Any]) {}
override def preRestart(cause: Throwable, msg: Option[Any]): Unit = {}
}

View file

@ -41,7 +41,7 @@ object SupervisorHierarchySpec {
case p: Props ⇒ sender() ! context.actorOf(p)
}
// test relies on keeping children around during restart
override def preRestart(cause: Throwable, msg: Option[Any]) {}
override def preRestart(cause: Throwable, msg: Option[Any]): Unit = {}
override def postRestart(reason: Throwable) = {
countDown.countDown()
}
@ -135,7 +135,7 @@ object SupervisorHierarchySpec {
var failed = false
var suspended = false
def abort(msg: String) {
def abort(msg: String): Unit = {
listener ! ErrorLog(msg, log)
log = Vector(Event("log sent", identityHashCode(this)))
context.parent ! Abort
@ -150,7 +150,7 @@ object SupervisorHierarchySpec {
def suspendCount = context.asInstanceOf[ActorCell].mailbox.suspendCount
override def preStart {
override def preStart: Unit = {
log :+= Event("started", identityHashCode(this))
listener ! Ready(self)
val s = size - 1 // subtract myself
@ -221,7 +221,7 @@ object SupervisorHierarchySpec {
Resume
})
override def postRestart(cause: Throwable) {
override def postRestart(cause: Throwable): Unit = {
val state = stateCache.get(self.path)
log = state.log
log :+= Event("restarted " + suspendCount + " " + cause, identityHashCode(this))
@ -244,7 +244,7 @@ object SupervisorHierarchySpec {
}
}
override def postStop {
override def postStop: Unit = {
if (failed || suspended) {
listener ! ErrorLog("not resumed (" + failed + ", " + suspended + ")", log)
val state = stateCache.get(self)
@ -433,15 +433,15 @@ object SupervisorHierarchySpec {
var hierarchy: ActorRef = _
override def preRestart(cause: Throwable, msg: Option[Any]) {
override def preRestart(cause: Throwable, msg: Option[Any]): Unit = {
throw ActorKilledException("I want to DIE")
}
override def postRestart(cause: Throwable) {
override def postRestart(cause: Throwable): Unit = {
throw ActorKilledException("I said I wanted to DIE, dammit!")
}
override def postStop {
override def postStop: Unit = {
testActor ! "stressTestStopped"
}

View file

@ -39,7 +39,7 @@ class SupervisorMiscSpec extends AkkaSpec(SupervisorMiscSpec.config) with Defaul
OneForOneStrategy(maxNrOfRetries = 3, withinTimeRange = 5 seconds)(List(classOf[Exception])))))
val workerProps = Props(new Actor {
override def postRestart(cause: Throwable) { countDownLatch.countDown() }
override def postRestart(cause: Throwable): Unit = { countDownLatch.countDown() }
def receive = {
case "status" this.sender() ! "OK"
case _ this.context.stop(self)

View file

@ -51,7 +51,7 @@ object SupervisorSpec {
throw e
}
override def postRestart(reason: Throwable) {
override def postRestart(reason: Throwable): Unit = {
sendTo ! reason.getMessage
}
}
@ -164,7 +164,7 @@ class SupervisorSpec extends AkkaSpec(SupervisorSpec.config) with BeforeAndAfter
(pingpong1, pingpong2, pingpong3, topSupervisor)
}
override def atStartup() {
override def atStartup(): Unit = {
system.eventStream.publish(Mute(EventFilter[RuntimeException](ExceptionMessage)))
}
@ -206,10 +206,10 @@ class SupervisorSpec extends AkkaSpec(SupervisorSpec.config) with BeforeAndAfter
var postRestarts = 0
var preStarts = 0
var postStops = 0
override def preRestart(reason: Throwable, message: Option[Any]) { preRestarts += 1; testActor ! ("preRestart" + preRestarts) }
override def postRestart(reason: Throwable) { postRestarts += 1; testActor ! ("postRestart" + postRestarts) }
override def preStart() { preStarts += 1; testActor ! ("preStart" + preStarts) }
override def postStop() { postStops += 1; testActor ! ("postStop" + postStops) }
override def preRestart(reason: Throwable, message: Option[Any]): Unit = { preRestarts += 1; testActor ! ("preRestart" + preRestarts) }
override def postRestart(reason: Throwable): Unit = { postRestarts += 1; testActor ! ("postRestart" + postRestarts) }
override def preStart(): Unit = { preStarts += 1; testActor ! ("preStart" + preStarts) }
override def postStop(): Unit = { postStops += 1; testActor ! ("postStop" + postStops) }
def receive = {
case "crash" { testActor ! "crashed"; throw new RuntimeException("Expected") }
case "ping" sender() ! "pong"
@ -384,7 +384,7 @@ class SupervisorSpec extends AkkaSpec(SupervisorSpec.config) with BeforeAndAfter
val init = inits.getAndIncrement()
if (init % 3 == 1) throw new IllegalStateException("Don't wanna!")
override def preRestart(cause: Throwable, msg: Option[Any]) {
override def preRestart(cause: Throwable, msg: Option[Any]): Unit = {
if (init % 3 == 0) throw new IllegalStateException("Don't wanna!")
}

View file

@ -23,7 +23,7 @@ class SupervisorTreeSpec extends AkkaSpec("akka.actor.serialize-messages = off")
def receive = {
case p: Props ⇒ sender() ! context.actorOf(p)
}
override def preRestart(cause: Throwable, msg: Option[Any]) { testActor ! self.path }
override def preRestart(cause: Throwable, msg: Option[Any]): Unit = { testActor ! self.path }
})
val headActor = system.actorOf(p)
val middleActor = Await.result((headActor ? p).mapTo[ActorRef], timeout.duration)

View file

@ -19,7 +19,7 @@ class Ticket669Spec extends AkkaSpec with BeforeAndAfterAll with ImplicitSender
import Ticket669Spec._
// TODO: does this really make sense?
override def atStartup() {
override def atStartup(): Unit = {
Thread.interrupted() //remove interrupted status.
}
@ -56,11 +56,11 @@ object Ticket669Spec {
case msg ⇒ throw new Exception("test")
}
override def preRestart(reason: scala.Throwable, msg: Option[Any]) {
override def preRestart(reason: scala.Throwable, msg: Option[Any]): Unit = {
sender() ! "failure1"
}
override def postStop() {
override def postStop(): Unit = {
sender() ! "failure2"
}
}

View file

@ -103,7 +103,7 @@ object TypedActorSpec {
def nullReturn(): Any = null
def incr()
def incr(): Unit
@throws(classOf[TimeoutException])
def read(): Int
@ -148,7 +148,7 @@ object TypedActorSpec {
var internalNumber = 0
def incr() {
def incr(): Unit = {
internalNumber += 1
}

View file

@ -75,7 +75,7 @@ object ActorModelSpec {
}
}
override def postRestart(reason: Throwable) {
override def postRestart(reason: Throwable): Unit = {
interceptor.getStats(self).restarts.incrementAndGet()
}
@ -128,40 +128,40 @@ object ActorModelSpec {
}
protected[akka] abstract override def suspend(actor: ActorCell) {
protected[akka] abstract override def suspend(actor: ActorCell): Unit = {
getStats(actor.self).suspensions.incrementAndGet()
super.suspend(actor)
}
protected[akka] abstract override def resume(actor: ActorCell) {
protected[akka] abstract override def resume(actor: ActorCell): Unit = {
super.resume(actor)
getStats(actor.self).resumes.incrementAndGet()
}
protected[akka] abstract override def register(actor: ActorCell) {
protected[akka] abstract override def register(actor: ActorCell): Unit = {
assert(getStats(actor.self).registers.incrementAndGet() == 1)
super.register(actor)
}
protected[akka] abstract override def unregister(actor: ActorCell) {
protected[akka] abstract override def unregister(actor: ActorCell): Unit = {
assert(getStats(actor.self).unregisters.incrementAndGet() == 1)
super.unregister(actor)
}
protected[akka] abstract override def dispatch(receiver: ActorCell, invocation: Envelope) {
protected[akka] abstract override def dispatch(receiver: ActorCell, invocation: Envelope): Unit = {
val stats = getStats(receiver.self)
stats.msgsReceived.incrementAndGet()
super.dispatch(receiver, invocation)
}
protected[akka] abstract override def shutdown() {
protected[akka] abstract override def shutdown(): Unit = {
stops.incrementAndGet()
super.shutdown()
}
}
def assertDispatcher(dispatcher: MessageDispatcherInterceptor)(
stops: Long = dispatcher.stops.get())(implicit system: ActorSystem) {
stops: Long = dispatcher.stops.get())(implicit system: ActorSystem): Unit = {
val deadline = System.currentTimeMillis + dispatcher.shutdownTimeout.toMillis * 5
try {
await(deadline)(stops == dispatcher.stops.get)
@ -173,12 +173,12 @@ object ActorModelSpec {
}
}
def assertCountDown(latch: CountDownLatch, wait: Long, hint: String) {
def assertCountDown(latch: CountDownLatch, wait: Long, hint: String): Unit = {
if (!latch.await(wait, TimeUnit.MILLISECONDS))
fail("Failed to count down within " + wait + " millis (count at " + latch.getCount + "). " + hint)
}
def assertNoCountDown(latch: CountDownLatch, wait: Long, hint: String) {
def assertNoCountDown(latch: CountDownLatch, wait: Long, hint: String): Unit = {
if (latch.await(wait, TimeUnit.MILLISECONDS))
fail("Expected count down to fail after " + wait + " millis. " + hint)
}
@ -193,7 +193,7 @@ object ActorModelSpec {
unregisters: Long = 0,
msgsReceived: Long = 0,
msgsProcessed: Long = 0,
restarts: Long = 0)(implicit system: ActorSystem) {
restarts: Long = 0)(implicit system: ActorSystem): Unit = {
assertRef(actorRef, dispatcher)(
suspensions,
resumes,
@ -211,7 +211,7 @@ object ActorModelSpec {
unregisters: Long = statsFor(actorRef, dispatcher).unregisters.get(),
msgsReceived: Long = statsFor(actorRef, dispatcher).msgsReceived.get(),
msgsProcessed: Long = statsFor(actorRef, dispatcher).msgsProcessed.get(),
restarts: Long = statsFor(actorRef, dispatcher).restarts.get())(implicit system: ActorSystem) {
restarts: Long = statsFor(actorRef, dispatcher).restarts.get())(implicit system: ActorSystem): Unit = {
val stats = statsFor(actorRef, Option(dispatcher).getOrElse(actorRef.asInstanceOf[ActorRefWithCell].underlying.asInstanceOf[ActorCell].dispatcher))
val deadline = System.currentTimeMillis + 1000
try {
@ -334,7 +334,7 @@ abstract class ActorModelSpec(config: String) extends AkkaSpec(config) with Defa
system.stop(a)
}
def spawn(f: Unit) {
def spawn(f: Unit): Unit = {
(new Thread {
override def run(): Unit =
try f catch {
@ -367,7 +367,7 @@ abstract class ActorModelSpec(config: String) extends AkkaSpec(config) with Defa
val dispatcher = interceptedDispatcher()
val props = Props[DispatcherActor].withDispatcher(dispatcher.id)
def flood(num: Int) {
def flood(num: Int): Unit = {
val cachedMessage = CountDownNStop(new CountDownLatch(num))
val stopLatch = new CountDownLatch(num)
val keepAliveLatch = new CountDownLatch(1)

View file

@ -213,8 +213,8 @@ class ExecutionContextSpec extends AkkaSpec with DefaultTimeout {
val submissions = new AtomicInteger(0)
val counter = new AtomicInteger(0)
val underlying = new ExecutionContext {
override def execute(r: Runnable) { submissions.incrementAndGet(); ExecutionContext.global.execute(r) }
override def reportFailure(t: Throwable) { ExecutionContext.global.reportFailure(t) }
override def execute(r: Runnable): Unit = { submissions.incrementAndGet(); ExecutionContext.global.execute(r) }
override def reportFailure(t: Throwable): Unit = { ExecutionContext.global.reportFailure(t) }
}
val throughput = 25
val sec = SerializedSuspendableExecutionContext(throughput)(underlying)
@ -245,8 +245,8 @@ class ExecutionContextSpec extends AkkaSpec with DefaultTimeout {
val submissions = new AtomicInteger(0)
val counter = new AtomicInteger(0)
val underlying = new ExecutionContext {
override def execute(r: Runnable) { submissions.incrementAndGet(); ExecutionContext.global.execute(r) }
override def reportFailure(t: Throwable) { ExecutionContext.global.reportFailure(t) }
override def execute(r: Runnable): Unit = { submissions.incrementAndGet(); ExecutionContext.global.execute(r) }
override def reportFailure(t: Throwable): Unit = { ExecutionContext.global.reportFailure(t) }
}
val throughput = 25
val sec = SerializedSuspendableExecutionContext(throughput)(underlying)

View file

@ -92,7 +92,7 @@ abstract class MailboxSpec extends AkkaSpec with BeforeAndAfterAll with BeforeAn
q.hasMessages should ===(expected != 0)
}
def ensureSingleConsumerEnqueueDequeue(config: MailboxType) {
def ensureSingleConsumerEnqueueDequeue(config: MailboxType): Unit = {
val q = factory(config)
ensureMailboxSize(q, 0)
q.dequeue should ===(null)
@ -112,7 +112,7 @@ abstract class MailboxSpec extends AkkaSpec with BeforeAndAfterAll with BeforeAn
ensureMailboxSize(q, 0)
}
def ensureInitialMailboxState(config: MailboxType, q: MessageQueue) {
def ensureInitialMailboxState(config: MailboxType, q: MessageQueue): Unit = {
q should not be null
q match {
case aQueue: BlockingQueue[_] ⇒

View file

@ -48,7 +48,7 @@ class PriorityDispatcherSpec extends AkkaSpec(PriorityDispatcherSpec.config) wit
}
}
def testOrdering(dispatcherKey: String) {
def testOrdering(dispatcherKey: String): Unit = {
val msgs = (1 to 100) toList
// It's important that the actor under test is not a top level actor

View file

@ -51,7 +51,7 @@ class StablePriorityDispatcherSpec extends AkkaSpec(StablePriorityDispatcherSpec
testOrdering(dispatcherKey)
}
def testOrdering(dispatcherKey: String) {
def testOrdering(dispatcherKey: String): Unit = {
val msgs = (1 to 200) toList
val shuffled = scala.util.Random.shuffle(msgs)

View file

@ -258,7 +258,7 @@ class ActorEventBusSpec(conf: Config) extends EventBusSpec("ActorEventBus", conf
disposeSubscriber(system, a2)
}
private def expectUnsubscribedByUnsubscriber(p: TestProbe, a: ActorRef) {
private def expectUnsubscribedByUnsubscriber(p: TestProbe, a: ActorRef): Unit = {
val expectedMsg = s"actor $a has terminated, unsubscribing it from $bus"
p.fishForMessage(1 second, hint = expectedMsg) {
case Logging.Debug(_, _, msg) if msg equals expectedMsg ⇒ true
@ -266,7 +266,7 @@ class ActorEventBusSpec(conf: Config) extends EventBusSpec("ActorEventBus", conf
}
}
private def expectUnregisterFromUnsubscriber(p: TestProbe, a: ActorRef) {
private def expectUnregisterFromUnsubscriber(p: TestProbe, a: ActorRef): Unit = {
val expectedMsg = s"unregistered watch of $a in $bus"
p.fishForMessage(1 second, hint = expectedMsg) {
case Logging.Debug(_, _, msg) if msg equals expectedMsg ⇒ true

View file

@ -406,7 +406,7 @@ class EventStreamSpec extends AkkaSpec(EventStreamSpec.config) {
}
private def verifyLevel(bus: LoggingBus, level: Logging.LogLevel) {
private def verifyLevel(bus: LoggingBus, level: Logging.LogLevel): Unit = {
import Logging._
val allmsg = Seq(Debug("", null, "debug"), Info("", null, "info"), Warning("", null, "warning"), Error("", null, "error"))
val msg = allmsg filter (_.level <= level)
@ -414,7 +414,7 @@ class EventStreamSpec extends AkkaSpec(EventStreamSpec.config) {
msg foreach (expectMsg(_))
}
private def fishForDebugMessage(a: TestProbe, messagePrefix: String, max: Duration = 3 seconds) {
private def fishForDebugMessage(a: TestProbe, messagePrefix: String, max: Duration = 3 seconds): Unit = {
a.fishForMessage(max, hint = "expected debug message prefix: " + messagePrefix) {
case Logging.Debug(_, _, msg: String) if msg startsWith messagePrefix ⇒ true
case other ⇒ false

View file

@ -43,13 +43,13 @@ class LoggingReceiveSpec extends WordSpec with BeforeAndAfterAll {
appAuto.eventStream.publish(filter)
appLifecycle.eventStream.publish(filter)
def ignoreMute(t: TestKit) {
def ignoreMute(t: TestKit): Unit = {
t.ignoreMsg {
case (_: TestEvent.Mute | _: TestEvent.UnMute) ⇒ true
}
}
override def afterAll() {
override def afterAll(): Unit = {
TestKit.shutdownActorSystem(appLogging)
TestKit.shutdownActorSystem(appAuto)
TestKit.shutdownActorSystem(appLifecycle)

View file

@ -36,12 +36,12 @@ class JavaLoggerSpec extends AkkaSpec(JavaLoggerSpec.config) {
val logger = logging.Logger.getLogger(classOf[JavaLoggerSpec.LogProducer].getName)
logger.setUseParentHandlers(false) // turn off output of test LogRecords
logger.addHandler(new logging.Handler {
def publish(record: logging.LogRecord) {
def publish(record: logging.LogRecord): Unit = {
testActor ! record
}
def flush() {}
def close() {}
def flush(): Unit = {}
def close(): Unit = {}
})
val producer = system.actorOf(Props[JavaLoggerSpec.LogProducer], name = "log")

View file

@ -145,7 +145,7 @@ class TcpListenerSpec extends AkkaSpec("""
registerCallReceiver.expectMsg(if (pullMode) 0 else OP_ACCEPT)
def bindListener() {
def bindListener(): Unit = {
listener ! new ChannelRegistration {
def enableInterest(op: Int): Unit = interestCallReceiver.ref ! op
def disableInterest(op: Int): Unit = interestCallReceiver.ref ! -op

View file

@ -24,7 +24,7 @@ class RandomSpec extends AkkaSpec with DefaultTimeout with ImplicitSender {
case "hello" sender() ! "world"
}
override def postStop() {
override def postStop(): Unit = {
stopLatch.countDown()
}
})), "random-shutdown")
@ -88,7 +88,7 @@ class RandomSpec extends AkkaSpec with DefaultTimeout with ImplicitSender {
case "hello" helloLatch.countDown()
}
override def postStop() {
override def postStop(): Unit = {
stopLatch.countDown()
}
})), "random-broadcast")

View file

@ -31,7 +31,7 @@ class RoundRobinSpec extends AkkaSpec with DefaultTimeout with ImplicitSender {
case "hello" helloLatch.countDown()
}
override def postStop() {
override def postStop(): Unit = {
stopLatch.countDown()
}
})), "round-robin-shutdown")
@ -85,7 +85,7 @@ class RoundRobinSpec extends AkkaSpec with DefaultTimeout with ImplicitSender {
case "hello" helloLatch.countDown()
}
override def postStop() {
override def postStop(): Unit = {
stopLatch.countDown()
}
})), "round-robin-broadcast")

View file

@ -31,13 +31,13 @@ object TailChoppingSpec {
class TailChoppingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender {
import TailChoppingSpec._
def oneOfShouldEqual(what: Any, default: Any, ref: ActorRef*)(f: ActorRef ⇒ Any) {
def oneOfShouldEqual(what: Any, default: Any, ref: ActorRef*)(f: ActorRef ⇒ Any): Unit = {
val results = ref.map(p ⇒ f(p))
results.count(_ == what) should equal(1)
results.count(_ == default) should equal(results.size - 1)
}
def allShouldEqual(what: Any, ref: ActorRef*)(f: ActorRef ⇒ Any) {
def allShouldEqual(what: Any, ref: ActorRef*)(f: ActorRef ⇒ Any): Unit = {
val results = ref.map(p ⇒ f(p))
results.count(_ == what) should equal(results.size)
}

View file

@ -15,7 +15,7 @@ import scala.util.Success
object ReceptionistApiSpec {
def compileOnlySpec() {
def compileOnlySpec(): Unit = {
// some dummy prerequisites
implicit val timeout: Timeout = 3.seconds
val service: ActorRef[String] = ???

View file

@ -410,7 +410,7 @@ private[akka] object LocalActorRefProvider {
}
// guardian MUST NOT lose its children during restart
override def preRestart(cause: Throwable, msg: Option[Any]) {}
override def preRestart(cause: Throwable, msg: Option[Any]): Unit = {}
}
/**
@ -457,7 +457,7 @@ private[akka] object LocalActorRefProvider {
}
// guardian MUST NOT lose its children during restart
override def preRestart(cause: Throwable, msg: Option[Any]) {}
override def preRestart(cause: Throwable, msg: Option[Any]): Unit = {}
}
}
@ -653,7 +653,7 @@ private[akka] class LocalActorRefProvider private[akka] (
tempContainer.removeChild(path.name)
}
private[akka] def init(_system: ActorSystemImpl) {
private[akka] def init(_system: ActorSystemImpl): Unit = {
system = _system
rootGuardian.start()
// chain death watchers so that killing guardian stops the application

View file

@ -854,8 +854,8 @@ private[akka] class ActorSystemImpl(
}
def start(): this.type = _start
def registerOnTermination[T](code: T) { registerOnTermination(new Runnable { def run = code }) }
def registerOnTermination(code: Runnable) { terminationCallbacks.add(code) }
def registerOnTermination[T](code: T): Unit = { registerOnTermination(new Runnable { def run = code }) }
def registerOnTermination(code: Runnable): Unit = { terminationCallbacks.add(code) }
override def terminate(): Future[Terminated] = {
if (!settings.LogDeadLettersDuringShutdown) logDeadLetterListener foreach stop
@ -950,7 +950,7 @@ private[akka] class ActorSystemImpl(
def hasExtension(ext: ExtensionId[_ <: Extension]): Boolean = findExtension(ext) != null
private def loadExtensions() {
private def loadExtensions(): Unit = {
/**
* @param throwOnLoadFail Throw exception when an extension fails to load (needed for backwards compatibility)
*/

View file

@ -510,7 +510,7 @@ trait FSM[S, D] extends Actor with Listeners with ActorLogging {
implicit final def total2pf(transitionHandler: (S, S) ⇒ Unit): TransitionHandler =
new TransitionHandler {
def isDefinedAt(in: (S, S)) = true
def apply(in: (S, S)) { transitionHandler(in._1, in._2) }
def apply(in: (S, S)): Unit = { transitionHandler(in._1, in._2) }
}
/**
@ -622,7 +622,7 @@ trait FSM[S, D] extends Actor with Listeners with ActorLogging {
* transition handling
*/
private var transitionEvent: List[TransitionHandler] = Nil
private def handleTransition(prev: S, next: S) {
private def handleTransition(prev: S, next: S): Unit = {
val tuple = (prev, next)
for (te ← transitionEvent) { if (te.isDefinedAt(tuple)) te(tuple) }
}
@ -789,7 +789,7 @@ trait LoggingFSM[S, D] extends FSM[S, D] { this: Actor ⇒
private var pos = 0
private var full = false
private def advance() {
private def advance(): Unit = {
val n = pos + 1
if (n == logDepth) {
full = true

View file

@ -65,13 +65,13 @@ trait Inbox { this: ActorDSL.type ⇒
var clientsByTimeout = TreeSet.empty[Query]
var printedWarning = false
def enqueueQuery(q: Query) {
def enqueueQuery(q: Query): Unit = {
val query = q withClient sender()
clients enqueue query
clientsByTimeout += query
}
def enqueueMessage(msg: Any) {
def enqueueMessage(msg: Any): Unit = {
if (messages.size < size) messages enqueue msg
else {
if (!printedWarning) {
@ -215,7 +215,7 @@ trait Inbox { this: ActorDSL.type ⇒
* Overridden finalizer which will try to stop the actor once this Inbox
* is no longer referenced.
*/
override def finalize() {
override def finalize(): Unit = {
system.stop(receiver)
}
}

View file

@ -139,7 +139,7 @@ private[akka] trait FaultHandling { this: ActorCell ⇒
}
}
protected def terminate() {
protected def terminate(): Unit = {
setReceiveTimeout(Duration.Undefined)
cancelReceiveTimeout
@ -200,7 +200,7 @@ private[akka] trait FaultHandling { this: ActorCell ⇒
}
}
private def finishTerminate() {
private def finishTerminate(): Unit = {
val a = actor
/* The following order is crucial for things to work properly. Only change this if you're very confident and lucky.
*

View file

@ -24,7 +24,7 @@ private[akka] trait ReceiveTimeout { this: ActorCell ⇒
final def setReceiveTimeout(timeout: Duration): Unit = receiveTimeoutData = receiveTimeoutData.copy(_1 = timeout)
final def checkReceiveTimeout() {
final def checkReceiveTimeout(): Unit = {
val recvtimeout = receiveTimeoutData
recvtimeout._1 match {
case f: FiniteDuration ⇒

View file

@ -192,7 +192,7 @@ abstract class MessageDispatcher(val configurator: MessageDispatcherConfigurator
*
* INTERNAL API
*/
protected[akka] def register(actor: ActorCell) {
protected[akka] def register(actor: ActorCell): Unit = {
if (debug) actors.put(this, actor.self)
addInhabitants(+1)
}
@ -202,7 +202,7 @@ abstract class MessageDispatcher(val configurator: MessageDispatcherConfigurator
*
* INTERNAL API
*/
protected[akka] def unregister(actor: ActorCell) {
protected[akka] def unregister(actor: ActorCell): Unit = {
if (debug) actors.remove(this, actor.self)
addInhabitants(-1)
val mailBox = actor.swapMailbox(mailboxes.deadLetterMailbox)
@ -212,7 +212,7 @@ abstract class MessageDispatcher(val configurator: MessageDispatcherConfigurator
private val shutdownAction = new Runnable {
@tailrec
final def run() {
final def run(): Unit = {
shutdownSchedule match {
case SCHEDULED ⇒
try {
@ -260,14 +260,14 @@ abstract class MessageDispatcher(val configurator: MessageDispatcherConfigurator
*
* INTERNAL API
*/
protected[akka] def systemDispatch(receiver: ActorCell, invocation: SystemMessage)
protected[akka] def systemDispatch(receiver: ActorCell, invocation: SystemMessage): Unit
/**
* Will be called when the dispatcher is to queue an invocation for execution
*
* INTERNAL API
*/
protected[akka] def dispatch(receiver: ActorCell, invocation: Envelope)
protected[akka] def dispatch(receiver: ActorCell, invocation: Envelope): Unit
/**
* Suggest to register the provided mailbox for execution
@ -295,7 +295,7 @@ abstract class MessageDispatcher(val configurator: MessageDispatcherConfigurator
/**
* INTERNAL API
*/
protected[akka] def executeTask(invocation: TaskInvocation)
protected[akka] def executeTask(invocation: TaskInvocation): Unit
/**
* Called one time every time an actor is detached from this dispatcher and this dispatcher has no actors left attached

View file

@ -73,7 +73,7 @@ private[akka] class CachingConfig(_config: Config) extends Config {
case e ⇒ e
}
def checkValid(reference: Config, restrictToPaths: String*) {
def checkValid(reference: Config, restrictToPaths: String*): Unit = {
config.checkValid(reference, restrictToPaths: _*)
}

View file

@ -67,7 +67,7 @@ class Dispatcher(
/**
* INTERNAL API
*/
protected[akka] def executeTask(invocation: TaskInvocation) {
protected[akka] def executeTask(invocation: TaskInvocation): Unit = {
try {
executorService execute invocation
} catch {

View file

@ -271,7 +271,7 @@ private[akka] abstract class Mailbox(val messageQueue: MessageQueue)
* becomes closed (because of processing a Terminate message), dump all
* already dequeued message to deadLetters.
*/
final def processAllSystemMessages() {
final def processAllSystemMessages(): Unit = {
var interruption: Throwable = null
var messageList = systemDrain(SystemMessageList.LNil)
while ((messageList.nonEmpty) && !isClosed) {
@ -909,7 +909,7 @@ object BoundedControlAwareMailbox {
}
}
private def signalNotFull() {
private def signalNotFull(): Unit = {
putLock.lock()
try {
@ -919,7 +919,7 @@ object BoundedControlAwareMailbox {
}
}
private final def enqueueWithTimeout(q: Queue[Envelope], receiver: ActorRef, envelope: Envelope) {
private final def enqueueWithTimeout(q: Queue[Envelope], receiver: ActorRef, envelope: Envelope): Unit = {
var remaining = pushTimeOut.toNanos
putLock.lockInterruptibly()

View file

@ -210,7 +210,7 @@ trait ExecutorServiceDelegate extends ExecutorService {
def execute(command: Runnable) = executor.execute(command)
def shutdown() { executor.shutdown() }
def shutdown(): Unit = { executor.shutdown() }
def shutdownNow() = executor.shutdownNow()

View file

@ -353,7 +353,7 @@ private[akka] final class AffinityPoolConfigurator(config: Config, prerequisites
}
trait RejectionHandler {
def reject(command: Runnable, service: ExecutorService)
def reject(command: Runnable, service: ExecutorService): Unit
}
trait RejectionHandlerFactory {

View file

@ -20,7 +20,7 @@ protected[akka] class ActorClassificationUnsubscriber(bus: ManagedActorClassific
private var atSeq = 0
private def nextSeq = atSeq + 1
override def preStart() {
override def preStart(): Unit = {
super.preStart()
if (debug) context.system.eventStream.publish(Logging.Debug(simpleName(getClass), getClass, s"will monitor $bus"))
}

View file

@ -56,7 +56,7 @@ class EventStream(sys: ActorSystem, private val debug: Boolean) extends LoggingB
ret
}
override def unsubscribe(subscriber: ActorRef) {
override def unsubscribe(subscriber: ActorRef): Unit = {
if (subscriber eq null) throw new IllegalArgumentException("subscriber is null")
super.unsubscribe(subscriber)
if (debug) publish(Logging.Debug(simpleName(this), this.getClass, "unsubscribing " + subscriber + " from all channels"))

View file

@ -24,7 +24,7 @@ protected[akka] class EventStreamUnsubscriber(eventStream: EventStream, debug: B
import EventStreamUnsubscriber._
override def preStart() {
override def preStart(): Unit = {
if (debug) eventStream.publish(Logging.Debug(simpleName(getClass), getClass, s"registering unsubscriber with $eventStream"))
eventStream initUnsubscriber self
}

View file

@ -73,7 +73,7 @@ trait LoggingBus extends ActorEventBus {
_logLevel = level
}
private def setUpStdoutLogger(config: Settings) {
private def setUpStdoutLogger(config: Settings): Unit = {
val level = levelFor(config.StdoutLogLevel) getOrElse {
// only log initialization errors directly with StandardOutLogger.print
StandardOutLogger.print(Error(new LoggerException, simpleName(this), this.getClass, "unknown akka.stdout-loglevel " + config.StdoutLogLevel))
@ -89,7 +89,7 @@ trait LoggingBus extends ActorEventBus {
/**
* Internal Akka use only
*/
private[akka] def startStdoutLogger(config: Settings) {
private[akka] def startStdoutLogger(config: Settings): Unit = {
setUpStdoutLogger(config)
publish(Debug(simpleName(this), this.getClass, "StandardOutLogger started"))
}
@ -97,7 +97,7 @@ trait LoggingBus extends ActorEventBus {
/**
* Internal Akka use only
*/
private[akka] def startDefaultLoggers(system: ActorSystemImpl) {
private[akka] def startDefaultLoggers(system: ActorSystemImpl): Unit = {
val logName = simpleName(this) + "(" + system + ")"
val level = levelFor(system.settings.LogLevel) getOrElse {
// only log initialization errors directly with StandardOutLogger.print
@ -152,7 +152,7 @@ trait LoggingBus extends ActorEventBus {
/**
* Internal Akka use only
*/
private[akka] def stopDefaultLoggers(system: ActorSystem) {
private[akka] def stopDefaultLoggers(system: ActorSystem): Unit = {
val level = _logLevel // volatile access before reading loggers
if (!(loggers contains StandardOutLogger)) {
setUpStdoutLogger(system.settings)
@ -1203,7 +1203,7 @@ trait LoggingAdapter {
* Log message at info level.
* @see [[LoggingAdapter]]
*/
def info(message: String) { if (isInfoEnabled) notifyInfo(message) }
def info(message: String): Unit = { if (isInfoEnabled) notifyInfo(message) }
/**
* Message template with 1 replacement argument.
*
@ -1232,7 +1232,7 @@ trait LoggingAdapter {
* Log message at debug level.
* @see [[LoggingAdapter]]
*/
def debug(message: String) { if (isDebugEnabled) notifyDebug(message) }
def debug(message: String): Unit = { if (isDebugEnabled) notifyDebug(message) }
/**
* Message template with 1 replacement argument.
*
@ -1260,7 +1260,7 @@ trait LoggingAdapter {
/**
* Log message at the specified log level.
*/
def log(level: Logging.LogLevel, message: String) { if (isEnabled(level)) notifyLog(level, message) }
def log(level: Logging.LogLevel, message: String): Unit = { if (isEnabled(level)) notifyLog(level, message) }
/**
* Message template with 1 replacement argument.
*

View file

@ -29,7 +29,7 @@ class JavaLogger extends Actor with RequiresMessageQueue[LoggerMessageQueueSeman
case InitializeLogger(_) ⇒ sender() ! LoggerInitialized
}
def log(level: logging.Level, cause: Throwable, event: LogEvent) {
def log(level: logging.Level, cause: Throwable, event: LogEvent): Unit = {
val logger = Logger(event.logClass, event.logSource)
val record = new logging.LogRecord(level, String.valueOf(event.message))
record.setLoggerName(logger.getName)

View file

@ -9,7 +9,7 @@ import scala.util.control.NonFatal
trait BufferPool {
def acquire(): ByteBuffer
def release(buf: ByteBuffer)
def release(buf: ByteBuffer): Unit
}
/**

View file

@ -47,7 +47,7 @@ private[io] trait ChannelRegistry {
* Registers the given channel with the selector, creates a ChannelRegistration instance for it
* and dispatches it back to the channelActor calling this `register`
*/
def register(channel: SelectableChannel, initialOps: Int)(implicit channelActor: ActorRef)
def register(channel: SelectableChannel, initialOps: Int)(implicit channelActor: ActorRef): Unit
}
/**
@ -241,8 +241,8 @@ private[io] object SelectionHandler {
// FIXME: Add possibility to signal failure of task to someone
private abstract class Task extends Runnable {
def tryRun()
def run() {
def tryRun(): Unit
def run(): Unit = {
try tryRun()
catch {
case _: CancelledKeyException ⇒ // ok, can be triggered while setting interest ops

View file

@ -124,7 +124,7 @@ private[io] class TcpListener(
} else if (bind.pullMode) limit else BatchAcceptLimit
}
override def postStop() {
override def postStop(): Unit = {
try {
if (channel.isOpen) {
log.debug("Closing serverSocketChannel after being stopped")

View file

@ -84,7 +84,7 @@ private[io] class UdpListener(
}
def doReceive(registration: ChannelRegistration, handler: ActorRef): Unit = {
@tailrec def innerReceive(readsLeft: Int, buffer: ByteBuffer) {
@tailrec def innerReceive(readsLeft: Int, buffer: ByteBuffer): Unit = {
buffer.clear()
buffer.limit(DirectBufferSize)

View file

@ -17,7 +17,7 @@ private[akka] object DomainName {
(name.length + 2).toShort
}
def write(it: ByteStringBuilder, name: String) {
def write(it: ByteStringBuilder, name: String): Unit = {
for (label ← name.split('.')) {
it.putByte(label.length.toByte)
for (c ← label) {

View file

@ -695,7 +695,7 @@ class CircuitBreaker(
*
* @return Promise which executes listener in supplied [[scala.concurrent.ExecutionContext]]
*/
protected def notifyTransitionListeners() {
protected def notifyTransitionListeners(): Unit = {
if (hasListeners) {
val iterator = listeners.iterator
while (iterator.hasNext) {

View file

@ -42,7 +42,7 @@ trait PipeToSupport {
final class PipeableCompletionStage[T](val future: CompletionStage[T])(implicit executionContext: ExecutionContext) {
def pipeTo(recipient: ActorRef)(implicit sender: ActorRef = Actor.noSender): CompletionStage[T] = {
future whenComplete new BiConsumer[T, Throwable] {
override def accept(t: T, ex: Throwable) {
override def accept(t: T, ex: Throwable): Unit = {
if (t != null) recipient ! t
if (ex != null) recipient ! Status.Failure(ex)
}
@ -50,7 +50,7 @@ trait PipeToSupport {
}
def pipeToSelection(recipient: ActorSelection)(implicit sender: ActorRef = Actor.noSender): CompletionStage[T] = {
future whenComplete new BiConsumer[T, Throwable] {
override def accept(t: T, ex: Throwable) {
override def accept(t: T, ex: Throwable): Unit = {
if (t != null) recipient ! t
if (ex != null) recipient ! Status.Failure(ex)
}

View file

@ -37,12 +37,12 @@ class BoundedBlockingQueue[E <: AnyRef](
protected def createNotEmptyCondition(): Condition = lock.newCondition()
protected def createNotFullCondition(): Condition = lock.newCondition()
def put(e: E) { //Blocks until not full
def put(e: E): Unit = { //Blocks until not full
if (e eq null) throw new NullPointerException
lock.lockInterruptibly()
try {
@tailrec def putElement() {
@tailrec def putElement(): Unit = {
if (backing.size() < maxCapacity) {
require(backing.offer(e))
notEmpty.signal()
@ -148,7 +148,7 @@ class BoundedBlockingQueue[E <: AnyRef](
try backing.contains(e) finally lock.unlock()
}
override def clear() {
override def clear(): Unit = {
lock.lock()
try {
backing.clear()
@ -244,7 +244,7 @@ class BoundedBlockingQueue[E <: AnyRef](
elements(last).asInstanceOf[E]
}
override def remove() {
override def remove(): Unit = {
if (last < 0) throw new IllegalStateException
val target = elements(last)
last = -1 //To avoid 2 subsequent removes without a next in between

View file

@ -618,12 +618,12 @@ object ByteString {
@SerialVersionUID(1L)
private class SerializationProxy(@transient private var orig: ByteString) extends Serializable {
private def writeObject(out: ObjectOutputStream) {
private def writeObject(out: ObjectOutputStream): Unit = {
out.writeByte(orig.byteStringCompanion.SerializationIdentity)
orig.writeToOutputStream(out)
}
private def readObject(in: ObjectInputStream) {
private def readObject(in: ObjectInputStream): Unit = {
val serializationId = in.readByte()
orig = Companion(from = serializationId).readFromInputStream(in)

View file

@ -41,7 +41,7 @@ private[akka] final class SerializedSuspendableExecutionContext(throughput: Int)
val c = state.get
state.compareAndSet(c, c | newState) || addState(newState)
}
@tailrec private final def remState(oldState: Int) {
@tailrec private final def remState(oldState: Int): Unit = {
val c = state.get
if (state.compareAndSet(c, c & ~oldState)) attach() else remState(oldState)
}

View file

@ -98,7 +98,7 @@ class LevelDbBatchingBenchmark {
// TOOLS
private def deleteStorage(sys: ActorSystem) {
private def deleteStorage(sys: ActorSystem): Unit = {
val storageLocations = List(
"akka.persistence.journal.leveldb.dir",
"akka.persistence.journal.leveldb-shared.store.dir",

View file

@ -25,7 +25,7 @@ trait Consumer extends Actor with CamelSupport {
* Registers the consumer endpoint. Note: when overriding this method, be sure to
* call 'super.preRestart', otherwise the consumer endpoint will not be registered.
*/
override def preStart() {
override def preStart(): Unit = {
super.preStart()
// Possible FIXME. registering the endpoint here because of problems
// with order of execution of trait body in the Java version (UntypedConsumerActor)
@ -34,7 +34,7 @@ trait Consumer extends Actor with CamelSupport {
register()
}
private[this] def register() {
private[this] def register(): Unit = {
camel.supervisor ! Register(self, endpointUri, Some(ConsumerConfig(activationTimeout, replyTimeout, autoAck, onRouteDefinition)))
}

View file

@ -18,12 +18,12 @@ trait ProducerSupport extends Actor with CamelSupport {
private[this] var messages = Vector.empty[(ActorRef, Any)]
private[this] var producerChild: Option[ActorRef] = None
override def preStart() {
override def preStart(): Unit = {
super.preStart()
register()
}
private[this] def register() { camel.supervisor ! Register(self, endpointUri) }
private[this] def register(): Unit = { camel.supervisor ! Register(self, endpointUri) }
/**
* CamelMessage headers to copy by default from request message to response-message.

View file

@ -146,9 +146,9 @@ private[camel] class Registry(activationTracker: ActorRef) extends Actor with Ca
}
}
private def deRegisterConsumer(actorRef: ActorRef) { consumerRegistrar ! DeRegister(actorRef) }
private def deRegisterConsumer(actorRef: ActorRef): Unit = { consumerRegistrar ! DeRegister(actorRef) }
private def deRegisterProducer(actorRef: ActorRef) { producerRegistrar ! DeRegister(actorRef) }
private def deRegisterProducer(actorRef: ActorRef): Unit = { producerRegistrar ! DeRegister(actorRef) }
}
/**

View file

@ -126,7 +126,7 @@ private[camel] class ActorProducer(val endpoint: ActorEndpoint, camel: Camel) ex
*/
private[camel] def processExchangeAdapter(exchange: CamelExchangeAdapter): Unit = {
val isDone = new CountDownLatch(1)
processExchangeAdapter(exchange, new AsyncCallback { def done(doneSync: Boolean) { isDone.countDown() } })
processExchangeAdapter(exchange, new AsyncCallback { def done(doneSync: Boolean): Unit = { isDone.countDown() } })
isDone.await(endpoint.replyTimeout.length, endpoint.replyTimeout.unit)
}

View file

@ -37,7 +37,7 @@ class ActivationIntegrationTest extends WordSpec with Matchers with SharedCamelS
def endpointUri = "direct:a3"
def receive = { case _ ⇒ {} }
override def postStop() {
override def postStop(): Unit = {
super.postStop()
latch.countDown()
}
@ -69,7 +69,7 @@ class ActivationIntegrationTest extends WordSpec with Matchers with SharedCamelS
class TestConsumer(uri: String, latch: TestLatch) extends Consumer {
def endpointUri = uri
override def preStart() {
override def preStart(): Unit = {
Await.ready(latch, 60 seconds)
super.preStart()
}

View file

@ -62,7 +62,7 @@ class ConcurrentActivationTest extends WordSpec with Matchers with NonSharedCame
// should be the size of the activated activated producers and consumers
deactivations.size should ===(2 * number * number)
def partitionNames(refs: immutable.Seq[ActorRef]) = refs.map(_.path.name).partition(_.startsWith("concurrent-test-echo-consumer"))
def assertContainsSameElements(lists: (Seq[_], Seq[_])) {
def assertContainsSameElements(lists: (Seq[_], Seq[_])): Unit = {
val (a, b) = lists
a.intersect(b).size should ===(a.size)
}
@ -144,7 +144,7 @@ class Registrar(val start: Int, val number: Int, activationsPromise: Promise[Lis
}
}
def add(actor: Actor, name: String) {
def add(actor: Actor, name: String): Unit = {
val ref = context.actorOf(Props(actor), name)
actorRefs = actorRefs + ref
val result = camel.activationFutureFor(ref)

View file

@ -72,7 +72,7 @@ class ConsumerIntegrationTest extends WordSpec with Matchers with NonSharedCamel
case m: CamelMessage ⇒ sender() ! "received " + m.bodyAs[String]
}
override def postRestart(reason: Throwable) {
override def postRestart(reason: Throwable): Unit = {
restarted.countDown()
}
}, "direct-a2")
@ -182,7 +182,7 @@ class ErrorThrowingConsumer(override val endpointUri: String) extends Consumer {
def receive = {
case msg: CamelMessage ⇒ throw new TestException("error: %s" format msg.body)
}
override def preRestart(reason: Throwable, message: Option[Any]) {
override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
super.preRestart(reason, message)
sender() ! Failure(reason)
}
@ -197,7 +197,7 @@ class ErrorRespondingConsumer(override val endpointUri: String) extends Consumer
rd.onException(classOf[TestException]).handled(true).transform(Builder.body.append(" has an error")).end
}
final override def preRestart(reason: Throwable, message: Option[Any]) {
final override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
super.preRestart(reason, message)
sender() ! Failure(reason)
}
@ -213,7 +213,7 @@ class FailingOnceConsumer(override val endpointUri: String) extends Consumer {
throw new TestException("rejected: %s" format msg.body)
}
final override def preRestart(reason: Throwable, message: Option[Any]) {
final override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
super.preRestart(reason, message)
sender() ! Failure(reason)
}

View file

@ -27,7 +27,7 @@ class ProducerFeatureTest extends TestKit(ActorSystem("ProducerFeatureTest", Akk
import ProducerFeatureTest._
implicit def camel = CamelExtension(system)
override protected def afterAll() {
override protected def afterAll(): Unit = {
super.afterAll()
shutdown()
}
@ -37,9 +37,9 @@ class ProducerFeatureTest extends TestKit(ActorSystem("ProducerFeatureTest", Akk
camelContext.setUseBreadcrumb(false)
val timeoutDuration = 1 second
implicit val timeout = Timeout(timeoutDuration)
override protected def beforeAll { camelContext.addRoutes(new TestRoute(system)) }
override protected def beforeAll: Unit = { camelContext.addRoutes(new TestRoute(system)) }
override protected def afterEach { mockEndpoint.reset() }
override protected def afterEach: Unit = { mockEndpoint.reset() }
"A Producer on a sync Camel route" must {
@ -279,7 +279,7 @@ object ProducerFeatureTest {
else msg
}
override def postStop() {
override def postStop(): Unit = {
for (msg ← lastMessage; aref ← lastSender) context.parent ! ((aref, msg))
super.postStop()
}
@ -288,7 +288,7 @@ object ProducerFeatureTest {
class TestProducer(uri: String, upper: Boolean = false) extends Actor with Producer {
def endpointUri = uri
override def preRestart(reason: Throwable, message: Option[Any]) {
override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
//overriding on purpose so it doesn't try to deRegister and reRegister at restart,
// which would cause a deadletter message in the test output.
}
@ -339,7 +339,7 @@ object ProducerFeatureTest {
class TestRoute(system: ActorSystem) extends RouteBuilder {
val responder = system.actorOf(Props[TestResponder], name = "TestResponder")
def configure {
def configure: Unit = {
from("direct:forward-test-1").to("mock:mock")
// for one-way messaging tests
from("direct:producer-test-1").to("mock:mock")

View file

@ -21,7 +21,7 @@ private[camel] object TestSupport {
def start(actor: Actor, name: String)(implicit system: ActorSystem, timeout: Timeout): ActorRef =
Await.result(CamelExtension(system).activationFutureFor(system.actorOf(Props(actor), name))(timeout, system.dispatcher), timeout.duration)
def stop(actorRef: ActorRef)(implicit system: ActorSystem, timeout: Timeout) {
def stop(actorRef: ActorRef)(implicit system: ActorSystem, timeout: Timeout): Unit = {
system.stop(actorRef)
Await.result(CamelExtension(system).deactivationFutureFor(actorRef)(timeout, system.dispatcher), timeout.duration)
}
@ -51,7 +51,7 @@ private[camel] object TestSupport {
implicit lazy val system = ActorSystem("SharedCamelSystem", AkkaSpec.testConf)
implicit lazy val camel = CamelExtension(system)
abstract override protected def afterAll() {
abstract override protected def afterAll(): Unit = {
super.afterAll()
TestKit.shutdownActorSystem(system)
}
@ -61,13 +61,13 @@ private[camel] object TestSupport {
implicit var system: ActorSystem = _
implicit var camel: Camel = _
override protected def beforeEach() {
override protected def beforeEach(): Unit = {
super.beforeEach()
system = ActorSystem("NonSharedCamelSystem", AkkaSpec.testConf)
camel = CamelExtension(system)
}
override protected def afterEach() {
override protected def afterEach(): Unit = {
TestKit.shutdownActorSystem(system)
super.afterEach()
}

View file

@ -79,7 +79,7 @@ class UntypedProducerTest extends WordSpec with Matchers with BeforeAndAfterAll
object UntypedProducerTest {
class TestRoute extends RouteBuilder {
def configure {
def configure: Unit = {
from("direct:forward-test-1").to("mock:mock")
from("direct:producer-test-1").process(new Processor() {
def process(exchange: Exchange) = {

View file

@ -13,14 +13,14 @@ import akka.camel.internal.ActivationProtocol._
class ActivationTrackerTest extends TestKit(ActorSystem("ActivationTrackerTest")) with WordSpecLike with Matchers with BeforeAndAfterAll with BeforeAndAfterEach with GivenWhenThen {
override protected def afterAll() { shutdown() }
override protected def afterAll(): Unit = { shutdown() }
var actor: TestProbe = _
var awaiting: Awaiting = _
var anotherAwaiting: Awaiting = _
val cause = new Exception("cause of failure")
override protected def beforeEach() {
override protected def beforeEach(): Unit = {
actor = TestProbe()
awaiting = new Awaiting(actor)
anotherAwaiting = new Awaiting(actor)


@ -113,7 +113,7 @@ class ActorProducerTest extends TestKit(ActorSystem("ActorProducerTest")) with W
"response is not sent by actor" must {
val latch = TestLatch(1)
val callback = new AsyncCallback {
def done(doneSync: Boolean) {
def done(doneSync: Boolean): Unit = {
latch.countDown()
}
}
@ -339,7 +339,7 @@ private[camel] trait ActorProducerFixture extends MockitoSugar with BeforeAndAft
var actorEndpointPath: ActorEndpointPath = _
var actorComponent: ActorComponent = _
override protected def beforeEach() {
override protected def beforeEach(): Unit = {
asyncCallback = createAsyncCallback
probe = TestProbe()
@ -380,7 +380,7 @@ private[camel] trait ActorProducerFixture extends MockitoSugar with BeforeAndAft
message = CamelMessage(null, null)
}
override protected def afterAll() {
override protected def afterAll(): Unit = {
shutdown()
}
@ -396,12 +396,12 @@ private[camel] trait ActorProducerFixture extends MockitoSugar with BeforeAndAft
class TestAsyncCallback extends AsyncCallback {
def expectNoCallWithin(duration: Duration): Unit =
if (callbackReceived.await(duration.length, duration.unit)) fail("NOT expected callback, but received one!")
def awaitCalled(timeout: Duration = 1 second) { valueWithin(1 second) }
def awaitCalled(timeout: Duration = 1 second): Unit = { valueWithin(1 second) }
val callbackReceived = new CountDownLatch(1)
val callbackValue = new AtomicBoolean()
def done(doneSync: Boolean) {
def done(doneSync: Boolean): Unit = {
callbackValue set doneSync
callbackReceived.countDown()
}
@ -422,7 +422,7 @@ private[camel] trait ActorProducerFixture extends MockitoSugar with BeforeAndAft
endpoint
}
def prepareMocks(actor: ActorRef, message: CamelMessage = message, outCapable: Boolean) {
def prepareMocks(actor: ActorRef, message: CamelMessage = message, outCapable: Boolean): Unit = {
when(actorEndpointPath.findActorIn(any[ActorSystem])) thenReturn Option(actor)
when(exchange.toRequestMessage(any[Map[String, Any]])) thenReturn message
when(exchange.isOutCapable) thenReturn outCapable


@ -107,12 +107,12 @@ abstract class ClusterShardingFailureSpec(config: ClusterShardingFailureSpecConf
val storageLocations = List(new File(system.settings.config.getString(
"akka.cluster.sharding.distributed-data.durable.lmdb.dir")).getParentFile)
override protected def atStartup() {
override protected def atStartup(): Unit = {
storageLocations.foreach(dir ⇒ if (dir.exists) FileUtils.deleteQuietly(dir))
enterBarrier("startup")
}
override protected def afterTermination() {
override protected def afterTermination(): Unit = {
storageLocations.foreach(dir ⇒ if (dir.exists) FileUtils.deleteQuietly(dir))
}


@ -88,12 +88,12 @@ abstract class ClusterShardingGracefulShutdownSpec(config: ClusterShardingGracef
val storageLocations = List(new File(system.settings.config.getString(
"akka.cluster.sharding.distributed-data.durable.lmdb.dir")).getParentFile)
override protected def atStartup() {
override protected def atStartup(): Unit = {
storageLocations.foreach(dir ⇒ if (dir.exists) FileUtils.deleteQuietly(dir))
enterBarrier("startup")
}
override protected def afterTermination() {
override protected def afterTermination(): Unit = {
storageLocations.foreach(dir ⇒ if (dir.exists) FileUtils.deleteQuietly(dir))
}


@ -107,12 +107,12 @@ abstract class ClusterShardingLeavingSpec(config: ClusterShardingLeavingSpecConf
val storageLocations = List(new File(system.settings.config.getString(
"akka.cluster.sharding.distributed-data.durable.lmdb.dir")).getParentFile)
override protected def atStartup() {
override protected def atStartup(): Unit = {
storageLocations.foreach(dir ⇒ if (dir.exists) FileUtils.deleteQuietly(dir))
enterBarrier("startup")
}
override protected def afterTermination() {
override protected def afterTermination(): Unit = {
storageLocations.foreach(dir ⇒ if (dir.exists) FileUtils.deleteQuietly(dir))
}


@ -87,12 +87,12 @@ abstract class ClusterShardingMinMembersSpec(config: ClusterShardingMinMembersSp
val storageLocations = List(new File(system.settings.config.getString(
"akka.cluster.sharding.distributed-data.durable.lmdb.dir")).getParentFile)
override protected def atStartup() {
override protected def atStartup(): Unit = {
storageLocations.foreach(dir ⇒ if (dir.exists) FileUtils.deleteQuietly(dir))
enterBarrier("startup")
}
override protected def afterTermination() {
override protected def afterTermination(): Unit = {
storageLocations.foreach(dir ⇒ if (dir.exists) FileUtils.deleteQuietly(dir))
}


@ -133,12 +133,12 @@ abstract class ClusterShardingRememberEntitiesNewExtractorSpec(config: ClusterSh
val storageLocations = List(new File(system.settings.config.getString(
"akka.cluster.sharding.distributed-data.durable.lmdb.dir")).getParentFile)
override protected def atStartup() {
override protected def atStartup(): Unit = {
storageLocations.foreach(dir ⇒ if (dir.exists) FileUtils.deleteQuietly(dir))
enterBarrier("startup")
}
override protected def afterTermination() {
override protected def afterTermination(): Unit = {
storageLocations.foreach(dir ⇒ if (dir.exists) FileUtils.deleteQuietly(dir))
}


@ -110,12 +110,12 @@ abstract class ClusterShardingRememberEntitiesSpec(config: ClusterShardingRememb
val storageLocations = List(new File(system.settings.config.getString(
"akka.cluster.sharding.distributed-data.durable.lmdb.dir")).getParentFile)
override protected def atStartup() {
override protected def atStartup(): Unit = {
storageLocations.foreach(dir ⇒ if (dir.exists) FileUtils.deleteQuietly(dir))
enterBarrier("startup")
}
override protected def afterTermination() {
override protected def afterTermination(): Unit = {
storageLocations.foreach(dir ⇒ if (dir.exists) FileUtils.deleteQuietly(dir))
}


@ -262,12 +262,12 @@ abstract class ClusterShardingSpec(config: ClusterShardingSpecConfig) extends Mu
val storageLocations = List(new File(system.settings.config.getString(
"akka.cluster.sharding.distributed-data.durable.lmdb.dir")).getParentFile)
override protected def atStartup() {
override protected def atStartup(): Unit = {
storageLocations.foreach(dir ⇒ if (dir.exists) FileUtils.deleteQuietly(dir))
enterBarrier("startup")
}
override protected def afterTermination() {
override protected def afterTermination(): Unit = {
storageLocations.foreach(dir ⇒ if (dir.exists) FileUtils.deleteQuietly(dir))
}


@ -199,7 +199,7 @@ final class ClusterSingletonProxy(singletonManagerPath: String, settings: Cluste
/**
* Discard old singleton ActorRef and send a periodic message to self to identify the singleton.
*/
def identifySingleton() {
def identifySingleton(): Unit = {
import context.dispatcher
log.debug("Creating singleton identification timer...")
identifyCounter += 1


@ -29,7 +29,7 @@ object DistributedPubSubMediatorRouterSpec {
}
trait DistributedPubSubMediatorRouterSpec { this: WordSpecLike with TestKit with ImplicitSender ⇒
def nonUnwrappingPubSub(mediator: ActorRef, testActor: ActorRef, msg: Any) {
def nonUnwrappingPubSub(mediator: ActorRef, testActor: ActorRef, msg: Any): Unit = {
val path = testActor.path.toStringWithoutAddress


@ -55,7 +55,7 @@ object ClusterSingletonProxySpec {
"user/singletonManager",
settings = ClusterSingletonProxySettings(system)), s"singletonProxy-${cluster.selfAddress.port.getOrElse(0)}")
def testProxy(msg: String) {
def testProxy(msg: String): Unit = {
val probe = TestProbe()
probe.send(proxy, msg)
// 25 seconds to make sure the singleton was started up


@ -523,7 +523,7 @@ private[cluster] final class ClusterDomainEventPublisher extends Actor with Acto
var membershipState: MembershipState = emptyMembershipState
def selfDc = cluster.settings.SelfDataCenter
override def preRestart(reason: Throwable, message: Option[Any]) {
override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
// don't postStop when restarted, no children to stop
}


@ -112,19 +112,19 @@ trait ClusterNodeMBean {
* The address format is `akka.tcp://actor-system-name@hostname:port`.
* A 'Join(thisNodeAddress)' command is sent to the node to join.
*/
def join(address: String)
def join(address: String): Unit
/**
* Send command to issue state transition to LEAVING for the node specified by 'address'.
* The address format is `akka.tcp://actor-system-name@hostname:port`
*/
def leave(address: String)
def leave(address: String): Unit
/**
* Send command to DOWN the node specified by 'address'.
* The address format is `akka.tcp://actor-system-name@hostname:port`
*/
def down(address: String)
def down(address: String): Unit
}
/**


@ -187,7 +187,7 @@ class GossipSpec extends WordSpec with Matchers {
val g2 = (Gossip(members = SortedSet(a1, b1, c1, d1)) :+ vclockNode).seen(a1.uniqueAddress).seen(c1.uniqueAddress)
val g3 = (g1 copy (version = g2.version)).seen(d1.uniqueAddress)
def checkMerged(merged: Gossip) {
def checkMerged(merged: Gossip): Unit = {
val seen = merged.overview.seen.toSeq
seen.length should ===(0)


@ -46,7 +46,7 @@ class JavaLogger extends Actor with RequiresMessageQueue[LoggerMessageQueueSeman
}
@inline
def log(level: logging.Level, cause: Throwable, event: LogEvent) {
def log(level: logging.Level, cause: Throwable, event: LogEvent): Unit = {
val logger = logging.Logger.getLogger(event.logSource)
val record = new logging.LogRecord(level, String.valueOf(event.message))
record.setLoggerName(logger.getName)
@ -90,7 +90,7 @@ trait JavaLoggingAdapter extends LoggingAdapter {
log(logging.Level.CONFIG, null, message)
@inline
def log(level: logging.Level, cause: Throwable, message: String) {
def log(level: logging.Level, cause: Throwable, message: String): Unit = {
val record = new logging.LogRecord(level, message)
record.setLoggerName(logger.getName)
record.setThrown(cause)
@ -106,7 +106,7 @@ trait JavaLoggingAdapter extends LoggingAdapter {
}
// it is unfortunate that this workaround is needed
private def updateSource(record: logging.LogRecord) {
private def updateSource(record: logging.LogRecord): Unit = {
val stack = Thread.currentThread.getStackTrace
val source = stack.find {
frame ⇒


@ -271,7 +271,7 @@ class ReliableProxy(targetPath: ActorPath, retryAfter: FiniteDuration,
case _ ⇒ SupervisorStrategy.Escalate
}
override def postStop() {
override def postStop(): Unit = {
logDebug("Stopping proxy and sending {} messages to subscribers in Unsent", stateData.size)
gossip(ProxyTerminated(self, Unsent(stateData)))
super.postStop()
@ -381,7 +381,7 @@ class ReliableProxy(targetPath: ActorPath, retryAfter: FiniteDuration,
*
* This and nextBackoff are meant to be implemented by subclasses.
*/
def resetBackoff() {}
def resetBackoff(): Unit = {}
/**
* Returns the next retry interval duration. By default each interval is the same, reconnectAfter.


@ -36,7 +36,7 @@ class ReliableProxySpec extends MultiNodeSpec(ReliableProxySpec) with STMultiNod
override def initialParticipants = roles.size
override def afterEach() {
override def afterEach(): Unit = {
runOn(local) {
testConductor.passThrough(local, remote, Direction.Both).await
}


@ -32,12 +32,12 @@ class JavaLoggerSpec extends AkkaSpec(JavaLoggerSpec.config) {
val logger = logging.Logger.getLogger("akka://JavaLoggerSpec/user/log")
logger.setUseParentHandlers(false) // turn off output of test LogRecords
logger.addHandler(new logging.Handler {
def publish(record: logging.LogRecord) {
def publish(record: logging.LogRecord): Unit = {
testActor ! record
}
def flush() {}
def close() {}
def flush(): Unit = {}
def close(): Unit = {}
})
val producer = system.actorOf(Props[JavaLoggerSpec.LogProducer], name = "log")


@ -27,7 +27,7 @@ object PeekMailboxSpec {
togo -= 1
PeekMailboxExtension.ack()
}
override def preRestart(cause: Throwable, msg: Option[Any]) {
override def preRestart(cause: Throwable, msg: Option[Any]): Unit = {
for (m ← msg if m == "DIE") context stop self // for testing the case of mailbox.cleanUp
}
}
@ -103,12 +103,12 @@ class MyActor extends Actor {
//#business-logic-elided
var i = 0
def doStuff(m: Any) {
def doStuff(m: Any): Unit = {
if (i == 1) throw new Exception("DONTWANNA")
i += 1
}
override def postStop() {
override def postStop(): Unit = {
context.system.terminate()
}
//#business-logic-elided


@ -94,7 +94,7 @@ class AccountBalanceRetriever extends Actor with Aggregator {
//#expect-timeout
//#expect-balance
def fetchCheckingAccountsBalance() {
def fetchCheckingAccountsBalance(): Unit = {
context.actorOf(Props[CheckingAccountProxy]) ! GetAccountBalances(id)
expectOnce {
case CheckingAccountBalances(balances) ⇒
@ -104,7 +104,7 @@ class AccountBalanceRetriever extends Actor with Aggregator {
}
//#expect-balance
def fetchSavingsAccountsBalance() {
def fetchSavingsAccountsBalance(): Unit = {
context.actorOf(Props[SavingsAccountProxy]) ! GetAccountBalances(id)
expectOnce {
case SavingsAccountBalances(balances) ⇒
@ -113,7 +113,7 @@ class AccountBalanceRetriever extends Actor with Aggregator {
}
}
def fetchMoneyMarketAccountsBalance() {
def fetchMoneyMarketAccountsBalance(): Unit = {
context.actorOf(Props[MoneyMarketAccountProxy]) ! GetAccountBalances(id)
expectOnce {
case MoneyMarketAccountBalances(balances) ⇒
@ -122,7 +122,7 @@ class AccountBalanceRetriever extends Actor with Aggregator {
}
}
def collectBalances(force: Boolean = false) {
def collectBalances(force: Boolean = false): Unit = {
if (results.size == types.size || force) {
originalSender ! results.toList // Make sure it becomes immutable
context.stop(self)
@ -167,7 +167,7 @@ class ChainingSample extends Actor with Aggregator {
case TimedOut ⇒ processList()
}
def processList() {
def processList(): Unit = {
unexpect(handle)
if (values.size > 0) {
@ -179,7 +179,7 @@ class ChainingSample extends Actor with Aggregator {
}
//#unexpect-sample
def processFinal(eval: List[Int]) {
def processFinal(eval: List[Int]): Unit = {
// Select only the entries coming back from eval
originalSender ! FinalResponse(eval map values)
context.stop(self)


@ -35,7 +35,7 @@ class TimerBasedThrottlerSpec extends TestKit(ActorSystem("TimerBasedThrottlerSp
import TimerBasedThrottlerSpec._
override def afterAll {
override def afterAll: Unit = {
shutdown()
}


@ -20,7 +20,7 @@ class VersionVectorSpec extends TestKit(ActorSystem("VersionVectorSpec"))
val node3 = UniqueAddress(node1.address.copy(port = Some(2553)), 3)
val node4 = UniqueAddress(node1.address.copy(port = Some(2554)), 4)
override def afterAll {
override def afterAll: Unit = {
shutdown()
}


@ -51,7 +51,7 @@ class ReplicatedDataSerializerSpec extends TestKit(ActorSystem(
val ref2 = system.actorOf(Props.empty, "ref2")
val ref3 = system.actorOf(Props.empty, "ref3")
override def afterAll {
override def afterAll: Unit = {
shutdown()
}


@ -51,7 +51,7 @@ class ReplicatorMessageSerializerSpec extends TestKit(ActorSystem(
val keyA = GSetKey[String]("A")
override def afterAll {
override def afterAll: Unit = {
shutdown()
}


@ -123,13 +123,13 @@ class ActorWithMessagesWrapper {
class Hook extends Actor {
var child: ActorRef = _
//#preStart
override def preStart() {
override def preStart(): Unit = {
child = context.actorOf(Props[MyActor], "child")
}
//#preStart
def receive = Actor.emptyBehavior
//#postStop
override def postStop() {
override def postStop(): Unit = {
//#clean-up-some-resources
()
//#clean-up-some-resources


@ -125,7 +125,7 @@ class FSMDocSpec extends MyFavoriteTestFrameWorkPlusAkkaTestKit {
//#alt-transition-syntax
onTransition(handler _)
def handler(from: StateType, to: StateType) {
def handler(from: StateType, to: StateType): Unit = {
// handle it here ...
}
//#alt-transition-syntax


@ -149,7 +149,7 @@ class CounterService extends Actor {
import context.dispatcher // Use this Actors' Dispatcher as ExecutionContext
override def preStart() {
override def preStart(): Unit = {
initStorage()
}
@ -159,7 +159,7 @@ class CounterService extends Actor {
* failing. When it has been stopped we will schedule a Reconnect after a delay.
* Watch the child so we receive Terminated message when it has been terminated.
*/
def initStorage() {
def initStorage(): Unit = {
storage = Some(context.watch(context.actorOf(Props[Storage], name = "storage")))
// Tell the counter, if any, to use the new storage
counter foreach { _ ! UseStorage(storage) }
@ -197,7 +197,7 @@ class CounterService extends Actor {
initStorage()
}
def forwardOrPlaceInBacklog(msg: Any) {
def forwardOrPlaceInBacklog(msg: Any): Unit = {
// We need the initial value from storage before we can start delegate to
// the counter. Before that we place the messages in a backlog, to be sent
// to the counter when it is initialized.
@ -246,7 +246,7 @@ class Counter(key: String, initialValue: Long) extends Actor {
}
def storeCount() {
def storeCount(): Unit = {
// Delegate dangerous work, to protect our valuable state.
// We can continue without storage.
storage foreach { _ ! Store(Entry(key, count)) }

Some files were not shown because too many files have changed in this diff.
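
For reference, every hunk above applies the same mechanical rewrite: Scala procedure syntax (a method body written without = and with an implicit Unit result) becomes a method with an explicit ": Unit =" result type, the form that remains valid once procedure syntax is deprecated and removed in newer Scala versions. A minimal, self-contained sketch of the pattern, using a hypothetical Greeter class that does not appear in this diff:

// Hypothetical illustration only; not part of this commit.
// Before (procedure syntax):
//   def greet(name: String) { println("Hello, " + name) }
// After (explicit Unit result type, as applied throughout this diff):
class Greeter {
  def greet(name: String): Unit = {
    println("Hello, " + name)
  }
}

object GreeterDemo extends App {
  new Greeter().greet("Akka") // prints: Hello, Akka
}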