diff --git a/LICENSE b/LICENSE index 2a9d5f00c4..68c22038ec 100755 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ This software is licensed under the Apache 2 license, quoted below. -Copyright 2009-2011 Scalable Solutions AB [http://scalablesolutions.se] +Copyright 2009-2011 Typesafe Inc. [http://www.typesafe.com] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of diff --git a/akka-actor-tests/src/main/scala/akka/testing/Serializers.scala b/akka-actor-tests/src/main/scala/akka/testing/Serializers.scala index 0a7aa8fb7b..b3836bc201 100644 --- a/akka-actor-tests/src/main/scala/akka/testing/Serializers.scala +++ b/akka-actor-tests/src/main/scala/akka/testing/Serializers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.testing diff --git a/akka-actor-tests/src/test/java/akka/util/JavaDuration.java b/akka-actor-tests/src/test/java/akka/util/JavaDuration.java index aaa9c16115..56e7f68bf6 100644 --- a/akka-actor-tests/src/test/java/akka/util/JavaDuration.java +++ b/akka-actor-tests/src/test/java/akka/util/JavaDuration.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.util; diff --git a/akka-actor-tests/src/test/scala/akka/Messages.scala b/akka-actor-tests/src/test/scala/akka/Messages.scala index 9f54627a18..c6f2116509 100644 --- a/akka-actor-tests/src/test/scala/akka/Messages.scala +++ b/akka-actor-tests/src/test/scala/akka/Messages.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka diff --git a/akka-actor-tests/src/test/scala/akka/actor/actor/ActorFireForgetRequestReplySpec.scala b/akka-actor-tests/src/test/scala/akka/actor/actor/ActorFireForgetRequestReplySpec.scala index c4e8ade76e..9e70a39492 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/actor/ActorFireForgetRequestReplySpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/actor/ActorFireForgetRequestReplySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/actor/ActorRefSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/actor/ActorRefSpec.scala index 7864ea67e7..28c68b15eb 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/actor/ActorRefSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/actor/ActorRefSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/actor/ActorTimeoutSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/actor/ActorTimeoutSpec.scala index 4ff21e2a61..4997f5603a 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/actor/ActorTimeoutSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/actor/ActorTimeoutSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/actor/ChannelSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/actor/ChannelSpec.scala index 61c898f8be..291cf0d40f 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/actor/ChannelSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/actor/ChannelSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/actor/DeployerSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/actor/DeployerSpec.scala index 5fbf2dceaa..3ae5ea2397 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/actor/DeployerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/actor/DeployerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/actor/FSMActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/actor/FSMActorSpec.scala index 3415d37702..028480ddbd 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/actor/FSMActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/actor/FSMActorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/actor/FSMTimingSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/actor/FSMTimingSpec.scala index 1c09abb227..d0656a70fb 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/actor/FSMTimingSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/actor/FSMTimingSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/actor/FSMTransitionSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/actor/FSMTransitionSpec.scala index 0cbaddd8e6..131dd5cb96 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/actor/FSMTransitionSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/actor/FSMTransitionSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/actor/ForwardActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/actor/ForwardActorSpec.scala index 78a1fbfd0a..68d4149e5a 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/actor/ForwardActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/actor/ForwardActorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/actor/HotSwapSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/actor/HotSwapSpec.scala index 53ce1fbbfd..77d6b885b6 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/actor/HotSwapSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/actor/HotSwapSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/actor/LoggingReceiveSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/actor/LoggingReceiveSpec.scala index 748ed13941..02c5b13974 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/actor/LoggingReceiveSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/actor/LoggingReceiveSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/actor/ReceiveTimeoutSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/actor/ReceiveTimeoutSpec.scala index a03193d4a6..6efb3102df 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/actor/ReceiveTimeoutSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/actor/ReceiveTimeoutSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/actor/TypedActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/actor/TypedActorSpec.scala index 42b39ab0b4..31c1e5fbc5 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/actor/TypedActorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/actor/TypedActorSpec.scala @@ -1,7 +1,7 @@ package akka.actor /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ import org.scalatest.matchers.MustMatchers diff --git a/akka-actor-tests/src/test/scala/akka/actor/supervisor/RestartStrategySpec.scala b/akka-actor-tests/src/test/scala/akka/actor/supervisor/RestartStrategySpec.scala index 0823369c20..066c696588 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/supervisor/RestartStrategySpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/supervisor/RestartStrategySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/supervisor/SupervisorHierarchySpec.scala b/akka-actor-tests/src/test/scala/akka/actor/supervisor/SupervisorHierarchySpec.scala index 02955798c5..8ed783e544 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/supervisor/SupervisorHierarchySpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/supervisor/SupervisorHierarchySpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/supervisor/SupervisorMiscSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/supervisor/SupervisorMiscSpec.scala index 6b1017e79d..26e718b86c 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/supervisor/SupervisorMiscSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/supervisor/SupervisorMiscSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/supervisor/SupervisorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/supervisor/SupervisorSpec.scala index d3f3d65c27..d82905b8cf 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/supervisor/SupervisorSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/supervisor/SupervisorSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor @@ -8,7 +8,6 @@ import org.scalatest.WordSpec import org.scalatest.matchers.MustMatchers import org.scalatest.BeforeAndAfterEach -import akka.testkit._ import akka.testkit.Testing.sleepFor import akka.util.duration._ import akka.config.Supervision._ @@ -205,7 +204,7 @@ class SupervisorSpec extends WordSpec with MustMatchers with BeforeAndAfterEach } def kill(pingPongActor: ActorRef) = { - intercept[RuntimeException] { pingPongActor !! (Die, TimeoutMillis) } + intercept[RuntimeException] { (pingPongActor ? (Die, TimeoutMillis)).as[Any] } messageLogPoll must be === ExceptionMessage } diff --git a/akka-actor-tests/src/test/scala/akka/actor/supervisor/SupervisorTreeSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/supervisor/SupervisorTreeSpec.scala index d8aaa9d0e4..d6e15d40c5 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/supervisor/SupervisorTreeSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/supervisor/SupervisorTreeSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor-tests/src/test/scala/akka/actor/supervisor/Ticket669Spec.scala b/akka-actor-tests/src/test/scala/akka/actor/supervisor/Ticket669Spec.scala index 0a70058c7b..bddad26176 100644 --- a/akka-actor-tests/src/test/scala/akka/actor/supervisor/Ticket669Spec.scala +++ b/akka-actor-tests/src/test/scala/akka/actor/supervisor/Ticket669Spec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor.supervisor @@ -14,10 +14,13 @@ import org.scalatest.matchers.MustMatchers class Ticket669Spec extends WordSpec with MustMatchers with BeforeAndAfterAll { import Ticket669Spec._ + override def beforeAll = Thread.interrupted() //remove interrupted status. + override def afterAll = Actor.registry.local.shutdownAll "A supervised actor with lifecycle PERMANENT" should { "be able to reply on failure during preRestart" in { + val latch = new CountDownLatch(1) val sender = Actor.actorOf(new Sender(latch)).start() diff --git a/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala b/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala index 594d0ad811..e7234509d7 100644 --- a/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/config/ConfigSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.config diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/ActorModelSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/ActorModelSpec.scala index dd7c6a5133..62297ca495 100644 --- a/akka-actor-tests/src/test/scala/akka/dispatch/ActorModelSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/dispatch/ActorModelSpec.scala @@ -1,36 +1,52 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.actor.dispatch import org.scalatest.junit.JUnitSuite -import org.junit.Test import org.scalatest.Assertions._ import akka.testkit.Testing import akka.dispatch._ import akka.actor.Actor._ import java.util.concurrent.atomic.AtomicLong -import java.util.concurrent.{ ConcurrentHashMap, CountDownLatch, TimeUnit } +import java.util.concurrent.{ConcurrentHashMap, CountDownLatch, TimeUnit} import akka.actor.dispatch.ActorModelSpec.MessageDispatcherInterceptor -import akka.util.{ Duration, Switch } -import org.multiverse.api.latches.StandardLatch -import akka.actor.{ ActorKilledException, PoisonPill, ActorRef, Actor } +import akka.util.Switch +import akka.actor.{ActorKilledException, PoisonPill, ActorRef, Actor} +import java.rmi.RemoteException +import org.junit.{After, Test} object ActorModelSpec { sealed trait ActorModelMessage + case class Reply_?(expect: Any) extends ActorModelMessage + case class Reply(expect: Any) extends ActorModelMessage + case class Forward(to: ActorRef, msg: Any) extends ActorModelMessage + case class CountDown(latch: CountDownLatch) extends ActorModelMessage + case class Increment(counter: AtomicLong) extends ActorModelMessage + case class Await(latch: CountDownLatch) extends ActorModelMessage + case class Meet(acknowledge: CountDownLatch, waitFor: CountDownLatch) extends ActorModelMessage + case class CountDownNStop(latch: CountDownLatch) extends ActorModelMessage + case class Wait(time: Long) extends ActorModelMessage + case class WaitAck(time: Long, latch: CountDownLatch) extends ActorModelMessage + + case object Interrupt extends ActorModelMessage + case object Restart extends ActorModelMessage + case class ThrowException(e: Throwable) extends ActorModelMessage + + val Ping = "Ping" val Pong = "Pong" @@ -52,17 +68,19 @@ object ActorModelSpec { } def receive = { - case Await(latch) ⇒ ack; latch.await(); busy.switchOff() - case Meet(sign, wait) ⇒ ack; sign.countDown(); wait.await(); busy.switchOff() - case Wait(time) ⇒ ack; Thread.sleep(time); busy.switchOff() - case WaitAck(time, l) ⇒ ack; Thread.sleep(time); l.countDown(); busy.switchOff() - case Reply(msg) ⇒ ack; self.reply(msg); busy.switchOff() - case Reply_?(msg) ⇒ ack; self.reply_?(msg); busy.switchOff() - case Forward(to, msg) ⇒ ack; to.forward(msg); busy.switchOff() - case CountDown(latch) ⇒ ack; latch.countDown(); busy.switchOff() - case Increment(count) ⇒ ack; count.incrementAndGet(); busy.switchOff() + case Await(latch) ⇒ ack; latch.await(); busy.switchOff() + case Meet(sign, wait) ⇒ ack; sign.countDown(); wait.await(); busy.switchOff() + case Wait(time) ⇒ ack; Thread.sleep(time); busy.switchOff() + case WaitAck(time, l) ⇒ ack; Thread.sleep(time); l.countDown(); busy.switchOff() + case Reply(msg) ⇒ ack; self.reply(msg); busy.switchOff() + case Reply_?(msg) ⇒ ack; self.reply_?(msg); busy.switchOff() + case Forward(to, msg) ⇒ ack; to.forward(msg); busy.switchOff() + case CountDown(latch) ⇒ ack; latch.countDown(); busy.switchOff() + case Increment(count) ⇒ ack; count.incrementAndGet(); busy.switchOff() case CountDownNStop(l) ⇒ ack; l.countDown(); self.stop(); busy.switchOff() - case Restart ⇒ ack; busy.switchOff(); throw new Exception("Restart requested") + case Restart ⇒ ack; busy.switchOff(); throw new Exception("Restart requested") + case Interrupt => ack; busy.switchOff(); throw new InterruptedException("Ping!") + case ThrowException(e: Throwable) => ack; busy.switchOff(); throw e } } @@ -183,7 +201,9 @@ object ActorModelSpec { if (condition) return true Thread.sleep(intervalMs) - } catch { 
case e: InterruptedException ⇒ } + } catch { + case e: InterruptedException ⇒ + } } false } @@ -192,6 +212,7 @@ object ActorModelSpec { } abstract class ActorModelSpec extends JUnitSuite { + import ActorModelSpec._ protected def newInterceptedDispatcher: MessageDispatcherInterceptor @@ -215,13 +236,17 @@ abstract class ActorModelSpec extends JUnitSuite { msgsProcessed = 0, restarts = 0) - val futures = for (i ← 1 to 10) yield Future { i } + val futures = for (i ← 1 to 10) yield Future { + i + } await(dispatcher.stops.get == 2)(withinMs = dispatcher.timeoutMs * 5) assertDispatcher(dispatcher)(starts = 2, stops = 2) val a2 = newTestActor a2.start - val futures2 = for (i ← 1 to 10) yield Future { i } + val futures2 = for (i ← 1 to 10) yield Future { + i + } await(dispatcher.starts.get == 3)(withinMs = dispatcher.timeoutMs * 5) assertDispatcher(dispatcher)(starts = 3, stops = 2) @@ -259,7 +284,13 @@ abstract class ActorModelSpec extends JUnitSuite { val counter = new CountDownLatch(200) a.start() - for (i ← 1 to 10) { spawn { for (i ← 1 to 20) { a ! WaitAck(1, counter) } } } + for (i ← 1 to 10) { + spawn { + for (i ← 1 to 20) { + a ! WaitAck(1, counter) + } + } + } assertCountDown(counter, Testing.testTime(3000), "Should process 200 messages") assertRefDefaultZero(a)(registers = 1, msgsReceived = 200, msgsProcessed = 200) @@ -267,7 +298,15 @@ abstract class ActorModelSpec extends JUnitSuite { } def spawn(f: ⇒ Unit) { - val thread = new Thread { override def run { try { f } catch { case e ⇒ e.printStackTrace } } } + val thread = new Thread { + override def run { + try { + f + } catch { + case e ⇒ e.printStackTrace + } + } + } thread.start() } @@ -329,8 +368,9 @@ abstract class ActorModelSpec extends JUnitSuite { def flood(num: Int) { val cachedMessage = CountDownNStop(new CountDownLatch(num)) - (1 to num) foreach { _ ⇒ - newTestActor.start() ! cachedMessage + (1 to num) foreach { + _ ⇒ + newTestActor.start() ! cachedMessage } assertCountDown(cachedMessage.latch, Testing.testTime(10000), "Should process " + num + " countdowns") } @@ -356,6 +396,52 @@ abstract class ActorModelSpec extends JUnitSuite { assert(each.exception.get.isInstanceOf[ActorKilledException]) a.stop() } + + @Test + def dispatcherShouldContinueToProcessMessagesWhenAThreadGetsInterrupted { + implicit val dispatcher = newInterceptedDispatcher + val a = newTestActor.start() + val f1 = a ? Reply("foo") + val f2 = a ? Reply("bar") + val f3 = a ? Interrupt + val f4 = a ? Reply("foo2") + val f5 = a ? Interrupt + val f6 = a ? Reply("bar2") + + assert(f1.get === "foo") + assert(f2.get === "bar") + assert((intercept[InterruptedException] { + f3.get + }).getMessage === "Ping!") + assert(f4.get === "foo2") + assert((intercept[InterruptedException] { + f5.get + }).getMessage === "Ping!") + assert(f6.get === "bar2") + } + + @Test + def dispatcherShouldContinueToProcessMessagesWhenExceptionIsThrown { + implicit val dispatcher = newInterceptedDispatcher + val a = newTestActor.start() + val f1 = a ? Reply("foo") + val f2 = a ? Reply("bar") + val f3 = a ? new ThrowException(new IndexOutOfBoundsException("IndexOutOfBoundsException")) + val f4 = a ? Reply("foo2") + val f5 = a ? new ThrowException(new RemoteException("RemoteException")) + val f6 = a ? 
Reply("bar2") + + assert(f1.get === "foo") + assert(f2.get === "bar") + assert((intercept[IndexOutOfBoundsException] { + f3.get + }).getMessage === "IndexOutOfBoundsException") + assert(f4.get === "foo2") + assert((intercept[RemoteException] { + f5.get + }).getMessage === "RemoteException") + assert(f6.get === "bar2") + } } class DispatcherModelTest extends ActorModelSpec { diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/DispatchersSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/DispatchersSpec.scala index f7df14e195..4ef2fc08f7 100644 --- a/akka-actor-tests/src/test/scala/akka/dispatch/DispatchersSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/dispatch/DispatchersSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor.dispatch diff --git a/akka-actor-tests/src/test/scala/akka/misc/SchedulerSpec.scala b/akka-actor-tests/src/test/scala/akka/misc/SchedulerSpec.scala index 6b5dbd684c..3c08d0983a 100644 --- a/akka-actor-tests/src/test/scala/akka/misc/SchedulerSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/misc/SchedulerSpec.scala @@ -2,16 +2,23 @@ package akka.actor import org.scalatest.junit.JUnitSuite import Actor._ -import java.util.concurrent.{ CountDownLatch, TimeUnit } import akka.config.Supervision._ import org.multiverse.api.latches.StandardLatch import org.junit.Test +import java.util.concurrent.{ScheduledFuture, ConcurrentLinkedQueue, CountDownLatch, TimeUnit} class SchedulerSpec extends JUnitSuite { + private val futures = new ConcurrentLinkedQueue[ScheduledFuture[AnyRef]]() + + def collectFuture(f: => ScheduledFuture[AnyRef]): ScheduledFuture[AnyRef] = { + val future = f + futures.add(future) + future + } def withCleanEndState(action: ⇒ Unit) { action - Scheduler.restart + while(futures.peek() ne null) { Option(futures.poll()).foreach(_.cancel(true)) } Actor.registry.local.shutdownAll } @@ -24,14 +31,14 @@ class SchedulerSpec extends JUnitSuite { def receive = { case Tick ⇒ countDownLatch.countDown() } }).start() // run every 50 millisec - Scheduler.schedule(tickActor, Tick, 0, 50, TimeUnit.MILLISECONDS) + collectFuture(Scheduler.schedule(tickActor, Tick, 0, 50, TimeUnit.MILLISECONDS)) // after max 1 second it should be executed at least the 3 times already assert(countDownLatch.await(1, TimeUnit.SECONDS)) val countDownLatch2 = new CountDownLatch(3) - Scheduler.schedule(() ⇒ countDownLatch2.countDown(), 0, 50, TimeUnit.MILLISECONDS) + collectFuture(Scheduler.schedule(() ⇒ countDownLatch2.countDown(), 0, 50, TimeUnit.MILLISECONDS)) // after max 1 second it should be executed at least the 3 times already assert(countDownLatch2.await(1, TimeUnit.SECONDS)) @@ -45,8 +52,8 @@ class SchedulerSpec extends JUnitSuite { def receive = { case Tick ⇒ countDownLatch.countDown() } }).start() // run every 50 millisec - Scheduler.scheduleOnce(tickActor, Tick, 50, TimeUnit.MILLISECONDS) - Scheduler.scheduleOnce(() ⇒ countDownLatch.countDown(), 50, TimeUnit.MILLISECONDS) + collectFuture(Scheduler.scheduleOnce(tickActor, Tick, 50, TimeUnit.MILLISECONDS)) + collectFuture(Scheduler.scheduleOnce(() ⇒ countDownLatch.countDown(), 50, TimeUnit.MILLISECONDS)) // after 1 second the wait should fail assert(countDownLatch.await(1, TimeUnit.SECONDS) == false) @@ -65,7 +72,7 @@ class SchedulerSpec extends JUnitSuite { def receive = { case Ping ⇒ ticks.countDown } }).start val numActors = Actor.registry.local.actors.length - (1 to 1000).foreach(_ ⇒ Scheduler.scheduleOnce(actor, Ping, 1, 
TimeUnit.MILLISECONDS)) + (1 to 1000).foreach(_ ⇒ collectFuture(Scheduler.scheduleOnce(actor, Ping, 1, TimeUnit.MILLISECONDS))) assert(ticks.await(10, TimeUnit.SECONDS)) assert(Actor.registry.local.actors.length === numActors) } @@ -83,7 +90,7 @@ class SchedulerSpec extends JUnitSuite { }).start() (1 to 10).foreach { i ⇒ - val future = Scheduler.scheduleOnce(actor, Ping, 1, TimeUnit.SECONDS) + val future = collectFuture(Scheduler.scheduleOnce(actor, Ping, 1, TimeUnit.SECONDS)) future.cancel(true) } assert(ticks.await(3, TimeUnit.SECONDS) == false) //No counting down should've been made @@ -120,9 +127,9 @@ class SchedulerSpec extends JUnitSuite { Permanent) :: Nil)).start - Scheduler.schedule(actor, Ping, 500, 500, TimeUnit.MILLISECONDS) + collectFuture(Scheduler.schedule(actor, Ping, 500, 500, TimeUnit.MILLISECONDS)) // appx 2 pings before crash - Scheduler.scheduleOnce(actor, Crash, 1000, TimeUnit.MILLISECONDS) + collectFuture(Scheduler.scheduleOnce(actor, Crash, 1000, TimeUnit.MILLISECONDS)) assert(restartLatch.tryAwait(2, TimeUnit.SECONDS)) // should be enough time for the ping countdown to recover and reach 6 pings diff --git a/akka-actor-tests/src/test/scala/akka/performance/trading/common/BenchResultRepository.scala b/akka-actor-tests/src/test/scala/akka/performance/trading/common/BenchResultRepository.scala index 877c8a3460..2f9ea89dd8 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/trading/common/BenchResultRepository.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/trading/common/BenchResultRepository.scala @@ -86,6 +86,7 @@ class FileBenchResultRepository extends BenchResultRepository { } private def save(stats: Stats) { + new File(dir).mkdirs if (!dirExists) return val timestamp = new SimpleDateFormat("yyyyMMddHHmmss").format(new Date(stats.timestamp)) val name = stats.name + "--" + timestamp + "--" + stats.load + ".ser" @@ -96,9 +97,9 @@ class FileBenchResultRepository extends BenchResultRepository { out.writeObject(stats) } catch { case e: Exception ⇒ - EventHandler.error(this, "Failed to save [%s] to [%s]".format(stats, f.getAbsolutePath)) - } - finally { + EventHandler.error(this, "Failed to save [%s] to [%s], due to [%s]". + format(stats, f.getAbsolutePath, e.getMessage)) + } finally { if (out ne null) try { out.close() } catch { case ignore: Exception ⇒ } } } @@ -112,11 +113,11 @@ class FileBenchResultRepository extends BenchResultRepository { val stats = in.readObject.asInstanceOf[Stats] Some(stats) } catch { - case e: Exception ⇒ - EventHandler.error(this, "Failed to load from [%s]".format(f.getAbsolutePath)) + case e: Throwable ⇒ + EventHandler.error(this, "Failed to load from [%s], due to [%s]". 
+ format(f.getAbsolutePath, e.getMessage)) None - } - finally { + } finally { if (in ne null) try { in.close() } catch { case ignore: Exception ⇒ } } } diff --git a/akka-actor-tests/src/test/scala/akka/performance/trading/common/GoogleChartBuilder.scala b/akka-actor-tests/src/test/scala/akka/performance/trading/common/GoogleChartBuilder.scala index f4fd02e924..d7f6c965a3 100644 --- a/akka-actor-tests/src/test/scala/akka/performance/trading/common/GoogleChartBuilder.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/trading/common/GoogleChartBuilder.scala @@ -2,8 +2,9 @@ package akka.performance.trading.common import java.io.UnsupportedEncodingException import java.net.URLEncoder - import scala.collection.immutable.TreeMap +import java.util.Locale +import java.util.Formatter /** * Generates URLs to Google Chart API http://code.google.com/apis/chart/ @@ -21,7 +22,7 @@ object GoogleChartBuilder { val current = statistics.last - val sb = new StringBuilder() + val sb = new StringBuilder sb.append(BaseUrl) // bar chart sb.append("cht=bvg") @@ -74,7 +75,7 @@ object GoogleChartBuilder { // grid lines appendGridSpacing(maxValue, sb) - return sb.toString() + return sb.toString } private def percentileLabels(percentiles: TreeMap[Int, Long], sb: StringBuilder) { @@ -119,4 +120,104 @@ object GoogleChartBuilder { } } + def latencyAndThroughputChartUrl(statistics: Seq[Stats], title: String): String = { + if (statistics.isEmpty) return "" + + val sb = new StringBuilder + sb.append(BaseUrl) + // line chart + sb.append("cht=lxy") + sb.append("&") + // size + sb.append("chs=").append(ChartWidth).append("x").append(ChartHeight) + sb.append("&") + // title + sb.append("chtt=").append(urlEncode(title)) + sb.append("&") + // axis locations + sb.append("chxt=x,y,r,x,y,r") + sb.append("&") + // labels + sb.append("chxl=3:|clients|4:|Latency+(us)|5:|Throughput+(tps)") + sb.append("&") + // label color and font + sb.append("chxs=0,676767,11.5,0,lt,676767|1,676767,11.5,0,lt,676767|2,676767,11.5,0,lt,676767") + sb.append("&") + sb.append("chco=") + val seriesColors = List("25B33B", "3072F3", "FF0000", "FF9900") + sb.append(seriesColors.mkString(",")) + sb.append("&") + // legend + sb.append("chdl=5th Percentile|Median|95th Percentile|Throughput") + sb.append("&") + + sb.append("chdlp=b") + sb.append("&") + + sb.append("chls=1|1|1") + sb.append("&") + + sb.append("chls=1|1|1") + sb.append("&") + + sb.append("chma=5,5,5,25") + sb.append("&") + + // data points + sb.append("chm=") + val chmStr = seriesColors.zipWithIndex.map(each ⇒ "o," + each._1 + "," + each._2 + ",-1,7").mkString("|") + sb.append(chmStr) + sb.append("&") + + // data series + val loadStr = statistics.map(_.load).mkString(",") + sb.append("chd=t:") + val maxP = 95 + val percentiles = List(5, 50, maxP) + val maxValue = statistics.map(_.percentiles(maxP)).max + val percentileSeries: List[String] = + for (p ← percentiles) yield { + loadStr + "|" + statistics.map(_.percentiles(p)).mkString(",") + } + sb.append(percentileSeries.mkString("|")) + + sb.append("|") + val maxTps: Double = statistics.map(_.tps).max + sb.append(loadStr).append("|") + val tpsSeries = statistics.map(s ⇒ formatDouble(s.tps)).mkString(",") + sb.append(tpsSeries) + + val minLoad = statistics.head.load + val maxLoad = statistics.last.load + + // y range + sb.append("&") + sb.append("chxr=0,").append(minLoad).append(",").append(maxLoad).append("|1,0,").append(maxValue).append("|2,0,") + .append(formatDouble(maxTps)) + sb.append("&") + + sb.append("chds=") + for (p ← percentiles) 
{ + sb.append(minLoad).append(",").append(maxLoad) + sb.append(",0,").append(maxValue) + sb.append(",") + } + sb.append(minLoad).append(",").append(maxLoad) + sb.append(",0,").append(formatDouble(maxTps)) + sb.append("&") + + // label positions + sb.append("chxp=3,").append("50").append("|4,").append("100").append("|5,").append("100") + sb.append("&") + + // grid lines + appendGridSpacing(maxValue, sb) + + return sb.toString + } + + def formatDouble(value: Double): String = { + new java.math.BigDecimal(value).setScale(2, java.math.RoundingMode.HALF_EVEN).toString + } + } \ No newline at end of file diff --git a/akka-actor-tests/src/test/scala/akka/performance/trading/common/PerformanceTest.scala b/akka-actor-tests/src/test/scala/akka/performance/trading/common/PerformanceTest.scala index 106a5db3b9..69a7b4bd08 100755 --- a/akka-actor-tests/src/test/scala/akka/performance/trading/common/PerformanceTest.scala +++ b/akka-actor-tests/src/test/scala/akka/performance/trading/common/PerformanceTest.scala @@ -52,8 +52,7 @@ trait PerformanceTest extends JUnitSuite { var stat: DescriptiveStatistics = _ val resultRepository = BenchResultRepository() - - val legendTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm") + lazy val report = new Report(resultRepository, compareResultWith) type TS <: TradingSystem @@ -128,78 +127,7 @@ trait PerformanceTest extends JUnitSuite { resultRepository.add(stats) - EventHandler.info(this, formatResultsTable(resultRepository.get(name))) - - val chartTitle = name + " Percentiles (microseconds)" - val chartUrl = GoogleChartBuilder.percentilChartUrl(resultRepository.get(name), chartTitle, _.load + " clients") - EventHandler.info(this, chartTitle + " Chart:\n" + chartUrl) - - for { - compareName ← compareResultWith - compareStats ← resultRepository.get(compareName, numberOfClients) - } { - val chartTitle = name + " vs. " + compareName + ", " + numberOfClients + " clients" + ", Percentiles (microseconds)" - val chartUrl = GoogleChartBuilder.percentilChartUrl(Seq(compareStats, stats), chartTitle, _.name) - EventHandler.info(this, chartTitle + " Chart:\n" + chartUrl) - } - - val withHistorical = resultRepository.getWithHistorical(name, numberOfClients) - if (withHistorical.size > 1) { - val chartTitle = name + " vs. historical, " + numberOfClients + " clients" + ", Percentiles (microseconds)" - val chartUrl = GoogleChartBuilder.percentilChartUrl(withHistorical, chartTitle, - stats ⇒ legendTimeFormat.format(new Date(stats.timestamp))) - EventHandler.info(this, chartTitle + " Chart:\n" + chartUrl) - } - - } - - def formatResultsTable(statsSeq: Seq[Stats]): String = { - - val name = statsSeq.head.name - - val spaces = " " - val headerScenarioCol = ("Scenario" + spaces).take(name.length) - - val headerLine = (headerScenarioCol :: "clients" :: "TPS" :: "mean" :: "5% " :: "25% " :: "50% " :: "75% " :: "95% " :: "Durat." 
:: "N" :: Nil) - .mkString("\t") - val headerLine2 = (spaces.take(name.length) :: " " :: " " :: "(us)" :: "(us)" :: "(us)" :: "(us)" :: "(us)" :: "(us)" :: "(s) " :: " " :: Nil) - .mkString("\t") - val line = List.fill(formatStats(statsSeq.head).replaceAll("\t", " ").length)("-").mkString - val formattedStats = "\n" + - line.replace('-', '=') + "\n" + - headerLine + "\n" + - headerLine2 + "\n" + - line + "\n" + - statsSeq.map(formatStats(_)).mkString("\n") + "\n" + - line + "\n" - - formattedStats - - } - - def formatStats(stats: Stats): String = { - val durationS = stats.durationNanos.toDouble / 1000000000.0 - val duration = durationS.formatted("%.0f") - - val tpsStr = stats.tps.formatted("%.0f") - val meanStr = stats.mean.formatted("%.0f") - - val summaryLine = - stats.name :: - stats.load.toString :: - tpsStr :: - meanStr :: - stats.percentiles(5).toString :: - stats.percentiles(25).toString :: - stats.percentiles(50).toString :: - stats.percentiles(75).toString :: - stats.percentiles(95).toString :: - duration :: - stats.n.toString :: - Nil - - summaryLine.mkString("\t") - + report.html(resultRepository.get(name)) } def delay(delayMs: Int) { diff --git a/akka-actor-tests/src/test/scala/akka/performance/trading/common/Report.scala b/akka-actor-tests/src/test/scala/akka/performance/trading/common/Report.scala new file mode 100644 index 0000000000..9160fa631e --- /dev/null +++ b/akka-actor-tests/src/test/scala/akka/performance/trading/common/Report.scala @@ -0,0 +1,179 @@ +package akka.performance.trading.common +import java.io.File +import java.text.SimpleDateFormat +import java.io.PrintWriter +import java.io.FileWriter +import akka.event.EventHandler +import java.util.Date + +class Report( + resultRepository: BenchResultRepository, + compareResultWith: Option[String] = None) { + + private val dir = System.getProperty("benchmark.resultDir", "target/benchmark") + + private def dirExists: Boolean = new File(dir).exists + private def log = System.getProperty("benchmark.logResult", "false").toBoolean + + val dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm") + val legendTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm") + val fileTimestampFormat = new SimpleDateFormat("yyyyMMddHHmmss") + + def html(statistics: Seq[Stats]): Unit = if (dirExists) { + + val current = statistics.last + val sb = new StringBuilder + + val title = current.name + " " + dateTimeFormat.format(new Date(current.timestamp)) + sb.append(header(title)) + sb.append("
<h1>%s</h1>\n".format(title))
+
+    sb.append("<pre>\n")
+    sb.append(formatResultsTable(statistics))
+    sb.append("\n</pre>
\n") + + sb.append(img(percentilesChart(current))) + sb.append(img(latencyAndThroughputChart(current))) + + for (stats ← statistics) { + compareWithHistoricalPercentiliesChart(stats).foreach(url ⇒ sb.append(img(url))) + } + + for (stats ← statistics) { + comparePercentilesChart(stats).foreach(url ⇒ sb.append(img(url))) + } + + if (dirExists) { + val timestamp = fileTimestampFormat.format(new Date(current.timestamp)) + val name = current.name + "--" + timestamp + ".html" + write(sb.toString, name) + } + + } + + private def img(url: String): String = { + """""".format( + url, GoogleChartBuilder.ChartWidth, GoogleChartBuilder.ChartHeight) + "\n" + } + + def percentilesChart(stats: Stats): String = { + val chartTitle = stats.name + " Percentiles (microseconds)" + val chartUrl = GoogleChartBuilder.percentilChartUrl(resultRepository.get(stats.name), chartTitle, _.load + " clients") + if (log) EventHandler.info(this, chartTitle + " Chart:\n" + chartUrl) + chartUrl + } + + def comparePercentilesChart(stats: Stats): Seq[String] = { + for { + compareName ← compareResultWith.toSeq + compareStats ← resultRepository.get(compareName, stats.load) + } yield { + val chartTitle = stats.name + " vs. " + compareName + ", " + stats.load + " clients" + ", Percentiles (microseconds)" + val chartUrl = GoogleChartBuilder.percentilChartUrl(Seq(compareStats, stats), chartTitle, _.name) + if (log) EventHandler.info(this, chartTitle + " Chart:\n" + chartUrl) + chartUrl + } + } + + def compareWithHistoricalPercentiliesChart(stats: Stats): Option[String] = { + val withHistorical = resultRepository.getWithHistorical(stats.name, stats.load) + if (withHistorical.size > 1) { + val chartTitle = stats.name + " vs. historical, " + stats.load + " clients" + ", Percentiles (microseconds)" + val chartUrl = GoogleChartBuilder.percentilChartUrl(withHistorical, chartTitle, + stats ⇒ legendTimeFormat.format(new Date(stats.timestamp))) + if (log) EventHandler.info(this, chartTitle + " Chart:\n" + chartUrl) + Some(chartUrl) + } else { + None + } + } + + def latencyAndThroughputChart(stats: Stats): String = { + val chartTitle = stats.name + " Latency (microseconds) and Throughput (TPS)" + val chartUrl = GoogleChartBuilder.latencyAndThroughputChartUrl(resultRepository.get(stats.name), chartTitle) + if (log) EventHandler.info(this, chartTitle + " Chart:\n" + chartUrl) + chartUrl + } + + def formatResultsTable(statsSeq: Seq[Stats]): String = { + + val name = statsSeq.head.name + + val spaces = " " + val headerScenarioCol = ("Scenario" + spaces).take(name.length) + + val headerLine = (headerScenarioCol :: "clients" :: "TPS" :: "mean" :: "5% " :: "25% " :: "50% " :: "75% " :: "95% " :: "Durat." 
:: "N" :: Nil) + .mkString("\t") + val headerLine2 = (spaces.take(name.length) :: " " :: " " :: "(us)" :: "(us)" :: "(us)" :: "(us)" :: "(us)" :: "(us)" :: "(s) " :: " " :: Nil) + .mkString("\t") + val line = List.fill(formatStats(statsSeq.head).replaceAll("\t", " ").length)("-").mkString + val formattedStats = "\n" + + line.replace('-', '=') + "\n" + + headerLine + "\n" + + headerLine2 + "\n" + + line + "\n" + + statsSeq.map(formatStats(_)).mkString("\n") + "\n" + + line + "\n" + + if (log) EventHandler.info(this, formattedStats) + + formattedStats + + } + + def formatStats(stats: Stats): String = { + val durationS = stats.durationNanos.toDouble / 1000000000.0 + val duration = durationS.formatted("%.0f") + + val tpsStr = stats.tps.formatted("%.0f") + val meanStr = stats.mean.formatted("%.0f") + + val summaryLine = + stats.name :: + stats.load.toString :: + tpsStr :: + meanStr :: + stats.percentiles(5).toString :: + stats.percentiles(25).toString :: + stats.percentiles(50).toString :: + stats.percentiles(75).toString :: + stats.percentiles(95).toString :: + duration :: + stats.n.toString :: + Nil + + summaryLine.mkString("\t") + + } + + def write(content: String, fileName: String) { + val f = new File(dir, fileName) + var writer: PrintWriter = null + try { + writer = new PrintWriter(new FileWriter(f)) + writer.print(content) + writer.flush() + } catch { + case e: Exception ⇒ + EventHandler.error(this, "Failed to save report to [%s], due to [%s]". + format(f.getAbsolutePath, e.getMessage)) + } finally { + if (writer ne null) try { writer.close() } catch { case ignore: Exception ⇒ } + } + } + + def header(title: String) = + """| + | + | + | + |%s + | + | + |""".stripMargin.format(title) + + def footer = + """|" + |""".stripMargin + +} \ No newline at end of file diff --git a/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala index 2cdd3e2c4a..b6d7777a95 100644 --- a/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala @@ -95,37 +95,45 @@ class RoutingSpec extends WordSpec with MustMatchers { "dispatch to smallest mailbox" in { val t1Count = new AtomicInteger(0) val t2Count = new AtomicInteger(0) - val latch = TestLatch(500) + val latch1 = TestLatch(2501) + val latch2 = TestLatch(2499) val t1 = actorOf(new Actor { def receive = { case x ⇒ - sleepFor(50 millis) // slow actor t1Count.incrementAndGet - latch.countDown() + latch1.countDown() } }).start() + t1.dispatcher.suspend(t1) + + for (i <- 1 to 2501) t1 ! i + val t2 = actorOf(new Actor { def receive = { case x ⇒ t2Count.incrementAndGet - latch.countDown() + latch2.countDown() } }).start() - val d = loadBalancerActor(new SmallestMailboxFirstIterator(t1 :: t2 :: Nil)) + val d = loadBalancerActor(new SmallestMailboxFirstIterator(t1 :: t2 :: Nil)) //Will pick the last with the smallest mailbox, so make sure t1 is last - for (i ← 1 to 500) d ! i + for (i ← 1 to 2499 ) d ! 
i + + latch2.await(20 seconds) + + t1.dispatcher.resume(t1) try { - latch.await(20 seconds) + latch1.await(20 seconds) } finally { // because t1 is much slower and thus has a bigger mailbox all the time - t1Count.get must be < (t2Count.get) + t1Count.get must be === 2501 + t2Count.get must be === 2499 + for (a ← List(t1, t2, d)) a.stop() } - - for (a ← List(t1, t2, d)) a.stop() } "listen" in { diff --git a/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala b/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala index 48e43a5e6d..cd534c4fb2 100644 --- a/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.serialization diff --git a/akka-actor-tests/src/test/scala/akka/testkit/CallingThreadDispatcherModelSpec.scala b/akka-actor-tests/src/test/scala/akka/testkit/CallingThreadDispatcherModelSpec.scala index e0aa5369e7..9fbc5fd7ac 100644 --- a/akka-actor-tests/src/test/scala/akka/testkit/CallingThreadDispatcherModelSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/testkit/CallingThreadDispatcherModelSpec.scala @@ -1,10 +1,11 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.testkit import akka.actor.dispatch.ActorModelSpec import java.util.concurrent.CountDownLatch +import org.junit.{After, Test} class CallingThreadDispatcherModelSpec extends ActorModelSpec { import ActorModelSpec._ @@ -42,6 +43,13 @@ class CallingThreadDispatcherModelSpec extends ActorModelSpec { //Can't handle this... } + + @After + def after { + //remove the interrupted status since we are messing with interrupted exceptions. + Thread.interrupted() + } + } // vim: set ts=2 sw=2 et: diff --git a/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala b/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala index 713c3f28ae..d30fdc83be 100644 --- a/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala +++ b/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/AkkaException.scala b/akka-actor/src/main/scala/akka/AkkaException.scala index 77fa99d0f9..f87453db10 100644 --- a/akka-actor/src/main/scala/akka/AkkaException.scala +++ b/akka-actor/src/main/scala/akka/AkkaException.scala @@ -1,12 +1,12 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka import akka.actor.newUuid import java.net.{ InetAddress, UnknownHostException } - + /** * Akka base Exception. Each Exception gets: *
    diff --git a/akka-actor/src/main/scala/akka/actor/Actor.scala b/akka-actor/src/main/scala/akka/actor/Actor.scala index 8bad9204bf..27095a580e 100644 --- a/akka-actor/src/main/scala/akka/actor/Actor.scala +++ b/akka-actor/src/main/scala/akka/actor/Actor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/ActorRef.scala b/akka-actor/src/main/scala/akka/actor/ActorRef.scala index 88cfdfe6a2..bea369af83 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorRef.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorRef.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor @@ -9,15 +9,15 @@ import akka.dispatch._ import akka.config._ import akka.config.Supervision._ import akka.util._ -import akka.serialization.{ Format, Serializer, Serialization } +import akka.serialization.{Serializer, Serialization} import ReflectiveAccess._ import ClusterModule._ -import DeploymentConfig.{ TransactionLog ⇒ TransactionLogConfig, _ } +import DeploymentConfig.{TransactionLog ⇒ TransactionLogConfig, _} import java.net.InetSocketAddress import java.util.concurrent.atomic.AtomicReference -import java.util.concurrent.{ ScheduledFuture, ConcurrentHashMap, TimeUnit } -import java.util.{ Map ⇒ JMap } +import java.util.concurrent.{ScheduledFuture, ConcurrentHashMap, TimeUnit} +import java.util.{Map ⇒ JMap} import scala.reflect.BeanProperty import scala.collection.immutable.Stack @@ -30,10 +30,15 @@ private[akka] object ActorRefInternals { * LifeCycles for ActorRefs. */ private[akka] sealed trait StatusType + object UNSTARTED extends StatusType + object RUNNING extends StatusType + object BEING_RESTARTED extends StatusType + object SHUTDOWN extends StatusType + } /** @@ -68,7 +73,8 @@ private[akka] object ActorRefInternals { * * @author Jonas Bonér */ -trait ActorRef extends ActorRefShared with ForwardableChannel with java.lang.Comparable[ActorRef] with Serializable { scalaRef: ScalaActorRef ⇒ +trait ActorRef extends ActorRefShared with ForwardableChannel with java.lang.Comparable[ActorRef] with Serializable { + scalaRef: ScalaActorRef ⇒ // Only mutable for RemoteServer in order to maintain identity across nodes @volatile protected[akka] var _uuid = newUuid @@ -105,6 +111,7 @@ trait ActorRef extends ActorRefShared with ForwardableChannel with java.lang.Com def setReceiveTimeout(timeout: Long) { this.receiveTimeout = Some(timeout) } + def getReceiveTimeout: Option[Long] = receiveTimeout /** @@ -121,6 +128,7 @@ trait ActorRef extends ActorRefShared with ForwardableChannel with java.lang.Com * */ def setFaultHandler(handler: FaultHandlingStrategy) + def getFaultHandler: FaultHandlingStrategy /** @@ -139,6 +147,7 @@ trait ActorRef extends ActorRefShared with ForwardableChannel with java.lang.Com * */ def setLifeCycle(lifeCycle: LifeCycle) + def getLifeCycle: LifeCycle /** @@ -153,7 +162,10 @@ trait ActorRef extends ActorRefShared with ForwardableChannel with java.lang.Com * The default is also that all actors that are created and spawned from within this actor * is sharing the same dispatcher as its creator. 
*/ - def setDispatcher(dispatcher: MessageDispatcher) { this.dispatcher = dispatcher } + def setDispatcher(dispatcher: MessageDispatcher) { + this.dispatcher = dispatcher + } + def getDispatcher: MessageDispatcher = dispatcher /** @@ -177,6 +189,7 @@ trait ActorRef extends ActorRefShared with ForwardableChannel with java.lang.Com * Returns the uuid for the actor. */ def getUuid = _uuid + def uuid = _uuid /** @@ -366,9 +379,13 @@ trait ActorRef extends ActorRefShared with ForwardableChannel with java.lang.Com */ def sendException(ex: Throwable) {} + def isUsableOnlyOnce = false + def isUsable = true + def isReplyable = true + def canSendException = false /** @@ -382,9 +399,9 @@ trait ActorRef extends ActorRefShared with ForwardableChannel with java.lang.Com protected[akka] def postMessageToMailbox(message: Any, channel: UntypedChannel): Unit protected[akka] def postMessageToMailboxAndCreateFutureResultWithTimeout( - message: Any, - timeout: Long, - channel: UntypedChannel): Future[Any] + message: Any, + timeout: Long, + channel: UntypedChannel): Future[Any] protected[akka] def actorInstance: AtomicReference[Actor] @@ -393,6 +410,7 @@ trait ActorRef extends ActorRefShared with ForwardableChannel with java.lang.Com protected[akka] def supervisor_=(sup: Option[ActorRef]) protected[akka] def mailbox: AnyRef + protected[akka] def mailbox_=(value: AnyRef): AnyRef protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable) @@ -416,7 +434,7 @@ trait ActorRef extends ActorRefShared with ForwardableChannel with java.lang.Com * * @author Jonas Bonér */ -class LocalActorRef private[akka] (private[this] val actorFactory: () ⇒ Actor, val address: String) +class LocalActorRef private[akka](private[this] val actorFactory: () ⇒ Actor, val address: String) extends ActorRef with ScalaActorRef { protected[akka] val guard = new ReentrantGuard @@ -442,7 +460,9 @@ class LocalActorRef private[akka] (private[this] val actorFactory: () ⇒ Actor, @volatile private[akka] var _dispatcher: MessageDispatcher = Dispatchers.defaultGlobalDispatcher - protected[akka] val actorInstance = guard.withGuard { new AtomicReference[Actor](newActor) } + protected[akka] val actorInstance = guard.withGuard { + new AtomicReference[Actor](newActor) + } def serializerErrorDueTo(reason: String) = throw new akka.config.ConfigurationException( @@ -480,16 +500,16 @@ class LocalActorRef private[akka] (private[this] val actorFactory: () ⇒ Actor, // used only for deserialization private[akka] def this( - __uuid: Uuid, - __address: String, - __timeout: Long, - __receiveTimeout: Option[Long], - __lifeCycle: LifeCycle, - __supervisor: Option[ActorRef], - __hotswap: Stack[PartialFunction[Any, Unit]], - __factory: () ⇒ Actor) = { + __uuid: Uuid, + __address: String, + __timeout: Long, + __receiveTimeout: Option[Long], + __lifeCycle: LifeCycle, + __supervisor: Option[ActorRef], + __hotswap: Stack[PartialFunction[Any, Unit]], + __factory: () ⇒ Actor) = { - this(__factory, __address) + this (__factory, __address) _uuid = __uuid timeout = __timeout @@ -627,7 +647,9 @@ class LocalActorRef private[akka] (private[this] val actorFactory: () ⇒ Actor, */ def mailbox: AnyRef = _mailbox - protected[akka] def mailbox_=(value: AnyRef): AnyRef = { _mailbox = value; value } + protected[akka] def mailbox_=(value: AnyRef): AnyRef = { + _mailbox = value; value + } /** * Returns the supervisor, if there is one. 
@@ -651,12 +673,12 @@ class LocalActorRef private[akka] (private[this] val actorFactory: () ⇒ Actor, dispatcher dispatchMessage new MessageInvocation(this, message, channel) protected[akka] def postMessageToMailboxAndCreateFutureResultWithTimeout( - message: Any, - timeout: Long, - channel: UntypedChannel): Future[Any] = { + message: Any, + timeout: Long, + channel: UntypedChannel): Future[Any] = { val future = channel match { case f: ActorPromise ⇒ f - case _ ⇒ new ActorPromise(timeout) + case _ ⇒ new ActorPromise(timeout) } dispatcher dispatchMessage new MessageInvocation(this, message, future) future @@ -677,7 +699,8 @@ class LocalActorRef private[akka] (private[this] val actorFactory: () ⇒ Actor, currentMessage = null // reset current message after successful invocation } catch { case e: InterruptedException ⇒ - currentMessage = null // received message while actor is shutting down, ignore + handleExceptionInDispatch(e, messageHandle.message) + throw e case e ⇒ handleExceptionInDispatch(e, messageHandle.message) } @@ -716,13 +739,16 @@ class LocalActorRef private[akka] (private[this] val actorFactory: () ⇒ Actor, private def requestRestartPermission(maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]): Boolean = { - val denied = if (maxNrOfRetries.isEmpty && withinTimeRange.isEmpty) { //Immortal + val denied = if (maxNrOfRetries.isEmpty && withinTimeRange.isEmpty) { + //Immortal false - } else if (withinTimeRange.isEmpty) { // restrict number of restarts + } else if (withinTimeRange.isEmpty) { + // restrict number of restarts val retries = maxNrOfRetriesCount + 1 maxNrOfRetriesCount = retries //Increment number of retries retries > maxNrOfRetries.get - } else { // cannot restart more than N within M timerange + } else { + // cannot restart more than N within M timerange val retries = maxNrOfRetriesCount + 1 val windowStart = restartTimeWindowStartNanos @@ -826,7 +852,7 @@ class LocalActorRef private[akka] (private[this] val actorFactory: () ⇒ Actor, // ========= PRIVATE FUNCTIONS ========= private[this] def newActor: Actor = { - import Actor.{ actorRefInCreation ⇒ refStack } + import Actor.{actorRefInCreation ⇒ refStack} val stackBefore = refStack.get refStack.set(stackBefore.push(this)) try { @@ -837,7 +863,7 @@ class LocalActorRef private[akka] (private[this] val actorFactory: () ⇒ Actor, refStack.set(if (stackAfter.head eq null) stackAfter.pop.pop else stackAfter.pop) //pop null marker plus self } } match { - case null ⇒ throw new ActorInitializationException("Actor instance passed to ActorRef can not be 'null'") + case null ⇒ throw new ActorInitializationException("Actor instance passed to ActorRef can not be 'null'") case valid ⇒ valid } @@ -862,27 +888,34 @@ class LocalActorRef private[akka] (private[this] val actorFactory: () ⇒ Actor, if (supervisor.isDefined) notifySupervisorWithMessage(Death(this, reason)) else { lifeCycle match { +<<<<<<< HEAD case Temporary ⇒ shutDownTemporaryActor(this, reason) case _ ⇒ dispatcher.resume(this) //Resume processing for this actor +======= + case Temporary ⇒ shutDownTemporaryActor(this) + case _ ⇒ dispatcher.resume(this) //Resume processing for this actor +>>>>>>> 2cf64bccae0afcfa2ed9062e1590cd9e4f187aeb } } } private def notifySupervisorWithMessage(notification: LifeCycleMessage) { // FIXME to fix supervisor restart of remote actor for oneway calls, inject a supervisor proxy that can send notification back to client - _supervisor.foreach { sup ⇒ - if (sup.isShutdown) { // if supervisor is shut down, game over for all linked actors - 
//Scoped stop all linked actors, to avoid leaking the 'i' val - { - val i = _linkedActors.values.iterator - while (i.hasNext) { - i.next.stop() - i.remove + _supervisor.foreach { + sup ⇒ + if (sup.isShutdown) { + // if supervisor is shut down, game over for all linked actors + //Scoped stop all linked actors, to avoid leaking the 'i' val + { + val i = _linkedActors.values.iterator + while (i.hasNext) { + i.next.stop() + i.remove + } } - } - //Stop the actor itself - stop - } else sup ! notification // else notify supervisor + //Stop the actor itself + stop + } else sup ! notification // else notify supervisor } } @@ -923,7 +956,8 @@ class LocalActorRef private[akka] (private[this] val actorFactory: () ⇒ Actor, protected[akka] def checkReceiveTimeout() { cancelReceiveTimeout() - if (receiveTimeout.isDefined && dispatcher.mailboxIsEmpty(this)) { //Only reschedule if desired and there are currently no more messages to be processed + if (receiveTimeout.isDefined && dispatcher.mailboxIsEmpty(this)) { + //Only reschedule if desired and there are currently no more messages to be processed _futureTimeout = Some(Scheduler.scheduleOnce(this, ReceiveTimeout, receiveTimeout.get, TimeUnit.MILLISECONDS)) } } @@ -951,11 +985,11 @@ object RemoteActorSystemMessage { * * @author Jonas Bonér */ -private[akka] case class RemoteActorRef private[akka] ( - val remoteAddress: InetSocketAddress, - val address: String, - _timeout: Long, - loader: Option[ClassLoader]) +private[akka] case class RemoteActorRef private[akka]( + val remoteAddress: InetSocketAddress, + val address: String, + _timeout: Long, + loader: Option[ClassLoader]) extends ActorRef with ScalaActorRef { ClusterModule.ensureEnabled() @@ -967,22 +1001,22 @@ private[akka] case class RemoteActorRef private[akka] ( def postMessageToMailbox(message: Any, channel: UntypedChannel): Unit = { val chSender = channel match { case ref: ActorRef ⇒ Some(ref) - case _ ⇒ None + case _ ⇒ None } Actor.remote.send[Any](message, chSender, None, remoteAddress, timeout, true, this, loader) } def postMessageToMailboxAndCreateFutureResultWithTimeout( - message: Any, - timeout: Long, - channel: UntypedChannel): Future[Any] = { + message: Any, + timeout: Long, + channel: UntypedChannel): Future[Any] = { val chSender = channel match { case ref: ActorRef ⇒ Some(ref) - case _ ⇒ None + case _ ⇒ None } val chFuture = channel match { case f: Promise[Any] ⇒ Some(f) - case _ ⇒ None + case _ ⇒ None } val future = Actor.remote.send[Any](message, chSender, chFuture, remoteAddress, timeout, false, this, loader) if (future.isDefined) ActorPromise(future.get) @@ -1013,34 +1047,49 @@ private[akka] case class RemoteActorRef private[akka] ( def dispatcher_=(md: MessageDispatcher) { unsupported } + def dispatcher: MessageDispatcher = unsupported + def link(actorRef: ActorRef) { unsupported } + def unlink(actorRef: ActorRef) { unsupported } + def startLink(actorRef: ActorRef): ActorRef = unsupported + def supervisor: Option[ActorRef] = unsupported + def linkedActors: JMap[Uuid, ActorRef] = unsupported + protected[akka] def mailbox: AnyRef = unsupported + protected[akka] def mailbox_=(value: AnyRef): AnyRef = unsupported + protected[akka] def handleTrapExit(dead: ActorRef, reason: Throwable) { unsupported } + protected[akka] def restart(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]) { unsupported } + protected[akka] def restartLinkedActors(reason: Throwable, maxNrOfRetries: Option[Int], withinTimeRange: Option[Int]) { unsupported } + protected[akka] def 
invoke(messageHandle: MessageInvocation) { unsupported } + protected[akka] def supervisor_=(sup: Option[ActorRef]) { unsupported } + protected[akka] def actorInstance: AtomicReference[Actor] = unsupported + private def unsupported = throw new UnsupportedOperationException("Not supported for RemoteActorRef") } @@ -1072,7 +1121,8 @@ trait ActorRefShared { * There are implicit conversions in ../actor/Implicits.scala * from ActorRef -> ScalaActorRef and back */ -trait ScalaActorRef extends ActorRefShared with ForwardableChannel { ref: ActorRef ⇒ +trait ScalaActorRef extends ActorRefShared with ForwardableChannel { + ref: ActorRef ⇒ /** * Address for actor, must be a unique one. @@ -1116,7 +1166,7 @@ trait ScalaActorRef extends ActorRefShared with ForwardableChannel { ref: ActorR if (msg eq null) None else msg.channel match { case ref: ActorRef ⇒ Some(ref) - case _ ⇒ None + case _ ⇒ None } } @@ -1130,7 +1180,7 @@ trait ScalaActorRef extends ActorRefShared with ForwardableChannel { ref: ActorR if (msg eq null) None else msg.channel match { case f: ActorPromise ⇒ Some(f) - case _ ⇒ None + case _ ⇒ None } } @@ -1154,28 +1204,6 @@ trait ScalaActorRef extends ActorRefShared with ForwardableChannel { ref: ActorR "Actor has not been started, you need to invoke 'actor.start()' before using it") } - /** - * Sends a message asynchronously and waits on a future for a reply message. - *

    - * It waits on the reply either until it receives it (in the form of Some(replyMessage)) - * or until the timeout expires (which will return None). E.g. send-and-receive-eventually semantics. - *

    - * NOTE: - * Use this method with care. In most cases it is better to use '!' together with the 'sender' member field to - * implement request/response message exchanges. - * If you are sending messages using !! then you have to use self.reply(..) - * to send a reply message to the original sender. If not then the sender will block until the timeout expires. - */ - @deprecated("use `(actor ? msg).as[T]` instead", "1.2") - def !!(message: Any, timeout: Long = this.timeout)(implicit channel: UntypedChannel = NullChannel): Option[Any] = { - if (isRunning) { - val future = postMessageToMailboxAndCreateFutureResultWithTimeout(message, timeout, channel) - - try { future.await.resultOrException } catch { case e: FutureTimeoutException ⇒ None } - } else throw new ActorInitializationException( - "Actor has not been started, you need to invoke 'actor.start()' before using it") - } - /** * Sends a message asynchronously, returning a future which may eventually hold the reply. */ diff --git a/akka-actor/src/main/scala/akka/actor/ActorRegistry.scala b/akka-actor/src/main/scala/akka/actor/ActorRegistry.scala index 9dafb5a90e..e4fe387fcf 100644 --- a/akka-actor/src/main/scala/akka/actor/ActorRegistry.scala +++ b/akka-actor/src/main/scala/akka/actor/ActorRegistry.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/BootableActorLoaderService.scala b/akka-actor/src/main/scala/akka/actor/BootableActorLoaderService.scala index 0cf3a8fa2e..7cf4fbaf4e 100644 --- a/akka-actor/src/main/scala/akka/actor/BootableActorLoaderService.scala +++ b/akka-actor/src/main/scala/akka/actor/BootableActorLoaderService.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/Channel.scala b/akka-actor/src/main/scala/akka/actor/Channel.scala index 2896883f2b..f74f68cd00 100644 --- a/akka-actor/src/main/scala/akka/actor/Channel.scala +++ b/akka-actor/src/main/scala/akka/actor/Channel.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/Deployer.scala b/akka-actor/src/main/scala/akka/actor/Deployer.scala index f681e8ab50..c754d1733d 100644 --- a/akka-actor/src/main/scala/akka/actor/Deployer.scala +++ b/akka-actor/src/main/scala/akka/actor/Deployer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/DeploymentConfig.scala b/akka-actor/src/main/scala/akka/actor/DeploymentConfig.scala index 1d4f23e545..bc058e87f0 100644 --- a/akka-actor/src/main/scala/akka/actor/DeploymentConfig.scala +++ b/akka-actor/src/main/scala/akka/actor/DeploymentConfig.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/FSM.scala b/akka-actor/src/main/scala/akka/actor/FSM.scala index b7e94e864e..b911af4c1a 100644 --- a/akka-actor/src/main/scala/akka/actor/FSM.scala +++ b/akka-actor/src/main/scala/akka/actor/FSM.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
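The hunk above deletes the long-deprecated `!!` (send-and-receive-eventually) operator; its own deprecation note points at `(actor ? msg).as[T]` as the replacement. A minimal migration sketch, assuming the Akka 1.x API used elsewhere in this diff and the implicit ActorRef-to-ScalaActorRef conversion mentioned in the ScalaActorRef comment; the `Echo` actor is a placeholder for illustration only.

    import akka.actor._

    class Echo extends Actor {
      def receive = { case msg ⇒ self reply msg }
    }

    val echo = Actor.actorOf[Echo].start()

    // before (removed above): val reply: Option[Any] = echo !! "ping"
    // after: ask returns a Future; as[T] waits up to the timeout and yields an Option
    val reply: Option[String] = (echo ? "ping").as[String]

    echo.stop()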
*/ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/Routing.scala b/akka-actor/src/main/scala/akka/actor/Routing.scala index 6bce48e677..49a133d4c9 100644 --- a/akka-actor/src/main/scala/akka/actor/Routing.scala +++ b/akka-actor/src/main/scala/akka/actor/Routing.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/Scheduler.scala b/akka-actor/src/main/scala/akka/actor/Scheduler.scala index 4096188a88..efbec15239 100644 --- a/akka-actor/src/main/scala/akka/actor/Scheduler.scala +++ b/akka-actor/src/main/scala/akka/actor/Scheduler.scala @@ -18,7 +18,6 @@ package akka.actor import akka.event.EventHandler import akka.AkkaException import java.util.concurrent.atomic.AtomicLong -import java.lang.ref.WeakReference import java.util.concurrent._ import java.lang.RuntimeException @@ -27,20 +26,19 @@ object Scheduler { case class SchedulerException(msg: String, e: Throwable) extends AkkaException(msg, e) - @volatile - private var service = Executors.newSingleThreadScheduledExecutor(SchedulerThreadFactory) + private[akka] val service = Executors.newSingleThreadScheduledExecutor(SchedulerThreadFactory) private def createSendRunnable(receiver: ActorRef, message: Any, throwWhenReceiverExpired: Boolean): Runnable = { receiver match { - case local: LocalActorRef => - val ref = new WeakReference[ActorRef](local) + case local: LocalActorRef ⇒ + val uuid = local.uuid new Runnable { - def run = ref.get match { - case null => if(throwWhenReceiverExpired) throw new RuntimeException("Receiver not found: GC:ed") - case actor => actor ! message + def run = Actor.registry.local.actorFor(uuid) match { + case None ⇒ if (throwWhenReceiverExpired) throw new RuntimeException("Receiver not found, unregistered") + case Some(actor) ⇒ actor ! message } } - case other => new Runnable { def run = other ! message } + case other ⇒ new Runnable { def run = other ! message } } } @@ -128,18 +126,7 @@ object Scheduler { } } - def shutdown() { - synchronized { - service.shutdown() - } - } - - def restart() { - synchronized { - shutdown() - service = Executors.newSingleThreadScheduledExecutor(SchedulerThreadFactory) - } - } + private[akka] def shutdown() { service.shutdown() } } private object SchedulerThreadFactory extends ThreadFactory { diff --git a/akka-actor/src/main/scala/akka/actor/Supervisor.scala b/akka-actor/src/main/scala/akka/actor/Supervisor.scala index 85e206be46..5063ca994a 100644 --- a/akka-actor/src/main/scala/akka/actor/Supervisor.scala +++ b/akka-actor/src/main/scala/akka/actor/Supervisor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/TypedActor.scala b/akka-actor/src/main/scala/akka/actor/TypedActor.scala index 3282a36555..1511419184 100644 --- a/akka-actor/src/main/scala/akka/actor/TypedActor.scala +++ b/akka-actor/src/main/scala/akka/actor/TypedActor.scala @@ -1,7 +1,7 @@ package akka.actor /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
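The Scheduler hunk above stops holding a `WeakReference` to a `LocalActorRef` and instead captures only its `uuid`, resolving the receiver through `Actor.registry.local.actorFor(uuid)` when the task fires; an unregistered receiver means the send is dropped, or raises when `throwWhenReceiverExpired` is set. A minimal usage sketch of the unchanged public API, with a placeholder `Reminder` actor:

    import java.util.concurrent.TimeUnit
    import akka.actor.{ Actor, Scheduler }

    class Reminder extends Actor {
      def receive = { case "wake-up" ⇒ println("receiver still registered, message delivered") }
    }

    val ref = Actor.actorOf[Reminder].start()

    // the receiver is looked up by uuid in the local registry at fire time
    Scheduler.scheduleOnce(ref, "wake-up", 50, TimeUnit.MILLISECONDS)

    // if ref is stopped (and thus unregistered) before the delay elapses,
    // the scheduled send is skipped instead of resurrecting a dead reference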
*/ import akka.japi.{ Creator, Option ⇒ JOption } @@ -10,6 +10,8 @@ import akka.dispatch.{ MessageDispatcher, Dispatchers, Future, FutureTimeoutExce import java.lang.reflect.{ InvocationTargetException, Method, InvocationHandler, Proxy } import akka.util.{ Duration } import java.util.concurrent.atomic.{ AtomicReference ⇒ AtomVar } +import akka.serialization.Serialization +import com.sun.xml.internal.ws.developer.MemberSubmissionAddressing.Validation //TODO Document this class, not only in Scaladoc, but also in a dedicated typed-actor.rst, for both java and scala /** @@ -87,16 +89,35 @@ object TypedActor { } } catch { case i: InvocationTargetException ⇒ throw i.getTargetException } - private def writeReplace(): AnyRef = new SerializedMethodCall(method.getDeclaringClass, method.getName, method.getParameterTypes, parameters) + private def writeReplace(): AnyRef = { + val serializedParameters: Array[(Array[Byte],String)] = parameters match { + case null => null + case a if a.length == 0 => Array[(Array[Byte],String)]() + case a => a.map( { + case null => null + case value => Serialization.serializerFor(value.getClass).fold(throw _, s => (s.toBinary(value), s.getClass.getName)) + }) + } + new SerializedMethodCall(method.getDeclaringClass, method.getName, method.getParameterTypes, serializedParameters) + } } /** * Represents the serialized form of a MethodCall, uses readResolve and writeReplace to marshall the call */ - case class SerializedMethodCall(ownerType: Class[_], methodName: String, parameterTypes: Array[Class[_]], parameterValues: Array[AnyRef]) { + case class SerializedMethodCall(ownerType: Class[_], methodName: String, parameterTypes: Array[Class[_]], serializedParameters: Array[(Array[Byte],String)]) { //TODO implement writeObject and readObject to serialize //TODO Possible optimization is to special encode the parameter-types to conserve space - private def readResolve(): AnyRef = MethodCall(ownerType.getDeclaredMethod(methodName, parameterTypes: _*), parameterValues) + private def readResolve(): AnyRef = { + MethodCall(ownerType.getDeclaredMethod(methodName, parameterTypes: _*), serializedParameters match { + case null => null + case a if a.length == 0 => Array[AnyRef]() + case a => a.map( { + case null => null + case (bytes, serializerFQN) => Serialization.serializerOf(serializerFQN).fold(throw _, _.fromBinary(bytes)) + }) + }) + } } /** diff --git a/akka-actor/src/main/scala/akka/actor/UntypedActor.scala b/akka-actor/src/main/scala/akka/actor/UntypedActor.scala index d561ce6221..d23002c5bc 100644 --- a/akka-actor/src/main/scala/akka/actor/UntypedActor.scala +++ b/akka-actor/src/main/scala/akka/actor/UntypedActor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor diff --git a/akka-actor/src/main/scala/akka/actor/package.scala b/akka-actor/src/main/scala/akka/actor/package.scala index 01ef989f2a..98d87106cc 100644 --- a/akka-actor/src/main/scala/akka/actor/package.scala +++ b/akka-actor/src/main/scala/akka/actor/package.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
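The TypedActor hunk above serializes every `MethodCall` parameter through `Serialization.serializerFor`, storing `(bytes, serializer FQN)` pairs, and rebuilds the arguments in `readResolve` via `Serialization.serializerOf`. The same `writeReplace`/`readResolve` hand-off can be shown on a toy pair of classes using plain Java serialization; all names below are placeholders.

    import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream }

    // the payload is replaced on the wire by a compact proxy
    class Greeting(val text: String) extends java.io.Serializable {
      private def writeReplace(): AnyRef = SerializedGreeting(text.getBytes("UTF-8"))
    }

    // the proxy that actually travels; readResolve restores the original type
    case class SerializedGreeting(bytes: Array[Byte]) extends java.io.Serializable {
      private def readResolve(): AnyRef = new Greeting(new String(bytes, "UTF-8"))
    }

    val out = new ByteArrayOutputStream
    new ObjectOutputStream(out).writeObject(new Greeting("hello"))
    val in  = new ObjectInputStream(new ByteArrayInputStream(out.toByteArray))
    assert(in.readObject().asInstanceOf[Greeting].text == "hello")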
*/ package akka diff --git a/akka-actor/src/main/scala/akka/cluster/ClusterInterface.scala b/akka-actor/src/main/scala/akka/cluster/ClusterInterface.scala index 714207458c..690a69841f 100644 --- a/akka-actor/src/main/scala/akka/cluster/ClusterInterface.scala +++ b/akka-actor/src/main/scala/akka/cluster/ClusterInterface.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.cluster @@ -122,8 +122,6 @@ object NodeAddress { trait ClusterNode { import ChangeListener._ - val isConnected = new AtomicBoolean(false) - private[cluster] val locallyCachedMembershipNodes = new ConcurrentSkipListSet[String]() def membershipNodes: Array[String] @@ -136,7 +134,7 @@ trait ClusterNode { def remoteServerAddress: InetSocketAddress - def isRunning: Boolean = isConnected.get + def isRunning: Boolean def start(): ClusterNode diff --git a/akka-actor/src/main/scala/akka/config/Config.scala b/akka-actor/src/main/scala/akka/config/Config.scala index 68660ef840..0c1505ae21 100644 --- a/akka-actor/src/main/scala/akka/config/Config.scala +++ b/akka-actor/src/main/scala/akka/config/Config.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.config diff --git a/akka-actor/src/main/scala/akka/config/ConfigParser.scala b/akka-actor/src/main/scala/akka/config/ConfigParser.scala index 6138143db3..91f40df096 100644 --- a/akka-actor/src/main/scala/akka/config/ConfigParser.scala +++ b/akka-actor/src/main/scala/akka/config/ConfigParser.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. * * Based on Configgy by Robey Pointer. * Copyright 2009 Robey Pointer diff --git a/akka-actor/src/main/scala/akka/config/Configuration.scala b/akka-actor/src/main/scala/akka/config/Configuration.scala index a799a973bd..8213d853eb 100644 --- a/akka-actor/src/main/scala/akka/config/Configuration.scala +++ b/akka-actor/src/main/scala/akka/config/Configuration.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. * * Based on Configgy by Robey Pointer. * Copyright 2009 Robey Pointer diff --git a/akka-actor/src/main/scala/akka/config/Configurator.scala b/akka-actor/src/main/scala/akka/config/Configurator.scala index 2818339b0f..4f7ed90a26 100644 --- a/akka-actor/src/main/scala/akka/config/Configurator.scala +++ b/akka-actor/src/main/scala/akka/config/Configurator.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.config diff --git a/akka-actor/src/main/scala/akka/config/Importer.scala b/akka-actor/src/main/scala/akka/config/Importer.scala index c5ec6cb9b2..6045662f35 100644 --- a/akka-actor/src/main/scala/akka/config/Importer.scala +++ b/akka-actor/src/main/scala/akka/config/Importer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. * * Based on Configgy by Robey Pointer. * Copyright 2009 Robey Pointer diff --git a/akka-actor/src/main/scala/akka/config/SupervisionConfig.scala b/akka-actor/src/main/scala/akka/config/SupervisionConfig.scala index b06c01b119..6c96ae1b0d 100644 --- a/akka-actor/src/main/scala/akka/config/SupervisionConfig.scala +++ b/akka-actor/src/main/scala/akka/config/SupervisionConfig.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.config diff --git a/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala index 98b7465e5a..a5787e6c0d 100644 --- a/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala +++ b/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.dispatch diff --git a/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala index 4516597acf..5097f69aa0 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.dispatch @@ -160,8 +160,6 @@ class Dispatcher( private[akka] def reRegisterForExecution(mbox: MessageQueue with ExecutableMailbox): Unit = registerForExecution(mbox) - private[akka] def doneProcessingMailbox(mbox: MessageQueue with ExecutableMailbox): Unit = () - protected override def cleanUpMailboxFor(actorRef: ActorRef) { val m = getMailbox(actorRef) if (!m.isEmpty) { @@ -195,19 +193,13 @@ trait ExecutableMailbox extends Runnable { self: MessageQueue ⇒ def dispatcher: Dispatcher final def run = { - try { - processMailbox() - } catch { - case ie: InterruptedException ⇒ - } - finally { + try { processMailbox() } catch { + case ie: InterruptedException => Thread.currentThread().interrupt() //Restore interrupt + } finally { dispatcherLock.unlock() + if (!self.isEmpty) + dispatcher.reRegisterForExecution(this) } - - if (!self.isEmpty) - dispatcher.reRegisterForExecution(this) - - dispatcher.doneProcessingMailbox(this) } /** diff --git a/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala b/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala index 963927582f..9af29eed98 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.dispatch diff --git a/akka-actor/src/main/scala/akka/dispatch/Future.scala b/akka-actor/src/main/scala/akka/dispatch/Future.scala index a02b0c8910..8705d3da4e 100644 --- a/akka-actor/src/main/scala/akka/dispatch/Future.scala +++ b/akka-actor/src/main/scala/akka/dispatch/Future.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.dispatch diff --git a/akka-actor/src/main/scala/akka/dispatch/MailboxHandling.scala b/akka-actor/src/main/scala/akka/dispatch/MailboxHandling.scala index 8a0579676e..03be4d08dc 100644 --- a/akka-actor/src/main/scala/akka/dispatch/MailboxHandling.scala +++ b/akka-actor/src/main/scala/akka/dispatch/MailboxHandling.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
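The Dispatcher hunk above reworks `ExecutableMailbox.run` so an `InterruptedException` is no longer silently swallowed: the interrupt flag is restored with `Thread.currentThread().interrupt()`, and re-registration for execution now happens in the `finally` block. The swallow-and-restore idiom in isolation, independent of the mailbox code (the worker loop below is illustrative only):

    import java.util.concurrent.BlockingQueue

    // generic worker loop: never swallow an interrupt without restoring the flag
    def drain(queue: BlockingQueue[Runnable]): Unit =
      try {
        while (true) queue.take().run()      // take() blocks and may be interrupted
      } catch {
        case ie: InterruptedException ⇒
          Thread.currentThread().interrupt() // let callers further up see the interrupt
      } finally {
        // cleanup that must run whether the loop finished, failed or was interrupted
        println("worker done, interrupted=" + Thread.currentThread().isInterrupted)
      }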
*/ package akka.dispatch diff --git a/akka-actor/src/main/scala/akka/dispatch/MessageHandling.scala b/akka-actor/src/main/scala/akka/dispatch/MessageHandling.scala index b5c50ea939..42caa4fca8 100644 --- a/akka-actor/src/main/scala/akka/dispatch/MessageHandling.scala +++ b/akka-actor/src/main/scala/akka/dispatch/MessageHandling.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.dispatch @@ -127,6 +127,10 @@ trait MessageDispatcher { } } + /** + * Only "private[akka] for the sake of intercepting calls, DO NOT CALL THIS OUTSIDE OF THE DISPATCHER, + * and only call it under the dispatcher-guard, see "attach" for the only invocation + */ private[akka] def register(actorRef: ActorRef) { if (actorRef.mailbox eq null) actorRef.mailbox = createMailbox(actorRef) @@ -139,6 +143,10 @@ trait MessageDispatcher { } } + /** + * Only "private[akka] for the sake of intercepting calls, DO NOT CALL THIS OUTSIDE OF THE DISPATCHER, + * and only call it under the dispatcher-guard, see "detach" for the only invocation + */ private[akka] def unregister(actorRef: ActorRef) = { if (uuids remove actorRef.uuid) { cleanUpMailboxFor(actorRef) diff --git a/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala index e03e6af9e2..80c4a95031 100644 --- a/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala +++ b/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.dispatch @@ -34,14 +34,15 @@ class PinnedDispatcher(_actor: ActorRef, _name: String, _mailboxType: MailboxTyp private[akka] val owner = new AtomicReference[ActorRef](_actor) - override def register(actorRef: ActorRef) = { + //Relies on an external lock provided by MessageDispatcher.attach + private[akka] override def register(actorRef: ActorRef) = { val actor = owner.get() if ((actor ne null) && actorRef != actor) throw new IllegalArgumentException("Cannot register to anyone but " + actor) owner.compareAndSet(null, actorRef) //Register if unregistered super.register(actorRef) } - - override def unregister(actorRef: ActorRef) = { + //Relies on an external lock provided by MessageDispatcher.detach + private[akka] override def unregister(actorRef: ActorRef) = { super.unregister(actorRef) owner.compareAndSet(actorRef, null) //Unregister (prevent memory leak) } diff --git a/akka-actor/src/main/scala/akka/dispatch/ThreadPoolBuilder.scala b/akka-actor/src/main/scala/akka/dispatch/ThreadPoolBuilder.scala index b52e17d3a2..737b4dda59 100644 --- a/akka-actor/src/main/scala/akka/dispatch/ThreadPoolBuilder.scala +++ b/akka-actor/src/main/scala/akka/dispatch/ThreadPoolBuilder.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.dispatch diff --git a/akka-actor/src/main/scala/akka/event/EventHandler.scala b/akka-actor/src/main/scala/akka/event/EventHandler.scala index 038cd9f959..134543e284 100644 --- a/akka-actor/src/main/scala/akka/event/EventHandler.scala +++ b/akka-actor/src/main/scala/akka/event/EventHandler.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
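The MessageDispatcher hunks above document that `register`/`unregister` must only be called while the dispatcher guard is held (their sole callers are `attach`/`detach`), and the PinnedDispatcher overrides are narrowed to `private[akka]` to match. The underlying shape is a public locking wrapper around a private, unsynchronized worker; a minimal sketch with illustrative names:

    import java.util.concurrent.locks.ReentrantLock
    import scala.collection.mutable

    class Registry[A] {
      private val guard   = new ReentrantLock
      private val entries = mutable.Set.empty[A]

      // public entry point: owns the lock
      def attach(a: A): Unit = {
        guard.lock()
        try register(a) finally guard.unlock()
      }

      // only ever called with 'guard' held, hence not exposed publicly
      private def register(a: A): Unit = {
        entries += a
        // the real dispatcher creates the mailbox and bookkeeping here
      }
    }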
*/ package akka.event diff --git a/akka-actor/src/main/scala/akka/package.scala b/akka-actor/src/main/scala/akka/package.scala index 269e3e068e..e96a1eddbf 100644 --- a/akka-actor/src/main/scala/akka/package.scala +++ b/akka-actor/src/main/scala/akka/package.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ import akka.dispatch.{ FutureTimeoutException, Future } diff --git a/akka-actor/src/main/scala/akka/remoteinterface/RemoteEventHandler.scala b/akka-actor/src/main/scala/akka/remoteinterface/RemoteEventHandler.scala index 74200894ba..ee24efe99a 100644 --- a/akka-actor/src/main/scala/akka/remoteinterface/RemoteEventHandler.scala +++ b/akka-actor/src/main/scala/akka/remoteinterface/RemoteEventHandler.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.remoteinterface diff --git a/akka-actor/src/main/scala/akka/remoteinterface/RemoteInterface.scala b/akka-actor/src/main/scala/akka/remoteinterface/RemoteInterface.scala index b1b1a79ac9..2b18ae2676 100644 --- a/akka-actor/src/main/scala/akka/remoteinterface/RemoteInterface.scala +++ b/akka-actor/src/main/scala/akka/remoteinterface/RemoteInterface.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.remoteinterface diff --git a/akka-actor/src/main/scala/akka/routing/ConsistentHash.scala b/akka-actor/src/main/scala/akka/routing/ConsistentHash.scala index 9e9a5e41ac..3abadd01d8 100644 --- a/akka-actor/src/main/scala/akka/routing/ConsistentHash.scala +++ b/akka-actor/src/main/scala/akka/routing/ConsistentHash.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/Listeners.scala b/akka-actor/src/main/scala/akka/routing/Listeners.scala index 448511d8dd..0f2baf85ff 100644 --- a/akka-actor/src/main/scala/akka/routing/Listeners.scala +++ b/akka-actor/src/main/scala/akka/routing/Listeners.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/Pool.scala b/akka-actor/src/main/scala/akka/routing/Pool.scala index ea280adcae..ef6db353a4 100644 --- a/akka-actor/src/main/scala/akka/routing/Pool.scala +++ b/akka-actor/src/main/scala/akka/routing/Pool.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/routing/Routing.scala b/akka-actor/src/main/scala/akka/routing/Routing.scala index e0679fc0eb..b285d6783a 100644 --- a/akka-actor/src/main/scala/akka/routing/Routing.scala +++ b/akka-actor/src/main/scala/akka/routing/Routing.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.routing diff --git a/akka-actor/src/main/scala/akka/serialization/Format.scala b/akka-actor/src/main/scala/akka/serialization/Format.scala index 0e221c9b01..85dc0abec0 100644 --- a/akka-actor/src/main/scala/akka/serialization/Format.scala +++ b/akka-actor/src/main/scala/akka/serialization/Format.scala @@ -1,7 +1,7 @@ package akka.serialization /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ import akka.actor.Actor diff --git a/akka-actor/src/main/scala/akka/serialization/Serialization.scala b/akka-actor/src/main/scala/akka/serialization/Serialization.scala index ae56c8d2b1..49dd527be6 100644 --- a/akka-actor/src/main/scala/akka/serialization/Serialization.scala +++ b/akka-actor/src/main/scala/akka/serialization/Serialization.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.serialization @@ -9,77 +9,84 @@ import akka.config.Config import akka.config.Config._ import akka.actor.{ ActorRef, Actor } import akka.AkkaException +import akka.util.ReflectiveAccess + + +case class NoSerializerFoundException(m: String) extends AkkaException(m) /** * Serialization module. Contains methods for serialization and deserialization as well as * locating a Serializer for a particular class as defined in the mapping in the 'akka.conf' file. */ object Serialization { - case class NoSerializerFoundException(m: String) extends AkkaException(m) - + //TODO document me def serialize(o: AnyRef): Either[Exception, Array[Byte]] = serializerFor(o.getClass) match { case Left(ex) ⇒ Left(ex) case Right(serializer) ⇒ Right(serializer.toBinary(o)) } - + //TODO document me def deserialize( bytes: Array[Byte], clazz: Class[_], classLoader: Option[ClassLoader]): Either[Exception, AnyRef] = serializerFor(clazz) match { - case Left(ex) ⇒ Left(ex) + case Left(e) ⇒ Left(e) case Right(serializer) ⇒ Right(serializer.fromBinary(bytes, Some(clazz), classLoader)) } - - def serializerFor(clazz: Class[_]): Either[Exception, Serializer] = { - serializerMap.get(clazz.getName) match { - case Some(serializerName: String) ⇒ - getClassFor(serializerName) match { - case Right(serializer) ⇒ Right(serializer.newInstance.asInstanceOf[Serializer]) - case Left(exception) ⇒ Left(exception) - } - case _ ⇒ - defaultSerializer match { - case Some(s: Serializer) ⇒ Right(s) - case None ⇒ Left(NoSerializerFoundException("No default serializer found for " + clazz)) - } + //TODO document me + //TODO memoize the lookups + def serializerFor(clazz: Class[_]): Either[Exception, Serializer] = //TODO fall back on BestMatchClass THEN default + getClassFor(serializerMap.get(clazz.getName).getOrElse(serializers("default"))) match { + case Right(serializer) ⇒ Right(serializer.newInstance.asInstanceOf[Serializer]) + case Left(e) => Left(e) } - } - private def defaultSerializer = serializers.get("default") match { - case Some(ser: String) ⇒ - getClassFor(ser) match { - case Right(serializer) ⇒ Some(serializer.newInstance.asInstanceOf[Serializer]) - case Left(exception) ⇒ None - } - case None ⇒ None - } + /** + * Tries to load the specified Serializer by the FQN + */ + def serializerOf(serializerFQN: String): Either[Exception, Serializer] = + createInstance(serializerFQN, ReflectiveAccess.emptyParams, ReflectiveAccess.emptyArguments) - private def getSerializerInstanceForBestMatchClass(cl: Class[_]) = bindings match { - case Some(mappings) ⇒ mappings find { - case (clazzName, ser) ⇒ + private def serializerForBestMatchClass(cl: Class[_]): Either[Exception, Serializer] = { + if (bindings.isEmpty) + Left(NoSerializerFoundException("No mapping serializer found for " + cl)) + else { + bindings find { + case (clazzName, _) ⇒ getClassFor(clazzName) match { case Right(clazz) ⇒ clazz.isAssignableFrom(cl) case _ ⇒ false } - } map { - case (_, ser) ⇒ - getClassFor(ser) match { - case Right(s) ⇒ Right(s.newInstance.asInstanceOf[Serializer]) - case _ ⇒ Left(new Exception("Error 
instantiating " + ser)) - } - } getOrElse Left(NoSerializerFoundException("No mapping serializer found for " + cl)) - case None ⇒ Left(NoSerializerFoundException("No mapping serializer found for " + cl)) + } map { + case (_, ser) ⇒ serializerOf(ser) + } getOrElse Left(NoSerializerFoundException("No mapping serializer found for " + cl)) + } } - //TODO: Add type and docs - val serializers = config.getSection("akka.actor.serializers").map(_.map).getOrElse(Map("default" -> "akka.serialization.JavaSerializer")) + /** + * A Map of serializer from alias to implementation (FQN of a class implementing akka.serialization.Serializer) + * By default always contains the following mapping: "default" -> "akka.serialization.JavaSerializer" + * But "default" can be overridden in config + */ + val serializers: Map[String, String] = config.getSection("akka.actor.serializers") map { + _.map.foldLeft(Map("default" -> "akka.serialization.JavaSerializer")) { + case (result, (k: String, v: String)) => result + (k -> v) + case (result, _) => result + } + } getOrElse Map("default" -> "akka.serialization.JavaSerializer") - //TODO: Add type and docs - val bindings = config.getSection("akka.actor.serialization-bindings") - .map(_.map) - .map(m ⇒ Map() ++ m.map { case (k, v: List[String]) ⇒ Map() ++ v.map((_, k)) }.flatten) + /** + * bindings is a Map whose keys = FQN of class that is serializable and values = the alias of the serializer to be used + */ + val bindings: Map[String, String] = config.getSection("akka.actor.serialization-bindings") map { + _.map.foldLeft(Map[String,String]()) { + case (result, (k: String, vs: List[_])) => result ++ (vs collect { case v: String => (v, k) }) //All keys which are lists, take the Strings from them and Map them + case (result, _) => result //For any other values, just skip them, TODO: print out warnings? + } + } getOrElse Map() - //TODO: Add type and docs - val serializerMap = bindings.map(m ⇒ m.map { case (k, v: String) ⇒ (k, serializers(v)) }).getOrElse(Map()) + /** + * serializerMap is a Map whose keys = FQN of class that is serializable and values = the FQN of the serializer to be used for that class + */ + val serializerMap: Map[String, String] = bindings mapValues serializers } diff --git a/akka-actor/src/main/scala/akka/serialization/Serializer.scala b/akka-actor/src/main/scala/akka/serialization/Serializer.scala index 7ac3eea2df..e79b2bcd37 100644 --- a/akka-actor/src/main/scala/akka/serialization/Serializer.scala +++ b/akka-actor/src/main/scala/akka/serialization/Serializer.scala @@ -1,7 +1,7 @@ package akka.serialization /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ import java.io.{ ObjectOutputStream, ByteArrayOutputStream, ObjectInputStream, ByteArrayInputStream } diff --git a/akka-actor/src/main/scala/akka/util/Address.scala b/akka-actor/src/main/scala/akka/util/Address.scala index 23920b725e..c3819a3087 100644 --- a/akka-actor/src/main/scala/akka/util/Address.scala +++ b/akka-actor/src/main/scala/akka/util/Address.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
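The Serialization hunk above derives three maps from `akka.conf`: `serializers` (alias to serializer class, with `"default"` bound to `akka.serialization.JavaSerializer` unless overridden under `akka.actor.serializers`), `bindings` (class FQN to alias, read from `akka.actor.serialization-bindings`) and `serializerMap` (class FQN to serializer FQN). A minimal usage sketch of the resulting API; `Order` is a placeholder type that, with no binding configured, falls back to the default Java serializer.

    import akka.serialization.Serialization

    case class Order(id: Long) extends Serializable

    val bytes: Array[Byte] = Serialization.serialize(Order(42)) match {
      case Right(b) ⇒ b
      case Left(e)  ⇒ throw e
    }

    val order: Order = Serialization.deserialize(bytes, classOf[Order], None) match {
      case Right(o) ⇒ o.asInstanceOf[Order]
      case Left(e)  ⇒ throw e
    }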
*/ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/AkkaLoader.scala b/akka-actor/src/main/scala/akka/util/AkkaLoader.scala index cb246f2ecf..7057ecc561 100644 --- a/akka-actor/src/main/scala/akka/util/AkkaLoader.scala +++ b/akka-actor/src/main/scala/akka/util/AkkaLoader.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/Bootable.scala b/akka-actor/src/main/scala/akka/util/Bootable.scala index d07643e1ac..87acbe2781 100644 --- a/akka-actor/src/main/scala/akka/util/Bootable.scala +++ b/akka-actor/src/main/scala/akka/util/Bootable.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/BoundedBlockingQueue.scala b/akka-actor/src/main/scala/akka/util/BoundedBlockingQueue.scala index 48d25613ad..d198a81984 100644 --- a/akka-actor/src/main/scala/akka/util/BoundedBlockingQueue.scala +++ b/akka-actor/src/main/scala/akka/util/BoundedBlockingQueue.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/BoxedType.scala b/akka-actor/src/main/scala/akka/util/BoxedType.scala index 7bcacaa5f9..4051dc6a51 100644 --- a/akka-actor/src/main/scala/akka/util/BoxedType.scala +++ b/akka-actor/src/main/scala/akka/util/BoxedType.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/ClassLoaderObjectInputStream.scala b/akka-actor/src/main/scala/akka/util/ClassLoaderObjectInputStream.scala index 1749c118d0..07bf054eaa 100644 --- a/akka-actor/src/main/scala/akka/util/ClassLoaderObjectInputStream.scala +++ b/akka-actor/src/main/scala/akka/util/ClassLoaderObjectInputStream.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/Convert.scala b/akka-actor/src/main/scala/akka/util/Convert.scala index d91267bf6d..278b94f422 100644 --- a/akka-actor/src/main/scala/akka/util/Convert.scala +++ b/akka-actor/src/main/scala/akka/util/Convert.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/Crypt.scala b/akka-actor/src/main/scala/akka/util/Crypt.scala index bf83a546a7..164a271432 100644 --- a/akka-actor/src/main/scala/akka/util/Crypt.scala +++ b/akka-actor/src/main/scala/akka/util/Crypt.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/Duration.scala b/akka-actor/src/main/scala/akka/util/Duration.scala index 6e00ef0c17..112940386b 100644 --- a/akka-actor/src/main/scala/akka/util/Duration.scala +++ b/akka-actor/src/main/scala/akka/util/Duration.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/HashCode.scala b/akka-actor/src/main/scala/akka/util/HashCode.scala index 4dc74c932a..d515a57ec5 100644 --- a/akka-actor/src/main/scala/akka/util/HashCode.scala +++ b/akka-actor/src/main/scala/akka/util/HashCode.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/Helpers.scala b/akka-actor/src/main/scala/akka/util/Helpers.scala index 6b80dfec5a..53a3e6c1a5 100644 --- a/akka-actor/src/main/scala/akka/util/Helpers.scala +++ b/akka-actor/src/main/scala/akka/util/Helpers.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/Index.scala b/akka-actor/src/main/scala/akka/util/Index.scala index d7df32efd6..d87239c88e 100644 --- a/akka-actor/src/main/scala/akka/util/Index.scala +++ b/akka-actor/src/main/scala/akka/util/Index.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/JMX.scala b/akka-actor/src/main/scala/akka/util/JMX.scala index d75269c1e0..33b118d547 100644 --- a/akka-actor/src/main/scala/akka/util/JMX.scala +++ b/akka-actor/src/main/scala/akka/util/JMX.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/ListenerManagement.scala b/akka-actor/src/main/scala/akka/util/ListenerManagement.scala index e667890914..164157f8e1 100644 --- a/akka-actor/src/main/scala/akka/util/ListenerManagement.scala +++ b/akka-actor/src/main/scala/akka/util/ListenerManagement.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.util diff --git a/akka-actor/src/main/scala/akka/util/LockUtil.scala b/akka-actor/src/main/scala/akka/util/LockUtil.scala index 5a334e44a9..27cb8807ae 100644 --- a/akka-actor/src/main/scala/akka/util/LockUtil.scala +++ b/akka-actor/src/main/scala/akka/util/LockUtil.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.util @@ -125,22 +125,49 @@ class Switch(startAsOn: Boolean = false) { } else false } + /** + * Executes the provided action if the lock is on under a lock, so be _very_ careful with longrunning/blocking operations in it + * Only executes the action if the switch is on, and switches it off immediately after obtaining the lock + * Will switch it back on if the provided action throws an exception + */ def switchOff(action: ⇒ Unit): Boolean = transcend(from = true, action) + + /** + * Executes the provided action if the lock is off under a lock, so be _very_ careful with longrunning/blocking operations in it + * Only executes the action if the switch is off, and switches it on immediately after obtaining the lock + * Will switch it back off if the provided action throws an exception + */ def switchOn(action: ⇒ Unit): Boolean = transcend(from = false, action) + /** + * Switches the switch off (if on), uses locking + */ def switchOff: Boolean = synchronized { switch.compareAndSet(true, false) } + + /** + * Switches the switch on (if off), uses locking + */ def switchOn: Boolean = synchronized { switch.compareAndSet(false, true) } + /** + * Executes the provided action and returns its value if the switch is IMMEDIATELY on (i.e. no lock involved) + */ def ifOnYield[T](action: ⇒ T): Option[T] = { if (switch.get) Some(action) else None } + /** + * Executes the provided action and returns its value if the switch is IMMEDIATELY off (i.e. no lock involved) + */ def ifOffYield[T](action: ⇒ T): Option[T] = { if (!switch.get) Some(action) else None } + /** + * Executes the provided action and returns if the action was executed or not, if the switch is IMMEDIATELY on (i.e. no lock involved) + */ def ifOn(action: ⇒ Unit): Boolean = { if (switch.get) { action @@ -148,6 +175,9 @@ class Switch(startAsOn: Boolean = false) { } else false } + /** + * Executes the provided action and returns if the action was executed or not, if the switch is IMMEDIATELY off (i.e. 
no lock involved) + */ def ifOff(action: ⇒ Unit): Boolean = { if (!switch.get) { action @@ -155,16 +185,28 @@ class Switch(startAsOn: Boolean = false) { } else false } + /** + * Executes the provided action and returns its value if the switch is on, waiting for any pending changes to happen before (locking) + * Be careful of longrunning or blocking within the provided action as it can lead to deadlocks or bad performance + */ def whileOnYield[T](action: ⇒ T): Option[T] = synchronized { if (switch.get) Some(action) else None } + /** + * Executes the provided action and returns its value if the switch is off, waiting for any pending changes to happen before (locking) + * Be careful of longrunning or blocking within the provided action as it can lead to deadlocks or bad performance + */ def whileOffYield[T](action: ⇒ T): Option[T] = synchronized { if (!switch.get) Some(action) else None } + /** + * Executes the provided action and returns if the action was executed or not, if the switch is on, waiting for any pending changes to happen before (locking) + * Be careful of longrunning or blocking within the provided action as it can lead to deadlocks or bad performance + */ def whileOn(action: ⇒ Unit): Boolean = synchronized { if (switch.get) { action @@ -172,6 +214,10 @@ class Switch(startAsOn: Boolean = false) { } else false } + /** + * Executes the provided action and returns if the action was executed or not, if the switch is off, waiting for any pending changes to happen before (locking) + * Be careful of longrunning or blocking within the provided action as it can lead to deadlocks or bad performance + */ def whileOff(action: ⇒ Unit): Boolean = synchronized { if (switch.get) { action @@ -179,10 +225,21 @@ class Switch(startAsOn: Boolean = false) { } else false } - def ifElseYield[T](on: ⇒ T)(off: ⇒ T) = synchronized { + /** + * Executes the provided callbacks depending on if the switch is either on or off waiting for any pending changes to happen before (locking) + * Be careful of longrunning or blocking within the provided action as it can lead to deadlocks or bad performance + */ + def fold[T](on: ⇒ T)(off: ⇒ T) = synchronized { if (switch.get) on else off } + /** + * Returns whether the switch is IMMEDIATELY on (no locking) + */ def isOn = switch.get + + /** + * Returns whether the switch is IMMEDDIATELY off (no locking) + */ def isOff = !isOn } diff --git a/akka-actor/src/main/scala/akka/util/ReflectiveAccess.scala b/akka-actor/src/main/scala/akka/util/ReflectiveAccess.scala index b1bfe83466..3f0f33f01c 100644 --- a/akka-actor/src/main/scala/akka/util/ReflectiveAccess.scala +++ b/akka-actor/src/main/scala/akka/util/ReflectiveAccess.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.util @@ -23,6 +23,8 @@ import java.net.InetSocketAddress object ReflectiveAccess { val loader = getClass.getClassLoader + val emptyParams: Array[Class[_]] = Array() + val emptyArguments: Array[AnyRef] = Array() /** * Reflective access to the Cluster module. diff --git a/akka-actor/src/main/scala/akka/util/duration/package.scala b/akka-actor/src/main/scala/akka/util/duration/package.scala index 26a24929c9..97e0e82c39 100644 --- a/akka-actor/src/main/scala/akka/util/duration/package.scala +++ b/akka-actor/src/main/scala/akka/util/duration/package.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
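The LockUtil hunk above documents `Switch`: `switchOn`/`switchOff` flip the flag under the lock, run the supplied action only when the transition actually happens and roll the flag back if the action throws; `isOn`/`ifOnYield` read the flag without locking; and `ifElseYield` is renamed to `fold`. A minimal usage sketch of that API, with placeholder service names:

    import akka.util.Switch

    val running = new Switch(false)

    def start(): Unit = running switchOn {
      // runs at most once per off-to-on transition, under the Switch's lock
      println("starting connections, schedulers, ...")
    }

    def stop(): Unit = running switchOff {
      // runs only if we were on; the flag is switched back on if this block throws
      println("tearing everything down")
    }

    def status: String = running.fold("up")("down") // locking read covering both branches
    def isUp: Boolean  = running.isOn               // lock-free read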
*/ package akka.util diff --git a/akka-camel-typed/src/main/java/akka/camel/consume.java b/akka-camel-typed/src/main/java/akka/camel/consume.java index ebcc2efd29..cac399b162 100644 --- a/akka-camel-typed/src/main/java/akka/camel/consume.java +++ b/akka-camel-typed/src/main/java/akka/camel/consume.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.camel; diff --git a/akka-camel-typed/src/main/scala/akka/camel/TypedCamel.scala b/akka-camel-typed/src/main/scala/akka/camel/TypedCamel.scala index f96bcce264..a3e05c309e 100644 --- a/akka-camel-typed/src/main/scala/akka/camel/TypedCamel.scala +++ b/akka-camel-typed/src/main/scala/akka/camel/TypedCamel.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.camel diff --git a/akka-camel-typed/src/main/scala/akka/camel/TypedConsumer.scala b/akka-camel-typed/src/main/scala/akka/camel/TypedConsumer.scala index 8df76dadbe..3ed0dff0c4 100644 --- a/akka-camel-typed/src/main/scala/akka/camel/TypedConsumer.scala +++ b/akka-camel-typed/src/main/scala/akka/camel/TypedConsumer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.camel diff --git a/akka-camel-typed/src/main/scala/akka/camel/TypedConsumerPublisher.scala b/akka-camel-typed/src/main/scala/akka/camel/TypedConsumerPublisher.scala index fae8426cbe..e3b99e3535 100644 --- a/akka-camel-typed/src/main/scala/akka/camel/TypedConsumerPublisher.scala +++ b/akka-camel-typed/src/main/scala/akka/camel/TypedConsumerPublisher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.camel diff --git a/akka-camel-typed/src/main/scala/akka/camel/component/TypedActorComponent.scala b/akka-camel-typed/src/main/scala/akka/camel/component/TypedActorComponent.scala index 36d6c50516..b446a1fab1 100644 --- a/akka-camel-typed/src/main/scala/akka/camel/component/TypedActorComponent.scala +++ b/akka-camel-typed/src/main/scala/akka/camel/component/TypedActorComponent.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.camel.component diff --git a/akka-camel/src/main/scala/akka/camel/CamelContextLifecycle.scala b/akka-camel/src/main/scala/akka/camel/CamelContextLifecycle.scala index d273c9cb6c..9ff71e0f43 100644 --- a/akka-camel/src/main/scala/akka/camel/CamelContextLifecycle.scala +++ b/akka-camel/src/main/scala/akka/camel/CamelContextLifecycle.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/main/scala/akka/camel/CamelService.scala b/akka-camel/src/main/scala/akka/camel/CamelService.scala index 9aa1c77043..0f9cf017c8 100644 --- a/akka-camel/src/main/scala/akka/camel/CamelService.scala +++ b/akka-camel/src/main/scala/akka/camel/CamelService.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. 
*/ package akka.camel diff --git a/akka-camel/src/main/scala/akka/camel/Consumer.scala b/akka-camel/src/main/scala/akka/camel/Consumer.scala index da45532b82..cfa348e38e 100644 --- a/akka-camel/src/main/scala/akka/camel/Consumer.scala +++ b/akka-camel/src/main/scala/akka/camel/Consumer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/main/scala/akka/camel/ConsumerPublisher.scala b/akka-camel/src/main/scala/akka/camel/ConsumerPublisher.scala index 507124ba2f..22cf5e76a5 100644 --- a/akka-camel/src/main/scala/akka/camel/ConsumerPublisher.scala +++ b/akka-camel/src/main/scala/akka/camel/ConsumerPublisher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/main/scala/akka/camel/Message.scala b/akka-camel/src/main/scala/akka/camel/Message.scala index 5e21df4bff..f15c7041ce 100644 --- a/akka-camel/src/main/scala/akka/camel/Message.scala +++ b/akka-camel/src/main/scala/akka/camel/Message.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/main/scala/akka/camel/Producer.scala b/akka-camel/src/main/scala/akka/camel/Producer.scala index 041f3397ff..3281feff83 100644 --- a/akka-camel/src/main/scala/akka/camel/Producer.scala +++ b/akka-camel/src/main/scala/akka/camel/Producer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/main/scala/akka/camel/TypedCamelAccess.scala b/akka-camel/src/main/scala/akka/camel/TypedCamelAccess.scala index 003fe595b2..81b254ecb6 100644 --- a/akka-camel/src/main/scala/akka/camel/TypedCamelAccess.scala +++ b/akka-camel/src/main/scala/akka/camel/TypedCamelAccess.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.camel diff --git a/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala b/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala index 0000a74503..6fdbd37149 100644 --- a/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala +++ b/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.camel.component @@ -15,9 +15,8 @@ import org.apache.camel.impl.{ DefaultProducer, DefaultEndpoint, DefaultComponen import akka.actor._ import akka.camel.{ Ack, Failure, Message } import akka.camel.CamelMessageConversion.toExchangeAdapter -import akka.dispatch.{ Promise, MessageInvocation, MessageDispatcher } - import scala.reflect.BeanProperty +import akka.dispatch.{FutureTimeoutException, Promise, MessageInvocation, MessageDispatcher} /** * @author Martin Krasser @@ -171,11 +170,9 @@ class ActorProducer(val ep: ActorEndpoint) extends DefaultProducer(ep) with Asyn } } - private def sendSync(exchange: Exchange) = { - import akka.camel.Consumer._ - + private def sendSync(exchange: Exchange) = { val actor = target(exchange) - val result: Any = try { actor !! requestFor(exchange) } catch { case e ⇒ Some(Failure(e)) } + val result: Any = try { (actor ? 
requestFor(exchange)).as[Any] } catch { case e ⇒ Some(Failure(e)) } result match { case Some(Ack) ⇒ { /* no response message to set */ } @@ -184,7 +181,7 @@ class ActorProducer(val ep: ActorEndpoint) extends DefaultProducer(ep) with Asyn case None ⇒ throw new TimeoutException("timeout (%d ms) while waiting response from %s" format (actor.timeout, ep.getEndpointUri)) } - } + } private def sendAsync(exchange: Exchange, sender: Option[ActorRef] = None) = target(exchange).!(requestFor(exchange))(sender) diff --git a/akka-cluster/src/main/java/akka/cluster/zookeeper/ZooKeeperQueue.java b/akka-cluster/src/main/java/akka/cluster/zookeeper/ZooKeeperQueue.java index 00524ed91f..af76896f4b 100644 --- a/akka-cluster/src/main/java/akka/cluster/zookeeper/ZooKeeperQueue.java +++ b/akka-cluster/src/main/java/akka/cluster/zookeeper/ZooKeeperQueue.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.cluster.zookeeper; diff --git a/akka-cluster/src/main/protocol/ClusterProtocol.proto b/akka-cluster/src/main/protocol/ClusterProtocol.proto index e5d2b5ebf0..476548b4ac 100644 --- a/akka-cluster/src/main/protocol/ClusterProtocol.proto +++ b/akka-cluster/src/main/protocol/ClusterProtocol.proto @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ option java_package = "akka.cluster"; diff --git a/akka-cluster/src/main/protocol/RemoteProtocol.proto b/akka-cluster/src/main/protocol/RemoteProtocol.proto index be1020fe7e..5123dc8592 100644 --- a/akka-cluster/src/main/protocol/RemoteProtocol.proto +++ b/akka-cluster/src/main/protocol/RemoteProtocol.proto @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ option java_package = "akka.remote.protocol"; diff --git a/akka-cluster/src/main/scala/akka/cluster/BookKeeperServer.scala b/akka-cluster/src/main/scala/akka/cluster/BookKeeperServer.scala index 24f9918fd8..99e1f308cd 100644 --- a/akka-cluster/src/main/scala/akka/cluster/BookKeeperServer.scala +++ b/akka-cluster/src/main/scala/akka/cluster/BookKeeperServer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/Cluster.scala b/akka-cluster/src/main/scala/akka/cluster/Cluster.scala index 772d614264..9a26ad985b 100644 --- a/akka-cluster/src/main/scala/akka/cluster/Cluster.scala +++ b/akka-cluster/src/main/scala/akka/cluster/Cluster.scala @@ -1,24 +1,22 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
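The ActorProducer hunk above replaces the deprecated `actor !! requestFor(exchange)` with `(actor ? requestFor(exchange)).as[Any]`, keeping the same Option-shaped protocol: `Some(reply)` is an answer (`Ack`, `Failure` or a plain message) and `None` means the ask timed out. That convention in isolation, as a hedged sketch assuming the ActorRef-to-ScalaActorRef implicits are in scope; `askOrTimeout` is an illustrative helper, not part of the codebase.

    import java.util.concurrent.TimeoutException
    import akka.actor._

    def askOrTimeout(actor: ActorRef, msg: Any): Any =
      (actor ? msg).as[Any] match {
        case Some(reply) ⇒ reply // may still be a Failure the caller has to inspect
        case None ⇒
          throw new TimeoutException("no reply within %d ms".format(actor.timeout))
      }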
*/ package akka.cluster import org.apache.zookeeper._ import org.apache.zookeeper.Watcher.Event._ import org.apache.zookeeper.data.Stat -import org.apache.zookeeper.recipes.lock.{ WriteLock, LockListener } +import org.apache.zookeeper.recipes.lock.{WriteLock, LockListener} import org.I0Itec.zkclient._ import org.I0Itec.zkclient.serialize._ import org.I0Itec.zkclient.exception._ -import java.util.{ List ⇒ JList } -import java.util.concurrent.atomic.{ AtomicBoolean, AtomicReference, AtomicInteger } -import java.util.concurrent.{ ConcurrentSkipListSet, CopyOnWriteArrayList, Callable, ConcurrentHashMap } +import java.util.{List ⇒ JList} +import java.util.concurrent.atomic.{AtomicBoolean, AtomicReference} import java.net.InetSocketAddress import javax.management.StandardMBean -import scala.collection.immutable.{ HashMap, HashSet } import scala.collection.mutable.ConcurrentMap import scala.collection.JavaConversions._ @@ -31,39 +29,37 @@ import Status._ import DeploymentConfig._ import akka.event.EventHandler -import akka.dispatch.{ Dispatchers, Future } +import akka.dispatch.{Dispatchers, Future} import akka.remoteinterface._ import akka.routing.RouterType -import akka.config.{ Config, Supervision } +import akka.config.{Config, Supervision} import Supervision._ import Config._ -import akka.serialization.{ Serialization, Serializer, Compression, ActorSerialization } +import akka.serialization.{Serialization, Serializer, ActorSerialization} import ActorSerialization._ -import Compression.LZF +import akka.serialization.Compression.LZF import akka.cluster.zookeeper._ import ChangeListener._ import ClusterProtocol._ import RemoteDaemonMessageType._ -import akka.AkkaException - import com.eaio.uuid.UUID import com.google.protobuf.ByteString +import java.util.concurrent.{CopyOnWriteArrayList, Callable, ConcurrentHashMap} // FIXME add watch for each node that when the entry for the node is removed then the node shuts itself down /** * JMX MBean for the cluster service. 
* - * FIXME revisit the methods in this MBean interface, they are not up to date with new cluster API - * * @author Jonas Bonér */ trait ClusterNodeMBean { + def start() def stop() @@ -88,7 +84,11 @@ trait ClusterNodeMBean { def getMemberNodes: Array[String] - def getLeader: String + def getNodeAddres(): NodeAddress + + def getLeaderLockName: String + + def isLeader: Boolean def getUuidsForClusteredActors: Array[String] @@ -111,6 +111,32 @@ trait ClusterNodeMBean { def removeConfigElement(key: String) def getConfigElementKeys: Array[String] + + def getMemberShipPathFor(node: String): String + + def getConfigurationPathFor(key: String): String + + def getActorAddresstoNodesPathFor(actorAddress: String): String + + def getActorAddressToNodesPathForWithNodeName(actorAddress: String, nodeName: String): String + + def getNodeToUuidsPathFor(node: String): String + + def getNodeToUuidsPathFor(node: String, uuid: UUID): String + + def getActorAddressRegistryPathFor(actorAddress: String): String + + def getActorAddressRegistrySerializerPathFor(actorAddress: String): String + + def getActorAddressRegistryUuidPathFor(actorAddress: String): String + + def getActorUuidRegistryNodePathFor(uuid: UUID): String + + def getActorUuidRegistryRemoteAddressPathFor(uuid: UUID): String + + def getActorAddressToUuidsPathFor(actorAddress: String): String + + def getActorAddressToUuidsPathForWithNodeName(actorAddress: String, uuid: UUID): String } /** @@ -155,17 +181,17 @@ object Cluster { private def nodename: String = properties.get("akka.cluster.nodename") match { case Some(uberride) ⇒ uberride - case None ⇒ Config.nodename + case None ⇒ Config.nodename } private def hostname: String = properties.get("akka.cluster.hostname") match { case Some(uberride) ⇒ uberride - case None ⇒ Config.hostname + case None ⇒ Config.hostname } private def port: Int = properties.get("akka.cluster.port") match { case Some(uberride) ⇒ uberride.toInt - case None ⇒ Config.remoteServerPort + case None ⇒ Config.remoteServerPort } val defaultZooKeeperSerializer = new SerializableSerializer @@ -303,12 +329,12 @@ object Cluster { * * @author Jonas Bonér */ -class DefaultClusterNode private[akka] ( - val nodeAddress: NodeAddress, - val hostname: String = Config.hostname, - val port: Int = Config.remoteServerPort, - val zkServerAddresses: String, - val serializer: ZkSerializer) extends ErrorHandler with ClusterNode { +class DefaultClusterNode private[akka]( + val nodeAddress: NodeAddress, + val hostname: String = Config.hostname, + val port: Int = Config.remoteServerPort, + val zkServerAddresses: String, + val serializer: ZkSerializer) extends ErrorHandler with ClusterNode { self ⇒ if ((hostname eq null) || hostname == "") throw new NullPointerException("Host name must not be null or empty string") @@ -323,7 +349,7 @@ class DefaultClusterNode private[akka] ( def receive = { case RemoteClientError(cause, client, address) ⇒ client.shutdownClientModule() case RemoteClientDisconnected(client, address) ⇒ client.shutdownClientModule() - case _ ⇒ //ignore other + case _ ⇒ //ignore other } }, "akka.cluster.RemoteClientLifeCycleListener").start() @@ -347,6 +373,8 @@ class DefaultClusterNode private[akka] ( lazy val remoteServerAddress: InetSocketAddress = remoteService.address + val isConnected = new Switch(false) + // static nodes val CLUSTER_PATH = "/" + nodeAddress.clusterName val MEMBERSHIP_PATH = CLUSTER_PATH + "/members" @@ -419,15 +447,37 @@ class DefaultClusterNode private[akka] ( // Node // ======================================= + def 
isRunning: Boolean = isConnected.isOn + def start(): ClusterNode = { - if (isConnected.compareAndSet(false, true)) { + isConnected.switchOn { initializeNode() } + this } + private[cluster] def initializeNode() { + EventHandler.info(this, + ("\nCreating cluster node with" + + "\n\tcluster name = [%s]" + + "\n\tnode name = [%s]" + + "\n\tport = [%s]" + + "\n\tzookeeper server addresses = [%s]" + + "\n\tserializer = [%s]") + .format(nodeAddress.clusterName, nodeAddress.nodeName, port, zkServerAddresses, serializer)) + EventHandler.info(this, "Starting up remote server [%s]".format(remoteServerAddress.toString)) + createZooKeeperPathStructureIfNeeded() + registerListeners() + joinCluster() + joinLeaderElection() + fetchMembershipNodes() + EventHandler.info(this, "Cluster node [%s] started successfully".format(nodeAddress)) + } + + def shutdown() { - if (isConnected.compareAndSet(true, false)) { + def shutdownNode() { ignore[ZkNoNodeException](zkClient.deleteRecursive(membershipNodePath)) locallyCachedMembershipNodes.clear() @@ -450,6 +500,10 @@ class DefaultClusterNode private[akka] ( disconnect() EventHandler.info(this, "Cluster node shut down [%s]".format(nodeAddress)) } + + isConnected.switchOff { + shutdownNode() + } } def disconnect(): ClusterNode = { @@ -642,12 +696,12 @@ class DefaultClusterNode private[akka] ( * available durable store. */ def store( - actorAddress: String, - actorFactory: () ⇒ ActorRef, - replicationFactor: Int, - replicationScheme: ReplicationScheme, - serializeMailbox: Boolean, - serializer: Serializer): ClusterNode = if (isConnected.get) { + actorAddress: String, + actorFactory: () ⇒ ActorRef, + replicationFactor: Int, + replicationScheme: ReplicationScheme, + serializeMailbox: Boolean, + serializer: Serializer): ClusterNode = if (isConnected.isOn) { val serializerClassName = serializer.getClass.getName @@ -678,7 +732,7 @@ class DefaultClusterNode private[akka] ( } } }) match { - case Left(path) ⇒ path + case Left(path) ⇒ path case Right(exception) ⇒ actorAddressRegistryPath } } @@ -723,7 +777,7 @@ class DefaultClusterNode private[akka] ( /** * Is the actor with uuid clustered or not? */ - def isClustered(actorAddress: String): Boolean = if (isConnected.get) { + def isClustered(actorAddress: String): Boolean = if (isConnected.isOn) { zkClient.exists(actorAddressRegistryPathFor(actorAddress)) } else false @@ -735,7 +789,7 @@ class DefaultClusterNode private[akka] ( /** * Is the actor with uuid in use or not? */ - def isInUseOnNode(actorAddress: String, node: NodeAddress): Boolean = if (isConnected.get) { + def isInUseOnNode(actorAddress: String, node: NodeAddress): Boolean = if (isConnected.isOn) { zkClient.exists(actorAddressToNodesPathFor(actorAddress, node.nodeName)) } else false @@ -749,7 +803,7 @@ class DefaultClusterNode private[akka] ( * Checks out an actor for use on this node, e.g. checked out as a 'LocalActorRef' but it makes it available * for remote access through lookup by its UUID. 
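 *
 * A hypothetical usage sketch, mirroring how the remote cluster daemon further down checks an
 * actor out; `node` is assumed to be a started ClusterNode and the actor address is illustrative:
 * {{{
 * node.serializerForActor("hello-service") foreach { serializer ⇒
 *   node.use("hello-service", serializer) foreach { localRef ⇒
 *     localRef ! "ping"   // the checked-out LocalActorRef behaves like any local actor
 *   }
 * }
 * }}}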
*/ - def use[T <: Actor](actorAddress: String, serializer: Serializer): Option[LocalActorRef] = if (isConnected.get) { + def use[T <: Actor](actorAddress: String, serializer: Serializer): Option[LocalActorRef] = if (isConnected.isOn) { val nodeName = nodeAddress.nodeName ignore[ZkNodeExistsException](zkClient.createEphemeral(actorAddressToNodesPathFor(actorAddress, nodeName))) @@ -765,7 +819,7 @@ class DefaultClusterNode private[akka] ( val actorFactory = Serialization.deserialize(actorFactoryBytes, classOf[() ⇒ LocalActorRef], None) match { - case Left(error) ⇒ throw error + case Left(error) ⇒ throw error case Right(instance) ⇒ instance.asInstanceOf[() ⇒ LocalActorRef] } @@ -834,7 +888,7 @@ class DefaultClusterNode private[akka] ( EventHandler.debug(this, "Sending command to nodes [%s] for checking out actor [%s]".format(nodes.mkString(", "), actorAddress)) - if (isConnected.get) { + if (isConnected.isOn) { val builder = RemoteDaemonMessageProtocol.newBuilder .setMessageType(USE) @@ -845,11 +899,12 @@ class DefaultClusterNode private[akka] ( val command = builder.build - nodes foreach { node ⇒ - nodeConnections.get(node) foreach { - case (_, connection) ⇒ - sendCommandToNode(connection, command, async = false) - } + nodes foreach { + node ⇒ + nodeConnections.get(node) foreach { + case (_, connection) ⇒ + sendCommandToNode(connection, command, async = false) + } } } } @@ -882,15 +937,16 @@ class DefaultClusterNode private[akka] ( // FIXME 'Cluster.release' needs to notify all existing ClusterActorRef's that are using the instance that it is no longer available. Then what to do? Should we even remove this method? - if (isConnected.get) { + if (isConnected.isOn) { ignore[ZkNoNodeException](zkClient.delete(actorAddressToNodesPathFor(actorAddress, nodeAddress.nodeName))) - uuidsForActorAddress(actorAddress) foreach { uuid ⇒ - EventHandler.debug(this, - "Releasing actor [%s] with UUID [%s] after usage".format(actorAddress, uuid)) + uuidsForActorAddress(actorAddress) foreach { + uuid ⇒ + EventHandler.debug(this, + "Releasing actor [%s] with UUID [%s] after usage".format(actorAddress, uuid)) - ignore[ZkNoNodeException](zkClient.deleteRecursive(nodeToUuidsPathFor(nodeAddress.nodeName, uuid))) - ignore[ZkNoNodeException](zkClient.delete(actorUuidRegistryRemoteAddressPathFor(uuid))) + ignore[ZkNoNodeException](zkClient.deleteRecursive(nodeToUuidsPathFor(nodeAddress.nodeName, uuid))) + ignore[ZkNoNodeException](zkClient.delete(actorUuidRegistryRemoteAddressPathFor(uuid))) } } } @@ -899,7 +955,7 @@ class DefaultClusterNode private[akka] ( * Releases (checking in) all actors with a specific address on all nodes in the cluster where the actor is in 'use'. */ private[akka] def releaseActorOnAllNodes(actorAddress: String) { - if (isConnected.get) { + if (isConnected.isOn) { EventHandler.debug(this, "Releasing (checking in) all actors with address [%s] on all nodes in cluster".format(actorAddress)) @@ -908,10 +964,11 @@ class DefaultClusterNode private[akka] ( .setActorAddress(actorAddress) .build - nodesForActorsInUseWithAddress(actorAddress) foreach { node ⇒ - nodeConnections.get(node) foreach { - case (_, connection) ⇒ sendCommandToNode(connection, command, async = true) - } + nodesForActorsInUseWithAddress(actorAddress) foreach { + node ⇒ + nodeConnections.get(node) foreach { + case (_, connection) ⇒ sendCommandToNode(connection, command, async = true) + } } } } @@ -919,14 +976,16 @@ class DefaultClusterNode private[akka] ( /** * Creates an ActorRef with a Router to a set of clustered actors. 
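 *
 * A hypothetical sketch; `node` is assumed to be a started ClusterNode, and the address and
 * router choice are illustrative only:
 * {{{
 * val routedRef: ActorRef = node.ref("hello-service", RouterType.RoundRobin)
 * routedRef ! "ping"   // messages are routed over the clustered instances of the address
 * }}}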
*/ - def ref(actorAddress: String, router: RouterType): ActorRef = if (isConnected.get) { + def ref(actorAddress: String, router: RouterType): ActorRef = if (isConnected.isOn) { val addresses = addressesForActor(actorAddress) EventHandler.debug(this, "Checking out cluster actor ref with address [%s] and router [%s] on [%s] connected to [\n\t%s]" .format(actorAddress, router, remoteServerAddress, addresses.map(_._2).mkString("\n\t"))) val actorRef = Router newRouter (router, addresses, actorAddress, Actor.TIMEOUT) - addresses foreach { case (_, address) ⇒ clusterActorRefs.put(address, actorRef) } + addresses foreach { + case (_, address) ⇒ clusterActorRefs.put(address, actorRef) + } actorRef.start() } else throw new ClusterException("Not connected to cluster") @@ -944,7 +1003,7 @@ class DefaultClusterNode private[akka] ( /** * Returns the UUIDs of all actors registered in this cluster. */ - private[akka] def uuidsForClusteredActors: Array[UUID] = if (isConnected.get) { + private[akka] def uuidsForClusteredActors: Array[UUID] = if (isConnected.isOn) { zkClient.getChildren(ACTOR_UUID_REGISTRY_PATH).toList.map(new UUID(_)).toArray.asInstanceOf[Array[UUID]] } else Array.empty[UUID] @@ -956,7 +1015,7 @@ class DefaultClusterNode private[akka] ( /** * Returns the actor id for the actor with a specific UUID. */ - private[akka] def actorAddressForUuid(uuid: UUID): Option[String] = if (isConnected.get) { + private[akka] def actorAddressForUuid(uuid: UUID): Option[String] = if (isConnected.isOn) { try { Some(zkClient.readData(actorUuidRegistryAddressPathFor(uuid)).asInstanceOf[String]) } catch { @@ -973,7 +1032,7 @@ class DefaultClusterNode private[akka] ( /** * Returns the actor UUIDs for actor ID. */ - private[akka] def uuidsForActorAddress(actorAddress: String): Array[UUID] = if (isConnected.get) { + private[akka] def uuidsForActorAddress(actorAddress: String): Array[UUID] = if (isConnected.isOn) { try { zkClient.getChildren(actorAddressToUuidsPathFor(actorAddress)).toList.toArray map { case c: CharSequence ⇒ new UUID(c) @@ -986,7 +1045,7 @@ class DefaultClusterNode private[akka] ( /** * Returns the node names of all actors in use with UUID. */ - private[akka] def nodesForActorsInUseWithAddress(actorAddress: String): Array[String] = if (isConnected.get) { + private[akka] def nodesForActorsInUseWithAddress(actorAddress: String): Array[String] = if (isConnected.isOn) { try { zkClient.getChildren(actorAddressToNodesPathFor(actorAddress)).toList.toArray.asInstanceOf[Array[String]] } catch { @@ -997,7 +1056,7 @@ class DefaultClusterNode private[akka] ( /** * Returns the UUIDs of all actors in use registered on a specific node. */ - private[akka] def uuidsForActorsInUseOnNode(nodeName: String): Array[UUID] = if (isConnected.get) { + private[akka] def uuidsForActorsInUseOnNode(nodeName: String): Array[UUID] = if (isConnected.isOn) { try { zkClient.getChildren(nodeToUuidsPathFor(nodeName)).toList.toArray map { case c: CharSequence ⇒ new UUID(c) @@ -1010,7 +1069,7 @@ class DefaultClusterNode private[akka] ( /** * Returns the addresses of all actors in use registered on a specific node. 
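 *
 * Illustrative sketch (the node name is assumed):
 * {{{
 * node.addressesForActorsInUseOnNode("node1") foreach println
 * }}}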
*/ - def addressesForActorsInUseOnNode(nodeName: String): Array[String] = if (isConnected.get) { + def addressesForActorsInUseOnNode(nodeName: String): Array[String] = if (isConnected.isOn) { val uuids = try { zkClient.getChildren(nodeToUuidsPathFor(nodeName)).toList.toArray map { @@ -1033,7 +1092,8 @@ class DefaultClusterNode private[akka] ( case e: ZkNoNodeException ⇒ throw new IllegalStateException("No serializer found for actor with address [%s]".format(actorAddress)) } - ReflectiveAccess.getClassFor(serializerClassName) match { // FIXME need to pass in a user provide class loader? Now using default in ReflectiveAccess. + ReflectiveAccess.getClassFor(serializerClassName) match { + // FIXME need to pass in a user provide class loader? Now using default in ReflectiveAccess. case Right(clazz) ⇒ clazz.newInstance.asInstanceOf[Serializer] case Left(error) ⇒ EventHandler.error(error, this, "Could not load serializer class [%s] due to: %s".format(serializerClassName, error.toString)) @@ -1157,7 +1217,7 @@ class DefaultClusterNode private[akka] ( } } }) match { - case Left(_) ⇒ /* do nothing */ + case Left(_) ⇒ /* do nothing */ case Right(exception) ⇒ throw exception } } @@ -1216,44 +1276,35 @@ class DefaultClusterNode private[akka] ( } private[cluster] def membershipPathFor(node: String): String = "%s/%s".format(MEMBERSHIP_PATH, node) + private[cluster] def configurationPathFor(key: String): String = "%s/%s".format(CONFIGURATION_PATH, key) private[cluster] def actorAddressToNodesPathFor(actorAddress: String): String = "%s/%s".format(ACTOR_ADDRESS_NODES_TO_PATH, actorAddress) + private[cluster] def actorAddressToNodesPathFor(actorAddress: String, nodeName: String): String = "%s/%s".format(actorAddressToNodesPathFor(actorAddress), nodeName) private[cluster] def nodeToUuidsPathFor(node: String): String = "%s/%s".format(NODE_TO_ACTOR_UUIDS_PATH, node) + private[cluster] def nodeToUuidsPathFor(node: String, uuid: UUID): String = "%s/%s/%s".format(NODE_TO_ACTOR_UUIDS_PATH, node, uuid) private[cluster] def actorAddressRegistryPathFor(actorAddress: String): String = "%s/%s".format(ACTOR_ADDRESS_REGISTRY_PATH, actorAddress) + private[cluster] def actorAddressRegistrySerializerPathFor(actorAddress: String): String = "%s/%s".format(actorAddressRegistryPathFor(actorAddress), "serializer") + private[cluster] def actorAddressRegistryUuidPathFor(actorAddress: String): String = "%s/%s".format(actorAddressRegistryPathFor(actorAddress), "uuid") private[cluster] def actorUuidRegistryPathFor(uuid: UUID): String = "%s/%s".format(ACTOR_UUID_REGISTRY_PATH, uuid) + private[cluster] def actorUuidRegistryNodePathFor(uuid: UUID): String = "%s/%s".format(actorUuidRegistryPathFor(uuid), "node") + private[cluster] def actorUuidRegistryAddressPathFor(uuid: UUID): String = "%s/%s".format(actorUuidRegistryPathFor(uuid), "address") private[cluster] def actorUuidRegistryRemoteAddressPathFor(uuid: UUID): String = "%s/%s".format(actorUuidRegistryPathFor(uuid), "remote-address") private[cluster] def actorAddressToUuidsPathFor(actorAddress: String): String = "%s/%s".format(ACTOR_ADDRESS_TO_UUIDS_PATH, actorAddress.replace('.', '_')) + private[cluster] def actorAddressToUuidsPathFor(actorAddress: String, uuid: UUID): String = "%s/%s".format(actorAddressToUuidsPathFor(actorAddress), uuid) - private[cluster] def initializeNode() { - EventHandler.info(this, - ("\nCreating cluster node with" + - "\n\tcluster name = [%s]" + - "\n\tnode name = [%s]" + - "\n\tport = [%s]" + - "\n\tzookeeper server addresses = [%s]" + - "\n\tserializer = 
[%s]") - .format(nodeAddress.clusterName, nodeAddress.nodeName, port, zkServerAddresses, serializer)) - EventHandler.info(this, "Starting up remote server [%s]".format(remoteServerAddress.toString)) - createZooKeeperPathStructureIfNeeded() - registerListeners() - joinCluster() - joinLeaderElection() - fetchMembershipNodes() - EventHandler.info(this, "Cluster node [%s] started successfully".format(nodeAddress)) - } /** * Returns a random set with node names of size 'replicationFactor'. @@ -1269,7 +1320,8 @@ class DefaultClusterNode private[akka] ( "] is greater than the number of available nodeNames [" + nrOfClusterNodes + "]") val preferredNodes = - if (actorAddress.isDefined) { // use 'preferred-nodes' in deployment config for the actor + if (actorAddress.isDefined) { + // use 'preferred-nodes' in deployment config for the actor Deployer.deploymentFor(actorAddress.get) match { case Deploy(_, _, Clustered(nodes, _, _)) ⇒ nodes map (node ⇒ DeploymentConfig.nodeNameFor(node)) take replicationFactor @@ -1324,13 +1376,16 @@ class DefaultClusterNode private[akka] ( * @returns a Map with the remote socket addresses to of disconnected node connections */ private[cluster] def connectToAllNewlyArrivedMembershipNodesInCluster( - newlyConnectedMembershipNodes: Traversable[String], - newlyDisconnectedMembershipNodes: Traversable[String]): Map[String, InetSocketAddress] = { + newlyConnectedMembershipNodes: Traversable[String], + newlyDisconnectedMembershipNodes: Traversable[String]): Map[String, InetSocketAddress] = { // cache the disconnected connections in a map, needed for fail-over of these connections later var disconnectedConnections = Map.empty[String, InetSocketAddress] - newlyDisconnectedMembershipNodes foreach { node ⇒ - disconnectedConnections += (node -> (nodeConnections(node) match { case (address, _) ⇒ address })) + newlyDisconnectedMembershipNodes foreach { + node ⇒ + disconnectedConnections += (node -> (nodeConnections(node) match { + case (address, _) ⇒ address + })) } if (connectToAllNewlyArrivedMembershipNodesInClusterLock.compareAndSet(false, true)) { @@ -1339,17 +1394,20 @@ class DefaultClusterNode private[akka] ( newlyDisconnectedMembershipNodes foreach (nodeConnections.remove(_)) // add connections newly arrived nodes - newlyConnectedMembershipNodes foreach { node ⇒ - if (!nodeConnections.contains(node)) { // only connect to each replica once + newlyConnectedMembershipNodes foreach { + node ⇒ + if (!nodeConnections.contains(node)) { + // only connect to each replica once - remoteSocketAddressForNode(node) foreach { address ⇒ - EventHandler.debug(this, - "Setting up connection to node with nodename [%s] and address [%s]".format(node, address)) + remoteSocketAddressForNode(node) foreach { + address ⇒ + EventHandler.debug(this, + "Setting up connection to node with nodename [%s] and address [%s]".format(node, address)) - val clusterDaemon = Actor.remote.actorFor(RemoteClusterDaemon.Address, address.getHostName, address.getPort).start() - nodeConnections.put(node, (address, clusterDaemon)) + val clusterDaemon = Actor.remote.actorFor(RemoteClusterDaemon.Address, address.getHostName, address.getPort).start() + nodeConnections.put(node, (address, clusterDaemon)) + } } - } } } finally { connectToAllNewlyArrivedMembershipNodesInClusterLock.set(false) @@ -1396,84 +1454,87 @@ class DefaultClusterNode private[akka] ( } private[cluster] def migrateActorsOnFailedNodes( - failedNodes: List[String], - currentClusterNodes: List[String], - oldClusterNodes: List[String], - disconnectedConnections: 
Map[String, InetSocketAddress]) { + failedNodes: List[String], + currentClusterNodes: List[String], + oldClusterNodes: List[String], + disconnectedConnections: Map[String, InetSocketAddress]) { - failedNodes.foreach { failedNodeName ⇒ + failedNodes.foreach { + failedNodeName ⇒ - val failedNodeAddress = NodeAddress(nodeAddress.clusterName, failedNodeName) + val failedNodeAddress = NodeAddress(nodeAddress.clusterName, failedNodeName) - val myIndex = oldClusterNodes.indexWhere(_.endsWith(nodeAddress.nodeName)) - val failedNodeIndex = oldClusterNodes.indexWhere(_ == failedNodeName) + val myIndex = oldClusterNodes.indexWhere(_.endsWith(nodeAddress.nodeName)) + val failedNodeIndex = oldClusterNodes.indexWhere(_ == failedNodeName) - // Migrate to the successor of the failed node (using a sorted circular list of the node names) - if ((failedNodeIndex == 0 && myIndex == oldClusterNodes.size - 1) || // No leftmost successor exists, check the tail - (failedNodeIndex == myIndex + 1)) { // Am I the leftmost successor? + // Migrate to the successor of the failed node (using a sorted circular list of the node names) + if ((failedNodeIndex == 0 && myIndex == oldClusterNodes.size - 1) || // No leftmost successor exists, check the tail + (failedNodeIndex == myIndex + 1)) { + // Am I the leftmost successor? - // Takes the lead of migrating the actors. Not all to this node. - // All to this node except if the actor already resides here, then pick another node it is not already on. + // Takes the lead of migrating the actors. Not all to this node. + // All to this node except if the actor already resides here, then pick another node it is not already on. - // Yes I am the node to migrate the actor to (can only be one in the cluster) - val actorUuidsForFailedNode = zkClient.getChildren(nodeToUuidsPathFor(failedNodeName)).toList + // Yes I am the node to migrate the actor to (can only be one in the cluster) + val actorUuidsForFailedNode = zkClient.getChildren(nodeToUuidsPathFor(failedNodeName)).toList - actorUuidsForFailedNode.foreach { uuidAsString ⇒ - EventHandler.debug(this, - "Cluster node [%s] has failed, migrating actor with UUID [%s] to [%s]" - .format(failedNodeName, uuidAsString, nodeAddress.nodeName)) + actorUuidsForFailedNode.foreach { + uuidAsString ⇒ + EventHandler.debug(this, + "Cluster node [%s] has failed, migrating actor with UUID [%s] to [%s]" + .format(failedNodeName, uuidAsString, nodeAddress.nodeName)) - val uuid = uuidFrom(uuidAsString) - val actorAddress = actorAddressForUuid(uuid).getOrElse( - throw new IllegalStateException("No actor address found for UUID [" + uuidAsString + "]")) + val uuid = uuidFrom(uuidAsString) + val actorAddress = actorAddressForUuid(uuid).getOrElse( + throw new IllegalStateException("No actor address found for UUID [" + uuidAsString + "]")) - val migrateToNodeAddress = - if (isInUseOnNode(actorAddress)) { - // already in use on this node, pick another node to instantiate the actor on - val replicaNodesForActor = nodesForActorsInUseWithAddress(actorAddress) - val nodesAvailableForMigration = (currentClusterNodes.toSet diff failedNodes.toSet) diff replicaNodesForActor.toSet + val migrateToNodeAddress = + if (isInUseOnNode(actorAddress)) { + // already in use on this node, pick another node to instantiate the actor on + val replicaNodesForActor = nodesForActorsInUseWithAddress(actorAddress) + val nodesAvailableForMigration = (currentClusterNodes.toSet diff failedNodes.toSet) diff replicaNodesForActor.toSet - if (nodesAvailableForMigration.isEmpty) throw new 
ClusterException( - "Can not migrate actor to new node since there are not any available nodes left. " + - "(However, the actor already has >1 replica in cluster, so we are ok)") + if (nodesAvailableForMigration.isEmpty) throw new ClusterException( + "Can not migrate actor to new node since there are not any available nodes left. " + + "(However, the actor already has >1 replica in cluster, so we are ok)") - NodeAddress(nodeAddress.clusterName, nodesAvailableForMigration.head) - } else { - // actor is not in use on this node, migrate it here - nodeAddress - } + NodeAddress(nodeAddress.clusterName, nodesAvailableForMigration.head) + } else { + // actor is not in use on this node, migrate it here + nodeAddress + } - // if actor is replicated => pass along the UUID for the actor to replicate from (replay transaction log etc.) - val replicateFromUuid = - if (isReplicated(actorAddress)) Some(uuid) - else None + // if actor is replicated => pass along the UUID for the actor to replicate from (replay transaction log etc.) + val replicateFromUuid = + if (isReplicated(actorAddress)) Some(uuid) + else None - migrateWithoutCheckingThatActorResidesOnItsHomeNode( - failedNodeAddress, - migrateToNodeAddress, - actorAddress, - replicateFromUuid) + migrateWithoutCheckingThatActorResidesOnItsHomeNode( + failedNodeAddress, + migrateToNodeAddress, + actorAddress, + replicateFromUuid) + } + + // notify all available nodes that they should fail-over all connections from 'from' to 'to' + val from = disconnectedConnections(failedNodeName) + val to = remoteServerAddress + + Serialization.serialize((from, to)) match { + case Left(error) ⇒ throw error + case Right(bytes) ⇒ + + val command = RemoteDaemonMessageProtocol.newBuilder + .setMessageType(FAIL_OVER_CONNECTIONS) + .setPayload(ByteString.copyFrom(bytes)) + .build + + // FIXME now we are broadcasting to ALL nodes in the cluster even though a fraction might have a reference to the actors - should that be fixed? + nodeConnections.values foreach { + case (_, connection) ⇒ sendCommandToNode(connection, command, async = true) + } + } } - - // notify all available nodes that they should fail-over all connections from 'from' to 'to' - val from = disconnectedConnections(failedNodeName) - val to = remoteServerAddress - - Serialization.serialize((from, to)) match { - case Left(error) ⇒ throw error - case Right(bytes) ⇒ - - val command = RemoteDaemonMessageProtocol.newBuilder - .setMessageType(FAIL_OVER_CONNECTIONS) - .setPayload(ByteString.copyFrom(bytes)) - .build - - // FIXME now we are broadcasting to ALL nodes in the cluster even though a fraction might have a reference to the actors - should that be fixed? - nodeConnections.values foreach { - case (_, connection) ⇒ sendCommandToNode(connection, command, async = true) - } - } - } } } @@ -1481,7 +1542,7 @@ class DefaultClusterNode private[akka] ( * Used when the ephemeral "home" node is already gone, so we can't check if it is available. 
*/ private def migrateWithoutCheckingThatActorResidesOnItsHomeNode( - from: NodeAddress, to: NodeAddress, actorAddress: String, replicateFromUuid: Option[UUID]) { + from: NodeAddress, to: NodeAddress, actorAddress: String, replicateFromUuid: Option[UUID]) { EventHandler.debug(this, "Migrating actor [%s] from node [%s] to node [%s]".format(actorAddress, from, to)) if (!isInUseOnNode(actorAddress, to)) { @@ -1507,16 +1568,17 @@ class DefaultClusterNode private[akka] ( EventHandler.info(this, "Created node [%s]".format(CLUSTER_PATH)) } - basePaths.foreach { path ⇒ - try { - ignore[ZkNodeExistsException](zkClient.create(path, null, CreateMode.PERSISTENT)) - EventHandler.debug(this, "Created node [%s]".format(path)) - } catch { - case e ⇒ - val error = new ClusterException(e.toString) - EventHandler.error(error, this) - throw error - } + basePaths.foreach { + path ⇒ + try { + ignore[ZkNodeExistsException](zkClient.create(path, null, CreateMode.PERSISTENT)) + EventHandler.debug(this, "Created node [%s]".format(path)) + } catch { + case e ⇒ + val error = new ClusterException(e.toString) + EventHandler.error(error, this) + throw error + } } } @@ -1542,19 +1604,19 @@ class DefaultClusterNode private[akka] ( private def createMBean = { val clusterMBean = new StandardMBean(classOf[ClusterNodeMBean]) with ClusterNodeMBean { - import Cluster._ + override def start() = self.start() - override def start(): Unit = self.start() - - override def stop(): Unit = self.shutdown() + override def stop() = self.shutdown() override def disconnect() = self.disconnect() - override def reconnect(): Unit = self.reconnect() + override def reconnect() = self.reconnect() - override def resign(): Unit = self.resign() + override def resign() = self.resign() - override def isConnected = self.isConnected.get + override def isConnected = self.isConnected.isOn + + override def getNodeAddres = self.nodeAddress override def getRemoteServerHostname = self.hostname @@ -1568,7 +1630,9 @@ class DefaultClusterNode private[akka] ( override def getMemberNodes = self.locallyCachedMembershipNodes.iterator.map(_.toString).toArray - override def getLeader = self.leader.toString + override def getLeaderLockName = self.leader.toString + + override def isLeader = self.isLeader override def getUuidsForActorsInUse = self.uuidsForActorsInUse.map(_.toString).toArray @@ -1591,6 +1655,32 @@ class DefaultClusterNode private[akka] ( override def removeConfigElement(key: String): Unit = self.removeConfigElement(key) override def getConfigElementKeys = self.getConfigElementKeys.toArray + + override def getMemberShipPathFor(node: String) = self.membershipPathFor(node) + + override def getConfigurationPathFor(key: String) = self.configurationPathFor(key) + + override def getActorAddresstoNodesPathFor(actorAddress: String) = self.actorAddressToNodesPathFor(actorAddress) + + override def getActorAddressToNodesPathForWithNodeName(actorAddress: String, nodeName: String) = self.actorAddressToNodesPathFor(actorAddress, nodeName) + + override def getNodeToUuidsPathFor(node: String) = self.nodeToUuidsPathFor(node) + + override def getNodeToUuidsPathFor(node: String, uuid: UUID) = self.nodeToUuidsPathFor(node, uuid) + + override def getActorAddressRegistryPathFor(actorAddress: String) = self.actorAddressRegistryPathFor(actorAddress) + + override def getActorAddressRegistrySerializerPathFor(actorAddress: String) = self.actorAddressRegistrySerializerPathFor(actorAddress) + + override def getActorAddressRegistryUuidPathFor(actorAddress: String) = 
self.actorAddressRegistryUuidPathFor(actorAddress) + + override def getActorUuidRegistryNodePathFor(uuid: UUID) = self.actorUuidRegistryNodePathFor(uuid) + + override def getActorUuidRegistryRemoteAddressPathFor(uuid: UUID) = self.actorUuidRegistryNodePathFor(uuid) + + override def getActorAddressToUuidsPathFor(actorAddress: String) = self.actorAddressToUuidsPathFor(actorAddress) + + override def getActorAddressToUuidsPathForWithNodeName(actorAddress: String, uuid: UUID) = self.actorAddressToUuidsPathFor(actorAddress, uuid) } JMX.register(clusterJmxObjectName, clusterMBean) @@ -1716,81 +1806,85 @@ class RemoteClusterDaemon(cluster: ClusterNode) extends Actor { try { if (message.hasActorAddress) { val actorAddress = message.getActorAddress - cluster.serializerForActor(actorAddress) foreach { serializer ⇒ - cluster.use(actorAddress, serializer) foreach { newActorRef ⇒ - cluster.remoteService.register(actorAddress, newActorRef) + cluster.serializerForActor(actorAddress) foreach { + serializer ⇒ + cluster.use(actorAddress, serializer) foreach { + newActorRef ⇒ + cluster.remoteService.register(actorAddress, newActorRef) - if (message.hasReplicateActorFromUuid) { - // replication is used - fetch the messages and replay them - import akka.remote.protocol.RemoteProtocol._ - import akka.remote.MessageSerializer + if (message.hasReplicateActorFromUuid) { + // replication is used - fetch the messages and replay them + import akka.remote.protocol.RemoteProtocol._ + import akka.remote.MessageSerializer - val replicateFromUuid = uuidProtocolToUuid(message.getReplicateActorFromUuid) - val deployment = Deployer.deploymentFor(actorAddress) - val replicationScheme = DeploymentConfig.replicationSchemeFor(deployment).getOrElse( - throw new IllegalStateException( - "Actor [" + actorAddress + "] should have been configured as a replicated actor but could not find its ReplicationScheme")) - val isWriteBehind = DeploymentConfig.isWriteBehindReplication(replicationScheme) + val replicateFromUuid = uuidProtocolToUuid(message.getReplicateActorFromUuid) + val deployment = Deployer.deploymentFor(actorAddress) + val replicationScheme = DeploymentConfig.replicationSchemeFor(deployment).getOrElse( + throw new IllegalStateException( + "Actor [" + actorAddress + "] should have been configured as a replicated actor but could not find its ReplicationScheme")) + val isWriteBehind = DeploymentConfig.isWriteBehindReplication(replicationScheme) - try { - // get the transaction log for the actor UUID - val txLog = TransactionLog.logFor(replicateFromUuid.toString, isWriteBehind, replicationScheme) + try { + // get the transaction log for the actor UUID + val txLog = TransactionLog.logFor(replicateFromUuid.toString, isWriteBehind, replicationScheme) - // get the latest snapshot (Option[Array[Byte]]) and all the subsequent messages (Array[Byte]) - val (snapshotAsBytes, entriesAsBytes) = txLog.latestSnapshotAndSubsequentEntries + // get the latest snapshot (Option[Array[Byte]]) and all the subsequent messages (Array[Byte]) + val (snapshotAsBytes, entriesAsBytes) = txLog.latestSnapshotAndSubsequentEntries - // deserialize and restore actor snapshot - val actorRefToUseForReplay = - snapshotAsBytes match { + // deserialize and restore actor snapshot + val actorRefToUseForReplay = + snapshotAsBytes match { - // we have a new actor ref - the snapshot - case Some(bytes) ⇒ - // stop the new actor ref and use the snapshot instead - cluster.remoteService.unregister(actorAddress) + // we have a new actor ref - the snapshot + case Some(bytes) ⇒ 
+ // stop the new actor ref and use the snapshot instead + cluster.remoteService.unregister(actorAddress) - // deserialize the snapshot actor ref and register it as remote actor - val uncompressedBytes = - if (Cluster.shouldCompressData) LZF.uncompress(bytes) - else bytes + // deserialize the snapshot actor ref and register it as remote actor + val uncompressedBytes = + if (Cluster.shouldCompressData) LZF.uncompress(bytes) + else bytes - val snapshotActorRef = fromBinary(uncompressedBytes, newActorRef.uuid).start() - cluster.remoteService.register(actorAddress, snapshotActorRef) + val snapshotActorRef = fromBinary(uncompressedBytes, newActorRef.uuid).start() + cluster.remoteService.register(actorAddress, snapshotActorRef) - // FIXME we should call 'stop()' here (to GC the actor), but can't since that will currently shut down the TransactionLog for this UUID - since both this actor and the new snapshotActorRef have the same UUID (which they should) - //newActorRef.stop() + // FIXME we should call 'stop()' here (to GC the actor), but can't since that will currently shut down the TransactionLog for this UUID - since both this actor and the new snapshotActorRef have the same UUID (which they should) + //newActorRef.stop() - snapshotActorRef + snapshotActorRef - // we have no snapshot - use the new actor ref - case None ⇒ - newActorRef + // we have no snapshot - use the new actor ref + case None ⇒ + newActorRef + } + + // deserialize the messages + val messages: Vector[AnyRef] = entriesAsBytes map { + bytes ⇒ + val messageBytes = + if (Cluster.shouldCompressData) LZF.uncompress(bytes) + else bytes + MessageSerializer.deserialize(MessageProtocol.parseFrom(messageBytes), None) + } + + EventHandler.info(this, "Replaying [%s] messages to actor [%s]".format(messages.size, actorAddress)) + + // replay all messages + messages foreach { + message ⇒ + EventHandler.debug(this, "Replaying message [%s] to actor [%s]".format(message, actorAddress)) + + // FIXME how to handle '?' messages? We can *not* replay them with the correct semantics. Should we: 1. Ignore/drop them and log warning? 2. Throw exception when about to log them? 3. Other? + actorRefToUseForReplay ! message + } + + } catch { + case e: Throwable ⇒ + EventHandler.error(e, this, e.toString) + throw e } - - // deserialize the messages - val messages: Vector[AnyRef] = entriesAsBytes map { bytes ⇒ - val messageBytes = - if (Cluster.shouldCompressData) LZF.uncompress(bytes) - else bytes - MessageSerializer.deserialize(MessageProtocol.parseFrom(messageBytes), None) } - - EventHandler.info(this, "Replaying [%s] messages to actor [%s]".format(messages.size, actorAddress)) - - // replay all messages - messages foreach { message ⇒ - EventHandler.debug(this, "Replaying message [%s] to actor [%s]".format(message, actorAddress)) - - // FIXME how to handle '?' messages? We can *not* replay them with the correct semantics. Should we: 1. Ignore/drop them and log warning? 2. Throw exception when about to log them? 3. Other? - actorRefToUseForReplay ! 
message - } - - } catch { - case e: Throwable ⇒ - EventHandler.error(e, this, e.toString) - throw e - } } - } } } else { EventHandler.error(this, "Actor 'address' is not defined, ignoring remote cluster daemon command [%s]".format(message)) @@ -1805,8 +1899,9 @@ class RemoteClusterDaemon(cluster: ClusterNode) extends Actor { case RELEASE ⇒ if (message.hasActorUuid) { - cluster.actorAddressForUuid(uuidProtocolToUuid(message.getActorUuid)) foreach { address ⇒ - cluster.release(address) + cluster.actorAddressForUuid(uuidProtocolToUuid(message.getActorUuid)) foreach { + address ⇒ + cluster.release(address) } } else if (message.hasActorAddress) { cluster release message.getActorAddress @@ -1816,15 +1911,15 @@ class RemoteClusterDaemon(cluster: ClusterNode) extends Actor { .format(message)) } - case START ⇒ cluster.start() + case START ⇒ cluster.start() - case STOP ⇒ cluster.shutdown() + case STOP ⇒ cluster.shutdown() case DISCONNECT ⇒ cluster.disconnect() - case RECONNECT ⇒ cluster.reconnect() + case RECONNECT ⇒ cluster.reconnect() - case RESIGN ⇒ cluster.resign() + case RESIGN ⇒ cluster.resign() case FAIL_OVER_CONNECTIONS ⇒ val (from, to) = payloadFor(message, classOf[(InetSocketAddress, InetSocketAddress)]) @@ -1888,7 +1983,7 @@ class RemoteClusterDaemon(cluster: ClusterNode) extends Actor { private def payloadFor[T](message: RemoteDaemonMessageProtocol, clazz: Class[T]): T = { Serialization.deserialize(message.getPayload.toByteArray, clazz, None) match { - case Left(error) ⇒ throw error + case Left(error) ⇒ throw error case Right(instance) ⇒ instance.asInstanceOf[T] } } diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterActorRef.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterActorRef.scala index fba4a1e52a..3dc5f14b78 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterActorRef.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterActorRef.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterDeployer.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterDeployer.scala index 6f251eb593..a0a5fa40f2 100644 --- a/akka-cluster/src/main/scala/akka/cluster/ClusterDeployer.scala +++ b/akka-cluster/src/main/scala/akka/cluster/ClusterDeployer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/Routing.scala b/akka-cluster/src/main/scala/akka/cluster/Routing.scala index 1c9c1f5043..c165e699fe 100644 --- a/akka-cluster/src/main/scala/akka/cluster/Routing.scala +++ b/akka-cluster/src/main/scala/akka/cluster/Routing.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/main/scala/akka/cluster/TransactionLog.scala b/akka-cluster/src/main/scala/akka/cluster/TransactionLog.scala index 7a15673754..ae98e6848e 100644 --- a/akka-cluster/src/main/scala/akka/cluster/TransactionLog.scala +++ b/akka-cluster/src/main/scala/akka/cluster/TransactionLog.scala @@ -1,7 +1,7 @@ package akka.cluster /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ import org.apache.bookkeeper.client.{ BookKeeper, LedgerHandle, LedgerEntry, BKException, AsyncCallback } diff --git a/akka-cluster/src/main/scala/akka/cluster/storage/Storage.scala b/akka-cluster/src/main/scala/akka/cluster/storage/Storage.scala index 5718d41fe5..ef7b1a1aa4 100755 --- a/akka-cluster/src/main/scala/akka/cluster/storage/Storage.scala +++ b/akka-cluster/src/main/scala/akka/cluster/storage/Storage.scala @@ -1,7 +1,7 @@ package akka.cluster.storage /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ import akka.cluster.zookeeper.AkkaZkClient import akka.AkkaException diff --git a/akka-cluster/src/main/scala/akka/cluster/zookeeper/AkkaZkClient.scala b/akka-cluster/src/main/scala/akka/cluster/zookeeper/AkkaZkClient.scala index 42df10ee63..c405240bfd 100644 --- a/akka-cluster/src/main/scala/akka/cluster/zookeeper/AkkaZkClient.scala +++ b/akka-cluster/src/main/scala/akka/cluster/zookeeper/AkkaZkClient.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.cluster.zookeeper diff --git a/akka-cluster/src/main/scala/akka/cluster/zookeeper/AkkaZooKeeper.scala b/akka-cluster/src/main/scala/akka/cluster/zookeeper/AkkaZooKeeper.scala index 0c85ca505d..ce9e74c260 100644 --- a/akka-cluster/src/main/scala/akka/cluster/zookeeper/AkkaZooKeeper.scala +++ b/akka-cluster/src/main/scala/akka/cluster/zookeeper/AkkaZooKeeper.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.cluster.zookeeper diff --git a/akka-cluster/src/main/scala/akka/cluster/zookeeper/ZooKeeperBarrier.scala b/akka-cluster/src/main/scala/akka/cluster/zookeeper/ZooKeeperBarrier.scala index 3128d525b0..f3294e7f91 100644 --- a/akka-cluster/src/main/scala/akka/cluster/zookeeper/ZooKeeperBarrier.scala +++ b/akka-cluster/src/main/scala/akka/cluster/zookeeper/ZooKeeperBarrier.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.cluster.zookeeper diff --git a/akka-cluster/src/main/scala/akka/package.scala b/akka-cluster/src/main/scala/akka/package.scala index 1df19b4488..4fae688d03 100644 --- a/akka-cluster/src/main/scala/akka/package.scala +++ b/akka-cluster/src/main/scala/akka/package.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka diff --git a/akka-cluster/src/main/scala/akka/remote/BootableRemoteActorService.scala b/akka-cluster/src/main/scala/akka/remote/BootableRemoteActorService.scala index 8e523f2c7f..95492a30f5 100644 --- a/akka-cluster/src/main/scala/akka/remote/BootableRemoteActorService.scala +++ b/akka-cluster/src/main/scala/akka/remote/BootableRemoteActorService.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.remote diff --git a/akka-cluster/src/main/scala/akka/remote/MessageSerializer.scala b/akka-cluster/src/main/scala/akka/remote/MessageSerializer.scala index 181acb7f03..21060e7973 100644 --- a/akka-cluster/src/main/scala/akka/remote/MessageSerializer.scala +++ b/akka-cluster/src/main/scala/akka/remote/MessageSerializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.remote diff --git a/akka-cluster/src/main/scala/akka/remote/RemoteConfig.scala b/akka-cluster/src/main/scala/akka/remote/RemoteConfig.scala index d6803013f2..c54e2b4a9f 100644 --- a/akka-cluster/src/main/scala/akka/remote/RemoteConfig.scala +++ b/akka-cluster/src/main/scala/akka/remote/RemoteConfig.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.remote diff --git a/akka-cluster/src/main/scala/akka/remote/netty/NettyRemoteSupport.scala b/akka-cluster/src/main/scala/akka/remote/netty/NettyRemoteSupport.scala index 4a2d84a80a..e79e426d94 100644 --- a/akka-cluster/src/main/scala/akka/remote/netty/NettyRemoteSupport.scala +++ b/akka-cluster/src/main/scala/akka/remote/netty/NettyRemoteSupport.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.remote.netty diff --git a/akka-cluster/src/main/scala/akka/serialization/Compression.scala b/akka-cluster/src/main/scala/akka/serialization/Compression.scala index 48287adc2c..3602b81438 100644 --- a/akka-cluster/src/main/scala/akka/serialization/Compression.scala +++ b/akka-cluster/src/main/scala/akka/serialization/Compression.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.serialization diff --git a/akka-cluster/src/main/scala/akka/serialization/SerializationProtocol.scala b/akka-cluster/src/main/scala/akka/serialization/SerializationProtocol.scala index cd64a83067..e86665295b 100644 --- a/akka-cluster/src/main/scala/akka/serialization/SerializationProtocol.scala +++ b/akka-cluster/src/main/scala/akka/serialization/SerializationProtocol.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.serialization diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterTestNode.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterTestNode.scala index a8b6489ca1..aebfd8b651 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterTestNode.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/ClusterTestNode.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.cluster diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/newleader/NewLeaderChangeListenerMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/newleader/NewLeaderChangeListenerMultiJvmNode1.conf index 762f32d92a..2f642a20f0 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/newleader/NewLeaderChangeListenerMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/newleader/NewLeaderChangeListenerMultiJvmNode1.conf @@ -1,2 +1,2 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/newleader/NewLeaderChangeListenerMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/newleader/NewLeaderChangeListenerMultiJvmNode2.conf index 762f32d92a..2f642a20f0 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/newleader/NewLeaderChangeListenerMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/newleader/NewLeaderChangeListenerMultiJvmNode2.conf @@ -1,2 +1,2 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/newleader/NewLeaderChangeListenerMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/newleader/NewLeaderChangeListenerMultiJvmSpec.scala index d296926653..43f128e4d9 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/newleader/NewLeaderChangeListenerMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/newleader/NewLeaderChangeListenerMultiJvmSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.cluster.api.changelisteners.newleader diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodeconnected/NodeConnectedChangeListenerMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodeconnected/NodeConnectedChangeListenerMultiJvmNode1.conf index 762f32d92a..2f642a20f0 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodeconnected/NodeConnectedChangeListenerMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodeconnected/NodeConnectedChangeListenerMultiJvmNode1.conf @@ -1,2 +1,2 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodeconnected/NodeConnectedChangeListenerMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodeconnected/NodeConnectedChangeListenerMultiJvmNode2.conf index 762f32d92a..2f642a20f0 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodeconnected/NodeConnectedChangeListenerMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodeconnected/NodeConnectedChangeListenerMultiJvmNode2.conf @@ -1,2 +1,2 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodeconnected/NodeConnectedChangeListenerMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodeconnected/NodeConnectedChangeListenerMultiJvmSpec.scala index f77f14b568..3ba61bbd26 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodeconnected/NodeConnectedChangeListenerMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodeconnected/NodeConnectedChangeListenerMultiJvmSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.cluster.api.changelisteners.nodeconnected diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodedisconnected/NodeDisconnectedChangeListenerMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodedisconnected/NodeDisconnectedChangeListenerMultiJvmNode1.conf index 762f32d92a..2f642a20f0 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodedisconnected/NodeDisconnectedChangeListenerMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodedisconnected/NodeDisconnectedChangeListenerMultiJvmNode1.conf @@ -1,2 +1,2 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodedisconnected/NodeDisconnectedChangeListenerMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodedisconnected/NodeDisconnectedChangeListenerMultiJvmNode2.conf index 762f32d92a..2f642a20f0 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodedisconnected/NodeDisconnectedChangeListenerMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodedisconnected/NodeDisconnectedChangeListenerMultiJvmNode2.conf @@ -1,2 +1,2 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodedisconnected/NodeDisconnectedChangeListenerMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodedisconnected/NodeDisconnectedChangeListenerMultiJvmSpec.scala index b59c95de8d..8eb7a272f7 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodedisconnected/NodeDisconnectedChangeListenerMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/changelisteners/nodedisconnected/NodeDisconnectedChangeListenerMultiJvmSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.cluster.api.changelisteners.nodedisconnected diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/configuration/ConfigurationStorageMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/configuration/ConfigurationStorageMultiJvmNode1.conf index 762f32d92a..2f642a20f0 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/configuration/ConfigurationStorageMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/configuration/ConfigurationStorageMultiJvmNode1.conf @@ -1,2 +1,2 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/configuration/ConfigurationStorageMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/configuration/ConfigurationStorageMultiJvmNode2.conf index 762f32d92a..2f642a20f0 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/configuration/ConfigurationStorageMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/configuration/ConfigurationStorageMultiJvmNode2.conf @@ -1,2 +1,2 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/configuration/ConfigurationStorageMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/configuration/ConfigurationStorageMultiJvmSpec.scala index 1a32184054..314562be4d 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/configuration/ConfigurationStorageMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/configuration/ConfigurationStorageMultiJvmSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.cluster.api.configuration diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/leader/election/LeaderElectionMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/leader/election/LeaderElectionMultiJvmNode1.conf index 762f32d92a..2f642a20f0 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/leader/election/LeaderElectionMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/leader/election/LeaderElectionMultiJvmNode1.conf @@ -1,2 +1,2 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/leader/election/LeaderElectionMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/leader/election/LeaderElectionMultiJvmNode2.conf index 762f32d92a..2f642a20f0 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/leader/election/LeaderElectionMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/leader/election/LeaderElectionMultiJvmNode2.conf @@ -1,2 +1,2 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/leader/election/LeaderElectionMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/leader/election/LeaderElectionMultiJvmSpec.scala index 493fe57d6e..28aaaf656a 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/leader/election/LeaderElectionMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/leader/election/LeaderElectionMultiJvmSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.cluster.api.leader.election diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/registry/RegistryStoreMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/registry/RegistryStoreMultiJvmNode1.conf index 762f32d92a..2f642a20f0 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/registry/RegistryStoreMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/registry/RegistryStoreMultiJvmNode1.conf @@ -1,2 +1,2 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/registry/RegistryStoreMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/registry/RegistryStoreMultiJvmNode2.conf index 762f32d92a..2f642a20f0 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/registry/RegistryStoreMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/registry/RegistryStoreMultiJvmNode2.conf @@ -1,2 +1,2 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/registry/RegistryStoreMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/registry/RegistryStoreMultiJvmSpec.scala index 792924f338..ae0eadd97f 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/api/registry/RegistryStoreMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/api/registry/RegistryStoreMultiJvmSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.cluster.api.registry diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/deployment/DeploymentMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/deployment/DeploymentMultiJvmNode1.conf index 6f117d6ce2..e23553c931 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/deployment/DeploymentMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/deployment/DeploymentMultiJvmNode1.conf @@ -1,4 +1,4 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "round-robin" akka.actor.deployment.service-hello.clustered.replicas = 1 diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/deployment/DeploymentMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/deployment/DeploymentMultiJvmNode2.conf index 6f117d6ce2..e23553c931 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/deployment/DeploymentMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/deployment/DeploymentMultiJvmNode2.conf @@ -1,4 +1,4 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "round-robin" akka.actor.deployment.service-hello.clustered.replicas = 1 diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/deployment/DeploymentMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/deployment/DeploymentMultiJvmSpec.scala index a511681732..f90693baad 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/deployment/DeploymentMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/deployment/DeploymentMultiJvmSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.cluster.deployment diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/automatic/MigrationAutomaticMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/automatic/MigrationAutomaticMultiJvmNode1.conf index 7d8a1476ad..a17a4d98ab 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/automatic/MigrationAutomaticMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/automatic/MigrationAutomaticMultiJvmNode1.conf @@ -1,4 +1,4 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.hello-world.router = "direct" akka.actor.deployment.hello-world.clustered.replicas = 1 \ No newline at end of file diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/automatic/MigrationAutomaticMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/automatic/MigrationAutomaticMultiJvmNode2.conf index 7d8a1476ad..a17a4d98ab 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/automatic/MigrationAutomaticMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/automatic/MigrationAutomaticMultiJvmNode2.conf @@ -1,4 +1,4 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.hello-world.router = "direct" akka.actor.deployment.hello-world.clustered.replicas = 1 \ No newline at end of file diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/automatic/MigrationAutomaticMultiJvmNode3.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/automatic/MigrationAutomaticMultiJvmNode3.conf index 7d8a1476ad..a17a4d98ab 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/automatic/MigrationAutomaticMultiJvmNode3.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/automatic/MigrationAutomaticMultiJvmNode3.conf @@ -1,4 +1,4 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.hello-world.router = "direct" akka.actor.deployment.hello-world.clustered.replicas = 1 \ No newline at end of file diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/automatic/MigrationAutomaticMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/automatic/MigrationAutomaticMultiJvmSpec.scala index c929fdeb6f..68e49a51fb 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/automatic/MigrationAutomaticMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/automatic/MigrationAutomaticMultiJvmSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.cluster.migration.automatic diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/explicit/MigrationExplicitMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/explicit/MigrationExplicitMultiJvmNode1.conf index 762f32d92a..2f642a20f0 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/explicit/MigrationExplicitMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/explicit/MigrationExplicitMultiJvmNode1.conf @@ -1,2 +1,2 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/explicit/MigrationExplicitMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/explicit/MigrationExplicitMultiJvmNode2.conf index 762f32d92a..2f642a20f0 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/explicit/MigrationExplicitMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/explicit/MigrationExplicitMultiJvmNode2.conf @@ -1,2 +1,2 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/explicit/MigrationExplicitMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/explicit/MigrationExplicitMultiJvmSpec.scala index 0772b7798a..a88c29694f 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/explicit/MigrationExplicitMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/migration/explicit/MigrationExplicitMultiJvmSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
* * * package akka.cluster.migration.explicit diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmNode1.conf index d8bee0cb07..8a5bd70eec 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmNode1.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.hello-world.router = "direct" akka.actor.deployment.hello-world.clustered.replicas = 1 akka.actor.deployment.hello-world.clustered.replication.storage = "transaction-log" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmNode2.conf index d8bee0cb07..8a5bd70eec 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmNode2.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.hello-world.router = "direct" akka.actor.deployment.hello-world.clustered.replicas = 1 akka.actor.deployment.hello-world.clustered.replication.storage = "transaction-log" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmSpec.scala index 7ed05307ae..23aaaec5a4 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.cluster.replication.transactionlog.writebehind.nosnapshot diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/snapshot/ReplicationTransactionLogWriteBehindSnapshotMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/snapshot/ReplicationTransactionLogWriteBehindSnapshotMultiJvmNode1.conf index 8aeaf3135f..84969a04e5 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/snapshot/ReplicationTransactionLogWriteBehindSnapshotMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/snapshot/ReplicationTransactionLogWriteBehindSnapshotMultiJvmNode1.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.hello-world.router = "direct" akka.actor.deployment.hello-world.clustered.replicas = 1 akka.actor.deployment.hello-world.clustered.replication.storage = "transaction-log" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/snapshot/ReplicationTransactionLogWriteBehindSnapshotMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/snapshot/ReplicationTransactionLogWriteBehindSnapshotMultiJvmNode2.conf index 8aeaf3135f..84969a04e5 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/snapshot/ReplicationTransactionLogWriteBehindSnapshotMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/snapshot/ReplicationTransactionLogWriteBehindSnapshotMultiJvmNode2.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.hello-world.router = "direct" akka.actor.deployment.hello-world.clustered.replicas = 1 akka.actor.deployment.hello-world.clustered.replication.storage = "transaction-log" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/snapshot/ReplicationTransactionLogWriteBehindSnapshotMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/snapshot/ReplicationTransactionLogWriteBehindSnapshotMultiJvmSpec.scala index c37a863ba0..80ad04a4df 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/snapshot/ReplicationTransactionLogWriteBehindSnapshotMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/snapshot/ReplicationTransactionLogWriteBehindSnapshotMultiJvmSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.cluster.replication.transactionlog.writebehind.snapshot diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmNode1.conf index 470c4c7a33..211cdbd6ee 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmNode1.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.hello-world.router = "direct" akka.actor.deployment.hello-world.clustered.replicas = 1 diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmNode2.conf index 5fb92ab01f..567b03b9cb 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmNode2.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.hello-world.router = "direct" akka.actor.deployment.hello-world.clustered.replicas = 1 akka.actor.deployment.hello-world.clustered.replication.storage = "transaction-log" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmSpec.scala index c9c53a9a25..d408f59df7 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.cluster.replication.transactionlog.writethrough.nosnapshot diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteThroughNoSnapshotMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteThroughNoSnapshotMultiJvmNode1.conf index 470c4c7a33..211cdbd6ee 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteThroughNoSnapshotMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteThroughNoSnapshotMultiJvmNode1.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.hello-world.router = "direct" akka.actor.deployment.hello-world.clustered.replicas = 1 diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteThroughNoSnapshotMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteThroughNoSnapshotMultiJvmNode2.conf index 5fb92ab01f..567b03b9cb 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteThroughNoSnapshotMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteThroughNoSnapshotMultiJvmNode2.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.hello-world.router = "direct" akka.actor.deployment.hello-world.clustered.replicas = 1 akka.actor.deployment.hello-world.clustered.replication.storage = "transaction-log" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteThroughNoSnapshotMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteThroughNoSnapshotMultiJvmSpec.scala index 10fc3883dc..2d95ae6047 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteThroughNoSnapshotMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteThroughNoSnapshotMultiJvmSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.cluster.replication.transactionlog.writethrough.nosnapshot diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/snapshot/ReplicationTransactionLogWriteThroughSnapshotMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/snapshot/ReplicationTransactionLogWriteThroughSnapshotMultiJvmNode1.conf index 1d332847b6..58c66d3e42 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/snapshot/ReplicationTransactionLogWriteThroughSnapshotMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/snapshot/ReplicationTransactionLogWriteThroughSnapshotMultiJvmNode1.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.hello-world.router = "direct" akka.actor.deployment.hello-world.clustered.replicas = 1 akka.actor.deployment.hello-world.clustered.replication.storage = "transaction-log" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/snapshot/ReplicationTransactionLogWriteThroughSnapshotMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/snapshot/ReplicationTransactionLogWriteThroughSnapshotMultiJvmNode2.conf index 1d332847b6..58c66d3e42 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/snapshot/ReplicationTransactionLogWriteThroughSnapshotMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/snapshot/ReplicationTransactionLogWriteThroughSnapshotMultiJvmNode2.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.hello-world.router = "direct" akka.actor.deployment.hello-world.clustered.replicas = 1 akka.actor.deployment.hello-world.clustered.replication.storage = "transaction-log" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/snapshot/ReplicationTransactionLogWriteThroughSnapshotMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/snapshot/ReplicationTransactionLogWriteThroughSnapshotMultiJvmSpec.scala index a7fbc7b4f1..b1136a5490 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/snapshot/ReplicationTransactionLogWriteThroughSnapshotMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/snapshot/ReplicationTransactionLogWriteThroughSnapshotMultiJvmSpec.scala @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.cluster.replication.transactionlog.writethrough.snapshot diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/bad_address/BadAddressDirectRoutingMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/bad_address/BadAddressDirectRoutingMultiJvmNode1.conf index 7b2ecc1583..1345a2287c 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/bad_address/BadAddressDirectRoutingMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/bad_address/BadAddressDirectRoutingMultiJvmNode1.conf @@ -1,4 +1,5 @@ -akka.event-handler-level = "DEBUG" +akka.enabled-modules = ["cluster"] +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "round-robin" akka.actor.deployment.service-hello.clustered.home = "node:node1" akka.actor.deployment.service-hello.clustered.replicas = 1 \ No newline at end of file diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/bad_address/BadAddressDirectRoutingMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/bad_address/BadAddressDirectRoutingMultiJvmSpec.scala index 6df40132c6..7c6911e70d 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/bad_address/BadAddressDirectRoutingMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/bad_address/BadAddressDirectRoutingMultiJvmSpec.scala @@ -23,7 +23,7 @@ object BadAddressDirectRoutingMultiJvmSpec { } -/* + class BadAddressDirectRoutingMultiJvmNode1 extends MasterClusterTestNode { import BadAddressDirectRoutingMultiJvmSpec._ @@ -39,5 +39,5 @@ class BadAddressDirectRoutingMultiJvmNode1 extends MasterClusterTestNode { Cluster.node.shutdown() } } -}*/ +} diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/multiple_replicas/MultiReplicaDirectRoutingMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/multiple_replicas/MultiReplicaDirectRoutingMultiJvmNode1.conf index 150095d5bf..40fcfa5d51 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/multiple_replicas/MultiReplicaDirectRoutingMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/multiple_replicas/MultiReplicaDirectRoutingMultiJvmNode1.conf @@ -1,2 +1,3 @@ -akka.event-handler-level = "DEBUG" +akka.enabled-modules = ["cluster"] +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "direct" \ No newline at end of file diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/multiple_replicas/MultiReplicaDirectRoutingMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/multiple_replicas/MultiReplicaDirectRoutingMultiJvmNode2.conf index 0bac6e8004..b60f6a3b5c 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/multiple_replicas/MultiReplicaDirectRoutingMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/multiple_replicas/MultiReplicaDirectRoutingMultiJvmNode2.conf @@ -1,2 +1,3 @@ -akka.event-handler-level = "DEBUG" +akka.enabled-modules = ["cluster"] +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "direct" diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/multiple_replicas/MultiReplicaDirectRoutingMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/multiple_replicas/MultiReplicaDirectRoutingMultiJvmSpec.scala index 
dd9207ac17..ca1f87503b 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/multiple_replicas/MultiReplicaDirectRoutingMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/multiple_replicas/MultiReplicaDirectRoutingMultiJvmSpec.scala @@ -22,7 +22,6 @@ object MultiReplicaDirectRoutingMultiJvmSpec { } -/* class MultiReplicaDirectRoutingMultiJvmNode2 extends ClusterTestNode { import MultiReplicaDirectRoutingMultiJvmSpec._ @@ -64,5 +63,4 @@ class MultiReplicaDirectRoutingMultiJvmNode1 extends MasterClusterTestNode { } } } -*/ diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/single_replica/SingleReplicaDirectRoutingMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/single_replica/SingleReplicaDirectRoutingMultiJvmNode1.conf index 81b5034354..2a3d9ba765 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/single_replica/SingleReplicaDirectRoutingMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/single_replica/SingleReplicaDirectRoutingMultiJvmNode1.conf @@ -1,3 +1,4 @@ -akka.event-handler-level = "DEBUG" +akka.enabled-modules = ["cluster"] +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "direct" akka.actor.deployment.service-hello.clustered.replicas = 1 \ No newline at end of file diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/single_replica/SingleReplicaDirectRoutingMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/single_replica/SingleReplicaDirectRoutingMultiJvmNode2.conf index 150095d5bf..40fcfa5d51 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/single_replica/SingleReplicaDirectRoutingMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/single_replica/SingleReplicaDirectRoutingMultiJvmNode2.conf @@ -1,2 +1,3 @@ -akka.event-handler-level = "DEBUG" +akka.enabled-modules = ["cluster"] +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "direct" \ No newline at end of file diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/single_replica/SingleReplicaDirectRoutingMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/single_replica/SingleReplicaDirectRoutingMultiJvmSpec.scala index 707f6e6c26..97b94d305c 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/single_replica/SingleReplicaDirectRoutingMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/single_replica/SingleReplicaDirectRoutingMultiJvmSpec.scala @@ -22,7 +22,7 @@ object SingleReplicaDirectRoutingMultiJvmSpec { } -/* + class SingleReplicaDirectRoutingMultiJvmNode1 extends MasterClusterTestNode { import SingleReplicaDirectRoutingMultiJvmSpec._ @@ -56,5 +56,5 @@ class SingleReplicaDirectRoutingMultiJvmNode2 extends ClusterTestNode { Cluster.node.shutdown() } } -}*/ +} diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/homenode/HomeNodeMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/homenode/HomeNodeMultiJvmNode1.conf index 0a5f18c2b9..d7e17c84d8 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/homenode/HomeNodeMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/homenode/HomeNodeMultiJvmNode1.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" 
+akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "round-robin" akka.actor.deployment.service-hello.clustered.preferred-nodes = ["node:node1"] akka.actor.deployment.service-hello.clustered.replicas = 1 \ No newline at end of file diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/homenode/HomeNodeMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/homenode/HomeNodeMultiJvmNode2.conf index 0a5f18c2b9..d7e17c84d8 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/homenode/HomeNodeMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/homenode/HomeNodeMultiJvmNode2.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "round-robin" akka.actor.deployment.service-hello.clustered.preferred-nodes = ["node:node1"] akka.actor.deployment.service-hello.clustered.replicas = 1 \ No newline at end of file diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_1_replica/RoundRobin1ReplicaMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_1_replica/RoundRobin1ReplicaMultiJvmNode1.conf index 221ccd25ae..dcbd276918 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_1_replica/RoundRobin1ReplicaMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_1_replica/RoundRobin1ReplicaMultiJvmNode1.conf @@ -1,4 +1,4 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "round-robin" akka.actor.deployment.service-hello.clustered.replicas = 1 \ No newline at end of file diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_1_replica/RoundRobin1ReplicaMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_1_replica/RoundRobin1ReplicaMultiJvmSpec.scala index 6f756ffef6..9a9c1dfbb2 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_1_replica/RoundRobin1ReplicaMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_1_replica/RoundRobin1ReplicaMultiJvmSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.cluster.routing.roundrobin_1_replica diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_2_replicas/RoundRobin2ReplicasMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_2_replicas/RoundRobin2ReplicasMultiJvmNode1.conf index 401a5bd8e4..09f4cfc93a 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_2_replicas/RoundRobin2ReplicasMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_2_replicas/RoundRobin2ReplicasMultiJvmNode1.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "round-robin" akka.actor.deployment.service-hello.clustered.preferred-nodes = ["node:node1","node:node2"] akka.actor.deployment.service-hello.clustered.replicas = 2 diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_2_replicas/RoundRobin2ReplicasMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_2_replicas/RoundRobin2ReplicasMultiJvmNode2.conf index 401a5bd8e4..09f4cfc93a 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_2_replicas/RoundRobin2ReplicasMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_2_replicas/RoundRobin2ReplicasMultiJvmNode2.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "round-robin" akka.actor.deployment.service-hello.clustered.preferred-nodes = ["node:node1","node:node2"] akka.actor.deployment.service-hello.clustered.replicas = 2 diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_2_replicas/RoundRobin2ReplicasMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_2_replicas/RoundRobin2ReplicasMultiJvmSpec.scala index c1c76e61a9..b16addfe3d 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_2_replicas/RoundRobin2ReplicasMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_2_replicas/RoundRobin2ReplicasMultiJvmSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.cluster.routing.roundrobin_2_replicas @@ -13,6 +13,9 @@ import Cluster._ import akka.actor._ import akka.actor.Actor._ import akka.config.Config +import java.util.concurrent.atomic.AtomicInteger +import java.util.concurrent.ConcurrentHashMap +import akka.util.Duration /** * When a MultiJvmNode is started, will it automatically be part of the cluster (so will it automatically be eligible @@ -32,7 +35,7 @@ object RoundRobin2ReplicasMultiJvmSpec { /** * What is the purpose of this node? Is this just a node for the cluster to make use of? 
*/ -/* + class RoundRobin2ReplicasMultiJvmNode1 extends WordSpec with MustMatchers with BeforeAndAfterAll { import RoundRobin2ReplicasMultiJvmSpec._ @@ -70,9 +73,9 @@ class RoundRobin2ReplicasMultiJvmNode1 extends WordSpec with MustMatchers with B override def afterAll() { shutdownLocalCluster() } -}*/ +} + -/* class RoundRobin2ReplicasMultiJvmNode2 extends WordSpec with MustMatchers { import RoundRobin2ReplicasMultiJvmSpec._ @@ -106,12 +109,14 @@ class RoundRobin2ReplicasMultiJvmNode2 extends WordSpec with MustMatchers { //todo: is there a reason to check for null again since it already has been done in the previous block. hello must not equal (null) - val replies = collection.mutable.Map.empty[String, Int] + val replies = new ConcurrentHashMap[String,AtomicInteger]() def count(reply: String) = { - if (replies.get(reply).isEmpty) replies.put(reply, 1) - else replies.put(reply, replies(reply) + 1) + val counter = new AtomicInteger(0) + Option(replies.putIfAbsent(reply, counter)).getOrElse(counter).incrementAndGet() } + implicit val timeout = Timeout(Duration(20, "seconds")) + count((hello ? "Hello").as[String].getOrElse(fail("Should have recieved reply from node1"))) count((hello ? "Hello").as[String].getOrElse(fail("Should have recieved reply from node2"))) count((hello ? "Hello").as[String].getOrElse(fail("Should have recieved reply from node1"))) @@ -121,12 +126,11 @@ class RoundRobin2ReplicasMultiJvmNode2 extends WordSpec with MustMatchers { count((hello ? "Hello").as[String].getOrElse(fail("Should have recieved reply from node1"))) count((hello ? "Hello").as[String].getOrElse(fail("Should have recieved reply from node2"))) - replies("World from node [node1]") must equal(4) - replies("World from node [node2]") must equal(4) + replies.get("World from node [node1]").get must equal(4) + replies.get("World from node [node2]").get must equal(4) } node.shutdown() } } -} -*/ \ No newline at end of file +} \ No newline at end of file diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_3_replicas/RoundRobin3ReplicasMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_3_replicas/RoundRobin3ReplicasMultiJvmNode1.conf index 851d7a98e8..75249c7713 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_3_replicas/RoundRobin3ReplicasMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_3_replicas/RoundRobin3ReplicasMultiJvmNode1.conf @@ -1,4 +1,4 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "round-robin" akka.actor.deployment.service-hello.clustered.replicas = 3 diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_3_replicas/RoundRobin3ReplicasMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_3_replicas/RoundRobin3ReplicasMultiJvmNode2.conf index 851d7a98e8..75249c7713 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_3_replicas/RoundRobin3ReplicasMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_3_replicas/RoundRobin3ReplicasMultiJvmNode2.conf @@ -1,4 +1,4 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "round-robin" akka.actor.deployment.service-hello.clustered.replicas = 3 diff --git 
a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_3_replicas/RoundRobin3ReplicasMultiJvmNode3.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_3_replicas/RoundRobin3ReplicasMultiJvmNode3.conf index 851d7a98e8..75249c7713 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_3_replicas/RoundRobin3ReplicasMultiJvmNode3.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_3_replicas/RoundRobin3ReplicasMultiJvmNode3.conf @@ -1,4 +1,4 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "round-robin" akka.actor.deployment.service-hello.clustered.replicas = 3 diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_3_replicas/RoundRobin3ReplicasMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_3_replicas/RoundRobin3ReplicasMultiJvmSpec.scala index eee003409d..d3d5a48cbf 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_3_replicas/RoundRobin3ReplicasMultiJvmSpec.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_3_replicas/RoundRobin3ReplicasMultiJvmSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.cluster.routing.roundrobin_3_replicas diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_failover/RoundRobinFailoverMultiJvmNode1.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_failover/RoundRobinFailoverMultiJvmNode1.conf index 0a5f18c2b9..d7e17c84d8 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_failover/RoundRobinFailoverMultiJvmNode1.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_failover/RoundRobinFailoverMultiJvmNode1.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "round-robin" akka.actor.deployment.service-hello.clustered.preferred-nodes = ["node:node1"] akka.actor.deployment.service-hello.clustered.replicas = 1 \ No newline at end of file diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_failover/RoundRobinFailoverMultiJvmNode2.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_failover/RoundRobinFailoverMultiJvmNode2.conf index 0a5f18c2b9..d7e17c84d8 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_failover/RoundRobinFailoverMultiJvmNode2.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_failover/RoundRobinFailoverMultiJvmNode2.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "round-robin" akka.actor.deployment.service-hello.clustered.preferred-nodes = ["node:node1"] akka.actor.deployment.service-hello.clustered.replicas = 1 \ No newline at end of file diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_failover/RoundRobinFailoverMultiJvmNode3.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_failover/RoundRobinFailoverMultiJvmNode3.conf index 0a5f18c2b9..d7e17c84d8 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_failover/RoundRobinFailoverMultiJvmNode3.conf +++ 
b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_failover/RoundRobinFailoverMultiJvmNode3.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "round-robin" akka.actor.deployment.service-hello.clustered.preferred-nodes = ["node:node1"] akka.actor.deployment.service-hello.clustered.replicas = 1 \ No newline at end of file diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_failover/RoundRobinFailoverMultiJvmNode4.conf b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_failover/RoundRobinFailoverMultiJvmNode4.conf index 0a5f18c2b9..d7e17c84d8 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_failover/RoundRobinFailoverMultiJvmNode4.conf +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin_failover/RoundRobinFailoverMultiJvmNode4.conf @@ -1,5 +1,5 @@ akka.enabled-modules = ["cluster"] -akka.event-handler-level = "DEBUG" +akka.event-handler-level = "WARNING" akka.actor.deployment.service-hello.router = "round-robin" akka.actor.deployment.service-hello.clustered.preferred-nodes = ["node:node1"] akka.actor.deployment.service-hello.clustered.replicas = 1 \ No newline at end of file diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/sample/PingPongMultiJvmExample.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/sample/PingPongMultiJvmExample.scala index f97724bf67..7a44247ceb 100644 --- a/akka-cluster/src/multi-jvm/scala/akka/cluster/sample/PingPongMultiJvmExample.scala +++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/sample/PingPongMultiJvmExample.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.cluster.sample diff --git a/akka-cluster/src/test/protocol/ProtobufProtocol.proto b/akka-cluster/src/test/protocol/ProtobufProtocol.proto index 5e41f75978..404e288e09 100644 --- a/akka-cluster/src/test/protocol/ProtobufProtocol.proto +++ b/akka-cluster/src/test/protocol/ProtobufProtocol.proto @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor; diff --git a/akka-cluster/src/test/scala/akka/cluster/TransactionLogSpec.scala b/akka-cluster/src/test/scala/akka/cluster/TransactionLogSpec.scala index c267bc6f98..259c5179e1 100644 --- a/akka-cluster/src/test/scala/akka/cluster/TransactionLogSpec.scala +++ b/akka-cluster/src/test/scala/akka/cluster/TransactionLogSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.cluster diff --git a/akka-cluster/src/test/scala/akka/cluster/sample/ClusteredPingPongSample.scala b/akka-cluster/src/test/scala/akka/cluster/sample/ClusteredPingPongSample.scala index d44212899b..31eef26fee 100644 --- a/akka-cluster/src/test/scala/akka/cluster/sample/ClusteredPingPongSample.scala +++ b/akka-cluster/src/test/scala/akka/cluster/sample/ClusteredPingPongSample.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.cluster.sample diff --git a/akka-cluster/src/test/scala/akka/cluster/sample/ComputeGridSample.scala b/akka-cluster/src/test/scala/akka/cluster/sample/ComputeGridSample.scala index f95adb3c88..4cf7a7010f 100644 --- a/akka-cluster/src/test/scala/akka/cluster/sample/ComputeGridSample.scala +++ b/akka-cluster/src/test/scala/akka/cluster/sample/ComputeGridSample.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.cluster.sample diff --git a/akka-cluster/src/test/scala/akka/serialization/ActorSerializeSpec.scala b/akka-cluster/src/test/scala/akka/serialization/ActorSerializeSpec.scala index 0a041d9b40..1430ba4c9e 100644 --- a/akka-cluster/src/test/scala/akka/serialization/ActorSerializeSpec.scala +++ b/akka-cluster/src/test/scala/akka/serialization/ActorSerializeSpec.scala @@ -14,6 +14,7 @@ import Actor._ import SerializeSpec._ case class MyMessage(id: Long, name: String, status: Boolean) + @RunWith(classOf[JUnitRunner]) class ActorSerializeSpec extends Spec with ShouldMatchers with BeforeAndAfterAll { @@ -21,13 +22,13 @@ class ActorSerializeSpec extends Spec with ShouldMatchers with BeforeAndAfterAll it("should be able to serialize and de-serialize a stateful actor with a given serializer") { val actor1 = localActorOf[MyJavaSerializableActor].start() - (actor1 !! "hello").getOrElse("_") should equal("world 1") - (actor1 !! "hello").getOrElse("_") should equal("world 2") + (actor1 ? "hello").get should equal("world 1") + (actor1 ? "hello").get should equal("world 2") val bytes = toBinary(actor1) val actor2 = fromBinary(bytes) actor2.start() - (actor2 !! "hello").getOrElse("_") should equal("world 3") + (actor2 ? "hello").get should equal("world 3") actor2.receiveTimeout should equal(Some(1000)) actor1.stop() @@ -51,12 +52,12 @@ class ActorSerializeSpec extends Spec with ShouldMatchers with BeforeAndAfterAll val actor2 = fromBinary(toBinary(actor1)) Thread.sleep(1000) actor2.getDispatcher.mailboxSize(actor1) should be > (0) - (actor2 !! "hello-reply").getOrElse("_") should equal("world") + (actor2 ? "hello-reply").get should equal("world") val actor3 = fromBinary(toBinary(actor1, false)) Thread.sleep(1000) actor3.getDispatcher.mailboxSize(actor1) should equal(0) - (actor3 !! "hello-reply").getOrElse("_") should equal("world") + (actor3 ? "hello-reply").get should equal("world") } it("should be able to serialize and deserialize a PersonActorWithMessagesInMailbox") { @@ -77,12 +78,12 @@ class ActorSerializeSpec extends Spec with ShouldMatchers with BeforeAndAfterAll val actor2 = fromBinary(toBinary(actor1)) Thread.sleep(1000) actor2.getDispatcher.mailboxSize(actor1) should be > (0) - (actor2 !! "hello-reply").getOrElse("_") should equal("hello") + (actor2 ? "hello-reply").get should equal("hello") val actor3 = fromBinary(toBinary(actor1, false)) Thread.sleep(1000) actor3.getDispatcher.mailboxSize(actor1) should equal(0) - (actor3 !! "hello-reply").getOrElse("_") should equal("hello") + (actor3 ? "hello-reply").get should equal("hello") } } @@ -123,12 +124,12 @@ class ActorSerializeSpec extends Spec with ShouldMatchers with BeforeAndAfterAll val actor2 = fromBinary(toBinary(actor1)) Thread.sleep(1000) actor2.getDispatcher.mailboxSize(actor1) should be > (0) - (actor2 !! "hello-reply").getOrElse("_") should equal("world") + (actor2 ? 
"hello-reply").get should equal("world") val actor3 = fromBinary(toBinary(actor1, false)) Thread.sleep(1000) actor3.getDispatcher.mailboxSize(actor1) should equal(0) - (actor3 !! "hello-reply").getOrElse("_") should equal("world") + (actor3 ? "hello-reply").get should equal("world") } } } diff --git a/akka-docs/cluster/durable-mailbox.rst b/akka-docs/cluster/durable-mailbox.rst index b24e1fea30..1c9b577997 100644 --- a/akka-docs/cluster/durable-mailbox.rst +++ b/akka-docs/cluster/durable-mailbox.rst @@ -29,7 +29,7 @@ The durable mailboxes currently supported are: - ``RedisDurableMailboxStorage`` -- backed by Redis - ``ZooKeeperDurableMailboxStorage`` -- backed by ZooKeeper - ``BeanstalkDurableMailboxStorage`` -- backed by Beanstalkd - + - ``MongoNaiveDurableMailboxStorage`` -- backed by MongoDB We'll walk through each one of these in detail in the sections below. Soon Akka will also have: @@ -226,3 +226,57 @@ server. This is done in the ``akka.actor.mailbox.beanstalk`` section in the } } } + +MongoDB-based Durable Mailboxes +=============================== + +This mailbox is backed by `MongoDB `_. +MongoDB is a fast, lightweight and scalable document-oriented database. It contains a number of +features cohesive to a fast, reliable & durable queueing mechanism which the Akka Mailbox takes advantage of. + + +Akka's implementations of MongoDB mailboxes are built on top of the purely asynchronous MongoDB driver (often known as `Hammersmith `_ and ``com.mongodb.async``) and as such are purely callback based with a Netty network layer. This makes them extremely fast & lightweight versus building on other MongoDB implementations such as `mongo-java-driver `_ and `Casbah `_. This is done in +the ``akka.actor.mailbox.mongodb`` section in the ``akka.conf`` configuration +file. + +.. code-block:: none + + mongodb { + # Any specified collection name will be used as a prefix for collections that use durable mongo mailboxes + uri = "mongodb://localhost/akka.mailbox" # Follow Mongo URI Spec - http://www.mongodb.org/display/DOCS/Connections + # Configurable timeouts for certain ops + timeout { + read = 3000 # number of milliseconds to wait for a read to succeed before timing out the future + write = 3000 # number of milliseconds to wait for a write to succeed before timing out the future + } + } + +You must specify a hostname (and optionally port) and at *least* a Database name. If you specify a collection name, it will be used as a 'prefix' for the collections Akka creates to store mailbox messages. Otherwise, collections will be prefixed with ``mailbox.`` + +It is also possible to configure the timeout threshholds for Read and Write operations in the ``timeout`` block. +Currently Akka offers only one "type" of MongoDB based Mailbox but there are plans to support at least +one other kind which uses a different queueing strategy. + + +'Naive' MongoDB-based Durable Mailbox +------------------------------------- +The currently supported mailbox is considered "Naive" as it removes messages (using the ``findAndRemove`` +command) from the MongoDB datastore as soon as the actor consumes them. This could cause message loss +if an actor crashes before completely processing a message. It is not a problem per sé, but behavior +users should be aware of. 
+ +Here is an example of how you can configure your dispatcher to use this mailbox:: + + val dispatcher = DurableDispatcher( + "my:service", + MongoNaiveDurableMailboxStorage) + +or for a thread-based durable dispatcher:: + + self.dispatcher = DurablePinnedDispatcher( + self, + MongoNaiveDurableMailboxStorage) + + diff --git a/akka-docs/dev/multi-jvm-testing.rst b/akka-docs/dev/multi-jvm-testing.rst index 91ee612658..691306f850 100644 --- a/akka-docs/dev/multi-jvm-testing.rst +++ b/akka-docs/dev/multi-jvm-testing.rst @@ -1,31 +1,94 @@ -Multi-JVM Testing -================= -Included in the example is an sbt trait for multi-JVM testing which will fork -JVMs for multi-node testing. There is support for running applications (objects -with main methods) and running ScalaTest tests. +.. _multi-jvm-testing: -Using the multi-JVM testing is straight-forward. First, mix the ``MultiJvmTests`` -trait into your sbt project:: +################### + Multi-JVM Testing +################### - class SomeProject(info: ProjectInfo) extends DefaultProject(info) with MultiJvmTests +Support for running applications (objects with main methods) and +ScalaTest tests in multiple JVMs. + +.. contents:: :local: + + +Setup +===== + +The multi-JVM testing is an sbt plugin that you can find here: + +http://github.com/typesafehub/sbt-multi-jvm + +You can add it as a plugin by adding the following to your plugins/build.sbt:: + + resolvers += "Typesafe Repo" at "http://repo.typesafe.com/typesafe/releases/" + + libraryDependencies += "com.typesafe" %% "sbt-multi-jvm" % "0.1" + +You can then add multi-JVM testing to a project by including the ``MultiJvm`` +settings and config. For example, here is how the akka-cluster project adds +multi-JVM testing:: + + import MultiJvmPlugin.{ MultiJvm, extraOptions } + + lazy val cluster = Project( + id = "akka-cluster", + base = file("akka-cluster"), + settings = defaultSettings ++ MultiJvmPlugin.settings ++ Seq( + extraOptions in MultiJvm <<= (sourceDirectory in MultiJvm) { src => + (name: String) => (src ** (name + ".conf")).get.headOption.map("-Dakka.config=" + _.absolutePath).toSeq + }, + test in Test <<= (test in Test) dependsOn (test in MultiJvm) + ) + ) configs (MultiJvm) You can specify JVM options for the forked JVMs:: - class SomeProject(info: ProjectInfo) extends DefaultProject(info) with MultiJvmTests { - override def multiJvmOptions = Seq("-Xmx256M") - } + jvmOptions in MultiJvm := Seq("-Xmx256M") -There are two sbt commands: ``multi-jvm-run`` for running applications and -``multi-jvm-test`` for running ScalaTest tests. -The ``MultiJvmTests`` trait resides in the ``project/build`` directory. +Running tests +============= + +The multi-jvm tasks are similar to the normal tasks: ``test``, ``test-only``, +and ``run``, but are under the ``multi-jvm`` configuration. + +So in Akka, to run all the multi-JVM tests in the akka-cluster project use (at +the sbt prompt): + +.. code-block:: none + + akka-cluster/multi-jvm:test + +Or one can change to the ``akka-cluster`` project first, and then run the +tests: + +.. code-block:: none + + project akka-cluster + multi-jvm:test + +To run individual tests use ``test-only``: + +.. code-block:: none + + multi-jvm:test-only akka.cluster.deployment.Deployment + +More than one test name can be listed to run multiple specific +tests. Tab-completion in sbt makes it easy to complete the test names. + +It's also possible to specify JVM options with ``test-only`` by including those +options after the test names and ``--``. For example: + +.. 
code-block:: none + + multi-jvm:test-only akka.cluster.deployment.Deployment -- -Dsome.option=something + Creating application tests -~~~~~~~~~~~~~~~~~~~~~~~~~~ +========================== -The tests are discovered through a naming convention. A test is named with the -following pattern: +The tests are discovered, and combined, through a naming convention. A test is +named with the following pattern: .. code-block:: none @@ -36,10 +99,10 @@ it groups together tests/applications under a single ``TestName`` that will run together. The part after, the ``NodeName``, is a distinguishing name for each forked JVM. -So to create a 3-node test called ``Test``, you can create three applications +So to create a 3-node test called ``Sample``, you can create three applications like the following:: - package example + package sample object SampleMultiJvmNode1 { def main(args: Array[String]) { @@ -59,40 +122,35 @@ like the following:: } } -When you call ``multi-jvm-run Test`` at the sbt prompt, three JVMs will be +When you call ``multi-jvm:run sample.Sample`` at the sbt prompt, three JVMs will be spawned, one for each node. It will look like this: .. code-block:: none - > multi-jvm-run Test + > multi-jvm:run sample.Sample ... - [info] == multi-jvm-run == - [info] == multi-jvm / Test == - [info] Starting JVM-Node1 for example.SampleMultiJvmNode1 - [info] Starting JVM-Node2 for example.SampleMultiJvmNode2 - [info] Starting JVM-Node3 for example.SampleMultiJvmNode3 + [info] Starting JVM-Node1 for sample.SampleMultiJvmNode1 + [info] Starting JVM-Node2 for sample.SampleMultiJvmNode2 + [info] Starting JVM-Node3 for sample.SampleMultiJvmNode3 [JVM-Node1] Hello from node 1 [JVM-Node2] Hello from node 2 [JVM-Node3] Hello from node 3 - [info] == multi-jvm / Test == - [info] == multi-jvm-run == - [success] Successful. + [success] Total time: ... Naming -~~~~~~ +====== You can change what the ``MultiJvm`` identifier is. For example, to change it to -``ClusterTest`` override the ``multiJvmTestName`` method:: +``ClusterTest`` use the ``multiJvmMarker`` setting:: - class SomeProject(info: ProjectInfo) extends DefaultProject(info) with MultiJvmTests { - override def multiJvmTestName = "ClusterSpec" - } + multiJvmMarker in MultiJvm := "ClusterTest" Your tests should now be named ``{TestName}ClusterTest{NodeName}``. + Configuration of the JVM instances -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +================================== Setting JVM options ------------------- @@ -117,6 +175,7 @@ and add the options to them. -Dakka.cluster.nodename=node3 -Dakka.cluster.port=9993 + Overriding akka.conf options ---------------------------- @@ -139,15 +198,16 @@ For example, to override the configuration option ``akka.cluster.name`` let's cr akka.cluster.name = "test-cluster" + ScalaTest -~~~~~~~~~ +========= There is also support for creating ScalaTest tests rather than applications. To do this use the same naming convention as above, but create ScalaTest suites rather than objects with main methods. You need to have ScalaTest on the classpath. Here is a similar example to the one above but using ScalaTest:: - package example + package sample import org.scalatest.WordSpec import org.scalatest.matchers.MustMatchers @@ -170,11 +230,12 @@ classpath. Here is a similar example to the one above but using ScalaTest:: } } -To run these tests you would call ``multi-jvm-test Spec`` at the sbt prompt. +To run just these tests you would call ``multi-jvm:test-only sample.Spec`` at +the sbt prompt. 
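Putting the naming convention and the per-node ``.conf`` pickup together, a two-node ``Sample`` test is typically laid out like the ``akka-cluster`` tests touched by this patch (the ``sample`` paths below are illustrative; they are resolved relative to the multi-JVM source directory and matched by the ``extraOptions`` setting shown earlier):

.. code-block:: none

    src/multi-jvm/scala/sample/SampleMultiJvmSpec.scala    # defines SampleMultiJvmNode1 and SampleMultiJvmNode2
    src/multi-jvm/scala/sample/SampleMultiJvmNode1.conf    # picked up as -Dakka.config=... for node 1
    src/multi-jvm/scala/sample/SampleMultiJvmNode2.conf    # picked up as -Dakka.config=... for node 2
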
-Zookeeper Barrier -~~~~~~~~~~~~~~~~~ +ZookeeperBarrier +================ When running multi-JVM tests it's common to need to coordinate timing across nodes. To do this there is a ZooKeeper-based double-barrier (there is both an @@ -190,6 +251,8 @@ timeout is 60 seconds. Here is an example of coordinating the starting of two nodes and then running something in coordination:: + package sample + import org.scalatest.WordSpec import org.scalatest.matchers.MustMatchers import org.scalatest.BeforeAndAfterAll @@ -254,10 +317,8 @@ An example output from this would be: .. code-block:: none - > multi-jvm-run Test + > multi-jvm:test-only sample.Sample ... - [info] == multi-jvm-run == - [info] == multi-jvm / Test == [info] Starting JVM-Node1 for example.SampleMultiJvmNode1 [info] Starting JVM-Node2 for example.SampleMultiJvmNode2 [JVM-Node1] Loading config [akka.conf] from the application classpath. @@ -265,23 +326,25 @@ An example output from this would be: ... [JVM-Node2] Hello from node 2 [JVM-Node1] Hello from node 1 - [info] == multi-jvm / Test == - [info] == multi-jvm-run == - [success] Successful. + [success] + NetworkFailureTest -^^^^^^^^^^^^^^^^^^ +================== -You can use the 'NetworkFailureTest' trait to test network failure. See the 'RemoteErrorHandlingNetworkTest' test. Your tests needs to end with 'NetworkTest'. They are disabled by default. To run them you need to enable a flag. +You can use the ``NetworkFailureTest`` trait to test network failure. See the +``RemoteErrorHandlingNetworkTest`` test. Your tests needs to end with +``NetworkTest``. They are disabled by default. To run them you need to enable a +flag. -Example: +Example:: -:: + project akka-remote + set akka.test.network true + test-only akka.actor.remote.RemoteErrorHandlingNetworkTest - project akka-remote - set akka.test.network true - test-only akka.actor.remote.RemoteErrorHandlingNetworkTest +It uses ``ipfw`` for network management. Mac OSX comes with it installed but if +you are on another platform you might need to install it yourself. Here is a +port: -It uses 'ipfw' for network management. Mac OSX comes with it installed but if you are on another platform you might need to install it yourself. Here is a port: - -``_ +http://info.iet.unipi.it/~luigi/dummynet diff --git a/akka-docs/project/licenses.rst b/akka-docs/project/licenses.rst index b7104d9679..0d7417e44e 100644 --- a/akka-docs/project/licenses.rst +++ b/akka-docs/project/licenses.rst @@ -12,7 +12,7 @@ Akka License This software is licensed under the Apache 2 license, quoted below. - Copyright 2009-2011 Scalable Solutions AB + Copyright 2009-2011 Typesafe Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of @@ -35,11 +35,11 @@ All committers have signed this CLA Based on: http://www.apache.org/licenses/icla.txt - Scalable Solutions AB + Typesafe Inc. Individual Contributor License Agreement ("Agreement") V2.0 http://www.scalablesolutions.se/licenses/ - Thank you for your interest in Akka, a Scalable Solutions AB (the + Thank you for your interest in Akka, a Typesafe Inc. (the "Company") Open Source project. 
In order to clarify the intellectual property license granted with Contributions from any person or entity, the Company must have a Contributor License Agreement ("CLA") on file diff --git a/akka-docs/scala/actors.rst b/akka-docs/scala/actors.rst index eb25cb2d1a..20ceda4285 100644 --- a/akka-docs/scala/actors.rst +++ b/akka-docs/scala/actors.rst @@ -561,6 +561,34 @@ The actor has a well-defined non-circular life-cycle. => STARTED (when 'start' is invoked) - can receive messages => SHUT DOWN (when 'exit' or 'stop' is invoked) - can't do anything +Actors and exceptions +--------------------- +It can happen that while a message is being processed by an actor, that some kind of exception is thrown, e.g. a +database exception. + +What happens to the Message +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If an exception is thrown while a message is being processed (so taken of his mailbox and handed over the the receive), +then this message will be lost. It is important to understand that it is not put back on the mailbox. So if you want to +retry processing of a message, you need to deal with it yourself by catching the exception and retry your flow. Make +sure that you put a bound on the number of retries since you don't want a system to livelock (so consuming a lot of +cpu cycles without making progress). + +What happens to the mailbox +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +If an exception is thrown while a message is being processed, nothing happens to the mailbox. If the actor is restarted, +the same mailbox will be there. So all messages on that mailbox, will be there as well. + +What happens to the actor +^^^^^^^^^^^^^^^^^^^^^^^^^ +If an exception is thrown and the actor is supervised, the actor object itself is discarded and a new instance is +created. This new instance will now be used in the actor references to this actor (so this is done invisible +to the developer). +If the actor is _not_ supervised, but its lifeCycle is set to Permanent (default), it will just keep on processing messages as if nothing had happened. +If the actor is _not_ supervised, but its lifeCycle is set to Temporary, it will be stopped immediately. + + Extending Actors using PartialFunction chaining ----------------------------------------------- diff --git a/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailbox.scala b/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailbox.scala index 577af5135a..35ec40edee 100644 --- a/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailbox.scala +++ b/akka-durable-mailboxes/akka-beanstalk-mailbox/src/main/scala/akka/actor/mailbox/BeanstalkBasedMailbox.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor.mailbox diff --git a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FiledBasedMailbox.scala b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FiledBasedMailbox.scala index 27ec3dc6b6..2964eb19b9 100644 --- a/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FiledBasedMailbox.scala +++ b/akka-durable-mailboxes/akka-file-mailbox/src/main/scala/akka/actor/mailbox/FiledBasedMailbox.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.actor.mailbox diff --git a/akka-durable-mailboxes/akka-mailboxes-common/src/main/protocol/MailboxProtocol.proto b/akka-durable-mailboxes/akka-mailboxes-common/src/main/protocol/MailboxProtocol.proto index 47052886ab..914d40de67 100644 --- a/akka-durable-mailboxes/akka-mailboxes-common/src/main/protocol/MailboxProtocol.proto +++ b/akka-durable-mailboxes/akka-mailboxes-common/src/main/protocol/MailboxProtocol.proto @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ option java_package = "akka.actor.mailbox"; diff --git a/akka-durable-mailboxes/akka-mailboxes-common/src/main/scala/akka/actor/mailbox/DurableDispatcher.scala b/akka-durable-mailboxes/akka-mailboxes-common/src/main/scala/akka/actor/mailbox/DurableDispatcher.scala index 42332ab205..e1aab05c71 100644 --- a/akka-durable-mailboxes/akka-mailboxes-common/src/main/scala/akka/actor/mailbox/DurableDispatcher.scala +++ b/akka-durable-mailboxes/akka-mailboxes-common/src/main/scala/akka/actor/mailbox/DurableDispatcher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor.mailbox @@ -42,10 +42,11 @@ sealed abstract class DurableMailboxStorage(mailboxFQN: String) { } } -case object RedisDurableMailboxStorage extends DurableMailboxStorage("akka.actor.mailbox.RedisBasedMailbox") -case object BeanstalkDurableMailboxStorage extends DurableMailboxStorage("akka.actor.mailbox.BeanstalkBasedMailbox") -case object FileDurableMailboxStorage extends DurableMailboxStorage("akka.actor.mailbox.FileBasedMailbox") -case object ZooKeeperDurableMailboxStorage extends DurableMailboxStorage("akka.actor.mailbox.ZooKeeperBasedMailbox") +case object RedisDurableMailboxStorage extends DurableMailboxStorage("akka.actor.mailbox.RedisBasedMailbox") +case object MongoNaiveDurableMailboxStorage extends DurableMailboxStorage("akka.actor.mailbox.MongoBasedNaiveMailbox") +case object BeanstalkDurableMailboxStorage extends DurableMailboxStorage("akka.actor.mailbox.BeanstalkBasedMailbox") +case object FileDurableMailboxStorage extends DurableMailboxStorage("akka.actor.mailbox.FileBasedMailbox") +case object ZooKeeperDurableMailboxStorage extends DurableMailboxStorage("akka.actor.mailbox.ZooKeeperBasedMailbox") /** * The durable equivalent of Dispatcher @@ -135,7 +136,7 @@ case class DurablePinnedDispatcher( /** * Configurator for the DurableDispatcher - * Do not forget to specify the "storage", valid values are "redis", "beanstalkd", "zookeeper" and "file" + * Do not forget to specify the "storage", valid values are "redis", "beanstalkd", "zookeeper", "mongodb" and "file" * * @author Jonas Bonér */ @@ -153,6 +154,7 @@ class DurableDispatcherConfigurator extends MessageDispatcherConfigurator { def getStorage(config: Configuration): DurableMailboxStorage = { val storage = config.getString("storage") map { case "redis" => RedisDurableMailboxStorage + case "mongodb" => MongoNaiveDurableMailboxStorage case "beanstalk" => BeanstalkDurableMailboxStorage case "zookeeper" => ZooKeeperDurableMailboxStorage case "file" => FileDurableMailboxStorage diff --git a/akka-durable-mailboxes/akka-mailboxes-common/src/main/scala/akka/actor/mailbox/DurableMailbox.scala b/akka-durable-mailboxes/akka-mailboxes-common/src/main/scala/akka/actor/mailbox/DurableMailbox.scala index 5a928ce148..844023e445 100644 --- a/akka-durable-mailboxes/akka-mailboxes-common/src/main/scala/akka/actor/mailbox/DurableMailbox.scala +++ 
b/akka-durable-mailboxes/akka-mailboxes-common/src/main/scala/akka/actor/mailbox/DurableMailbox.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor.mailbox diff --git a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/BSONSerialization.scala b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/BSONSerialization.scala new file mode 100644 index 0000000000..9f277ec1e5 --- /dev/null +++ b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/BSONSerialization.scala @@ -0,0 +1,110 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. + */ +package akka.actor.mailbox + +import akka.actor.{Actor, ActorRef, NullChannel} +import akka.config.Config.config +import akka.dispatch._ +import akka.event.EventHandler +import akka.AkkaException +import akka.remote.MessageSerializer +import akka.remote.protocol.RemoteProtocol.MessageProtocol + +import MailboxProtocol._ + +import com.mongodb.async._ + +import org.bson.util._ +import org.bson.io.{BasicOutputBuffer, OutputBuffer} +import org.bson.types.ObjectId +import java.io.{ByteArrayInputStream, InputStream} + +import org.bson._ +import org.bson.collection._ + +object BSONSerializableMailbox extends SerializableBSONObject[MongoDurableMessage] with Logging { + + protected[akka] def serializeDurableMsg(msg: MongoDurableMessage)(implicit serializer: BSONSerializer) = { + EventHandler.debug(this, "Serializing a durable message to MongoDB: %s".format(msg)) + val msgData = MessageSerializer.serialize(msg.message.asInstanceOf[AnyRef]) + EventHandler.debug(this, "Serialized Message: %s".format(msgData)) + + // TODO - Skip the whole map creation step for performance, fun, and profit! (Needs Salat) + val b = Map.newBuilder[String, Any] + b += "_id" -> msg._id + b += "ownerAddress" -> msg.ownerAddress + + msg.channel match { + case a : ActorRef => { b += "senderAddress" -> a.address } + case _ => + } + /** + * TODO - Figure out a way for custom serialization of the message instance + * TODO - Test if a serializer is registered for the message and if not, use toByteString + */ + b += "message" -> new org.bson.types.Binary(0, msgData.toByteArray) + val doc = b.result + EventHandler.debug(this, "Serialized Document: %s".format(doc)) + serializer.putObject(doc) + } + + /* + * TODO - Implement some object pooling for the Encoders/decoders + */ + def encode(msg: MongoDurableMessage, out: OutputBuffer) = { + implicit val serializer = new DefaultBSONSerializer + serializer.set(out) + serializeDurableMsg(msg) + serializer.done + } + + def encode(msg: MongoDurableMessage): Array[Byte] = { + implicit val serializer = new DefaultBSONSerializer + val buf = new BasicOutputBuffer + serializer.set(buf) + serializeDurableMsg(msg) + val bytes = buf.toByteArray + serializer.done + bytes + } + + def decode(in: InputStream): MongoDurableMessage = { + val deserializer = new DefaultBSONDeserializer + // TODO - Skip the whole doc step for performance, fun, and profit! 
(Needs Salat / custom Deser) + val doc = deserializer.decodeAndFetch(in).asInstanceOf[BSONDocument] + EventHandler.debug(this, "Deserializing a durable message from MongoDB: %s".format(doc)) + val msgData = MessageProtocol.parseFrom(doc.as[org.bson.types.Binary]("message").getData) + val msg = MessageSerializer.deserialize(msgData) + val ownerAddress = doc.as[String]("ownerAddress") + val owner = Actor.registry.actorFor(ownerAddress).getOrElse( + throw new DurableMailboxException("No actor could be found for address [" + ownerAddress + "], could not deserialize message.")) + + val senderOption = if (doc.contains("senderAddress")) { + Actor.registry.actorFor(doc.as[String]("senderAddress")) + } else None + + val sender = senderOption match { + case Some(ref) => ref + case None => NullChannel + } + + MongoDurableMessage(ownerAddress, owner, msg, sender) + } + + def checkObject(msg: MongoDurableMessage, isQuery: Boolean = false) = {} // object expected to be OK with this message type. + + def checkKeys(msg: MongoDurableMessage) {} // keys expected to be OK with this message type. + + /** + * Checks for an ID and generates one. + * Not all implementers will need this, but it gets invoked nonetheless + * as a signal to BSONDocument, etc implementations to verify an id is there + * and generate one if needed. + */ + def checkID(msg: MongoDurableMessage) = msg // OID already generated in wrapper message + + def _id(msg: MongoDurableMessage): Option[AnyRef] = Some(msg._id) +} + +// vim: set ts=2 sw=2 sts=2 et: diff --git a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailbox.scala b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailbox.scala new file mode 100644 index 0000000000..15083d9a72 --- /dev/null +++ b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoBasedMailbox.scala @@ -0,0 +1,135 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. + */ +package akka.actor.mailbox + +import akka.actor.ActorRef +import akka.config.Config.config +import akka.dispatch._ +import akka.event.EventHandler +import akka.AkkaException + +import MailboxProtocol._ + +import com.mongodb.async._ +import com.mongodb.async.futures.RequestFutures +import org.bson.collection._ + +class MongoBasedMailboxException(message: String) extends AkkaException(message) + +/** + * A "naive" durable mailbox which uses findAndRemove; it's possible if the actor crashes + * after consuming a message that the message could be lost. + * + * Does not use the Protobuf protocol, instead using a pure Mongo based serialization for sanity + * (and mongo-iness). + * + * TODO - Integrate Salat or a Salat-Based solution for the case classiness + * + * @author Brendan W. McAdams + */ +class MongoBasedNaiveMailbox(val owner: ActorRef) extends DurableExecutableMailbox(owner) { + // this implicit object provides the context for reading/writing things as MongoDurableMessage + implicit val mailboxBSONSer = BSONSerializableMailbox + implicit val safeWrite = WriteConcern.Safe // TODO - Replica Safe when appropriate! 
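As a rough illustration of the serialization path above, the sketch below wraps a payload in a ``MongoDurableMessage`` and turns it into BSON bytes with ``BSONSerializableMailbox.encode``. The actor creation call, the ``owner.address`` accessor and the ``String`` payload are assumptions made for the example; decoding the bytes back additionally requires the owning actor to be resolvable through ``Actor.registry`` (see ``decode`` above)::

   import akka.actor.{ Actor, NullChannel }
   import akka.actor.mailbox.{ BSONSerializableMailbox, MongoDurableMessage }

   class EchoActor extends Actor {
     def receive = { case msg => self.reply(msg) }
   }

   // Owner that the durable message points back to when it is read again
   // (actor creation shown schematically).
   val owner = Actor.actorOf(new EchoActor).start

   // Wrap a payload the way the mailbox does before inserting it into MongoDB.
   val durable = MongoDurableMessage(owner.address, owner, "hello", NullChannel)

   // Plain BSON bytes, ready to be stored in the mailbox collection.
   val bytes: Array[Byte] = BSONSerializableMailbox.encode(durable)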
+ + val URI_CONFIG_KEY = "akka.actor.mailbox.mongodb.uri" + val WRITE_TIMEOUT_KEY = "akka.actor.mailbox.mongodb.timeout.write" + val READ_TIMEOUT_KEY = "akka.actor.mailbox.mongodb.timeout.read" + val mongoURI = config.getString(URI_CONFIG_KEY) + val writeTimeout = config.getInt(WRITE_TIMEOUT_KEY, 3000) + val readTimeout = config.getInt(READ_TIMEOUT_KEY, 3000) + + + @volatile private var mongo = connect() + + def enqueue(msg: MessageInvocation) = { + EventHandler.debug(this, + "\nENQUEUING message in mongodb-based mailbox [%s]".format(msg)) + /* TODO - Test if a BSON serializer is registered for the message and only if not, use toByteString? */ + val durableMessage = MongoDurableMessage(ownerAddress, msg.receiver, msg.message, msg.channel) + // todo - do we need to filter the actor name at all for safe collection naming? + val result = new DefaultPromise[Boolean](writeTimeout) + mongo.insert(durableMessage, false)(RequestFutures.write { wr: Either[Throwable, (Option[AnyRef], WriteResult)] => wr match { + case Right((oid, wr)) => result.completeWithResult(true) + case Left(t) => result.completeWithException(t) + }}) + + result.as[Boolean].orNull + } + + def dequeue: MessageInvocation = withErrorHandling { + /** + * Retrieves first item in natural order (oldest first, assuming no modification/move) + * Waits 3 seconds for now for a message, else pops back out. + * TODO - How do we handle fetch, but sleep if nothing is in there cleanly? + * TODO - Should we have a specific query in place? Which way do we sort? + * TODO - Error handling version! + */ + val msgInvocation = new DefaultPromise[MessageInvocation](readTimeout) + mongo.findAndRemove(Document.empty) { doc: Option[MongoDurableMessage] => doc match { + case Some(msg) => { + EventHandler.debug(this, + "\nDEQUEUING message in mongo-based mailbox [%s]".format(msg)) + msgInvocation.completeWithResult(msg.messageInvocation()) + EventHandler.debug(this, + "\nDEQUEUING messageInvocation in mongo-based mailbox [%s]".format(msgInvocation)) + } + case None => { + EventHandler.info(this, + "\nNo matching document found. 
Not an error, just an empty queue.") + msgInvocation.completeWithResult(null) + } + () + }} + msgInvocation.as[MessageInvocation].orNull + } + + def size: Int = { + val count = new DefaultPromise[Int](readTimeout) + mongo.count()(count.completeWithResult) + count.as[Int].getOrElse(-1) + } + + + def isEmpty: Boolean = size == 0 //TODO review find other solution, this will be very expensive + + private[akka] def connect() = { + require(mongoURI.isDefined, "Mongo URI (%s) must be explicitly defined in akka.conf; will not assume defaults for safety sake.".format(URI_CONFIG_KEY)) + EventHandler.info(this, + "\nCONNECTING mongodb { uri : [%s] } ".format(mongoURI)) + val _dbh = MongoConnection.fromURI(mongoURI.get) match { + case (conn, None, None) => { + throw new UnsupportedOperationException("You must specify a database name to use with MongoDB; please see the MongoDB Connection URI Spec: 'http://www.mongodb.org/display/DOCS/Connections'") + } + case (conn, Some(db), Some(coll)) => { + EventHandler.warning(this, + "\nCollection name (%s) specified in MongoURI Config will be used as a prefix for mailbox names".format(coll.name)) + db("%s.%s".format(coll.name, name)) + } + case (conn, Some(db), None) => { + db("mailbox.%s".format(name)) + } + case default => throw new IllegalArgumentException("Illegal or unexpected response from Mongo Connection URI Parser: %s".format(default)) + } + EventHandler.debug(this, + "\nCONNECTED to mongodb { dbh: '%s | %s'} ".format(_dbh, _dbh.name)) + _dbh + } + + private def withErrorHandling[T](body: => T): T = { + try { + body + } catch { + case e: Exception => { + mongo = connect() + body + } + case e => { + val error = new MongoBasedMailboxException("Could not connect to MongoDB server") + EventHandler.error(error, this, "Could not connect to MongoDB server") + throw error + } + } + } +} diff --git a/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoDurableMessage.scala b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoDurableMessage.scala new file mode 100644 index 0000000000..01a666b16f --- /dev/null +++ b/akka-durable-mailboxes/akka-mongo-mailbox/src/main/scala/akka/actor/mailbox/MongoDurableMessage.scala @@ -0,0 +1,44 @@ +/** + * Copyright (C) 2009-2011 Typesafe Inc. + */ +package akka.actor.mailbox + +import akka.actor.{ActorRef, UntypedChannel, NullChannel} +import akka.config.Config.config +import akka.dispatch._ +import akka.event.EventHandler +import akka.AkkaException + +import MailboxProtocol._ + +import com.mongodb.async._ + +import org.bson.util._ +import org.bson.io.OutputBuffer +import org.bson.types.ObjectId +import java.io.InputStream + +import org.bson.collection._ + +/** + * A container message for durable mailbox messages, which can be easily stuffed into + * and out of MongoDB. + * + * Does not use the Protobuf protocol, instead using a pure Mongo based serialization for sanity + * (and mongo-iness). + * + * This should eventually branch out into a more flat, compound solution for all remote actor stuff + * TODO - Integrate Salat or a Salat-Based solution for the case classiness + * + * @author Brendan W. 
McAdams + */ +case class MongoDurableMessage(val ownerAddress: String, + val receiver: ActorRef, + val message: Any, + val channel: UntypedChannel, + val _id: ObjectId = new ObjectId) { + + def messageInvocation() = MessageInvocation(this.receiver, this.message, this.channel) +} + +// vim: set ts=2 sw=2 sts=2 et: diff --git a/akka-durable-mailboxes/akka-mongo-mailbox/src/test/scala/akka/actor/mailbox/MongoBasedMailboxSpec.scala b/akka-durable-mailboxes/akka-mongo-mailbox/src/test/scala/akka/actor/mailbox/MongoBasedMailboxSpec.scala new file mode 100644 index 0000000000..bb34252035 --- /dev/null +++ b/akka-durable-mailboxes/akka-mongo-mailbox/src/test/scala/akka/actor/mailbox/MongoBasedMailboxSpec.scala @@ -0,0 +1,74 @@ +package akka.actor.mailbox + +import java.util.concurrent.TimeUnit + +import org.scalatest.WordSpec +import org.scalatest.matchers.MustMatchers +import org.scalatest.{ BeforeAndAfterEach, BeforeAndAfterAll } + +import akka.actor._ +import akka.actor.Actor._ +import java.util.concurrent.CountDownLatch +import akka.config.Supervision.Temporary +import akka.dispatch.MessageDispatcher + +class MongoBasedMailboxSpec extends DurableMailboxSpec("mongodb", MongoNaiveDurableMailboxStorage) { + import org.apache.log4j.{Logger, Level} + import com.mongodb.async._ + + val mongo = MongoConnection("localhost", 27017)("akka") + + mongo.dropDatabase(){ success => } + + Logger.getRootLogger.setLevel(Level.DEBUG) +} + +/*object DurableMongoMailboxSpecActorFactory { + + class MongoMailboxTestActor extends Actor { + self.lifeCycle = Temporary + def receive = { + case "sum" => self.reply("sum") + } + } + + def createMongoMailboxTestActor(id: String)(implicit dispatcher: MessageDispatcher): ActorRef = { + val queueActor = localActorOf[MongoMailboxTestActor] + queueActor.dispatcher = dispatcher + queueActor.start + } +}*/ + +/*class MongoBasedMailboxSpec extends WordSpec with MustMatchers with BeforeAndAfterEach with BeforeAndAfterAll { + import DurableMongoMailboxSpecActorFactory._ + + implicit val dispatcher = DurableDispatcher("mongodb", MongoNaiveDurableMailboxStorage, 1) + + "A MongoDB based naive mailbox backed actor" should { + "should handle reply to ! for 1 message" in { + val latch = new CountDownLatch(1) + val queueActor = createMongoMailboxTestActor("mongoDB Backend should handle Reply to !") + val sender = localActorOf(new Actor { def receive = { case "sum" => latch.countDown } }).start + + queueActor.!("sum")(Some(sender)) + latch.await(10, TimeUnit.SECONDS) must be (true) + } + + "should handle reply to ! 
for multiple messages" in { + val latch = new CountDownLatch(5) + val queueActor = createMongoMailboxTestActor("mongoDB Backend should handle reply to !") + val sender = localActorOf( new Actor { def receive = { case "sum" => latch.countDown } } ).start + + queueActor.!("sum")(Some(sender)) + queueActor.!("sum")(Some(sender)) + queueActor.!("sum")(Some(sender)) + queueActor.!("sum")(Some(sender)) + queueActor.!("sum")(Some(sender)) + latch.await(10, TimeUnit.SECONDS) must be (true) + } + } + + override def beforeEach() { + registry.local.shutdownAll + } +}*/ diff --git a/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailbox.scala b/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailbox.scala index 0ec5f72df8..4a4ade1e38 100644 --- a/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailbox.scala +++ b/akka-durable-mailboxes/akka-redis-mailbox/src/main/scala/akka/actor/mailbox/RedisBasedMailbox.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor.mailbox diff --git a/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailbox.scala b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailbox.scala index 3886cf1cf4..8d577a8aca 100644 --- a/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailbox.scala +++ b/akka-durable-mailboxes/akka-zookeeper-mailbox/src/main/scala/akka/actor/mailbox/ZooKeeperBasedMailbox.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.actor.mailbox diff --git a/akka-http/src/main/scala/akka/http/Mist.scala b/akka-http/src/main/scala/akka/http/Mist.scala index d5139795ba..532475b995 100644 --- a/akka-http/src/main/scala/akka/http/Mist.scala +++ b/akka-http/src/main/scala/akka/http/Mist.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.http diff --git a/akka-http/src/main/scala/akka/http/Servlet30Context.scala b/akka-http/src/main/scala/akka/http/Servlet30Context.scala index cbafffaea3..809e5229bd 100644 --- a/akka-http/src/main/scala/akka/http/Servlet30Context.scala +++ b/akka-http/src/main/scala/akka/http/Servlet30Context.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.http diff --git a/akka-http/src/test/scala/config/ConfigSpec.scala b/akka-http/src/test/scala/config/ConfigSpec.scala index fe4ad0c2f9..67a26a42f0 100644 --- a/akka-http/src/test/scala/config/ConfigSpec.scala +++ b/akka-http/src/test/scala/config/ConfigSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.config diff --git a/akka-kernel/src/main/scala/akka/kernel/DefaultAkkaLoader.scala b/akka-kernel/src/main/scala/akka/kernel/DefaultAkkaLoader.scala index 33b8c855a8..8218c67efc 100644 --- a/akka-kernel/src/main/scala/akka/kernel/DefaultAkkaLoader.scala +++ b/akka-kernel/src/main/scala/akka/kernel/DefaultAkkaLoader.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
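Pulling the MongoDB mailbox pieces above together: the new storage type is used by constructing a durable dispatcher the same way the (commented-out) ``MongoBasedMailboxSpec`` does, provided a connection is configured. The dispatcher construction below mirrors that spec; the keys listed in the comment are the ones ``MongoBasedNaiveMailbox`` reads from ``akka.conf`` (values are examples only, and the URI must name a database)::

   import akka.actor.Actor
   import akka.actor.mailbox.{ DurableDispatcher, MongoNaiveDurableMailboxStorage }

   // Settings read by the MongoDB mailbox (example values):
   //   akka.actor.mailbox.mongodb.uri           -> "mongodb://localhost:27017/akka"
   //   akka.actor.mailbox.mongodb.timeout.write -> 3000 (ms)
   //   akka.actor.mailbox.mongodb.timeout.read  -> 3000 (ms)

   // Dispatcher whose mailboxes are persisted in MongoDB instead of memory.
   val mongoDispatcher = DurableDispatcher("mongodb", MongoNaiveDurableMailboxStorage, 1)

   class Worker extends Actor {
     self.dispatcher = mongoDispatcher

     def receive = {
       case msg => self.reply("done: " + msg)
     }
   }

If the URI also names a collection, that collection name becomes a prefix for the per-actor mailbox collections; otherwise they are created as ``mailbox.<name>``.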
*/ package akka.http diff --git a/akka-kernel/src/main/scala/akka/kernel/EmbeddedAppServer.scala b/akka-kernel/src/main/scala/akka/kernel/EmbeddedAppServer.scala index 471f45213e..b3a26f3936 100644 --- a/akka-kernel/src/main/scala/akka/kernel/EmbeddedAppServer.scala +++ b/akka-kernel/src/main/scala/akka/kernel/EmbeddedAppServer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.http diff --git a/akka-kernel/src/main/scala/akka/kernel/Kernel.scala b/akka-kernel/src/main/scala/akka/kernel/Kernel.scala index 1561a488aa..4e3e5a6d74 100644 --- a/akka-kernel/src/main/scala/akka/kernel/Kernel.scala +++ b/akka-kernel/src/main/scala/akka/kernel/Kernel.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.kernel diff --git a/akka-kernel/src/main/scala/akka/servlet/Initializer.scala b/akka-kernel/src/main/scala/akka/servlet/Initializer.scala index 7b683c3d76..41ca1ab932 100644 --- a/akka-kernel/src/main/scala/akka/servlet/Initializer.scala +++ b/akka-kernel/src/main/scala/akka/servlet/Initializer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.servlet diff --git a/akka-samples/akka-sample-ants/src/main/scala/Ants.scala b/akka-samples/akka-sample-ants/src/main/scala/Ants.scala index ebcf1d0a79..24349047ec 100644 --- a/akka-samples/akka-sample-ants/src/main/scala/Ants.scala +++ b/akka-samples/akka-sample-ants/src/main/scala/Ants.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package sample.ants diff --git a/akka-samples/akka-sample-chat/src/main/scala/ChatServer.scala b/akka-samples/akka-sample-chat/src/main/scala/ChatServer.scala index 0c76929ff9..c278efecc5 100644 --- a/akka-samples/akka-sample-chat/src/main/scala/ChatServer.scala +++ b/akka-samples/akka-sample-chat/src/main/scala/ChatServer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB . + * Copyright (C) 2009-2010 Typesafe Inc. . */ package sample.chat diff --git a/akka-samples/akka-sample-hello/src/main/scala/sample/hello/Boot.scala b/akka-samples/akka-sample-hello/src/main/scala/sample/hello/Boot.scala index 91f413e1fc..c1769bff04 100644 --- a/akka-samples/akka-sample-hello/src/main/scala/sample/hello/Boot.scala +++ b/akka-samples/akka-sample-hello/src/main/scala/sample/hello/Boot.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package sample.hello diff --git a/akka-samples/akka-sample-hello/src/main/scala/sample/hello/HelloEndpoint.scala b/akka-samples/akka-sample-hello/src/main/scala/sample/hello/HelloEndpoint.scala index fec6f55470..c08aee2618 100644 --- a/akka-samples/akka-sample-hello/src/main/scala/sample/hello/HelloEndpoint.scala +++ b/akka-samples/akka-sample-hello/src/main/scala/sample/hello/HelloEndpoint.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package sample.hello diff --git a/akka-samples/akka-sample-osgi/src/main/scala/OsgiExample.scala b/akka-samples/akka-sample-osgi/src/main/scala/OsgiExample.scala index 5ee3f016a9..0e9fda067a 100644 --- a/akka-samples/akka-sample-osgi/src/main/scala/OsgiExample.scala +++ b/akka-samples/akka-sample-osgi/src/main/scala/OsgiExample.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package sample.osgi diff --git a/akka-samples/akka-sample-remote/src/main/scala/ServerManagedRemoteActorSample.scala b/akka-samples/akka-sample-remote/src/main/scala/ServerManagedRemoteActorSample.scala index 4776c19004..f3969112ef 100644 --- a/akka-samples/akka-sample-remote/src/main/scala/ServerManagedRemoteActorSample.scala +++ b/akka-samples/akka-sample-remote/src/main/scala/ServerManagedRemoteActorSample.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package sample.remote diff --git a/akka-sbt-plugin/build.sbt b/akka-sbt-plugin/build.sbt new file mode 100644 index 0000000000..e01a2e9809 --- /dev/null +++ b/akka-sbt-plugin/build.sbt @@ -0,0 +1,14 @@ + +sbtPlugin := true + +organization := "se.scalablesolutions.akka" + +name := "akka-sbt-plugin" + +version := "2.0-SNAPSHOT" + +publishMavenStyle := true + +publishTo := Some("Typesafe Publish Repo" at "http://repo.typesafe.com/typesafe/maven-releases/") + +credentials += Credentials(Path.userHome / ".ivy2" / "typesafe-credentials") diff --git a/akka-sbt-plugin/src/main/scala/AkkaKernelPlugin.scala b/akka-sbt-plugin/src/main/scala/AkkaKernelPlugin.scala new file mode 100644 index 0000000000..65f24ae9d8 --- /dev/null +++ b/akka-sbt-plugin/src/main/scala/AkkaKernelPlugin.scala @@ -0,0 +1,151 @@ +/** + * Copyright (C) 2011 Typesafe + */ + +import sbt._ +import sbt.Keys._ +import sbt.classpath.ClasspathUtilities +import sbt.Project.Initialize +import java.io.File + +object AkkaMicrokernelPlugin extends Plugin { + + val Dist = config("dist") extend (Runtime) + val dist = TaskKey[File]("dist", "Builds an Akka microkernel directory") + // TODO how to reuse keyword "clean" here instead (dist:clean) + val distClean = TaskKey[File]("clean-dist", "Removes Akka microkernel directory") + val outputDirectory = SettingKey[File]("output-directory") + val configSourceDirs = TaskKey[Seq[File]]("config-source-directories", + "Configuration files are copied from these directories") + + val distJvmOptions = SettingKey[String]("jvm-options", "JVM parameters to use in start script") + val distMainClass = SettingKey[String]("kernel-main-class", "Kernel main class to use in start script") + + val libFilter = SettingKey[File ⇒ Boolean]("lib-filter", "Filter of dependency jar files") + val additionalLibs = TaskKey[Seq[File]]("additional-libs", "Additional dependency jar files") + + override lazy val settings = + inConfig(Dist)(Seq( + dist <<= packageBin.identity, + packageBin <<= distTask, + distClean <<= distCleanTask, + dependencyClasspath <<= (dependencyClasspath in Runtime).identity, + unmanagedResourceDirectories <<= (unmanagedResourceDirectories in Runtime).identity, + outputDirectory <<= target / "dist", + configSourceDirs <<= defaultConfigSourceDirs, + distJvmOptions := "-Xms1024M -Xmx1024M -Xss1M -XX:MaxPermSize=256M -XX:+UseParallelGC", + distMainClass := "akka.kernel.Main", + libFilter := { f ⇒ true }, + additionalLibs <<= defaultAdditionalLibs)) ++ + Seq( + dist <<= (dist in Dist).identity) + + private def distTask: 
Initialize[Task[File]] = + (outputDirectory, sourceDirectory, crossTarget, dependencyClasspath, + configSourceDirs, distJvmOptions, distMainClass, libFilter, streams) map { + (outDir, src, tgt, cp, configSrc, jvmOptions, mainClass, libFilt, s) ⇒ + val log = s.log + val distBinPath = outDir / "bin" + val distConfigPath = outDir / "config" + val distDeployPath = outDir / "deploy" + val distLibPath = outDir / "lib" + // TODO how do I grab the additionalLibs setting? Can't add it in input tuple, limitation of number of elements in map of tuple. + val addLibs = Seq.empty[File] + + log.info("Creating distribution %s ..." format outDir) + IO.createDirectory(outDir) + Scripts(jvmOptions, mainClass).writeScripts(distBinPath) + copyDirectories(configSrc, distConfigPath) + copyJars(tgt, distDeployPath) + copyFiles(libFiles(cp, libFilt), distLibPath) + copyFiles(addLibs, distLibPath) + log.info("Distribution created.") + outDir + } + + private def distCleanTask: Initialize[Task[File]] = + (outputDirectory, streams) map { (outDir, s) ⇒ + val log = s.log + log.info("Cleaning " + outDir) + IO.delete(outDir) + outDir + } + + def defaultConfigSourceDirs = (sourceDirectory, unmanagedResourceDirectories) map { (src, resources) ⇒ + Seq(src / "main" / "config") ++ resources + } + + def defaultAdditionalLibs = (libraryDependencies) map { (libs) ⇒ + Seq.empty[File] + } + + private case class Scripts(jvmOptions: String, mainClass: String) { + + def writeScripts(to: File) = { + scripts.map { script ⇒ + val target = new File(to, script.name) + IO.write(target, script.contents) + setExecutable(target, script.executable) + }.foldLeft(None: Option[String])(_ orElse _) + } + + private case class DistScript(name: String, contents: String, executable: Boolean) + + private def scripts = Set(DistScript("start", distShScript, true), + DistScript("start.bat", distBatScript, true)) + + private def distShScript = + """|#!/bin/sh + | + |AKKA_HOME="$(cd "$(cd "$(dirname "$0")"; pwd -P)"/..; pwd)" + |AKKA_CLASSPATH="$AKKA_HOME/lib/*:$AKKA_HOME/config" + |JAVA_OPTS="%s" + | + |java $JAVA_OPTS -cp "$AKKA_CLASSPATH" -Dakka.home="$AKKA_HOME" %s + |""".stripMargin.format(jvmOptions, mainClass) + + private def distBatScript = + """|@echo off + |set AKKA_HOME=%%~dp0.. 
+ |set AKKA_CLASSPATH=%%AKKA_HOME%%\lib\*;%%AKKA_HOME%%\config + |set JAVA_OPTS=%s + | + |java %%JAVA_OPTS%% -cp "%%AKKA_CLASSPATH%%" -Dakka.home="%%AKKA_HOME%%" %s + |""".stripMargin.format(jvmOptions, mainClass) + + private def setExecutable(target: File, executable: Boolean): Option[String] = { + val success = target.setExecutable(executable, false) + if (success) None else Some("Couldn't set permissions of " + target) + } + } + + private def copyDirectories(fromDirs: Seq[File], to: File) = { + IO.createDirectory(to) + for (from ← fromDirs) { + IO.copyDirectory(from, to) + } + } + + private def copyJars(fromDir: File, toDir: File) = { + val jarFiles = fromDir.listFiles.filter(f ⇒ + f.isFile && + f.name.endsWith(".jar") && + !f.name.contains("-sources") && + !f.name.contains("-docs")) + + copyFiles(jarFiles, toDir) + } + + private def copyFiles(files: Seq[File], toDir: File) = { + for (f ← files) { + IO.copyFile(f, new File(toDir, f.getName)) + } + } + + private def libFiles(classpath: Classpath, libFilter: File ⇒ Boolean): Seq[File] = { + val (libs, directories) = classpath.map(_.data).partition(ClasspathUtilities.isArchive) + libs.map(_.asFile).filter(libFilter) + } + +} + diff --git a/akka-sbt-plugin/src/main/scala/AkkaKernelProject.scala b/akka-sbt-plugin/src/main/scala/AkkaKernelProject.scala deleted file mode 100644 index 29f44f8404..0000000000 --- a/akka-sbt-plugin/src/main/scala/AkkaKernelProject.scala +++ /dev/null @@ -1,116 +0,0 @@ -/** - * Copyright (C) 2009-2011 Scalable Solutions AB - */ - -import sbt._ - -trait AkkaKernelProject extends AkkaProject with AkkaMicrokernelProject { - // automatic akka kernel dependency - val akkaKernel = akkaModule("kernel") -} - -trait AkkaMicrokernelProject extends AkkaConfigProject { - def distOutputPath = outputPath / "dist" - - def distBinName = "bin" - def distConfigName = "config" - def distDeployName = "deploy" - def distLibName = "lib" - - def distBinPath = distOutputPath / distBinName - def distConfigPath = distOutputPath / distConfigName - def distDeployPath = distOutputPath / distDeployName - def distLibPath = distOutputPath / distLibName - - def distJvmOptions = "-Xms1024M -Xmx1024M -Xss1M -XX:MaxPermSize=256M -XX:+UseParallelGC" - def distMainClass = "akka.kernel.Main" - - def distProjectDependencies = topologicalSort.dropRight(1) - - def distProjectDependenciesConfig = { - distProjectDependencies.flatMap( p => p match { - case acp: AkkaConfigProject => Some(acp.configSources) - case _ => None - }).foldLeft(Path.emptyPathFinder)(_ +++ _) - } - - def distConfigSources = configSources +++ distProjectDependenciesConfig - - def distDeployJars = jarPath - - def distRuntimeJars = { - runClasspath - .filter(ClasspathUtilities.isArchive) - .filter(jar => !jar.name.contains("-sources")) - .filter(jar => !jar.name.contains("-docs")) - } - - def distProjectDependencyJars = jarsOfProjectDependencies - - def distLibs = distRuntimeJars +++ distProjectDependencyJars +++ buildLibraryJar - - lazy val dist = (distAction dependsOn (`package`, distClean) - describedAs "Create an Akka microkernel distribution.") - - def distAction = task { - log.info("Creating distribution %s ..." 
format distOutputPath) - writeScripts(distScripts, distBinPath) orElse - copyFiles(distConfigSources, distConfigPath) orElse - copyFiles(distDeployJars, distDeployPath) orElse - copyFiles(distLibs, distLibPath) orElse { - log.info("Distribution created.") - None - } - } - - def copyFiles(from: PathFinder, to: Path) = { - FileUtilities.copyFlat(from.get, to, log).left.toOption - } - - lazy val distClean = distCleanAction describedAs "Clean the dist target dir." - - def distCleanAction = task { FileUtilities.clean(distOutputPath, log) } - - case class DistScript(name: String, contents: String, executable: Boolean) - - def distScripts = Set(DistScript("start", distShScript, true), - DistScript("start.bat", distBatScript, true)) - - def distShScript = """|#!/bin/sh - | - |AKKA_HOME="$(cd "$(cd "$(dirname "$0")"; pwd -P)"/..; pwd)" - |AKKA_CLASSPATH="$AKKA_HOME/lib/*:$AKKA_HOME/config" - |JAVA_OPTS="%s" - | - |java $JAVA_OPTS -cp "$AKKA_CLASSPATH" -Dakka.home="$AKKA_HOME" %s - |""".stripMargin.format(distJvmOptions, distMainClass) - - def distBatScript = """|@echo off - |set AKKA_HOME=%%~dp0.. - |set AKKA_CLASSPATH=%%AKKA_HOME%%\lib\*;%%AKKA_HOME%%\config - |set JAVA_OPTS=%s - | - |java %%JAVA_OPTS%% -cp "%%AKKA_CLASSPATH%%" -Dakka.home="%%AKKA_HOME%%" %s - |""".stripMargin.format(distJvmOptions, distMainClass) - - def writeScripts(scripts: Set[DistScript], to: Path) = { - scripts.map { script => - val target = to / script.name - FileUtilities.write(target.asFile, script.contents, log) orElse - setExecutable(target, script.executable) - }.foldLeft(None: Option[String])(_ orElse _) - } - - def setExecutable(target: Path, executable: Boolean): Option[String] = { - val success = target.asFile.setExecutable(executable, false) - if (success) None else Some("Couldn't set permissions of " + target) - } -} - -trait AkkaConfigProject extends BasicScalaProject with MavenStyleScalaPaths { - def mainConfigPath = mainSourcePath / "config" - - def configSources = mainConfigPath ** "*.*" - - override def mainUnmanagedClasspath = super.mainUnmanagedClasspath +++ mainConfigPath -} diff --git a/akka-slf4j/src/main/scala/akka/event/slf4j/SLF4J.scala b/akka-slf4j/src/main/scala/akka/event/slf4j/SLF4J.scala index 4ab3d1976e..289d4aa5df 100644 --- a/akka-slf4j/src/main/scala/akka/event/slf4j/SLF4J.scala +++ b/akka-slf4j/src/main/scala/akka/event/slf4j/SLF4J.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.event.slf4j diff --git a/akka-spring/src/main/scala/akka/spring/ActorBeanDefinitionParser.scala b/akka-spring/src/main/scala/akka/spring/ActorBeanDefinitionParser.scala index de6beba97d..2617444ceb 100644 --- a/akka-spring/src/main/scala/akka/spring/ActorBeanDefinitionParser.scala +++ b/akka-spring/src/main/scala/akka/spring/ActorBeanDefinitionParser.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/main/scala/akka/spring/ActorFactoryBean.scala b/akka-spring/src/main/scala/akka/spring/ActorFactoryBean.scala index ea5bb755e7..1ffb4e0c3d 100644 --- a/akka-spring/src/main/scala/akka/spring/ActorFactoryBean.scala +++ b/akka-spring/src/main/scala/akka/spring/ActorFactoryBean.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. 
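The new plugin above replaces the sbt 0.7 ``AkkaKernelProject`` trait deleted in the same patch. A rough sketch of consuming it from an sbt 0.10-style build, assuming the artifact has been published as configured in ``akka-sbt-plugin/build.sbt`` and that the sbt version in use supports ``addSbtPlugin``::

   // project/plugins.sbt of a hypothetical application build
   resolvers += "Typesafe Repo" at "http://repo.typesafe.com/typesafe/maven-releases/"

   addSbtPlugin("se.scalablesolutions.akka" % "akka-sbt-plugin" % "2.0-SNAPSHOT")

With the plugin's settings loaded, ``dist`` builds the microkernel layout (``bin``, ``config``, ``deploy`` and ``lib`` plus the start scripts) under ``target/dist``, and ``clean-dist`` in the ``dist`` configuration removes it again.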
*/ package akka.spring diff --git a/akka-spring/src/main/scala/akka/spring/ActorParser.scala b/akka-spring/src/main/scala/akka/spring/ActorParser.scala index e52871963c..32b9061ae3 100644 --- a/akka-spring/src/main/scala/akka/spring/ActorParser.scala +++ b/akka-spring/src/main/scala/akka/spring/ActorParser.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/main/scala/akka/spring/ActorProperties.scala b/akka-spring/src/main/scala/akka/spring/ActorProperties.scala index 7713b52be9..bc73e5a59b 100644 --- a/akka-spring/src/main/scala/akka/spring/ActorProperties.scala +++ b/akka-spring/src/main/scala/akka/spring/ActorProperties.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/main/scala/akka/spring/AkkaNamespaceHandler.scala b/akka-spring/src/main/scala/akka/spring/AkkaNamespaceHandler.scala index 38041a3ea4..2f923f31da 100644 --- a/akka-spring/src/main/scala/akka/spring/AkkaNamespaceHandler.scala +++ b/akka-spring/src/main/scala/akka/spring/AkkaNamespaceHandler.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/main/scala/akka/spring/AkkaSpringConfigurationTags.scala b/akka-spring/src/main/scala/akka/spring/AkkaSpringConfigurationTags.scala index c4ee997ef2..d253eef3c4 100644 --- a/akka-spring/src/main/scala/akka/spring/AkkaSpringConfigurationTags.scala +++ b/akka-spring/src/main/scala/akka/spring/AkkaSpringConfigurationTags.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/main/scala/akka/spring/CamelServiceBeanDefinitionParser.scala b/akka-spring/src/main/scala/akka/spring/CamelServiceBeanDefinitionParser.scala index b07ae068f3..4b465b1843 100644 --- a/akka-spring/src/main/scala/akka/spring/CamelServiceBeanDefinitionParser.scala +++ b/akka-spring/src/main/scala/akka/spring/CamelServiceBeanDefinitionParser.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/main/scala/akka/spring/CamelServiceFactoryBean.scala b/akka-spring/src/main/scala/akka/spring/CamelServiceFactoryBean.scala index 46f22a53bb..37a47c3490 100644 --- a/akka-spring/src/main/scala/akka/spring/CamelServiceFactoryBean.scala +++ b/akka-spring/src/main/scala/akka/spring/CamelServiceFactoryBean.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/main/scala/akka/spring/ConfiggyPropertyPlaceholderConfigurer.scala b/akka-spring/src/main/scala/akka/spring/ConfiggyPropertyPlaceholderConfigurer.scala index c06ab18781..c346d94263 100644 --- a/akka-spring/src/main/scala/akka/spring/ConfiggyPropertyPlaceholderConfigurer.scala +++ b/akka-spring/src/main/scala/akka/spring/ConfiggyPropertyPlaceholderConfigurer.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. 
*/ package akka.spring diff --git a/akka-spring/src/main/scala/akka/spring/DispatcherBeanDefinitionParser.scala b/akka-spring/src/main/scala/akka/spring/DispatcherBeanDefinitionParser.scala index c2754e3174..d62549625e 100644 --- a/akka-spring/src/main/scala/akka/spring/DispatcherBeanDefinitionParser.scala +++ b/akka-spring/src/main/scala/akka/spring/DispatcherBeanDefinitionParser.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/main/scala/akka/spring/DispatcherFactoryBean.scala b/akka-spring/src/main/scala/akka/spring/DispatcherFactoryBean.scala index 703b713302..8ba77319b8 100644 --- a/akka-spring/src/main/scala/akka/spring/DispatcherFactoryBean.scala +++ b/akka-spring/src/main/scala/akka/spring/DispatcherFactoryBean.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/main/scala/akka/spring/DispatcherProperties.scala b/akka-spring/src/main/scala/akka/spring/DispatcherProperties.scala index 22d18998bb..6e7a51bd7e 100644 --- a/akka-spring/src/main/scala/akka/spring/DispatcherProperties.scala +++ b/akka-spring/src/main/scala/akka/spring/DispatcherProperties.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/main/scala/akka/spring/PropertyEntries.scala b/akka-spring/src/main/scala/akka/spring/PropertyEntries.scala index 771c83872c..275ba8ca1b 100644 --- a/akka-spring/src/main/scala/akka/spring/PropertyEntries.scala +++ b/akka-spring/src/main/scala/akka/spring/PropertyEntries.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/main/scala/akka/spring/SupervisionBeanDefinitionParser.scala b/akka-spring/src/main/scala/akka/spring/SupervisionBeanDefinitionParser.scala index fc258d44f2..3979efab60 100644 --- a/akka-spring/src/main/scala/akka/spring/SupervisionBeanDefinitionParser.scala +++ b/akka-spring/src/main/scala/akka/spring/SupervisionBeanDefinitionParser.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/main/scala/akka/spring/SupervisionFactoryBean.scala b/akka-spring/src/main/scala/akka/spring/SupervisionFactoryBean.scala index 00aa4e9157..664513f809 100644 --- a/akka-spring/src/main/scala/akka/spring/SupervisionFactoryBean.scala +++ b/akka-spring/src/main/scala/akka/spring/SupervisionFactoryBean.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/test/scala/ActorFactoryBeanTest.scala b/akka-spring/src/test/scala/ActorFactoryBeanTest.scala index 4b8f24396a..9345d881e0 100644 --- a/akka-spring/src/test/scala/ActorFactoryBeanTest.scala +++ b/akka-spring/src/test/scala/ActorFactoryBeanTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. 
*/ package akka.spring diff --git a/akka-spring/src/test/scala/ConfiggyPropertyPlaceholderConfigurerSpec.scala b/akka-spring/src/test/scala/ConfiggyPropertyPlaceholderConfigurerSpec.scala index 718b39bdad..c54fca14d3 100644 --- a/akka-spring/src/test/scala/ConfiggyPropertyPlaceholderConfigurerSpec.scala +++ b/akka-spring/src/test/scala/ConfiggyPropertyPlaceholderConfigurerSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/test/scala/DispatcherBeanDefinitionParserTest.scala b/akka-spring/src/test/scala/DispatcherBeanDefinitionParserTest.scala index 18eba19f10..5b85c9b3eb 100644 --- a/akka-spring/src/test/scala/DispatcherBeanDefinitionParserTest.scala +++ b/akka-spring/src/test/scala/DispatcherBeanDefinitionParserTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/test/scala/DispatcherFactoryBeanTest.scala b/akka-spring/src/test/scala/DispatcherFactoryBeanTest.scala index 486ec8820c..6bcdbd919c 100644 --- a/akka-spring/src/test/scala/DispatcherFactoryBeanTest.scala +++ b/akka-spring/src/test/scala/DispatcherFactoryBeanTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/test/scala/DispatcherSpringFeatureTest.scala b/akka-spring/src/test/scala/DispatcherSpringFeatureTest.scala index 06155181af..ad33910e1d 100644 --- a/akka-spring/src/test/scala/DispatcherSpringFeatureTest.scala +++ b/akka-spring/src/test/scala/DispatcherSpringFeatureTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/test/scala/SupervisionBeanDefinitionParserTest.scala b/akka-spring/src/test/scala/SupervisionBeanDefinitionParserTest.scala index c1c2fac1f8..2dc0445005 100644 --- a/akka-spring/src/test/scala/SupervisionBeanDefinitionParserTest.scala +++ b/akka-spring/src/test/scala/SupervisionBeanDefinitionParserTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/test/scala/SupervisionFactoryBeanTest.scala b/akka-spring/src/test/scala/SupervisionFactoryBeanTest.scala index 3f28363011..f17e2cc92c 100644 --- a/akka-spring/src/test/scala/SupervisionFactoryBeanTest.scala +++ b/akka-spring/src/test/scala/SupervisionFactoryBeanTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/test/scala/SupervisorSpringFeatureTest.scala b/akka-spring/src/test/scala/SupervisorSpringFeatureTest.scala index ec3406b5c2..58097d2af9 100644 --- a/akka-spring/src/test/scala/SupervisorSpringFeatureTest.scala +++ b/akka-spring/src/test/scala/SupervisorSpringFeatureTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. 
*/ package akka.spring diff --git a/akka-spring/src/test/scala/TypedActorBeanDefinitionParserTest.scala b/akka-spring/src/test/scala/TypedActorBeanDefinitionParserTest.scala index 01dd15214e..02868db903 100644 --- a/akka-spring/src/test/scala/TypedActorBeanDefinitionParserTest.scala +++ b/akka-spring/src/test/scala/TypedActorBeanDefinitionParserTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala b/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala index 3d5caf785e..0f1c0eaea0 100644 --- a/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala +++ b/akka-spring/src/test/scala/TypedActorSpringFeatureTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-spring/src/test/scala/UntypedActorSpringFeatureTest.scala b/akka-spring/src/test/scala/UntypedActorSpringFeatureTest.scala index 5f13c81ffb..fa5b2c3572 100644 --- a/akka-spring/src/test/scala/UntypedActorSpringFeatureTest.scala +++ b/akka-spring/src/test/scala/UntypedActorSpringFeatureTest.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2010 Scalable Solutions AB + * Copyright (C) 2009-2010 Typesafe Inc. */ package akka.spring diff --git a/akka-stm/src/main/scala/akka/agent/Agent.scala b/akka-stm/src/main/scala/akka/agent/Agent.scala index be30615b85..fb525cc376 100644 --- a/akka-stm/src/main/scala/akka/agent/Agent.scala +++ b/akka-stm/src/main/scala/akka/agent/Agent.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.agent diff --git a/akka-stm/src/main/scala/akka/stm/Atomic.scala b/akka-stm/src/main/scala/akka/stm/Atomic.scala index c6d7244272..46c0200cd7 100644 --- a/akka-stm/src/main/scala/akka/stm/Atomic.scala +++ b/akka-stm/src/main/scala/akka/stm/Atomic.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.stm diff --git a/akka-stm/src/main/scala/akka/stm/Ref.scala b/akka-stm/src/main/scala/akka/stm/Ref.scala index 285edcc4be..da166de669 100644 --- a/akka-stm/src/main/scala/akka/stm/Ref.scala +++ b/akka-stm/src/main/scala/akka/stm/Ref.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.stm diff --git a/akka-stm/src/main/scala/akka/stm/Stm.scala b/akka-stm/src/main/scala/akka/stm/Stm.scala index 071ea66da6..0cd136d762 100644 --- a/akka-stm/src/main/scala/akka/stm/Stm.scala +++ b/akka-stm/src/main/scala/akka/stm/Stm.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.stm diff --git a/akka-stm/src/main/scala/akka/stm/TransactionFactory.scala b/akka-stm/src/main/scala/akka/stm/TransactionFactory.scala index 7ba747c6da..76364bf160 100644 --- a/akka-stm/src/main/scala/akka/stm/TransactionFactory.scala +++ b/akka-stm/src/main/scala/akka/stm/TransactionFactory.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
*/ package akka.stm diff --git a/akka-stm/src/main/scala/akka/stm/TransactionFactoryBuilder.scala b/akka-stm/src/main/scala/akka/stm/TransactionFactoryBuilder.scala index 147984b5e9..e975d284f8 100644 --- a/akka-stm/src/main/scala/akka/stm/TransactionFactoryBuilder.scala +++ b/akka-stm/src/main/scala/akka/stm/TransactionFactoryBuilder.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.stm diff --git a/akka-stm/src/main/scala/akka/stm/TransactionalMap.scala b/akka-stm/src/main/scala/akka/stm/TransactionalMap.scala index 1f833e9b06..681635807d 100644 --- a/akka-stm/src/main/scala/akka/stm/TransactionalMap.scala +++ b/akka-stm/src/main/scala/akka/stm/TransactionalMap.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.stm diff --git a/akka-stm/src/main/scala/akka/stm/TransactionalVector.scala b/akka-stm/src/main/scala/akka/stm/TransactionalVector.scala index 42fcecfc34..c31bba4fde 100644 --- a/akka-stm/src/main/scala/akka/stm/TransactionalVector.scala +++ b/akka-stm/src/main/scala/akka/stm/TransactionalVector.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.stm diff --git a/akka-stm/src/main/scala/akka/stm/package.scala b/akka-stm/src/main/scala/akka/stm/package.scala index 26f81f8f7a..49ba55e327 100644 --- a/akka-stm/src/main/scala/akka/stm/package.scala +++ b/akka-stm/src/main/scala/akka/stm/package.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka diff --git a/akka-stm/src/main/scala/akka/transactor/Atomically.scala b/akka-stm/src/main/scala/akka/transactor/Atomically.scala index 94394c777f..7f74d34cdc 100644 --- a/akka-stm/src/main/scala/akka/transactor/Atomically.scala +++ b/akka-stm/src/main/scala/akka/transactor/Atomically.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.transactor diff --git a/akka-stm/src/main/scala/akka/transactor/Coordinated.scala b/akka-stm/src/main/scala/akka/transactor/Coordinated.scala index 2adec6098f..a43cf39786 100644 --- a/akka-stm/src/main/scala/akka/transactor/Coordinated.scala +++ b/akka-stm/src/main/scala/akka/transactor/Coordinated.scala @@ -1,15 +1,23 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.transactor +import akka.AkkaException import akka.config.Config -import akka.stm.{ Atomic, DefaultTransactionConfig, TransactionFactory } +import akka.stm.{Atomic, DefaultTransactionConfig, TransactionFactory} -import org.multiverse.api.{ Transaction ⇒ MultiverseTransaction } import org.multiverse.commitbarriers.CountDownCommitBarrier import org.multiverse.templates.TransactionalCallable +import akka.actor.ActorTimeoutException +import org.multiverse.api.{TransactionConfiguration, Transaction ⇒ MultiverseTransaction} +import org.multiverse.api.exceptions.ControlFlowError + +/** + * Akka-specific exception for coordinated transactions. + */ +class CoordinatedTransactionException(message: String, cause: Throwable = null) extends AkkaException(message, cause) /** * Coordinated transactions across actors. 
@@ -86,8 +94,9 @@ object Coordinated { class Coordinated(val message: Any, barrier: CountDownCommitBarrier) { // Java API constructors - def this(message: Any) = this(message, Coordinated.createBarrier) - def this() = this(null, Coordinated.createBarrier) + def this(message: Any) = this (message, Coordinated.createBarrier) + + def this() = this (null, Coordinated.createBarrier) /** * Create a new Coordinated object and increment the number of parties by one. @@ -118,6 +127,8 @@ class Coordinated(val message: Any, barrier: CountDownCommitBarrier) { /** * Delimits the coordinated transaction. The transaction will wait for all other transactions * in this coordination before committing. The timeout is specified by the transaction factory. + * + * @throws ActorTimeoutException if the coordinated transaction times out. */ def atomic[T](body: ⇒ T)(implicit factory: TransactionFactory = Coordinated.DefaultFactory): T = atomic(factory)(body) @@ -125,13 +136,38 @@ class Coordinated(val message: Any, barrier: CountDownCommitBarrier) { /** * Delimits the coordinated transaction. The transaction will wait for all other transactions * in this coordination before committing. The timeout is specified by the transaction factory. + * + * @throws ActorTimeoutException if the coordinated transaction times out. */ def atomic[T](factory: TransactionFactory)(body: ⇒ T): T = { factory.boilerplate.execute(new TransactionalCallable[T]() { def call(mtx: MultiverseTransaction): T = { - val result = body + val result = try { + body + } catch { + case e: ControlFlowError => throw e + case e: Exception => { + barrier.abort() + throw e + } + } + val timeout = factory.config.timeout - barrier.tryJoinCommit(mtx, timeout.length, timeout.unit) + val success = try { + barrier.tryJoinCommit(mtx, timeout.length, timeout.unit) + } catch { + case e: IllegalStateException => { + val config: TransactionConfiguration = mtx.getConfiguration + throw new CoordinatedTransactionException("Coordinated transaction [" + config.getFamilyName + "] aborted", e) + } + } + + if (!success) { + val config: TransactionConfiguration = mtx.getConfiguration + throw new ActorTimeoutException( + "Failed to complete coordinated transaction [" + config.getFamilyName + "] " + + "with a maxium timeout of [" + config.getTimeoutNs + "] ns") + } result } }) @@ -141,6 +177,8 @@ class Coordinated(val message: Any, barrier: CountDownCommitBarrier) { * Java API: coordinated atomic method that accepts an [[akka.stm.Atomic]]. * Delimits the coordinated transaction. The transaction will wait for all other transactions * in this coordination before committing. The timeout is specified by the transaction factory. + * + * @throws ActorTimeoutException if the coordinated transaction times out */ def atomic[T](jatomic: Atomic[T]): T = atomic(jatomic.factory)(jatomic.atomically) @@ -148,6 +186,8 @@ class Coordinated(val message: Any, barrier: CountDownCommitBarrier) { * Java API: coordinated atomic method that accepts an [[akka.transactor.Atomically]]. * Delimits the coordinated transaction. The transaction will wait for all other transactions * in this coordination before committing. The timeout is specified by the transaction factory. + * + * @throws ActorTimeoutException if the coordinated transaction times out. 
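From the calling side, the new failure semantics can be exercised by giving the coordinated ``atomic`` block an explicit transaction factory and handling the two exception types documented above. This is a sketch only; it reuses the 3-second factory that ``CoordinatedIncrementSpec`` below switches to, and the ``Ref`` bookkeeping is illustrative::

   import akka.actor.ActorTimeoutException
   import akka.stm.{ Ref, TransactionFactory }
   import akka.transactor.{ Coordinated, CoordinatedTransactionException }
   import akka.util.duration._

   val counter = Ref(0)
   implicit val txFactory = TransactionFactory(timeout = 3 seconds)

   val coordinated = Coordinated()
   try {
     coordinated.atomic {
       // Throwing here now aborts the commit barrier for every member
       // instead of leaving the others hanging until the timeout.
       counter alter (_ + 1)
     }
   } catch {
     case e: CoordinatedTransactionException =>
       () // some other member of the coordination aborted
     case e: ActorTimeoutException =>
       () // not every member joined the barrier within the factory timeout
   }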
*/ def atomic(atomically: Atomically): Unit = atomic(atomically.factory)(atomically.atomically) diff --git a/akka-stm/src/main/scala/akka/transactor/Transactor.scala b/akka-stm/src/main/scala/akka/transactor/Transactor.scala index e1bb115650..5f3303405e 100644 --- a/akka-stm/src/main/scala/akka/transactor/Transactor.scala +++ b/akka-stm/src/main/scala/akka/transactor/Transactor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.transactor diff --git a/akka-stm/src/main/scala/akka/transactor/UntypedTransactor.scala b/akka-stm/src/main/scala/akka/transactor/UntypedTransactor.scala index 7c1dcb5d4f..850e80e120 100644 --- a/akka-stm/src/main/scala/akka/transactor/UntypedTransactor.scala +++ b/akka-stm/src/main/scala/akka/transactor/UntypedTransactor.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.transactor diff --git a/akka-stm/src/test/java/akka/transactor/test/UntypedFailer.java b/akka-stm/src/test/java/akka/transactor/test/UntypedFailer.java index 6d2db1b803..898dafa529 100644 --- a/akka-stm/src/test/java/akka/transactor/test/UntypedFailer.java +++ b/akka-stm/src/test/java/akka/transactor/test/UntypedFailer.java @@ -1,9 +1,9 @@ package akka.transactor.test; -import akka.actor.UntypedActor; +import akka.transactor.UntypedTransactor; -public class UntypedFailer extends UntypedActor { - public void onReceive(Object incoming) throws Exception { +public class UntypedFailer extends UntypedTransactor { + public void atomically(Object message) throws Exception { throw new RuntimeException("Expected failure"); } } diff --git a/akka-stm/src/test/scala/config/ConfigSpec.scala b/akka-stm/src/test/scala/config/ConfigSpec.scala index 8636254ced..957f73e966 100644 --- a/akka-stm/src/test/scala/config/ConfigSpec.scala +++ b/akka-stm/src/test/scala/config/ConfigSpec.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. */ package akka.config diff --git a/akka-stm/src/test/scala/transactor/CoordinatedIncrementSpec.scala b/akka-stm/src/test/scala/transactor/CoordinatedIncrementSpec.scala index 8e589c6ff8..7d568d03a3 100644 --- a/akka-stm/src/test/scala/transactor/CoordinatedIncrementSpec.scala +++ b/akka-stm/src/test/scala/transactor/CoordinatedIncrementSpec.scala @@ -36,9 +36,13 @@ object CoordinatedIncrement { } class Failer extends Actor { + val txFactory = TransactionFactory(timeout = 3 seconds) + def receive = { - case Coordinated(Increment(friends)) ⇒ { - throw new RuntimeException("Expected failure") + case coordinated@Coordinated(Increment(friends)) ⇒ { + coordinated.atomic(txFactory) { + throw new RuntimeException("Expected failure") + } } } } diff --git a/akka-testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala b/akka-testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala index 19058b4951..9128afab7f 100644 --- a/akka-testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala +++ b/akka-testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala @@ -1,5 +1,5 @@ /** - * Copyright (C) 2009-2011 Scalable Solutions AB + * Copyright (C) 2009-2011 Typesafe Inc. 
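The ``Failer`` fix and the ``UntypedFailer`` change above make the same point: a participant must do its work, or fail, inside the coordinated atomic block so that the barrier is aborted for all members. On the Scala side that is what the ``Transactor`` trait arranges; a minimal counter in that style, following the usual transactor example (names are illustrative)::

   import akka.stm.Ref
   import akka.transactor.Transactor

   case object Increment

   class Counter extends Transactor {
     val count = Ref(0)

     // Runs inside the coordinated transaction, so an exception here
     // aborts the whole coordination rather than committing half of it.
     override def atomically = {
       case Increment => count alter (_ + 1)
     }
   }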
diff --git a/akka-testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala b/akka-testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala
index 19058b4951..9128afab7f 100644
--- a/akka-testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala
+++ b/akka-testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 package akka.testkit
@@ -140,7 +140,7 @@ class CallingThreadDispatcher(val warnings: Boolean = true) extends MessageDispa
   private[akka] override def dispatch(handle: MessageInvocation) {
     val mbox = getMailbox(handle.receiver)
     val queue = mbox.queue
-    val execute = mbox.suspended.ifElseYield {
+    val execute = mbox.suspended.fold {
       queue.push(handle)
       if (warnings && handle.channel.isInstanceOf[Promise[_]]) {
         EventHandler.warning(this, "suspended, creating Future could deadlock; target: %s" format handle.receiver)
@@ -177,7 +177,7 @@ class CallingThreadDispatcher(val warnings: Boolean = true) extends MessageDispa
     assert(queue.isActive)
     mbox.lock.lock
     val recurse = try {
-      val handle = mbox.suspended.ifElseYield[MessageInvocation] {
+      val handle = mbox.suspended.fold[MessageInvocation] {
         queue.leave
         null
       } {
diff --git a/akka-testkit/src/main/scala/akka/testkit/TestActorRef.scala b/akka-testkit/src/main/scala/akka/testkit/TestActorRef.scala
index 753ea97bf7..6920faea64 100644
--- a/akka-testkit/src/main/scala/akka/testkit/TestActorRef.scala
+++ b/akka-testkit/src/main/scala/akka/testkit/TestActorRef.scala
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 package akka.testkit
diff --git a/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala b/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala
index 87c258b255..98fd94a57a 100644
--- a/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala
+++ b/akka-testkit/src/main/scala/akka/testkit/TestBarrier.scala
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 package akka.testkit
diff --git a/akka-testkit/src/main/scala/akka/testkit/TestFSMRef.scala b/akka-testkit/src/main/scala/akka/testkit/TestFSMRef.scala
index f7b064923f..86e77896c3 100644
--- a/akka-testkit/src/main/scala/akka/testkit/TestFSMRef.scala
+++ b/akka-testkit/src/main/scala/akka/testkit/TestFSMRef.scala
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 package akka.testkit
diff --git a/akka-testkit/src/main/scala/akka/testkit/TestKit.scala b/akka-testkit/src/main/scala/akka/testkit/TestKit.scala
index 0b07e61783..e7c23d30d7 100644
--- a/akka-testkit/src/main/scala/akka/testkit/TestKit.scala
+++ b/akka-testkit/src/main/scala/akka/testkit/TestKit.scala
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 package akka.testkit
diff --git a/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala b/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala
index 7240149c45..3c852bf75f 100644
--- a/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala
+++ b/akka-testkit/src/main/scala/akka/testkit/TestLatch.scala
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 package akka.testkit
diff --git a/akka-testkit/src/main/scala/akka/testkit/Testing.scala b/akka-testkit/src/main/scala/akka/testkit/Testing.scala
index 889b87920b..70e4929b54 100644
--- a/akka-testkit/src/main/scala/akka/testkit/Testing.scala
+++ b/akka-testkit/src/main/scala/akka/testkit/Testing.scala
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 package akka.testkit
diff --git a/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala b/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala
index 3ef904fe88..93264524c2 100644
--- a/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala
+++ b/akka-testkit/src/test/scala/akka/testkit/TestActorRefSpec.scala
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 package akka.testkit
diff --git a/akka-testkit/src/test/scala/akka/testkit/TestFSMRefSpec.scala b/akka-testkit/src/test/scala/akka/testkit/TestFSMRefSpec.scala
index 67fdadc529..d65850b050 100644
--- a/akka-testkit/src/test/scala/akka/testkit/TestFSMRefSpec.scala
+++ b/akka-testkit/src/test/scala/akka/testkit/TestFSMRefSpec.scala
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 package akka.testkit
diff --git a/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java b/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java
index 653800ca3e..fc950ff2ff 100644
--- a/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java
+++ b/akka-tutorials/akka-tutorial-first/src/main/java/akka/tutorial/first/java/Pi.java
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 package akka.tutorial.first.java;
diff --git a/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala b/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala
index 51fcfac45d..ff09cf88ff 100644
--- a/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala
+++ b/akka-tutorials/akka-tutorial-first/src/main/scala/Pi.scala
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 package akka.tutorial.first.scala
diff --git a/akka-tutorials/akka-tutorial-second/src/main/java/akka/tutorial/java/second/Pi.java b/akka-tutorials/akka-tutorial-second/src/main/java/akka/tutorial/java/second/Pi.java
index 673aeddf42..baa75fc501 100644
--- a/akka-tutorials/akka-tutorial-second/src/main/java/akka/tutorial/java/second/Pi.java
+++ b/akka-tutorials/akka-tutorial-second/src/main/java/akka/tutorial/java/second/Pi.java
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 package akka.tutorial.java.second;
diff --git a/akka-tutorials/akka-tutorial-second/src/main/scala/Pi.scala b/akka-tutorials/akka-tutorial-second/src/main/scala/Pi.scala
index 72ee614752..46f59cdabf 100644
--- a/akka-tutorials/akka-tutorial-second/src/main/scala/Pi.scala
+++ b/akka-tutorials/akka-tutorial-second/src/main/scala/Pi.scala
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 package akka.tutorial.second
diff --git a/config/akka-reference.conf b/config/akka-reference.conf
index d224fbe42d..eb8b8262c0 100644
--- a/config/akka-reference.conf
+++ b/config/akka-reference.conf
@@ -131,6 +131,16 @@ akka {
       port = 6379
     }

+    mongodb {
+      # Any specified collection name will be used as a prefix for collections that use durable mongo mailboxes
+      uri = "mongodb://localhost/akka.mailbox"   # Follow Mongo URI Spec - http://www.mongodb.org/display/DOCS/Connections
+      # Configurable timeouts for certain ops
+      timeout {
+        read = 3000    # number of milliseconds to wait for a read to succeed before timing out the future
+        write = 3000   # number of milliseconds to wait for a write to succeed before timing out the future
+      }
+    }
+
     zookeeper {
       server-addresses = "localhost:2181"
       session-timeout = 60
diff --git a/project/AkkaBuild.scala b/project/AkkaBuild.scala
index ee8ed6d9b8..12695596cb 100644
--- a/project/AkkaBuild.scala
+++ b/project/AkkaBuild.scala
@@ -96,7 +96,7 @@ object AkkaBuild extends Build {
     id = "akka-durable-mailboxes",
     base = file("akka-durable-mailboxes"),
     settings = parentSettings,
-    aggregate = Seq(mailboxesCommon, beanstalkMailbox, fileMailbox, redisMailbox, zookeeperMailbox)
+    aggregate = Seq(mailboxesCommon, beanstalkMailbox, fileMailbox, redisMailbox, zookeeperMailbox, mongoMailbox)
   )

   lazy val mailboxesCommon = Project(
@@ -148,6 +148,19 @@
     settings = defaultSettings
   )

+  val testMongoMailbox = SettingKey[Boolean]("test-mongo-mailbox")
+
+  lazy val mongoMailbox = Project(
+    id = "akka-mongo-mailbox",
+    base = file("akka-durable-mailboxes/akka-mongo-mailbox"),
+    dependencies = Seq(mailboxesCommon % "compile;test->test"),
+    settings = defaultSettings ++ Seq(
+      libraryDependencies ++= Dependencies.mongoMailbox,
+      testMongoMailbox := false,
+      testOptions in Test <+= testMongoMailbox map { test => Tests.Filter(s => test) }
+    )
+  )
+
   lazy val camel = Project(
     id = "akka-camel",
     base = file("akka-camel"),
@@ -256,8 +269,16 @@
     publishArtifact in Compile := false
   )

+  val testExcludes = SettingKey[Seq[String]]("test-excludes")
+
+  def akkaTestExcludes: Seq[String] = {
+    val exclude = System.getProperty("akka.test.exclude", "")
+    if (exclude.isEmpty) Seq.empty else exclude.split(",").toSeq
+  }
+
   lazy val defaultSettings = baseSettings ++ Seq(
     resolvers += "Typesafe Repo" at "http://repo.typesafe.com/typesafe/releases/",
+    resolvers += "Twitter Public Repo" at "http://maven.twttr.com", // This will be going away with com.mongodb.async's next release

     // compile options
     scalacOptions ++= Seq("-encoding", "UTF-8", "-optimise", "-deprecation", "-unchecked"),
@@ -268,7 +289,11 @@
     unmanagedClasspath in Test <+= (baseDirectory in LocalProject("akka")) map { base => Attributed.blank(base / "config") },

     // disable parallel tests
-    parallelExecution in Test := false
+    parallelExecution in Test := false,
+
+    // for excluding tests in jenkins builds (-Dakka.test.exclude=TimingSpec)
+    testExcludes := akkaTestExcludes,
+    testOptions in Test <++= testExcludes map { _.map(exclude => Tests.Filter(test => !test.contains(exclude))) }
   )

   // reStructuredText docs
@@ -321,6 +346,8 @@ object Dependencies {
   val redisMailbox = Seq(redis)

+  val mongoMailbox = Seq(mongoAsync, twttrUtilCore)
+
   val camel = Seq(camelCore, Test.junit, Test.scalatest, Test.logback)

   val spring = Seq(springBeans, springContext, Test.camelSpring, Test.junit, Test.scalatest)
@@ -372,6 +399,7 @@ object Dependency {
   val jsr250 = "javax.annotation" % "jsr250-api" % "1.0" // CDDL v1
   val jsr311 = "javax.ws.rs" % "jsr311-api" % "1.1" // CDDL v1
   val log4j = "log4j" % "log4j" % "1.2.15" // ApacheV2
+  val mongoAsync = "com.mongodb.async" % "mongo-driver_2.9.0-1" % "0.2.7" //ApacheV2
   val multiverse = "org.multiverse" % "multiverse-alpha" % V.Multiverse // ApacheV2
   val netty = "org.jboss.netty" % "netty" % V.Netty // ApacheV2
   val osgi = "org.osgi" % "org.osgi.core" % "4.2.0" // ApacheV2
@@ -382,6 +410,7 @@
   val springBeans = "org.springframework" % "spring-beans" % V.Spring // ApacheV2
   val springContext = "org.springframework" % "spring-context" % V.Spring // ApacheV2
   val staxApi = "javax.xml.stream" % "stax-api" % "1.0-2" // ApacheV2
+  val twttrUtilCore = "com.twitter" % "util-core" % "1.8.1" // ApacheV2
   val zkClient = "zkclient" % "zkclient" % "0.3" // ApacheV2
   val zookeeper = "org.apache.hadoop.zookeeper" % "zookeeper" % V.Zookeeper // ApacheV2
   val zookeeperLock = "org.apache.hadoop.zookeeper" % "zookeeper-recipes-lock" % V.Zookeeper // ApacheV2
diff --git a/project/Publish.scala b/project/Publish.scala
index a0add06443..8f5997edb2 100644
--- a/project/Publish.scala
+++ b/project/Publish.scala
@@ -20,8 +20,8 @@ object Publish {
     2009
     http://akka.io
-    Scalable Solutions AB
-    http://scalablesolutions.se
+    Typesafe Inc.
+    http://www.typesafe.com
diff --git a/project/plugins/build.sbt b/project/plugins/build.sbt
index 7e787bb85d..a5d9739061 100644
--- a/project/plugins/build.sbt
+++ b/project/plugins/build.sbt
@@ -1,4 +1,4 @@
 resolvers += "Typesafe Repo" at "http://repo.typesafe.com/typesafe/releases/"

-libraryDependencies += "com.typesafe" %% "sbt-multi-jvm" % "0.1"
+libraryDependencies += "com.typesafe" %% "sbt-multi-jvm" % "0.1.1"
diff --git a/project/sbt7/build/AkkaProject.scala b/project/sbt7/build/AkkaProject.scala
index 3fdc1832f1..bd0dd4a56f 100644
--- a/project/sbt7/build/AkkaProject.scala
+++ b/project/sbt7/build/AkkaProject.scala
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 import com.weiglewilczek.bnd4sbt.BNDPlugin
@@ -41,6 +41,7 @@ class AkkaParentProject(info: ProjectInfo) extends ParentProject(info) with Exec
     lazy val DatabinderRepo = MavenRepository("Databinder Repo", "http://databinder.net/repo")
     lazy val ScalaToolsSnapshotRepo = MavenRepository("Scala-Tools Snapshot Repo", "http://scala-tools.org/repo-snapshots")
     lazy val SunJDMKRepo = MavenRepository("WP5 Repository", "http://wp5.e-taxonomy.eu/cdmlib/mavenrepo")
+    lazy val TwitterRepo = MavenRepository("Twitter Public Repo", "http://maven.twttr.com")
   }

   // -------------------------------------------------------------------------------------------------------------------
@@ -64,6 +65,8 @@ class AkkaParentProject(info: ProjectInfo) extends ParentProject(info) with Exec
     lazy val processingModuleConfig = ModuleConfiguration("org.processing", DatabinderRepo)
     lazy val sjsonModuleConfig = ModuleConfiguration("net.debasishg", ScalaToolsRelRepo)
     lazy val redisModuleConfig = ModuleConfiguration("net.debasishg", ScalaToolsRelRepo)
+    lazy val mongoModuleConfig = ModuleConfiguration("com.mongodb.async", ScalaToolsRelRepo)
+    lazy val twitterUtilModuleConfig = ModuleConfiguration("com.twitter", TwitterRepo)
     lazy val beanstalkModuleConfig = ModuleConfiguration("beanstalk", AkkaRepo)
     lazy val lzfModuleConfig = ModuleConfiguration("voldemort.store.compress", "h2-lzf", AkkaRepo)
     lazy val vscaladocModuleConfig = ModuleConfiguration("org.scala-tools", "vscaladoc", "1.1-md-3", AkkaRepo)
@@ -127,6 +130,7 @@
     lazy val jsr250 = "javax.annotation" % "jsr250-api" % "1.0" % "compile" //CDDL v1
     lazy val jsr311 = "javax.ws.rs" % "jsr311-api" % "1.1" % "compile" //CDDL v1
+    lazy val mongo = "com.mongodb.async" % "mongo-driver_2.9.0-1" % "0.2.6" //ApacheV2
     lazy val multiverse = "org.multiverse" % "multiverse-alpha" % MULTIVERSE_VERSION % "compile" //ApacheV2
     lazy val netty = "org.jboss.netty" % "netty" % "3.2.4.Final" % "compile" //ApacheV2
     lazy val osgi_core = "org.osgi" % "org.osgi.core" % "4.2.0" //ApacheV2
@@ -140,6 +144,7 @@ class AkkaParentProject(info: ProjectInfo) extends ParentProject(info) with Exec
     lazy val spring_jms = "org.springframework" % "spring-jms" % SPRING_VERSION % "compile" //ApacheV2
     lazy val stax_api = "javax.xml.stream" % "stax-api" % "1.0-2" % "compile" //ApacheV2
+    lazy val twitter_util_core = "com.twitter" % "util-core" % "1.8.1" // ApacheV2

     lazy val logback = "ch.qos.logback" % "logback-classic" % "0.9.28" % "runtime" //MIT
     lazy val log4j = "log4j" % "log4j" % "1.2.15" //ApacheV2
     lazy val zookeeper = "org.apache.hadoop.zookeeper" % "zookeeper" % ZOOKEEPER_VERSION //ApacheV2
@@ -220,8 +225,8 @@ class AkkaParentProject(info: ProjectInfo) extends ParentProject(info) with Exec
     2009
     http://akka.io
-    Scalable Solutions AB
-    http://scalablesolutions.se
+    Typesafe Inc.
+    http://www.typesafe.com
@@ -394,6 +399,8 @@ class AkkaParentProject(info: ProjectInfo) extends ParentProject(info) with Exec
     project("akka-mailboxes-common", "akka-mailboxes-common", new AkkaMailboxesCommonProject(_), akka_cluster)
   lazy val akka_redis_mailbox =
     project("akka-redis-mailbox", "akka-redis-mailbox", new AkkaRedisMailboxProject(_), akka_mailboxes_common)
+  lazy val akka_mongo_mailbox =
+    project("akka-mongo-mailbox", "akka-mongo-mailbox", new AkkaMongoMailboxProject(_), akka_mailboxes_common)
   lazy val akka_file_mailbox =
     project("akka-file-mailbox", "akka-file-mailbox", new AkkaFileMailboxProject(_), akka_mailboxes_common)
   lazy val akka_beanstalk_mailbox =
@@ -416,6 +423,16 @@ class AkkaParentProject(info: ProjectInfo) extends ParentProject(info) with Exec
       super.testOptions ++ (if (!redisTestsEnabled.value) Seq(testFilter("Redis")) else Seq.empty)
   }

+  class AkkaMongoMailboxProject(info: ProjectInfo) extends AkkaDefaultProject(info) {
+    val mongo = Dependencies.mongo
+    val twitter = Dependencies.twitter_util_core
+
+    lazy val mongoTestsEnabled = systemOptional[Boolean]("mailbox.test.mongo", true)
+
+    override def testOptions =
+      super.testOptions ++ (if (!mongoTestsEnabled.value) Seq(testFilter("Mongo")) else Seq.empty)
+  }
+
   class AkkaFileMailboxProject(info: ProjectInfo) extends AkkaDefaultProject(info)

   class AkkaBeanstalkMailboxProject(info: ProjectInfo) extends AkkaDefaultProject(info) {
diff --git a/project/sbt7/build/DistProject.scala b/project/sbt7/build/DistProject.scala
index f930adbf37..30f758610f 100644
--- a/project/sbt7/build/DistProject.scala
+++ b/project/sbt7/build/DistProject.scala
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 import sbt._
diff --git a/project/sbt7/build/DocParentProject.scala b/project/sbt7/build/DocParentProject.scala
index 1e7bf0266c..28ff8541dd 100644
--- a/project/sbt7/build/DocParentProject.scala
+++ b/project/sbt7/build/DocParentProject.scala
@@ -1,5 +1,5 @@
 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 import sbt._
diff --git a/scripts/generate_config_with_secure_cookie.sh b/scripts/generate_config_with_secure_cookie.sh
index 3fcc36a19e..6959697d0d 100755
--- a/scripts/generate_config_with_secure_cookie.sh
+++ b/scripts/generate_config_with_secure_cookie.sh
@@ -3,7 +3,7 @@
 exec scala "$0" "$@"
 !#

 /**
- * Copyright (C) 2009-2011 Scalable Solutions AB
+ * Copyright (C) 2009-2011 Typesafe Inc.
  */

 import java.security.{MessageDigest, SecureRandom}
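Two switches introduced by the build changes above are worth noting when running the test suites. In the sbt 0.10 build, defaultSettings now honours -Dakka.test.exclude=Foo,Bar (comma-separated name fragments), and the new akka-mongo-mailbox project keeps its tests disabled unless the test-mongo-mailbox setting is flipped to true; in the sbt 0.7 build, the Mongo mailbox specs run unless -Dmailbox.test.mongo=false is supplied. The sketch below is plain Scala, not sbt code, and the spec names are examples only; it only illustrates how the exclusion filters behave.

    // Each comma-separated entry of akka.test.exclude becomes a Tests.Filter;
    // a test runs only if its name contains none of the excluded fragments.
    val excludes = "FSMTimingSpec,ActorTimeoutSpec".split(",").toSeq

    def runs(testName: String): Boolean =
      excludes.forall(exclude => !testName.contains(exclude))

    runs("akka.actor.FSMTimingSpec")    // false: excluded by the first entry
    runs("akka.actor.ForwardActorSpec") // true: unaffected, still runs

    // The sbt 0.7 Mongo toggle is the single-fragment case: specs whose name
    // contains "Mongo" are filtered out when -Dmailbox.test.mongo=false.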