diff --git a/.gitignore b/.gitignore
index 91eba2fc6b..2f6e90f79a 100755
--- a/.gitignore
+++ b/.gitignore
@@ -16,6 +16,7 @@ reports
dist
target
deploy/*.jar
+.history
data
out
logs
@@ -58,3 +59,4 @@ akka.sublime-project
akka.sublime-workspace
.target
.multi-jvm
+_mb
diff --git a/.history b/.history
deleted file mode 100644
index 7bbf31e478..0000000000
--- a/.history
+++ /dev/null
@@ -1,4 +0,0 @@
-update
-reload
-projects
-exit
diff --git a/akka-actor-tests/src/test/java/akka/actor/JavaAPI.java b/akka-actor-tests/src/test/java/akka/actor/JavaAPI.java
index f8600f3e1b..9678cbc76d 100644
--- a/akka-actor-tests/src/test/java/akka/actor/JavaAPI.java
+++ b/akka-actor-tests/src/test/java/akka/actor/JavaAPI.java
@@ -26,7 +26,7 @@ public class JavaAPI {
@Test
public void mustBeAbleToCreateActorRefFromClass() {
- ActorRef ref = system.actorOf(JavaAPITestActor.class);
+ ActorRef ref = system.actorOf(new Props(JavaAPITestActor.class));
assertNotNull(ref);
}
@@ -42,7 +42,7 @@ public class JavaAPI {
@Test
public void mustAcceptSingleArgTell() {
- ActorRef ref = system.actorOf(JavaAPITestActor.class);
+ ActorRef ref = system.actorOf(new Props(JavaAPITestActor.class));
ref.tell("hallo");
ref.tell("hallo", ref);
}
diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorFireForgetRequestReplySpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorFireForgetRequestReplySpec.scala
index 2537b996ad..aa7d76d3dc 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/ActorFireForgetRequestReplySpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/ActorFireForgetRequestReplySpec.scala
@@ -63,16 +63,16 @@ class ActorFireForgetRequestReplySpec extends AkkaSpec with BeforeAndAfterEach w
"An Actor" must {
"reply to bang message using reply" in {
- val replyActor = system.actorOf[ReplyActor]
- val senderActor = system.actorOf(new SenderActor(replyActor))
+ val replyActor = system.actorOf(Props[ReplyActor])
+ val senderActor = system.actorOf(Props(new SenderActor(replyActor)))
senderActor ! "Init"
state.finished.await
state.s must be("Reply")
}
"reply to bang message using implicit sender" in {
- val replyActor = system.actorOf[ReplyActor]
- val senderActor = system.actorOf(new SenderActor(replyActor))
+ val replyActor = system.actorOf(Props[ReplyActor])
+ val senderActor = system.actorOf(Props(new SenderActor(replyActor)))
senderActor ! "InitImplicit"
state.finished.await
state.s must be("ReplyImplicit")
diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorLookupSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorLookupSpec.scala
index 1806c48830..af3b0ba65c 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/ActorLookupSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/ActorLookupSpec.scala
@@ -123,7 +123,8 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout {
a ! 42
f.isCompleted must be === true
f.get must be === 42
- system.actorFor(a.path) must be === system.deadLetters
+ // clean-up is run as onComplete callback, i.e. dispatched on another thread
+ awaitCond(system.actorFor(a.path) == system.deadLetters, 1 second)
}
}
@@ -229,7 +230,8 @@ class ActorLookupSpec extends AkkaSpec with DefaultTimeout {
a ! 42
f.isCompleted must be === true
f.get must be === 42
- (c2 ? LookupPath(a.path)).get must be === system.deadLetters
+ // clean-up is run as onComplete callback, i.e. dispatched on another thread
+ awaitCond((c2 ? LookupPath(a.path)).get == system.deadLetters, 1 second)
}
}
diff --git a/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala
index b3b8ece741..f1cca42011 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/ActorRefSpec.scala
@@ -145,82 +145,82 @@ class ActorRefSpec extends AkkaSpec with DefaultTimeout {
filterException[akka.actor.ActorInitializationException] {
intercept[akka.actor.ActorInitializationException] {
wrap(result ⇒
- actorOf(new Actor {
+ actorOf(Props(new Actor {
val nested = promiseIntercept(new Actor { def receive = { case _ ⇒ } })(result)
def receive = { case _ ⇒ }
- }))
+ })))
}
contextStackMustBeEmpty
intercept[akka.actor.ActorInitializationException] {
wrap(result ⇒
- actorOf(promiseIntercept(new FailingOuterActor(actorOf(new InnerActor)))(result)))
+ actorOf(Props(promiseIntercept(new FailingOuterActor(actorOf(Props(new InnerActor))))(result))))
}
contextStackMustBeEmpty
intercept[akka.actor.ActorInitializationException] {
wrap(result ⇒
- actorOf(new OuterActor(actorOf(promiseIntercept(new FailingInnerActor)(result)))))
+ actorOf(Props(new OuterActor(actorOf(Props(promiseIntercept(new FailingInnerActor)(result)))))))
}
contextStackMustBeEmpty
intercept[akka.actor.ActorInitializationException] {
wrap(result ⇒
- actorOf(promiseIntercept(new FailingInheritingOuterActor(actorOf(new InnerActor)))(result)))
+ actorOf(Props(promiseIntercept(new FailingInheritingOuterActor(actorOf(Props(new InnerActor))))(result))))
}
contextStackMustBeEmpty
intercept[akka.actor.ActorInitializationException] {
wrap(result ⇒
- actorOf(new FailingOuterActor(actorOf(promiseIntercept(new FailingInheritingInnerActor)(result)))))
+ actorOf(Props(new FailingOuterActor(actorOf(Props(promiseIntercept(new FailingInheritingInnerActor)(result)))))))
}
contextStackMustBeEmpty
intercept[akka.actor.ActorInitializationException] {
wrap(result ⇒
- actorOf(new FailingInheritingOuterActor(actorOf(promiseIntercept(new FailingInheritingInnerActor)(result)))))
+ actorOf(Props(new FailingInheritingOuterActor(actorOf(Props(promiseIntercept(new FailingInheritingInnerActor)(result)))))))
}
contextStackMustBeEmpty
intercept[akka.actor.ActorInitializationException] {
wrap(result ⇒
- actorOf(new FailingInheritingOuterActor(actorOf(promiseIntercept(new FailingInnerActor)(result)))))
+ actorOf(Props(new FailingInheritingOuterActor(actorOf(Props(promiseIntercept(new FailingInnerActor)(result)))))))
}
contextStackMustBeEmpty
intercept[akka.actor.ActorInitializationException] {
wrap(result ⇒
- actorOf(new OuterActor(actorOf(new InnerActor {
+ actorOf(Props(new OuterActor(actorOf(Props(new InnerActor {
val a = promiseIntercept(new InnerActor)(result)
- }))))
+ }))))))
}
contextStackMustBeEmpty
intercept[akka.actor.ActorInitializationException] {
wrap(result ⇒
- actorOf(new FailingOuterActor(actorOf(promiseIntercept(new FailingInheritingInnerActor)(result)))))
+ actorOf(Props(new FailingOuterActor(actorOf(Props(promiseIntercept(new FailingInheritingInnerActor)(result)))))))
}
contextStackMustBeEmpty
intercept[akka.actor.ActorInitializationException] {
wrap(result ⇒
- actorOf(new OuterActor(actorOf(promiseIntercept(new FailingInheritingInnerActor)(result)))))
+ actorOf(Props(new OuterActor(actorOf(Props(promiseIntercept(new FailingInheritingInnerActor)(result)))))))
}
contextStackMustBeEmpty
intercept[akka.actor.ActorInitializationException] {
wrap(result ⇒
- actorOf(new OuterActor(actorOf(promiseIntercept({ new InnerActor; new InnerActor })(result)))))
+ actorOf(Props(new OuterActor(actorOf(Props(promiseIntercept({ new InnerActor; new InnerActor })(result)))))))
}
contextStackMustBeEmpty
@@ -229,7 +229,7 @@ class ActorRefSpec extends AkkaSpec with DefaultTimeout {
filterException[java.lang.IllegalStateException] {
(intercept[java.lang.IllegalStateException] {
wrap(result ⇒
- actorOf(new OuterActor(actorOf(promiseIntercept({ throw new IllegalStateException("Ur state be b0rked"); new InnerActor })(result)))))
+ actorOf(Props(new OuterActor(actorOf(Props(promiseIntercept({ throw new IllegalStateException("Ur state be b0rked"); new InnerActor })(result)))))))
}).getMessage must be === "Ur state be b0rked"
contextStackMustBeEmpty
@@ -237,7 +237,7 @@ class ActorRefSpec extends AkkaSpec with DefaultTimeout {
}
"be serializable using Java Serialization on local node" in {
- val a = system.actorOf[InnerActor]
+ val a = system.actorOf(Props[InnerActor])
import java.io._
@@ -260,7 +260,7 @@ class ActorRefSpec extends AkkaSpec with DefaultTimeout {
}
"throw an exception on deserialize if no system in scope" in {
- val a = system.actorOf[InnerActor]
+ val a = system.actorOf(Props[InnerActor])
import java.io._
@@ -301,10 +301,10 @@ class ActorRefSpec extends AkkaSpec with DefaultTimeout {
}
"support nested actorOfs" in {
- val a = system.actorOf(new Actor {
- val nested = system.actorOf(new Actor { def receive = { case _ ⇒ } })
+ val a = system.actorOf(Props(new Actor {
+ val nested = system.actorOf(Props(new Actor { def receive = { case _ ⇒ } }))
def receive = { case _ ⇒ sender ! nested }
- })
+ }))
val nested = (a ? "any").as[ActorRef].get
a must not be null
diff --git a/akka-actor-tests/src/test/scala/akka/actor/Bench.scala b/akka-actor-tests/src/test/scala/akka/actor/Bench.scala
index 52a18e0f3b..4ef5a94b12 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/Bench.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/Bench.scala
@@ -76,7 +76,7 @@ object Chameneos {
var numFaded = 0
override def preStart() = {
- for (i ← 0 until numChameneos) context.actorOf(new Chameneo(self, colours(i % 3), i))
+ for (i ← 0 until numChameneos) context.actorOf(Props(new Chameneo(self, colours(i % 3), i)))
}
def receive = {
@@ -107,7 +107,7 @@ object Chameneos {
def run {
// System.setProperty("akka.config", "akka.conf")
Chameneos.start = System.currentTimeMillis
- val system = ActorSystem().actorOf(new Mall(1000000, 4))
+ val system = ActorSystem().actorOf(Props(new Mall(1000000, 4)))
Thread.sleep(10000)
println("Elapsed: " + (end - start))
system.stop()
diff --git a/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala
index 0f09c3e1d2..e4a30e10e0 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/FSMActorSpec.scala
@@ -112,14 +112,14 @@ class FSMActorSpec extends AkkaSpec(Map("akka.actor.debug.fsm" -> true)) with Im
import latches._
// lock that locked after being open for 1 sec
- val lock = system.actorOf(new Lock("33221", 1 second, latches))
+ val lock = system.actorOf(Props(new Lock("33221", 1 second, latches)))
- val transitionTester = system.actorOf(new Actor {
+ val transitionTester = system.actorOf(Props(new Actor {
def receive = {
case Transition(_, _, _) ⇒ transitionCallBackLatch.open
case CurrentState(_, Locked) ⇒ initialStateLatch.open
}
- })
+ }))
lock ! SubscribeTransitionCallBack(transitionTester)
initialStateLatch.await
@@ -143,13 +143,13 @@ class FSMActorSpec extends AkkaSpec(Map("akka.actor.debug.fsm" -> true)) with Im
val answerLatch = TestLatch()
object Hello
object Bye
- val tester = system.actorOf(new Actor {
+ val tester = system.actorOf(Props(new Actor {
protected def receive = {
case Hello ⇒ lock ! "hello"
case "world" ⇒ answerLatch.open
case Bye ⇒ lock ! "bye"
}
- })
+ }))
tester ! Hello
answerLatch.await
@@ -185,7 +185,7 @@ class FSMActorSpec extends AkkaSpec(Map("akka.actor.debug.fsm" -> true)) with Im
case x ⇒ testActor ! x
}
}
- val ref = system.actorOf(fsm)
+ val ref = system.actorOf(Props(fsm))
started.await
ref.stop()
expectMsg(1 second, fsm.StopEvent(Shutdown, 1, null))
diff --git a/akka-actor-tests/src/test/scala/akka/actor/FSMTimingSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/FSMTimingSpec.scala
index 85bd70248f..2d7534c755 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/FSMTimingSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/FSMTimingSpec.scala
@@ -14,7 +14,7 @@ class FSMTimingSpec extends AkkaSpec with ImplicitSender {
import FSMTimingSpec._
import FSM._
- val fsm = system.actorOf(new StateMachine(testActor))
+ val fsm = system.actorOf(Props(new StateMachine(testActor)))
fsm ! SubscribeTransitionCallBack(testActor)
expectMsg(1 second, CurrentState(fsm, Initial))
diff --git a/akka-actor-tests/src/test/scala/akka/actor/FSMTransitionSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/FSMTransitionSpec.scala
index 7c67d8e1e1..1b1f90e5b3 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/FSMTransitionSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/FSMTransitionSpec.scala
@@ -56,7 +56,7 @@ class FSMTransitionSpec extends AkkaSpec with ImplicitSender {
"A FSM transition notifier" must {
"notify listeners" in {
- val fsm = system.actorOf(new MyFSM(testActor))
+ val fsm = system.actorOf(Props(new MyFSM(testActor)))
within(1 second) {
fsm ! SubscribeTransitionCallBack(testActor)
expectMsg(CurrentState(fsm, 0))
@@ -68,8 +68,8 @@ class FSMTransitionSpec extends AkkaSpec with ImplicitSender {
}
"not fail when listener goes away" in {
- val forward = system.actorOf(new Forwarder(testActor))
- val fsm = system.actorOf(new MyFSM(testActor))
+ val forward = system.actorOf(Props(new Forwarder(testActor)))
+ val fsm = system.actorOf(Props(new MyFSM(testActor)))
val sup = system.actorOf(Props(new Actor {
context.watch(fsm)
def receive = { case _ ⇒ }
@@ -88,7 +88,7 @@ class FSMTransitionSpec extends AkkaSpec with ImplicitSender {
"A FSM" must {
"make previous and next state data available in onTransition" in {
- val fsm = system.actorOf(new OtherFSM(testActor))
+ val fsm = system.actorOf(Props(new OtherFSM(testActor)))
within(300 millis) {
fsm ! "tick"
expectMsg((0, 1))
diff --git a/akka-actor-tests/src/test/scala/akka/actor/ForwardActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ForwardActorSpec.scala
index 86af471d13..aa2cd4c9ff 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/ForwardActorSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/ForwardActorSpec.scala
@@ -13,14 +13,14 @@ object ForwardActorSpec {
val ExpectedMessage = "FOO"
def createForwardingChain(system: ActorSystem): ActorRef = {
- val replier = system.actorOf(new Actor {
+ val replier = system.actorOf(Props(new Actor {
def receive = { case x ⇒ sender ! x }
- })
+ }))
- def mkforwarder(forwardTo: ActorRef) = system.actorOf(
+ def mkforwarder(forwardTo: ActorRef) = system.actorOf(Props(
new Actor {
def receive = { case x ⇒ forwardTo forward x }
- })
+ }))
mkforwarder(mkforwarder(mkforwarder(replier)))
}
@@ -35,7 +35,7 @@ class ForwardActorSpec extends AkkaSpec {
"forward actor reference when invoking forward on bang" in {
val latch = new TestLatch(1)
- val replyTo = system.actorOf(new Actor { def receive = { case ExpectedMessage ⇒ latch.countDown() } })
+ val replyTo = system.actorOf(Props(new Actor { def receive = { case ExpectedMessage ⇒ latch.countDown() } }))
val chain = createForwardingChain(system)
diff --git a/akka-actor-tests/src/test/scala/akka/actor/HotSwapSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/HotSwapSpec.scala
index e951760cd9..e3027a4c00 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/HotSwapSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/HotSwapSpec.scala
@@ -11,22 +11,13 @@ class HotSwapSpec extends AkkaSpec with ImplicitSender {
"An Actor" must {
- "be able to hotswap its behavior with HotSwap(..)" in {
- val a = system.actorOf(new Actor {
- def receive = { case _ ⇒ sender ! "default" }
- })
- a ! HotSwap(context ⇒ { case _ ⇒ context.sender ! "swapped" })
- a ! "swapped"
- expectMsg("swapped")
- }
-
"be able to hotswap its behavior with become(..)" in {
- val a = system.actorOf(new Actor {
+ val a = system.actorOf(Props(new Actor {
def receive = {
case "init" ⇒ sender ! "init"
case "swap" ⇒ context.become({ case x: String ⇒ context.sender ! x })
}
- })
+ }))
a ! "init"
expectMsg("init")
@@ -35,34 +26,8 @@ class HotSwapSpec extends AkkaSpec with ImplicitSender {
expectMsg("swapped")
}
- "be able to revert hotswap its behavior with RevertHotSwap(..)" in {
- val a = system.actorOf(new Actor {
- def receive = {
- case "init" ⇒ sender ! "init"
- }
- })
-
- a ! "init"
- expectMsg("init")
- a ! HotSwap(context ⇒ { case "swapped" ⇒ context.sender ! "swapped" })
-
- a ! "swapped"
- expectMsg("swapped")
-
- a ! RevertHotSwap
-
- a ! "init"
- expectMsg("init")
-
- // try to revert hotswap below the bottom of the stack
- a ! RevertHotSwap
-
- a ! "init"
- expectMsg("init")
- }
-
"be able to revert hotswap its behavior with unbecome" in {
- val a = system.actorOf(new Actor {
+ val a = system.actorOf(Props(new Actor {
def receive = {
case "init" ⇒ sender ! "init"
case "swap" ⇒
@@ -73,7 +38,7 @@ class HotSwapSpec extends AkkaSpec with ImplicitSender {
context.unbecome()
})
}
- })
+ }))
a ! "init"
expectMsg("init")
@@ -89,7 +54,7 @@ class HotSwapSpec extends AkkaSpec with ImplicitSender {
"revert to initial state on restart" in {
- val a = system.actorOf(new Actor {
+ val a = system.actorOf(Props(new Actor {
def receive = {
case "state" ⇒ sender ! "0"
case "swap" ⇒
@@ -100,7 +65,7 @@ class HotSwapSpec extends AkkaSpec with ImplicitSender {
})
sender ! "swapped"
}
- })
+ }))
a ! "state"
expectMsg("0")
a ! "swap"
diff --git a/akka-actor-tests/src/test/scala/akka/actor/IOActor.scala b/akka-actor-tests/src/test/scala/akka/actor/IOActor.scala
index 8e15f5fbbe..f2127d92bc 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/IOActor.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/IOActor.scala
@@ -46,7 +46,7 @@ object IOActorSpec {
class SimpleEchoClient(host: String, port: Int, ioManager: ActorRef) extends Actor with IO {
lazy val socket: SocketHandle = connect(ioManager, host, port)(reader)
- lazy val reader: ActorRef = context.actorOf {
+ lazy val reader: ActorRef = context.actorOf(Props({
new Actor with IO {
def receiveIO = {
case length: Int ⇒
@@ -54,7 +54,7 @@ object IOActorSpec {
sender ! bytes
}
}
- }
+ }))
def receiveIO = {
case bytes: ByteString ⇒
@@ -186,10 +186,10 @@ class IOActorSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout {
"an IO Actor" must {
"run echo server" in {
val started = TestLatch(1)
- val ioManager = system.actorOf(new IOManager(2)) // teeny tiny buffer
- val server = system.actorOf(new SimpleEchoServer("localhost", 8064, ioManager, started))
+ val ioManager = system.actorOf(Props(new IOManager(2))) // teeny tiny buffer
+ val server = system.actorOf(Props(new SimpleEchoServer("localhost", 8064, ioManager, started)))
started.await
- val client = system.actorOf(new SimpleEchoClient("localhost", 8064, ioManager))
+ val client = system.actorOf(Props(new SimpleEchoClient("localhost", 8064, ioManager)))
val f1 = client ? ByteString("Hello World!1")
val f2 = client ? ByteString("Hello World!2")
val f3 = client ? ByteString("Hello World!3")
@@ -203,10 +203,10 @@ class IOActorSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout {
"run echo server under high load" in {
val started = TestLatch(1)
- val ioManager = system.actorOf(new IOManager())
- val server = system.actorOf(new SimpleEchoServer("localhost", 8065, ioManager, started))
+ val ioManager = system.actorOf(Props(new IOManager()))
+ val server = system.actorOf(Props(new SimpleEchoServer("localhost", 8065, ioManager, started)))
started.await
- val client = system.actorOf(new SimpleEchoClient("localhost", 8065, ioManager))
+ val client = system.actorOf(Props(new SimpleEchoClient("localhost", 8065, ioManager)))
val list = List.range(0, 1000)
val f = Future.traverse(list)(i ⇒ client ? ByteString(i.toString))
assert(f.get.size === 1000)
@@ -217,10 +217,10 @@ class IOActorSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout {
"run echo server under high load with small buffer" in {
val started = TestLatch(1)
- val ioManager = system.actorOf(new IOManager(2))
- val server = system.actorOf(new SimpleEchoServer("localhost", 8066, ioManager, started))
+ val ioManager = system.actorOf(Props(new IOManager(2)))
+ val server = system.actorOf(Props(new SimpleEchoServer("localhost", 8066, ioManager, started)))
started.await
- val client = system.actorOf(new SimpleEchoClient("localhost", 8066, ioManager))
+ val client = system.actorOf(Props(new SimpleEchoClient("localhost", 8066, ioManager)))
val list = List.range(0, 1000)
val f = Future.traverse(list)(i ⇒ client ? ByteString(i.toString))
assert(f.get.size === 1000)
@@ -231,11 +231,11 @@ class IOActorSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout {
"run key-value store" in {
val started = TestLatch(1)
- val ioManager = system.actorOf(new IOManager(2)) // teeny tiny buffer
- val server = system.actorOf(new KVStore("localhost", 8067, ioManager, started))
+ val ioManager = system.actorOf(Props(new IOManager(2))) // teeny tiny buffer
+ val server = system.actorOf(Props(new KVStore("localhost", 8067, ioManager, started)))
started.await
- val client1 = system.actorOf(new KVClient("localhost", 8067, ioManager))
- val client2 = system.actorOf(new KVClient("localhost", 8067, ioManager))
+ val client1 = system.actorOf(Props(new KVClient("localhost", 8067, ioManager)))
+ val client2 = system.actorOf(Props(new KVClient("localhost", 8067, ioManager)))
val f1 = client1 ? (('set, "hello", ByteString("World")))
val f2 = client1 ? (('set, "test", ByteString("No one will read me")))
val f3 = client1 ? (('get, "hello"))
diff --git a/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala
index 991332871c..ad92865124 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/LocalActorRefProviderSpec.scala
@@ -42,12 +42,12 @@ class LocalActorRefProviderSpec extends AkkaSpec {
}
"only create one instance of an actor from within the same message invocation" in {
- val supervisor = system.actorOf(new Actor {
+ val supervisor = system.actorOf(Props(new Actor {
def receive = {
case "" ⇒
val a, b = context.actorOf(Props.empty, "duplicate")
}
- })
+ }))
EventFilter[InvalidActorNameException](occurrences = 1) intercept {
supervisor ! ""
}
diff --git a/akka-actor-tests/src/test/scala/akka/actor/ReceiveTimeoutSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/ReceiveTimeoutSpec.scala
index a706fd6bdb..02b5aab8c1 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/ReceiveTimeoutSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/ReceiveTimeoutSpec.scala
@@ -17,53 +17,30 @@ class ReceiveTimeoutSpec extends AkkaSpec {
"get timeout" in {
val timeoutLatch = TestLatch()
- val timeoutActor = system.actorOf(new Actor {
- context.receiveTimeout = Some(500 milliseconds)
+ val timeoutActor = system.actorOf(Props(new Actor {
+ context.setReceiveTimeout(500 milliseconds)
protected def receive = {
case ReceiveTimeout ⇒ timeoutLatch.open
}
- })
+ }))
timeoutLatch.await
timeoutActor.stop()
}
- "get timeout when swapped" in {
- val timeoutLatch = TestLatch()
-
- val timeoutActor = system.actorOf(new Actor {
- context.receiveTimeout = Some(500 milliseconds)
-
- protected def receive = {
- case ReceiveTimeout ⇒ timeoutLatch.open
- }
- })
-
- timeoutLatch.await
-
- val swappedLatch = TestLatch()
-
- timeoutActor ! HotSwap(context ⇒ {
- case ReceiveTimeout ⇒ swappedLatch.open
- })
-
- swappedLatch.await
- timeoutActor.stop()
- }
-
"reschedule timeout after regular receive" in {
val timeoutLatch = TestLatch()
case object Tick
- val timeoutActor = system.actorOf(new Actor {
- context.receiveTimeout = Some(500 milliseconds)
+ val timeoutActor = system.actorOf(Props(new Actor {
+ context.setReceiveTimeout(500 milliseconds)
protected def receive = {
case Tick ⇒ ()
case ReceiveTimeout ⇒ timeoutLatch.open
}
- })
+ }))
timeoutActor ! Tick
@@ -76,17 +53,17 @@ class ReceiveTimeoutSpec extends AkkaSpec {
val timeoutLatch = TestLatch()
case object Tick
- val timeoutActor = system.actorOf(new Actor {
- context.receiveTimeout = Some(500 milliseconds)
+ val timeoutActor = system.actorOf(Props(new Actor {
+ context.setReceiveTimeout(500 milliseconds)
protected def receive = {
case Tick ⇒ ()
case ReceiveTimeout ⇒
count.incrementAndGet
timeoutLatch.open
- context.receiveTimeout = None
+ context.resetReceiveTimeout()
}
- })
+ }))
timeoutActor ! Tick
@@ -98,11 +75,11 @@ class ReceiveTimeoutSpec extends AkkaSpec {
"not receive timeout message when not specified" in {
val timeoutLatch = TestLatch()
- val timeoutActor = system.actorOf(new Actor {
+ val timeoutActor = system.actorOf(Props(new Actor {
protected def receive = {
case ReceiveTimeout ⇒ timeoutLatch.open
}
- })
+ }))
timeoutLatch.awaitTimeout(1 second) // timeout expected
timeoutActor.stop()
diff --git a/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala
index bca3a754c8..ceeb768b6c 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/SchedulerSpec.scala
@@ -26,9 +26,9 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout
"schedule more than once" in {
case object Tick
val countDownLatch = new CountDownLatch(3)
- val tickActor = system.actorOf(new Actor {
+ val tickActor = system.actorOf(Props(new Actor {
def receive = { case Tick ⇒ countDownLatch.countDown() }
- })
+ }))
// run every 50 milliseconds
collectCancellable(system.scheduler.schedule(0 milliseconds, 50 milliseconds, tickActor, Tick))
@@ -56,9 +56,9 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout
"schedule once" in {
case object Tick
val countDownLatch = new CountDownLatch(3)
- val tickActor = system.actorOf(new Actor {
+ val tickActor = system.actorOf(Props(new Actor {
def receive = { case Tick ⇒ countDownLatch.countDown() }
- })
+ }))
// run after 300 millisec
collectCancellable(system.scheduler.scheduleOnce(300 milliseconds, tickActor, Tick))
@@ -81,9 +81,9 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout
object Ping
val ticks = new CountDownLatch(1)
- val actor = system.actorOf(new Actor {
+ val actor = system.actorOf(Props(new Actor {
def receive = { case Ping ⇒ ticks.countDown() }
- })
+ }))
(1 to 10).foreach { i ⇒
val timeout = collectCancellable(system.scheduler.scheduleOnce(1 second, actor, Ping))
@@ -131,7 +131,7 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout
case class Msg(ts: Long)
- val actor = system.actorOf(new Actor {
+ val actor = system.actorOf(Props(new Actor {
def receive = {
case Msg(ts) ⇒
val now = System.nanoTime
@@ -139,7 +139,7 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout
if (now - ts < 10000000) throw new RuntimeException("Interval is too small: " + (now - ts))
ticks.countDown()
}
- })
+ }))
(1 to 300).foreach { i ⇒
collectCancellable(system.scheduler.scheduleOnce(10 milliseconds, actor, Msg(System.nanoTime)))
@@ -154,11 +154,11 @@ class SchedulerSpec extends AkkaSpec with BeforeAndAfterEach with DefaultTimeout
case object Msg
- val actor = system.actorOf(new Actor {
+ val actor = system.actorOf(Props(new Actor {
def receive = {
case Msg ⇒ ticks.countDown()
}
- })
+ }))
val startTime = System.nanoTime()
val cancellable = system.scheduler.schedule(1 second, 100 milliseconds, actor, Msg)
diff --git a/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala
index 3cc54ea6bb..b2265367c7 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/TypedActorSpec.scala
@@ -334,7 +334,7 @@ class TypedActorSpec extends AkkaSpec with BeforeAndAfterEach with BeforeAndAfte
mustStop(t)
}
- "be able to use work-stealing dispatcher" in {
+ "be able to use balancing dispatcher" in {
val props = Props(
timeout = Timeout(6600),
dispatcher = system.dispatcherFactory.newBalancingDispatcher("pooled-dispatcher")
diff --git a/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatcherActorsSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatcherActorsSpec.scala
index 71a03c6e01..8ad5bc641d 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatcherActorsSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatcherActorsSpec.scala
@@ -1,13 +1,11 @@
package akka.actor.dispatch
import java.util.concurrent.CountDownLatch
-import akka.actor.Actor
+import akka.actor._
import akka.testkit.AkkaSpec
/**
* Tests the behavior of the executor based event driven dispatcher when multiple actors are being dispatched on it.
- *
- * @author Jan Van Besien
*/
@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
class DispatcherActorsSpec extends AkkaSpec {
@@ -33,8 +31,8 @@ class DispatcherActorsSpec extends AkkaSpec {
"not block fast actors by slow actors" in {
val sFinished = new CountDownLatch(50)
val fFinished = new CountDownLatch(10)
- val s = system.actorOf(new SlowActor(sFinished))
- val f = system.actorOf(new FastActor(fFinished))
+ val s = system.actorOf(Props(new SlowActor(sFinished)))
+ val f = system.actorOf(Props(new FastActor(fFinished)))
// send a lot of stuff to s
for (i ← 1 to 50) {
diff --git a/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatchersSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatchersSpec.scala
index d23bc8ce57..471cd957c0 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatchersSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/dispatch/DispatchersSpec.scala
@@ -10,8 +10,18 @@ import akka.testkit.AkkaSpec
import scala.collection.JavaConverters._
import com.typesafe.config.ConfigFactory
+object DispatchersSpec {
+ val config = """
+ myapp {
+ mydispatcher {
+ throughput = 17
+ }
+ }
+ """
+}
+
@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
-class DispatchersSpec extends AkkaSpec {
+class DispatchersSpec extends AkkaSpec(DispatchersSpec.config) {
val df = system.dispatcherFactory
import df._
@@ -34,14 +44,6 @@ class DispatchersSpec extends AkkaSpec {
val defaultDispatcherConfig = settings.config.getConfig("akka.actor.default-dispatcher")
- val dispatcherConf = ConfigFactory.parseString("""
- myapp {
- mydispatcher {
- throughput = 17
- }
- }
- """)
-
lazy val allDispatchers: Map[String, Option[MessageDispatcher]] = {
validTypes.map(t ⇒ (t, from(ConfigFactory.parseMap(Map(tipe -> t).asJava).withFallback(defaultDispatcherConfig)))).toMap
}
@@ -58,16 +60,21 @@ class DispatchersSpec extends AkkaSpec {
dispatcher.map(_.throughput) must be(Some(17))
}
- "use defined properties when fromConfig" in {
- val dispatcher = fromConfig("myapp.mydispatcher", cfg = dispatcherConf)
+ "use defined properties when newFromConfig" in {
+ val dispatcher = newFromConfig("myapp.mydispatcher")
dispatcher.throughput must be(17)
}
- "use specific name when fromConfig" in {
- val dispatcher = fromConfig("myapp.mydispatcher", cfg = dispatcherConf)
+ "use specific name when newFromConfig" in {
+ val dispatcher = newFromConfig("myapp.mydispatcher")
dispatcher.name must be("mydispatcher")
}
+ "use default dispatcher when not configured" in {
+ val dispatcher = newFromConfig("myapp.other-dispatcher")
+ dispatcher must be === defaultGlobalDispatcher
+ }
+
"throw IllegalArgumentException if type does not exist" in {
intercept[IllegalArgumentException] {
from(ConfigFactory.parseMap(Map(tipe -> "typedoesntexist").asJava).withFallback(defaultDispatcherConfig))
@@ -81,6 +88,13 @@ class DispatchersSpec extends AkkaSpec {
assert(typesAndValidators.forall(tuple ⇒ tuple._2(allDispatchers(tuple._1).get)))
}
+ "provide lookup of dispatchers by key" in {
+ val d1 = lookup("myapp.mydispatcher")
+ val d2 = lookup("myapp.mydispatcher")
+ d1 must be === d2
+ d1.name must be("mydispatcher")
+ }
+
}
}
diff --git a/akka-actor-tests/src/test/scala/akka/actor/routing/ListenerSpec.scala b/akka-actor-tests/src/test/scala/akka/actor/routing/ListenerSpec.scala
index 8f074a504c..ab149216a7 100644
--- a/akka-actor-tests/src/test/scala/akka/actor/routing/ListenerSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/actor/routing/ListenerSpec.scala
@@ -16,13 +16,13 @@ class ListenerSpec extends AkkaSpec {
val barLatch = TestLatch(2)
val barCount = new AtomicInteger(0)
- val broadcast = system.actorOf(new Actor with Listeners {
+ val broadcast = system.actorOf(Props(new Actor with Listeners {
def receive = listenerManagement orElse {
case "foo" ⇒ gossip("bar")
}
- })
+ }))
- def newListener = system.actorOf(new Actor {
+ def newListener = system.actorOf(Props(new Actor {
def receive = {
case "bar" ⇒
barCount.incrementAndGet
@@ -30,7 +30,7 @@ class ListenerSpec extends AkkaSpec {
case "foo" ⇒
fooLatch.countDown()
}
- })
+ }))
val a1 = newListener
val a2 = newListener
diff --git a/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala b/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala
index 2ef735b05f..44ddf4f8bc 100644
--- a/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala
@@ -6,7 +6,7 @@ import org.scalacheck._
import org.scalacheck.Arbitrary._
import org.scalacheck.Prop._
import org.scalacheck.Gen._
-import akka.actor.{ Actor, ActorRef, Status }
+import akka.actor._
import akka.testkit.{ EventFilter, filterEvents, filterException }
import akka.util.duration._
import org.multiverse.api.latches.StandardLatch
@@ -116,7 +116,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
"from an Actor" that {
"returns a result" must {
behave like futureWithResult { test ⇒
- val actor = system.actorOf[TestActor]
+ val actor = system.actorOf(Props[TestActor])
val future = actor ? "Hello"
future.await
test(future, "World")
@@ -126,7 +126,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
"throws an exception" must {
behave like futureWithException[RuntimeException] { test ⇒
filterException[RuntimeException] {
- val actor = system.actorOf[TestActor]
+ val actor = system.actorOf(Props[TestActor])
val future = actor ? "Failure"
future.await
test(future, "Expected exception; to test fault-tolerance")
@@ -139,8 +139,8 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
"using flatMap with an Actor" that {
"will return a result" must {
behave like futureWithResult { test ⇒
- val actor1 = system.actorOf[TestActor]
- val actor2 = system.actorOf(new Actor { def receive = { case s: String ⇒ sender ! s.toUpperCase } })
+ val actor1 = system.actorOf(Props[TestActor])
+ val actor2 = system.actorOf(Props(new Actor { def receive = { case s: String ⇒ sender ! s.toUpperCase } }))
val future = actor1 ? "Hello" flatMap { case s: String ⇒ actor2 ? s }
future.await
test(future, "WORLD")
@@ -151,8 +151,8 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
"will throw an exception" must {
behave like futureWithException[ArithmeticException] { test ⇒
filterException[ArithmeticException] {
- val actor1 = system.actorOf[TestActor]
- val actor2 = system.actorOf(new Actor { def receive = { case s: String ⇒ sender ! Status.Failure(new ArithmeticException("/ by zero")) } })
+ val actor1 = system.actorOf(Props[TestActor])
+ val actor2 = system.actorOf(Props(new Actor { def receive = { case s: String ⇒ sender ! Status.Failure(new ArithmeticException("/ by zero")) } }))
val future = actor1 ? "Hello" flatMap { case s: String ⇒ actor2 ? s }
future.await
test(future, "/ by zero")
@@ -164,8 +164,8 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
"will throw a MatchError when matching wrong type" must {
behave like futureWithException[MatchError] { test ⇒
filterException[MatchError] {
- val actor1 = system.actorOf[TestActor]
- val actor2 = system.actorOf(new Actor { def receive = { case s: String ⇒ sender ! s.toUpperCase } })
+ val actor1 = system.actorOf(Props[TestActor])
+ val actor2 = system.actorOf(Props(new Actor { def receive = { case s: String ⇒ sender ! s.toUpperCase } }))
val future = actor1 ? "Hello" flatMap { case i: Int ⇒ actor2 ? i }
future.await
test(future, "World (of class java.lang.String)")
@@ -180,12 +180,12 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
"compose with for-comprehensions" in {
filterException[ClassCastException] {
- val actor = system.actorOf(new Actor {
+ val actor = system.actorOf(Props(new Actor {
def receive = {
case s: String ⇒ sender ! s.length
case i: Int ⇒ sender ! (i * 2).toString
}
- })
+ }))
val future0 = actor ? "Hello"
@@ -212,12 +212,12 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
filterException[MatchError] {
case class Req[T](req: T)
case class Res[T](res: T)
- val actor = system.actorOf(new Actor {
+ val actor = system.actorOf(Props(new Actor {
def receive = {
case Req(s: String) ⇒ sender ! Res(s.length)
case Req(i: Int) ⇒ sender ! Res((i * 2).toString)
}
- })
+ }))
val future1 = for {
Res(a: Int) ← actor ? Req("Hello")
@@ -257,7 +257,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
val future7 = future3 recover { case e: ArithmeticException ⇒ "You got ERROR" }
- val actor = system.actorOf[TestActor]
+ val actor = system.actorOf(Props[TestActor])
val future8 = actor ? "Failure"
val future9 = actor ? "Failure" recover {
@@ -300,9 +300,9 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
"fold" in {
val actors = (1 to 10).toList map { _ ⇒
- system.actorOf(new Actor {
+ system.actorOf(Props(new Actor {
def receive = { case (add: Int, wait: Int) ⇒ Thread.sleep(wait); sender.tell(add) }
- })
+ }))
}
val timeout = 10000
def futures = actors.zipWithIndex map { case (actor: ActorRef, idx: Int) ⇒ actor.?((idx, idx * 200), timeout).mapTo[Int] }
@@ -311,9 +311,9 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
"fold by composing" in {
val actors = (1 to 10).toList map { _ ⇒
- system.actorOf(new Actor {
+ system.actorOf(Props(new Actor {
def receive = { case (add: Int, wait: Int) ⇒ Thread.sleep(wait); sender.tell(add) }
- })
+ }))
}
def futures = actors.zipWithIndex map { case (actor: ActorRef, idx: Int) ⇒ actor.?((idx, idx * 200), 10000).mapTo[Int] }
futures.foldLeft(Future(0))((fr, fa) ⇒ for (r ← fr; a ← fa) yield (r + a)).get must be(45)
@@ -322,14 +322,14 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
"fold with an exception" in {
filterException[IllegalArgumentException] {
val actors = (1 to 10).toList map { _ ⇒
- system.actorOf(new Actor {
+ system.actorOf(Props(new Actor {
def receive = {
case (add: Int, wait: Int) ⇒
Thread.sleep(wait)
if (add == 6) sender ! Status.Failure(new IllegalArgumentException("shouldFoldResultsWithException: expected"))
else sender.tell(add)
}
- })
+ }))
}
val timeout = 10000
def futures = actors.zipWithIndex map { case (actor: ActorRef, idx: Int) ⇒ actor.?((idx, idx * 100), timeout).mapTo[Int] }
@@ -358,9 +358,9 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
"shouldReduceResults" in {
val actors = (1 to 10).toList map { _ ⇒
- system.actorOf(new Actor {
+ system.actorOf(Props(new Actor {
def receive = { case (add: Int, wait: Int) ⇒ Thread.sleep(wait); sender.tell(add) }
- })
+ }))
}
val timeout = 10000
def futures = actors.zipWithIndex map { case (actor: ActorRef, idx: Int) ⇒ actor.?((idx, idx * 200), timeout).mapTo[Int] }
@@ -370,14 +370,14 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
"shouldReduceResultsWithException" in {
filterException[IllegalArgumentException] {
val actors = (1 to 10).toList map { _ ⇒
- system.actorOf(new Actor {
+ system.actorOf(Props(new Actor {
def receive = {
case (add: Int, wait: Int) ⇒
Thread.sleep(wait)
if (add == 6) sender ! Status.Failure(new IllegalArgumentException("shouldFoldResultsWithException: expected"))
else sender.tell(add)
}
- })
+ }))
}
val timeout = 10000
def futures = actors.zipWithIndex map { case (actor: ActorRef, idx: Int) ⇒ actor.?((idx, idx * 100), timeout).mapTo[Int] }
@@ -393,21 +393,21 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
"receiveShouldExecuteOnComplete" in {
val latch = new StandardLatch
- val actor = system.actorOf[TestActor]
+ val actor = system.actorOf(Props[TestActor])
actor ? "Hello" onResult { case "World" ⇒ latch.open }
assert(latch.tryAwait(5, TimeUnit.SECONDS))
actor.stop()
}
"shouldTraverseFutures" in {
- val oddActor = system.actorOf(new Actor {
+ val oddActor = system.actorOf(Props(new Actor {
var counter = 1
def receive = {
case 'GetNext ⇒
sender ! counter
counter += 2
}
- })
+ }))
val oddFutures = List.fill(100)(oddActor ? 'GetNext mapTo manifest[Int])
assert(Future.sequence(oddFutures).get.sum === 10000)
@@ -461,7 +461,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
"futureComposingWithContinuations" in {
import Future.flow
- val actor = system.actorOf[TestActor]
+ val actor = system.actorOf(Props[TestActor])
val x = Future("Hello")
val y = x flatMap (actor ? _) mapTo manifest[String]
@@ -490,7 +490,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
filterException[ClassCastException] {
import Future.flow
- val actor = system.actorOf[TestActor]
+ val actor = system.actorOf(Props[TestActor])
val x = Future(3)
val y = (actor ? "Hello").mapTo[Int]
@@ -505,7 +505,7 @@ class FutureSpec extends AkkaSpec with Checkers with BeforeAndAfterAll with Defa
filterException[ClassCastException] {
import Future.flow
- val actor = system.actorOf[TestActor]
+ val actor = system.actorOf(Props[TestActor])
val x = Future("Hello")
val y = actor ? "Hello" mapTo manifest[Nothing]
diff --git a/akka-actor-tests/src/test/scala/akka/event/LoggingReceiveSpec.scala b/akka-actor-tests/src/test/scala/akka/event/LoggingReceiveSpec.scala
index 4cd1221afd..6427997b78 100644
--- a/akka-actor-tests/src/test/scala/akka/event/LoggingReceiveSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/event/LoggingReceiveSpec.scala
@@ -13,7 +13,6 @@ import scala.collection.JavaConverters._
import java.util.Properties
import akka.actor.Actor
import akka.actor.ActorSystem
-import akka.actor.HotSwap
import akka.actor.UnhandledMessageException
import akka.actor.PoisonPill
import akka.actor.ActorSystemImpl
@@ -77,22 +76,27 @@ class LoggingReceiveSpec extends WordSpec with BeforeAndAfterEach with BeforeAnd
ignoreMute(this)
system.eventStream.subscribe(testActor, classOf[Logging.Debug])
system.eventStream.subscribe(testActor, classOf[Logging.Error])
+
+ val r: Actor.Receive = {
+ case null ⇒
+ }
+
val actor = TestActorRef(new Actor {
- def receive = LoggingReceive(this) {
- case x ⇒
- sender ! "x"
+ def switch: Actor.Receive = { case "becomenull" ⇒ context.become(r, false) }
+ def receive = switch orElse LoggingReceive(this) {
+ case x ⇒ sender ! "x"
}
})
+
val name = actor.path.toString
actor ! "buh"
within(1 second) {
expectMsg(Logging.Debug(name, "received handled message buh"))
expectMsg("x")
}
- val r: Actor.Receive = {
- case null ⇒
- }
- actor ! HotSwap(_ ⇒ r, false)
+
+ actor ! "becomenull"
+
EventFilter[UnhandledMessageException](pattern = "does not handle", occurrences = 1) intercept {
within(500 millis) {
actor ! "bah"
diff --git a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellLatencyPerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellLatencyPerformanceSpec.scala
index 2de861c62c..c1de7702e3 100644
--- a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellLatencyPerformanceSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellLatencyPerformanceSpec.scala
@@ -58,11 +58,11 @@ class TellLatencyPerformanceSpec extends PerformanceSpec {
val latch = new CountDownLatch(numberOfClients)
val repeatsPerClient = repeat / numberOfClients
val clients = (for (i ← 0 until numberOfClients) yield {
- val destination = system.actorOf[Destination]
- val w4 = system.actorOf(new Waypoint(destination))
- val w3 = system.actorOf(new Waypoint(w4))
- val w2 = system.actorOf(new Waypoint(w3))
- val w1 = system.actorOf(new Waypoint(w2))
+ val destination = system.actorOf(Props[Destination])
+ val w4 = system.actorOf(Props(new Waypoint(destination)))
+ val w3 = system.actorOf(Props(new Waypoint(w4)))
+ val w2 = system.actorOf(Props(new Waypoint(w3)))
+ val w1 = system.actorOf(Props(new Waypoint(w2)))
Props(new Client(w1, latch, repeatsPerClient, clientDelay.toMicros.intValue, stat)).withDispatcher(clientDispatcher)
}).toList.map(system.actorOf(_))
@@ -133,4 +133,4 @@ object TellLatencyPerformanceSpec {
}
-}
\ No newline at end of file
+}
diff --git a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputSeparateDispatchersPerformanceSpec.scala b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputSeparateDispatchersPerformanceSpec.scala
index aef501bb2d..ca471b2222 100644
--- a/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputSeparateDispatchersPerformanceSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/performance/microbench/TellThroughputSeparateDispatchersPerformanceSpec.scala
@@ -127,7 +127,7 @@ class TellThroughputSeparateDispatchersPerformanceSpec extends PerformanceSpec {
yield system.actorOf(Props(new Destination).withDispatcher(clientDispatcher))
val clients = for ((dest, j) ← destinations.zipWithIndex)
yield system.actorOf(Props(new Client(dest, latch, repeatsPerClient)).withDispatcher(clientDispatcher))
- */
+ */
val start = System.nanoTime
clients.foreach(_ ! Run)
diff --git a/akka-actor-tests/src/test/scala/akka/routing/ActorPoolSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/ActorPoolSpec.scala
index 943718848a..1893732686 100644
--- a/akka-actor-tests/src/test/scala/akka/routing/ActorPoolSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/routing/ActorPoolSpec.scala
@@ -82,11 +82,11 @@ class ActorPoolSpec extends AkkaSpec with DefaultTimeout {
}).withFaultHandler(faultHandler))
val successes = TestLatch(2)
- val successCounter = system.actorOf(new Actor {
+ val successCounter = system.actorOf(Props(new Actor {
def receive = {
case "success" ⇒ successes.countDown()
}
- })
+ }))
implicit val replyTo = successCounter
pool ! "a"
diff --git a/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala b/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala
index fec56431e5..e6e0f1c898 100644
--- a/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/routing/RoutingSpec.scala
@@ -109,12 +109,12 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender {
for (i ← 0 until connectionCount) {
counters = counters :+ new AtomicInteger()
- val actor = system.actorOf(new Actor {
+ val actor = system.actorOf(Props(new Actor {
def receive = {
case "end" ⇒ doneLatch.countDown()
case msg: Int ⇒ counters.get(i).get.addAndGet(msg)
}
- })
+ }))
actors = actors :+ actor
}
@@ -141,20 +141,20 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender {
val doneLatch = new CountDownLatch(2)
val counter1 = new AtomicInteger
- val actor1 = system.actorOf(new Actor {
+ val actor1 = system.actorOf(Props(new Actor {
def receive = {
case "end" ⇒ doneLatch.countDown()
case msg: Int ⇒ counter1.addAndGet(msg)
}
- })
+ }))
val counter2 = new AtomicInteger
- val actor2 = system.actorOf(new Actor {
+ val actor2 = system.actorOf(Props(new Actor {
def receive = {
case "end" ⇒ doneLatch.countDown()
case msg: Int ⇒ counter2.addAndGet(msg)
}
- })
+ }))
val routedActor = system.actorOf(Props(new TestActor).withRouter(RoundRobinRouter(targets = List(actor1, actor2))))
@@ -179,20 +179,20 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender {
val doneLatch = new CountDownLatch(2)
val counter1 = new AtomicInteger
- val actor1 = system.actorOf(new Actor {
+ val actor1 = system.actorOf(Props(new Actor {
def receive = {
case "end" ⇒ doneLatch.countDown()
case msg: Int ⇒ counter1.addAndGet(msg)
}
- })
+ }))
val counter2 = new AtomicInteger
- val actor2 = system.actorOf(new Actor {
+ val actor2 = system.actorOf(Props(new Actor {
def receive = {
case "end" ⇒ doneLatch.countDown()
case msg: Int ⇒ counter2.addAndGet(msg)
}
- })
+ }))
val routedActor = system.actorOf(Props(new TestActor).withRouter(RandomRouter(targets = List(actor1, actor2))))
@@ -216,20 +216,20 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender {
val doneLatch = new CountDownLatch(2)
val counter1 = new AtomicInteger
- val actor1 = system.actorOf(new Actor {
+ val actor1 = system.actorOf(Props(new Actor {
def receive = {
case "end" ⇒ doneLatch.countDown()
case msg: Int ⇒ counter1.addAndGet(msg)
}
- })
+ }))
val counter2 = new AtomicInteger
- val actor2 = system.actorOf(new Actor {
+ val actor2 = system.actorOf(Props(new Actor {
def receive = {
case "end" ⇒ doneLatch.countDown()
case msg: Int ⇒ counter2.addAndGet(msg)
}
- })
+ }))
val routedActor = system.actorOf(Props(new TestActor).withRouter(BroadcastRouter(targets = List(actor1, actor2))))
routedActor ! 1
@@ -245,22 +245,22 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender {
val doneLatch = new CountDownLatch(2)
val counter1 = new AtomicInteger
- val actor1 = system.actorOf(new Actor {
+ val actor1 = system.actorOf(Props(new Actor {
def receive = {
case "end" ⇒ doneLatch.countDown()
case msg: Int ⇒
counter1.addAndGet(msg)
sender ! "ack"
}
- })
+ }))
val counter2 = new AtomicInteger
- val actor2 = system.actorOf(new Actor {
+ val actor2 = system.actorOf(Props(new Actor {
def receive = {
case "end" ⇒ doneLatch.countDown()
case msg: Int ⇒ counter2.addAndGet(msg)
}
- })
+ }))
val routedActor = system.actorOf(Props(new TestActor).withRouter(BroadcastRouter(targets = List(actor1, actor2))))
routedActor ? 1
@@ -284,20 +284,20 @@ class RoutingSpec extends AkkaSpec with DefaultTimeout with ImplicitSender {
val doneLatch = new TestLatch(2)
val counter1 = new AtomicInteger
- val actor1 = system.actorOf(new Actor {
+ val actor1 = system.actorOf(Props(new Actor {
def receive = {
case "end" ⇒ doneLatch.countDown()
case msg: Int ⇒ counter1.addAndGet(msg)
}
- })
+ }))
val counter2 = new AtomicInteger
- val actor2 = system.actorOf(new Actor {
+ val actor2 = system.actorOf(Props(new Actor {
def receive = {
case "end" ⇒ doneLatch.countDown()
case msg: Int ⇒ counter2.addAndGet(msg)
}
- })
+ }))
val routedActor = system.actorOf(Props(new TestActor).withRouter(ScatterGatherFirstCompletedRouter(targets = List(actor1, actor2))))
routedActor ! Broadcast(1)
diff --git a/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala b/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala
index 499d214ff8..3b0b6ea5bc 100644
--- a/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/serialization/SerializeSpec.scala
@@ -7,10 +7,9 @@ package akka.serialization
import akka.serialization.Serialization._
import scala.reflect._
import akka.testkit.AkkaSpec
-import akka.actor.{ ActorSystem, ActorSystemImpl }
-import java.io.{ ObjectInputStream, ByteArrayInputStream, ByteArrayOutputStream, ObjectOutputStream }
-import akka.actor.DeadLetterActorRef
import com.typesafe.config.ConfigFactory
+import akka.actor._
+import java.io._
object SerializeSpec {
@@ -94,6 +93,22 @@ class SerializeSpec extends AkkaSpec(SerializeSpec.serializationConf) {
}
}
+ "not serialize ActorCell" in {
+ val a = system.actorOf(Props(new Actor {
+ def receive = {
+ case o: ObjectOutputStream ⇒
+ try {
+ o.writeObject(this)
+ } catch {
+ case _: NotSerializableException ⇒ testActor ! "pass"
+ }
+ }
+ }))
+ a ! new ObjectOutputStream(new ByteArrayOutputStream())
+ expectMsg("pass")
+ a.stop()
+ }
+
"serialize DeadLetterActorRef" in {
val outbuf = new ByteArrayOutputStream()
val out = new ObjectOutputStream(outbuf)
diff --git a/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala b/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala
index d30fdc83be..6a291872b8 100644
--- a/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala
+++ b/akka-actor-tests/src/test/scala/akka/util/DurationSpec.scala
@@ -40,6 +40,16 @@ class DurationSpec extends WordSpec with MustMatchers {
(minf + minf) must be(minf)
}
+ "support fromNow" in {
+ val dead = 2.seconds.fromNow
+ val dead2 = 2 seconds fromNow
+ dead.timeLeft must be > 1.second
+ dead2.timeLeft must be > 1.second
+ 1.second.sleep
+ dead.timeLeft must be < 1.second
+ dead2.timeLeft must be < 1.second
+ }
+
}
}
diff --git a/akka-actor/src/main/java/com/typesafe/config/Config.java b/akka-actor/src/main/java/com/typesafe/config/Config.java
index 1c7fca50e5..44eebe1158 100644
--- a/akka-actor/src/main/java/com/typesafe/config/Config.java
+++ b/akka-actor/src/main/java/com/typesafe/config/Config.java
@@ -4,6 +4,8 @@
package com.typesafe.config;
import java.util.List;
+import java.util.Map;
+import java.util.Set;
/**
* An immutable map from config paths to config values.
@@ -32,6 +34,10 @@ import java.util.List;
* {@code ConfigObject} is a tree of nested maps from keys to values.
*
*
+ * Use {@link ConfigUtil#joinPath} and {@link ConfigUtil#splitPath} to convert
+ * between path expressions and individual path elements (keys).
+ *
+ *
* Another difference between {@code Config} and {@code ConfigObject} is that
* conceptually, {@code ConfigValue}s with a {@link ConfigValue#valueType()
* valueType()} of {@link ConfigValueType#NULL NULL} exist in a
@@ -54,10 +60,11 @@ import java.util.List;
* are performed for you though.
*
*
- * If you want to iterate over the contents of a {@code Config}, you have to get
- * its {@code ConfigObject} with {@link #root()}, and then iterate over the
- * {@code ConfigObject}.
- *
+ * If you want to iterate over the contents of a {@code Config}, you can get its
+ * {@code ConfigObject} with {@link #root()}, and then iterate over the
+ * {@code ConfigObject} (which implements java.util.Map). Or, you
+ * can use {@link #entrySet()} which recurses the object tree for you and builds
+ * up a Set of all path-value pairs where the value is not null.
*
*
* Do not implement {@code Config}; it should only be implemented by
@@ -256,6 +263,17 @@ public interface Config extends ConfigMergeable {
*/
boolean isEmpty();
+ /**
+ * Returns the set of path-value pairs, excluding any null values, found by
+ * recursing {@link #root() the root object}. Note that this is very
+ * different from root().entrySet() which returns the set of
+ * immediate-child keys in the root object and includes null values.
+ *
+ * @return set of paths with non-null values, built up by recursing the
+ * entire tree of {@link ConfigObject}
+ */
+ Set<Map.Entry<String, ConfigValue>> entrySet();
+
/**
*
* @param path
diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigFactory.java b/akka-actor/src/main/java/com/typesafe/config/ConfigFactory.java
index 9251b3fb45..dc851d7f2b 100644
--- a/akka-actor/src/main/java/com/typesafe/config/ConfigFactory.java
+++ b/akka-actor/src/main/java/com/typesafe/config/ConfigFactory.java
@@ -11,7 +11,7 @@ import java.util.Map;
import java.util.Properties;
import com.typesafe.config.impl.ConfigImpl;
-import com.typesafe.config.impl.ConfigUtil;
+import com.typesafe.config.impl.ConfigImplUtil;
import com.typesafe.config.impl.Parseable;
/**
@@ -179,7 +179,7 @@ public final class ConfigFactory {
try {
return DefaultConfigHolder.defaultConfig;
} catch (ExceptionInInitializerError e) {
- throw ConfigUtil.extractInitializerError(e);
+ throw ConfigImplUtil.extractInitializerError(e);
}
}
diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigObject.java b/akka-actor/src/main/java/com/typesafe/config/ConfigObject.java
index 8613840223..54cce1c39f 100644
--- a/akka-actor/src/main/java/com/typesafe/config/ConfigObject.java
+++ b/akka-actor/src/main/java/com/typesafe/config/ConfigObject.java
@@ -8,34 +8,38 @@ import java.util.Map;
/**
* Subtype of {@link ConfigValue} representing an object (dictionary, map)
* value, as in JSON's { "a" : 42 } syntax.
- *
+ *
*
* {@code ConfigObject} implements {@code java.util.Map} so
* you can use it like a regular Java map. Or call {@link #unwrapped()} to
* unwrap the map to a map with plain Java values rather than
* {@code ConfigValue}.
- *
+ *
*
* Like all {@link ConfigValue} subtypes, {@code ConfigObject} is immutable.
* This makes it threadsafe and you never have to create "defensive copies." The
* mutator methods from {@link java.util.Map} all throw
* {@link java.lang.UnsupportedOperationException}.
- *
+ *
*
* The {@link ConfigValue#valueType} method on an object returns
* {@link ConfigValueType#OBJECT}.
- *
+ *
*
* In most cases you want to use the {@link Config} interface rather than this
* one. Call {@link #toConfig()} to convert a {@code ConfigObject} to a
* {@code Config}.
- *
+ *
*
* The API for a {@code ConfigObject} is in terms of keys, while the API for a
* {@link Config} is in terms of path expressions. Conceptually,
* {@code ConfigObject} is a tree of maps from keys to values, while a
* {@code ConfigObject} is a one-level map from paths to values.
- *
+ *
+ *
+ * Use {@link ConfigUtil#joinPath} and {@link ConfigUtil#splitPath} to convert
+ * between path expressions and individual path elements (keys).
+ *
*
* A {@code ConfigObject} may contain null values, which will have
* {@link ConfigValue#valueType()} equal to {@link ConfigValueType#NULL}. If
@@ -43,7 +47,7 @@ import java.util.Map;
* file (or wherever this value tree came from). If {@code get()} returns a
* {@link ConfigValue} with type {@code ConfigValueType#NULL} then the key was
* set to null explicitly in the config file.
- *
+ *
*
* Do not implement {@code ConfigObject}; it should only be implemented
* by the config library. Arbitrary implementations will not work because the
diff --git a/akka-actor/src/main/java/com/typesafe/config/ConfigUtil.java b/akka-actor/src/main/java/com/typesafe/config/ConfigUtil.java
new file mode 100644
index 0000000000..1aa463f46c
--- /dev/null
+++ b/akka-actor/src/main/java/com/typesafe/config/ConfigUtil.java
@@ -0,0 +1,70 @@
+package com.typesafe.config;
+
+import java.util.List;
+
+import com.typesafe.config.impl.ConfigImplUtil;
+
+public final class ConfigUtil {
+ private ConfigUtil() {
+
+ }
+
+ /**
+ * Quotes and escapes a string, as in the JSON specification.
+ *
+ * @param s
+ * a string
+ * @return the string quoted and escaped
+ */
+ public static String quoteString(String s) {
+ return ConfigImplUtil.renderJsonString(s);
+ }
+
+ /**
+ * Converts a list of keys to a path expression, by quoting the path
+ * elements as needed and then joining them separated by a period. A path
+ * expression is usable with a {@link Config}, while individual path
+ * elements are usable with a {@link ConfigObject}.
+ *
+ * @param elements
+ * the keys in the path
+ * @return a path expression
+ * @throws ConfigException
+ * if there are no elements
+ */
+ public static String joinPath(String... elements) {
+ return ConfigImplUtil.joinPath(elements);
+ }
+
+ /**
+ * Converts a list of strings to a path expression, by quoting the path
+ * elements as needed and then joining them separated by a period. A path
+ * expression is usable with a {@link Config}, while individual path
+ * elements are usable with a {@link ConfigObject}.
+ *
+ * @param elements
+ * the keys in the path
+ * @return a path expression
+ * @throws ConfigException
+ * if the list is empty
+ */
+ public static String joinPath(List<String> elements) {
+ return ConfigImplUtil.joinPath(elements);
+ }
+
+ /**
+ * Converts a path expression into a list of keys, by splitting on period
+ * and unquoting the individual path elements. A path expression is usable
+ * with a {@link Config}, while individual path elements are usable with a
+ * {@link ConfigObject}.
+ *
+ * @param path
+ * a path expression
+ * @return the individual keys in the path
+ * @throws ConfigException
+ * if the path expression is invalid
+ */
+ public static List<String> splitPath(String path) {
+ return ConfigImplUtil.splitPath(path);
+ }
+}
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java b/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java
index 1bec6ec536..68ab5cc316 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java
@@ -144,7 +144,7 @@ abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
return canEqual(other)
&& (this.valueType() ==
((ConfigValue) other).valueType())
- && ConfigUtil.equalsHandlingNull(this.unwrapped(),
+ && ConfigImplUtil.equalsHandlingNull(this.unwrapped(),
((ConfigValue) other).unwrapped());
} else {
return false;
@@ -178,7 +178,7 @@ abstract class AbstractConfigValue implements ConfigValue, MergeableValue {
protected void render(StringBuilder sb, int indent, String atKey, boolean formatted) {
if (atKey != null) {
- sb.append(ConfigUtil.renderJsonString(atKey));
+ sb.append(ConfigImplUtil.renderJsonString(atKey));
sb.append(" : ");
}
render(sb, indent, formatted);
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMerge.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMerge.java
index b916d9a0a7..9846cc57f2 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMerge.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigDelayedMerge.java
@@ -189,7 +189,7 @@ final class ConfigDelayedMerge extends AbstractConfigValue implements
indent(sb, indent);
if (atKey != null) {
sb.append("# unmerged value " + i + " for key "
- + ConfigUtil.renderJsonString(atKey) + " from ");
+ + ConfigImplUtil.renderJsonString(atKey) + " from ");
} else {
sb.append("# unmerged value " + i + " from ");
}
@@ -200,7 +200,7 @@ final class ConfigDelayedMerge extends AbstractConfigValue implements
}
if (atKey != null) {
- sb.append(ConfigUtil.renderJsonString(atKey));
+ sb.append(ConfigImplUtil.renderJsonString(atKey));
sb.append(" : ");
}
v.render(sb, indent, formatted);
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigImpl.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigImpl.java
index 8c016d6f98..217f4385e9 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigImpl.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigImpl.java
@@ -40,47 +40,66 @@ public class ConfigImpl {
|| name.endsWith(".properties")) {
ConfigParseable p = source.nameToParseable(name);
- if (p != null) {
- obj = p.parse(p.options().setAllowMissing(
- options.getAllowMissing()));
- } else {
- obj = SimpleConfigObject.emptyMissing(SimpleConfigOrigin.newSimple(name));
- }
+ obj = p.parse(p.options().setAllowMissing(options.getAllowMissing()));
} else {
ConfigParseable confHandle = source.nameToParseable(name + ".conf");
ConfigParseable jsonHandle = source.nameToParseable(name + ".json");
ConfigParseable propsHandle = source.nameToParseable(name
+ ".properties");
-
- if (!options.getAllowMissing() && confHandle == null
- && jsonHandle == null && propsHandle == null) {
- throw new ConfigException.IO(SimpleConfigOrigin.newSimple(name),
- "No config files {.conf,.json,.properties} found");
- }
+ boolean gotSomething = false;
+ List<String> failMessages = new ArrayList<String>();
ConfigSyntax syntax = options.getSyntax();
obj = SimpleConfigObject.empty(SimpleConfigOrigin.newSimple(name));
- if (confHandle != null
- && (syntax == null || syntax == ConfigSyntax.CONF)) {
- obj = confHandle.parse(confHandle.options()
- .setAllowMissing(true).setSyntax(ConfigSyntax.CONF));
+ if (syntax == null || syntax == ConfigSyntax.CONF) {
+ try {
+ obj = confHandle.parse(confHandle.options().setAllowMissing(false)
+ .setSyntax(ConfigSyntax.CONF));
+ gotSomething = true;
+ } catch (ConfigException.IO e) {
+ failMessages.add(e.getMessage());
+ }
}
- if (jsonHandle != null
- && (syntax == null || syntax == ConfigSyntax.JSON)) {
- ConfigObject parsed = jsonHandle.parse(jsonHandle
- .options().setAllowMissing(true)
- .setSyntax(ConfigSyntax.JSON));
- obj = obj.withFallback(parsed);
+ if (syntax == null || syntax == ConfigSyntax.JSON) {
+ try {
+ ConfigObject parsed = jsonHandle.parse(jsonHandle.options()
+ .setAllowMissing(false).setSyntax(ConfigSyntax.JSON));
+ obj = obj.withFallback(parsed);
+ gotSomething = true;
+ } catch (ConfigException.IO e) {
+ failMessages.add(e.getMessage());
+ }
}
- if (propsHandle != null
- && (syntax == null || syntax == ConfigSyntax.PROPERTIES)) {
- ConfigObject parsed = propsHandle.parse(propsHandle.options()
- .setAllowMissing(true)
- .setSyntax(ConfigSyntax.PROPERTIES));
- obj = obj.withFallback(parsed);
+ if (syntax == null || syntax == ConfigSyntax.PROPERTIES) {
+ try {
+ ConfigObject parsed = propsHandle.parse(propsHandle.options()
+ .setAllowMissing(false).setSyntax(ConfigSyntax.PROPERTIES));
+ obj = obj.withFallback(parsed);
+ gotSomething = true;
+ } catch (ConfigException.IO e) {
+ failMessages.add(e.getMessage());
+ }
+ }
+
+ if (!options.getAllowMissing() && !gotSomething) {
+ String failMessage;
+ if (failMessages.isEmpty()) {
+ // this should not happen
+ throw new ConfigException.BugOrBroken(
+ "should not be reached: nothing found but no exceptions thrown");
+ } else {
+ StringBuilder sb = new StringBuilder();
+ for (String msg : failMessages) {
+ sb.append(msg);
+ sb.append(", ");
+ }
+ sb.setLength(sb.length() - 2);
+ failMessage = sb.toString();
+ }
+ throw new ConfigException.IO(SimpleConfigOrigin.newSimple(name), failMessage);
}
}
@@ -269,7 +288,14 @@ public class ConfigImpl {
NameSource source = new NameSource() {
@Override
public ConfigParseable nameToParseable(String name) {
- return context.relativeTo(name);
+ ConfigParseable p = context.relativeTo(name);
+ if (p == null) {
+ // avoid returning null
+ return Parseable.newNotFound(name, "include was not found: '" + name + "'",
+ ConfigParseOptions.defaults());
+ } else {
+ return p;
+ }
}
};
@@ -308,7 +334,7 @@ public class ConfigImpl {
try {
return DefaultIncluderHolder.defaultIncluder;
} catch (ExceptionInInitializerError e) {
- throw ConfigUtil.extractInitializerError(e);
+ throw ConfigImplUtil.extractInitializerError(e);
}
}
@@ -326,7 +352,7 @@ public class ConfigImpl {
try {
return SystemPropertiesHolder.systemProperties;
} catch (ExceptionInInitializerError e) {
- throw ConfigUtil.extractInitializerError(e);
+ throw ConfigImplUtil.extractInitializerError(e);
}
}
@@ -362,7 +388,7 @@ public class ConfigImpl {
try {
return EnvVariablesHolder.envVariables;
} catch (ExceptionInInitializerError e) {
- throw ConfigUtil.extractInitializerError(e);
+ throw ConfigImplUtil.extractInitializerError(e);
}
}
@@ -384,7 +410,7 @@ public class ConfigImpl {
try {
return ReferenceHolder.referenceConfig;
} catch (ExceptionInInitializerError e) {
- throw ConfigUtil.extractInitializerError(e);
+ throw ConfigImplUtil.extractInitializerError(e);
}
}
}
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigUtil.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigImplUtil.java
similarity index 78%
rename from akka-actor/src/main/java/com/typesafe/config/impl/ConfigUtil.java
rename to akka-actor/src/main/java/com/typesafe/config/impl/ConfigImplUtil.java
index 6f7b2c5aaa..cbc0ecca09 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigUtil.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigImplUtil.java
@@ -6,12 +6,14 @@ package com.typesafe.config.impl;
import java.io.File;
import java.net.URISyntaxException;
import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
import com.typesafe.config.ConfigException;
/** This is public just for the "config" package to use, don't touch it */
-final public class ConfigUtil {
+final public class ConfigImplUtil {
static boolean equalsHandlingNull(Object a, Object b) {
if (a == null && b != null)
return false;
@@ -23,7 +25,11 @@ final public class ConfigUtil {
return a.equals(b);
}
- static String renderJsonString(String s) {
+ /**
+ * This is public ONLY for use by the "config" package, DO NOT USE this ABI
+ * may change.
+ */
+ public static String renderJsonString(String s) {
StringBuilder sb = new StringBuilder();
sb.append('"');
for (int i = 0; i < s.length(); ++i) {
@@ -146,4 +152,34 @@ final public class ConfigUtil {
return new File(url.getPath());
}
}
+
+ /**
+ * This is public ONLY for use by the "config" package, DO NOT USE this ABI
+ * may change. You can use the version in ConfigUtil instead.
+ */
+ public static String joinPath(String... elements) {
+ return (new Path(elements)).render();
+ }
+
+ /**
+ * This is public ONLY for use by the "config" package, DO NOT USE this ABI
+ * may change. You can use the version in ConfigUtil instead.
+ */
+ public static String joinPath(List elements) {
+ return joinPath(elements.toArray(new String[0]));
+ }
+
+ /**
+ * This is public ONLY for use by the "config" package, DO NOT USE this ABI
+ * may change. You can use the version in ConfigUtil instead.
+ */
+ public static List splitPath(String path) {
+ Path p = Path.newPath(path);
+ List elements = new ArrayList();
+ while (p != null) {
+ elements.add(p.first());
+ p = p.remainder();
+ }
+ return elements;
+ }
}
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigString.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigString.java
index dd8a5fa3b0..0d1bc97920 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigString.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigString.java
@@ -32,6 +32,6 @@ final class ConfigString extends AbstractConfigValue {
@Override
protected void render(StringBuilder sb, int indent, boolean formatted) {
- sb.append(ConfigUtil.renderJsonString(value));
+ sb.append(ConfigImplUtil.renderJsonString(value));
}
}
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigSubstitution.java b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigSubstitution.java
index 8f1b43571c..9a8590bade 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/ConfigSubstitution.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/ConfigSubstitution.java
@@ -266,7 +266,7 @@ final class ConfigSubstitution extends AbstractConfigValue implements
if (p instanceof SubstitutionExpression) {
sb.append(p.toString());
} else {
- sb.append(ConfigUtil.renderJsonString((String) p));
+ sb.append(ConfigImplUtil.renderJsonString((String) p));
}
}
}
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Parseable.java b/akka-actor/src/main/java/com/typesafe/config/impl/Parseable.java
index e5b67540de..62b8ee575a 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/Parseable.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/Parseable.java
@@ -6,6 +6,7 @@ package com.typesafe.config.impl;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
+import java.io.FileNotFoundException;
import java.io.FilterReader;
import java.io.IOException;
import java.io.InputStream;
@@ -261,6 +262,34 @@ public abstract class Parseable implements ConfigParseable {
return new File(parent, filename);
}
+ // this is a parseable that doesn't exist and just throws when you try to
+ // parse it
+ private final static class ParseableNotFound extends Parseable {
+ final private String what;
+ final private String message;
+
+ ParseableNotFound(String what, String message, ConfigParseOptions options) {
+ this.what = what;
+ this.message = message;
+ postConstruct(options);
+ }
+
+ @Override
+ protected Reader reader() throws IOException {
+ throw new FileNotFoundException(message);
+ }
+
+ @Override
+ protected ConfigOrigin createOrigin() {
+ return SimpleConfigOrigin.newSimple(what);
+ }
+ }
+
+ public static Parseable newNotFound(String whatNotFound, String message,
+ ConfigParseOptions options) {
+ return new ParseableNotFound(whatNotFound, message, options);
+ }
+
private final static class ParseableReader extends Parseable {
final private Reader reader;
@@ -355,7 +384,7 @@ public abstract class Parseable implements ConfigParseable {
// we want file: URLs and files to always behave the same, so switch
// to a file if it's a file: URL
if (input.getProtocol().equals("file")) {
- return newFile(ConfigUtil.urlToFile(input), options);
+ return newFile(ConfigImplUtil.urlToFile(input), options);
} else {
return new ParseableURL(input, options);
}
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Parser.java b/akka-actor/src/main/java/com/typesafe/config/impl/Parser.java
index 8c1434b566..6f0de1211c 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/Parser.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/Parser.java
@@ -41,6 +41,10 @@ final class Parser {
final private ConfigSyntax flavor;
final private ConfigOrigin baseOrigin;
final private LinkedList pathStack;
+ // this is the number of "equals" we are inside,
+ // used to modify the error message to reflect that
+ // someone may think this is .properties format.
+ int equalsCount;
ParseContext(ConfigSyntax flavor, ConfigOrigin origin,
Iterator tokens, ConfigIncluder includer,
@@ -53,6 +57,7 @@ final class Parser {
this.includer = includer;
this.includeContext = includeContext;
this.pathStack = new LinkedList();
+ this.equalsCount = 0;
}
private Token nextToken() {
@@ -63,12 +68,25 @@ final class Parser {
t = buffer.pop();
}
+ if (Tokens.isProblem(t)) {
+ ConfigOrigin origin = t.origin();
+ String message = Tokens.getProblemMessage(t);
+ Throwable cause = Tokens.getProblemCause(t);
+ boolean suggestQuotes = Tokens.getProblemSuggestQuotes(t);
+ if (suggestQuotes) {
+ message = addQuoteSuggestion(t.toString(), message);
+ } else {
+ message = addKeyName(message);
+ }
+ throw new ConfigException.Parse(origin, message, cause);
+ }
+
if (flavor == ConfigSyntax.JSON) {
if (Tokens.isUnquotedText(t)) {
- throw parseError("Token not allowed in valid JSON: '"
- + Tokens.getUnquotedText(t) + "'");
+ throw parseError(addKeyName("Token not allowed in valid JSON: '"
+ + Tokens.getUnquotedText(t) + "'"));
} else if (Tokens.isSubstitution(t)) {
- throw parseError("Substitutions (${} syntax) not allowed in JSON");
+ throw parseError(addKeyName("Substitutions (${} syntax) not allowed in JSON"));
}
}
@@ -84,7 +102,7 @@ final class Parser {
while (Tokens.isNewline(t)) {
// line number tokens have the line that was _ended_ by the
// newline, so we have to add one.
- lineNumber = Tokens.getLineNumber(t) + 1;
+ lineNumber = t.lineNumber() + 1;
t = nextToken();
}
return t;
@@ -111,7 +129,7 @@ final class Parser {
while (true) {
if (Tokens.isNewline(t)) {
// newline number is the line just ended, so add one
- lineNumber = Tokens.getLineNumber(t) + 1;
+ lineNumber = t.lineNumber() + 1;
sawSeparatorOrNewline = true;
// we want to continue to also eat
// a comma if there is one.
@@ -172,11 +190,11 @@ final class Parser {
} else if (Tokens.isUnquotedText(valueToken)) {
String text = Tokens.getUnquotedText(valueToken);
if (firstOrigin == null)
- firstOrigin = Tokens.getUnquotedTextOrigin(valueToken);
+ firstOrigin = valueToken.origin();
sb.append(text);
} else if (Tokens.isSubstitution(valueToken)) {
if (firstOrigin == null)
- firstOrigin = Tokens.getSubstitutionOrigin(valueToken);
+ firstOrigin = valueToken.origin();
if (sb.length() > 0) {
// save string so far
@@ -186,8 +204,7 @@ final class Parser {
// now save substitution
List expression = Tokens
.getSubstitutionPathExpression(valueToken);
- Path path = parsePathExpression(expression.iterator(),
- Tokens.getSubstitutionOrigin(valueToken));
+ Path path = parsePathExpression(expression.iterator(), valueToken.origin());
boolean optional = Tokens.getSubstitutionOptional(valueToken);
minimized.add(new SubstitutionExpression(path, optional));
@@ -233,6 +250,65 @@ final class Parser {
return new ConfigException.Parse(lineOrigin(), message, cause);
}
+
+ private String previousFieldName(Path lastPath) {
+ if (lastPath != null) {
+ return lastPath.render();
+ } else if (pathStack.isEmpty())
+ return null;
+ else
+ return pathStack.peek().render();
+ }
+
+ private String previousFieldName() {
+ return previousFieldName(null);
+ }
+
+ private String addKeyName(String message) {
+ String previousFieldName = previousFieldName();
+ if (previousFieldName != null) {
+ return "in value for key '" + previousFieldName + "': " + message;
+ } else {
+ return message;
+ }
+ }
+
+ private String addQuoteSuggestion(String badToken, String message) {
+ return addQuoteSuggestion(null, equalsCount > 0, badToken, message);
+ }
+
+ private String addQuoteSuggestion(Path lastPath, boolean insideEquals, String badToken,
+ String message) {
+ String previousFieldName = previousFieldName(lastPath);
+
+ String part;
+ if (badToken.equals(Tokens.END.toString())) {
+ // EOF requires special handling for the error to make sense.
+ if (previousFieldName != null)
+ part = message + " (if you intended '" + previousFieldName
+ + "' to be part of a value, instead of a key, "
+ + "try adding double quotes around the whole value";
+ else
+ return message;
+ } else {
+ if (previousFieldName != null) {
+ part = message + " (if you intended " + badToken
+ + " to be part of the value for '" + previousFieldName + "', "
+ + "try enclosing the value in double quotes";
+ } else {
+ part = message + " (if you intended " + badToken
+ + " to be part of a key or string value, "
+ + "try enclosing the key or value in double quotes";
+ }
+ }
+
+ if (insideEquals)
+ return part
+ + ", or you may be able to rename the file .properties rather than .conf)";
+ else
+ return part + ")";
+ }
+
private AbstractConfigValue parseValue(Token token) {
if (Tokens.isValue(token)) {
return Tokens.getValue(token);
@@ -241,8 +317,8 @@ final class Parser {
} else if (token == Tokens.OPEN_SQUARE) {
return parseArray();
} else {
- throw parseError("Expecting a value but got wrong token: "
- + token);
+ throw parseError(addQuoteSuggestion(token.toString(),
+ "Expecting a value but got wrong token: " + token));
}
}
@@ -283,8 +359,8 @@ final class Parser {
String key = (String) Tokens.getValue(token).unwrapped();
return Path.newKey(key);
} else {
- throw parseError("Expecting close brace } or a field name, got "
- + token);
+ throw parseError(addKeyName("Expecting close brace } or a field name here, got "
+ + token));
}
} else {
List expression = new ArrayList();
@@ -293,6 +369,12 @@ final class Parser {
expression.add(t);
t = nextToken(); // note: don't cross a newline
}
+
+ if (expression.isEmpty()) {
+ throw parseError(addKeyName("expecting a close brace or a field name here, got "
+ + t));
+ }
+
putBack(t); // put back the token we ended with
return parsePathExpression(expression.iterator(), lineOrigin());
}
@@ -311,7 +393,7 @@ final class Parser {
for (int i = 0; i < s.length(); ++i) {
char c = s.charAt(i);
- if (!ConfigUtil.isWhitespace(c))
+ if (!ConfigImplUtil.isWhitespace(c))
return false;
}
return true;
@@ -362,13 +444,18 @@ final class Parser {
Map values = new HashMap();
ConfigOrigin objectOrigin = lineOrigin();
boolean afterComma = false;
+ Path lastPath = null;
+ boolean lastInsideEquals = false;
+
while (true) {
Token t = nextTokenIgnoringNewline();
if (t == Tokens.CLOSE_CURLY) {
if (flavor == ConfigSyntax.JSON && afterComma) {
- throw parseError("expecting a field name after comma, got a close brace }");
+ throw parseError(addQuoteSuggestion(t.toString(),
+ "expecting a field name after a comma, got a close brace } instead"));
} else if (!hadOpenCurly) {
- throw parseError("unbalanced close brace '}' with no open brace");
+ throw parseError(addQuoteSuggestion(t.toString(),
+ "unbalanced close brace '}' with no open brace"));
}
break;
} else if (t == Tokens.END && !hadOpenCurly) {
@@ -381,6 +468,7 @@ final class Parser {
} else {
Path path = parseKey(t);
Token afterKey = nextTokenIgnoringNewline();
+ boolean insideEquals = false;
// path must be on-stack while we parse the value
pathStack.push(path);
@@ -394,8 +482,14 @@ final class Parser {
newValue = parseObject(true);
} else {
if (!isKeyValueSeparatorToken(afterKey)) {
- throw parseError("Key may not be followed by token: "
- + afterKey);
+ throw parseError(addQuoteSuggestion(afterKey.toString(),
+ "Key '" + path.render() + "' may not be followed by token: "
+ + afterKey));
+ }
+
+ if (afterKey == Tokens.EQUALS) {
+ insideEquals = true;
+ equalsCount += 1;
}
consolidateValueTokens();
@@ -403,7 +497,11 @@ final class Parser {
newValue = parseValue(valueToken);
}
- pathStack.pop();
+ lastPath = pathStack.pop();
+ if (insideEquals) {
+ equalsCount -= 1;
+ }
+ lastInsideEquals = insideEquals;
String key = path.first();
Path remaining = path.remainder();
@@ -451,25 +549,25 @@ final class Parser {
t = nextTokenIgnoringNewline();
if (t == Tokens.CLOSE_CURLY) {
if (!hadOpenCurly) {
- throw parseError("unbalanced close brace '}' with no open brace");
+ throw parseError(addQuoteSuggestion(lastPath, lastInsideEquals,
+ t.toString(), "unbalanced close brace '}' with no open brace"));
}
break;
} else if (hadOpenCurly) {
- throw parseError("Expecting close brace } or a comma, got "
- + t);
+ throw parseError(addQuoteSuggestion(lastPath, lastInsideEquals,
+ t.toString(), "Expecting close brace } or a comma, got " + t));
} else {
if (t == Tokens.END) {
putBack(t);
break;
} else {
- throw parseError("Expecting end of input or a comma, got "
- + t);
+ throw parseError(addQuoteSuggestion(lastPath, lastInsideEquals,
+ t.toString(), "Expecting end of input or a comma, got " + t));
}
}
}
}
- return new SimpleConfigObject(objectOrigin,
- values);
+ return new SimpleConfigObject(objectOrigin, values);
}
private SimpleConfigList parseArray() {
@@ -492,8 +590,11 @@ final class Parser {
} else if (t == Tokens.OPEN_SQUARE) {
values.add(parseArray());
} else {
- throw parseError("List should have ] or a first element after the open [, instead had token: "
- + t);
+ throw parseError(addKeyName("List should have ] or a first element after the open [, instead had token: "
+ + t
+ + " (if you want "
+ + t
+ + " to be part of a string value, then double-quote it)"));
}
// now remaining elements
@@ -506,8 +607,11 @@ final class Parser {
if (t == Tokens.CLOSE_SQUARE) {
return new SimpleConfigList(arrayOrigin, values);
} else {
- throw parseError("List should have ended with ] or had a comma, instead had token: "
- + t);
+ throw parseError(addKeyName("List should have ended with ] or had a comma, instead had token: "
+ + t
+ + " (if you want "
+ + t
+ + " to be part of a string value, then double-quote it)"));
}
}
@@ -526,8 +630,11 @@ final class Parser {
// we allow one trailing comma
putBack(t);
} else {
- throw parseError("List should have had new element after a comma, instead had token: "
- + t);
+ throw parseError(addKeyName("List should have had new element after a comma, instead had token: "
+ + t
+ + " (if you want the comma or "
+ + t
+ + " to be part of a string value, then double-quote it)"));
}
}
}
@@ -659,9 +766,12 @@ final class Parser {
} else if (Tokens.isUnquotedText(t)) {
text = Tokens.getUnquotedText(t);
} else {
- throw new ConfigException.BadPath(origin, originalText,
+ throw new ConfigException.BadPath(
+ origin,
+ originalText,
"Token not allowed in path expression: "
- + t);
+ + t
+ + " (you can double-quote this token if you really want it here)");
}
addPathText(buf, false, text);
@@ -728,7 +838,7 @@ final class Parser {
// do something much faster than the full parser if
// we just have something like "foo" or "foo.bar"
private static Path speculativeFastParsePath(String path) {
- String s = ConfigUtil.unicodeTrim(path);
+ String s = ConfigImplUtil.unicodeTrim(path);
if (s.isEmpty())
return null;
if (hasUnsafeChars(s))
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Path.java b/akka-actor/src/main/java/com/typesafe/config/impl/Path.java
index f19552c890..193d930002 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/Path.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/Path.java
@@ -125,7 +125,7 @@ final class Path {
if (other instanceof Path) {
Path that = (Path) other;
return this.first.equals(that.first)
- && ConfigUtil.equalsHandlingNull(this.remainder,
+ && ConfigImplUtil.equalsHandlingNull(this.remainder,
that.remainder);
} else {
return false;
@@ -167,7 +167,7 @@ final class Path {
private void appendToStringBuilder(StringBuilder sb) {
if (hasFunkyChars(first) || first.isEmpty())
- sb.append(ConfigUtil.renderJsonString(first));
+ sb.append(ConfigImplUtil.renderJsonString(first));
else
sb.append(first);
if (remainder != null) {
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfig.java b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfig.java
index 127a98a05b..17979ba6cc 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfig.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfig.java
@@ -3,10 +3,13 @@
*/
package com.typesafe.config.impl;
+import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.concurrent.TimeUnit;
import com.typesafe.config.Config;
@@ -20,12 +23,10 @@ import com.typesafe.config.ConfigValue;
import com.typesafe.config.ConfigValueType;
/**
- * One thing to keep in mind in the future: if any Collection-like APIs are
- * added here, including iterators or size() or anything, then we'd have to
- * grapple with whether ConfigNull values are "in" the Config (probably not) and
- * we'd probably want to make the collection look flat - not like a tree. So the
- * key-value pairs would be all the tree's leaf values, in a big flat list with
- * their full paths.
+ * One thing to keep in mind in the future: as Collection-like APIs are added
+ * here, including iterators or size() or anything, they should be consistent
+ * with a one-level java.util.Map from paths to non-null values. Null values are
+ * not "in" the map.
*/
final class SimpleConfig implements Config, MergeableValue {
@@ -73,6 +74,31 @@ final class SimpleConfig implements Config, MergeableValue {
return object.isEmpty();
}
+ private static void findPaths(Set> entries, Path parent,
+ AbstractConfigObject obj) {
+ for (Map.Entry entry : obj.entrySet()) {
+ String elem = entry.getKey();
+ ConfigValue v = entry.getValue();
+ Path path = Path.newKey(elem);
+ if (parent != null)
+ path = path.prepend(parent);
+ if (v instanceof AbstractConfigObject) {
+ findPaths(entries, path, (AbstractConfigObject) v);
+ } else if (v instanceof ConfigNull) {
+ // nothing; nulls are conceptually not in a Config
+ } else {
+ entries.add(new AbstractMap.SimpleImmutableEntry(path.render(), v));
+ }
+ }
+ }
+
+ @Override
+ public Set> entrySet() {
+ Set> entries = new HashSet>();
+ findPaths(entries, null, object);
+ return entries;
+ }
+
static private AbstractConfigValue find(AbstractConfigObject self,
String pathExpression, ConfigValueType expected, String originalPath) {
Path path = Path.newPath(pathExpression);
@@ -440,10 +466,10 @@ final class SimpleConfig implements Config, MergeableValue {
*/
public static long parseDuration(String input,
ConfigOrigin originForException, String pathForException) {
- String s = ConfigUtil.unicodeTrim(input);
+ String s = ConfigImplUtil.unicodeTrim(input);
String originalUnitString = getUnits(s);
String unitString = originalUnitString;
- String numberString = ConfigUtil.unicodeTrim(s.substring(0, s.length()
+ String numberString = ConfigImplUtil.unicodeTrim(s.substring(0, s.length()
- unitString.length()));
TimeUnit units = null;
@@ -592,9 +618,9 @@ final class SimpleConfig implements Config, MergeableValue {
*/
public static long parseBytes(String input, ConfigOrigin originForException,
String pathForException) {
- String s = ConfigUtil.unicodeTrim(input);
+ String s = ConfigImplUtil.unicodeTrim(input);
String unitString = getUnits(s);
- String numberString = ConfigUtil.unicodeTrim(s.substring(0,
+ String numberString = ConfigImplUtil.unicodeTrim(s.substring(0,
s.length() - unitString.length()));
// this would be caught later anyway, but the error message
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigOrigin.java b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigOrigin.java
index 1ae914c0e4..01d5b6070b 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigOrigin.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/SimpleConfigOrigin.java
@@ -97,7 +97,7 @@ final class SimpleConfigOrigin implements ConfigOrigin {
&& this.lineNumber == otherOrigin.lineNumber
&& this.endLineNumber == otherOrigin.endLineNumber
&& this.originType == otherOrigin.originType
- && ConfigUtil.equalsHandlingNull(this.urlOrNull, otherOrigin.urlOrNull);
+ && ConfigImplUtil.equalsHandlingNull(this.urlOrNull, otherOrigin.urlOrNull);
} else {
return false;
}
@@ -227,7 +227,7 @@ final class SimpleConfigOrigin implements ConfigOrigin {
}
String mergedURL;
- if (ConfigUtil.equalsHandlingNull(a.urlOrNull, b.urlOrNull)) {
+ if (ConfigImplUtil.equalsHandlingNull(a.urlOrNull, b.urlOrNull)) {
mergedURL = a.urlOrNull;
} else {
mergedURL = null;
@@ -252,7 +252,7 @@ final class SimpleConfigOrigin implements ConfigOrigin {
count += 1;
if (a.endLineNumber == b.endLineNumber)
count += 1;
- if (ConfigUtil.equalsHandlingNull(a.urlOrNull, b.urlOrNull))
+ if (ConfigImplUtil.equalsHandlingNull(a.urlOrNull, b.urlOrNull))
count += 1;
}
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Token.java b/akka-actor/src/main/java/com/typesafe/config/impl/Token.java
index 7c888c748e..afff3247d6 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/Token.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/Token.java
@@ -3,20 +3,57 @@
*/
package com.typesafe.config.impl;
+import com.typesafe.config.ConfigException;
+import com.typesafe.config.ConfigOrigin;
+
class Token {
final private TokenType tokenType;
+ final private String debugString;
+ final private ConfigOrigin origin;
- Token(TokenType tokenType) {
- this.tokenType = tokenType;
+ Token(TokenType tokenType, ConfigOrigin origin) {
+ this(tokenType, origin, null);
}
- public TokenType tokenType() {
+ Token(TokenType tokenType, ConfigOrigin origin, String debugString) {
+ this.tokenType = tokenType;
+ this.origin = origin;
+ this.debugString = debugString;
+ }
+
+ // this is used for singleton tokens like COMMA or OPEN_CURLY
+ static Token newWithoutOrigin(TokenType tokenType, String debugString) {
+ return new Token(tokenType, null, debugString);
+ }
+
+ final TokenType tokenType() {
return tokenType;
}
+ // this is final because we don't always use the origin() accessor,
+ // and we don't because it throws if origin is null
+ final ConfigOrigin origin() {
+ // code is only supposed to call origin() on token types that are
+ // expected to have an origin.
+ if (origin == null)
+ throw new ConfigException.BugOrBroken(
+ "tried to get origin from token that doesn't have one: " + this);
+ return origin;
+ }
+
+ final int lineNumber() {
+ if (origin != null)
+ return origin.lineNumber();
+ else
+ return -1;
+ }
+
@Override
public String toString() {
- return tokenType.name();
+ if (debugString != null)
+ return debugString;
+ else
+ return tokenType.name();
}
protected boolean canEqual(Object other) {
@@ -26,6 +63,7 @@ class Token {
@Override
public boolean equals(Object other) {
if (other instanceof Token) {
+ // origin is deliberately left out
return canEqual(other)
&& this.tokenType == ((Token) other).tokenType;
} else {
@@ -35,6 +73,7 @@ class Token {
@Override
public int hashCode() {
+ // origin is deliberately left out
return tokenType.hashCode();
}
}
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/TokenType.java b/akka-actor/src/main/java/com/typesafe/config/impl/TokenType.java
index 19b6a106a9..ace12fa70b 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/TokenType.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/TokenType.java
@@ -4,5 +4,18 @@
package com.typesafe.config.impl;
enum TokenType {
- START, END, COMMA, EQUALS, COLON, OPEN_CURLY, CLOSE_CURLY, OPEN_SQUARE, CLOSE_SQUARE, VALUE, NEWLINE, UNQUOTED_TEXT, SUBSTITUTION;
+ START,
+ END,
+ COMMA,
+ EQUALS,
+ COLON,
+ OPEN_CURLY,
+ CLOSE_CURLY,
+ OPEN_SQUARE,
+ CLOSE_SQUARE,
+ VALUE,
+ NEWLINE,
+ UNQUOTED_TEXT,
+ SUBSTITUTION,
+ PROBLEM;
}
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Tokenizer.java b/akka-actor/src/main/java/com/typesafe/config/impl/Tokenizer.java
index 4965b2a619..2aeb7184bc 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/Tokenizer.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/Tokenizer.java
@@ -16,6 +16,34 @@ import com.typesafe.config.ConfigOrigin;
import com.typesafe.config.ConfigSyntax;
final class Tokenizer {
+ // this exception should not leave this file
+ private static class ProblemException extends Exception {
+ private static final long serialVersionUID = 1L;
+
+ final private Token problem;
+
+ ProblemException(Token problem) {
+ this.problem = problem;
+ }
+
+ Token problem() {
+ return problem;
+ }
+ }
+
+ private static String asString(int codepoint) {
+ if (codepoint == '\n')
+ return "newline";
+ else if (codepoint == '\t')
+ return "tab";
+ else if (codepoint == -1)
+ return "end of file";
+ else if (Character.isISOControl(codepoint))
+ return String.format("control character 0x%x", codepoint);
+ else
+ return String.format("%c", codepoint);
+ }
+
/**
* Tokenizes a Reader. Does not close the reader; you have to arrange to do
* that after you're done with the returned iterator.
@@ -85,20 +113,22 @@ final class Tokenizer {
}
}
- final private ConfigOrigin origin;
+ final private SimpleConfigOrigin origin;
final private Reader input;
final private LinkedList buffer;
private int lineNumber;
+ private ConfigOrigin lineOrigin;
final private Queue tokens;
final private WhitespaceSaver whitespaceSaver;
final private boolean allowComments;
TokenIterator(ConfigOrigin origin, Reader input, boolean allowComments) {
- this.origin = origin;
+ this.origin = (SimpleConfigOrigin) origin;
this.input = input;
this.allowComments = allowComments;
this.buffer = new LinkedList();
lineNumber = 1;
+ lineOrigin = this.origin.setLineNumber(lineNumber);
tokens = new LinkedList();
tokens.add(Tokens.START);
whitespaceSaver = new WhitespaceSaver();
@@ -131,11 +161,11 @@ final class Tokenizer {
}
static boolean isWhitespace(int c) {
- return ConfigUtil.isWhitespace(c);
+ return ConfigImplUtil.isWhitespace(c);
}
static boolean isWhitespaceNotNewline(int c) {
- return c != '\n' && ConfigUtil.isWhitespace(c);
+ return c != '\n' && ConfigImplUtil.isWhitespace(c);
}
private int slurpComment() {
@@ -194,27 +224,44 @@ final class Tokenizer {
}
}
- private ConfigException parseError(String message) {
- return parseError(message, null);
+ private ProblemException problem(String message) {
+ return problem("", message, null);
}
- private ConfigException parseError(String message, Throwable cause) {
- return parseError(lineOrigin(), message, cause);
+ private ProblemException problem(String what, String message) {
+ return problem(what, message, null);
}
- private static ConfigException parseError(ConfigOrigin origin,
+ private ProblemException problem(String what, String message, boolean suggestQuotes) {
+ return problem(what, message, suggestQuotes, null);
+ }
+
+ private ProblemException problem(String what, String message, Throwable cause) {
+ return problem(lineOrigin, what, message, cause);
+ }
+
+ private ProblemException problem(String what, String message, boolean suggestQuotes,
+ Throwable cause) {
+ return problem(lineOrigin, what, message, suggestQuotes, cause);
+ }
+
+ private static ProblemException problem(ConfigOrigin origin, String what,
String message,
Throwable cause) {
- return new ConfigException.Parse(origin, message, cause);
+ return problem(origin, what, message, false, cause);
}
- private static ConfigException parseError(ConfigOrigin origin,
- String message) {
- return parseError(origin, message, null);
+ private static ProblemException problem(ConfigOrigin origin, String what, String message,
+ boolean suggestQuotes, Throwable cause) {
+ if (what == null || message == null)
+ throw new ConfigException.BugOrBroken(
+ "internal error, creating bad ProblemException");
+ return new ProblemException(Tokens.newProblem(origin, what, message, suggestQuotes,
+ cause));
}
- private ConfigOrigin lineOrigin() {
- return lineOrigin(origin, lineNumber);
+ private static ProblemException problem(ConfigOrigin origin, String message) {
+ return problem(origin, "", message, null);
}
private static ConfigOrigin lineOrigin(ConfigOrigin baseOrigin,
@@ -234,7 +281,7 @@ final class Tokenizer {
// that parses as JSON is treated the JSON way and otherwise
// we assume it's a string and let the parser sort it out.
private Token pullUnquotedText() {
- ConfigOrigin origin = lineOrigin();
+ ConfigOrigin origin = lineOrigin;
StringBuilder sb = new StringBuilder();
int c = nextCharSkippingComments();
while (true) {
@@ -273,7 +320,7 @@ final class Tokenizer {
return Tokens.newUnquotedText(origin, s);
}
- private Token pullNumber(int firstChar) {
+ private Token pullNumber(int firstChar) throws ProblemException {
StringBuilder sb = new StringBuilder();
sb.appendCodePoint(firstChar);
boolean containedDecimalOrE = false;
@@ -291,23 +338,20 @@ final class Tokenizer {
try {
if (containedDecimalOrE) {
// force floating point representation
- return Tokens.newDouble(lineOrigin(),
- Double.parseDouble(s), s);
+ return Tokens.newDouble(lineOrigin, Double.parseDouble(s), s);
} else {
// this should throw if the integer is too large for Long
- return Tokens.newLong(lineOrigin(), Long.parseLong(s), s);
+ return Tokens.newLong(lineOrigin, Long.parseLong(s), s);
}
} catch (NumberFormatException e) {
- throw parseError("Invalid number: '" + s
- + "' (if this is in a path, try quoting it with double quotes)",
- e);
+ throw problem(s, "Invalid number: '" + s + "'", true /* suggestQuotes */, e);
}
}
- private void pullEscapeSequence(StringBuilder sb) {
+ private void pullEscapeSequence(StringBuilder sb) throws ProblemException {
int escaped = nextCharRaw();
if (escaped == -1)
- throw parseError("End of input but backslash in string had nothing after it");
+ throw problem("End of input but backslash in string had nothing after it");
switch (escaped) {
case '"':
@@ -340,67 +384,57 @@ final class Tokenizer {
for (int i = 0; i < 4; ++i) {
int c = nextCharSkippingComments();
if (c == -1)
- throw parseError("End of input but expecting 4 hex digits for \\uXXXX escape");
+ throw problem("End of input but expecting 4 hex digits for \\uXXXX escape");
a[i] = (char) c;
}
String digits = new String(a);
try {
sb.appendCodePoint(Integer.parseInt(digits, 16));
} catch (NumberFormatException e) {
- throw parseError(
- String.format(
- "Malformed hex digits after \\u escape in string: '%s'",
- digits), e);
+ throw problem(digits, String.format(
+ "Malformed hex digits after \\u escape in string: '%s'", digits), e);
}
}
break;
default:
- throw parseError(String
- .format("backslash followed by '%c', this is not a valid escape sequence",
- escaped));
+ throw problem(
+ asString(escaped),
+ String.format(
+ "backslash followed by '%s', this is not a valid escape sequence (quoted strings use JSON escaping, so use double-backslash \\\\ for literal backslash)",
+ asString(escaped)));
}
}
- private ConfigException controlCharacterError(int c) {
- String asString;
- if (c == '\n')
- asString = "newline";
- else if (c == '\t')
- asString = "tab";
- else
- asString = String.format("control character 0x%x", c);
- return parseError("JSON does not allow unescaped " + asString
- + " in quoted strings, use a backslash escape");
- }
-
- private Token pullQuotedString() {
+ private Token pullQuotedString() throws ProblemException {
// the open quote has already been consumed
StringBuilder sb = new StringBuilder();
int c = '\0'; // value doesn't get used
do {
c = nextCharRaw();
if (c == -1)
- throw parseError("End of input but string quote was still open");
+ throw problem("End of input but string quote was still open");
if (c == '\\') {
pullEscapeSequence(sb);
} else if (c == '"') {
// end the loop, done!
} else if (Character.isISOControl(c)) {
- throw controlCharacterError(c);
+ throw problem(asString(c), "JSON does not allow unescaped " + asString(c)
+ + " in quoted strings, use a backslash escape");
} else {
sb.appendCodePoint(c);
}
} while (c != '"');
- return Tokens.newString(lineOrigin(), sb.toString());
+ return Tokens.newString(lineOrigin, sb.toString());
}
- private Token pullSubstitution() {
+ private Token pullSubstitution() throws ProblemException {
// the initial '$' has already been consumed
- ConfigOrigin origin = lineOrigin();
+ ConfigOrigin origin = lineOrigin;
int c = nextCharSkippingComments();
if (c != '{') {
- throw parseError("'$' not followed by {");
+ throw problem(asString(c), "'$' not followed by {, '" + asString(c)
+ + "' not allowed after '$'", true /* suggestQuotes */);
}
boolean optional = false;
@@ -425,7 +459,7 @@ final class Tokenizer {
// end the loop, done!
break;
} else if (t == Tokens.END) {
- throw parseError(origin,
+ throw problem(origin,
"Substitution ${ was not closed with a }");
} else {
Token whitespace = saver.check(t, origin, lineNumber);
@@ -438,14 +472,16 @@ final class Tokenizer {
return Tokens.newSubstitution(origin, optional, expression);
}
- private Token pullNextToken(WhitespaceSaver saver) {
+ private Token pullNextToken(WhitespaceSaver saver) throws ProblemException {
int c = nextCharAfterWhitespace(saver);
if (c == -1) {
return Tokens.END;
} else if (c == '\n') {
// newline tokens have the just-ended line number
+ Token line = Tokens.newLine(lineOrigin);
lineNumber += 1;
- return Tokens.newLine(lineNumber - 1);
+ lineOrigin = origin.setLineNumber(lineNumber);
+ return line;
} else {
Token t = null;
switch (c) {
@@ -482,9 +518,8 @@ final class Tokenizer {
if (firstNumberChars.indexOf(c) >= 0) {
t = pullNumber(c);
} else if (notInUnquotedText.indexOf(c) >= 0) {
- throw parseError(String
- .format("Character '%c' is not the start of any valid token",
- c));
+ throw problem(asString(c), "Reserved character '" + asString(c)
+ + "' is not allowed outside quotes", true /* suggestQuotes */);
} else {
putBack(c);
t = pullUnquotedText();
@@ -508,7 +543,7 @@ final class Tokenizer {
}
}
- private void queueNextToken() {
+ private void queueNextToken() throws ProblemException {
Token t = pullNextToken(whitespaceSaver);
Token whitespace = whitespaceSaver.check(t, origin, lineNumber);
if (whitespace != null)
@@ -525,7 +560,11 @@ final class Tokenizer {
public Token next() {
Token t = tokens.remove();
if (tokens.isEmpty() && t != Tokens.END) {
- queueNextToken();
+ try {
+ queueNextToken();
+ } catch (ProblemException e) {
+ tokens.add(e.problem());
+ }
if (tokens.isEmpty())
throw new ConfigException.BugOrBroken(
"bug: tokens queue should not be empty here");
diff --git a/akka-actor/src/main/java/com/typesafe/config/impl/Tokens.java b/akka-actor/src/main/java/com/typesafe/config/impl/Tokens.java
index f36527d738..9f7bd42e7c 100644
--- a/akka-actor/src/main/java/com/typesafe/config/impl/Tokens.java
+++ b/akka-actor/src/main/java/com/typesafe/config/impl/Tokens.java
@@ -9,13 +9,14 @@ import com.typesafe.config.ConfigException;
import com.typesafe.config.ConfigOrigin;
import com.typesafe.config.ConfigValueType;
+/* FIXME the way the subclasses of Token are private with static isFoo and accessors is kind of ridiculous. */
final class Tokens {
static private class Value extends Token {
final private AbstractConfigValue value;
Value(AbstractConfigValue value) {
- super(TokenType.VALUE);
+ super(TokenType.VALUE, value.origin());
this.value = value;
}
@@ -25,10 +26,7 @@ final class Tokens {
@Override
public String toString() {
- String s = tokenType().name() + "(" + value.valueType().name()
- + ")";
-
- return s + "='" + value().unwrapped() + "'";
+ return "'" + value().unwrapped() + "' (" + value.valueType().name() + ")";
}
@Override
@@ -48,20 +46,13 @@ final class Tokens {
}
static private class Line extends Token {
- final private int lineNumber;
-
- Line(int lineNumber) {
- super(TokenType.NEWLINE);
- this.lineNumber = lineNumber;
- }
-
- int lineNumber() {
- return lineNumber;
+ Line(ConfigOrigin origin) {
+ super(TokenType.NEWLINE, origin);
}
@Override
public String toString() {
- return "NEWLINE@" + lineNumber;
+ return "'\n'@" + lineNumber();
}
@Override
@@ -71,38 +62,31 @@ final class Tokens {
@Override
public boolean equals(Object other) {
- return super.equals(other)
- && ((Line) other).lineNumber == lineNumber;
+ return super.equals(other) && ((Line) other).lineNumber() == lineNumber();
}
@Override
public int hashCode() {
- return 41 * (41 + super.hashCode()) + lineNumber;
+ return 41 * (41 + super.hashCode()) + lineNumber();
}
}
// This is not a Value, because it requires special processing
static private class UnquotedText extends Token {
- final private ConfigOrigin origin;
final private String value;
UnquotedText(ConfigOrigin origin, String s) {
- super(TokenType.UNQUOTED_TEXT);
- this.origin = origin;
+ super(TokenType.UNQUOTED_TEXT, origin);
this.value = s;
}
- ConfigOrigin origin() {
- return origin;
- }
-
String value() {
return value;
}
@Override
public String toString() {
- return tokenType().name() + "(" + value + ")";
+ return "'" + value + "'";
}
@Override
@@ -122,23 +106,78 @@ final class Tokens {
}
}
+ static private class Problem extends Token {
+ final private String what;
+ final private String message;
+ final private boolean suggestQuotes;
+ final private Throwable cause;
+
+ Problem(ConfigOrigin origin, String what, String message, boolean suggestQuotes,
+ Throwable cause) {
+ super(TokenType.PROBLEM, origin);
+ this.what = what;
+ this.message = message;
+ this.suggestQuotes = suggestQuotes;
+ this.cause = cause;
+ }
+
+ String message() {
+ return message;
+ }
+
+ boolean suggestQuotes() {
+ return suggestQuotes;
+ }
+
+ Throwable cause() {
+ return cause;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append('\'');
+ sb.append(what);
+ sb.append('\'');
+ return sb.toString();
+ }
+
+ @Override
+ protected boolean canEqual(Object other) {
+ return other instanceof Problem;
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ return super.equals(other) && ((Problem) other).what.equals(what)
+ && ((Problem) other).message.equals(message)
+ && ((Problem) other).suggestQuotes == suggestQuotes
+ && ConfigImplUtil.equalsHandlingNull(((Problem) other).cause, cause);
+ }
+
+ @Override
+ public int hashCode() {
+ int h = 41 * (41 + super.hashCode());
+ h = 41 * (h + what.hashCode());
+ h = 41 * (h + message.hashCode());
+ h = 41 * (h + Boolean.valueOf(suggestQuotes).hashCode());
+ if (cause != null)
+ h = 41 * (h + cause.hashCode());
+ return h;
+ }
+ }
+
// This is not a Value, because it requires special processing
static private class Substitution extends Token {
- final private ConfigOrigin origin;
final private boolean optional;
final private List value;
Substitution(ConfigOrigin origin, boolean optional, List expression) {
- super(TokenType.SUBSTITUTION);
- this.origin = origin;
+ super(TokenType.SUBSTITUTION, origin);
this.optional = optional;
this.value = expression;
}
- ConfigOrigin origin() {
- return origin;
- }
-
boolean optional() {
return optional;
}
@@ -149,7 +188,11 @@ final class Tokens {
@Override
public String toString() {
- return tokenType().name() + "(" + value.toString() + ")";
+ StringBuilder sb = new StringBuilder();
+ for (Token t : value) {
+ sb.append(t.toString());
+ }
+ return "'${" + sb.toString() + "}'";
}
@Override
@@ -190,12 +233,32 @@ final class Tokens {
return token instanceof Line;
}
- static int getLineNumber(Token token) {
- if (token instanceof Line) {
- return ((Line) token).lineNumber();
+ static boolean isProblem(Token token) {
+ return token instanceof Problem;
+ }
+
+ static String getProblemMessage(Token token) {
+ if (token instanceof Problem) {
+ return ((Problem) token).message();
} else {
- throw new ConfigException.BugOrBroken(
- "tried to get line number from non-newline " + token);
+ throw new ConfigException.BugOrBroken("tried to get problem message from " + token);
+ }
+ }
+
+ static boolean getProblemSuggestQuotes(Token token) {
+ if (token instanceof Problem) {
+ return ((Problem) token).suggestQuotes();
+ } else {
+ throw new ConfigException.BugOrBroken("tried to get problem suggestQuotes from "
+ + token);
+ }
+ }
+
+ static Throwable getProblemCause(Token token) {
+ if (token instanceof Problem) {
+ return ((Problem) token).cause();
+ } else {
+ throw new ConfigException.BugOrBroken("tried to get problem cause from " + token);
}
}
@@ -212,15 +275,6 @@ final class Tokens {
}
}
- static ConfigOrigin getUnquotedTextOrigin(Token token) {
- if (token instanceof UnquotedText) {
- return ((UnquotedText) token).origin();
- } else {
- throw new ConfigException.BugOrBroken(
- "tried to get unquoted text from " + token);
- }
- }
-
static boolean isSubstitution(Token token) {
return token instanceof Substitution;
}
@@ -234,15 +288,6 @@ final class Tokens {
}
}
- static ConfigOrigin getSubstitutionOrigin(Token token) {
- if (token instanceof Substitution) {
- return ((Substitution) token).origin();
- } else {
- throw new ConfigException.BugOrBroken(
- "tried to get substitution origin from " + token);
- }
- }
-
static boolean getSubstitutionOptional(Token token) {
if (token instanceof Substitution) {
return ((Substitution) token).optional();
@@ -252,18 +297,23 @@ final class Tokens {
}
}
- final static Token START = new Token(TokenType.START);
- final static Token END = new Token(TokenType.END);
- final static Token COMMA = new Token(TokenType.COMMA);
- final static Token EQUALS = new Token(TokenType.EQUALS);
- final static Token COLON = new Token(TokenType.COLON);
- final static Token OPEN_CURLY = new Token(TokenType.OPEN_CURLY);
- final static Token CLOSE_CURLY = new Token(TokenType.CLOSE_CURLY);
- final static Token OPEN_SQUARE = new Token(TokenType.OPEN_SQUARE);
- final static Token CLOSE_SQUARE = new Token(TokenType.CLOSE_SQUARE);
+ final static Token START = Token.newWithoutOrigin(TokenType.START, "start of file");
+ final static Token END = Token.newWithoutOrigin(TokenType.END, "end of file");
+ final static Token COMMA = Token.newWithoutOrigin(TokenType.COMMA, "','");
+ final static Token EQUALS = Token.newWithoutOrigin(TokenType.EQUALS, "'='");
+ final static Token COLON = Token.newWithoutOrigin(TokenType.COLON, "':'");
+ final static Token OPEN_CURLY = Token.newWithoutOrigin(TokenType.OPEN_CURLY, "'{'");
+ final static Token CLOSE_CURLY = Token.newWithoutOrigin(TokenType.CLOSE_CURLY, "'}'");
+ final static Token OPEN_SQUARE = Token.newWithoutOrigin(TokenType.OPEN_SQUARE, "'['");
+ final static Token CLOSE_SQUARE = Token.newWithoutOrigin(TokenType.CLOSE_SQUARE, "']'");
- static Token newLine(int lineNumberJustEnded) {
- return new Line(lineNumberJustEnded);
+ static Token newLine(ConfigOrigin origin) {
+ return new Line(origin);
+ }
+
+ static Token newProblem(ConfigOrigin origin, String what, String message,
+ boolean suggestQuotes, Throwable cause) {
+ return new Problem(origin, what, message, suggestQuotes, cause);
}
static Token newUnquotedText(ConfigOrigin origin, String s) {
diff --git a/akka-actor/src/main/java/org/jboss/netty/akka/util/HashedWheelTimer.java b/akka-actor/src/main/java/org/jboss/netty/akka/util/HashedWheelTimer.java
index d0112ded79..6e54fa2233 100644
--- a/akka-actor/src/main/java/org/jboss/netty/akka/util/HashedWheelTimer.java
+++ b/akka-actor/src/main/java/org/jboss/netty/akka/util/HashedWheelTimer.java
@@ -482,10 +482,10 @@ public class HashedWheelTimer implements Timer {
buf.append("deadline: ");
if (remaining > 0) {
buf.append(remaining);
- buf.append(" ms later, ");
+ buf.append(" ns later, ");
} else if (remaining < 0) {
buf.append(-remaining);
- buf.append(" ms ago, ");
+ buf.append(" ns ago, ");
} else {
buf.append("now, ");
}
diff --git a/akka-actor/src/main/resources/reference.conf b/akka-actor/src/main/resources/reference.conf
index 4c58a85b23..ecbf916a9a 100644
--- a/akka-actor/src/main/resources/reference.conf
+++ b/akka-actor/src/main/resources/reference.conf
@@ -79,7 +79,7 @@ akka {
type = "Dispatcher" # Must be one of the following
# Dispatcher, (BalancingDispatcher, only valid when all actors using it are of the same type),
# A FQCN to a class inheriting MessageDispatcherConfigurator with a no-arg visible constructor
- name = "DefaultDispatcher" # Optional, will be a generated UUID if omitted
+ name = "DefaultDispatcher" # Name used in log messages and thread names.
keep-alive-time = 60s # Keep alive time for threads
core-pool-size-min = 8 # minimum number of threads to cap factor-based core number to
core-pool-size-factor = 8.0 # No of core threads ... ceil(available processors * factor)
@@ -91,7 +91,8 @@ akka {
task-queue-size = -1 # Specifies the bounded capacity of the task queue (< 1 == unbounded)
task-queue-type = "linked" # Specifies which type of task queue will be used, can be "array" or "linked" (default)
allow-core-timeout = on # Allow core threads to time out
- throughput = 5 # Throughput for Dispatcher, set to 1 for complete fairness
+ throughput = 5 # Throughput defines the number of messages that are processed in a batch before the
+ # thread is returned to the pool. Set to 1 for as fair as possible.
throughput-deadline-time = 0ms # Throughput deadline for Dispatcher, set to 0 or negative for no deadline
mailbox-capacity = -1 # If negative (or zero) then an unbounded mailbox is used (default)
# If positive then a bounded mailbox is used and the capacity is set using the property
diff --git a/akka-actor/src/main/scala/akka/AkkaException.scala b/akka-actor/src/main/scala/akka/AkkaException.scala
index 0dc3a81728..b8d83abf7a 100644
--- a/akka-actor/src/main/scala/akka/AkkaException.scala
+++ b/akka-actor/src/main/scala/akka/AkkaException.scala
@@ -14,8 +14,6 @@ import java.net.{ InetAddress, UnknownHostException }
*
toString that includes exception name, message and uuid
*
toLongString which also includes the stack trace
*
- *
- * @author Jonas Bonér
*/
class AkkaException(message: String = "", cause: Throwable = null) extends RuntimeException(message, cause) with Serializable {
val uuid = "%s_%s".format(AkkaException.hostname, newUuid)
diff --git a/akka-actor/src/main/scala/akka/actor/Actor.scala b/akka-actor/src/main/scala/akka/actor/Actor.scala
index caa813be3f..ffb941408a 100644
--- a/akka-actor/src/main/scala/akka/actor/Actor.scala
+++ b/akka-actor/src/main/scala/akka/actor/Actor.scala
@@ -28,29 +28,8 @@ trait AutoReceivedMessage extends Serializable
trait PossiblyHarmful
-case class HotSwap(code: ActorContext ⇒ Actor.Receive, discardOld: Boolean = true) extends AutoReceivedMessage {
-
- /**
- * Java API
- */
- def this(code: akka.japi.Function[ActorContext, Procedure[Any]], discardOld: Boolean) = {
- this((context: ActorContext) ⇒ {
- val behavior = code(context)
- val result: Actor.Receive = { case msg ⇒ behavior(msg) }
- result
- }, discardOld)
- }
-
- /**
- * Java API with default non-stacking behavior
- */
- def this(code: akka.japi.Function[ActorContext, Procedure[Any]]) = this(code, true)
-}
-
case class Failed(cause: Throwable) extends AutoReceivedMessage with PossiblyHarmful
-case object RevertHotSwap extends AutoReceivedMessage with PossiblyHarmful
-
case object PoisonPill extends AutoReceivedMessage with PossiblyHarmful
case object Kill extends AutoReceivedMessage with PossiblyHarmful
@@ -112,7 +91,7 @@ case class ActorInterruptedException private[akka] (cause: Throwable)
/**
* This message is thrown by default when an Actors behavior doesn't match a message
*/
-case class UnhandledMessageException(msg: Any, ref: ActorRef = null) extends Exception {
+case class UnhandledMessageException(msg: Any, ref: ActorRef = null) extends RuntimeException {
def this(msg: String) = this(msg, null)
@@ -186,8 +165,6 @@ object Actor {
*
*
* The Actor's own ActorRef is available in the 'self' member variable.
- *
- * @author Jonas Bonér
*/
trait Actor {
@@ -204,18 +181,17 @@ trait Actor {
* [[akka.actor.UntypedActorContext]], which is the Java API of the actor
* context.
*/
- @transient
protected[akka] implicit val context: ActorContext = {
val contextStack = ActorCell.contextStack.get
def noContextError =
throw new ActorInitializationException(
- "\n\tYou cannot create an instance of " + getClass.getName + " explicitly using the constructor (new)." +
+ "\n\tYou cannot create an instance of [" + getClass.getName + "] explicitly using the constructor (new)." +
"\n\tYou have to use one of the factory methods to create a new actor. Either use:" +
- "\n\t\t'val actor = context.actorOf[MyActor]' (to create a supervised child actor from within an actor), or" +
- "\n\t\t'val actor = system.actorOf(new MyActor(..))' (to create a top level actor from the ActorSystem), or" +
- "\n\t\t'val actor = context.actorOf[MyActor]' (to create a supervised child actor from within an actor), or" +
- "\n\t\t'val actor = system.actorOf(new MyActor(..))' (to create a top level actor from the ActorSystem)")
+ "\n\t\t'val actor = context.actorOf(Props[MyActor])' (to create a supervised child actor from within an actor), or" +
+ "\n\t\t'val actor = system.actorOf(Props(new MyActor(..)))' (to create a top level actor from the ActorSystem), or" +
+ "\n\t\t'val actor = context.actorOf(Props[MyActor])' (to create a supervised child actor from within an actor), or" +
+ "\n\t\t'val actor = system.actorOf(Props(new MyActor(..)))' (to create a top level actor from the ActorSystem)")
if (contextStack.isEmpty) noContextError
val c = contextStack.head
@@ -267,14 +243,17 @@ trait Actor {
/**
* User overridable callback.
*
- * Is called when an Actor is started by invoking 'actor'.
+ * Is called when an Actor is started.
+ * Actors are automatically started asynchronously when created.
+ * Empty default implementation.
*/
def preStart() {}
/**
* User overridable callback.
*
- * Is called when 'actor.stop()' is invoked.
+ * Is called asynchronously after 'actor.stop()' is invoked.
+ * Empty default implementation.
*/
def postStop() {}
diff --git a/akka-actor/src/main/scala/akka/actor/ActorCell.scala b/akka-actor/src/main/scala/akka/actor/ActorCell.scala
index a5446c4e3a..c4053081cd 100644
--- a/akka-actor/src/main/scala/akka/actor/ActorCell.scala
+++ b/akka-actor/src/main/scala/akka/actor/ActorCell.scala
@@ -12,6 +12,7 @@ import java.util.concurrent.TimeUnit.MILLISECONDS
import akka.event.Logging.{ Debug, Warning, Error }
import akka.util.{ Duration, Helpers }
import akka.japi.Procedure
+import java.io.{ NotSerializableException, ObjectOutputStream }
/**
* The actor context - the view of the actor cell from the actor.
@@ -27,16 +28,16 @@ import akka.japi.Procedure
* context.actorOf(props)
*
* // Scala
- * context.actorOf[MyActor]("name")
- * context.actorOf[MyActor]
- * context.actorOf(new MyActor(...))
+ * context.actorOf(Props[MyActor], "name")
+ * context.actorOf(Props[MyActor])
+ * context.actorOf(Props(new MyActor(...)))
*
* // Java
* context.actorOf(classOf[MyActor]);
- * context.actorOf(new Creator() {
+ * context.actorOf(Props(new Creator() {
* public MyActor create() { ... }
* });
- * context.actorOf(new Creator() {
+ * context.actorOf(Props(new Creator() {
* public MyActor create() { ... }
* }, "name");
* }}}
@@ -63,7 +64,12 @@ trait ActorContext extends ActorRefFactory {
* When specified, the receive function should be able to handle a 'ReceiveTimeout' message.
* 1 millisecond is the minimum supported timeout.
*/
- def receiveTimeout_=(timeout: Option[Duration]): Unit
+ def setReceiveTimeout(timeout: Duration): Unit
+
+ /**
+ * Resets the current receive timeout.
+ */
+ def resetReceiveTimeout(): Unit
/**
* Changes the Actor's behavior to become the new 'Receive' (PartialFunction[Any, Unit]) handler.
@@ -72,19 +78,29 @@ trait ActorContext extends ActorRefFactory {
*/
def become(behavior: Actor.Receive, discardOld: Boolean = true): Unit
- def hotswap: Stack[PartialFunction[Any, Unit]]
-
/**
* Reverts the Actor behavior to the previous one in the hotswap stack.
*/
def unbecome(): Unit
+ /**
+ * Returns the current message envelope.
+ */
def currentMessage: Envelope
- def currentMessage_=(invocation: Envelope): Unit
+ /**
+ * Returns a stack with the hotswapped behaviors (as Scala PartialFunction).
+ */
+ def hotswap: Stack[PartialFunction[Any, Unit]]
+ /**
+ * Returns the sender 'ActorRef' of the current message.
+ */
def sender: ActorRef
+ /**
+ * Returns all supervised children.
+ */
def children: Iterable[ActorRef]
/**
@@ -103,41 +119,35 @@ trait ActorContext extends ActorRefFactory {
*/
implicit def system: ActorSystem
+ /**
+ * Returns the supervising parent ActorRef.
+ */
def parent: ActorRef
/**
- * Registers this actor as a Monitor for the provided ActorRef
+ * Registers this actor as a Monitor for the provided ActorRef.
* @return the provided ActorRef
*/
def watch(subject: ActorRef): ActorRef
/**
- * Unregisters this actor as Monitor for the provided ActorRef
+ * Unregisters this actor as Monitor for the provided ActorRef.
* @return the provided ActorRef
*/
def unwatch(subject: ActorRef): ActorRef
+
+ final protected def writeObject(o: ObjectOutputStream): Unit =
+ throw new NotSerializableException("ActorContext is not serializable!")
}
trait UntypedActorContext extends ActorContext {
+
/**
* Returns an unmodifiable Java Collection containing the linked actors,
* please note that the backing map is thread-safe but not immutable
*/
def getChildren(): java.lang.Iterable[ActorRef]
- /**
- * Gets the current receive timeout
- * When specified, the receive method should be able to handle a 'ReceiveTimeout' message.
- */
- def getReceiveTimeout: Option[Duration]
-
- /**
- * Defines the default timeout for an initial receive invocation.
- * When specified, the receive function should be able to handle a 'ReceiveTimeout' message.
- * 1 millisecond is the minimum supported timeout.
- */
- def setReceiveTimeout(timeout: Duration): Unit
-
/**
* Changes the Actor's behavior to become the new 'Procedure' handler.
* Puts the behavior on top of the hotswap stack.
@@ -191,7 +201,9 @@ private[akka] class ActorCell(
override final def receiveTimeout: Option[Duration] = if (receiveTimeoutData._1 > 0) Some(Duration(receiveTimeoutData._1, MILLISECONDS)) else None
- override final def receiveTimeout_=(timeout: Option[Duration]): Unit = {
+ override final def setReceiveTimeout(timeout: Duration): Unit = setReceiveTimeout(Some(timeout))
+
+ final def setReceiveTimeout(timeout: Option[Duration]): Unit = {
val timeoutMs = timeout match {
case None ⇒ -1L
case Some(duration) ⇒
@@ -204,22 +216,14 @@ private[akka] class ActorCell(
receiveTimeoutData = (timeoutMs, receiveTimeoutData._2)
}
+ final override def resetReceiveTimeout(): Unit = setReceiveTimeout(None)
+
/**
* In milliseconds
*/
final var receiveTimeoutData: (Long, Cancellable) =
if (_receiveTimeout.isDefined) (_receiveTimeout.get.toMillis, emptyCancellable) else emptyReceiveTimeoutData
- /**
- * UntypedActorContext impl
- */
- final def getReceiveTimeout: Option[Duration] = receiveTimeout
-
- /**
- * UntypedActorContext impl
- */
- final def setReceiveTimeout(timeout: Duration): Unit = receiveTimeout = Some(timeout)
-
final var childrenRefs: TreeMap[String, ChildRestartStats] = emptyChildrenRefs
private def _actorOf(props: Props, name: String): ActorRef = {
@@ -392,7 +396,7 @@ private[akka] class ActorCell(
def resume(): Unit = dispatcher resume this
def terminate() {
- receiveTimeout = None
+ setReceiveTimeout(None)
cancelReceiveTimeout
val c = children
@@ -515,23 +519,23 @@ private[akka] class ActorCell(
if (system.settings.DebugAutoReceive) system.eventStream.publish(Debug(self.path.toString, "received AutoReceiveMessage " + msg))
msg.message match {
- case HotSwap(code, discardOld) ⇒ become(code(this), discardOld)
- case RevertHotSwap ⇒ unbecome()
- case Failed(cause) ⇒ handleFailure(sender, cause)
- case Kill ⇒ throw new ActorKilledException("Kill")
- case PoisonPill ⇒ self.stop()
- case SelectParent(m) ⇒ parent.tell(m, msg.sender)
- case SelectChildName(name, m) ⇒ if (childrenRefs contains name) childrenRefs(name).child.tell(m, msg.sender)
- case SelectChildPattern(p, m) ⇒ for (c ← children if p.matcher(c.path.name).matches) c.tell(m, msg.sender)
+ case Failed(cause) ⇒ handleFailure(sender, cause)
+ case Kill ⇒ throw new ActorKilledException("Kill")
+ case PoisonPill ⇒ self.stop()
+ case SelectParent(m) ⇒ parent.tell(m, msg.sender)
+ case SelectChildName(name, m) ⇒ if (childrenRefs contains name) childrenRefs(name).child.tell(m, msg.sender)
+ case SelectChildPattern(p, m) ⇒ for (c ← children if p.matcher(c.path.name).matches) c.tell(m, msg.sender)
}
}
private def doTerminate() {
- dispatcher.detach(this)
-
try {
- val a = actor
- if (a ne null) a.postStop()
+ try {
+ val a = actor
+ if (a ne null) a.postStop()
+ } finally {
+ dispatcher.detach(this)
+ }
} finally {
try {
parent.sendSystemMessage(ChildTerminated(self))
diff --git a/akka-actor/src/main/scala/akka/actor/ActorRef.scala b/akka-actor/src/main/scala/akka/actor/ActorRef.scala
index 43936eff12..d705fb1b52 100644
--- a/akka-actor/src/main/scala/akka/actor/ActorRef.scala
+++ b/akka-actor/src/main/scala/akka/actor/ActorRef.scala
@@ -25,28 +25,26 @@ import akka.event.LoggingAdapter
*
* import Actor._
*
- * val actor = actorOf[MyActor]
+ * val actor = actorOf(Props[MyActor])
* actor ! message
* actor.stop()
*
*
* You can also create and start actors like this:
*
- * val actor = actorOf[MyActor]
+ * val actor = actorOf(Props[MyActor])
*
*
* Here is an example on how to create an actor with a non-default constructor.
*
* import Actor._
*
- * val actor = actorOf(new MyActor(...))
+ * val actor = actorOf(Props(new MyActor(...)))
* actor ! message
* actor.stop()
*
*
* The natural ordering of ActorRef is defined in terms of its [[akka.actor.ActorPath]].
- *
- * @author Jonas Bonér
*/
abstract class ActorRef extends java.lang.Comparable[ActorRef] with Serializable {
scalaRef: InternalActorRef ⇒
@@ -81,17 +79,29 @@ abstract class ActorRef extends java.lang.Comparable[ActorRef] with Serializable
final def tell(msg: Any, sender: ActorRef): Unit = this.!(msg)(sender)
/**
- * Akka Java API.
+ * Akka Java API.
+ *
* Sends a message asynchronously returns a future holding the eventual reply message.
- *
+ *
* NOTE:
- * Use this method with care. In most cases it is better to use 'tell' together with the 'getContext().getSender()' to
- * implement request/response message exchanges.
- *
- * If you are sending messages using ask then you have to use getContext().sender().tell(...)
- * to send a reply message to the original sender. If not then the sender will block until the timeout expires.
+ * Use this method with care. In most cases it is better to use 'tell' together with the sender
+ * parameter to implement non-blocking request/response message exchanges.
+ *
+ * If you are sending messages using ask and using blocking operations on the Future, such as
+ * 'get', then you have to use getContext().sender().tell(...)
+ * in the target actor to send a reply message to the original sender, and thereby completing the Future,
+ * otherwise the sender will block until the timeout expires.
+ *
+ * When using future callbacks, inside actors you need to carefully avoid closing over
+ * the containing actor’s reference, i.e. do not call methods or access mutable state
+ * on the enclosing actor from within the callback. This would break the actor
+ * encapsulation and may introduce synchronization bugs and race conditions because
+ * the callback will be scheduled concurrently to the enclosing actor. Unfortunately
+ * there is not yet a way to detect these illegal accesses at compile time.
*/
- def ask(message: AnyRef, timeout: Long): Future[AnyRef] = ?(message, Timeout(timeout)).asInstanceOf[Future[AnyRef]]
+ def ask(message: AnyRef, timeout: Timeout): Future[AnyRef] = ?(message, timeout).asInstanceOf[Future[AnyRef]]
+
+ def ask(message: AnyRef, timeoutMillis: Long): Future[AnyRef] = ask(message, new Timeout(timeoutMillis))
/**
* Forwards the message and passes the original sender actor as the sender.
@@ -146,6 +156,21 @@ trait ScalaActorRef { ref: ActorRef ⇒
/**
* Sends a message asynchronously, returning a future which may eventually hold the reply.
+ * NOTE:
+ * Use this method with care. In most cases it is better to use '!' together with implicit or explicit
+ * sender parameter to implement non-blocking request/response message exchanges.
+ *
+ * If you are sending messages using ask and using blocking operations on the Future, such as
+ * 'get', then you have to use getContext().sender().tell(...)
+ * in the target actor to send a reply message to the original sender, and thereby completing the Future,
+ * otherwise the sender will block until the timeout expires.
+ *
+ * When using future callbacks, inside actors you need to carefully avoid closing over
+ * the containing actor’s reference, i.e. do not call methods or access mutable state
+ * on the enclosing actor from within the callback. This would break the actor
+ * encapsulation and may introduce synchronization bugs and race conditions because
+ * the callback will be scheduled concurrently to the enclosing actor. Unfortunately
+ * there is not yet a way to detect these illegal accesses at compile time.
*/
def ?(message: Any)(implicit timeout: Timeout): Future[Any]
@@ -186,8 +211,6 @@ private[akka] case object Nobody extends MinimalActorRef {
/**
* Local (serializable) ActorRef that is used when referencing the Actor on its "home" node.
- *
- * @author Jonas Bonér
*/
private[akka] class LocalActorRef private[akka] (
_system: ActorSystemImpl,
@@ -473,7 +496,7 @@ class AskActorRef(
}
override def ?(message: Any)(implicit timeout: Timeout): Future[Any] =
- new KeptPromise[Any](Left(new UnsupportedOperationException("Ask/? is not supported for %s".format(getClass.getName))))(dispatcher)
+ new KeptPromise[Any](Left(new UnsupportedOperationException("Ask/? is not supported for [%s]".format(getClass.getName))))(dispatcher)
override def isTerminated = result.isCompleted || result.isExpired
diff --git a/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala b/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala
index 57df2f786d..74762f170b 100644
--- a/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala
+++ b/akka-actor/src/main/scala/akka/actor/ActorRefProvider.scala
@@ -154,82 +154,6 @@ trait ActorRefFactory {
*/
def actorOf(props: Props, name: String): ActorRef
- /**
- * Create new actor of the given type as child of this context and give it an automatically
- * generated name (currently similar to base64-encoded integer count,
- * reversed and with “$” prepended, may change in the future). The type must have
- * a no-arg constructor which will be invoked using reflection.
- *
- * When invoked on ActorSystem, this method sends a message to the guardian
- * actor and blocks waiting for a reply, see `akka.actor.creation-timeout` in
- * the `reference.conf`.
- */
- def actorOf[T <: Actor](implicit m: Manifest[T]): ActorRef = actorOf(Props(m.erasure.asInstanceOf[Class[_ <: Actor]]))
-
- /**
- * Create new actor of the given type as child of this context with the given name, which must
- * not be null, empty or start with “$”. If the given name is already in use,
- * and `InvalidActorNameException` is thrown. The type must have
- * a no-arg constructor which will be invoked using reflection.
- *
- * When invoked on ActorSystem, this method sends a message to the guardian
- * actor and blocks waiting for a reply, see `akka.actor.creation-timeout` in
- * the `reference.conf`.
- */
- def actorOf[T <: Actor](name: String)(implicit m: Manifest[T]): ActorRef =
- actorOf(Props(m.erasure.asInstanceOf[Class[_ <: Actor]]), name)
-
- /**
- * Create new actor of the given class as child of this context and give it an automatically
- * generated name (currently similar to base64-encoded integer count,
- * reversed and with “$” prepended, may change in the future). The class must have
- * a no-arg constructor which will be invoked using reflection.
- *
- * When invoked on ActorSystem, this method sends a message to the guardian
- * actor and blocks waiting for a reply, see `akka.actor.creation-timeout` in
- * the `reference.conf`.
- */
- def actorOf[T <: Actor](clazz: Class[T]): ActorRef = actorOf(Props(clazz))
-
- /**
- * Create new actor as child of this context and give it an automatically
- * generated name (currently similar to base64-encoded integer count,
- * reversed and with “$” prepended, may change in the future). Use this
- * method to pass constructor arguments to the [[akka.actor.Actor]] while using
- * only default [[akka.actor.Props]]; otherwise refer to `actorOf(Props)`.
- *
- * When invoked on ActorSystem, this method sends a message to the guardian
- * actor and blocks waiting for a reply, see `akka.actor.creation-timeout` in
- * the `reference.conf`.
- */
- def actorOf(factory: ⇒ Actor): ActorRef = actorOf(Props(() ⇒ factory))
-
- /**
- * ''Java API'': Create new actor as child of this context and give it an
- * automatically generated name (currently similar to base64-encoded integer
- * count, reversed and with “$” prepended, may change in the future).
- *
- * Identical to `actorOf(Props(() => creator.create()))`.
- *
- * When invoked on ActorSystem, this method sends a message to the guardian
- * actor and blocks waiting for a reply, see `akka.actor.creation-timeout` in
- * the `reference.conf`.
- */
- def actorOf(creator: UntypedActorFactory): ActorRef = actorOf(Props(() ⇒ creator.create()))
-
- /**
- * ''Java API'': Create new actor as child of this context with the given name, which must
- * not be null, empty or start with “$”. If the given name is already in use,
- * and `InvalidActorNameException` is thrown.
- *
- * Identical to `actorOf(Props(() => creator.create()), name)`.
- *
- * When invoked on ActorSystem, this method sends a message to the guardian
- * actor and blocks waiting for a reply, see `akka.actor.creation-timeout` in
- * the `reference.conf`.
- */
- def actorOf(creator: UntypedActorFactory, name: String): ActorRef = actorOf(Props(() ⇒ creator.create()), name)
-
/**
* Look-up an actor by path; if it does not exist, returns a reference to
* the dead-letter mailbox of the [[akka.actor.ActorSystem]]. If the path
@@ -386,14 +310,14 @@ class LocalActorRefProvider(
override def !(message: Any)(implicit sender: ActorRef = null): Unit = stopped.ifOff(message match {
case Failed(ex) if sender ne null ⇒ causeOfTermination = Some(ex); sender.stop()
- case _ ⇒ log.error(this + " received unexpected message " + message)
+ case _ ⇒ log.error(this + " received unexpected message [" + message + "]")
})
override def sendSystemMessage(message: SystemMessage): Unit = stopped ifOff {
message match {
case Supervise(child) ⇒ // TODO register child in some map to keep track of it and enable shutdown after all dead
case ChildTerminated(child) ⇒ stop()
- case _ ⇒ log.error(this + " received unexpected system message " + message)
+ case _ ⇒ log.error(this + " received unexpected system message [" + message + "]")
}
}
}
diff --git a/akka-actor/src/main/scala/akka/actor/ActorSystem.scala b/akka-actor/src/main/scala/akka/actor/ActorSystem.scala
index 4c94b53c82..af0ec81d7b 100644
--- a/akka-actor/src/main/scala/akka/actor/ActorSystem.scala
+++ b/akka-actor/src/main/scala/akka/actor/ActorSystem.scala
@@ -104,8 +104,7 @@ object ActorSystem {
val SchedulerTicksPerWheel = getInt("akka.scheduler.ticksPerWheel")
if (ConfigVersion != Version)
- throw new ConfigurationException("Akka JAR version [" + Version +
- "] does not match the provided config version [" + ConfigVersion + "]")
+ throw new ConfigurationException("Akka JAR version [" + Version + "] does not match the provided config version [" + ConfigVersion + "]")
override def toString: String = config.root.render
}
@@ -168,16 +167,16 @@ object ActorSystem {
* system.actorOf(props)
*
* // Scala
- * system.actorOf[MyActor]("name")
- * system.actorOf[MyActor]
- * system.actorOf(new MyActor(...))
+ * system.actorOf(Props[MyActor]("name")
+ * system.actorOf(Props[MyActor]
+ * system.actorOf(Props(new MyActor(...))
*
* // Java
* system.actorOf(classOf[MyActor]);
- * system.actorOf(new Creator() {
+ * system.actorOf(Props(new Creator() {
* public MyActor create() { ... }
* });
- * system.actorOf(new Creator() {
+ * system.actorOf(Props(new Creator() {
* public MyActor create() { ... }
* }, "name");
* }}}
@@ -262,12 +261,13 @@ abstract class ActorSystem extends ActorRefFactory {
* effort basis and hence not strictly guaranteed.
*/
def deadLetters: ActorRef
-
+ //#scheduler
/**
* Light-weight scheduler for running asynchronous tasks after some deadline
* in the future. Not terribly precise but cheap.
*/
def scheduler: Scheduler
+ //#scheduler
/**
* Helper object for creating new dispatchers and passing in all required
@@ -327,7 +327,7 @@ abstract class ActorSystem extends ActorRefFactory {
class ActorSystemImpl(val name: String, applicationConfig: Config) extends ActorSystem {
if (!name.matches("""^\w+$"""))
- throw new IllegalArgumentException("invalid ActorSystem name '" + name + "', must contain only word characters (i.e. [a-zA-Z_0-9])")
+ throw new IllegalArgumentException("invalid ActorSystem name [" + name + "], must contain only word characters (i.e. [a-zA-Z_0-9])")
import ActorSystem._
@@ -464,8 +464,8 @@ class ActorSystemImpl(val name: String, applicationConfig: Config) extends Actor
}
/*
- * This is called after the last actor has signaled its termination, i.e.
- * after the last dispatcher has had its chance to schedule its shutdown
+ * This is called after the last actor has signaled its termination, i.e.
+ * after the last dispatcher has had its chance to schedule its shutdown
* action.
*/
protected def stopScheduler(): Unit = scheduler match {
@@ -492,7 +492,7 @@ class ActorSystemImpl(val name: String, applicationConfig: Config) extends Actor
extensions.putIfAbsent(ext, inProcessOfRegistration) match { // Signal that registration is in process
case null ⇒ try { // Signal was successfully sent
ext.createExtension(this) match { // Create and initialize the extension
- case null ⇒ throw new IllegalStateException("Extension instance created as null for Extension: " + ext)
+ case null ⇒ throw new IllegalStateException("Extension instance created as 'null' for extension [" + ext + "]")
case instance ⇒
extensions.replace(ext, inProcessOfRegistration, instance) //Replace our in process signal with the initialized extension
instance //Profit!
@@ -511,7 +511,7 @@ class ActorSystemImpl(val name: String, applicationConfig: Config) extends Actor
}
def extension[T <: Extension](ext: ExtensionId[T]): T = findExtension(ext) match {
- case null ⇒ throw new IllegalArgumentException("Trying to get non-registered extension " + ext)
+ case null ⇒ throw new IllegalArgumentException("Trying to get non-registered extension [" + ext + "]")
case some ⇒ some.asInstanceOf[T]
}
@@ -524,8 +524,8 @@ class ActorSystemImpl(val name: String, applicationConfig: Config) extends Actor
getObjectFor[AnyRef](fqcn).fold(_ ⇒ createInstance[AnyRef](fqcn, noParams, noArgs), Right(_)) match {
case Right(p: ExtensionIdProvider) ⇒ registerExtension(p.lookup());
case Right(p: ExtensionId[_]) ⇒ registerExtension(p);
- case Right(other) ⇒ log.error("'{}' is not an ExtensionIdProvider or ExtensionId, skipping...", fqcn)
- case Left(problem) ⇒ log.error(problem, "While trying to load extension '{}', skipping...", fqcn)
+ case Right(other) ⇒ log.error("[{}] is not an 'ExtensionIdProvider' or 'ExtensionId', skipping...", fqcn)
+ case Left(problem) ⇒ log.error(problem, "While trying to load extension [{}], skipping...", fqcn)
}
}
diff --git a/akka-actor/src/main/scala/akka/actor/FSM.scala b/akka-actor/src/main/scala/akka/actor/FSM.scala
index 9419fccc60..ce7e7f8318 100644
--- a/akka-actor/src/main/scala/akka/actor/FSM.scala
+++ b/akka-actor/src/main/scala/akka/actor/FSM.scala
@@ -564,7 +564,6 @@ trait FSM[S, D] extends ListenerManagement {
/**
* Stackable trait for FSM which adds a rolling event log.
*
- * @author Roland Kuhn
* @since 1.2
*/
trait LoggingFSM[S, D] extends FSM[S, D] { this: Actor ⇒
diff --git a/akka-actor/src/main/scala/akka/actor/IO.scala b/akka-actor/src/main/scala/akka/actor/IO.scala
index 1a92679c4b..1551eef2ec 100644
--- a/akka-actor/src/main/scala/akka/actor/IO.scala
+++ b/akka-actor/src/main/scala/akka/actor/IO.scala
@@ -193,7 +193,7 @@ trait IO {
private def run() {
_next match {
case ByteStringLength(continuation, handle, message, waitingFor) ⇒
- context.currentMessage = message
+ context.asInstanceOf[ActorCell].currentMessage = message
val st = state(handle)
if (st.readBytes.length >= waitingFor) {
val bytes = st.readBytes.take(waitingFor) //.compact
@@ -202,7 +202,7 @@ trait IO {
run()
}
case bsd @ ByteStringDelimited(continuation, handle, message, delimiter, inclusive, scanned) ⇒
- context.currentMessage = message
+ context.asInstanceOf[ActorCell].currentMessage = message
val st = state(handle)
val idx = st.readBytes.indexOfSlice(delimiter, scanned)
if (idx >= 0) {
@@ -215,7 +215,7 @@ trait IO {
_next = bsd.copy(scanned = math.min(idx - delimiter.length, 0))
}
case ByteStringAny(continuation, handle, message) ⇒
- context.currentMessage = message
+ context.asInstanceOf[ActorCell].currentMessage = message
val st = state(handle)
if (st.readBytes.length > 0) {
val bytes = st.readBytes //.compact
diff --git a/akka-actor/src/main/scala/akka/actor/Props.scala b/akka-actor/src/main/scala/akka/actor/Props.scala
index 28812c0d3b..b496df6ce3 100644
--- a/akka-actor/src/main/scala/akka/actor/Props.scala
+++ b/akka-actor/src/main/scala/akka/actor/Props.scala
@@ -86,6 +86,25 @@ case class Props(creator: () ⇒ Actor = Props.defaultCreator,
timeout: Timeout = Props.defaultTimeout,
faultHandler: FaultHandlingStrategy = Props.defaultFaultHandler,
routerConfig: RouterConfig = Props.defaultRoutedProps) {
+
+ /**
+ * Java API
+ */
+ def this(factory: UntypedActorFactory) = this(
+ creator = () ⇒ factory.create(),
+ dispatcher = Props.defaultDispatcher,
+ timeout = Props.defaultTimeout,
+ faultHandler = Props.defaultFaultHandler)
+
+ /**
+ * Java API
+ */
+ def this(actorClass: Class[_ <: Actor]) = this(
+ creator = () ⇒ actorClass.newInstance,
+ dispatcher = Props.defaultDispatcher,
+ timeout = Props.defaultTimeout,
+ faultHandler = Props.defaultFaultHandler)
+
/**
* No-args constructor that sets all the default values
* Java API
diff --git a/akka-actor/src/main/scala/akka/actor/Scheduler.scala b/akka-actor/src/main/scala/akka/actor/Scheduler.scala
index 19697921fd..7075ee0a8a 100644
--- a/akka-actor/src/main/scala/akka/actor/Scheduler.scala
+++ b/akka-actor/src/main/scala/akka/actor/Scheduler.scala
@@ -13,7 +13,7 @@
package akka.actor
import akka.util.Duration
-
+//#scheduler
/**
* An Akka scheduler service. This one needs one special behavior: if
* Closeable, it MUST execute all outstanding tasks upon .close() in order
@@ -28,6 +28,8 @@ trait Scheduler {
* Schedules a message to be sent repeatedly with an initial delay and frequency.
* E.g. if you would like a message to be sent immediately and thereafter every 500ms you would set
* delay = Duration.Zero and frequency = Duration(500, TimeUnit.MILLISECONDS)
+ *
+ * Java & Scala API
*/
def schedule(initialDelay: Duration, frequency: Duration, receiver: ActorRef, message: Any): Cancellable
@@ -35,33 +37,53 @@ trait Scheduler {
* Schedules a function to be run repeatedly with an initial delay and a frequency.
* E.g. if you would like the function to be run after 2 seconds and thereafter every 100ms you would set
* delay = Duration(2, TimeUnit.SECONDS) and frequency = Duration(100, TimeUnit.MILLISECONDS)
+ *
+ * Scala API
*/
def schedule(initialDelay: Duration, frequency: Duration)(f: ⇒ Unit): Cancellable
/**
* Schedules a Runnable to be run once with a delay, i.e. a time period that has to pass before the runnable is executed.
+ *
+ * Java & Scala API
*/
def scheduleOnce(delay: Duration, runnable: Runnable): Cancellable
/**
* Schedules a message to be sent once with a delay, i.e. a time period that has to pass before the message is sent.
+ *
+ * Java & Scala API
*/
def scheduleOnce(delay: Duration, receiver: ActorRef, message: Any): Cancellable
/**
* Schedules a function to be run once with a delay, i.e. a time period that has to pass before the function is run.
+ *
+ * Scala API
*/
def scheduleOnce(delay: Duration)(f: ⇒ Unit): Cancellable
}
+//#scheduler
+//#cancellable
+/**
+ * Signifies something that can be cancelled
+ * There is no strict guarantee that the implementation is thread-safe,
+ * but it should be good practice to make it so.
+ */
trait Cancellable {
/**
- * Cancels the underlying scheduled task.
+ * Cancels this Cancellable
+ *
+ * Java & Scala API
*/
def cancel(): Unit
/**
- * Checks if the underlying scheduled task has been cancelled.
+ * Returns whether this Cancellable has been cancelled
+ *
+ * Java & Scala API
*/
def isCancelled: Boolean
-}
\ No newline at end of file
+}
+//#cancellable
\ No newline at end of file
diff --git a/akka-actor/src/main/scala/akka/actor/UntypedActor.scala b/akka-actor/src/main/scala/akka/actor/UntypedActor.scala
index 3ac3e7770d..1692396a8f 100644
--- a/akka-actor/src/main/scala/akka/actor/UntypedActor.scala
+++ b/akka-actor/src/main/scala/akka/actor/UntypedActor.scala
@@ -48,8 +48,6 @@ import akka.dispatch.{ MessageDispatcher, Promise }
* }
* }
*
- *
- * @author Jonas Bonér
*/
abstract class UntypedActor extends Actor {
@@ -75,30 +73,36 @@ abstract class UntypedActor extends Actor {
/**
* User overridable callback.
*
- * Is called when an Actor is started, this only happens at most once in the life of an actor.
+ * Is called when an Actor is started.
+ * Actor are automatically started asynchronously when created.
+ * Empty default implementation.
*/
override def preStart() {}
/**
* User overridable callback.
*
- * Is called when 'actor.stop()' is invoked.
+ * Is called asynchronously after 'actor.stop()' is invoked.
+ * Empty default implementation.
*/
override def postStop() {}
/**
* User overridable callback.
*
- * Is called on a crashed Actor right BEFORE it is restarted to allow clean up of resources before Actor is terminated.
+ * Is called on a crashed Actor right BEFORE it is restarted to allow clean
+ * up of resources before Actor is terminated.
+ * By default it calls postStop()
*/
- override def preRestart(reason: Throwable, lastMessage: Option[Any]) {}
+ override def preRestart(reason: Throwable, message: Option[Any]) { postStop() }
/**
* User overridable callback.
*
* Is called right AFTER restart on the newly created Actor to allow reinitialization after an Actor crash.
+ * By default it calls preStart()
*/
- override def postRestart(reason: Throwable) {}
+ override def postRestart(reason: Throwable) { preStart() }
/**
* User overridable callback.
@@ -117,7 +121,5 @@ abstract class UntypedActor extends Actor {
/**
* Factory closure for an UntypedActor, to be used with 'Actors.actorOf(factory)'.
- *
- * @author Jonas Bonér
*/
trait UntypedActorFactory extends Creator[Actor]
diff --git a/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala
index 4cea1871b5..0f6091d23b 100644
--- a/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala
+++ b/akka-actor/src/main/scala/akka/dispatch/AbstractDispatcher.scala
@@ -17,9 +17,6 @@ import akka.event.EventStream
import akka.actor.ActorSystem.Settings
import com.typesafe.config.Config
-/**
- * @author Jonas Bonér
- */
final case class Envelope(val message: Any, val sender: ActorRef) {
if (message.isInstanceOf[AnyRef] && (message.asInstanceOf[AnyRef] eq null)) throw new InvalidMessageException("Message is null")
}
@@ -87,9 +84,6 @@ object MessageDispatcher {
implicit def defaultDispatcher(implicit system: ActorSystem) = system.dispatcher
}
-/**
- * @author Jonas Bonér
- */
abstract class MessageDispatcher(val prerequisites: DispatcherPrerequisites) extends AbstractMessageDispatcher with Serializable {
import MessageDispatcher._
@@ -138,7 +132,7 @@ abstract class MessageDispatcher(val prerequisites: DispatcherPrerequisites) ext
shutdownScheduleUpdater.get(this) match {
case UNSCHEDULED ⇒
if (shutdownScheduleUpdater.compareAndSet(this, UNSCHEDULED, SCHEDULED)) {
- scheduler.scheduleOnce(shutdownTimeout, shutdownAction)
+ scheduleShutdownAction()
()
} else ifSensibleToDoSoThenScheduleShutdown()
case SCHEDULED ⇒
@@ -149,6 +143,13 @@ abstract class MessageDispatcher(val prerequisites: DispatcherPrerequisites) ext
case _ ⇒ ()
}
+ private def scheduleShutdownAction(): Unit = {
+ // IllegalStateException is thrown if scheduler has been shutdown
+ try scheduler.scheduleOnce(shutdownTimeout, shutdownAction) catch {
+ case _: IllegalStateException ⇒ shutdown()
+ }
+ }
+
private final val taskCleanup: () ⇒ Unit =
() ⇒ if (inhabitantsUpdater.decrementAndGet(this) == 0) ifSensibleToDoSoThenScheduleShutdown()
@@ -169,36 +170,9 @@ abstract class MessageDispatcher(val prerequisites: DispatcherPrerequisites) ext
val mailBox = actor.mailbox
mailBox.becomeClosed() // FIXME reschedule in tell if possible race with cleanUp is detected in order to properly clean up
actor.mailbox = deadLetterMailbox
- cleanUpMailboxFor(actor, mailBox)
mailBox.cleanUp()
}
- /**
- * Overridable callback to clean up the mailbox for a given actor,
- * called when an actor is unregistered.
- */
- protected def cleanUpMailboxFor(actor: ActorCell, mailBox: Mailbox) {
-
- if (mailBox.hasSystemMessages) {
- var message = mailBox.systemDrain()
- while (message ne null) {
- // message must be “virgin” before being able to systemEnqueue again
- val next = message.next
- message.next = null
- deadLetterMailbox.systemEnqueue(actor.self, message)
- message = next
- }
- }
-
- if (mailBox.hasMessages) {
- var envelope = mailBox.dequeue
- while (envelope ne null) {
- deadLetterMailbox.enqueue(actor.self, envelope)
- envelope = mailBox.dequeue
- }
- }
- }
-
private val shutdownAction = new Runnable {
@tailrec
final def run() {
@@ -213,9 +187,7 @@ abstract class MessageDispatcher(val prerequisites: DispatcherPrerequisites) ext
}
case RESCHEDULED ⇒
if (shutdownScheduleUpdater.compareAndSet(MessageDispatcher.this, RESCHEDULED, SCHEDULED))
- try scheduler.scheduleOnce(shutdownTimeout, this) catch {
- case _: IllegalStateException ⇒ shutdown()
- }
+ scheduleShutdownAction()
else run()
}
}
@@ -224,7 +196,7 @@ abstract class MessageDispatcher(val prerequisites: DispatcherPrerequisites) ext
/**
* When the dispatcher no longer has any actors registered, how long will it wait until it shuts itself down,
* defaulting to your akka configs "akka.actor.dispatcher-shutdown-timeout" or default specified in
- * akka-actor-reference.conf
+ * reference.conf
*/
protected[akka] def shutdownTimeout: Duration
@@ -290,7 +262,7 @@ abstract class MessageDispatcher(val prerequisites: DispatcherPrerequisites) ext
}
/**
- * Trait to be used for hooking in new dispatchers into Dispatchers.fromConfig
+ * Trait to be used for hooking in new dispatchers into Dispatchers.from(cfg: Config)
*/
abstract class MessageDispatcherConfigurator() {
/**
@@ -307,9 +279,10 @@ abstract class MessageDispatcherConfigurator() {
}
}
- def configureThreadPool(config: Config,
- settings: Settings,
- createDispatcher: ⇒ (ThreadPoolConfig) ⇒ MessageDispatcher): ThreadPoolConfigDispatcherBuilder = {
+ def configureThreadPool(
+ config: Config,
+ settings: Settings,
+ createDispatcher: ⇒ (ThreadPoolConfig) ⇒ MessageDispatcher): ThreadPoolConfigDispatcherBuilder = {
import ThreadPoolConfigDispatcherBuilder.conf_?
//Apply the following options to the config if they are present in the config
diff --git a/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala
index 6f45d8629c..96477b0d56 100644
--- a/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala
+++ b/akka-actor/src/main/scala/akka/dispatch/BalancingDispatcher.scala
@@ -28,8 +28,6 @@ import akka.util.Duration
*
* @see akka.dispatch.BalancingDispatcher
* @see akka.dispatch.Dispatchers
- *
- * @author Viktor Klang
*/
class BalancingDispatcher(
_prerequisites: DispatcherPrerequisites,
@@ -65,6 +63,21 @@ class BalancingDispatcher(
final def numberOfMessages: Int = messageQueue.numberOfMessages
final def hasMessages: Boolean = messageQueue.hasMessages
+
+ override def cleanUp(): Unit = {
+ //Don't call the original implementation of this since it scraps all messages, and we don't want to do that
+ if (hasSystemMessages) {
+ val dlq = actor.systemImpl.deadLetterMailbox
+ var message = systemDrain()
+ while (message ne null) {
+ // message must be “virgin” before being able to systemEnqueue again
+ val next = message.next
+ message.next = null
+ dlq.systemEnqueue(actor.self, message)
+ message = next
+ }
+ }
+ }
}
protected[akka] override def register(actor: ActorCell) = {
@@ -78,19 +91,6 @@ class BalancingDispatcher(
intoTheFray(except = actor) //When someone leaves, he tosses a friend into the fray
}
- protected override def cleanUpMailboxFor(actor: ActorCell, mailBox: Mailbox) {
- if (mailBox.hasSystemMessages) {
- var message = mailBox.systemDrain()
- while (message ne null) {
- // message must be “virgin” before being able to systemEnqueue again
- val next = message.next
- message.next = null
- prerequisites.deadLetterMailbox.systemEnqueue(actor.self, message)
- message = next
- }
- }
- }
-
def intoTheFray(except: ActorCell): Unit =
if (rebalance.compareAndSet(false, true)) {
try {
diff --git a/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala
index 1a40ee23cd..02c84b3099 100644
--- a/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala
+++ b/akka-actor/src/main/scala/akka/dispatch/Dispatcher.scala
@@ -55,7 +55,6 @@ import java.util.concurrent._
* But the preferred way of creating dispatchers is to use
* the {@link akka.dispatch.Dispatchers} factory object.
*
- * @author Jonas Bonér
* @param throughput positive integer indicates the dispatcher will only process so much messages at a time from the
* mailbox, without checking the mailboxes of other actors. Zero or negative means the dispatcher
* always continues until the mailbox is empty.
@@ -153,4 +152,4 @@ abstract class PriorityGenerator extends java.util.Comparator[Envelope] {
final def compare(thisMessage: Envelope, thatMessage: Envelope): Int =
gen(thisMessage.message) - gen(thatMessage.message)
-}
\ No newline at end of file
+}
diff --git a/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala b/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala
index c905a7297d..cdcb056372 100644
--- a/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala
+++ b/akka-actor/src/main/scala/akka/dispatch/Dispatchers.scala
@@ -4,16 +4,19 @@
package akka.dispatch
+import java.util.concurrent.TimeUnit
+import java.util.concurrent.ConcurrentHashMap
+
import akka.actor.LocalActorRef
import akka.actor.newUuid
import akka.util.{ Duration, ReflectiveAccess }
-import java.util.concurrent.TimeUnit
import akka.actor.ActorSystem
import akka.event.EventStream
import akka.actor.Scheduler
import akka.actor.ActorSystem.Settings
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
+import akka.config.ConfigurationException
trait DispatcherPrerequisites {
def eventStream: EventStream
@@ -27,6 +30,10 @@ case class DefaultDispatcherPrerequisites(
val scheduler: Scheduler) extends DispatcherPrerequisites
/**
+ * It is recommended to define the dispatcher in configuration to allow for tuning
+ * for different environments. Use the `lookup` or `newFromConfig` method to create
+ * a dispatcher as specified in configuration.
+ *
* Scala API. Dispatcher factory.
*
* Example usage:
@@ -53,8 +60,6 @@ case class DefaultDispatcherPrerequisites(
* .build();
*
*
- *
- * @author Jonas Bonér
*/
class Dispatchers(val settings: ActorSystem.Settings, val prerequisites: DispatcherPrerequisites) {
@@ -64,9 +69,33 @@ class Dispatchers(val settings: ActorSystem.Settings, val prerequisites: Dispatc
val defaultDispatcherConfig = settings.config.getConfig("akka.actor.default-dispatcher")
- // TODO PN Shouldn't we fail hard if default-dispatcher is wrong?
- lazy val defaultGlobalDispatcher =
- from(defaultDispatcherConfig) getOrElse newDispatcher("AkkaDefaultGlobalDispatcher", 1, MailboxType).build
+ lazy val defaultGlobalDispatcher: MessageDispatcher =
+ from(defaultDispatcherConfig) getOrElse {
+ throw new ConfigurationException("Wrong configuration [akka.actor.default-dispatcher]")
+ }
+
+ // FIXME: Dispatchers registered here are are not removed, see ticket #1494
+ private val dispatchers = new ConcurrentHashMap[String, MessageDispatcher]
+
+ /**
+ * Returns a dispatcher as specified in configuration, or if not defined it uses
+ * the default dispatcher. The same dispatcher instance is returned for subsequent
+ * lookups.
+ */
+ def lookup(key: String): MessageDispatcher = {
+ dispatchers.get(key) match {
+ case null ⇒
+ // It doesn't matter if we create a dispatcher that isn't used due to concurrent lookup.
+ // That shouldn't happen often and in case it does the actual ExecutorService isn't
+ // created until used, i.e. cheap.
+ val newDispatcher = newFromConfig(key)
+ dispatchers.putIfAbsent(key, newDispatcher) match {
+ case null ⇒ newDispatcher
+ case existing ⇒ existing
+ }
+ case existing ⇒ existing
+ }
+ }
/**
* Creates an thread based dispatcher serving a single actor through the same single thread.
@@ -135,7 +164,7 @@ class Dispatchers(val settings: ActorSystem.Settings, val prerequisites: Dispatc
new Dispatcher(prerequisites, name, throughput, throughputDeadline, mailboxType, config, settings.DispatcherDefaultShutdown), ThreadPoolConfig())
/**
- * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
+ * Creates a executor-based event-driven dispatcher, with work-sharing, serving multiple (millions) of actors through a thread pool.
*
* Has a fluent builder interface for configuring its semantics.
*/
@@ -144,7 +173,7 @@ class Dispatchers(val settings: ActorSystem.Settings, val prerequisites: Dispatc
settings.DispatcherThroughputDeadlineTime, MailboxType, config, settings.DispatcherDefaultShutdown), ThreadPoolConfig())
/**
- * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
+ * Creates a executor-based event-driven dispatcher, with work-sharing, serving multiple (millions) of actors through a thread pool.
*
* Has a fluent builder interface for configuring its semantics.
*/
@@ -154,7 +183,7 @@ class Dispatchers(val settings: ActorSystem.Settings, val prerequisites: Dispatc
config, settings.DispatcherDefaultShutdown), ThreadPoolConfig())
/**
- * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
+ * Creates a executor-based event-driven dispatcher, with work-sharing, serving multiple (millions) of actors through a thread pool.
*
* Has a fluent builder interface for configuring its semantics.
*/
@@ -164,7 +193,7 @@ class Dispatchers(val settings: ActorSystem.Settings, val prerequisites: Dispatc
config, settings.DispatcherDefaultShutdown), ThreadPoolConfig())
/**
- * Creates a executor-based event-driven dispatcher, with work-stealing, serving multiple (millions) of actors through a thread pool.
+ * Creates a executor-based event-driven dispatcher, with work-sharing, serving multiple (millions) of actors through a thread pool.
*
* Has a fluent builder interface for configuring its semantics.
*/
@@ -172,13 +201,13 @@ class Dispatchers(val settings: ActorSystem.Settings, val prerequisites: Dispatc
ThreadPoolConfigDispatcherBuilder(config ⇒
new BalancingDispatcher(prerequisites, name, throughput, throughputDeadline, mailboxType,
config, settings.DispatcherDefaultShutdown), ThreadPoolConfig())
+
/**
- * Utility function that tries to load the specified dispatcher config from the akka.conf
+ * Creates a new dispatcher as specified in configuration
* or if not defined it uses the supplied dispatcher.
- * Uses default values from default-dispatcher, i.e. all options doesn't need to be defined
- * in config.
+ * Uses default values from default-dispatcher, i.e. all options doesn't need to be defined.
*/
- def fromConfig(key: String, default: ⇒ MessageDispatcher = defaultGlobalDispatcher, cfg: Config = settings.config): MessageDispatcher = {
+ def newFromConfig(key: String, default: ⇒ MessageDispatcher, cfg: Config): MessageDispatcher = {
import scala.collection.JavaConverters._
def simpleName = key.substring(key.lastIndexOf('.') + 1)
cfg.hasPath(key) match {
@@ -186,13 +215,21 @@ class Dispatchers(val settings: ActorSystem.Settings, val prerequisites: Dispatc
case true ⇒
val conf = cfg.getConfig(key)
val confWithName = conf.withFallback(ConfigFactory.parseMap(Map("name" -> simpleName).asJava))
- from(confWithName).getOrElse(default)
+ from(confWithName).getOrElse(throw new ConfigurationException("Wrong configuration [%s]".format(key)))
}
}
+ /**
+ * Creates a new dispatcher as specified in configuration, or if not defined it uses
+ * the default dispatcher.
+ * Uses default configuration values from default-dispatcher, i.e. all options don't
+ * need to be defined.
+ */
+ def newFromConfig(key: String): MessageDispatcher = newFromConfig(key, defaultGlobalDispatcher, settings.config)
+
/*
* Creates of obtains a dispatcher from a ConfigMap according to the format below.
- * Uses default values from default-dispatcher.
+ * Uses default values from default-dispatcher.
*
* my-dispatcher {
* type = "Dispatcher" # Must be one of the following
diff --git a/akka-actor/src/main/scala/akka/dispatch/Mailbox.scala b/akka-actor/src/main/scala/akka/dispatch/Mailbox.scala
index 666ee3baac..fdb46f0ec4 100644
--- a/akka-actor/src/main/scala/akka/dispatch/Mailbox.scala
+++ b/akka-actor/src/main/scala/akka/dispatch/Mailbox.scala
@@ -33,9 +33,6 @@ object Mailbox {
final val debug = false
}
-/**
- * @author Jonas Bonér
- */
abstract class Mailbox(val actor: ActorCell) extends MessageQueue with SystemMessageQueue with Runnable {
import Mailbox._
@@ -209,8 +206,29 @@ abstract class Mailbox(val actor: ActorCell) extends MessageQueue with SystemMes
/**
* Overridable callback to clean up the mailbox,
* called when an actor is unregistered.
+ * By default it dequeues all system messages + messages and ships them to the owning actor's system's DeadLetterMailbox
*/
- protected[dispatch] def cleanUp() {}
+ protected[dispatch] def cleanUp(): Unit = if (actor ne null) {
+ val dlq = actor.systemImpl.deadLetterMailbox
+ if (hasSystemMessages) {
+ var message = systemDrain()
+ while (message ne null) {
+ // message must be “virgin” before being able to systemEnqueue again
+ val next = message.next
+ message.next = null
+ dlq.systemEnqueue(actor.self, message)
+ message = next
+ }
+ }
+
+ if (hasMessages) {
+ var envelope = dequeue
+ while (envelope ne null) {
+ dlq.enqueue(actor.self, envelope)
+ envelope = dequeue
+ }
+ }
+ }
}
trait MessageQueue {
diff --git a/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala b/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala
index ed0b3cde99..3cb7bda73e 100644
--- a/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala
+++ b/akka-actor/src/main/scala/akka/dispatch/PinnedDispatcher.scala
@@ -14,8 +14,6 @@ import java.util.concurrent.TimeUnit
/**
* Dedicates a unique thread for each actor passed in as reference. Served through its messageQueue.
- *
- * @author Jonas Bonér
*/
class PinnedDispatcher(
_prerequisites: DispatcherPrerequisites,
@@ -32,7 +30,7 @@ class PinnedDispatcher(
_shutdownTimeout) {
@volatile
- protected[akka] var owner: ActorCell = _actor
+ private var owner: ActorCell = _actor
//Relies on an external lock provided by MessageDispatcher.attach
protected[akka] override def register(actorCell: ActorCell) = {
diff --git a/akka-actor/src/main/scala/akka/dispatch/ThreadPoolBuilder.scala b/akka-actor/src/main/scala/akka/dispatch/ThreadPoolBuilder.scala
index d26842cc3b..d58444c166 100644
--- a/akka-actor/src/main/scala/akka/dispatch/ThreadPoolBuilder.scala
+++ b/akka-actor/src/main/scala/akka/dispatch/ThreadPoolBuilder.scala
@@ -144,9 +144,6 @@ case class ThreadPoolConfigDispatcherBuilder(dispatcherFactory: (ThreadPoolConfi
def configure(fs: Option[Function[ThreadPoolConfigDispatcherBuilder, ThreadPoolConfigDispatcherBuilder]]*): ThreadPoolConfigDispatcherBuilder = fs.foldLeft(this)((c, f) ⇒ f.map(_(c)).getOrElse(c))
}
-/**
- * @author Jonas Bonér
- */
class MonitorableThreadFactory(val name: String, val daemonic: Boolean = false) extends ThreadFactory {
protected val counter = new AtomicLong
@@ -157,9 +154,6 @@ class MonitorableThreadFactory(val name: String, val daemonic: Boolean = false)
}
}
-/**
- * @author Jonas Bonér
- */
object MonitorableThread {
val DEFAULT_NAME = "MonitorableThread".intern
@@ -168,9 +162,6 @@ object MonitorableThread {
val alive = new AtomicInteger
}
-/**
- * @author Jonas Bonér
- */
class MonitorableThread(runnable: Runnable, name: String)
extends Thread(runnable, name + "-" + MonitorableThread.created.incrementAndGet) {
diff --git a/akka-actor/src/main/scala/akka/event/Logging.scala b/akka-actor/src/main/scala/akka/event/Logging.scala
index d6e71aa586..0425b4c661 100644
--- a/akka-actor/src/main/scala/akka/event/Logging.scala
+++ b/akka-actor/src/main/scala/akka/event/Logging.scala
@@ -281,19 +281,34 @@ object Logging {
val debugFormat = "[DEBUG] [%s] [%s] [%s] %s".intern
/**
- * Obtain LoggingAdapter for the given application and source object. The
- * source is used to identify the source of this logging channel and must have
+ * Obtain LoggingAdapter for the given event stream (system) and source object.
+ * Note that there is an implicit conversion from [[akka.actor.ActorSystem]]
+ * to [[akka.event.LoggingBus]].
+ *
+ * The source is used to identify the source of this logging channel and must have
* a corresponding LogSource[T] instance in scope; by default these are
- * provided for Class[_], Actor, ActorRef and String types.
+ * provided for Class[_], Actor, ActorRef and String types. The source
+ * object is translated to a String according to the following rules:
+ *
+ * <ul>
+ * <li>if it is an Actor or ActorRef, its path is used</li>
+ * <li>in case of a String it is used as is</li>
+ * <li>in case of a class an approximation of its simpleName</li>
+ * <li>and in all other cases the simpleName of its class</li>
+ * </ul>
*/
def apply[T: LogSource](eventStream: LoggingBus, logSource: T): LoggingAdapter =
new BusLogging(eventStream, implicitly[LogSource[T]].genString(logSource))
/**
- * Java API: Obtain LoggingAdapter for the given application and source object. The
- * source object is used to identify the source of this logging channel; if it is
- * an Actor or ActorRef, its address is used, in case of a class an approximation of
- * its simpleName and in all other cases the simpleName of its class.
+ * Java API: Obtain LoggingAdapter for the given system and source object. The
+ * source object is used to identify the source of this logging channel. The source
+ * object is translated to a String according to the following rules:
+ *
+ * <ul>
+ * <li>if it is an Actor or ActorRef, its path is used</li>
+ * <li>in case of a String it is used as is</li>
+ * <li>in case of a class an approximation of its simpleName</li>
+ * <li>and in all other cases the simpleName of its class</li>
+ * </ul>
*/
def getLogger(system: ActorSystem, logSource: AnyRef): LoggingAdapter = apply(system.eventStream, LogSource.fromAnyRef(logSource))
@@ -354,6 +369,11 @@ object Logging {
*/
case object LoggerInitialized
+ /**
+ * Java API to create a LoggerInitialized message.
+ */
+ def loggerInitialized() = LoggerInitialized
+
class LoggerInitializationException(msg: String) extends AkkaException(msg)
trait StdOutLogger {
diff --git a/akka-actor/src/main/scala/akka/experimental.scala b/akka-actor/src/main/scala/akka/experimental.scala
index cfc976551a..c37197a10d 100644
--- a/akka-actor/src/main/scala/akka/experimental.scala
+++ b/akka-actor/src/main/scala/akka/experimental.scala
@@ -10,7 +10,6 @@ import annotation.target._
* This annotation marks a feature which is not yet considered stable and may
* change or be removed in a future release.
*
- * @author Roland Kuhn
* @since 1.2
*/
@getter
diff --git a/akka-actor/src/main/scala/akka/routing/ConnectionManager.scala b/akka-actor/src/main/scala/akka/routing/ConnectionManager.scala
index cf7dd3fda5..a417c75bac 100644
--- a/akka-actor/src/main/scala/akka/routing/ConnectionManager.scala
+++ b/akka-actor/src/main/scala/akka/routing/ConnectionManager.scala
@@ -24,8 +24,6 @@ trait VersionedIterable[A] {
/**
* Manages connections (ActorRefs) for a router.
- *
- * @author Jonas Bonér
*/
trait ConnectionManager {
/**
diff --git a/akka-actor/src/main/scala/akka/routing/ConsistentHash.scala b/akka-actor/src/main/scala/akka/routing/ConsistentHash.scala
index 3abadd01d8..ead70b4b7a 100644
--- a/akka-actor/src/main/scala/akka/routing/ConsistentHash.scala
+++ b/akka-actor/src/main/scala/akka/routing/ConsistentHash.scala
@@ -15,8 +15,6 @@ import scala.collection.mutable.{ Buffer, Map }
* Consistent Hashing node ring abstraction.
*
* Not thread-safe, to be used from within an Actor or protected some other way.
- *
- * @author Jonas Bonér
*/
class ConsistentHash[T](nodes: Seq[T], replicas: Int) {
private val cluster = Buffer[T]()
diff --git a/akka-actor/src/main/scala/akka/routing/Routing.scala b/akka-actor/src/main/scala/akka/routing/Routing.scala
index 179a3bf23f..5fb34d3de3 100644
--- a/akka-actor/src/main/scala/akka/routing/Routing.scala
+++ b/akka-actor/src/main/scala/akka/routing/Routing.scala
@@ -364,4 +364,4 @@ trait ScatterGatherFirstCompletedLike { this: RouterConfig ⇒
}
}
}
-}
\ No newline at end of file
+}
diff --git a/akka-actor/src/main/scala/akka/serialization/Format.scala b/akka-actor/src/main/scala/akka/serialization/Format.scala
index 43177862d4..41f9e9ce73 100644
--- a/akka-actor/src/main/scala/akka/serialization/Format.scala
+++ b/akka-actor/src/main/scala/akka/serialization/Format.scala
@@ -7,7 +7,6 @@ package akka.serialization
import akka.actor.Actor
/**
- * @author Jonas Bonér
* trait Serializer extends scala.Serializable {
* @volatile
* var classLoader: Option[ClassLoader] = None
diff --git a/akka-actor/src/main/scala/akka/util/Convert.scala b/akka-actor/src/main/scala/akka/util/Convert.scala
index 278b94f422..42a98f3849 100644
--- a/akka-actor/src/main/scala/akka/util/Convert.scala
+++ b/akka-actor/src/main/scala/akka/util/Convert.scala
@@ -4,9 +4,6 @@
package akka.util
-/**
- * @author Jonas Bonér
- */
object Convert {
def intToBytes(value: Int): Array[Byte] = {
diff --git a/akka-actor/src/main/scala/akka/util/Crypt.scala b/akka-actor/src/main/scala/akka/util/Crypt.scala
index 2507b0e421..50e8c881a6 100644
--- a/akka-actor/src/main/scala/akka/util/Crypt.scala
+++ b/akka-actor/src/main/scala/akka/util/Crypt.scala
@@ -6,9 +6,6 @@ package akka.util
import java.security.{ MessageDigest, SecureRandom }
-/**
- * @author Jonas Bonér
- */
object Crypt {
val hex = "0123456789ABCDEF"
val lineSeparator = System.getProperty("line.separator")
diff --git a/akka-actor/src/main/scala/akka/util/Duration.scala b/akka-actor/src/main/scala/akka/util/Duration.scala
index eec371d724..6e9310e5d8 100644
--- a/akka-actor/src/main/scala/akka/util/Duration.scala
+++ b/akka-actor/src/main/scala/akka/util/Duration.scala
@@ -38,7 +38,19 @@ case class Timer(duration: Duration, throwExceptionOnTimeout: Boolean = false) {
}
}
+case class Deadline(d: Duration) {
+ def +(other: Duration): Deadline = copy(d = d + other)
+ def -(other: Duration): Deadline = copy(d = d - other)
+ def -(other: Deadline): Duration = d - other.d
+ def timeLeft: Duration = this - Deadline.now
+}
+object Deadline {
+ def now: Deadline = Deadline(Duration(System.nanoTime, NANOSECONDS))
+}
+
object Duration {
+ implicit def timeLeft(implicit d: Deadline): Duration = d.timeLeft
+
def apply(length: Long, unit: TimeUnit): Duration = new FiniteDuration(length, unit)
def apply(length: Double, unit: TimeUnit): Duration = fromNanos(unit.toNanos(1) * length)
def apply(length: Long, unit: String): Duration = new FiniteDuration(length, timeUnit(unit))
@@ -129,10 +141,7 @@ object Duration {
override def *(factor: Double): Duration = throw new IllegalArgumentException("cannot multiply Undefined duration")
override def /(factor: Double): Duration = throw new IllegalArgumentException("cannot divide Undefined duration")
override def /(other: Duration): Double = throw new IllegalArgumentException("cannot divide Undefined duration")
- def >(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
- def >=(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
- def <(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
- def <=(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
+ def compare(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
def unary_- : Duration = throw new IllegalArgumentException("cannot negate Undefined duration")
}
@@ -183,10 +192,7 @@ object Duration {
*/
val Inf: Duration = new Duration with Infinite {
override def toString = "Duration.Inf"
- def >(other: Duration) = true
- def >=(other: Duration) = true
- def <(other: Duration) = false
- def <=(other: Duration) = false
+ def compare(other: Duration) = 1
def unary_- : Duration = MinusInf
}
@@ -196,10 +202,7 @@ object Duration {
*/
val MinusInf: Duration = new Duration with Infinite {
override def toString = "Duration.MinusInf"
- def >(other: Duration) = false
- def >=(other: Duration) = false
- def <(other: Duration) = true
- def <=(other: Duration) = true
+ def compare(other: Duration) = -1
def unary_- : Duration = Inf
}
@@ -255,7 +258,7 @@ object Duration {
* val d3 = d2 + 1.millisecond
*
*/
-abstract class Duration extends Serializable {
+abstract class Duration extends Serializable with Ordered[Duration] {
def length: Long
def unit: TimeUnit
def toNanos: Long
@@ -267,10 +270,6 @@ abstract class Duration extends Serializable {
def toDays: Long
def toUnit(unit: TimeUnit): Double
def printHMS: String
- def <(other: Duration): Boolean
- def <=(other: Duration): Boolean
- def >(other: Duration): Boolean
- def >=(other: Duration): Boolean
def +(other: Duration): Duration
def -(other: Duration): Duration
def *(factor: Double): Duration
@@ -281,6 +280,7 @@ abstract class Duration extends Serializable {
def min(other: Duration): Duration = if (this < other) this else other
def max(other: Duration): Duration = if (this > other) this else other
def sleep(): Unit = Thread.sleep(toMillis)
+ def fromNow: Deadline = Deadline.now + this
// Java API
def lt(other: Duration) = this < other
@@ -329,37 +329,12 @@ class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
def printHMS = "%02d:%02d:%06.3f".format(toHours, toMinutes % 60, toMillis / 1000. % 60)
- def <(other: Duration) = {
+ def compare(other: Duration) =
if (other.finite_?) {
- toNanos < other.asInstanceOf[FiniteDuration].toNanos
- } else {
- other > this
- }
- }
-
- def <=(other: Duration) = {
- if (other.finite_?) {
- toNanos <= other.asInstanceOf[FiniteDuration].toNanos
- } else {
- other >= this
- }
- }
-
- def >(other: Duration) = {
- if (other.finite_?) {
- toNanos > other.asInstanceOf[FiniteDuration].toNanos
- } else {
- other < this
- }
- }
-
- def >=(other: Duration) = {
- if (other.finite_?) {
- toNanos >= other.asInstanceOf[FiniteDuration].toNanos
- } else {
- other <= this
- }
- }
+ val me = toNanos
+ val o = other.toNanos
+ if (me > o) 1 else if (me < o) -1 else 0
+ } else -other.compare(this)
def +(other: Duration) = {
if (!other.finite_?) {
@@ -397,6 +372,8 @@ class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
}
class DurationInt(n: Int) {
+ import duration.Classifier
+
def nanoseconds = Duration(n, NANOSECONDS)
def nanos = Duration(n, NANOSECONDS)
def nanosecond = Duration(n, NANOSECONDS)
@@ -423,9 +400,38 @@ class DurationInt(n: Int) {
def days = Duration(n, DAYS)
def day = Duration(n, DAYS)
+
+ def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+
+ def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+
+ def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+
+ def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
+ def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
+
+ def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
+ def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
+
+ def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
+ def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
+
+ def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
+ def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
}
class DurationLong(n: Long) {
+ import duration.Classifier
+
def nanoseconds = Duration(n, NANOSECONDS)
def nanos = Duration(n, NANOSECONDS)
def nanosecond = Duration(n, NANOSECONDS)
@@ -452,9 +458,38 @@ class DurationLong(n: Long) {
def days = Duration(n, DAYS)
def day = Duration(n, DAYS)
+
+ def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+
+ def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+
+ def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+
+ def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
+ def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
+
+ def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
+ def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MINUTES))
+
+ def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
+ def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, HOURS))
+
+ def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
+ def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, DAYS))
}
class DurationDouble(d: Double) {
+ import duration.Classifier
+
def nanoseconds = Duration(d, NANOSECONDS)
def nanos = Duration(d, NANOSECONDS)
def nanosecond = Duration(d, NANOSECONDS)
@@ -481,5 +516,32 @@ class DurationDouble(d: Double) {
def days = Duration(d, DAYS)
def day = Duration(d, DAYS)
+
+ def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
+ def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
+ def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
+ def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
+
+ def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
+ def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
+ def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
+ def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
+
+ def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
+ def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
+ def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
+ def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
+
+ def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, SECONDS))
+ def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, SECONDS))
+
+ def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MINUTES))
+ def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MINUTES))
+
+ def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, HOURS))
+ def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, HOURS))
+
+ def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, DAYS))
+ def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, DAYS))
}
diff --git a/akka-actor/src/main/scala/akka/util/HashCode.scala b/akka-actor/src/main/scala/akka/util/HashCode.scala
index d515a57ec5..40b740bfd6 100644
--- a/akka-actor/src/main/scala/akka/util/HashCode.scala
+++ b/akka-actor/src/main/scala/akka/util/HashCode.scala
@@ -21,8 +21,6 @@ import java.lang.{ Float ⇒ JFloat, Double ⇒ JDouble }
* result
* }
*
- *
- * @author Jonas Bonér
*/
object HashCode {
val SEED = 23
diff --git a/akka-actor/src/main/scala/akka/util/Helpers.scala b/akka-actor/src/main/scala/akka/util/Helpers.scala
index 830ec28881..c656ab37b1 100644
--- a/akka-actor/src/main/scala/akka/util/Helpers.scala
+++ b/akka-actor/src/main/scala/akka/util/Helpers.scala
@@ -8,17 +8,14 @@ import java.util.Comparator
import scala.annotation.tailrec
import java.util.regex.Pattern
-/**
- * @author Jonas Bonér
- */
object Helpers {
def makePattern(s: String): Pattern = Pattern.compile("^\\Q" + s.replace("?", "\\E.\\Q").replace("*", "\\E.*\\Q") + "\\E$")
def compareIdentityHash(a: AnyRef, b: AnyRef): Int = {
/*
- * make sure that there is no overflow or underflow in comparisons, so
- * that the ordering is actually consistent and you cannot have a
+ * make sure that there is no overflow or underflow in comparisons, so
+ * that the ordering is actually consistent and you cannot have a
* sequence which cyclically is monotone without end.
*/
val diff = ((System.identityHashCode(a) & 0xffffffffL) - (System.identityHashCode(b) & 0xffffffffL))
diff --git a/akka-actor/src/main/scala/akka/util/Index.scala b/akka-actor/src/main/scala/akka/util/Index.scala
index 3b0f68eabe..b7cb1a74a4 100644
--- a/akka-actor/src/main/scala/akka/util/Index.scala
+++ b/akka-actor/src/main/scala/akka/util/Index.scala
@@ -13,8 +13,6 @@ import scala.collection.mutable
* An implementation of a ConcurrentMultiMap
* Adds/remove is serialized over the specified key
* Reads are fully concurrent <-- el-cheapo
- *
- * @author Viktor Klang
*/
class Index[K, V](val mapSize: Int, val valueComparator: Comparator[V]) {
@@ -192,7 +190,5 @@ class Index[K, V](val mapSize: Int, val valueComparator: Comparator[V]) {
* An implementation of a ConcurrentMultiMap
* Adds/remove is serialized over the specified key
* Reads are fully concurrent <-- el-cheapo
- *
- * @author Viktor Klang
*/
class ConcurrentMultiMap[K, V](mapSize: Int, valueComparator: Comparator[V]) extends Index[K, V](mapSize, valueComparator)
diff --git a/akka-actor/src/main/scala/akka/util/JMX.scala b/akka-actor/src/main/scala/akka/util/JMX.scala
index 2c87524843..bcfd5d2477 100644
--- a/akka-actor/src/main/scala/akka/util/JMX.scala
+++ b/akka-actor/src/main/scala/akka/util/JMX.scala
@@ -9,9 +9,6 @@ import java.lang.management.ManagementFactory
import javax.management.{ ObjectInstance, ObjectName, InstanceAlreadyExistsException, InstanceNotFoundException }
import akka.actor.ActorSystem
-/**
- * @author Jonas Bonér
- */
object JMX {
private val mbeanServer = ManagementFactory.getPlatformMBeanServer
diff --git a/akka-actor/src/main/scala/akka/util/ListenerManagement.scala b/akka-actor/src/main/scala/akka/util/ListenerManagement.scala
index fad8f5b20a..3efbcbc902 100644
--- a/akka-actor/src/main/scala/akka/util/ListenerManagement.scala
+++ b/akka-actor/src/main/scala/akka/util/ListenerManagement.scala
@@ -9,8 +9,6 @@ import akka.actor.{ ActorInitializationException, ActorRef }
/**
* A manager for listener actors. Intended for mixin by observables.
- *
- * @author Martin Krasser
*/
trait ListenerManagement {
diff --git a/akka-actor/src/main/scala/akka/util/LockUtil.scala b/akka-actor/src/main/scala/akka/util/LockUtil.scala
index a31f4434d1..e17507d427 100644
--- a/akka-actor/src/main/scala/akka/util/LockUtil.scala
+++ b/akka-actor/src/main/scala/akka/util/LockUtil.scala
@@ -7,9 +7,6 @@ package akka.util
import java.util.concurrent.locks.{ ReentrantLock }
import java.util.concurrent.atomic.{ AtomicBoolean }
-/**
- * @author Jonas Bonér
- */
final class ReentrantGuard {
final val lock = new ReentrantLock
diff --git a/akka-actor/src/main/scala/akka/util/duration/package.scala b/akka-actor/src/main/scala/akka/util/duration/package.scala
index 97e0e82c39..88a328d6d8 100644
--- a/akka-actor/src/main/scala/akka/util/duration/package.scala
+++ b/akka-actor/src/main/scala/akka/util/duration/package.scala
@@ -7,6 +7,23 @@ package akka.util
import java.util.concurrent.TimeUnit
package object duration {
+ trait Classifier[C] {
+ type R
+ def convert(d: Duration): R
+ }
+
+ object span
+ implicit object spanConvert extends Classifier[span.type] {
+ type R = Duration
+ def convert(d: Duration) = d
+ }
+
+ object fromNow
+ implicit object fromNowConvert extends Classifier[fromNow.type] {
+ type R = Deadline
+ def convert(d: Duration) = Deadline.now + d
+ }
+
implicit def intToDurationInt(n: Int) = new DurationInt(n)
implicit def longToDurationLong(n: Long) = new DurationLong(n)
implicit def doubleToDurationDouble(d: Double) = new DurationDouble(d)
diff --git a/akka-camel-typed/src/test/scala/akka/camel/TypedConsumerPublishRequestorTest.scala b/akka-camel-typed/src/test/scala/akka/camel/TypedConsumerPublishRequestorTest.scala
index 9d034b86fd..358bedc070 100644
--- a/akka-camel-typed/src/test/scala/akka/camel/TypedConsumerPublishRequestorTest.scala
+++ b/akka-camel-typed/src/test/scala/akka/camel/TypedConsumerPublishRequestorTest.scala
@@ -21,10 +21,10 @@ class TypedConsumerPublishRequestorTest extends JUnitSuite {
@Before
def setUp{
- publisher = actorOf(new TypedConsumerPublisherMock)
- requestor = actorOf(new TypedConsumerPublishRequestor)
+ publisher = actorOf(Props(new TypedConsumerPublisherMock))
+ requestor = actorOf(Props(new TypedConsumerPublishRequestor))
requestor ! InitPublishRequestor(publisher)
- consumer = actorOf(new Actor with Consumer {
+ consumer = actorOf(Props(new Actor with Consumer {
def endpointUri = "mock:test"
protected def receive = null
})
diff --git a/akka-camel/src/test/scala/akka/camel/ConsumerPublishRequestorTest.scala b/akka-camel/src/test/scala/akka/camel/ConsumerPublishRequestorTest.scala
index e52295e26b..f77cec4c0b 100644
--- a/akka-camel/src/test/scala/akka/camel/ConsumerPublishRequestorTest.scala
+++ b/akka-camel/src/test/scala/akka/camel/ConsumerPublishRequestorTest.scala
@@ -18,10 +18,10 @@ class ConsumerPublishRequestorTest extends JUnitSuite {
@Before
def setUp{
- publisher = actorOf(new ConsumerPublisherMock)
- requestor = actorOf(new ConsumerPublishRequestor)
+ publisher = actorOf(Props(new ConsumerPublisherMock))
+ requestor = actorOf(Props(new ConsumerPublishRequestor))
requestor ! InitPublishRequestor(publisher)
- consumer = actorOf(new Actor with Consumer {
+ consumer = actorOf(Props(new Actor with Consumer {
def endpointUri = "mock:test"
protected def receive = null
}).asInstanceOf[LocalActorRef]
diff --git a/akka-camel/src/test/scala/akka/camel/ConsumerRegisteredTest.scala b/akka-camel/src/test/scala/akka/camel/ConsumerRegisteredTest.scala
index 5003413d42..2289b5a3d4 100644
--- a/akka-camel/src/test/scala/akka/camel/ConsumerRegisteredTest.scala
+++ b/akka-camel/src/test/scala/akka/camel/ConsumerRegisteredTest.scala
@@ -9,21 +9,21 @@ class ConsumerRegisteredTest extends JUnitSuite {
@Test
def shouldCreateSomeNonBlockingPublishRequestFromConsumer = {
- val c = Actor.actorOf[ConsumerActor1]
+ val c = Actor.actorOf(Props[ConsumerActor1])
val event = ConsumerActorRegistered.eventFor(c)
assert(event === Some(ConsumerActorRegistered(c, consumerOf(c))))
}
@Test
def shouldCreateSomeBlockingPublishRequestFromConsumer = {
- val c = Actor.actorOf[ConsumerActor2]
+ val c = Actor.actorOf(Props[ConsumerActor2])
val event = ConsumerActorRegistered.eventFor(c)
assert(event === Some(ConsumerActorRegistered(c, consumerOf(c))))
}
@Test
def shouldCreateNoneFromConsumer = {
- val event = ConsumerActorRegistered.eventFor(Actor.actorOf[PlainActor])
+ val event = ConsumerActorRegistered.eventFor(Actor.actorOf(Props[PlainActor]))
assert(event === None)
}
diff --git a/akka-camel/src/test/scala/akka/camel/ConsumerScalaTest.scala b/akka-camel/src/test/scala/akka/camel/ConsumerScalaTest.scala
index efe7d6aee1..47dbdbba54 100644
--- a/akka-camel/src/test/scala/akka/camel/ConsumerScalaTest.scala
+++ b/akka-camel/src/test/scala/akka/camel/ConsumerScalaTest.scala
@@ -27,7 +27,7 @@ class ConsumerScalaTest extends WordSpec with BeforeAndAfterAll with MustMatcher
service = CamelServiceFactory.createCamelService
// register test consumer before registering the publish requestor
// and before starting the CamelService (registry is scanned for consumers)
- actorOf(new TestConsumer("direct:publish-test-1"))
+ actorOf(Props(new TestConsumer("direct:publish-test-1")))
service.registerPublishRequestor
service.awaitEndpointActivation(1) {
service.start
@@ -54,7 +54,7 @@ class ConsumerScalaTest extends WordSpec with BeforeAndAfterAll with MustMatcher
"started" must {
"support an in-out message exchange via its endpoint" in {
service.awaitEndpointActivation(1) {
- consumer = actorOf(new TestConsumer("direct:publish-test-2"))
+ consumer = actorOf(Props(new TestConsumer("direct:publish-test-2")))
} must be(true)
mandatoryTemplate.requestBody("direct:publish-test-2", "msg2") must equal("received msg2")
}
@@ -119,7 +119,7 @@ class ConsumerScalaTest extends WordSpec with BeforeAndAfterAll with MustMatcher
"activated with a custom error handler" must {
"handle thrown exceptions by generating a custom response" in {
service.awaitEndpointActivation(1) {
- actorOf[ErrorHandlingConsumer]
+ actorOf(Props[ErrorHandlingConsumer])
} must be(true)
mandatoryTemplate.requestBody("direct:error-handler-test", "hello") must equal("error: hello")
@@ -128,7 +128,7 @@ class ConsumerScalaTest extends WordSpec with BeforeAndAfterAll with MustMatcher
"activated with a custom redelivery handler" must {
"handle thrown exceptions by redelivering the initial message" in {
service.awaitEndpointActivation(1) {
- actorOf[RedeliveringConsumer]
+ actorOf(Props[RedeliveringConsumer])
} must be(true)
mandatoryTemplate.requestBody("direct:redelivery-test", "hello") must equal("accepted: hello")
@@ -143,7 +143,7 @@ class ConsumerScalaTest extends WordSpec with BeforeAndAfterAll with MustMatcher
var consumer: ActorRef = null
service.awaitEndpointActivation(1) {
- consumer = actorOf(new TestAckConsumer("direct:system-ack-test"))
+ consumer = actorOf(Props(new TestAckConsumer("direct:system-ack-test")))
} must be(true)
val endpoint = mandatoryContext.getEndpoint("direct:system-ack-test", classOf[DirectEndpoint])
@@ -169,19 +169,19 @@ class ConsumerScalaTest extends WordSpec with BeforeAndAfterAll with MustMatcher
"A supervised consumer" must {
"be able to reply during receive" in {
- val consumer = Actor.actorOf(new SupervisedConsumer("reply-channel-test-1"))
+ val consumer = Actor.actorOf(Props(new SupervisedConsumer("reply-channel-test-1")))
(consumer ? "succeed").get must equal("ok")
}
"be able to reply on failure during preRestart" in {
- val consumer = Actor.actorOf(new SupervisedConsumer("reply-channel-test-2"))
+ val consumer = Actor.actorOf(Props(new SupervisedConsumer("reply-channel-test-2")))
val supervisor = Supervisor(
SupervisorConfig(
OneForOneStrategy(List(classOf[Exception]), 2, 10000),
Supervise(consumer, Permanent) :: Nil))
val latch = new CountDownLatch(1)
- val sender = Actor.actorOf(new Sender("pr", latch))
+ val sender = Actor.actorOf(Props(new Sender("pr", latch)))
consumer.!("fail")(Some(sender))
latch.await(5, TimeUnit.SECONDS) must be(true)
@@ -195,7 +195,7 @@ class ConsumerScalaTest extends WordSpec with BeforeAndAfterAll with MustMatcher
Supervise(consumer, Temporary) :: Nil))
val latch = new CountDownLatch(1)
- val sender = Actor.actorOf(new Sender("ps", latch))
+ val sender = Actor.actorOf(Props(new Sender("ps", latch)))
consumer.!("fail")(Some(sender))
latch.await(5, TimeUnit.SECONDS) must be(true)
diff --git a/akka-camel/src/test/scala/akka/camel/ProducerFeatureTest.scala b/akka-camel/src/test/scala/akka/camel/ProducerFeatureTest.scala
index c2614d2263..a3c67064f8 100644
--- a/akka-camel/src/test/scala/akka/camel/ProducerFeatureTest.scala
+++ b/akka-camel/src/test/scala/akka/camel/ProducerFeatureTest.scala
@@ -31,7 +31,7 @@ class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with Before
scenario("produce message and receive normal response") {
given("a registered two-way producer")
- val producer = actorOf(new TestProducer("direct:producer-test-2", true))
+ val producer = actorOf(Props(new TestProducer("direct:producer-test-2", true)))
when("a test message is sent to the producer with ?")
val message = Message("test", Map(Message.MessageExchangeId -> "123"))
@@ -44,7 +44,7 @@ class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with Before
scenario("produce message and receive failure response") {
given("a registered two-way producer")
- val producer = actorOf(new TestProducer("direct:producer-test-2"))
+ val producer = actorOf(Props(new TestProducer("direct:producer-test-2")))
when("a test message causing an exception is sent to the producer with ?")
val message = Message("fail", Map(Message.MessageExchangeId -> "123"))
@@ -59,7 +59,7 @@ class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with Before
scenario("produce message oneway") {
given("a registered one-way producer")
- val producer = actorOf(new TestProducer("direct:producer-test-1", true) with Oneway)
+ val producer = actorOf(Props(new TestProducer("direct:producer-test-1", true) with Oneway))
when("a test message is sent to the producer with !")
mockEndpoint.expectedBodiesReceived("TEST")
@@ -71,7 +71,7 @@ class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with Before
scenario("produce message twoway without sender reference") {
given("a registered two-way producer")
- val producer = actorOf(new TestProducer("direct:producer-test-1"))
+ val producer = actorOf(Props(new TestProducer("direct:producer-test-1")))
when("a test message is sent to the producer with !")
mockEndpoint.expectedBodiesReceived("test")
@@ -86,7 +86,7 @@ class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with Before
scenario("produce message and receive normal response") {
given("a registered two-way producer")
- val producer = actorOf(new TestProducer("direct:producer-test-3"))
+ val producer = actorOf(Props(new TestProducer("direct:producer-test-3")))
when("a test message is sent to the producer with ?")
val message = Message("test", Map(Message.MessageExchangeId -> "123"))
@@ -98,7 +98,7 @@ class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with Before
scenario("produce message and receive failure response") {
given("a registered two-way producer")
- val producer = actorOf(new TestProducer("direct:producer-test-3"))
+ val producer = actorOf(Props(new TestProducer("direct:producer-test-3")))
when("a test message causing an exception is sent to the producer with ?")
val message = Message("fail", Map(Message.MessageExchangeId -> "123"))
@@ -116,8 +116,8 @@ class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with Before
scenario("produce message, forward normal response to a replying target actor and receive response") {
given("a registered two-way producer configured with a forward target")
- val target = actorOf[ReplyingForwardTarget]
- val producer = actorOf(new TestForwarder("direct:producer-test-2", target))
+ val target = actorOf(Props[ReplyingForwardTarget])
+ val producer = actorOf(Props(new TestForwarder("direct:producer-test-2", target)))
when("a test message is sent to the producer with ?")
val message = Message("test", Map(Message.MessageExchangeId -> "123"))
@@ -130,8 +130,8 @@ class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with Before
scenario("produce message, forward failure response to a replying target actor and receive response") {
given("a registered two-way producer configured with a forward target")
- val target = actorOf[ReplyingForwardTarget]
- val producer = actorOf(new TestForwarder("direct:producer-test-2", target))
+ val target = actorOf(Props[ReplyingForwardTarget])
+ val producer = actorOf(Props(new TestForwarder("direct:producer-test-2", target)))
when("a test message causing an exception is sent to the producer with ?")
val message = Message("fail", Map(Message.MessageExchangeId -> "123"))
@@ -146,8 +146,8 @@ class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with Before
scenario("produce message, forward normal response to a producing target actor and produce response to direct:forward-test-1") {
given("a registered one-way producer configured with a forward target")
- val target = actorOf[ProducingForwardTarget]
- val producer = actorOf(new TestForwarder("direct:producer-test-2", target))
+ val target = actorOf(Props[ProducingForwardTarget])
+ val producer = actorOf(Props(new TestForwarder("direct:producer-test-2", target)))
when("a test message is sent to the producer with !")
mockEndpoint.expectedBodiesReceived("received test")
@@ -159,8 +159,8 @@ class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with Before
scenario("produce message, forward failure response to a producing target actor and produce response to direct:forward-test-1") {
given("a registered one-way producer configured with a forward target")
- val target = actorOf[ProducingForwardTarget]
- val producer = actorOf(new TestForwarder("direct:producer-test-2", target))
+ val target = actorOf(Props[ProducingForwardTarget])
+ val producer = actorOf(Props(new TestForwarder("direct:producer-test-2", target)))
when("a test message causing an exception is sent to the producer with !")
mockEndpoint.expectedMessageCount(1)
@@ -176,8 +176,8 @@ class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with Before
scenario("produce message, forward normal response to a replying target actor and receive response") {
given("a registered two-way producer configured with a forward target")
- val target = actorOf[ReplyingForwardTarget]
- val producer = actorOf(new TestForwarder("direct:producer-test-3", target))
+ val target = actorOf(Props[ReplyingForwardTarget])
+ val producer = actorOf(Props(new TestForwarder("direct:producer-test-3", target)))
when("a test message is sent to the producer with ?")
val message = Message("test", Map(Message.MessageExchangeId -> "123"))
@@ -190,8 +190,8 @@ class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with Before
scenario("produce message, forward failure response to a replying target actor and receive response") {
given("a registered two-way producer configured with a forward target")
- val target = actorOf[ReplyingForwardTarget]
- val producer = actorOf(new TestForwarder("direct:producer-test-3", target))
+ val target = actorOf(Props[ReplyingForwardTarget])
+ val producer = actorOf(Props(new TestForwarder("direct:producer-test-3", target)))
when("a test message causing an exception is sent to the producer with ?")
val message = Message("fail", Map(Message.MessageExchangeId -> "123"))
@@ -206,8 +206,8 @@ class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with Before
scenario("produce message, forward normal response to a producing target actor and produce response to direct:forward-test-1") {
given("a registered one-way producer configured with a forward target")
- val target = actorOf[ProducingForwardTarget]
- val producer = actorOf(new TestForwarder("direct:producer-test-3", target))
+ val target = actorOf(Props[ProducingForwardTarget])
+ val producer = actorOf(Props(new TestForwarder("direct:producer-test-3", target)))
when("a test message is sent to the producer with !")
mockEndpoint.expectedBodiesReceived("received test")
@@ -219,8 +219,8 @@ class ProducerFeatureTest extends FeatureSpec with BeforeAndAfterAll with Before
scenario("produce message, forward failure response to a producing target actor and produce response to direct:forward-test-1") {
given("a registered one-way producer configured with a forward target")
- val target = actorOf[ProducingForwardTarget]
- val producer = actorOf(new TestForwarder("direct:producer-test-3", target))
+ val target = actorOf(Props[ProducingForwardTarget])
+ val producer = actorOf(Props(new TestForwarder("direct:producer-test-3", target)))
when("a test message causing an exception is sent to the producer with !")
mockEndpoint.expectedMessageCount(1)
@@ -271,7 +271,7 @@ object ProducerFeatureTest {
}
class TestRoute extends RouteBuilder {
- val responder = actorOf[TestResponder]
+ val responder = actorOf(Props[TestResponder])
def configure {
from("direct:forward-test-1").to("mock:mock")
// for one-way messaging tests
diff --git a/akka-camel/src/test/scala/akka/camel/component/ActorComponentFeatureTest.scala b/akka-camel/src/test/scala/akka/camel/component/ActorComponentFeatureTest.scala
index 24fc306268..a86577e92b 100644
--- a/akka-camel/src/test/scala/akka/camel/component/ActorComponentFeatureTest.scala
+++ b/akka-camel/src/test/scala/akka/camel/component/ActorComponentFeatureTest.scala
@@ -33,7 +33,7 @@ class ActorComponentFeatureTest extends FeatureSpec with BeforeAndAfterAll with
import CamelContextManager.mandatoryTemplate
scenario("one-way communication") {
- val actor = actorOf[Tester1]
+ val actor = actorOf(Props[Tester1])
val latch = (actor ? SetExpectedMessageCount(1)).as[CountDownLatch].get
mandatoryTemplate.sendBody("actor:uuid:%s" format actor.uuid, "Martin")
assert(latch.await(5000, TimeUnit.MILLISECONDS))
@@ -42,7 +42,7 @@ class ActorComponentFeatureTest extends FeatureSpec with BeforeAndAfterAll with
}
scenario("two-way communication") {
- val actor = actorOf[Tester2]
+ val actor = actorOf(Props[Tester2])
assert(mandatoryTemplate.requestBody("actor:uuid:%s" format actor.uuid, "Martin") === "Hello Martin")
}
@@ -70,7 +70,7 @@ class ActorComponentFeatureTest extends FeatureSpec with BeforeAndAfterAll with
import CamelContextManager.mandatoryTemplate
scenario("one-way communication") {
- val actor = actorOf[Tester1]
+ val actor = actorOf(Props[Tester1])
val latch = (actor ? SetExpectedMessageCount(1)).as[CountDownLatch].get
mandatoryTemplate.sendBody("actor:%s" format actor.address, "Martin")
assert(latch.await(5000, TimeUnit.MILLISECONDS))
@@ -79,12 +79,12 @@ class ActorComponentFeatureTest extends FeatureSpec with BeforeAndAfterAll with
}
scenario("two-way communication") {
- val actor = actorOf[Tester2]
+ val actor = actorOf(Props[Tester2])
assert(mandatoryTemplate.requestBody("actor:%s" format actor.address, "Martin") === "Hello Martin")
}
scenario("two-way communication via a custom route") {
- val actor = actorOf[CustomIdActor]("custom-id")
+ val actor = actorOf(Props[CustomIdActor]("custom-id"))
assert(mandatoryTemplate.requestBody("direct:custom-id-test-1", "Martin") === "Received Martin")
assert(mandatoryTemplate.requestBody("direct:custom-id-test-2", "Martin") === "Received Martin")
}
@@ -113,8 +113,8 @@ object ActorComponentFeatureTest {
}
class TestRoute extends RouteBuilder {
- val failWithMessage = actorOf[FailWithMessage]
- val failWithException = actorOf[FailWithException]
+ val failWithMessage = actorOf(Props[FailWithMessage])
+ val failWithException = actorOf(Props[FailWithException])
def configure {
from("direct:custom-id-test-1").to("actor:custom-id")
from("direct:custom-id-test-2").to("actor:id:custom-id")
diff --git a/akka-camel/src/test/scala/akka/camel/component/ActorProducerTest.scala b/akka-camel/src/test/scala/akka/camel/component/ActorProducerTest.scala
index f8e4aeec0a..31cbd33b0e 100644
--- a/akka-camel/src/test/scala/akka/camel/component/ActorProducerTest.scala
+++ b/akka-camel/src/test/scala/akka/camel/component/ActorProducerTest.scala
@@ -23,7 +23,7 @@ class ActorProducerTest extends JUnitSuite with BeforeAndAfterAll {
@Test
def shouldSendMessageToActorWithSyncProcessor = {
- val actor = actorOf[Tester1]
+ val actor = actorOf(Props[Tester1])
val latch = (actor ? SetExpectedMessageCount(1)).as[CountDownLatch].get
val endpoint = actorEndpoint("actor:uuid:%s" format actor.uuid)
val exchange = endpoint.createExchange(ExchangePattern.InOnly)
@@ -38,7 +38,7 @@ class ActorProducerTest extends JUnitSuite with BeforeAndAfterAll {
@Test
def shouldSendMessageToActorWithAsyncProcessor = {
- val actor = actorOf[Tester1]
+ val actor = actorOf(Props[Tester1])
val latch = (actor ? SetExpectedMessageCount(1)).as[CountDownLatch].get
val endpoint = actorEndpoint("actor:uuid:%s" format actor.uuid)
val exchange = endpoint.createExchange(ExchangePattern.InOnly)
@@ -53,7 +53,7 @@ class ActorProducerTest extends JUnitSuite with BeforeAndAfterAll {
@Test
def shouldSendMessageToActorAndReceiveResponseWithSyncProcessor = {
- val actor = actorOf(new Tester2 {
+ val actor = actorOf(Props(new Tester2 {
override def response(msg: Message) = Message(super.response(msg), Map("k2" -> "v2"))
- })
+ }))
val endpoint = actorEndpoint("actor:uuid:%s" format actor.uuid)
@@ -67,7 +67,7 @@ class ActorProducerTest extends JUnitSuite with BeforeAndAfterAll {
@Test
def shouldSendMessageToActorAndReceiveResponseWithAsyncProcessor = {
- val actor = actorOf(new Tester2 {
+ val actor = actorOf(Props(new Tester2 {
override def response(msg: Message) = Message(super.response(msg), Map("k2" -> "v2"))
- })
+ }))
val completion = expectAsyncCompletion
@@ -83,7 +83,7 @@ class ActorProducerTest extends JUnitSuite with BeforeAndAfterAll {
@Test
def shouldSendMessageToActorAndReceiveFailureWithAsyncProcessor = {
- val actor = actorOf(new Tester2 {
+ val actor = actorOf(Props(new Tester2 {
override def response(msg: Message) = Failure(new Exception("testmsg"), Map("k3" -> "v3"))
- })
+ }))
val completion = expectAsyncCompletion
@@ -100,7 +100,7 @@ class ActorProducerTest extends JUnitSuite with BeforeAndAfterAll {
@Test
def shouldSendMessageToActorAndReceiveAckWithAsyncProcessor = {
- val actor = actorOf(new Tester2 {
+ val actor = actorOf(Props(new Tester2 {
override def response(msg: Message) = akka.camel.Ack
- })
+ }))
val completion = expectAsyncCompletion
@@ -115,8 +115,8 @@ class ActorProducerTest extends JUnitSuite with BeforeAndAfterAll {
@Test
def shouldDynamicallyRouteMessageToActorWithDefaultId = {
- val actor1 = actorOf[Tester1]("x")
- val actor2 = actorOf[Tester1]("y")
+ val actor1 = actorOf(Props[Tester1]("x"))
+ val actor2 = actorOf(Props[Tester1]("y"))
actor1
actor2
val latch1 = (actor1 ? SetExpectedMessageCount(1)).as[CountDownLatch].get
@@ -139,8 +139,8 @@ class ActorProducerTest extends JUnitSuite with BeforeAndAfterAll {
@Test
def shouldDynamicallyRouteMessageToActorWithoutDefaultId = {
- val actor1 = actorOf[Tester1]("x")
- val actor2 = actorOf[Tester1]("y")
+ val actor1 = actorOf(Props[Tester1]("x"))
+ val actor2 = actorOf(Props[Tester1]("y"))
actor1
actor2
val latch1 = (actor1 ? SetExpectedMessageCount(1)).as[CountDownLatch].get
@@ -164,8 +164,8 @@ class ActorProducerTest extends JUnitSuite with BeforeAndAfterAll {
@Test
def shouldDynamicallyRouteMessageToActorWithDefaultUuid = {
- val actor1 = actorOf[Tester1]
- val actor2 = actorOf[Tester1]
+ val actor1 = actorOf(Props[Tester1])
+ val actor2 = actorOf(Props[Tester1])
val latch1 = (actor1 ? SetExpectedMessageCount(1)).as[CountDownLatch].get
val latch2 = (actor2 ? SetExpectedMessageCount(1)).as[CountDownLatch].get
val endpoint = actorEndpoint("actor:uuid:%s" format actor1.uuid)
@@ -186,8 +186,8 @@ class ActorProducerTest extends JUnitSuite with BeforeAndAfterAll {
@Test
def shouldDynamicallyRouteMessageToActorWithoutDefaultUuid = {
- val actor1 = actorOf[Tester1]
- val actor2 = actorOf[Tester1]
+ val actor1 = actorOf(Props[Tester1])
+ val actor2 = actorOf(Props[Tester1])
val latch1 = (actor1 ? SetExpectedMessageCount(1)).as[CountDownLatch].get
val latch2 = (actor2 ? SetExpectedMessageCount(1)).as[CountDownLatch].get
val endpoint = actorEndpoint("actor:uuid:")
@@ -209,7 +209,7 @@ class ActorProducerTest extends JUnitSuite with BeforeAndAfterAll {
@Test
def shouldThrowExceptionWhenIdNotSet{
- val actor = actorOf[Tester1]
+ val actor = actorOf(Props[Tester1])
val latch = (actor ? SetExpectedMessageCount(1)).as[CountDownLatch].get
val endpoint = actorEndpoint("actor:id:")
intercept[ActorIdentifierNotSetException] {
@@ -219,7 +219,7 @@ class ActorProducerTest extends JUnitSuite with BeforeAndAfterAll {
@Test
def shouldThrowExceptionWhenUuidNotSet{
- val actor = actorOf[Tester1]
+ val actor = actorOf(Props[Tester1])
val latch = (actor ? SetExpectedMessageCount(1)).as[CountDownLatch].get
val endpoint = actorEndpoint("actor:uuid:")
intercept[ActorIdentifierNotSetException] {
diff --git a/akka-cluster/src/main/scala/akka/cluster/BookKeeperServer.scala b/akka-cluster/src/main/scala/akka/cluster/BookKeeperServer.scala
index e546d2d9af..7c3b57969d 100644
--- a/akka-cluster/src/main/scala/akka/cluster/BookKeeperServer.scala
+++ b/akka-cluster/src/main/scala/akka/cluster/BookKeeperServer.scala
@@ -21,9 +21,6 @@ entry number it will use MAX_INTEGER). Once all the entries have been processed,
new one for its use.
*/
-/**
- * @author Jonas Bonér
- */
object BookKeeperServer {
val port = 3181
val zkServers = "localhost:2181"
diff --git a/akka-cluster/src/main/scala/akka/cluster/Cluster.scala b/akka-cluster/src/main/scala/akka/cluster/Cluster.scala
index 356a4461bd..5a3f115ef8 100644
--- a/akka-cluster/src/main/scala/akka/cluster/Cluster.scala
+++ b/akka-cluster/src/main/scala/akka/cluster/Cluster.scala
@@ -57,8 +57,6 @@ import com.google.protobuf.ByteString
/**
* JMX MBean for the cluster service.
- *
- * @author Jonas Bonér
*/
trait ClusterNodeMBean {
@@ -140,8 +138,6 @@ trait ClusterNodeMBean {
/**
* Module for the Cluster. Also holds global state such as configuration data etc.
- *
- * @author Jonas Bonér
*/
object Cluster {
val EMPTY_STRING = "".intern
@@ -257,8 +253,6 @@ object Cluster {
*
* /clusterName/'actor-address-to-uuids'/actorAddress/actorUuid
*
- *
- * @author Jonas Bonér
*/
class DefaultClusterNode private[akka] (
val nodeAddress: NodeAddress,
@@ -1601,9 +1595,6 @@ class DefaultClusterNode private[akka] (
}
}
-/**
- * @author Jonas Bonér
- */
class MembershipChildListener(self: ClusterNode) extends IZkChildListener with ErrorHandler {
def handleChildChange(parentPath: String, currentChilds: JList[String]) {
withErrorHandler {
@@ -1643,9 +1634,6 @@ class MembershipChildListener(self: ClusterNode) extends IZkChildListener with E
}
}
-/**
- * @author Jonas Bonér
- */
class StateListener(self: ClusterNode) extends IZkStateListener {
def handleStateChanged(state: KeeperState) {
state match {
@@ -1671,9 +1659,6 @@ class StateListener(self: ClusterNode) extends IZkStateListener {
}
}
-/**
- * @author Jonas Bonér
- */
trait ErrorHandler {
def withErrorHandler[T](body: ⇒ T) = {
try {
@@ -1686,9 +1671,6 @@ trait ErrorHandler {
}
}
-/**
- * @author Jonas Bonér
- */
object RemoteClusterDaemon {
val Address = "akka-cluster-daemon".intern
@@ -1700,8 +1682,6 @@ object RemoteClusterDaemon {
* Internal "daemon" actor for cluster internal communication.
*
* It acts as the brain of the cluster that responds to cluster events (messages) and undertakes action.
- *
- * @author Jonas Bonér
*/
class RemoteClusterDaemon(cluster: ClusterNode) extends Actor {
diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterActorRef.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterActorRef.scala
index a61ca3a6e1..84d23af736 100644
--- a/akka-cluster/src/main/scala/akka/cluster/ClusterActorRef.scala
+++ b/akka-cluster/src/main/scala/akka/cluster/ClusterActorRef.scala
@@ -21,8 +21,6 @@ import annotation.tailrec
/**
* ClusterActorRef factory and locator.
- *
- * @author Jonas Bonér
*/
object ClusterActorRef {
import FailureDetectorType._
@@ -77,8 +75,6 @@ object ClusterActorRef {
/**
* ActorRef representing a one or many instances of a clustered, load-balanced and sometimes replicated actor
* where the instances can reside on other nodes in the cluster.
- *
- * @author Jonas Bonér
*/
private[akka] class ClusterActorRef(props: RoutedProps, val address: String) extends AbstractRoutedActorRef(props) {
diff --git a/akka-cluster/src/main/scala/akka/cluster/ClusterDeployer.scala b/akka-cluster/src/main/scala/akka/cluster/ClusterDeployer.scala
index c9ac211821..4cc791fd89 100644
--- a/akka-cluster/src/main/scala/akka/cluster/ClusterDeployer.scala
+++ b/akka-cluster/src/main/scala/akka/cluster/ClusterDeployer.scala
@@ -24,10 +24,6 @@ import java.util.concurrent.{ CountDownLatch, TimeUnit }
/**
* A ClusterDeployer is responsible for deploying a Deploy.
- *
- * FIXME Document: what does Deploy mean?
- *
- * @author Jonas Bonér
*/
object ClusterDeployer extends ActorDeployer {
val clusterName = Cluster.name
diff --git a/akka-cluster/src/main/scala/akka/cluster/TransactionLog.scala b/akka-cluster/src/main/scala/akka/cluster/TransactionLog.scala
index 7643a0bd31..7d593437ae 100644
--- a/akka-cluster/src/main/scala/akka/cluster/TransactionLog.scala
+++ b/akka-cluster/src/main/scala/akka/cluster/TransactionLog.scala
@@ -26,19 +26,12 @@ import java.util.Enumeration
// FIXME allow user to choose dynamically between 'async' and 'sync' tx logging (asyncAddEntry(byte[] data, AddCallback cb, Object ctx))
// FIXME clean up old entries in log after doing a snapshot
-/**
- * @author Jonas Bonér
- */
class ReplicationException(message: String, cause: Throwable = null) extends AkkaException(message) {
def this(msg: String) = this(msg, null)
}
/**
- * TODO: Explain something about threadsafety.
- *
* A TransactionLog makes chunks of data durable.
- *
- * @author Jonas Bonér
*/
class TransactionLog private (
ledger: LedgerHandle,
@@ -352,7 +345,7 @@ class TransactionLog private (
}
/**
- * @author Jonas Bonér
+ * TODO: Documentation.
*/
object TransactionLog {
@@ -563,8 +556,6 @@ object TransactionLog {
/**
* TODO: Documentation.
- *
- * @author Jonas Bonér
*/
object LocalBookKeeperEnsemble {
private val isRunning = new Switch(false)
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/reflogic/ClusterActorRefCleanupMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/reflogic/ClusterActorRefCleanupMultiJvmSpec.scala
index 87e2799da3..e723959c86 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/reflogic/ClusterActorRefCleanupMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/reflogic/ClusterActorRefCleanupMultiJvmSpec.scala
@@ -37,7 +37,7 @@ class ClusterActorRefCleanupMultiJvmNode1 extends MasterClusterTestNode {
Cluster.node.start()
barrier("awaitStarted", NrOfNodes).await()
- val ref = Actor.actorOf[ClusterActorRefCleanupMultiJvmSpec.TestActor]("service-test")
+ val ref = Actor.actorOf(Props[ClusterActorRefCleanupMultiJvmSpec.TestActor]("service-test"))
ref.isInstanceOf[ClusterActorRef] must be(true)
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmSpec.scala
index b337007754..1d787c6572 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/nosnapshot/ReplicationTransactionLogWriteBehindNoSnapshotMultiJvmSpec.scala
@@ -42,7 +42,7 @@
// }
// barrier("create-actor-on-node1", NrOfNodes) {
-// val actorRef = Actor.actorOf[HelloWorld]("hello-world-write-behind-nosnapshot")
+// val actorRef = Actor.actorOf(Props[HelloWorld]("hello-world-write-behind-nosnapshot"))
// // node.isInUseOnNode("hello-world") must be(true)
// actorRef.address must be("hello-world-write-behind-nosnapshot")
// for (i ← 0 until 10) {
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/snapshot/ReplicationTransactionLogWriteBehindSnapshotMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/snapshot/ReplicationTransactionLogWriteBehindSnapshotMultiJvmSpec.scala
index b60891300c..7f3a6fc683 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/snapshot/ReplicationTransactionLogWriteBehindSnapshotMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writebehind/snapshot/ReplicationTransactionLogWriteBehindSnapshotMultiJvmSpec.scala
@@ -44,7 +44,7 @@
// }
// barrier("create-actor-on-node1", NrOfNodes) {
-// val actorRef = Actor.actorOf[HelloWorld]("hello-world-write-behind-snapshot")
+// val actorRef = Actor.actorOf(Props[HelloWorld]("hello-world-write-behind-snapshot"))
// node.isInUseOnNode("hello-world-write-behind-snapshot") must be(true)
// actorRef.address must be("hello-world-write-behind-snapshot")
// var counter = 0
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteThroughNoSnapshotMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteThroughNoSnapshotMultiJvmSpec.scala
index fd2ff324da..2626d0fe8f 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteThroughNoSnapshotMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/nosnapshot/ReplicationTransactionLogWriteThroughNoSnapshotMultiJvmSpec.scala
@@ -44,7 +44,7 @@
// }
// barrier("create-actor-on-node1", NrOfNodes) {
-// val actorRef = Actor.actorOf[HelloWorld]("hello-world-write-through-nosnapshot")
+// val actorRef = Actor.actorOf(Props[HelloWorld]("hello-world-write-through-nosnapshot"))
// actorRef.address must be("hello-world-write-through-nosnapshot")
// for (i ← 0 until 10)
// (actorRef ? Count(i)).as[String] must be(Some("World from node [node1]"))
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/snapshot/ReplicationTransactionLogWriteThroughSnapshotMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/snapshot/ReplicationTransactionLogWriteThroughSnapshotMultiJvmSpec.scala
index 583662ebe3..fe2231715e 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/snapshot/ReplicationTransactionLogWriteThroughSnapshotMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/replication/transactionlog/writethrough/snapshot/ReplicationTransactionLogWriteThroughSnapshotMultiJvmSpec.scala
@@ -42,7 +42,7 @@
// }
// barrier("create-actor-on-node1", NrOfNodes) {
-// val actorRef = Actor.actorOf[HelloWorld]("hello-world-write-through-snapshot")
+// val actorRef = Actor.actorOf(Props[HelloWorld]("hello-world-write-through-snapshot"))
// node.isInUseOnNode("hello-world-write-through-snapshot") must be(true)
// actorRef.address must be("hello-world-write-through-snapshot")
// var counter = 0
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/failover/DirectRoutingFailoverMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/failover/DirectRoutingFailoverMultiJvmSpec.scala
index 46463f6537..260a365019 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/failover/DirectRoutingFailoverMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/failover/DirectRoutingFailoverMultiJvmSpec.scala
@@ -44,7 +44,7 @@ class DirectRoutingFailoverMultiJvmNode1 extends MasterClusterTestNode {
}
LocalCluster.barrier("actor-creation", NrOfNodes) {
- actor = Actor.actorOf[SomeActor]("service-hello")
+ actor = Actor.actorOf(Props[SomeActor]("service-hello"))
}
LocalCluster.barrier("verify-actor", NrOfNodes) {
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/homenode/HomeNode1MultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/homenode/HomeNode1MultiJvmSpec.scala
index 2b84c0c3c9..6ce2219978 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/homenode/HomeNode1MultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/homenode/HomeNode1MultiJvmSpec.scala
@@ -45,11 +45,11 @@ class HomeNodeMultiJvmNode2 extends ClusterTestNode {
Cluster.node.start()
barrier("waiting-for-begin", NrOfNodes).await()
- val actorNode1 = Actor.actorOf[SomeActor]("service-node1")
+ val actorNode1 = Actor.actorOf(Props[SomeActor]("service-node1"))
val name1 = (actorNode1 ? "identify").get.asInstanceOf[String]
name1 must equal("node1")
- val actorNode2 = Actor.actorOf[SomeActor]("service-node2")
+ val actorNode2 = Actor.actorOf(Props[SomeActor]("service-node2"))
val name2 = (actorNode2 ? "identify").get.asInstanceOf[String]
name2 must equal("node2")
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/normalusage/SingleReplicaDirectRoutingMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/normalusage/SingleReplicaDirectRoutingMultiJvmSpec.scala
index e96804dfe2..a7b61af3e7 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/normalusage/SingleReplicaDirectRoutingMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/direct/normalusage/SingleReplicaDirectRoutingMultiJvmSpec.scala
@@ -48,7 +48,7 @@ class SingleReplicaDirectRoutingMultiJvmNode2 extends ClusterTestNode {
Cluster.node.start()
LocalCluster.barrier("waiting-for-begin", NrOfNodes).await()
- val actor = Actor.actorOf[SomeActor]("service-hello").asInstanceOf[ClusterActorRef]
+ val actor = Actor.actorOf(Props[SomeActor]("service-hello")).asInstanceOf[ClusterActorRef]
actor.isRunning must be(true)
val result = (actor ? "identify").get
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/failover/RandomFailoverMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/failover/RandomFailoverMultiJvmSpec.scala
index e13688f2dd..2842c55a97 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/failover/RandomFailoverMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/failover/RandomFailoverMultiJvmSpec.scala
@@ -49,7 +49,7 @@ class RandomFailoverMultiJvmNode1 extends MasterClusterTestNode {
}
barrier("actor-creation", NrOfNodes) {
- actor = Actor.actorOf[SomeActor]("service-hello")
+ actor = Actor.actorOf(Props[SomeActor]("service-hello"))
actor.isInstanceOf[ClusterActorRef] must be(true)
}
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/homenode/HomeNodeMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/homenode/HomeNodeMultiJvmSpec.scala
index f29e441864..a8f4887464 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/homenode/HomeNodeMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/homenode/HomeNodeMultiJvmSpec.scala
@@ -45,11 +45,11 @@ class HomeNodeMultiJvmNode2 extends ClusterTestNode {
Cluster.node.start()
barrier("waiting-for-begin", NrOfNodes).await()
- val actorNode1 = Actor.actorOf[SomeActor]("service-node1")
+ val actorNode1 = Actor.actorOf(Props[SomeActor]("service-node1"))
val nameNode1 = (actorNode1 ? "identify").get.asInstanceOf[String]
nameNode1 must equal("node1")
- val actorNode2 = Actor.actorOf[SomeActor]("service-node2")
+ val actorNode2 = Actor.actorOf(Props[SomeActor]("service-node2"))
val nameNode2 = (actorNode2 ? "identify").get.asInstanceOf[String]
nameNode2 must equal("node2")
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/replicationfactor_1/Random1ReplicaMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/replicationfactor_1/Random1ReplicaMultiJvmSpec.scala
index ddda4a07b7..c9e0412be2 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/replicationfactor_1/Random1ReplicaMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/replicationfactor_1/Random1ReplicaMultiJvmSpec.scala
@@ -36,7 +36,7 @@ class Random1ReplicaMultiJvmNode1 extends MasterClusterTestNode {
"create clustered actor, get a 'local' actor on 'home' node and a 'ref' to actor on remote node" in {
Cluster.node.start()
- var hello = Actor.actorOf[HelloWorld]("service-hello")
+ var hello = Actor.actorOf(Props[HelloWorld]("service-hello"))
hello must not equal (null)
hello.address must equal("service-hello")
hello.isInstanceOf[ClusterActorRef] must be(true)
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/replicationfactor_3/Random3ReplicasMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/replicationfactor_3/Random3ReplicasMultiJvmSpec.scala
index 41f54911e1..edb000b566 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/replicationfactor_3/Random3ReplicasMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/random/replicationfactor_3/Random3ReplicasMultiJvmSpec.scala
@@ -64,7 +64,7 @@ class Random3ReplicasMultiJvmNode2 extends ClusterTestNode {
//check if the actorRef is the expected remoteActorRef.
var hello: ActorRef = null
- hello = Actor.actorOf[HelloWorld]("service-hello")
+ hello = Actor.actorOf(Props[HelloWorld]("service-hello"))
hello must not equal (null)
hello.address must equal("service-hello")
hello.isInstanceOf[ClusterActorRef] must be(true)
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/failover/RoundRobinFailoverMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/failover/RoundRobinFailoverMultiJvmSpec.scala
index 5b8791231d..63cd6c6313 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/failover/RoundRobinFailoverMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/failover/RoundRobinFailoverMultiJvmSpec.scala
@@ -49,7 +49,7 @@ class RoundRobinFailoverMultiJvmNode1 extends MasterClusterTestNode {
}
barrier("actor-creation", NrOfNodes) {
- actor = Actor.actorOf[SomeActor]("service-hello")
+ actor = Actor.actorOf(Props[SomeActor]("service-hello"))
actor.isInstanceOf[ClusterActorRef] must be(true)
}
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/homenode/HomeNodeMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/homenode/HomeNodeMultiJvmSpec.scala
index bb5499be58..4dc9e96429 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/homenode/HomeNodeMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/homenode/HomeNodeMultiJvmSpec.scala
@@ -48,11 +48,11 @@ class HomeNodeMultiJvmNode2 extends ClusterTestNode {
Cluster.node.start()
barrier("waiting-for-begin", NrOfNodes).await()
- val actorNode1 = Actor.actorOf[SomeActor]("service-node1")
+ val actorNode1 = Actor.actorOf(Props[SomeActor]("service-node1"))
val name1 = (actorNode1 ? "identify").get.asInstanceOf[String]
name1 must equal("node1")
- val actorNode2 = Actor.actorOf[SomeActor]("service-node2")
+ val actorNode2 = Actor.actorOf(Props[SomeActor]("service-node2"))
val name2 = (actorNode2 ? "identify").get.asInstanceOf[String]
name2 must equal("node2")
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/replicationfactor_1/RoundRobin1ReplicaMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/replicationfactor_1/RoundRobin1ReplicaMultiJvmSpec.scala
index c229d2c6c9..35938749ba 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/replicationfactor_1/RoundRobin1ReplicaMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/replicationfactor_1/RoundRobin1ReplicaMultiJvmSpec.scala
@@ -35,7 +35,7 @@ class RoundRobin1ReplicaMultiJvmNode1 extends MasterClusterTestNode {
"create clustered actor, get a 'local' actor on 'home' node and a 'ref' to actor on remote node" in {
Cluster.node.start()
- var hello = Actor.actorOf[HelloWorld]("service-hello")
+ var hello = Actor.actorOf(Props[HelloWorld]("service-hello"))
hello must not equal (null)
hello.address must equal("service-hello")
hello.isInstanceOf[ClusterActorRef] must be(true)
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/replicationfactor_2/RoundRobin2ReplicasMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/replicationfactor_2/RoundRobin2ReplicasMultiJvmSpec.scala
index 63a1f04ce7..a99dbbbae9 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/replicationfactor_2/RoundRobin2ReplicasMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/replicationfactor_2/RoundRobin2ReplicasMultiJvmSpec.scala
@@ -89,7 +89,7 @@ class RoundRobin2ReplicasMultiJvmNode2 extends ClusterTestNode {
//check if the actorRef is the expected remoteActorRef.
var hello: ActorRef = null
barrier("get-ref-to-actor-on-node2", NrOfNodes) {
- hello = Actor.actorOf[HelloWorld]("service-hello")
+ hello = Actor.actorOf(Props[HelloWorld]("service-hello"))
hello must not equal (null)
hello.address must equal("service-hello")
hello.isInstanceOf[ClusterActorRef] must be(true)
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/replicationfactor_3/RoundRobin3ReplicasMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/replicationfactor_3/RoundRobin3ReplicasMultiJvmSpec.scala
index 93bfbf4c47..ead1d693c0 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/replicationfactor_3/RoundRobin3ReplicasMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/roundrobin/replicationfactor_3/RoundRobin3ReplicasMultiJvmSpec.scala
@@ -91,7 +91,7 @@
// //check if the actorRef is the expected remoteActorRef.
// var hello: ActorRef = null
// barrier("get-ref-to-actor-on-node2", NrOfNodes) {
-// hello = Actor.actorOf[HelloWorld]("service-hello")
+// hello = Actor.actorOf(Props[HelloWorld]("service-hello"))
// hello must not equal (null)
// hello.address must equal("service-hello")
// hello.isInstanceOf[ClusterActorRef] must be(true)
diff --git a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/scattergather/failover/ScatterGatherFailoverMultiJvmSpec.scala b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/scattergather/failover/ScatterGatherFailoverMultiJvmSpec.scala
index b19571f5a4..90f9e0aa56 100644
--- a/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/scattergather/failover/ScatterGatherFailoverMultiJvmSpec.scala
+++ b/akka-cluster/src/multi-jvm/scala/akka/cluster/routing/scattergather/failover/ScatterGatherFailoverMultiJvmSpec.scala
@@ -63,7 +63,7 @@ class ScatterGatherFailoverMultiJvmNode1 extends MasterClusterTestNode {
/*
FIXME: Uncomment, when custom routers will be fully supported (ticket #1109)
- val actor = Actor.actorOf[TestActor]("service-hello").asInstanceOf[ClusterActorRef]
+ val actor = Actor.actorOf(Props[TestActor]("service-hello")).asInstanceOf[ClusterActorRef]
identifyConnections(actor).size() must be(2)
diff --git a/akka-cluster/src/test/scala/akka/cluster/sample/ComputeGridSample.scala b/akka-cluster/src/test/scala/akka/cluster/sample/ComputeGridSample.scala
index 4cf7a7010f..7a3a9ca606 100644
--- a/akka-cluster/src/test/scala/akka/cluster/sample/ComputeGridSample.scala
+++ b/akka-cluster/src/test/scala/akka/cluster/sample/ComputeGridSample.scala
@@ -7,9 +7,6 @@ package akka.cluster.sample
import akka.cluster._
import akka.dispatch.Futures
-/**
- * @author Jonas Bonér
- */
object ComputeGridSample {
//sample.cluster.ComputeGridSample.fun2
diff --git a/akka-docs/_sphinx/themes/akka/layout.html b/akka-docs/_sphinx/themes/akka/layout.html
index 0bd735c446..0d46ef708e 100644
--- a/akka-docs/_sphinx/themes/akka/layout.html
+++ b/akka-docs/_sphinx/themes/akka/layout.html
@@ -6,6 +6,7 @@
{% extends "basic/layout.html" %}
{% set script_files = script_files + ['_static/theme_extras.js'] %}
{% set css_files = css_files + ['_static/print.css'] %}
+{% set is_snapshot = version.endswith("-SNAPSHOT") %}
{# do not display relbars #}
{% block relbar1 %}{% endblock %}
@@ -37,7 +38,11 @@
{%- endif -%}
diff --git a/akka-docs/additional/benchmarks.rst b/akka-docs/additional/benchmarks.rst
index 6008b98f05..6080203e91 100644
--- a/akka-docs/additional/benchmarks.rst
+++ b/akka-docs/additional/benchmarks.rst
@@ -11,7 +11,6 @@ Simple Trading system.
Compares:
-- Synchronous Scala solution
- Scala library Actors
- Fire-forget
diff --git a/akka-docs/additional/external-sample-projects.rst b/akka-docs/additional/external-sample-projects.rst
index 35a54c3c80..80e56823af 100644
--- a/akka-docs/additional/external-sample-projects.rst
+++ b/akka-docs/additional/external-sample-projects.rst
@@ -56,22 +56,6 @@ Sample parallel computing with Akka and Scala API
``_
-Akka, Facebook Graph API, WebGL sample
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Showcasing Akka Mist HTTP module
-``_
-
-Akka Mist Sample
-^^^^^^^^^^^^^^^^
-
-``_
-
-Another Akka Mist Sample
-^^^^^^^^^^^^^^^^^^^^^^^^
-
-``_
-
Bank application
^^^^^^^^^^^^^^^^
diff --git a/akka-docs/additional/index.rst b/akka-docs/additional/index.rst
index ca92363ee3..e6bb72762d 100644
--- a/akka-docs/additional/index.rst
+++ b/akka-docs/additional/index.rst
@@ -11,4 +11,3 @@ Additional Information
companies-using-akka
third-party-integrations
language-bindings
- stability-matrix
diff --git a/akka-docs/additional/stability-matrix.rst b/akka-docs/additional/stability-matrix.rst
deleted file mode 100644
index 61e5d247fb..0000000000
--- a/akka-docs/additional/stability-matrix.rst
+++ /dev/null
@@ -1,33 +0,0 @@
-Feature Stability Matrix
-========================
-
-Akka is comprised of a number if modules, with different levels of maturity and in different parts of their lifecycle, the matrix below gives you get current stability level of the modules.
-
-Explanation of the different levels of stability
-------------------------------------------------
-
-* **Solid** - Proven solid in heavy production usage
-* **Stable** - Ready for use in production environment
-* **In progress** - Not enough feedback/use to claim it's ready for production use
-
-================================ ============ ============ ============
-Feature Solid Stable In progress
-================================ ============ ============ ============
-Actors (Scala) Solid
-Actors (Java) Solid
-Typed Actors (Scala) Solid
-Typed Actors (Java) Solid
-STM (Scala) Solid
-STM (Java) Solid
-Transactors (Scala) Solid
-Transactors (Java) Solid
-Remote Actors (Scala) Solid
-Remote Actors (Java) Solid
-Camel Solid
-AMQP Solid
-HTTP Solid
-Integration Guice Stable
-Integration Spring Stable
-Scheduler Solid
-Redis Pub Sub In progress
-================================ ============ ============ ============
diff --git a/akka-docs/cluster/durable-mailbox.rst b/akka-docs/cluster/durable-mailbox.rst
deleted file mode 100644
index 774008c6da..0000000000
--- a/akka-docs/cluster/durable-mailbox.rst
+++ /dev/null
@@ -1,282 +0,0 @@
-
-.. _durable-mailboxes:
-
-###################
- Durable Mailboxes
-###################
-
-Overview
-========
-
-Akka supports a set of durable mailboxes. A durable mailbox is a replacement for
-the standard actor mailbox that is durable. What this means in practice is that
-if there are pending messages in the actor's mailbox when the node of the actor
-resides on crashes, then when you restart the node, the actor will be able to
-continue processing as if nothing had happened; with all pending messages still
-in its mailbox.
-
-.. sidebar:: **IMPORTANT**
-
- None of these mailboxes work with blocking message send, e.g. the message
- send operations that are relying on futures; ``?`` or ``ask``. If the node
- has crashed and then restarted, the thread that was blocked waiting for the
- reply is gone and there is no way we can deliver the message.
-
-The durable mailboxes currently supported are:
-
- - ``FileDurableMailboxStorage`` -- backed by a journaling transaction log on the local file system
- - ``RedisDurableMailboxStorage`` -- backed by Redis
- - ``ZooKeeperDurableMailboxStorage`` -- backed by ZooKeeper
- - ``BeanstalkDurableMailboxStorage`` -- backed by Beanstalkd
- - ``MongoNaiveDurableMailboxStorage`` -- backed by MongoDB
-
-We'll walk through each one of these in detail in the sections below.
-
-Soon Akka will also have:
-
- - ``AmqpDurableMailboxStorage`` -- AMQP based mailbox (default RabbitMQ)
- - ``JmsDurableMailboxStorage`` -- JMS based mailbox (default ActiveMQ)
-
-
-File-based durable mailbox
-==========================
-
-This mailbox is backed by a journaling transaction log on the local file
-system. It is the simplest want to use since it does not require an extra
-infrastructure piece to administer, but it is usually sufficient and just what
-you need.
-
-The durable dispatchers and their configuration options reside in the
-``akka.actor.mailbox`` package.
-
-You configure durable mailboxes through the "Akka"-only durable dispatchers, the
-actor is oblivious to which type of mailbox it is using. Here is an example::
-
- val dispatcher = DurableDispatcher(
- "my:service",
- FileDurableMailboxStorage)
- // Then set the actors dispatcher to this dispatcher
-
-or for a thread-based durable dispatcher::
-
- self.dispatcher = DurablePinnedDispatcher(
- self,
- FileDurableMailboxStorage)
-
-There are 2 different durable dispatchers, ``DurableDispatcher`` and
-``DurablePinnedDispatcher``, which are durable versions of
-``Dispatcher`` and ``PinnedDispatcher``.
-
-This gives you an excellent way of creating bulkheads in your application, where
-groups of actors sharing the same dispatcher also share the same backing
-storage.
-
-Read more about that in the :ref:`dispatchers-scala` documentation.
-
-You can also configure and tune the file-based durable mailbox. This is done in
-the ``akka.actor.mailbox.file-based`` section in the ``akka.conf`` configuration
-file.
-
-.. code-block:: none
-
- akka {
- actor {
- mailbox {
- file-based {
- directory-path = "./_mb"
- max-items = 2147483647
- max-size = 2147483647
- max-items = 2147483647
- max-age = 0
- max-journal-size = 16777216 # 16 * 1024 * 1024
- max-memory-size = 134217728 # 128 * 1024 * 1024
- max-journal-overflow = 10
- max-journal-size-absolute = 9223372036854775807
- discard-old-when-full = on
- keep-journal = on
- sync-journal = off
- }
- }
- }
- }
-
-.. todo:: explain all the above options in detail
-
-
-Redis-based durable mailbox
-===========================
-
-This mailbox is backed by a Redis queue. `Redis `_ Is a very
-fast NOSQL database that has a wide range of data structure abstractions, one of
-them is a queue which is what we are using in this implementation. This means
-that you have to start up a Redis server that can host these durable
-mailboxes. Read more in the Redis documentation on how to do that.
-
-Here is an example of how you can configure your dispatcher to use this mailbox::
-
- val dispatcher = DurableDispatcher(
- "my:service",
- RedisDurableMailboxStorage)
-
-or for a thread-based durable dispatcher::
-
- self.dispatcher = DurablePinnedDispatcher(
- self,
- RedisDurableMailboxStorage)
-
-You also need to configure the IP and port for the Redis server. This is done in
-the ``akka.actor.mailbox.redis`` section in the ``akka.conf`` configuration
-file.
-
-.. code-block:: none
-
- akka {
- actor {
- mailbox {
- redis {
- hostname = "127.0.0.1"
- port = 6379
- }
- }
- }
- }
-
-
-ZooKeeper-based durable mailbox
-===============================
-
-This mailbox is backed by `ZooKeeper `_. ZooKeeper
-is a centralized service for maintaining configuration information, naming,
-providing distributed synchronization, and providing group services This means
-that you have to start up a ZooKeeper server (for production a ZooKeeper server
-ensamble) that can host these durable mailboxes. Read more in the ZooKeeper
-documentation on how to do that.
-
-Akka is using ZooKeeper for many other things, for example the clustering
-support so if you're using that you love to run a ZooKeeper server anyway and
-there will not be that much more work to set up this durable mailbox.
-
-Here is an example of how you can configure your dispatcher to use this mailbox::
-
- val dispatcher = DurableDispatcher(
- "my:service",
- ZooKeeperDurableMailboxStorage)
-
-or for a thread-based durable dispatcher::
-
- self.dispatcher = DurablePinnedDispatcher(
- self,
- ZooKeeperDurableMailboxStorage)
-
-You also need to configure ZooKeeper server addresses, timeouts, etc. This is
-done in the ``akka.actor.mailbox.zookeeper`` section in the ``akka.conf``
-configuration file.
-
-.. code-block:: none
-
- akka {
- actor {
- mailbox {
- zookeeper {
- server-addresses = "localhost:2181"
- session-timeout = 60
- connection-timeout = 30
- blocking-queue = on
- }
- }
- }
- }
-
-
-Beanstalk-based durable mailbox
-===============================
-
-This mailbox is backed by `Beanstalkd `_.
-Beanstalk is a simple, fast work queue. This means that you have to start up a
-Beanstalk server that can host these durable mailboxes. Read more in the
-Beanstalk documentation on how to do that. ::
-
- val dispatcher = DurableDispatcher(
- "my:service",
- BeanstalkDurableMailboxStorage)
-
-or for a thread-based durable dispatcher. ::
-
- self.dispatcher = DurablePinnedDispatcher(
- self,
- BeanstalkDurableMailboxStorage)
-
-You also need to configure the IP, and port, and so on, for the Beanstalk
-server. This is done in the ``akka.actor.mailbox.beanstalk`` section in the
-``akka.conf`` configuration file.
-
-.. code-block:: none
-
- akka {
- actor {
- mailbox {
- beanstalk {
- hostname = "127.0.0.1"
- port = 11300
- reconnect-window = 5
- message-submit-delay = 0
- message-submit-timeout = 5
- message-time-to-live = 120
- }
- }
- }
- }
-
-MongoDB-based Durable Mailboxes
-===============================
-
-This mailbox is backed by `MongoDB `_.
-MongoDB is a fast, lightweight and scalable document-oriented database. It contains a number of
-features cohesive to a fast, reliable & durable queueing mechanism which the Akka Mailbox takes advantage of.
-
-
-Akka's implementations of MongoDB mailboxes are built on top of the purely asynchronous MongoDB driver (often known as `Hammersmith `_ and ``com.mongodb.async``) and as such are purely callback based with a Netty network layer. This makes them extremely fast & lightweight versus building on other MongoDB implementations such as `mongo-java-driver `_ and `Casbah `_.
-
-You will need to configure the URI for the MongoDB server, using the URI Format specified in the `MongoDB Documentation `_. This is done in
-the ``akka.actor.mailbox.mongodb`` section in the ``akka.conf`` configuration
-file.
-
-.. code-block:: none
-
- mongodb {
- # Any specified collection name will be used as a prefix for collections that use durable mongo mailboxes
- uri = "mongodb://localhost/akka.mailbox" # Follow Mongo URI Spec - http://www.mongodb.org/display/DOCS/Connections
- # Configurable timeouts for certain ops
- timeout {
- read = 3000 # number of milliseconds to wait for a read to succeed before timing out the future
- write = 3000 # number of milliseconds to wait for a write to succeed before timing out the future
- }
- }
-
-You must specify a hostname (and optionally port) and at *least* a Database name. If you specify a collection name, it will be used as a 'prefix' for the collections Akka creates to store mailbox messages. Otherwise, collections will be prefixed with ``mailbox.``
-
-It is also possible to configure the timeout threshholds for Read and Write operations in the ``timeout`` block.
-Currently Akka offers only one "type" of MongoDB based Mailbox but there are plans to support at least
-one other kind which uses a different queueing strategy.
-
-
-'Naive' MongoDB-based Durable Mailbox
--------------------------------------
-The currently supported mailbox is considered "Naive" as it removes messages (using the ``findAndRemove``
-command) from the MongoDB datastore as soon as the actor consumes them. This could cause message loss
-if an actor crashes before completely processing a message. It is not a problem per sé, but behavior
-users should be aware of.
-
-Here is an example of how you can configure your dispatcher to use this mailbox::
-
- val dispatcher = DurableDispatcher(
- "my:service",
- MongoNaiveDurableMailboxStorage)
-
-or for a thread-based durable dispatcher::
-
- self.dispatcher = DurablePinnedDispatcher(
- self,
- MongoNaiveDurableMailboxStorage)
-
-
diff --git a/akka-docs/cluster/index.rst b/akka-docs/cluster/index.rst
index bdbd95bde6..35c4b2250a 100644
--- a/akka-docs/cluster/index.rst
+++ b/akka-docs/cluster/index.rst
@@ -5,4 +5,3 @@ Cluster
:maxdepth: 2
cluster
- durable-mailbox
diff --git a/akka-docs/common/code/SchedulerDocSpec.scala b/akka-docs/common/code/SchedulerDocSpec.scala
new file mode 100644
index 0000000000..ac101e396d
--- /dev/null
+++ b/akka-docs/common/code/SchedulerDocSpec.scala
@@ -0,0 +1,67 @@
+package akka.scheduler.actor
+
+//#imports1
+import akka.actor.Actor
+import akka.actor.Props
+import akka.util.duration._
+
+//#imports1
+
+import org.scalatest.{ BeforeAndAfterAll, WordSpec }
+import org.scalatest.matchers.MustMatchers
+import akka.testkit._
+import akka.util.duration._
+
+class SchedulerDocSpec extends AkkaSpec(Map("akka.loglevel" -> "INFO")) {
+ "schedule a one-off task" in {
+ //#schedule-one-off-message
+ //Schedules to send the "foo"-message to the testActor after 50ms
+ system.scheduler.scheduleOnce(50 milliseconds, testActor, "foo")
+ //#schedule-one-off-message
+
+ expectMsg(1 second, "foo")
+
+ //#schedule-one-off-thunk
+ //Schedules to send the "foo"-message to the testActor after 50ms
+ system.scheduler.scheduleOnce(50 milliseconds) {
+ testActor ! "foo"
+ }
+ //#schedule-one-off-thunk
+
+ expectMsg(1 second, "foo")
+
+ //#schedule-one-off-runnable
+ //Schedules to send the "foo"-message to the testActor after 50ms
+ system.scheduler.scheduleOnce(
+ 50 milliseconds,
+ new Runnable {
+ def run = testActor ! "foo"
+ })
+
+ //#schedule-one-off-runnable
+
+ expectMsg(1 second, "foo")
+ }
+
+ "schedule a recurring task" in {
+ //#schedule-recurring
+ val Tick = "tick"
+ val tickActor = system.actorOf(Props(new Actor {
+ def receive = {
+ case Tick ⇒ //Do something
+ }
+ }))
+ //This will schedule to send the Tick-message
+ //to the tickActor after 0ms repeating every 50ms
+ val cancellable =
+ system.scheduler.schedule(0 milliseconds,
+ 50 milliseconds,
+ tickActor,
+ Tick)
+
+ //This cancels further Ticks to be sent
+ cancellable.cancel()
+ //#schedule-recurring
+ tickActor.stop()
+ }
+}
diff --git a/akka-docs/common/duration.rst b/akka-docs/common/duration.rst
index 523c8a2283..fd25dc5128 100644
--- a/akka-docs/common/duration.rst
+++ b/akka-docs/common/duration.rst
@@ -4,8 +4,6 @@
Duration
########
-Module stability: **SOLID**
-
Durations are used throughout the Akka library, wherefore this concept is
represented by a special data type, :class:`Duration`. Values of this type may
represent infinite (:obj:`Duration.Inf`, :obj:`Duration.MinusInf`) or finite
@@ -48,4 +46,18 @@ method calls instead:
assert (diff.lt(fivesec));
assert (Duration.Zero().lt(Duration.Inf()));
+Deadline
+========
+Durations have a brother named :class:`Deadline`, which is a class holding a representation
+of an absolute point in time, and supports deriving a duration from this by calculating the
+difference between now and the deadline. This is useful when you want to keep one overall
+deadline without having to take care of the book-keeping wrt. the passing of time yourself::
+
+ val deadline = 10 seconds fromNow
+ // do something which takes time
+ awaitCond(..., deadline.timeLeft)
+
+In Java you create these from durations::
+
+ final Deadline d = Duration.create(5, "seconds").fromNow();
diff --git a/akka-docs/common/scheduler.rst b/akka-docs/common/scheduler.rst
index 7fd28d37ee..d05cea60aa 100644
--- a/akka-docs/common/scheduler.rst
+++ b/akka-docs/common/scheduler.rst
@@ -1,12 +1,52 @@
Scheduler
=========
-//FIXME
+Sometimes the need for making things happen in the future arises, and where do you go look then?
+Look no further than ``ActorSystem``! There you find the :meth:`scheduler` method that returns an instance
+of akka.actor.Scheduler, this instance is unique per ActorSystem and is used internally for scheduling things
+to happen at specific points in time. Please note that the scheduled tasks are executed by the default
+``MessageDispatcher`` of the ``ActorSystem``.
-Here is an example:
--------------------
+You can schedule sending of messages to actors and execution of tasks (functions or Runnable).
+You will get a ``Cancellable`` back that you can call :meth:`cancel` on to cancel the execution of the
+scheduled operation.
-.. code-block:: scala
-
- //TODO FIXME
+Some examples
+-------------
+
+.. includecode:: code/SchedulerDocSpec.scala
+ :include: imports1,schedule-one-off-message
+
+.. includecode:: code/SchedulerDocSpec.scala
+ :include: imports1,schedule-one-off-thunk
+
+.. includecode:: code/SchedulerDocSpec.scala
+ :include: imports1,schedule-one-off-runnable
+
+.. includecode:: code/SchedulerDocSpec.scala
+ :include: imports1,schedule-recurring
+
+From ``akka.actor.ActorSystem``
+-------------------------------
+
+.. includecode:: ../../akka-actor/src/main/scala/akka/actor/ActorSystem.scala
+ :include: scheduler
+
+
+The Scheduler interface
+-----------------------
+
+.. includecode:: ../../akka-actor/src/main/scala/akka/actor/Scheduler.scala
+ :include: scheduler
+
+The Cancellable interface
+-------------------------
+
+This allows you to ``cancel`` something that has been scheduled for execution.
+
+.. warning::
+ This does not abort the execution of the task, if it had already been started.
+
+.. includecode:: ../../akka-actor/src/main/scala/akka/actor/Scheduler.scala
+ :include: cancellable
diff --git a/akka-docs/dev/multi-jvm-testing.rst b/akka-docs/dev/multi-jvm-testing.rst
index 7e79f65bfa..dade7c30c1 100644
--- a/akka-docs/dev/multi-jvm-testing.rst
+++ b/akka-docs/dev/multi-jvm-testing.rst
@@ -35,7 +35,7 @@ multi-JVM testing::
base = file("akka-cluster"),
settings = defaultSettings ++ MultiJvmPlugin.settings ++ Seq(
extraOptions in MultiJvm <<= (sourceDirectory in MultiJvm) { src =>
- (name: String) => (src ** (name + ".conf")).get.headOption.map("-Dakka.config=" + _.absolutePath).toSeq
+ (name: String) => (src ** (name + ".conf")).get.headOption.map("-Dconfig.file=" + _.absolutePath).toSeq
},
test in Test <<= (test in Test) dependsOn (test in MultiJvm)
)
@@ -176,10 +176,10 @@ and add the options to them.
-Dakka.cluster.nodename=node3 -Dakka.remote.port=9993
-Overriding akka.conf options
-----------------------------
+Overriding configuration options
+--------------------------------
-You can also override the options in the ``akka.conf`` file with different options for each
+You can also override the options in the :ref:`configuration` file with different options for each
spawned JVM. You do that by creating a file named after the node in the test with suffix
``.conf`` and put them in the same directory as the test .
diff --git a/akka-docs/dev/team.rst b/akka-docs/dev/team.rst
index 8f636ddafa..36e0cd1339 100644
--- a/akka-docs/dev/team.rst
+++ b/akka-docs/dev/team.rst
@@ -26,4 +26,5 @@ Scott Clasen Committer
Roland Kuhn Committer
Patrik Nordwall Committer patrik DOT nordwall AT gmail DOT com
Derek Williams Committer derek AT nebvin DOT ca
+Henrik Engström Committer
=================== ========================== ====================================
\ No newline at end of file
diff --git a/akka-docs/disabled/camel.rst b/akka-docs/disabled/camel.rst
new file mode 100644
index 0000000000..fd9d6c1181
--- /dev/null
+++ b/akka-docs/disabled/camel.rst
@@ -0,0 +1,2901 @@
+
+.. _camel-module:
+
+#######
+ Camel
+#######
+
+
+.. _Appendix E - Akka and Camel: http://www.manning.com/ibsen/appEsample.pdf
+.. _Camel in Action: http://www.manning.com/ibsen/
+
+Contents:
+
+.. contents:: :local:
+
+Other, more advanced external articles are:
+
+* `Akka Consumer Actors: New Features and Best Practices <http://krasserm.blogspot.com/2011/02/akka-consumer-actors-new-features-and.html>`_
+* `Akka Producer Actors: New Features and Best Practices <http://krasserm.blogspot.com/2011/02/akka-producer-actors-new-features-and.html>`_
+
+
+Introduction
+============
+
+The akka-camel module allows actors, untyped actors, and typed actors to receive
+and send messages over a great variety of protocols and APIs. This section gives
+a brief overview of the general ideas behind the akka-camel module, the
+remaining sections go into the details. In addition to the native Scala and Java
+actor API, actors can now exchange messages with other systems over large number
+of protocols and APIs such as HTTP, SOAP, TCP, FTP, SMTP or JMS, to mention a
+few. At the moment, approximately 80 protocols and APIs are supported.
+
+The akka-camel module is based on `Apache Camel`_, a powerful and lightweight
+integration framework for the JVM. For an introduction to Apache Camel you may
+want to read this `Apache Camel article`_. Camel comes with a
+large number of `components`_ that provide bindings to different protocols and
+APIs. The `camel-extra`_ project provides further components.
+
+.. _Apache Camel: http://camel.apache.org/
+.. _Apache Camel article: http://architects.dzone.com/articles/apache-camel-integration
+.. _components: http://camel.apache.org/components.html
+.. _camel-extra: http://code.google.com/p/camel-extra/
+
+Usage of Camel's integration components in Akka is essentially a
+one-liner. Here's an example.
+
+.. code-block:: scala
+
+ import akka.actor.Actor
+ import akka.actor.Actor._
+ import akka.camel.{Message, Consumer}
+
+ class MyActor extends Actor with Consumer {
+ def endpointUri = "mina:tcp://localhost:6200?textline=true"
+
+ def receive = {
+ case msg: Message => { /* ... */}
+ case _ => { /* ... */}
+ }
+ }
+
+ // start and expose actor via tcp
+ val myActor = actorOf(Props[MyActor])
+
+The above example exposes an actor over a tcp endpoint on port 6200 via Apache
+Camel's `Mina component`_. The actor implements the endpointUri method to define
+an endpoint from which it can receive messages. After starting the actor, tcp
+clients can immediately send messages to and receive responses from that
+actor. If the message exchange should go over HTTP (via Camel's `Jetty
+component`_), only the actor's endpointUri method must be changed.
+
+.. _Mina component: http://camel.apache.org/mina.html
+.. _Jetty component: http://camel.apache.org/jetty.html
+
+.. code-block:: scala
+
+ class MyActor extends Actor with Consumer {
+ def endpointUri = "jetty:http://localhost:8877/example"
+
+ def receive = {
+ case msg: Message => { /* ... */}
+ case _ => { /* ... */}
+ }
+ }
+
+Actors can also trigger message exchanges with external systems i.e. produce to
+Camel endpoints.
+
+.. code-block:: scala
+
+ import akka.actor.Actor
+ import akka.camel.{Producer, Oneway}
+
+ class MyActor extends Actor with Producer with Oneway {
+ def endpointUri = "jms:queue:example"
+ }
+
+In the above example, any message sent to this actor will be added (produced) to
+the example JMS queue. Producer actors may choose from the same set of Camel
+components as Consumer actors do.
+
+The number of Camel components is constantly increasing. The akka-camel module
+can support these in a plug-and-play manner. Just add them to your application's
+classpath, define a component-specific endpoint URI and use it to exchange
+messages over the component-specific protocols or APIs. This is possible because
+Camel components bind protocol-specific message formats to a Camel-specific
+`normalized message format`__. The normalized message format hides
+protocol-specific details from Akka and makes it therefore very easy to support
+a large number of protocols through a uniform Camel component interface. The
+akka-camel module further converts mutable Camel messages into `immutable
+representations`__ which are used by Consumer and Producer actors for pattern
+matching, transformation, serialization or storage, for example.
+
+__ https://svn.apache.org/repos/asf/camel/trunk/camel-core/src/main/java/org/apache/camel/Message.java
+__ http://github.com/jboner/akka/blob/v0.8/akka-camel/src/main/scala/akka/Message.scala#L17
+
+
+Dependencies
+============
+
+Akka's Camel Integration consists of two modules
+
+* akka-camel - this module depends on akka-actor and camel-core (+ transitive
+ dependencies) and implements the Camel integration for (untyped) actors
+
+* akka-camel-typed - this module depends on akka-typed-actor and akka-camel (+
+ transitive dependencies) and implements the Camel integration for typed actors
+
+The akka-camel-typed module is optional. To have both untyped and typed actors
+working with Camel, add the following dependencies to your SBT project
+definition.
+
+.. code-block:: scala
+
+ import sbt._
+
+ class Project(info: ProjectInfo) extends DefaultProject(info) with AkkaProject {
+ // ...
+ val akkaCamel = akkaModule("camel")
+ val akkaCamelTyped = akkaModule("camel-typed") // optional typed actor support
+ // ...
+ }
+
+
+.. _camel-consume-messages:
+
+Consume messages
+================
+
+Actors (untyped)
+----------------
+
+For actors (Scala) to receive messages, they must mixin the `Consumer`_
+trait. For example, the following actor class (Consumer1) implements the
+endpointUri method, which is declared in the Consumer trait, in order to receive
+messages from the ``file:data/input/actor`` Camel endpoint. Untyped actors
+(Java) need to extend the abstract UntypedConsumerActor class and implement the
+getEndpointUri() and onReceive(Object) methods.
+
+.. _Consumer: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/Consumer.scala
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.Actor
+ import akka.camel.{Message, Consumer}
+
+ class Consumer1 extends Actor with Consumer {
+ def endpointUri = "file:data/input/actor"
+
+ def receive = {
+ case msg: Message => println("received %s" format msg.bodyAs[String])
+ }
+ }
+
+**Java**
+
+.. code-block:: java
+
+ import akka.camel.Message;
+ import akka.camel.UntypedConsumerActor;
+
+ public class Consumer1 extends UntypedConsumerActor {
+ public String getEndpointUri() {
+ return "file:data/input/actor";
+ }
+
+ public void onReceive(Object message) {
+ Message msg = (Message)message;
+ String body = msg.getBodyAs(String.class);
+ System.out.println(String.format("received %s", body));
+ }
+ }
+
+Whenever a file is put into the data/input/actor directory, its content is
+picked up by the Camel `file component`_ and sent as message to the
+actor. Messages consumed by actors from Camel endpoints are of type
+`Message`_. These are immutable representations of Camel messages.
+
+.. _file component: http://camel.apache.org/file2.html
+.. _Message: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/Message.scala
+
+For Message usage examples refer to the unit tests:
+
+* Message unit tests - Scala API
+* Message unit tests - Java API
+
+Here's another example that sets the endpointUri to
+``jetty:http://localhost:8877/camel/default``. It causes Camel's `Jetty
+component`_ to start an embedded `Jetty`_ server, accepting HTTP connections
+from localhost on port 8877.
+
+.. _Jetty component: http://camel.apache.org/jetty.html
+.. _Jetty: http://www.eclipse.org/jetty/
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.Actor
+ import akka.camel.{Message, Consumer}
+
+ class Consumer2 extends Actor with Consumer {
+ def endpointUri = "jetty:http://localhost:8877/camel/default"
+
+ def receive = {
+ case msg: Message => self.reply("Hello %s" format msg.bodyAs[String])
+ }
+ }
+
+**Java**
+
+.. code-block:: java
+
+ import akka.camel.Message;
+ import akka.camel.UntypedConsumerActor;
+
+ public class Consumer2 extends UntypedConsumerActor {
+ public String getEndpointUri() {
+ return "jetty:http://localhost:8877/camel/default";
+ }
+
+ public void onReceive(Object message) {
+ Message msg = (Message)message;
+ String body = msg.getBodyAs(String.class);
+ getContext().tryReply(String.format("Hello %s", body));
+ }
+ }
+
+After starting the actor, clients can send messages to that actor by POSTing to
+``http://localhost:8877/camel/default``. The actor sends a response by using the
+self.reply method (Scala). For returning a message body and headers to the HTTP
+client the response type should be `Message`_. For any other response type, a
+new Message object is created by akka-camel with the actor response as message
+body.
+
+.. _Message: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/Message.scala
+
+
+Typed actors
+------------
+
+Typed actors can also receive messages from Camel endpoints. In contrast to
+(untyped) actors, which only implement a single receive or onReceive method, a
+typed actor may define several (message processing) methods, each of which can
+receive messages from a different Camel endpoint. For a typed actor method to be
+exposed as Camel endpoint it must be annotated with the `@consume
+annotation`_. For example, the following typed consumer actor defines two
+methods, foo and bar.
+
+.. _@consume annotation: http://github.com/jboner/akka/blob/master/akka-camel/src/main/java/akka/camel/consume.java
+
+**Scala**
+
+.. code-block:: scala
+
+ import org.apache.camel.{Body, Header}
+ import akka.actor.TypedActor
+ import akka.camel.consume
+
+ trait TypedConsumer1 {
+ @consume("file:data/input/foo")
+ def foo(body: String): Unit
+
+ @consume("jetty:http://localhost:8877/camel/bar")
+ def bar(@Body body: String, @Header("X-Whatever") header: String): String
+ }
+
+ class TypedConsumer1Impl extends TypedActor with TypedConsumer1 {
+ def foo(body: String) = println("Received message: %s" format body)
+ def bar(body: String, header: String) = "body=%s header=%s" format (body, header)
+ }
+
+**Java**
+
+.. code-block:: java
+
+ import org.apache.camel.Body;
+ import org.apache.camel.Header;
+ import akka.actor.TypedActor;
+ import akka.camel.consume;
+
+ public interface TypedConsumer1 {
+ @consume("file:data/input/foo")
+ public void foo(String body);
+
+ @consume("jetty:http://localhost:8877/camel/bar")
+ public String bar(@Body String body, @Header("X-Whatever") String header);
+ }
+
+ public class TypedConsumer1Impl extends TypedActor implements TypedConsumer1 {
+ public void foo(String body) {
+ System.out.println(String.format("Received message: %s", body));
+ }
+
+ public String bar(String body, String header) {
+ return String.format("body=%s header=%s", body, header);
+ }
+ }
+
+The foo method can be invoked by placing a file in the data/input/foo
+directory. Camel picks up the file from this directory and akka-camel invokes
+foo with the file content as argument (converted to a String). Camel
+automatically tries to convert messages to appropriate types as defined by the
+method parameter(s). The conversion rules are described in detail on the
+following pages:
+
+* `Bean integration <http://camel.apache.org/bean-integration.html>`_
+* `Bean binding <http://camel.apache.org/bean-binding.html>`_
+* `Parameter binding <http://camel.apache.org/parameter-binding-annotations.html>`_
+
+The bar method can be invoked by POSTing a message to
+http://localhost:8877/camel/bar. Here, parameter binding annotations are used to
+tell Camel how to extract data from the HTTP message. The @Body annotation binds
+the HTTP request body to the first parameter, the @Header annotation binds the
+X-Whatever header to the second parameter. The return value is sent as HTTP
+response message body to the client.
+
+Parameter binding annotations must be placed on the interface, the @consume
+annotation can also be placed on the methods in the implementation class.
+
+
+.. _camel-publishing:
+
+Consumer publishing
+-------------------
+
+Actors (untyped)
+^^^^^^^^^^^^^^^^
+
+Publishing a consumer actor at its Camel endpoint occurs when the actor is
+started. Publication is done asynchronously; setting up an endpoint (more
+precisely, the route from that endpoint to the actor) may still be in progress
+after the ``actorOf`` method returned.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.Actor._
+
+ val actor = actorOf(Props[Consumer1]) // create Consumer actor and activate endpoint in background
+
+**Java**
+
+.. code-block:: java
+
+ import static akka.actor.Actors.*;
+ import akka.actor.ActorRef;
+
+ ActorRef actor = actorOf(new Props(Consumer1.class)); // create Consumer actor and activate endpoint in background
+
+
+Typed actors
+^^^^^^^^^^^^
+
+Publishing of typed actor methods is done when the typed actor is created with
+one of the TypedActor.newInstance(..) methods. Publication is done in the
+background here as well i.e. it may still be in progress when
+TypedActor.newInstance(..) returns.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.TypedActor
+
+ // create TypedConsumer1 object and activate endpoint(s) in background
+ val consumer = TypedActor.newInstance(classOf[TypedConsumer1], classOf[TypedConsumer1Impl])
+
+**Java**
+
+.. code-block:: java
+
+ import akka.actor.TypedActor;
+
+ // create TypedConsumer1 object and activate endpoint(s) in background
+ TypedConsumer1 consumer = TypedActor.newInstance(TypedConsumer1.class, TypedConsumer1Impl.class);
+
+
+.. _camel-consumers-and-camel-service:
+
+Consumers and the CamelService
+------------------------------
+
+Publishing of consumer actors or typed actor methods requires a running
+CamelService. The Akka :ref:`microkernel` can start a CamelService automatically
+(see :ref:`camel-configuration`). When using Akka in other environments, a
+CamelService must be started manually. Applications can do that by calling the
+CamelServiceManager.startCamelService method.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.camel.CamelServiceManager._
+
+ startCamelService
+
+**Java**
+
+.. code-block:: java
+
+ import static akka.camel.CamelServiceManager.*;
+
+ startCamelService();
+
+If applications need to wait for a certain number of consumer actors or typed
+actor methods to be published they can do so with the
+``CamelServiceManager.mandatoryService.awaitEndpointActivation`` method, where
+``CamelServiceManager.mandatoryService`` is the current CamelService instance
+(or throws an IllegalStateException if there's no current CamelService).
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.camel.CamelServiceManager._
+
+ startCamelService
+
+ // Wait for three consumer endpoints to be activated
+ mandatoryService.awaitEndpointActivation(3) {
+ // Start three consumer actors (for example)
+ // ...
+ }
+
+ // Communicate with consumer actors via their activated endpoints
+ // ...
+
+**Java**
+
+.. code-block:: java
+
+ import akka.japi.SideEffect;
+ import static akka.camel.CamelServiceManager.*;
+
+ startCamelService();
+
+ // Wait for three consumer endpoints to be activated
+ getMandatoryService().awaitEndpointActivation(3, new SideEffect() {
+ public void apply() {
+ // Start three consumer actors (for example)
+ // ...
+ }
+ });
+
+ // Communicate with consumer actors via their activated endpoints
+ // ...
+
+Alternatively, one can also use ``Option[CamelService]`` returned by
+``CamelServiceManager.service``.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.camel.CamelServiceManager._
+
+ startCamelService
+
+ for(s <- service) s.awaitEndpointActivation(3) {
+ // ...
+ }
+
+**Java**
+
+.. code-block:: java
+
+ import java.util.concurrent.CountDownLatch;
+
+ import akka.camel.CamelService;
+ import static akka.camel.CamelServiceManager.*;
+
+ startCamelService();
+
+ for (CamelService s : getService()) s.awaitEndpointActivation(3, new SideEffect() {
+ public void apply() {
+ // ...
+ }
+ });
+
+:ref:`camel-configuration` additionally describes how a CamelContext, that is
+managed by a CamelService, can be customized before starting the service. When
+the CamelService is no longer needed, it should be stopped.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.camel.CamelServiceManager._
+
+ stopCamelService
+
+**Java**
+
+.. code-block:: java
+
+ import static akka.camel.CamelServiceManager.*;
+
+ stopCamelService();
+
+
+.. _camel-unpublishing:
+
+Consumer un-publishing
+----------------------
+
+Actors (untyped)
+^^^^^^^^^^^^^^^^
+
+When an actor is stopped, the route from the endpoint to that actor is stopped
+as well. For example, stopping an actor that has been previously published at
+``http://localhost:8877/camel/test`` will cause a connection failure when trying
+to access that endpoint. Stopping the route is done asynchronously; it may be
+still in progress after the ``ActorRef.stop`` method returned.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.Actor._
+
+ val actor = actorOf(Props[Consumer1]) // create Consumer actor
+ // endpoint is activated in background when the actor is created
+ // ...
+ actor.stop // deactivate endpoint in background
+
+**Java**
+
+.. code-block:: java
+
+ import static akka.actor.Actors.*;
+ import akka.actor.ActorRef;
+
+ ActorRef actor = actorOf(new Props(Consumer1.class)); // create Consumer actor and activate endpoint in background
+ // ...
+ actor.stop(); // deactivate endpoint in background
+
+
+Typed actors
+^^^^^^^^^^^^
+
+When a typed actor is stopped, routes to @consume annotated methods of this
+typed actors are stopped as well. Stopping the routes is done asynchronously; it
+may be still in progress after the TypedActor.stop method returned.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.TypedActor
+
+ // create TypedConsumer1 object and activate endpoint(s) in background
+ val consumer = TypedActor.newInstance(classOf[TypedConsumer1], classOf[TypedConsumer1Impl])
+
+ // deactivate endpoints in background
+ TypedActor.stop(consumer)
+
+**Java**
+
+.. code-block:: java
+
+ import akka.actor.TypedActor;
+
+ // Create typed consumer actor and activate endpoints in background
+ TypedConsumer1 consumer = TypedActor.newInstance(TypedConsumer1.class, TypedConsumer1Impl.class);
+
+ // Deactivate endpoints in background
+ TypedActor.stop(consumer);
+
+
+.. _camel-acknowledgements:
+
+Acknowledgements
+----------------
+
+Actors (untyped)
+^^^^^^^^^^^^^^^^
+
+With in-out message exchanges, clients usually know that a message exchange is
+done when they receive a reply from a consumer actor. The reply message can be a
+Message (or any object which is then internally converted to a Message) on
+success, and a Failure message on failure.
+
+With in-only message exchanges, by default, an exchange is done when a message
+is added to the consumer actor's mailbox. Any failure or exception that occurs
+during processing of that message by the consumer actor cannot be reported back
+to the endpoint in this case. To allow consumer actors to positively or
+negatively acknowledge the receipt of a message from an in-only message
+exchange, they need to override the ``autoack`` (Scala) or ``isAutoack`` (Java)
+method to return false. In this case, consumer actors must reply either with a
+special Ack message (positive acknowledgement) or a Failure (negative
+acknowledgement).
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.camel.{Ack, Failure}
+ // ... other imports omitted
+
+ class Consumer3 extends Actor with Consumer {
+ override def autoack = false
+
+ def endpointUri = "jms:queue:test"
+
+ def receive = {
+ // ...
+ self.reply(Ack) // on success
+ // ...
+ self.reply(Failure(...)) // on failure
+ }
+ }
+
+**Java**
+
+.. code-block:: java
+
+ import akka.camel.Failure;
+ import static akka.camel.Ack.ack;
+ // ... other imports omitted
+
+ public class Consumer3 extends UntypedConsumerActor {
+
+ public String getEndpointUri() {
+ return "jms:queue:test";
+ }
+
+ public boolean isAutoack() {
+ return false;
+ }
+
+ public void onReceive(Object message) {
+ // ...
+ getContext().reply(ack()); // on success
+ // ...
+ Exception e = ...;
+ getContext().reply(new Failure(e)); // on failure
+ }
+ }
+
+
+.. _camel-blocking-exchanges:
+
+Blocking exchanges
+------------------
+
+By default, message exchanges between a Camel endpoint and a consumer actor are
+non-blocking because, internally, the ! (bang) operator is used to communicate
+with the actor. The route to the actor does not block waiting for a reply. The
+reply is sent asynchronously (see also :ref:`camel-asynchronous-routing`).
+Consumer actors however can be configured to make this interaction blocking.
+
+**Scala**
+
+.. code-block:: scala
+
+ class ExampleConsumer extends Actor with Consumer {
+ override def blocking = true
+
+ def endpointUri = ...
+ def receive = {
+ // ...
+ }
+ }
+
+**Java**
+
+.. code-block:: java
+
+ public class ExampleConsumer extends UntypedConsumerActor {
+
+ public boolean isBlocking() {
+ return true;
+ }
+
+ public String getEndpointUri() {
+ // ...
+ }
+
+ public void onReceive(Object message) {
+ // ...
+ }
+ }
+
+In this case, the ``!!`` (bangbang) operator is used internally to communicate
+with the actor which blocks a thread until the consumer sends a response or
+throws an exception within receive. Although it may decrease scalability, this
+setting can simplify error handling (see `this article`_) or allows timeout
+configurations on actor-level (see :ref:`camel-timeout`).
+
+.. _this article: http://krasserm.blogspot.com/2011/02/akka-consumer-actors-new-features-and.html
+
+
+.. _camel-timeout:
+
+Consumer timeout
+----------------
+
+Endpoints that support two-way communications need to wait for a response from
+an (untyped) actor or typed actor before returning it to the initiating client.
+For some endpoint types, timeout values can be defined in an endpoint-specific
+way which is described in the documentation of the individual `Camel
+components`_. Another option is to configure timeouts on the level of consumer
+actors and typed consumer actors.
+
+.. _Camel components: http://camel.apache.org/components.html
+
+
+Typed actors
+^^^^^^^^^^^^
+
+For typed actors, timeout values for method calls that return a result can be
+set when the typed actor is created. In the following example, the timeout is
+set to 20 seconds (default is 5 seconds).
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.TypedActor
+
+ val consumer = TypedActor.newInstance(classOf[TypedConsumer1], classOf[TypedConsumer1Impl], 20000 /* 20 seconds */)
+
+**Java**
+
+.. code-block:: java
+
+ import akka.actor.TypedActor;
+
+ TypedConsumer1 consumer = TypedActor.newInstance(TypedConsumer1.class, TypedConsumer1Impl.class, 20000 /* 20 seconds */);
+
+
+Actors (untyped)
+^^^^^^^^^^^^^^^^
+
+Two-way communications between a Camel endpoint and an (untyped) actor are
+initiated by sending the request message to the actor with the ``!`` (bang)
+operator and the actor replies to the endpoint when the response is ready. In
+order to support timeouts on actor-level, endpoints need to send the request
+message with the ``!!`` (bangbang) operator for which a timeout value is
+applicable. This can be achieved by overriding the Consumer.blocking method to
+return true.
+
+**Scala**
+
+.. code-block:: scala
+
+ class Consumer2 extends Actor with Consumer {
+ self.timeout = 20000 // timeout set to 20 seconds
+
+ override def blocking = true
+
+ def endpointUri = "direct:example"
+
+ def receive = {
+ // ...
+ }
+ }
+
+**Java**
+
+.. code-block:: java
+
+ public class Consumer2 extends UntypedConsumerActor {
+
+ public Consumer2() {
+ getContext().setTimeout(20000); // timeout set to 20 seconds
+ }
+
+ public String getEndpointUri() {
+ return "direct:example";
+ }
+
+ public boolean isBlocking() {
+ return true;
+ }
+
+ public void onReceive(Object message) {
+ // ...
+ }
+ }
+
+This is a valid approach for all endpoint types that do not "natively" support
+asynchronous two-way message exchanges. For all other endpoint types (like
+`Jetty`_ endpoints) it is not recommended to switch to blocking mode but rather
+to configure timeouts in an endpoint-specific way (see
+also :ref:`camel-asynchronous-routing`).
+
+
+Remote consumers
+----------------
+
+Actors (untyped)
+^^^^^^^^^^^^^^^^
+
+Publishing of remote consumer actors is always done on the server side, local
+proxies are never published. Hence the CamelService must be started on the
+remote node. For example, to publish an (untyped) actor on a remote node at
+endpoint URI ``jetty:http://localhost:6644/remote-actor-1``, define the
+following consumer actor class.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.Actor
+ import akka.annotation.consume
+ import akka.camel.Consumer
+
+ class RemoteActor1 extends Actor with Consumer {
+ def endpointUri = "jetty:http://localhost:6644/remote-actor-1"
+
+ protected def receive = {
+ case msg => self.reply("response from remote actor 1")
+ }
+ }
+
+**Java**
+
+.. code-block:: java
+
+ import akka.camel.UntypedConsumerActor;
+
+ public class RemoteActor1 extends UntypedConsumerActor {
+ public String getEndpointUri() {
+ return "jetty:http://localhost:6644/remote-actor-1";
+ }
+
+ public void onReceive(Object message) {
+ getContext().tryReply("response from remote actor 1");
+ }
+ }
+
+On the remote node, start a `CamelService`_, start a remote server, create the
+actor and register it at the remote server.
+
+.. _CamelService: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/CamelService.scala
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.camel.CamelServiceManager._
+ import akka.actor.Actor._
+ import akka.actor.ActorRef
+
+ // ...
+ startCamelService
+
+ val consumer = actorOf(Props[RemoteActor1])
+
+ remote.start("localhost", 7777)
+ remote.register(consumer) // register and start remote consumer
+ // ...
+
+**Java**
+
+.. code-block:: java
+
+ import akka.camel.CamelServiceManager;
+ import static akka.actor.Actors.*;
+
+ // ...
+ CamelServiceManager.startCamelService();
+
+ ActorRef actor = actorOf(new Props(RemoteActor1.class));
+
+ remote().start("localhost", 7777);
+ remote().register(actor); // register and start remote consumer
+ // ...
+
+Explicitly starting a CamelService can be omitted when Akka is running in Kernel
+mode, for example (see also :ref:`camel-configuration`).
+
+
+Typed actors
+^^^^^^^^^^^^
+
+Remote typed consumer actors can be registered with one of the
+``registerTyped*`` methods on the remote server. The following example registers
+the actor with the custom id "123".
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.TypedActor
+
+ // ...
+ val obj = TypedActor.newRemoteInstance(
+ classOf[SampleRemoteTypedConsumer],
+ classOf[SampleRemoteTypedConsumerImpl])
+
+ remote.registerTypedActor("123", obj)
+ // ...
+
+**Java**
+
+.. code-block:: java
+
+ import akka.actor.TypedActor;
+
+ SampleRemoteTypedConsumer obj = (SampleRemoteTypedConsumer)TypedActor.newInstance(
+ SampleRemoteTypedConsumer.class,
+ SampleRemoteTypedConsumerImpl.class);
+
+ remote.registerTypedActor("123", obj)
+ // ...
+
+
+Produce messages
+================
+
+A minimum pre-requisite for producing messages to Camel endpoints with producer
+actors (see below) is an initialized and started CamelContextManager.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.camel.CamelContextManager
+
+ CamelContextManager.init // optionally takes a CamelContext as argument
+ CamelContextManager.start // starts the managed CamelContext
+
+**Java**
+
+.. code-block:: java
+
+ import akka.camel.CamelContextManager;
+
+ CamelContextManager.init(); // optionally takes a CamelContext as argument
+ CamelContextManager.start(); // starts the managed CamelContext
+
+For using producer actors, application may also start a CamelService. This will
+not only setup a CamelContextManager behind the scenes but also register
+listeners at the actor registry (needed to publish consumer actors). If your
+application uses producer actors only and you don't want to have the (very
+small) overhead generated by the registry listeners then setting up a
+CamelContextManager without starting CamelService is recommended. Otherwise,
+just start a CamelService as described for consumer
+actors: :ref:`camel-consumers-and-camel-service`.
+
+
+Producer trait
+--------------
+
+Actors (untyped)
+^^^^^^^^^^^^^^^^
+
+For sending messages to Camel endpoints, actors
+
+* written in Scala need to mixin the `Producer`_ trait and implement the
+ endpointUri method.
+
+* written in Java need to extend the abstract UntypedProducerActor class and
+ implement the getEndpointUri() method. By extending the UntypedProducerActor
+ class, untyped actors (Java) inherit the behaviour of the Producer trait.
+
+.. _Producer: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/Producer.scala
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.Actor
+ import akka.camel.Producer
+
+ class Producer1 extends Actor with Producer {
+ def endpointUri = "http://localhost:8080/news"
+ }
+
+**Java**
+
+.. code-block:: java
+
+ import akka.camel.UntypedProducerActor;
+
+ public class Producer1 extends UntypedProducerActor {
+ public String getEndpointUri() {
+ return "http://localhost:8080/news";
+ }
+ }
+
+Producer1 inherits a default implementation of the receive method from the
+Producer trait. To customize a producer actor's default behavior it is
+recommended to override the Producer.receiveBeforeProduce and
+Producer.receiveAfterProduce methods. This is explained later in more detail.
+Actors should not override the default Producer.receive method.
+
+Any message sent to a Producer actor (or UntypedProducerActor) will be sent to
+the associated Camel endpoint, in the above example to
+``http://localhost:8080/news``. Response messages (if supported by the
+configured endpoint) will, by default, be returned to the original sender. The
+following example uses the ``?`` operator (Scala) to send a message to a
+Producer actor and waits for a response. In Java, the sendRequestReply method is
+used.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.Actor._
+ import akka.actor.ActorRef
+
+ val producer = actorOf(Props[Producer1])
+ val response = (producer ? "akka rocks").get
+ val body = response.bodyAs[String]
+
+**Java**
+
+.. code-block:: java
+
+ import akka.actor.ActorRef;
+ import static akka.actor.Actors.*;
+ import akka.camel.Message;
+
+ ActorRef producer = actorOf(new Props(Producer1.class));
+ Message response = (Message)producer.sendRequestReply("akka rocks");
+ String body = response.getBodyAs(String.class);
+
+If the message is sent using the ! operator (or the tell method in Java)
+then the response message is sent back asynchronously to the original sender. In
+the following example, a Sender actor sends a message (a String) to a producer
+actor using the ! operator and asynchronously receives a response (of type
+Message).
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.{Actor, ActorRef}
+ import akka.camel.Message
+
+ class Sender(producer: ActorRef) extends Actor {
+ def receive = {
+ case request: String => producer ! request
+ case response: Message => {
+ /* process response ... */
+ }
+ // ...
+ }
+ }
+
+**Java**
+
+.. code-block:: java
+
+ // TODO
+
+
+.. _camel-custom-processing:
+
+Custom Processing
+^^^^^^^^^^^^^^^^^
+
+Instead of replying to the initial sender, producer actors can implement custom
+response processing by overriding the receiveAfterProduce method (Scala) or
+onReceiveAfterProduce method (Java). In the following example, the response
+message is forwarded to a target actor instead of being replied to the original
+sender.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.{Actor, ActorRef}
+ import akka.camel.Producer
+
+ class Producer1(target: ActorRef) extends Actor with Producer {
+ def endpointUri = "http://localhost:8080/news"
+
+ override protected def receiveAfterProduce = {
+ // do not reply but forward result to target
+ case msg => target forward msg
+ }
+ }
+
+**Java**
+
+.. code-block:: java
+
+ import akka.actor.ActorRef;
+ import akka.camel.UntypedProducerActor;
+
+ public class Producer1 extends UntypedProducerActor {
+ private ActorRef target;
+
+ public Producer1(ActorRef target) {
+ this.target = target;
+ }
+
+ public String getEndpointUri() {
+ return "http://localhost:8080/news";
+ }
+
+ @Override
+ public void onReceiveAfterProduce(Object message) {
+ target.forward((Message)message, getContext());
+ }
+ }
+
+To create an untyped actor instance with a constructor argument, a factory is
+needed (this should be doable without a factory in upcoming Akka versions).
+
+.. code-block:: java
+
+ import akka.actor.ActorRef;
+ import akka.actor.UntypedActorFactory;
+ import akka.actor.UntypedActor;
+
+ public class Producer1Factory implements UntypedActorFactory {
+
+ private ActorRef target;
+
+ public Producer1Factory(ActorRef target) {
+ this.target = target;
+ }
+
+ public UntypedActor create() {
+ return new Producer1(target);
+ }
+ }
+
+The instantiation is done with the Actors.actorOf method and the factory as
+argument.
+
+.. code-block:: java
+
+ import static akka.actor.Actors.*;
+ import akka.actor.ActorRef;
+
+ ActorRef target = ...
+  ActorRef producer = actorOf(new Props(new Producer1Factory(target)));
+
+Before producing messages to endpoints, producer actors can pre-process them by
+overriding the receiveBeforeProduce method (Scala) or onReceiveBeforeProduce
+method (Java).
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.{Actor, ActorRef}
+ import akka.camel.{Message, Producer}
+
+ class Producer1(target: ActorRef) extends Actor with Producer {
+ def endpointUri = "http://localhost:8080/news"
+
+ override protected def receiveBeforeProduce = {
+ case msg: Message => {
+ // do some pre-processing (e.g. add endpoint-specific message headers)
+ // ...
+
+ // and return the modified message
+ msg
+ }
+ }
+ }
+
+**Java**
+
+.. code-block:: java
+
+ import akka.actor.ActorRef;
+  import akka.camel.Message;
+ import akka.camel.UntypedProducerActor;
+
+ public class Producer1 extends UntypedProducerActor {
+ private ActorRef target;
+
+ public Producer1(ActorRef target) {
+ this.target = target;
+ }
+
+ public String getEndpointUri() {
+ return "http://localhost:8080/news";
+ }
+
+ @Override
+ public Object onReceiveBeforeProduce(Object message) {
+ Message msg = (Message)message;
+ // do some pre-processing (e.g. add endpoint-specific message headers)
+ // ...
+
+ // and return the modified message
+      return msg;
+ }
+ }
+
+
+Producer configuration options
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The interaction of producer actors with Camel endpoints can be configured to be
+one-way or two-way (by initiating in-only or in-out message exchanges,
+respectively). By default, the producer initiates an in-out message exchange
+with the endpoint. For initiating an in-only exchange, producer actors
+
+* written in Scala either have to override the oneway method to return true
+* written in Java have to override the isOneway method to return true.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.camel.Producer
+
+ class Producer2 extends Actor with Producer {
+ def endpointUri = "jms:queue:test"
+ override def oneway = true
+ }
+
+**Java**
+
+.. code-block:: java
+
+ import akka.camel.UntypedProducerActor;
+
+ public class SampleUntypedReplyingProducer extends UntypedProducerActor {
+ public String getEndpointUri() {
+ return "jms:queue:test";
+ }
+
+ @Override
+ public boolean isOneway() {
+ return true;
+ }
+ }
+
+Message correlation
+^^^^^^^^^^^^^^^^^^^
+
+To correlate request with response messages, applications can set the
+Message.MessageExchangeId message header.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.camel.Message
+
+ producer ! Message("bar", Map(Message.MessageExchangeId -> "123"))
+
+**Java**
+
+.. code-block:: java
+
+ // TODO
+
+Responses of type Message or Failure will contain that header as well. When
+receiving messages from Camel endpoints this message header is already set (see
+:ref:`camel-consume-messages`).
+
+
+Matching responses
+^^^^^^^^^^^^^^^^^^
+
+The following code snippet shows how to best match responses when sending
+messages with the ``?`` operator (Scala) or with the ``ask`` method
+(Java).
+
+**Scala**
+
+.. code-block:: scala
+
+ val response = (producer ? message).get
+
+ response match {
+ case Some(Message(body, headers)) => ...
+ case Some(Failure(exception, headers)) => ...
+ case _ => ...
+ }
+
+**Java**
+
+.. code-block:: java
+
+ // TODO
+
+
+ProducerTemplate
+----------------
+
+The `Producer`_ trait (and the abstract UntypedProducerActor class) is a very
+convenient way for actors to produce messages to Camel endpoints. (Untyped)
+actors and typed actors may also use a Camel `ProducerTemplate`_ for producing
+messages to endpoints. For typed actors it's the only way to produce messages to
+Camel endpoints.
+
+At the moment, only the Producer trait fully supports asynchronous in-out
+message exchanges with Camel endpoints without allocating a thread for the full
+duration of the exchange. For example, when using endpoints that support
+asynchronous message exchanges (such as Jetty endpoints that internally use
+`Jetty's asynchronous HTTP client`_) then usage of the Producer trait is highly
+recommended (see also :ref:`camel-asynchronous-routing`).
+
+.. _Producer: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/Producer.scala
+.. _ProducerTemplate: http://camel.apache.org/maven/camel-2.2.0/camel-core/apidocs/index.html
+.. _Jetty's asynchronous HTTP client: http://wiki.eclipse.org/Jetty/Tutorial/HttpClient
+
+
+Actors (untyped)
+^^^^^^^^^^^^^^^^
+
+A managed ProducerTemplate instance can be obtained via
+CamelContextManager.mandatoryTemplate. In the following example, an actor uses a
+ProducerTemplate to send a one-way message to a ``direct:news`` endpoint.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.Actor
+ import akka.camel.CamelContextManager
+
+ class ProducerActor extends Actor {
+ protected def receive = {
+ // one-way message exchange with direct:news endpoint
+ case msg => CamelContextManager.mandatoryTemplate.sendBody("direct:news", msg)
+ }
+ }
+
+**Java**
+
+.. code-block:: java
+
+ import akka.actor.UntypedActor;
+ import akka.camel.CamelContextManager;
+
+ public class SampleUntypedActor extends UntypedActor {
+ public void onReceive(Object msg) {
+ CamelContextManager.getMandatoryTemplate().sendBody("direct:news", msg);
+ }
+ }
+
+Alternatively, one can also use ``Option[ProducerTemplate]`` returned by
+``CamelContextManager.template``.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.Actor
+ import akka.camel.CamelContextManager
+
+ class ProducerActor extends Actor {
+ protected def receive = {
+ // one-way message exchange with direct:news endpoint
+ case msg => for(t <- CamelContextManager.template) t.sendBody("direct:news", msg)
+ }
+ }
+
+**Java**
+
+.. code-block:: java
+
+  import org.apache.camel.ProducerTemplate;
+
+ import akka.actor.UntypedActor;
+ import akka.camel.CamelContextManager;
+
+ public class SampleUntypedActor extends UntypedActor {
+ public void onReceive(Object msg) {
+ for (ProducerTemplate t : CamelContextManager.getTemplate()) {
+ t.sendBody("direct:news", msg);
+ }
+ }
+ }
+
+For initiating a two-way message exchange, one of the
+``ProducerTemplate.request*`` methods must be used.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.Actor
+ import akka.camel.CamelContextManager
+
+ class ProducerActor extends Actor {
+ protected def receive = {
+ // two-way message exchange with direct:news endpoint
+ case msg => self.reply(CamelContextManager.mandatoryTemplate.requestBody("direct:news", msg))
+ }
+ }
+
+**Java**
+
+.. code-block:: java
+
+ import akka.actor.UntypedActor;
+ import akka.camel.CamelContextManager;
+
+ public class SampleUntypedActor extends UntypedActor {
+ public void onReceive(Object msg) {
+ getContext().tryReply(CamelContextManager.getMandatoryTemplate().requestBody("direct:news", msg));
+ }
+ }
+
+
+Typed actors
+^^^^^^^^^^^^
+
+Typed Actors get access to a managed ProducerTemplate in the same way, as shown
+in the next example.
+
+**Scala**
+
+.. code-block:: scala
+
+ // TODO
+
+**Java**
+
+.. code-block:: java
+
+ import akka.actor.TypedActor;
+ import akka.camel.CamelContextManager;
+
+ public class SampleProducerImpl extends TypedActor implements SampleProducer {
+ public void foo(String msg) {
+ ProducerTemplate template = CamelContextManager.getMandatoryTemplate();
+ template.sendBody("direct:news", msg);
+ }
+ }
+
+
+.. _camel-asynchronous-routing:
+
+Asynchronous routing
+====================
+
+Since Akka 0.10, in-out message exchanges between endpoints and actors are
+designed to be asynchronous. This is the case for both, consumer and producer
+actors.
+
+* A consumer endpoint sends request messages to its consumer actor using the ``!``
+ (bang) operator and the actor returns responses with self.reply once they are
+ ready. The sender reference used for reply is an adapter to Camel's asynchronous
+ routing engine that implements the ActorRef trait.
+
+* A producer actor sends request messages to its endpoint using Camel's
+ asynchronous routing engine. Asynchronous responses are wrapped and added to the
+ producer actor's mailbox for later processing. By default, response messages are
+ returned to the initial sender but this can be overridden by Producer
+  implementations (see also description of the ``receiveAfterProduce`` method
+ in :ref:`camel-custom-processing`).
+
+However, asynchronous two-way message exchanges, without allocating a thread for
+the full duration of exchange, cannot be generically supported by Camel's
+asynchronous routing engine alone. This must be supported by the individual
+`Camel components`_ (from which endpoints are created) as well. They must be
+able to suspend any work started for request processing (thereby freeing threads
+to do other work) and resume processing when the response is ready. This is
+currently the case for a `subset of components`_ such as the `Jetty component`_.
+All other Camel components can still be used, of course, but they will cause
+allocation of a thread for the duration of an in-out message exchange. There's
+also a :ref:`camel-async-example` that implements both, an asynchronous
+consumer and an asynchronous producer, with the jetty component.
+
+.. _Camel components: http://camel.apache.org/components.html
+.. _subset of components: http://camel.apache.org/asynchronous-routing-engine.html
+.. _Jetty component: http://camel.apache.org/jetty.html
+
+
+Fault tolerance
+===============
+
+Consumer actors and typed actors can be also managed by supervisors. If a
+consumer is configured to be restarted upon failure the associated Camel
+endpoint is not restarted. Its behaviour during restart is as follows.
+
+* A one-way (in-only) message exchange will be queued by the consumer and
+ processed once restart completes.
+
+* A two-way (in-out) message exchange will wait and either succeed after restart
+ completes or time-out when the restart duration exceeds
+ the :ref:`camel-timeout`.
+
+If a consumer is configured to be shut down upon failure, the associated
+endpoint is shut down as well. For details refer to :ref:`camel-unpublishing`.
+
+For examples, tips and trick how to implement fault-tolerant consumer and
+producer actors, take a look at these two articles.
+
+* `Akka Consumer Actors: New Features and Best Practices <http://krasserm.blogspot.com/2011/02/akka-consumer-actors-new-features-and.html>`_
+* `Akka Producer Actors: New Features and Best Practices <http://krasserm.blogspot.com/2011/02/akka-producer-actor-new-features-and.html>`_
+
+
+.. _camel-configuration:
+
+CamelService configuration
+==========================
+
+For publishing consumer actors and typed actor methods
+(:ref:`camel-publishing`), applications must start a CamelService. When starting
+Akka in :ref:`microkernel` mode then a CamelService can be started automatically
+when camel is added to the enabled-modules list in :ref:`configuration`, for example:
+
+.. code-block:: none
+
+ akka {
+ ...
+ enabled-modules = ["camel"] # Options: ["remote", "camel", "http"]
+ ...
+ }
+
+Applications that do not use the Akka Kernel, such as standalone applications
+for example, need to start a CamelService manually, as explained in the
+following subsections. When starting a CamelService manually, settings in
+:ref:`configuration` are ignored.
+
+
+Standalone applications
+-----------------------
+
+Standalone application should create and start a CamelService in the following way.
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.camel.CamelServiceManager._
+
+ startCamelService
+
+**Java**
+
+.. code-block:: java
+
+ import static akka.camel.CamelServiceManager.*;
+
+ startCamelService();
+
+Internally, a CamelService uses the CamelContextManager singleton to manage a
+CamelContext. A CamelContext manages the routes from endpoints to consumer
+actors and typed actors. These routes are added and removed at runtime (when
+(untyped) consumer actors and typed consumer actors are started and stopped).
+Applications may additionally want to add their own custom routes or modify the
+CamelContext in some other way. This can be done by initializing the
+CamelContextManager manually and making modifications to CamelContext **before**
+the CamelService is started.
+
+**Scala**
+
+.. code-block:: scala
+
+ import org.apache.camel.builder.RouteBuilder
+
+ import akka.camel.CamelContextManager
+ import akka.camel.CamelServiceManager._
+
+ CamelContextManager.init
+
+ // add a custom route to the managed CamelContext
+ CamelContextManager.mandatoryContext.addRoutes(new CustomRouteBuilder)
+
+ startCamelService
+
+ // an application-specific route builder
+ class CustomRouteBuilder extends RouteBuilder {
+ def configure {
+ // ...
+ }
+ }
+
+**Java**
+
+.. code-block:: java
+
+ import org.apache.camel.builder.RouteBuilder;
+
+ import akka.camel.CamelContextManager;
+ import static akka.camel.CamelServiceManager.*;
+
+ CamelContextManager.init();
+
+ // add a custom route to the managed CamelContext
+ CamelContextManager.getMandatoryContext().addRoutes(new CustomRouteBuilder());
+
+ startCamelService();
+
+ // an application-specific route builder
+ private static class CustomRouteBuilder extends RouteBuilder {
+ public void configure() {
+ // ...
+ }
+ }
+
+
+Applications may even provide their own CamelContext instance as argument to the
+init method call as shown in the following snippet. Here, a DefaultCamelContext
+is created using a Spring application context as `registry`_.
+
+.. _registry: http://camel.apache.org/registry.html
+
+
+**Scala**
+
+.. code-block:: scala
+
+ import org.apache.camel.impl.DefaultCamelContext
+ import org.apache.camel.spring.spi.ApplicationContextRegistry
+ import org.springframework.context.support.ClassPathXmlApplicationContext
+
+ import akka.camel.CamelContextManager
+ import akka.camel.CamelServiceManager._
+
+ // create a custom Camel registry backed up by a Spring application context
+ val context = new ClassPathXmlApplicationContext("/context.xml")
+ val registry = new ApplicationContextRegistry(context)
+
+ // initialize CamelContextManager with a DefaultCamelContext using the custom registry
+ CamelContextManager.init(new DefaultCamelContext(registry))
+
+ // ...
+
+ startCamelService
+
+**Java**
+
+.. code-block:: java
+
+  import org.apache.camel.impl.DefaultCamelContext;
+ import org.apache.camel.spi.Registry;
+ import org.apache.camel.spring.spi.ApplicationContextRegistry;
+
+ import org.springframework.context.ApplicationContext;
+ import org.springframework.context.support.ClassPathXmlApplicationContext;
+
+ import akka.camel.CamelContextManager;
+ import static akka.camel.CamelServiceManager.*;
+
+ // create a custom Camel registry backed up by a Spring application context
+ ApplicationContext context = new ClassPathXmlApplicationContext("/context.xml");
+ Registry registry = new ApplicationContextRegistry(context);
+
+ // initialize CamelContextManager with a DefaultCamelContext using the custom registry
+ CamelContextManager.init(new DefaultCamelContext(registry));
+
+ // ...
+
+ startCamelService();
+
+
+.. _camel-spring-applications:
+
+Standalone Spring applications
+------------------------------
+
+A better approach to configure a Spring application context as registry for the
+CamelContext is to use `Camel's Spring support`_. Furthermore,
+the :ref:`spring-module` module additionally supports a ``camel-service``
+element for creating and starting a CamelService. An optional reference to a
+custom CamelContext can be defined as well. Here's an example.
+
+.. _Camel's Spring support: http://camel.apache.org/spring.html
+
+.. code-block:: xml
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+Creating a CamelContext this way automatically adds the defining Spring
+application context as registry to that CamelContext. The CamelService is
+started when the application context is started and stopped when the application
+context is closed. A simple usage example is shown in the following snippet.
+
+**Scala**
+
+.. code-block:: scala
+
+ import org.springframework.context.support.ClassPathXmlApplicationContext
+ import akka.camel.CamelContextManager
+
+ // Create and start application context (start CamelService)
+ val appctx = new ClassPathXmlApplicationContext("/context.xml")
+
+ // Access to CamelContext (SpringCamelContext)
+ val ctx = CamelContextManager.mandatoryContext
+ // Access to ProducerTemplate of that CamelContext
+ val tpl = CamelContextManager.mandatoryTemplate
+
+ // use ctx and tpl ...
+
+ // Close application context (stop CamelService)
+ appctx.close
+
+**Java**
+
+.. code-block:: java
+
+ // TODO
+
+
+If the CamelService doesn't reference a custom CamelContext then a
+DefaultCamelContext is created (and accessible via the CamelContextManager).
+
+.. code-block:: xml
+
+
+
+
+
+
+
+
+
+Kernel mode
+-----------
+
+For classes that are loaded by the Kernel or the Initializer, starting the
+CamelService can be omitted, as discussed in the previous section. Since these
+classes are loaded and instantiated before the CamelService is started (by
+Akka), applications can make modifications to a CamelContext here as well (and
+even provide their own CamelContext). Assuming there's a boot class
+sample.camel.Boot configured in :ref:`configuration`.
+
+.. code-block:: none
+
+ akka {
+ ...
+ boot = ["sample.camel.Boot"]
+ ...
+ }
+
+Modifications to the CamelContext can be done like in the following snippet.
+
+**Scala**
+
+.. code-block:: scala
+
+ package sample.camel
+
+ import org.apache.camel.builder.RouteBuilder
+
+ import akka.camel.CamelContextManager
+
+ class Boot {
+ CamelContextManager.init
+
+ // Customize CamelContext with application-specific routes
+ CamelContextManager.mandatoryContext.addRoutes(new CustomRouteBuilder)
+
+ // No need to start CamelService here. It will be started
+    // when this class has been loaded and instantiated.
+ }
+
+ class CustomRouteBuilder extends RouteBuilder {
+ def configure {
+ // ...
+ }
+ }
+
+**Java**
+
+.. code-block:: java
+
+ // TODO
+
+
+Custom Camel routes
+===================
+
+In all the examples so far, routes to consumer actors have been automatically
+constructed by akka-camel, when the actor was started. Although the default
+route construction templates, used by akka-camel internally, are sufficient for
+most use cases, some applications may require more specialized routes to actors.
+The akka-camel module provides two mechanisms for customizing routes to actors,
+which will be explained in this section. These are
+
+* Usage of :ref:`camel-components` to access (untyped) actors and typed actors.
+  Any Camel route can use these components to access Akka actors.
+
+* :ref:`camel-intercepting-route-construction` for (untyped) actors and typed
+  actors. Default routes to consumer actors are extended using predefined
+  extension points.
+
+
+.. _camel-components:
+
+Akka Camel components
+---------------------
+
+Akka actors can be accessed from Camel routes using the `actor`_ and
+`typed-actor`_ Camel components, respectively. These components can be used to
+access any Akka actor (not only consumer actors) from Camel routes, as described
+in the following sections.
+
+.. _actor: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala
+.. _typed-actor: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/component/TypedActorComponent.scala
+
+
+Access to actors
+----------------
+
+To access (untyped) actors from custom Camel routes, the `actor`_ Camel
+component should be used. It fully supports Camel's `asynchronous routing
+engine`_.
+
+.. _actor: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/component/ActorComponent.scala
+.. _asynchronous routing engine: http://camel.apache.org/asynchronous-routing-engine.html
+
+This component accepts the following endpoint URI formats:
+
+* ``actor:<actor-id>[?<options>]``
+* ``actor:id:[<actor-id>][?<options>]``
+* ``actor:uuid:[<actor-uuid>][?<options>]``
+
+where ``<actor-id>`` and ``<actor-uuid>`` refer to ``actorRef.id`` and the
+String-representation of ``actorRef.uuid``, respectively. The ``<options>`` are
+name-value pairs separated by ``&`` (i.e. ``name1=value1&name2=value2&...``).
+
+
+URI options
+^^^^^^^^^^^
+
+The following URI options are supported:
+
++----------+---------+---------+-------------------------------------------+
+| Name | Type | Default | Description |
++==========+=========+=========+===========================================+
+| blocking | Boolean | false | If set to true, in-out message exchanges |
+| | | | with the target actor will be made with |
+| | | | the ``!!`` operator, otherwise with the |
+| | | | ``!`` operator. |
+| | | | |
+| | | | See also :ref:`camel-timeout`. |
++----------+---------+---------+-------------------------------------------+
+| autoack | Boolean | true | If set to true, in-only message exchanges |
+| | | | are auto-acknowledged when the message is |
+| | | | added to the actor's mailbox. If set to |
+| | | | false, actors must acknowledge the |
+| | | | receipt of the message. |
+| | | | |
+| | | | See also :ref:`camel-acknowledgements`. |
++----------+---------+---------+-------------------------------------------+
+
+Here's an actor endpoint URI example containing an actor uuid::
+
+ actor:uuid:12345678?blocking=true
+
+In actor endpoint URIs that contain id: or uuid:, an actor identifier (id or
+uuid) is optional. In this case, the in-message of an exchange produced to an
+actor endpoint must contain a message header with name CamelActorIdentifier
+(which is defined by the ActorComponent.ActorIdentifier field) and a value that
+is the target actor's identifier. On the other hand, if the URI contains an
+actor identifier, it can be seen as a default actor identifier that can be
+overridden by messages containing a CamelActorIdentifier header.
+
+
+Message headers
+^^^^^^^^^^^^^^^
+
++----------------------+--------+-------------------------------------------+
+| Name | Type | Description |
++======================+========+===========================================+
+| CamelActorIdentifier | String | Contains the identifier (id or uuid) of |
+| | | the actor to route the message to. The |
+| | | identifier is interpreted as actor id if |
+| | | the URI contains id:, the identifier is |
+| | | interpreted as uuid if the URI contains |
+| | | uuid:. A uuid value may also be of type |
+| | | Uuid (not only String). The header name |
+| | | is defined by the |
+| | | ActorComponent.ActorIdentifier field. |
++----------------------+--------+-------------------------------------------+
+
+Here's another actor endpoint URI example that doesn't define an actor uuid. In
+this case the target actor uuid must be defined by the CamelActorIdentifier
+message header::
+
+ actor:uuid:
+
+In the following example, a custom route to an actor is created, using the
+actor's uuid (i.e. actorRef.uuid). The route starts from a `Jetty`_ endpoint and
+ends at the target actor.
+
+
+**Scala**
+
+.. code-block:: scala
+
+ import org.apache.camel.builder.RouteBuilder
+
+ import akka.actor._
+ import akka.actor.Actor
+ import akka.actor.Actor._
+ import akka.camel.{Message, CamelContextManager, CamelServiceManager}
+
+ object CustomRouteExample extends Application {
+ val target = actorOf(Props[CustomRouteTarget])
+
+ CamelServiceManager.startCamelService
+ CamelContextManager.mandatoryContext.addRoutes(new CustomRouteBuilder(target.uuid))
+ }
+
+ class CustomRouteTarget extends Actor {
+ def receive = {
+ case msg: Message => self.reply("Hello %s" format msg.bodyAs[String])
+ }
+ }
+
+ class CustomRouteBuilder(uuid: Uuid) extends RouteBuilder {
+ def configure {
+ val actorUri = "actor:uuid:%s" format uuid
+ from("jetty:http://localhost:8877/camel/custom").to(actorUri)
+ }
+ }
+
+
+**Java**
+
+.. code-block:: java
+
+ import com.eaio.uuid.UUID;
+
+ import org.apache.camel.builder.RouteBuilder;
+ import static akka.actor.Actors.*;
+ import akka.actor.ActorRef;
+ import akka.actor.UntypedActor;
+ import akka.camel.CamelServiceManager;
+ import akka.camel.CamelContextManager;
+ import akka.camel.Message;
+
+ public class CustomRouteExample {
+ public static void main(String... args) throws Exception {
+ ActorRef target = actorOf(new Props(CustomRouteTarget.class));
+ CamelServiceManager.startCamelService();
+ CamelContextManager.getMandatoryContext().addRoutes(new CustomRouteBuilder(target.getUuid()));
+ }
+ }
+
+ public class CustomRouteTarget extends UntypedActor {
+ public void onReceive(Object message) {
+ Message msg = (Message) message;
+ String body = msg.getBodyAs(String.class);
+ getContext().tryReply(String.format("Hello %s", body));
+ }
+ }
+
+ public class CustomRouteBuilder extends RouteBuilder {
+ private UUID uuid;
+
+ public CustomRouteBuilder(UUID uuid) {
+ this.uuid = uuid;
+ }
+
+ public void configure() {
+ String actorUri = String.format("actor:uuid:%s", uuid);
+ from("jetty:http://localhost:8877/camel/custom").to(actorUri);
+ }
+ }
+
+When the example is started, messages POSTed to
+``http://localhost:8877/camel/custom`` are routed to the target actor.
+
+
+Access to typed actors
+----------------------
+
+To access typed actor methods from custom Camel routes, the `typed-actor`_ Camel
+component should be used. It is a specialization of the Camel `bean`_ component.
+Applications should use the interface (endpoint URI syntax and options) as
+described in the bean component documentation but with the typed-actor schema.
+Typed Actors must be added to a `Camel registry`_ for being accessible by the
+typed-actor component.
+
+.. _typed-actor: http://github.com/jboner/akka/blob/master/akka-camel/src/main/scala/akka/camel/component/TypedActorComponent.scala
+.. _bean: http://camel.apache.org/bean.html
+.. _Camel registry: http://camel.apache.org/registry.html
+
+
+.. _camel-typed-actors-using-spring:
+
+Using Spring
+^^^^^^^^^^^^
+
+The following example shows how to access typed actors in a Spring application
+context. For adding typed actors to the application context and for starting
+:ref:`camel-spring-applications` the :ref:`spring-module` module is used in the
+following example. It offers a ``typed-actor`` element to define typed actor
+factory beans and a ``camel-service`` element to create and start a
+CamelService.
+
+.. code-block:: xml
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+SampleTypedActor is the typed actor interface and SampleTypedActorImpl is the
+typed actor implementation class.
+
+**Scala**
+
+.. code-block:: scala
+
+ package sample
+
+ import akka.actor.TypedActor
+
+ trait SampleTypedActor {
+ def foo(s: String): String
+ }
+
+ class SampleTypedActorImpl extends TypedActor with SampleTypedActor {
+ def foo(s: String) = "hello %s" format s
+ }
+
+**Java**
+
+.. code-block:: java
+
+ package sample;
+
+ import akka.actor.TypedActor;
+
+ public interface SampleTypedActor {
+ public String foo(String s);
+ }
+
+ public class SampleTypedActorImpl extends TypedActor implements SampleTypedActor {
+
+ public String foo(String s) {
+ return "hello " + s;
+ }
+ }
+
+The SampleRouteBuilder defines a custom route from the direct:test endpoint to
+the sample typed actor using a typed-actor endpoint URI.
+
+**Scala**
+
+.. code-block:: scala
+
+ package sample
+
+ import org.apache.camel.builder.RouteBuilder
+
+ class SampleRouteBuilder extends RouteBuilder {
+ def configure = {
+ // route to typed actor
+ from("direct:test").to("typed-actor:sample?method=foo")
+ }
+ }
+
+**Java**
+
+.. code-block:: java
+
+ package sample;
+
+ import org.apache.camel.builder.RouteBuilder;
+
+ public class SampleRouteBuilder extends RouteBuilder {
+ public void configure() {
+ // route to typed actor
+ from("direct:test").to("typed-actor:sample?method=foo");
+ }
+ }
+
+The typed-actor endpoint URI syntax is::
+
+  typed-actor:<bean-id>?method=<method-name>
+
+where ``<bean-id>`` is the id of the bean in the Spring application context and
+``<method-name>`` is the name of the typed actor method to invoke.
+
+Usage of the custom route for sending a message to the typed actor is shown in
+the following snippet.
+
+**Scala**
+
+.. code-block:: scala
+
+ package sample
+
+ import org.springframework.context.support.ClassPathXmlApplicationContext
+ import akka.camel.CamelContextManager
+
+ // load Spring application context (starts CamelService)
+ val appctx = new ClassPathXmlApplicationContext("/context-standalone.xml")
+
+ // access 'sample' typed actor via custom route
+ assert("hello akka" == CamelContextManager.mandatoryTemplate.requestBody("direct:test", "akka"))
+
+ // close Spring application context (stops CamelService)
+ appctx.close
+
+**Java**
+
+.. code-block:: java
+
+ package sample;
+
+ import org.springframework.context.support.ClassPathXmlApplicationContext;
+ import akka.camel.CamelContextManager;
+
+ // load Spring application context
+ ClassPathXmlApplicationContext appctx = new ClassPathXmlApplicationContext("/context-standalone.xml");
+
+ // access 'externally' registered typed actors with typed-actor component
+ assert("hello akka" == CamelContextManager.getMandatoryTemplate().requestBody("direct:test", "akka"));
+
+ // close Spring application context (stops CamelService)
+ appctx.close();
+
+The application uses a Camel `producer template`_ to access the typed actor via
+the ``direct:test`` endpoint.
+
+.. _producer template: http://camel.apache.org/producertemplate.html
+
+
+Without Spring
+^^^^^^^^^^^^^^
+
+Usage of :ref:`spring-module` for adding typed actors to the Camel registry and
+starting a CamelService is optional. Setting up a Spring-less application for
+accessing typed actors is shown in the next example.
+
+**Scala**
+
+.. code-block:: scala
+
+ package sample
+
+ import org.apache.camel.impl.{DefaultCamelContext, SimpleRegistry}
+ import akka.actor.TypedActor
+ import akka.camel.CamelContextManager
+ import akka.camel.CamelServiceManager._
+
+ // register typed actor
+ val registry = new SimpleRegistry
+ registry.put("sample", TypedActor.newInstance(classOf[SampleTypedActor], classOf[SampleTypedActorImpl]))
+
+ // customize CamelContext
+ CamelContextManager.init(new DefaultCamelContext(registry))
+ CamelContextManager.mandatoryContext.addRoutes(new SampleRouteBuilder)
+
+ startCamelService
+
+ // access 'sample' typed actor via custom route
+ assert("hello akka" == CamelContextManager.mandatoryTemplate.requestBody("direct:test", "akka"))
+
+ stopCamelService
+
+**Java**
+
+.. code-block:: java
+
+ package sample;
+
+ // register typed actor
+ SimpleRegistry registry = new SimpleRegistry();
+ registry.put("sample", TypedActor.newInstance(SampleTypedActor.class, SampleTypedActorImpl.class));
+
+ // customize CamelContext
+ CamelContextManager.init(new DefaultCamelContext(registry));
+ CamelContextManager.getMandatoryContext().addRoutes(new SampleRouteBuilder());
+
+ startCamelService();
+
+ // access 'sample' typed actor via custom route
+ assert("hello akka" == CamelContextManager.getMandatoryTemplate().requestBody("direct:test", "akka"));
+
+ stopCamelService();
+
+Here, `SimpleRegistry`_, a java.util.Map based registry, is used to register
+typed actors. The CamelService is started and stopped programmatically.
+
+.. _SimpleRegistry: https://svn.apache.org/repos/asf/camel/trunk/camel-core/src/main/java/org/apache/camel/impl/SimpleRegistry.java
+
+
+.. _camel-intercepting-route-construction:
+
+Intercepting route construction
+-------------------------------
+
+The previous section, :ref:`camel-components`, explained how to setup a route to
+an (untyped) actor or typed actor manually. It was the application's
+responsibility to define the route and add it to the current CamelContext. This
+section explains a more convenient way to define custom routes: akka-camel is
+still setting up the routes to consumer actors (and adds these routes to the
+current CamelContext) but applications can define extensions to these routes.
+Extensions can be defined with Camel's `Java DSL`_ or `Scala DSL`_. For example,
+an extension could be a custom error handler that redelivers messages from an
+endpoint to an actor's bounded mailbox when the mailbox was full.
+
+.. _Java DSL: http://camel.apache.org/dsl.html
+.. _Scala DSL: http://camel.apache.org/scala-dsl.html
+
+The following examples demonstrate how to extend a route to a consumer actor for
+handling exceptions thrown by that actor. To simplify the example, we configure
+:ref:`camel-blocking-exchanges` which reports any exception, that is thrown by
+receive, directly back to the Camel route. One could also report exceptions
+asynchronously using a Failure reply (see also `this article`__) but we'll do it
+differently here.
+
+__ http://krasserm.blogspot.com/2011/02/akka-consumer-actors-new-features-and.html
+
+
+Actors (untyped)
+^^^^^^^^^^^^^^^^
+
+**Scala**
+
+.. code-block:: scala
+
+ import akka.actor.Actor
+ import akka.camel.Consumer
+
+ import org.apache.camel.builder.Builder
+ import org.apache.camel.model.RouteDefinition
+
+ class ErrorHandlingConsumer extends Actor with Consumer {
+ def endpointUri = "direct:error-handler-test"
+
+ // Needed to propagate exception back to caller
+ override def blocking = true
+
+ onRouteDefinition {rd: RouteDefinition =>
+ // Catch any exception and handle it by returning the exception message as response
+ rd.onException(classOf[Exception]).handled(true).transform(Builder.exceptionMessage).end
+ }
+
+ protected def receive = {
+ case msg: Message => throw new Exception("error: %s" format msg.body)
+ }
+ }
+
+**Java**
+
+.. code-block:: java
+
+ import akka.camel.UntypedConsumerActor;
+
+ import org.apache.camel.builder.Builder;
+ import org.apache.camel.model.ProcessorDefinition;
+ import org.apache.camel.model.RouteDefinition;
+
+ public class SampleErrorHandlingConsumer extends UntypedConsumerActor {
+
+ public String getEndpointUri() {
+ return "direct:error-handler-test";
+ }
+
+ // Needed to propagate exception back to caller
+ public boolean isBlocking() {
+ return true;
+ }
+
+ public void preStart() {
+ onRouteDefinition(new RouteDefinitionHandler() {
+          public ProcessorDefinition<?> onRouteDefinition(RouteDefinition rd) {
+ // Catch any exception and handle it by returning the exception message as response
+ return rd.onException(Exception.class).handled(true).transform(Builder.exceptionMessage()).end();
+ }
+ });
+ }
+
+ public void onReceive(Object message) throws Exception {
+ Message msg = (Message)message;
+ String body = msg.getBodyAs(String.class);
+ throw new Exception(String.format("error: %s", body));
+ }
+
+ }
+
+
+
+For (untyped) actors, consumer route extensions are defined by calling the
+onRouteDefinition method with a route definition handler. In Scala, this is a
+function of type ``RouteDefinition => ProcessorDefinition[_]``, in Java it is an
+instance of ``RouteDefinitionHandler`` which is defined as follows.
+
+.. code-block:: scala
+
+ package akka.camel
+
+ import org.apache.camel.model.RouteDefinition
+ import org.apache.camel.model.ProcessorDefinition
+
+ trait RouteDefinitionHandler {
+ def onRouteDefinition(rd: RouteDefinition): ProcessorDefinition[_]
+ }
+
+The akka-camel module creates a RouteDefinition instance by calling
+from(endpointUri) on a Camel RouteBuilder (where endpointUri is the endpoint URI
+of the consumer actor) and passes that instance as argument to the route
+definition handler \*). The route definition handler then extends the route and
+returns a ProcessorDefinition (in the above example, the ProcessorDefinition
+returned by the end method. See the `org.apache.camel.model`__ package for
+details). After executing the route definition handler, akka-camel finally calls
+a to(actor:uuid:actorUuid) on the returned ProcessorDefinition to complete the
+route to the consumer actor (where actorUuid is the uuid of the consumer actor).
+
+\*) Before passing the RouteDefinition instance to the route definition handler,
+akka-camel may make some further modifications to it.
+
+__ https://svn.apache.org/repos/asf/camel/trunk/camel-core/src/main/java/org/apache/camel/model/
+
+
+Typed actors
+^^^^^^^^^^^^
+
+For typed consumer actors to define a route definition handler, they must
+provide a RouteDefinitionHandler implementation class with the @consume
+annotation. The implementation class must have a no-arg constructor. Here's an
+example (in Java).
+
+.. code-block:: java
+
+ import org.apache.camel.builder.Builder;
+ import org.apache.camel.model.ProcessorDefinition;
+ import org.apache.camel.model.RouteDefinition;
+
+ public class SampleRouteDefinitionHandler implements RouteDefinitionHandler {
+      public ProcessorDefinition<?> onRouteDefinition(RouteDefinition rd) {
+ return rd.onException(Exception.class).handled(true).transform(Builder.exceptionMessage()).end();
+ }
+ }
+
+It can be used as follows.
+
+**Scala**
+
+.. code-block:: scala
+
+ trait TestTypedConsumer {
+ @consume(value="direct:error-handler-test", routeDefinitionHandler=classOf[SampleRouteDefinitionHandler])
+ def foo(s: String): String
+ }
+
+ // implementation class omitted
+
+**Java**
+
+.. code-block:: java
+
+ public interface SampleErrorHandlingTypedConsumer {
+
+ @consume(value="direct:error-handler-test", routeDefinitionHandler=SampleRouteDefinitionHandler.class)
+ String foo(String s);
+
+ }
+
+ // implementation class omitted
+
+
+.. _camel-examples:
+
+Examples
+========
+
+For all features described so far, there's running sample code in
+`akka-sample-camel`_. The examples in `sample.camel.Boot`_ are started during
+Kernel startup because this class has been added to the boot :ref:`configuration`.
+
+.. _akka-sample-camel: http://github.com/jboner/akka/tree/master/akka-samples/akka-sample-camel/
+.. _sample.camel.Boot: http://github.com/jboner/akka/blob/master/akka-samples/akka-sample-camel/src/main/scala/sample/camel/Boot.scala
+
+.. code-block:: none
+
+ akka {
+ ...
+ boot = ["sample.camel.Boot", ...]
+ ...
+ }
+
+If you don't want to have these examples started during Kernel startup, delete
+them from the :ref:`configuration`. Other examples are standalone applications (i.e. classes with a
+main method) that can be started from `sbt`_.
+
+.. _sbt: http://code.google.com/p/simple-build-tool/
+
+.. code-block:: none
+
+ $ sbt
+ [info] Building project akka 2.0-SNAPSHOT against Scala 2.9.0
+ [info] using AkkaModulesParentProject with sbt 0.7.7 and Scala 2.7.7
+ > project akka-sample-camel
+ Set current project to akka-sample-camel 2.0-SNAPSHOT
+ > run
+ ...
+ Multiple main classes detected, select one to run:
+
+ [1] sample.camel.ClientApplication
+ [2] sample.camel.ServerApplication
+ [3] sample.camel.StandaloneSpringApplication
+ [4] sample.camel.StandaloneApplication
+ [5] sample.camel.StandaloneFileApplication
+ [6] sample.camel.StandaloneJmsApplication
+
+
+Some of the examples in `akka-sample-camel`_ are described in more detail in the
+following subsections.
+
+
+.. _camel-async-example:
+
+Asynchronous routing and transformation example
+-----------------------------------------------
+
+This example demonstrates how to implement consumer and producer actors that
+support :ref:`camel-asynchronous-routing` with their Camel endpoints. The sample
+application transforms the content of the Akka homepage, http://akka.io, by
+replacing every occurrence of *Akka* with *AKKA*. After starting
+the :ref:`microkernel`, direct the browser to http://localhost:8875 and the
+transformed Akka homepage should be displayed. Please note that this example
+will probably not work if you're behind an HTTP proxy.
+
+The following figure gives an overview how the example actors interact with
+external systems and with each other. A browser sends a GET request to
+http://localhost:8875 which is the published endpoint of the ``HttpConsumer``
+actor. The ``HttpConsumer`` actor forwards the requests to the ``HttpProducer``
+actor which retrieves the Akka homepage from http://akka.io. The retrieved HTML
+is then forwarded to the ``HttpTransformer`` actor which replaces all occurrences
+of *Akka* with *AKKA*. The transformation result is sent back to the ``HttpConsumer``
+which finally returns it to the browser.
+
+.. image:: camel-async-interact.png
+
+Implementing the example actor classes and wiring them together is rather easy
+as shown in the following snippet (see also `sample.camel.Boot`_).
+
+.. code-block:: scala
+
+ import org.apache.camel.Exchange
+ import akka.actor.Actor._
+ import akka.actor.{Actor, ActorRef}
+ import akka.camel.{Producer, Message, Consumer}
+
+ class HttpConsumer(producer: ActorRef) extends Actor with Consumer {
+ def endpointUri = "jetty:http://0.0.0.0:8875/"
+
+ protected def receive = {
+ case msg => producer forward msg
+ }
+ }
+
+ class HttpProducer(transformer: ActorRef) extends Actor with Producer {
+ def endpointUri = "jetty://http://akka.io/?bridgeEndpoint=true"
+
+ override protected def receiveBeforeProduce = {
+      // only keep Exchange.HTTP_PATH message header (which is needed by the bridge endpoint)
+ case msg: Message => msg.setHeaders(msg.headers(Set(Exchange.HTTP_PATH)))
+ }
+
+ override protected def receiveAfterProduce = {
+ // do not reply but forward result to transformer
+ case msg => transformer forward msg
+ }
+ }
+
+ class HttpTransformer extends Actor {
+ protected def receive = {
+ case msg: Message => self.reply(msg.transformBody {body: String => body replaceAll ("Akka ", "AKKA ")})
+ case msg: Failure => self.reply(msg)
+ }
+ }
+
+ // Wire and start the example actors
+ val httpTransformer = actorOf(Props(new HttpTransformer))
+ val httpProducer = actorOf(Props(new HttpProducer(httpTransformer)))
+ val httpConsumer = actorOf(Props(new HttpConsumer(httpProducer)))
+
+The `jetty endpoints`_ of HttpConsumer and HttpProducer support asynchronous
+in-out message exchanges and do not allocate threads for the full duration of
+the exchange. This is achieved by using `Jetty continuations`_ on the
+consumer-side and by using `Jetty's asynchronous HTTP client`_ on the producer
+side. The following high-level sequence diagram illustrates that.
+
+.. _jetty endpoints: http://camel.apache.org/jetty.html
+.. _Jetty continuations: http://wiki.eclipse.org/Jetty/Feature/Continuations
+.. _Jetty's asynchronous HTTP client: http://wiki.eclipse.org/Jetty/Tutorial/HttpClient
+
+.. image:: camel-async-sequence.png
+
+
+Custom Camel route example
+--------------------------
+
+This section also demonstrates the combined usage of a ``Producer`` and a
+``Consumer`` actor as well as the inclusion of a custom Camel route. The
+following figure gives an overview.
+
+.. image:: camel-custom-route.png
+
+* A consumer actor receives a message from an HTTP client
+
+* It forwards the message to another actor that transforms the message (encloses
+ the original message into hyphens)
+
+* The transformer actor forwards the transformed message to a producer actor
+
+* The producer actor sends the message to a custom Camel route beginning at the
+ ``direct:welcome`` endpoint
+
+* A processor (transformer) in the custom Camel route prepends "Welcome" to the
+ original message and creates a result message
+
+* The producer actor sends the result back to the consumer actor which returns
+ it to the HTTP client
+
+
+The example is part of `sample.camel.Boot`_. The consumer, transformer and
+producer actor implementations are as follows.
+
+.. code-block:: scala
+
+ package sample.camel
+
+ import akka.actor.{Actor, ActorRef}
+ import akka.camel.{Message, Consumer}
+
+ class Consumer3(transformer: ActorRef) extends Actor with Consumer {
+ def endpointUri = "jetty:http://0.0.0.0:8877/camel/welcome"
+
+ def receive = {
+ // Forward a string representation of the message body to transformer
+ case msg: Message => transformer.forward(msg.setBodyAs[String])
+ }
+ }
+
+ class Transformer(producer: ActorRef) extends Actor {
+ protected def receive = {
+ // example: transform message body "foo" to "- foo -" and forward result to producer
+ case msg: Message => producer.forward(msg.transformBody((body: String) => "- %s -" format body))
+ }
+ }
+
+ class Producer1 extends Actor with Producer {
+ def endpointUri = "direct:welcome"
+ }
+
+The producer actor knows where to reply the message to because the consumer and
+transformer actors have forwarded the original sender reference as well. The
+application configuration and the route starting from direct:welcome are as
+follows.
+
+.. code-block:: scala
+
+ package sample.camel
+
+ import org.apache.camel.builder.RouteBuilder
+ import org.apache.camel.{Exchange, Processor}
+
+ import akka.actor.Actor._
+ import akka.camel.CamelContextManager
+
+ class Boot {
+ CamelContextManager.init()
+ CamelContextManager.mandatoryContext.addRoutes(new CustomRouteBuilder)
+
+ val producer = actorOf(Props[Producer1])
+ val mediator = actorOf(Props(new Transformer(producer)))
+ val consumer = actorOf(Props(new Consumer3(mediator)))
+ }
+
+ class CustomRouteBuilder extends RouteBuilder {
+ def configure {
+ from("direct:welcome").process(new Processor() {
+ def process(exchange: Exchange) {
+ // Create a 'welcome' message from the input message
+ exchange.getOut.setBody("Welcome %s" format exchange.getIn.getBody)
+ }
+ })
+ }
+ }
+
+To run the example, start the :ref:`microkernel` and POST a message to
+``http://localhost:8877/camel/welcome``.
+
+.. code-block:: none
+
+ curl -H "Content-Type: text/plain" -d "Anke" http://localhost:8877/camel/welcome
+
+The response should be:
+
+.. code-block:: none
+
+ Welcome - Anke -
+
+
+Publish-subscribe example
+-------------------------
+
+JMS
+^^^
+
+This section demonstrates how akka-camel can be used to implement
+publish/subscribe for actors. The following figure sketches an example for
+JMS-based publish/subscribe.
+
+.. image:: camel-pubsub.png
+
+A consumer actor receives a message from an HTTP client. It sends the message to
+a JMS producer actor (publisher). The JMS producer actor publishes the message
+to a JMS topic. Two other actors that subscribed to that topic both receive the
+message. The actor classes used in this example are shown in the following
+snippet.
+
+.. code-block:: scala
+
+ package sample.camel
+
+ import akka.actor.{Actor, ActorRef}
+ import akka.camel.{Producer, Message, Consumer}
+
+ class Subscriber(name:String, uri: String) extends Actor with Consumer {
+ def endpointUri = uri
+
+ protected def receive = {
+ case msg: Message => println("%s received: %s" format (name, msg.body))
+ }
+ }
+
+ class Publisher(name: String, uri: String) extends Actor with Producer {
+ self.id = name
+
+ def endpointUri = uri
+
+ // one-way communication with JMS
+ override def oneway = true
+ }
+
+ class PublisherBridge(uri: String, publisher: ActorRef) extends Actor with Consumer {
+ def endpointUri = uri
+
+ protected def receive = {
+ case msg: Message => {
+ publisher ! msg.bodyAs[String]
+ self.reply("message published")
+ }
+ }
+ }
+
+Wiring these actors to implement the above example is as simple as
+
+.. code-block:: scala
+
+ package sample.camel
+
+ import org.apache.camel.impl.DefaultCamelContext
+ import org.apache.camel.spring.spi.ApplicationContextRegistry
+ import org.springframework.context.support.ClassPathXmlApplicationContext
+
+ import akka.actor.Actor._
+ import akka.camel.CamelContextManager
+
+ class Boot {
+ // Create CamelContext with Spring-based registry and custom route builder
+ val context = new ClassPathXmlApplicationContext("/context-jms.xml", getClass)
+ val registry = new ApplicationContextRegistry(context)
+ CamelContextManager.init(new DefaultCamelContext(registry))
+
+ // Setup publish/subscribe example
+ val jmsUri = "jms:topic:test"
+ val jmsSubscriber1 = actorOf(Props(new Subscriber("jms-subscriber-1", jmsUri)))
+ val jmsSubscriber2 = actorOf(Props(new Subscriber("jms-subscriber-2", jmsUri)))
+ val jmsPublisher = actorOf(Props(new Publisher("jms-publisher", jmsUri)))
+
+ val jmsPublisherBridge = actorOf(Props(new PublisherBridge("jetty:http://0.0.0.0:8877/camel/pub/jms", jmsPublisher)))
+ }
+
+To publish messages to subscribers one could of course also use the JMS API
+directly; there's no need to do that over a JMS producer actor as in this
+example. For the example to work, Camel's `jms`_ component needs to be
+configured with a JMS connection factory which is done in a Spring application
+context XML file (context-jms.xml).
+
+.. _jms: http://camel.apache.org/jms.html
+
+.. code-block:: xml
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+To run the example, start the :ref:`microkernel` and POST a
+message to ``http://localhost:8877/camel/pub/jms``.
+
+.. code-block:: none
+
+ curl -H "Content-Type: text/plain" -d "Happy hAkking" http://localhost:8877/camel/pub/jms
+
+The HTTP response body should be
+
+.. code-block:: none
+
+ message published
+
+On the console, where you started the Akka Kernel, you should see something like
+
+.. code-block:: none
+
+ ...
+ INF [20100622-11:49:57.688] camel: jms-subscriber-2 received: Happy hAkking
+ INF [20100622-11:49:57.688] camel: jms-subscriber-1 received: Happy hAkking
+
+
+Cometd
+^^^^^^
+
+Publish/subscribe with `CometD`_ is equally easy using `Camel's cometd
+component`_.
+
+.. _CometD: http://cometd.org/
+.. _Camel's cometd component: http://camel.apache.org/cometd.html
+
+.. image:: camel-pubsub2.png
+
+All actor classes from the JMS example can be re-used, only the endpoint URIs need
+to be changed.
+
+.. code-block:: scala
+
+ package sample.camel
+
+ import org.apache.camel.impl.DefaultCamelContext
+ import org.apache.camel.spring.spi.ApplicationContextRegistry
+ import org.springframework.context.support.ClassPathXmlApplicationContext
+
+ import akka.actor.Actor._
+ import akka.camel.CamelContextManager
+
+ class Boot {
+ // ...
+
+ // Setup publish/subscribe example
+ val cometdUri = "cometd://localhost:8111/test/abc?resourceBase=target"
+ val cometdSubscriber = actorOf(Props(new Subscriber("cometd-subscriber", cometdUri)))
+ val cometdPublisher = actorOf(Props(new Publisher("cometd-publisher", cometdUri)))
+
+ val cometdPublisherBridge = actorOf(Props(new PublisherBridge("jetty:http://0.0.0.0:8877/camel/pub/cometd", cometdPublisher)))
+ }
+
+
+Quartz Scheduler Example
+------------------------
+
+Here is an example showing how simple it is to implement a cron-style scheduler by
+using the Camel Quartz component in Akka.
+
+The following example creates a "timer" actor which fires a message every 2
+seconds:
+
+.. code-block:: scala
+
+ package com.dimingo.akka
+
+ import akka.actor.Actor
+ import akka.actor.Actor.actorOf
+
+ import akka.camel.{Consumer, Message}
+ import akka.camel.CamelServiceManager._
+
+ class MyQuartzActor extends Actor with Consumer {
+
+ def endpointUri = "quartz://example?cron=0/2+*+*+*+*+?"
+
+ def receive = {
+
+ case msg => println("==============> received %s " format msg)
+
+ } // end receive
+
+ } // end MyQuartzActor
+
+ object MyQuartzActor {
+
+ def main(str: Array[String]) {
+
+ // start the Camel service
+ startCamelService
+
+ // create and start a quartz actor
+ val myActor = actorOf(Props[MyQuartzActor])
+
+ } // end main
+
+ } // end MyQuartzActor
+
+The full working example is available for download here:
+http://www.dimingo.com/akka/examples/example-akka-quartz.tar.gz
+
+You can launch it using the maven command:
+
+.. code-block:: none
+
+ $ mvn scala:run -DmainClass=com.dimingo.akka.MyQuartzActor
+
+For more information about the Camel Quartz component, see here:
+http://camel.apache.org/quartz.html
diff --git a/akka-docs/disabled/clustering.rst b/akka-docs/disabled/clustering.rst
index f384a37ca0..017994cb8a 100644
--- a/akka-docs/disabled/clustering.rst
+++ b/akka-docs/disabled/clustering.rst
@@ -48,8 +48,8 @@ cluster node.
Cluster configuration
~~~~~~~~~~~~~~~~~~~~~
-Cluster is configured in the ``akka.cloud.cluster`` section in the ``akka.conf``
-configuration file. Here you specify the default addresses to the ZooKeeper
+Cluster is configured in the ``akka.cloud.cluster`` section in the :ref:`configuration`.
+Here you specify the default addresses to the ZooKeeper
servers, timeouts, if compression should be on or off, and so on.
.. code-block:: conf
@@ -328,7 +328,7 @@ created actor::
val clusterNode = Cluster.newNode(NodeAddress("test-cluster", "node1")).start
- val hello = actorOf[HelloActor].start.asInstanceOf[LocalActorRef]
+ val hello = actorOf(Props[HelloActor]).asInstanceOf[LocalActorRef]
val serializeMailbox = false
val nrOfInstances = 5
@@ -594,7 +594,7 @@ Consolidation and management of the Akka configuration file
Not implemented yet.
-The actor configuration file ``akka.conf`` will also be stored into the cluster
+The actor :ref:`configuration` file will also be stored into the cluster
and it will be possible to have one single configuration file, stored on the server, and pushed out to all
the nodes that joins the cluster. Each node only needs to be configured with the ZooKeeper
server address and the master configuration will only reside in one single place
diff --git a/akka-docs/disabled/examples/Pi.scala b/akka-docs/disabled/examples/Pi.scala
index 2b0fb45914..9fcddef1a0 100644
--- a/akka-docs/disabled/examples/Pi.scala
+++ b/akka-docs/disabled/examples/Pi.scala
@@ -62,7 +62,7 @@ object Pi extends App {
//#create-workers
// create the workers
- val workers = Vector.fill(nrOfWorkers)(actorOf[Worker])
+ val workers = Vector.fill(nrOfWorkers)(actorOf(Props[Worker])
// wrap them with a load-balancing router
val router = Routing.actorOf(RoutedProps().withRoundRobinRouter.withLocalConnections(workers), "pi")
@@ -117,8 +117,7 @@ object Pi extends App {
val latch = new CountDownLatch(1)
// create the master
- val master = actorOf(
- new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch))
+ val master = actorOf(Props(new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch)))
// start the calculation
master ! Calculate
diff --git a/akka-docs/disabled/http.rst b/akka-docs/disabled/http.rst
new file mode 100644
index 0000000000..0d9423a676
--- /dev/null
+++ b/akka-docs/disabled/http.rst
@@ -0,0 +1,105 @@
+.. _http-module:
+
+HTTP
+====
+
+.. sidebar:: Contents
+
+ .. contents:: :local:
+
+When deploying in a servlet container:
+--------------------------------------------
+
+If you deploy Akka in a JEE container, don't forget to create an Akka initialization and cleanup hook:
+
+.. code-block:: scala
+
+ package com.my //<--- your own package
+ import akka.util.AkkaLoader
+ import akka.cluster.BootableRemoteActorService
+ import akka.actor.BootableActorLoaderService
+ import javax.servlet.{ServletContextListener, ServletContextEvent}
+
+ /**
+ * This class can be added to web.xml mappings as a listener to start and postStop Akka.
+ *
+ * ...
+ *
+ * com.my.Initializer
+ *
+ * ...
+ *
+ */
+ class Initializer extends ServletContextListener {
+ lazy val loader = new AkkaLoader
+ def contextDestroyed(e: ServletContextEvent): Unit = loader.shutdown
+ def contextInitialized(e: ServletContextEvent): Unit =
+ loader.boot(true, new BootableActorLoaderService with BootableRemoteActorService) //<--- Important
+ // loader.boot(true, new BootableActorLoaderService {}) // If you don't need akka-remote
+ }
+
+For Java users, it's currently only possible to use BootableActorLoaderService, but you'll need to use: akka.actor.DefaultBootableActorLoaderService
+
+
+Then you just declare it in your web.xml:
+
+.. code-block:: xml
+
+
+ ...
+
+ your.package.Initializer
+
+ ...
+
+
+Adapting your own Akka Initializer for the Servlet Container
+------------------------------------------------------------
+
+If you want to use akka-camel or any other modules that have their own "Bootable"'s you'll need to write your own Initializer, which is _ultra_ simple, see below for an example on how to include Akka-camel.
+
+.. code-block:: scala
+
+ package com.my //<--- your own package
+ import akka.cluster.BootableRemoteActorService
+ import akka.actor.BootableActorLoaderService
+ import akka.camel.CamelService
+ import javax.servlet.{ServletContextListener, ServletContextEvent}
+
+ /**
+ * This class can be added to web.xml mappings as a listener to start and postStop Akka.
+ *
+ * ...
+ *
+ * com.my.Initializer
+ *
+ * ...
+ *
+ */
+ class Initializer extends ServletContextListener {
+ lazy val loader = new AkkaLoader
+ def contextDestroyed(e: ServletContextEvent): Unit = loader.shutdown
+ def contextInitialized(e: ServletContextEvent): Unit =
+ loader.boot(true, new BootableActorLoaderService with BootableRemoteActorService with CamelService) //<--- Important
+ }
+
+Using Akka with the Pinky REST/MVC framework
+--------------------------------------------
+
+Pinky has a slick Akka integration. Read more `here `_
+
+jetty-run in SBT
+----------------
+
+If you want to use jetty-run in SBT you need to exclude the version of Jetty that is bundled in akka-http:
+
+.. code-block:: scala
+
+ override def ivyXML =
+
+
+
+
+
+
+
diff --git a/akka-docs/java/stm.rst b/akka-docs/disabled/java-stm.rst
similarity index 96%
rename from akka-docs/java/stm.rst
rename to akka-docs/disabled/java-stm.rst
index 67917e7e77..01d35e7487 100644
--- a/akka-docs/java/stm.rst
+++ b/akka-docs/disabled/java-stm.rst
@@ -7,8 +7,6 @@ Software Transactional Memory (Java)
.. contents:: :local:
-Module stability: **SOLID**
-
Overview of STM
---------------
@@ -182,23 +180,7 @@ The following settings are possible on a TransactionFactory:
- propagation - For controlling how nested transactions behave.
- traceLevel - Transaction trace level.
-You can also specify the default values for some of these options in akka.conf. Here they are with their default values:
-
-::
-
- stm {
- fair = on # Should global transactions be fair or non-fair (non fair yield better performance)
- max-retries = 1000
- timeout = 5 # Default timeout for blocking transactions and transaction set (in unit defined by
- # the time-unit property)
- write-skew = true
- blocking-allowed = false
- interruptible = false
- speculative = true
- quick-release = true
- propagation = "requires"
- trace-level = "none"
- }
+You can also specify the default values for some of these options in :ref:`configuration`.
Transaction lifecycle listeners
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/akka-docs/java/transactors.rst b/akka-docs/disabled/java-transactors.rst
similarity index 99%
rename from akka-docs/java/transactors.rst
rename to akka-docs/disabled/java-transactors.rst
index cc069e3d9c..2e1c3dc769 100644
--- a/akka-docs/java/transactors.rst
+++ b/akka-docs/disabled/java-transactors.rst
@@ -7,8 +7,6 @@ Transactors (Java)
.. contents:: :local:
-Module stability: **SOLID**
-
Why Transactors?
----------------
diff --git a/akka-docs/disabled/microkernel.rst b/akka-docs/disabled/microkernel.rst
new file mode 100644
index 0000000000..cbf9ba96ba
--- /dev/null
+++ b/akka-docs/disabled/microkernel.rst
@@ -0,0 +1,40 @@
+
+.. _microkernel:
+
+#############
+ Microkernel
+#############
+
+
+Run the microkernel
+===================
+
+To start the kernel use the scripts in the ``bin`` directory.
+
+All services are configured in the :ref:`configuration` file in the ``config`` directory.
+Services you want to be started up automatically should be listed in the list of ``boot`` classes in
+the :ref:`configuration`.
+
+Put your application in the ``deploy`` directory.
+
+
+Akka Home
+---------
+
+Note that the microkernel needs to know where the Akka home is (the base
+directory of the microkernel). The above scripts do this for you. Otherwise, you
+can set Akka home by:
+
+* Specifying the ``AKKA_HOME`` environment variable
+
+* Specifying the ``-Dakka.home`` java option
+
+
+.. _hello-microkernel:
+
+Hello Microkernel
+=================
+
+There is a very simple Akka Mist sample project included in the microkernel
+``deploy`` directory. Start the microkernel with the start script and then go to
+http://localhost:9998 to say Hello to the microkernel.
diff --git a/akka-docs/scala/stm.rst b/akka-docs/disabled/scala-stm.rst
similarity index 96%
rename from akka-docs/scala/stm.rst
rename to akka-docs/disabled/scala-stm.rst
index a35fb94676..f21f988939 100644
--- a/akka-docs/scala/stm.rst
+++ b/akka-docs/disabled/scala-stm.rst
@@ -271,23 +271,7 @@ The following settings are possible on a TransactionFactory:
- ``propagation`` - For controlling how nested transactions behave.
- ``traceLevel`` - Transaction trace level.
-You can also specify the default values for some of these options in ``akka.conf``. Here they are with their default values:
-
-::
-
- stm {
- fair = on # Should global transactions be fair or non-fair (non fair yield better performance)
- max-retries = 1000
- timeout = 5 # Default timeout for blocking transactions and transaction set (in unit defined by
- # the time-unit property)
- write-skew = true
- blocking-allowed = false
- interruptible = false
- speculative = true
- quick-release = true
- propagation = "requires"
- trace-level = "none"
- }
+You can also specify the default values for some of these options in the :ref:`configuration`.
You can also determine at which level a transaction factory is shared or not shared, which affects the way in which the STM can optimise transactions.
diff --git a/akka-docs/scala/transactors.rst b/akka-docs/disabled/scala-transactors.rst
similarity index 99%
rename from akka-docs/scala/transactors.rst
rename to akka-docs/disabled/scala-transactors.rst
index 1c1154eb06..7d654d5f15 100644
--- a/akka-docs/scala/transactors.rst
+++ b/akka-docs/disabled/scala-transactors.rst
@@ -7,8 +7,6 @@ Transactors (Scala)
.. contents:: :local:
-Module stability: **SOLID**
-
Why Transactors?
----------------
diff --git a/akka-docs/disabled/spring.rst b/akka-docs/disabled/spring.rst
new file mode 100644
index 0000000000..091da8089c
--- /dev/null
+++ b/akka-docs/disabled/spring.rst
@@ -0,0 +1,333 @@
+
+.. _spring-module:
+
+####################
+ Spring Integration
+####################
+
+Akka's integration with the `Spring Framework `_ supplies the Spring way of using the Typed Actor Java API and for CamelService configuration for :ref:`camel-spring-applications`. It uses Spring's custom namespaces to create Typed Actors, supervisor hierarchies and a CamelService in a Spring environment.
+
+Contents:
+
+.. contents:: :local:
+
+To use the custom name space tags for Akka you have to add the XML schema definition to your spring configuration. It is available at `http://akka.io/akka-1.0.xsd `_. The namespace for Akka is:
+
+.. code-block:: xml
+
+ xmlns:akka="http://akka.io/schema/akka"
+
+Example header for Akka Spring configuration:
+
+.. code-block:: xml
+
+
+
+
+-
+
+Actors
+------
+
+Actors in Java are created by extending the 'UntypedActor' class and implementing the 'onReceive' method.
+
+Example how to create Actors with the Spring framework:
+
+.. code-block:: xml
+
+
+
+
+
+
+Supported scopes are singleton and prototype. Dependencies and properties are set with Spring's ```` element.
+A dependency can be either a ```` or a regular ````.
+
+Get the Actor from the Spring context:
+
+.. code-block:: java
+
+ ApplicationContext context = new ClassPathXmlApplicationContext("akka-spring-config.xml");
+ ActorRef actorRef = (ActorRef) context.getBean("myActor");
+
+Typed Actors
+------------
+
+Here are some examples how to create Typed Actors with the Spring framework:
+
+Creating a Typed Actor:
+^^^^^^^^^^^^^^^^^^^^^^^
+
+.. code-block:: xml
+
+
+
+
+
+
+
+
+Supported scopes are singleton and prototype. Dependencies and properties are set with Spring's ```` element.
+A dependency can be either a ```` or a regular ````.
+
+Get the Typed Actor from the Spring context:
+
+.. code-block:: java
+
+ ApplicationContext context = new ClassPathXmlApplicationContext("akka-spring-config.xml");
+ MyPojo myPojo = (MyPojo) context.getBean("myActor");
+
+Remote Actors
+-------------
+
+For details on server managed and client managed remote actors see Remote Actor documentation.
+
+Configuration for a client managed remote Actor
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+::
+
+
+
+
+
+The default for 'managed-by' is "client", so in the above example it could be left out.
+
+Configuration for a server managed remote Actor
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Server side
+***********
+
+::
+
+
+
+
+
+
+
+
+
+
+If the server specified by 'host' and 'port' does not exist it will not be registered.
+
+Client side
+***********
+
+::
+
+
+
+
+
+Configuration for a client managed remote Typed Actor
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. code-block:: xml
+
+
+
+
+
+Configuration for a server managed remote Typed Actor
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Server side setup
+*****************
+
+::
+
+
+
+
+
+Client side setup
+*****************
+
+::
+
+
+
+
+Dispatchers
+-----------
+
+Configuration for a Typed Actor or Untyped Actor with a custom dispatcher
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If you don't want to use the default dispatcher you can define your own dispatcher in the spring configuration. For more information on dispatchers have a look at Dispatchers documentation.
+
+.. code-block:: xml
+
+
+
+
+
+
+
+
+
+
+
+If you want to or have to share the dispatcher between Actors you can define a dispatcher and reference it from the Typed Actor configuration:
+
+.. code-block:: xml
+
+
+
+
+
+
+
+
+
+The following dispatcher types are available in spring configuration:
+
+* executor-based-event-driven
+* executor-based-event-driven-work-stealing
+* thread-based
+
+The following queue types are configurable for dispatchers using thread pools:
+
+* bounded-linked-blocking-queue
+* unbounded-linked-blocking-queue
+* synchronous-queue
+* bounded-array-blocking-queue
+
+If you have set up your IDE to be XSD-aware you can easily write your configuration through auto-completion.
+
+Stopping Typed Actors and Untyped Actors
+----------------------------------------
+
+Actors with scope singleton are stopped when the application context is closed. Actors with scope prototype must be stopped by the application.
+
+Supervisor Hierarchies
+----------------------
+
+The supervisor configuration in Spring follows the declarative configuration for the Java API. Have a look at Akka's approach to fault tolerance.
+
+Example spring supervisor configuration
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. code-block:: xml
+
+
+
+
+
+
+ java.io.IOException
+
+
+
+
+
+
+
+
+
+
+
+
+
+ java.io.IOException
+ java.lang.NullPointerException
+
+
+
+
+
+
+
+
+
+
+Get the TypedActorConfigurator from the Spring context
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. code-block:: java
+
+ TypedActorConfigurator myConfigurator = (TypedActorConfigurator) context.getBean("my-supervisor");
+ MyPojo myPojo = (MyPojo) myConfigurator.getInstance(MyPojo.class);
+
+Property Placeholders
+---------------------
+
+The Akka configuration can be made available as property placeholders by using a custom property placeholder configurer for Configgy:
+
+::
+
+
+
+
+
+
+
+Camel configuration
+-------------------
+
+For details refer to the :ref:`camel-module` documentation:
+
+* CamelService configuration for :ref:`camel-spring-applications`
+* Access to Typed Actors :ref:`camel-typed-actors-using-spring`
diff --git a/akka-docs/general/code/ConfigDocSpec.scala b/akka-docs/general/code/akka/docs/config/ConfigDocSpec.scala
similarity index 100%
rename from akka-docs/general/code/ConfigDocSpec.scala
rename to akka-docs/general/code/akka/docs/config/ConfigDocSpec.scala
diff --git a/akka-docs/general/configuration.rst b/akka-docs/general/configuration.rst
index 0e96f8165e..6f00cae81f 100644
--- a/akka-docs/general/configuration.rst
+++ b/akka-docs/general/configuration.rst
@@ -1,3 +1,5 @@
+.. _configuration:
+
Configuration
=============
@@ -26,10 +28,10 @@ configuration for each actor system, and grab the specific configuration when in
::
myapp1 {
- akka.logLevel = WARNING
+ akka.loglevel = WARNING
}
myapp2 {
- akka.logLevel = ERROR
+ akka.loglevel = ERROR
}
.. code-block:: scala
@@ -45,7 +47,7 @@ with ``/``. ``-Dconfig.resource=/dev.conf`` will load the ``dev.conf`` from the
You may also specify and parse the configuration programmatically in other ways when instantiating
the ``ActorSystem``.
-.. includecode:: code/ConfigDocSpec.scala
+.. includecode:: code/akka/docs/config/ConfigDocSpec.scala
:include: imports,custom-config
The ``ConfigFactory`` provides several methods to parse the configuration from various sources.
@@ -118,7 +120,7 @@ A custom ``application.conf`` might look like this::
actor {
default-dispatcher {
- throughput = 10 # Throughput for default Dispatcher, set to 1 for complete fairness
+ throughput = 10 # Throughput for default Dispatcher, set to 1 for as fair as possible
}
}
@@ -133,7 +135,7 @@ A custom ``application.conf`` might look like this::
Config file format
------------------
-The configuration file syntax is described in the `HOCON `_
+The configuration file syntax is described in the `HOCON `_
specification. Note that it supports three formats; conf, json, and properties.
@@ -155,7 +157,7 @@ dev.conf:
loglevel = "DEBUG"
}
-More advanced include and substitution mechanisms are explained in the `HOCON `_
+More advanced include and substitution mechanisms are explained in the `HOCON `_
specification.
diff --git a/akka-docs/general/event-handler.rst b/akka-docs/general/event-handler.rst
deleted file mode 100644
index c23911939e..0000000000
--- a/akka-docs/general/event-handler.rst
+++ /dev/null
@@ -1,100 +0,0 @@
-.. _event-handler:
-
-Event Handler
-=============
-
-There is an Event Handler which takes the place of a logging system in Akka:
-
-.. code-block:: scala
-
- akka.event.EventHandler
-
-You can configure which event handlers should be registered at boot time. That is done using the 'event-handlers' element in akka.conf. Here you can also define the log level.
-
-.. code-block:: ruby
-
- akka {
- # event handlers to register at boot time (EventHandler$DefaultListener logs to STDOUT)
- event-handlers = ["akka.event.EventHandler$DefaultListener"]
- loglevel = "DEBUG" # Options: ERROR, WARNING, INFO, DEBUG
- }
-
-The default one logs to STDOUT and is registered by default. It is not intended to be used for production. There is also an :ref:`slf4j` event handler available in the 'akka-slf4j' module.
-
-Example of creating a listener from Scala (from Java you just have to create an 'UntypedActor' and create a handler for these messages):
-
-.. code-block:: scala
-
- val errorHandlerEventListener = Actor.actorOf(new Actor {
- self.dispatcher = EventHandler.EventHandlerDispatcher
-
- def receive = {
- case EventHandler.Error(cause, instance, message) => ...
- case EventHandler.Warning(instance, message) => ...
- case EventHandler.Info(instance, message) => ...
- case EventHandler.Debug(instance, message) => ...
- case genericEvent => ...
- }
- })
-
-To add the listener:
-
-.. code-block:: scala
-
- EventHandler.addListener(errorHandlerEventListener)
-
-To remove the listener:
-
-.. code-block:: scala
-
- EventHandler.removeListener(errorHandlerEventListener)
-
-To log an event:
-
-.. code-block:: scala
-
- EventHandler.notify(EventHandler.Error(exception, this, message))
-
- EventHandler.notify(EventHandler.Warning(this, message))
-
- EventHandler.notify(EventHandler.Info(this, message))
-
- EventHandler.notify(EventHandler.Debug(this, message))
-
- EventHandler.notify(object)
-
-You can also use one of the direct methods (for a bit better performance):
-
-.. code-block:: scala
-
- EventHandler.error(exception, this, message)
-
- EventHandler.error(this, message)
-
- EventHandler.warning(this, message)
-
- EventHandler.info(this, message)
-
- EventHandler.debug(this, message)
-
-The event handler allows you to send an arbitrary object to the handler which you can handle in your event handler listener. The default listener prints it's toString String out to STDOUT.
-
-.. code-block:: scala
-
- EventHandler.notify(anyRef)
-
-The methods take a call-by-name parameter for the message to avoid object allocation and execution if level is disabled. The following formatting function will not be evaluated if level is INFO, WARNING, or ERROR.
-
-.. code-block:: scala
-
- EventHandler.debug(this, "Processing took %s ms".format(duration))
-
-From Java you need to nest the call in an if statement to achieve the same thing.
-
-.. code-block:: java
-
- if (EventHandler.isDebugEnabled()) {
- EventHandler.debug(this, String.format("Processing took %s ms", duration));
- }
-
-
diff --git a/akka-docs/general/index.rst b/akka-docs/general/index.rst
index 687892b177..945b52a278 100644
--- a/akka-docs/general/index.rst
+++ b/akka-docs/general/index.rst
@@ -5,9 +5,7 @@ General
:maxdepth: 2
jmm
+ message-send-semantics
configuration
- event-handler
- slf4j
addressing
supervision
- guaranteed-delivery
diff --git a/akka-docs/general/jmm.rst b/akka-docs/general/jmm.rst
index 74ef84e752..ecb6dad6f1 100644
--- a/akka-docs/general/jmm.rst
+++ b/akka-docs/general/jmm.rst
@@ -59,4 +59,45 @@ How these rules are realized in Akka is an implementation detail and can change
even depend on the used configuration. But they will build on the other JMM rules like the monitor lock rule or the
volatile variable rule. This means that you, the Akka user, do not need to worry about adding synchronization to provide
such a "happens before" relation, because it is the responsibility of Akka. So you have your hands free to deal with your
-business logic, and the Akka framework makes sure that those rules are guaranteed on your behalf.
\ No newline at end of file
+business logic, and the Akka framework makes sure that those rules are guaranteed on your behalf.
+
+.. _jmm-shared-state:
+
+Actors and shared mutable state
+-------------------------------
+
+Since Akka runs on the JVM there are still some rules to be followed.
+
+* Closing over internal Actor state and exposing it to other threads
+
+.. code-block:: scala
+
+ class MyActor extends Actor {
+ var state = ...
+ def receive = {
+ case _ =>
+ //Wrongs
+
+ // Very bad, shared mutable state,
+ // will break your application in weird ways
+ Future { state = NewState }
+ anotherActor ? message onResult { r => state = r }
+
+ // Very bad, "sender" changes for every message,
+ // shared mutable state bug
+ Future { expensiveCalculation(sender) }
+
+ //Rights
+
+ // Completely safe, "self" is OK to close over
+ // and it's an ActorRef, which is thread-safe
+ Future { expensiveCalculation() } onComplete { f => self ! f.value.get }
+
+ // Completely safe, we close over a fixed value
+ // and it's an ActorRef, which is thread-safe
+ val currentSender = sender
+ Future { expensiveCalculation(currentSender) }
+ }
+ }
+
+* Messages **should** be immutable; this is to avoid the shared mutable state trap.
\ No newline at end of file
diff --git a/akka-docs/general/guaranteed-delivery.rst b/akka-docs/general/message-send-semantics.rst
similarity index 51%
rename from akka-docs/general/guaranteed-delivery.rst
rename to akka-docs/general/message-send-semantics.rst
index 550d84376c..d9488d1f2b 100644
--- a/akka-docs/general/guaranteed-delivery.rst
+++ b/akka-docs/general/message-send-semantics.rst
@@ -1,13 +1,14 @@
-.. _guaranteed-delivery:
+.. _message-send-semantics:
-#####################
- Guaranteed Delivery
-#####################
+#######################
+ Message send semantics
+#######################
-Guaranteed Delivery
-===================
+
+Guaranteed Delivery?
+====================
Akka does *not* support guaranteed delivery.
@@ -34,9 +35,35 @@ in Erlang and requires the user to model his application around. You can
read more about this approach in the `Erlang documentation`_ (section
10.9 and 10.10), Akka follows it closely.
-Bottom line; you as a developer knows what guarantees you need in your
+Bottom line: you as a developer know what guarantees you need in your
application and can solve it fastest and most reliable by explicit ``ACK`` and
``RETRY`` (if you really need it, most often you don't). Using Akka's Durable
Mailboxes could help with this.
-.. _Erlang documentation: http://www.erlang.org/faq/academic.html
+Delivery semantics
+==================
+
+At-most-once
+------------
+
+Actual transports may provide stronger semantics,
+but at-most-once is the semantics you should expect.
+The alternatives would be once-and-only-once, which is extremely costly,
+or at-least-once which essentially requires idempotency of message processing,
+which is a user-level concern.
+
+Ordering is preserved on a per-sender basis
+-------------------------------------------
+
+Actor ``A1`` sends messages ``M1``, ``M2``, ``M3`` to ``A2``.
+Actor ``A3`` sends messages ``M4``, ``M5``, ``M6`` to ``A2``.
+
+This means that:
+ 1) If ``M1`` is delivered it must be delivered before ``M2`` and ``M3``
+ 2) If ``M2`` is delivered it must be delivered before ``M3``
+ 3) If ``M4`` is delivered it must be delivered before ``M5`` and ``M6``
+ 4) If ``M5`` is delivered it must be delivered before ``M6``
+ 5) ``A2`` can see messages from ``A1`` interleaved with messages from ``A3``
+ 6) Since there is no guaranteed delivery, none, some or all of the messages may arrive to ``A2``
+
+.. _Erlang documentation: http://www.erlang.org/faq/academic.html
\ No newline at end of file
diff --git a/akka-docs/general/slf4j.rst b/akka-docs/general/slf4j.rst
deleted file mode 100644
index 876b139d65..0000000000
--- a/akka-docs/general/slf4j.rst
+++ /dev/null
@@ -1,41 +0,0 @@
-.. _slf4j:
-
-SLF4J
-=====
-
-This module is available in the 'akka-slf4j.jar'. It has one single dependency; the slf4j-api jar. In runtime you
-also need a SLF4J backend, we recommend `Logback `_:
-
- .. code-block:: scala
-
- lazy val logback = "ch.qos.logback" % "logback-classic" % "1.0.0" % "runtime"
-
-
-Event Handler
--------------
-
-This module includes a SLF4J Event Handler that works with Akka's standard Event Handler. You enabled it in the 'event-handlers' element in akka.conf. Here you can also define the log level.
-
-.. code-block:: ruby
-
- akka {
- event-handlers = ["akka.event.slf4j.Slf4jEventHandler"]
- loglevel = "DEBUG"
- }
-
-Read more about how to use the :ref:`event-handler`.
-
-Logging thread in MDC
----------------------
-
-Since the logging is done asynchronously the thread in which the logging was performed is captured in
-Mapped Diagnostic Context (MDC) with attribute name ``sourceThread``.
-With Logback the thread name is available with ``%X{sourceThread}`` specifier within the pattern layout configuration::
-
-
-
- %date{ISO8601} %-5level %logger{36} %X{sourceThread} - %msg%n
-
-
-
-
\ No newline at end of file
diff --git a/akka-docs/intro/code/tutorials/first/Pi.scala b/akka-docs/intro/code/tutorials/first/Pi.scala
index 6be88d0f32..4c82dfaa93 100644
--- a/akka-docs/intro/code/tutorials/first/Pi.scala
+++ b/akka-docs/intro/code/tutorials/first/Pi.scala
@@ -66,7 +66,7 @@
// //#create-workers
// // create the workers
-// val workers = Vector.fill(nrOfWorkers)(system.actorOf[Worker])
+// val workers = Vector.fill(nrOfWorkers)(system.actorOf(Props[Worker]))
// // wrap them with a load-balancing router
// val router = system.actorOf(RoutedProps().withRoundRobinRouter.withLocalConnections(workers), "pi")
@@ -119,7 +119,7 @@
// val latch = new CountDownLatch(1)
// // create the master
-// val master = system.actorOf(new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch))
+// val master = system.actorOf(Props(new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch)))
// // start the calculation
// master ! Calculate
diff --git a/akka-docs/intro/deployment-scenarios.rst b/akka-docs/intro/deployment-scenarios.rst
index 829d93829e..a5da196d24 100644
--- a/akka-docs/intro/deployment-scenarios.rst
+++ b/akka-docs/intro/deployment-scenarios.rst
@@ -29,12 +29,12 @@ Actors as services
The simplest way you can use Akka is to use the actors as services in your Web
application. All that’s needed to do that is to put the Akka charts as well as
-its dependency jars into ``WEB-INF/lib``. You also need to put the ``akka.conf``
-config file in the ``$AKKA_HOME/config`` directory. Now you can create your
+its dependency jars into ``WEB-INF/lib``. You also need to put the :ref:`configuration`
+file in the ``$AKKA_HOME/config`` directory. Now you can create your
Actors as regular services referenced from your Web application. You should also
be able to use the Remoting service, e.g. be able to make certain Actors remote
on other hosts. Please note that remoting service does not speak HTTP over port
-80, but a custom protocol over the port is specified in ``akka.conf``.
+80, but a custom protocol over the port is specified in :ref:`configuration`.
Using Akka as a stand alone microkernel
diff --git a/akka-docs/intro/getting-started-first-java.rst b/akka-docs/intro/getting-started-first-java.rst
index ee890d723d..6d429b160d 100644
--- a/akka-docs/intro/getting-started-first-java.rst
+++ b/akka-docs/intro/getting-started-first-java.rst
@@ -108,7 +108,6 @@ Akka is very modular and has many JARs for containing different features. The co
- ``akka-typed-actor-2.0-SNAPSHOT.jar`` -- Typed Actors
- ``akka-remote-2.0-SNAPSHOT.jar`` -- Remote Actors
- ``akka-stm-2.0-SNAPSHOT.jar`` -- STM (Software Transactional Memory), transactors and transactional datastructures
-- ``akka-http-2.0-SNAPSHOT.jar`` -- Akka Mist for continuation-based asynchronous HTTP and also Jersey integration
- ``akka-slf4j-2.0-SNAPSHOT.jar`` -- SLF4J Event Handler Listener for logging with SLF4J
- ``akka-testkit-2.0-SNAPSHOT.jar`` -- Toolkit for testing Actors
@@ -159,8 +158,8 @@ Here is the layout that Maven created::
As you can see we already have a Java source file called ``App.java``, let's now rename it to ``Pi.java``.
-We also need to edit the ``pom.xml`` build file. Let's add the dependency we need as well as the Maven repository it should download it from. The Akka Maven repository can be found at ``_
-and Typesafe provides ``_ that proxies several other repositories, including akka.io.
+We also need to edit the ``pom.xml`` build file. Let's add the dependency we need as well as the Maven repository it should download it from. The Akka Maven repository can be found at ``_
+and Typesafe provides ``_ that proxies several other repositories, including akka.io.
It should now look something like this:
.. code-block:: xml
@@ -222,6 +221,7 @@ We start by creating a ``Pi.java`` file and adding these import statements at th
import static akka.actor.Actors.poisonPill;
import static java.util.Arrays.asList;
+ import akka.actor.Props;
import akka.actor.ActorRef;
import akka.actor.UntypedActor;
import akka.actor.UntypedActorFactory;
@@ -338,15 +338,15 @@ The master actor is a little bit more involved. In its constructor we need to cr
// create the workers
final ActorRef[] workers = new ActorRef[nrOfWorkers];
for (int i = 0; i < nrOfWorkers; i++) {
- workers[i] = actorOf(Worker.class);
+ workers[i] = actorOf(new Props(Worker.class));
}
// wrap them with a load-balancing router
- ActorRef router = actorOf(new UntypedActorFactory() {
+ ActorRef router = actorOf(new Props(new UntypedActorFactory() {
public UntypedActor create() {
return new PiRouter(workers);
}
- });
+ }));
}
}
@@ -360,7 +360,7 @@ One thing to note is that we used two different versions of the ``actorOf`` meth
The actor's life-cycle is:
-- Created & Started -- ``Actor.actorOf[MyActor]`` -- can receive messages
+- Created & Started -- ``Actor.actorOf(Props[MyActor])`` -- can receive messages
- Stopped -- ``actorRef.stop()`` -- can **not** receive messages
Once the actor has been stopped it is dead and can not be started again.
@@ -405,15 +405,15 @@ Here is the master actor::
// create the workers
final ActorRef[] workers = new ActorRef[nrOfWorkers];
for (int i = 0; i < nrOfWorkers; i++) {
- workers[i] = actorOf(Worker.class);
+ workers[i] = actorOf(new Props(Worker.class));
}
// wrap them with a load-balancing router
- router = actorOf(new UntypedActorFactory() {
+ router = actorOf(new Props(new UntypedActorFactory() {
public UntypedActor create() {
return new PiRouter(workers);
}
- });
+ }));
}
// message handler
@@ -496,11 +496,11 @@ Now the only thing that is left to implement is the runner that should bootstrap
final CountDownLatch latch = new CountDownLatch(1);
// create the master
- ActorRef master = actorOf(new UntypedActorFactory() {
+ ActorRef master = actorOf(new Props(new UntypedActorFactory() {
public UntypedActor create() {
return new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch);
}
- });
+ }));
// start the calculation
master.tell(new Calculate());
@@ -520,6 +520,7 @@ Before we package it up and run it, let's take a look at the full code now, with
import static akka.actor.Actors.poisonPill;
import static java.util.Arrays.asList;
+ import akka.actor.Props;
import akka.actor.ActorRef;
import akka.actor.UntypedActor;
import akka.actor.UntypedActorFactory;
@@ -630,15 +631,15 @@ Before we package it up and run it, let's take a look at the full code now, with
// create the workers
final ActorRef[] workers = new ActorRef[nrOfWorkers];
for (int i = 0; i < nrOfWorkers; i++) {
- workers[i] = actorOf(Worker.class);
+ workers[i] = actorOf(new Props(Worker.class));
}
// wrap them with a load-balancing router
- router = actorOf(new UntypedActorFactory() {
+ router = actorOf(new Props(new UntypedActorFactory() {
public UntypedActor create() {
return new PiRouter(workers);
}
- });
+ }));
}
// message handler
@@ -692,11 +693,11 @@ Before we package it up and run it, let's take a look at the full code now, with
final CountDownLatch latch = new CountDownLatch(1);
// create the master
- ActorRef master = actorOf(new UntypedActorFactory() {
+ ActorRef master = actorOf(new Props(new UntypedActorFactory() {
public UntypedActor create() {
return new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch);
}
- });
+ }));
// start the calculation
master.tell(new Calculate());
@@ -730,18 +731,12 @@ we compiled ourselves::
$ java \
-cp lib/scala-library.jar:lib/akka/akka-actor-2.0-SNAPSHOT.jar:tutorial \
akka.tutorial.java.first.Pi
- AKKA_HOME is defined as [/Users/jboner/tools/akka-actors-2.0-SNAPSHOT]
- loading config from [/Users/jboner/tools/akka-actors-2.0-SNAPSHOT/config/akka.conf].
Pi estimate: 3.1435501812459323
Calculation time: 822 millis
Yippee! It is working.
-If you have not defined the ``AKKA_HOME`` environment variable then Akka can't
-find the ``akka.conf`` configuration file and will print out a ``Can’t load
-akka.conf`` warning. This is ok since it will then just use the defaults.
-
Run it inside Maven
-------------------
@@ -759,8 +754,6 @@ When this in done we can run our application directly inside Maven::
Yippee! It is working.
-If you have not defined an the ``AKKA_HOME`` environment variable then Akka can't find the ``akka.conf`` configuration file and will print out a ``Can’t load akka.conf`` warning. This is ok since it will then just use the defaults.
-
Conclusion
----------
diff --git a/akka-docs/intro/getting-started-first-scala-eclipse.rst b/akka-docs/intro/getting-started-first-scala-eclipse.rst
index d764e0cdae..da473990d1 100644
--- a/akka-docs/intro/getting-started-first-scala-eclipse.rst
+++ b/akka-docs/intro/getting-started-first-scala-eclipse.rst
@@ -93,7 +93,6 @@ Akka is very modular and has many JARs for containing different features. The co
- ``akka-typed-actor-2.0-SNAPSHOT.jar`` -- Typed Actors
- ``akka-remote-2.0-SNAPSHOT.jar`` -- Remote Actors
- ``akka-stm-2.0-SNAPSHOT.jar`` -- STM (Software Transactional Memory), transactors and transactional datastructures
-- ``akka-http-2.0-SNAPSHOT.jar`` -- Akka Mist for continuation-based asynchronous HTTP and also Jersey integration
- ``akka-slf4j-2.0-SNAPSHOT.jar`` -- SLF4J Event Handler Listener for logging with SLF4J
- ``akka-testkit-2.0-SNAPSHOT.jar`` -- Toolkit for testing Actors
@@ -158,7 +157,7 @@ If you have not already done so, now is the time to create an Eclipse project fo
Using SBT in Eclipse
^^^^^^^^^^^^^^^^^^^^
-If you are an `SBT `_ user, you can follow the :ref:`getting-started-first-scala-download-sbt` instruction and additionally install the ``sbteclipse`` plugin. This adds support for generating Eclipse project files from your SBT project.
+If you are an `SBT `_ user, you can follow the :ref:`getting-started-first-scala-download-sbt` instruction and additionally install the ``sbteclipse`` plugin. This adds support for generating Eclipse project files from your SBT project.
You need to install the plugin as described in the `README of sbteclipse `_
Then run the ``eclipse`` target to generate the Eclipse project::
@@ -254,7 +253,7 @@ Now create a new class for the master actor. The master actor is a little bit mo
and then we can create the workers::
// create the workers
- val workers = Vector.fill(nrOfWorkers)(actorOf[Worker])
+ val workers = Vector.fill(nrOfWorkers)(actorOf(Props[Worker]))
// wrap them with a load-balancing router
val router = Routing.loadBalancerActor(CyclicIterator(workers))
@@ -263,11 +262,11 @@ As you can see we are using the ``actorOf`` factory method to create actors, thi
import akka.actor.Actor.actorOf
-There are two versions of ``actorOf``; one of them taking a actor type and the other one an instance of an actor. The former one (``actorOf[MyActor]``) is used when the actor class has a no-argument constructor while the second one (``actorOf(new MyActor(..))``) is used when the actor class has a constructor that takes arguments. This is the only way to create an instance of an Actor and the ``actorOf`` method ensures this. The latter version is using call-by-name and lazily creates the actor within the scope of the ``actorOf`` method. The ``actorOf`` method instantiates the actor and returns, not an instance to the actor, but an instance to an ``ActorRef``. This reference is the handle through which you communicate with the actor. It is immutable, serializable and location-aware meaning that it "remembers" its original actor even if it is sent to other nodes across the network and can be seen as the equivalent to the Erlang actor's PID.
+There are two versions of ``actorOf``; one of them taking an actor type and the other one an instance of an actor. The former one (``actorOf(Props[MyActor])``) is used when the actor class has a no-argument constructor while the second one (``actorOf(Props(new MyActor(..)))``) is used when the actor class has a constructor that takes arguments. This is the only way to create an instance of an Actor and the ``actorOf`` method ensures this. The latter version is using call-by-name and lazily creates the actor within the scope of the ``actorOf`` method. The ``actorOf`` method instantiates the actor and returns, not an instance to the actor, but an instance to an ``ActorRef``. This reference is the handle through which you communicate with the actor. It is immutable, serializable and location-aware meaning that it "remembers" its original actor even if it is sent to other nodes across the network and can be seen as the equivalent to the Erlang actor's PID.
The actor's life-cycle is:
-- Created -- ``Actor.actorOf[MyActor]`` -- can **not** receive messages
+- Created -- ``Actor.actorOf(Props[MyActor])`` -- can **not** receive messages
- Started -- ``actorRef`` -- can receive messages
- Stopped -- ``actorRef.stop()`` -- can **not** receive messages
@@ -290,7 +289,7 @@ Here is the master actor::
var start: Long = _
// create the workers
- val workers = Vector.fill(nrOfWorkers)(actorOf[Worker])
+ val workers = Vector.fill(nrOfWorkers)(actorOf(Props[Worker]))
// wrap them with a load-balancing router
val router = Routing.loadBalancerActor(CyclicIterator(workers))
@@ -365,8 +364,7 @@ The ``Pi`` object is a perfect container module for our actors and messages, so
val latch = new CountDownLatch(1)
// create the master
- val master = actorOf(
- new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch))
+ val master = actorOf(Props(new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch)))
// start the calculation
master ! Calculate
@@ -383,15 +381,10 @@ Run it from Eclipse
Eclipse builds your project on every save when ``Project/Build Automatically`` is set. If not, bring you project up to date by clicking ``Project/Build Project``. If there are no compilation errors, you can right-click in the editor where ``Pi`` is defined, and choose ``Run as.. /Scala application``. If everything works fine, you should see::
- AKKA_HOME is defined as [/Users/jboner/tools/akka-actors-2.0-SNAPSHOT]
- loading config from [/Users/jboner/tools/akka-actors-2.0-SNAPSHOT/config/akka.conf].
-
Pi estimate: 3.1435501812459323
Calculation time: 858 millis
-If you have not defined an the ``AKKA_HOME`` environment variable then Akka can't find the ``akka.conf`` configuration file and will print out a ``Can’t load akka.conf`` warning. This is ok since it will then just use the defaults.
-
-You can also define a new Run configuration, by going to ``Run/Run Configurations``. Create a new ``Scala application`` and choose the tutorial project and the main class to be ``akkatutorial.Pi``. You can pass additional command line arguments to the JVM on the ``Arguments`` page, for instance to define where ``akka.conf`` is:
+You can also define a new Run configuration, by going to ``Run/Run Configurations``. Create a new ``Scala application`` and choose the tutorial project and the main class to be ``akkatutorial.Pi``. You can pass additional command line arguments to the JVM on the ``Arguments`` page, for instance to define where :ref:`configuration` is:
.. image:: ../images/run-config.png
diff --git a/akka-docs/intro/getting-started-first-scala.rst b/akka-docs/intro/getting-started-first-scala.rst
index 91a730819f..35ac80bcd5 100644
--- a/akka-docs/intro/getting-started-first-scala.rst
+++ b/akka-docs/intro/getting-started-first-scala.rst
@@ -114,7 +114,6 @@ core distribution has seven modules:
- ``akka-typed-actor-2.0-SNAPSHOT.jar`` -- Typed Actors
- ``akka-remote-2.0-SNAPSHOT.jar`` -- Remote Actors
- ``akka-stm-2.0-SNAPSHOT.jar`` -- STM (Software Transactional Memory), transactors and transactional datastructures
-- ``akka-http-2.0-SNAPSHOT.jar`` -- Akka Mist for continuation-based asynchronous HTTP and also Jersey integration
- ``akka-slf4j-2.0-SNAPSHOT.jar`` -- SLF4J Event Handler Listener for logging with SLF4J
- ``akka-testkit-2.0-SNAPSHOT.jar`` -- Toolkit for testing Actors
@@ -304,9 +303,9 @@ imported::
import akka.actor.Actor.actorOf
There are two versions of ``actorOf``; one of them taking a actor type and the
-other one an instance of an actor. The former one (``actorOf[MyActor]``) is used
+other one an instance of an actor. The former one (``actorOf(Props[MyActor])``) is used
when the actor class has a no-argument constructor while the second one
-(``actorOf(new MyActor(..))``) is used when the actor class has a constructor
+(``actorOf(Props(new MyActor(..)))``) is used when the actor class has a constructor
that takes arguments. This is the only way to create an instance of an Actor and
the ``actorOf`` method ensures this. The latter version is using call-by-name
and lazily creates the actor within the scope of the ``actorOf`` method. The
@@ -319,7 +318,7 @@ Erlang actor's PID.
The actor's life-cycle is:
-- Created & Started -- ``Actor.actorOf[MyActor]`` -- can receive messages
+- Created & Started -- ``Actor.actorOf(Props[MyActor])`` -- can receive messages
- Stopped -- ``actorRef.stop()`` -- can **not** receive messages
Once the actor has been stopped it is dead and can not be started again.
@@ -425,19 +424,12 @@ compiled ourselves::
$ java \
-cp lib/scala-library.jar:lib/akka/akka-actor-2.0-SNAPSHOT.jar:. \
akka.tutorial.first.scala.Pi
- AKKA_HOME is defined as [/Users/jboner/tools/akka-actors-2.0-SNAPSHOT]
- loading config from [/Users/jboner/tools/akka-actors-2.0-SNAPSHOT/config/akka.conf].
Pi estimate: 3.1435501812459323
Calculation time: 858 millis
Yippee! It is working.
-If you have not defined the ``AKKA_HOME`` environment variable then Akka can't
-find the ``akka.conf`` configuration file and will print out a ``Can’t load
-akka.conf`` warning. This is ok since it will then just use the defaults.
-
-
Run it inside SBT
=================
@@ -457,11 +449,6 @@ When this in done we can run our application directly inside SBT::
Yippee! It is working.
-If you have not defined an the ``AKKA_HOME`` environment variable then Akka
-can't find the ``akka.conf`` configuration file and will print out a ``Can’t
-load akka.conf`` warning. This is ok since it will then just use the defaults.
-
-
Conclusion
==========
diff --git a/akka-docs/intro/getting-started.rst b/akka-docs/intro/getting-started.rst
index 31d579f3ed..5eeec4a12a 100644
--- a/akka-docs/intro/getting-started.rst
+++ b/akka-docs/intro/getting-started.rst
@@ -49,7 +49,6 @@ Akka is very modular and has many JARs for containing different features.
- ``akka-typed-actor-2.0-SNAPSHOT.jar`` -- Typed Actors
- ``akka-remote-2.0-SNAPSHOT.jar`` -- Remote Actors
- ``akka-stm-2.0-SNAPSHOT.jar`` -- STM (Software Transactional Memory), transactors and transactional datastructures
-- ``akka-http-2.0-SNAPSHOT.jar`` -- Akka Mist for continuation-based asynchronous HTTP and also Jersey integration
- ``akka-slf4j-2.0-SNAPSHOT.jar`` -- SLF4J Event Handler Listener
- ``akka-testkit-2.0-SNAPSHOT.jar`` -- Toolkit for testing Actors
- ``akka-camel-2.0-SNAPSHOT.jar`` -- Apache Camel Actors integration (it's the best way to have your Akka application communicate with the rest of the world)
diff --git a/akka-docs/java/actor-registry.rst b/akka-docs/java/actor-registry.rst
deleted file mode 100644
index 32a5af42c6..0000000000
--- a/akka-docs/java/actor-registry.rst
+++ /dev/null
@@ -1,98 +0,0 @@
-ActorRegistry (Java)
-====================
-
-Module stability: **SOLID**
-
-ActorRegistry: Finding Actors
------------------------------
-
-Actors can be looked up using the 'akka.actor.Actors.registry()' object. Through this registry you can look up actors by:
-
-* uuid com.eaio.uuid.UUID – this uses the ``uuid`` field in the Actor class, returns the actor reference for the actor with specified uuid, if one exists, otherwise None
-* id string – this uses the ``id`` field in the Actor class, which can be set by the user (default is the class name), returns all actor references to actors with specified id
-* parameterized type - returns a ``ActorRef[]`` with all actors that are a subtype of this specific type
-* specific actor class - returns a ``ActorRef[]`` with all actors of this exact class
-
-Actors are automatically registered in the ActorRegistry when they are started and removed when they are stopped. But you can explicitly register and unregister ActorRef's if you need to using the ``register`` and ``unregister`` methods.
-
-Here is a summary of the API for finding actors:
-
-.. code-block:: java
-
- import static akka.actor.Actors.*;
- Option actor = registry().actorFor(uuid);
- ActorRef[] actors = registry().actors();
- ActorRef[] otherActors = registry().actorsFor(id);
- ActorRef[] moreActors = registry().actorsFor(clazz);
-
-You can shut down all Actors in the system by invoking:
-
-.. code-block:: java
-
- registry().shutdownAll();
-
-If you want to know when a new Actor is added to or removed from the registry, you can use the subscription API on the registry. You can register an Actor that should be notified when an event happens in the ActorRegistry:
-
-.. code-block:: java
-
- void addListener(ActorRef listener);
- void removeListener(ActorRef listener);
-
-The messages sent to this Actor are:
-
-.. code-block:: java
-
- public class ActorRegistered {
- ActorRef getActor();
- String getAddress();
- }
-
- public class ActorUnregistered {
- ActorRef actor();
- String getAddress();
- }
-
- public class TypedActorRegistered {
- ActorRef getActor();
- String getAddress();
- Object getProxy();
- }
-
- public class TypedActorUnregistered {
- ActorRef actor();
- String getAddress();
- Object getProxy();
- }
-
-So your listener Actor needs to be able to handle these two messages. Example:
-
-.. code-block:: java
-
- import akka.actor.ActorRegistered;
- import akka.actor.ActorUnregistered;
- import akka.actor.TypedActorRegistered;
- import akka.actor.TypedActorUnregistered;
- import akka.actor.UntypedActor;
- import akka.event.EventHandler;
-
- public class RegistryListener extends UntypedActor {
- public void onReceive(Object message) throws Exception {
- if (message instanceof ActorRegistered) {
- ActorRegistered event = (ActorRegistered) message;
- EventHandler.info(this, String.format("Actor registered: %s - %s",
- event.actor().actorClassName(), event.actor().getUuid()));
- event.actor().actorClassName(), event.actor().getUuid()));
- } else if (message instanceof ActorUnregistered) {
- // ...
- }
- }
- }
-
-The above actor can be added as listener of registry events:
-
-.. code-block:: java
-
- import static akka.actor.Actors.*;
-
- ActorRef listener = actorOf(RegistryListener.class);
- registry().addListener(listener);
diff --git a/akka-docs/java/code/akka/docs/actor/FirstUntypedActor.java b/akka-docs/java/code/akka/docs/actor/FirstUntypedActor.java
new file mode 100644
index 0000000000..d09452f7db
--- /dev/null
+++ b/akka-docs/java/code/akka/docs/actor/FirstUntypedActor.java
@@ -0,0 +1,18 @@
+package akka.docs.actor;
+
+import akka.actor.ActorRef;
+import akka.actor.Props;
+import static akka.actor.Actors.*;
+import akka.actor.UntypedActor;
+
+//#context-actorOf
+public class FirstUntypedActor extends UntypedActor {
+ ActorRef myActor = getContext().actorOf(new Props(MyActor.class));
+
+ //#context-actorOf
+
+ public void onReceive(Object message) {
+ myActor.forward(message, getContext());
+ myActor.tell(poisonPill());
+ }
+}
diff --git a/akka-docs/java/code/akka/docs/actor/ImmutableMessage.java b/akka-docs/java/code/akka/docs/actor/ImmutableMessage.java
new file mode 100644
index 0000000000..20af6de1fb
--- /dev/null
+++ b/akka-docs/java/code/akka/docs/actor/ImmutableMessage.java
@@ -0,0 +1,25 @@
+package akka.docs.actor;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+//#immutable-message
+public class ImmutableMessage {
+ private final int sequenceNumber;
+ private final List values;
+
+ public ImmutableMessage(int sequenceNumber, List values) {
+ this.sequenceNumber = sequenceNumber;
+ this.values = Collections.unmodifiableList(new ArrayList(values));
+ }
+
+ public int getSequenceNumber() {
+ return sequenceNumber;
+ }
+
+ public List getValues() {
+ return values;
+ }
+}
+//#immutable-message
diff --git a/akka-docs/java/code/akka/docs/actor/MyReceivedTimeoutUntypedActor.java b/akka-docs/java/code/akka/docs/actor/MyReceivedTimeoutUntypedActor.java
new file mode 100644
index 0000000000..4d88ab36ca
--- /dev/null
+++ b/akka-docs/java/code/akka/docs/actor/MyReceivedTimeoutUntypedActor.java
@@ -0,0 +1,26 @@
+package akka.docs.actor;
+
+//#receive-timeout
+import akka.actor.Actors;
+import akka.actor.ReceiveTimeout;
+import akka.actor.UnhandledMessageException;
+import akka.actor.UntypedActor;
+import akka.util.Duration;
+
+public class MyReceivedTimeoutUntypedActor extends UntypedActor {
+
+ public MyReceivedTimeoutUntypedActor() {
+ getContext().setReceiveTimeout(Duration.parse("30 seconds"));
+ }
+
+ public void onReceive(Object message) {
+ if (message.equals("Hello")) {
+ getSender().tell("Hello world");
+ } else if (message == Actors.receiveTimeout()) {
+ throw new RuntimeException("received timeout");
+ } else {
+ throw new UnhandledMessageException(message, getSelf());
+ }
+ }
+}
+//#receive-timeout
\ No newline at end of file
diff --git a/akka-docs/java/code/akka/docs/actor/MyUntypedActor.java b/akka-docs/java/code/akka/docs/actor/MyUntypedActor.java
new file mode 100644
index 0000000000..203ad5e596
--- /dev/null
+++ b/akka-docs/java/code/akka/docs/actor/MyUntypedActor.java
@@ -0,0 +1,20 @@
+package akka.docs.actor;
+
+//#my-untyped-actor
+import akka.actor.UntypedActor;
+import akka.actor.UnhandledMessageException;
+import akka.event.Logging;
+import akka.event.LoggingAdapter;
+
+public class MyUntypedActor extends UntypedActor {
+ LoggingAdapter log = Logging.getLogger(getContext().system(), this);
+
+ public void onReceive(Object message) throws Exception {
+ if (message instanceof String)
+ log.info("Received String message: {}", message);
+ else
+ throw new UnhandledMessageException(message, getSelf());
+ }
+}
+//#my-untyped-actor
+
diff --git a/akka-docs/java/code/akka/docs/actor/UntypedActorSwapper.java b/akka-docs/java/code/akka/docs/actor/UntypedActorSwapper.java
new file mode 100644
index 0000000000..cf9e3bfbf7
--- /dev/null
+++ b/akka-docs/java/code/akka/docs/actor/UntypedActorSwapper.java
@@ -0,0 +1,54 @@
+package akka.docs.actor;
+
+import static akka.docs.actor.UntypedActorSwapper.Swap.SWAP;
+import akka.actor.ActorRef;
+import akka.actor.Props;
+import akka.actor.ActorSystem;
+import akka.actor.UnhandledMessageException;
+import akka.actor.UntypedActor;
+import akka.event.Logging;
+import akka.event.LoggingAdapter;
+import akka.japi.Procedure;
+
+//#swapper
+public class UntypedActorSwapper {
+
+ public static class Swap {
+ public static Swap SWAP = new Swap();
+
+ private Swap() {
+ }
+ }
+
+ public static class Swapper extends UntypedActor {
+ LoggingAdapter log = Logging.getLogger(getContext().system(), this);
+
+ public void onReceive(Object message) {
+ if (message == SWAP) {
+ log.info("Hi");
+ getContext().become(new Procedure